diff -pruN 0.3.0-3/CHANGES 2.0.5-0ubuntu2/CHANGES
--- 0.3.0-3/CHANGES	2013-03-01 10:32:27.000000000 +0000
+++ 2.0.5-0ubuntu2/CHANGES	2014-02-04 08:58:41.000000000 +0000
@@ -1,41 +1,164 @@
-CHANGES
-=======
-2013/03/01 (IBM_DB_SA adapter 0.3.0)
-- Add support for LIMIT/OFFSET
-- Add support for savepoints
-- Add support for double-precision floating-point number
-- Fixed reflection for get_view_names and get_view_definition
- 
-2013/02/06
-- Add support for SQLAlchemy 0.7/0.8
-- Refactor code layout
-- Now supporting "db2://" scheme as well as
-  "ibm_db://" for backwards compatibility
-- Add/fix support for explicit sequences
-
- 2011/09/27 (IBM_DB_SA adapter 0.2.1):
- - fix reflection problem
- - support alternate DB2 LUW connection via PyODBC
- - support alternate DB2 i5/OS (iSeries) via PyODBC
- - support alternate DB2 i5/OS (iSeries) via ZxJDBC (Jython)
-
- 2011/08/28 (IBM_DB_SA adapter 0.2.0):
- - Support of SQLAlchemy 0.6/0.7
- - Add Jython support
-
- 2008/11/06 (IBM_DB_SA adapter 0.1.6):
- - fixed Metadata not loading any table info (defect #158705)
- - fixed problems while using different schema names (defect #163785)
- - fixed keyerror in length in visit_function (defect #166292)
+Change Log
+=============
 
-2008/03/28 (IBM_DB_SA adapter 0.1.5):
- - fixed BIGINT driver return issue #5 (defect #150638)
+2007/12/04 (setup.py 0.2.0):
+ - added the ability to create Python Eggs by running: 
+   $ python setup.py bdist_egg
+
+2008/02/08 (setup.py 0.2.5, ibm_db-0.2.5, ibm_db_dbi-0.2.5):
+ - added support for ibm_db_sa-0.1.0 SQLAlchemy-0.4 adapter release
+ 
+2008/02/15 (setup.py 0.2.6, ibm_db-0.2.5, ibm_db_dbi-0.2.5):
+ - fixed .egg setup loading issue #1 (defect #154259) 
+ 
+2008/03/06 (setup.py 0.2.7, ibm_db-0.2.7, ibm_db_dbi-0.2.7):
+ - fixed error handling for execute when connection handle is invalid (defect #151164)
+
+2008/03/28 (setup.py 0.2.8, ibm_db-0.2.8, ibm_db_dbi-0.2.8):
+ - fixed BIGINT return handling issue #5 (defect #150638)
  - fixed autocommit default issue #6 (defect #156919)
  - fixed _get_exception() tuple issue #8 (defect #156925)
- - fixed create_engine DSN support issue (defect #156930)
 
-2008/02/15 (IBM_DB_SA adapter 0.1.1):
- - fixed .egg setup loading issue #1 (defect #154259)
+2008/05/08 (setup.py 0.2.9, ibm_db-0.2.9, ibm_db_dbi-0.2.9):
+ - fixed fetch first N rows
+only optimize for N rows issue #2 (defect #154260)
+ - fixed extra count() query issue #7 (defect #156926)
+ - fixed setup.py exit issue if env not set (defect #158296)
 
-2008/02/08 (IBM_DB_SA adapter 0.1.0):
- - initial alpha release
+2008/07/16 (setup.py 0.2.9.1, ibm_db-0.2.9.1, ibm_db_dbi-0.2.9.1):
+ - fixed Compilation fail on DB2 versions < 9 due to SQL_DECFLOAT declaration ( defect #162728)
+ 
+2008/08/24 (setup.py 0.3.0, ibm_db-0.3.0, ibm_db_dbi-0.3.0):
+ - Added trusted context support in driver.
+ 
+2008/10/08 (setup.py 0.4.0, ibm_db-0.4.0, ibm_db_dbi-0.4.0):
+ - fixed memory leak in fetch_assoc issue #13 (defect #165205)
+ - fixed reference counting errors in _python_ibm_db_bind_fetch_helper issue #14 (defect #167122)
+ - fixed ibm_db_dbi.Connection.tables() returning empty list issue #16 (defect #167059)
+ - adjusted error message if IBM DS environment variables not set.
+
+2008/11/05 (setup.py 0.4.1, ibm_db-0.4.1, ibm_db_dbi-0.4.1):
+ - Added zOS and iSeries(System i) support.
+ 
+2009/02/16 (setup.py 0.6.0, ibm_db-0.6.0, ibm_db_dbi-0.6.0):
+ - New feature - Unicode support for driver and wrapper (defect #166036)
+ - New Feature - DB2 on MacOS support (both Client and Server)
+ - fixed Metadata not loading table info (defect #158705)
+ - fixed problem of real datatype (defect #168619)
+ - Python 2.6 support on driver and wrapper (defect #169629)
+ - decfloat support (defect #95795)
+ - Code clean up and formatting.
+ - Test cases renamed to reflect their functions.
+ 
+2009/03/16 (setup.py 0.7.0, ibm_db-0.7.0, ibm_db_dbi-0.7.0):
+ - Fixed Graphic column returning nil problem (defect #172150)
+ - Removed warnings while building driver (defect #163787)
+ - Fixed ibm_db.set_option() failing if unicode is passed to dictionary. (defect #172432)
+ - Code clean up and formatting. 
+ 
+2009/05/11 (setup.py 0.7.2, ibm_db-0.7.2, ibm_db_dbi-0.7.2):
+ - Memory cleanup - Fixed a lot of issues leading to memory leaks.
+ - New Feature - Added support for SQLRowCount to return correct results on select queries.
+   DB2 supports this from DB2 V95FP3 onwards. See test cases 17,18,19 for samples. (defect #173237)
+ 
+2009/05/27 (setup.py 0.7.2.1, ibm_db-0.7.2.1, ibm_db_dbi-0.7.2.1):
+ - Fixed Backward Compatibility issue with SQLROWCOUNT (defect #185120)
+
+2009/06/25 (setup.py 0.7.2.5, ibm_db-0.7.2.5, ibm_db_dbi-0.7.2.5):
+ - Support for inserting BigInt numbers in bigInt type column with prepare statement (defect #184705)
+ 
+2009/09/03 (setup.py 0.8.0, ibm_db-0.8.0, ibm_db_dbi-0.8.0):
+ - Added new API "callproc" to support stored procedure (defect#183022)-  Refer test cases for samples and "http://code.google.com/p/ibm-db/wiki/APIs" for API Usage
+ - support OUT variable in stored procedure
+ - Changes in Test cases (test_146_CallSPINAndOUTParams.py, test_148_CallSPDiffBindPattern_01.py, test_52949_TestSPIntVarcharXml.py) for calling strored procedure through new API "callproc"
+ - Removed warnings while building driver
+
+2009/10/14 (setup.py 1.0, ibm_db-1.0, ibm_db_dbi-1.0):
+ - New Feature - added support for UCS-4 build python (defect #174236)
+ - Removed memory leak in callproc API (defect #191526)
+ - Added support for datetime/time,buffer object type in callproc API (defect #191029)
+ - Removed error in executing statement through wrapper when result set contains LOBs/xml data
+
+2010/03/25 (setup.py 1.0.1, ibm_db-1.0.1, ibm_db_dbi-1.0.1):
+ - Fixed reading LONGVARCHAR columns (defect #197044)
+ - Fixed reading BINARY Data (defect #204272)
+ - Fixed reading rows in which CLOB column has NULL value (defect #204273)
+ - Fixed connect-segfault(with None value and 4th agrument is not a Dictionary type) (defect #204271)
+ - Fixed distinction between INT and BIGINT in Field Type return(defect #204278)
+ - New Feature - added new method in ibm_db_dbi wrapper "pconnect"  to establish persistance connection (defect #204278)
+
+2010/04/28 (setup.py 1.0.2, ibm_db-1.0.2, ibm_db_dbi-1.0.2)
+ - Fixed issues with Integer argument in AIX (defect #197018)
+ - Fixed Performance issues with dbi wrapper (defect #206854) 
+
+2010/08/06 (setup.py 1.0.3, ibm_db-1.0.3, ibm_db_dbi-1.0.3)
+ - Added Decimal object support in ibm_db Driver (defect #212225)
+ - Added test case for Decimal object Support
+ - Enhanced Performance by Allowing other threads for concurrent operation during time consuming db calls (defect #208042)
+
+2011/05/17 (setup.py 1.0.4, ibm_db-1.0.4, ibm_db_dbi-1.0.4)
+ - Added new API execute_many in ibm_db Driver(defect #225893)
+ - Added new test case for ibm_db's execute_many API
+ - Modified executemany function of ibm_db_dbi adaptor to use ibm_db's execute_many API(defect #225894)
+ - Fixed cursor.description for fetching multiple resultset(defect #225938)
+ - Fixed error handling for insertion of clob data in ibm_db Driver(defect #225892)
+ 
+2011/09/06 (setup.py 1.0.5, ibm_db-1.0.5, ibm_db_dbi-1.0.5)
+ - Added new API's createdb, dropdb, recreatedb, createdbNX in ibm_db Driver and ibm_db_dbi wrapper (defect #231289 & #231293)
+ - Added new test cases for createdb, dropdb, recreatedb, createdbNX APIs (defect #231289)
+ - Fixed execute_many API doesn't gives relevent error message when non homogeneous array value arguments passed(defect #231294)
+ - Modified test_execute_many.py test case to test ERROR message also. (defect #231294)
+ 
+2012/05/24 (setup.py 1.0.6, ibm_db-1.0.6, ibm_db_dbi-1.0.6)
+  - Fixed: Python crashed instead of raising Error message when trying to insert non numeric string to Integer Field on executemany of dbi wrapper ( issue #86)
+  - Added an option in connect and pconnect API to turned OFF CLI numeric literal feature. (issue #87)
+  - Added a messages attribute in cursor object of dbi wrapper to store error and warning messages. Through this we can now return the partial list retrieved by fetch**() function and the error messages also. (issue #91, 95)
+  - Fixed: cursor.description gives incorrect value upon consecutive calls for INSERT/DELETE (issue #94)
+  - Fixed: Segmentation fault when retrieving value of DBCLOB of very large dimensions (issue#100)
+  - Fixed: DeprecationWarning: BaseException.message has been deprecated as of Python 2.6 (issue#101)
+  - Fixed: The column type SQL_WLONGVARCHAR is unsupported and will cause a segfault when returned. (issue#102)
+  - Made autocommit OFF as default in dbi wrapper(issue #103)
+  - Added runtime version check through __version__ attribute to in ibm_db and dbi wrapper(issue #105)
+  - Removed duplicate cursor constructor and enhanced the 1st constructor itself with 2nd constructor property (issue #108)
+  - Enhanced execute_many API to accept NULL mixed with other values (defect #240445)
+  - Fixed: OverflowError on Long Integer in Decimal-Field (defect #240445)
+  - Fixed: Stored procedure truncates string-parameter (defect #240445)
+  - Added a environment variable IBM_DB_HOME to make the installation process easy (defect #240445)
+  
+2012/09/21 (setup.py 2.0.0, ibm_db-2.0.0, ibm_db_dbi-2.0.0)
+  - New Feature - Added Python-3.x support for driver and wrapper (defect #243457)
+  - Fixed: Resource leak over the lifetime of connection objects in wrapper (issue #120)
+  - Enhanced: Strict type checking for connection and statement object.
+  - Fixed: Allow trial license to connect ZOS/i5(defect #243556)
+  
+2013/02/03 (setup.py 2.0.1, ibm_db-2.0.1, ibm_db_dbi-2.0.1)
+  - Fixed: Retrieving NULL CLOB/XML/BLOB values(issue #121, #125)
+  - Fixed: free_stmt() not able to free stmt resources(issue #124)
+  
+2013/03/04 (setup.py 2.0.2, ibm_db-2.0.2, ibm_db_dbi-2.0.2)
+  - Fixed: Memory leak with LONGVARCHAR (defect #248442)
+  - Fixed: Handling of CLOB datatype in execute_many function (defect #248442)
+  - Enhanced: Keeps weakref of open cursor in connection's _cursor_list (defect #248442)
+  - Enhanced: dbi cursor is now compatible to the iteration protocol (defect #248442)
+  
+2013/06/17 (setup.py 2.0.3, ibm_db-2.0.3, ibm_db_dbi-2.0.3)
+  - Fixed: Performance issue due to datetime, date, time conversion inside dbi wrapper. ibm_db driver now returns datetime, date, time object for TIMESTAMP, DATE, TIME field type correspondingly instead of string object
+  - Enhanced: ibm_db driver are now capable to handle datetime, date, time object
+  - Fixed: Now quoted literal replacement off by default
+  - Enhanced: Added a new method in ibm_db to check indicated column in result set is nullable or not, cursor's description of ibm_db_dbi now keep the nullable column info also
+  - Fixed: callproc api now return long type object instead of string for BIGINT field type
+  - Added: New test case test_InsertRetrieveDateTimeTypeColumn.py for handling of datetime object in ibm_db driver
+  
+2013/09/11 (setup.py 2.0.4, ibm_db-2.0.4, ibm_db_dbi-2.0.4)
+  - Fixed: ISO8601 datetime with 'T' delimiter (defect #495080)
+  - Fixed: Build error for Python-3.3.x with Visual Studio (defect #495080)
+  
+2013/09/25 (setup.py 2.0.4.1, ibm_db-2.0.4.1, ibm_db_dbi-2.0.4.1)
+  - Fixed: unicode ISO8601 datetime with 'T' delimiter  (defect #495340)
+  - Modified test_6792_FieldTypeRetStrDatetimeTimestamp.py test case to add test for ISO8601 with 'T' delimiter (defect #495340)
+
+2014/02/04 (setup.py 2.0.5, ibm_db-2.0.5, ibm_db_dbi-2.0.5)
+ - Enhanced: Performance of query involving LOBs fields (defect #498081)
+ - Added: SQL_ATTR_INFO_PROGRAMNAME connection attribute (issue #144)
+ - Fixed: Unhashable type issue with 'DBAPITypeObject' (issue #145)
+ - Fixed: Tests adjustments for IBM Informix (issue #139)  
\ No newline at end of file
diff -pruN 0.3.0-3/config.py 2.0.5-0ubuntu2/config.py
--- 0.3.0-3/config.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/config.py	2014-02-03 06:13:31.000000000 +0000
@@ -0,0 +1,12 @@
+test_dir	=	'tests'		# Location of testsuite file (relative to current directory) (Don't change this.)
+
+database	=	'sample'	# Database to connect to. Please use an empty database for best results.
+user		=	'db2inst1'	# User ID to connect with
+password	=	'password'	# Password for given User ID
+hostname	=	'localhost'	# Hostname
+port		=	50000		# Port Number
+
+auth_user	=	'auth_user'	# Authentic user of Database
+auth_pass	=	'auth_pass'	# Password for Authentic user
+tc_user		=	'tc_user'	# Trusted user
+tc_pass		=	'tc_pass'	# Password to trusted user
diff -pruN 0.3.0-3/debian/changelog 2.0.5-0ubuntu2/debian/changelog
--- 0.3.0-3/debian/changelog	2016-11-28 10:56:45.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/changelog	2015-07-24 08:42:36.000000000 +0000
@@ -1,17 +1,15 @@
-python-ibm-db-sa (0.3.0-3) unstable; urgency=medium
+python-ibm-db-sa (2.0.5-0ubuntu2) wily; urgency=medium
 
-  * Fixed homepage (https).
-  * Fixed VCS URLs (https).
-  * d/copyright: Changed source URL to https protocol
-  * Added dh-python to B-D
+  * No change rebuild with SQLAlchemy 1.0.6.
 
- -- Ondřej Nový <onovy@debian.org>  Mon, 28 Nov 2016 11:56:45 +0100
+ -- James Page <james.page@ubuntu.com>  Fri, 24 Jul 2015 09:42:32 +0100
 
-python-ibm-db-sa (0.3.0-2) unstable; urgency=medium
+python-ibm-db-sa (2.0.5-0ubuntu1) utopic; urgency=medium
 
-  * Rebuilt with python-sqlalchemy 1.0.6.
+  * New upstream release.
+  * Added dep-8 tests.
 
- -- Thomas Goirand <zigo@debian.org>  Fri, 26 Jun 2015 22:25:48 +0000
+ -- Chuck Short <zulcss@ubuntu.com>  Thu, 01 May 2014 12:49:36 -0400
 
 python-ibm-db-sa (0.3.0-1) unstable; urgency=low
 
diff -pruN 0.3.0-3/debian/control 2.0.5-0ubuntu2/debian/control
--- 0.3.0-3/debian/control	2016-11-28 10:55:57.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/control	2014-05-01 17:27:42.000000000 +0000
@@ -1,22 +1,24 @@
 Source: python-ibm-db-sa
 Section: python
 Priority: optional
-Maintainer: PKG OpenStack <openstack-devel@lists.alioth.debian.org>
+Maintainer: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
+XSBC-Original-Maintainer: PKG OpenStack <openstack-devel@lists.alioth.debian.org>
 Uploaders: Julien Danjou <acid@debian.org>,
            Thomas Goirand <zigo@debian.org>,
            Mehdi Abaakouk <sileht@sileht.net>
 Build-Depends: debhelper (>= 9),
-               dh-python,
                python-all (>= 2.6.6-3~),
                python-nose,
                python-setuptools,
+               python-sqlalchemy,
                python3-all,
                python3-nose,
                python3-setuptools
 Standards-Version: 3.9.5
-Vcs-Browser: https://anonscm.debian.org/cgit/openstack/python-ibm-db-sa.git/
-Vcs-Git: https://anonscm.debian.org/git/openstack/python-ibm-db-sa.git
-Homepage: https://pypi.python.org/pypi/ibm_db_sa/
+Vcs-Browser: http://anonscm.debian.org/gitweb/?p=openstack/python-ibm-db-sa.git
+Vcs-Git: git://anonscm.debian.org/openstack/python-ibm-db-sa.git
+Homepage: http://pypi.python.org/pypi/ibm_db_sa/
+XS-Testsuite: autopkgtest
 
 Package: python-ibm-db-sa
 Architecture: all
diff -pruN 0.3.0-3/debian/copyright 2.0.5-0ubuntu2/debian/copyright
--- 0.3.0-3/debian/copyright	2016-11-28 10:51:51.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/copyright	2014-02-25 06:27:06.000000000 +0000
@@ -1,7 +1,7 @@
 Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
 Upstream-Name: ibm_db_sa
 Upstream-Contact: IBM Application Development Team <opendev@us.ibm.com>
-Source: https://pypi.python.org/pypi/ibm_db/
+Source: http://pypi.python.org/pypi/ibm_db/
 
 Files: debian/*
 Copyright: (c) 2014, Thomas Goirand <zigo@debian.org>
diff -pruN 0.3.0-3/debian/gbp.conf 2.0.5-0ubuntu2/debian/gbp.conf
--- 0.3.0-3/debian/gbp.conf	2016-11-28 10:51:51.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/gbp.conf	2014-02-25 06:27:06.000000000 +0000
@@ -3,7 +3,7 @@ upstream-branch = upstream-unstable
 debian-branch = debian-unstable
 pristine-tar = True
 
-[buildpackage]
+[git-buildpackage]
 export-dir = ../build-area/
 
 [git-import-orig]
diff -pruN 0.3.0-3/debian/tests/control 2.0.5-0ubuntu2/debian/tests/control
--- 0.3.0-3/debian/tests/control	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/tests/control	2014-05-01 17:28:44.000000000 +0000
@@ -0,0 +1,3 @@
+Tests: python-ibm-db-sa
+Depends: @, python-nose
+Restrictions: allow-stderr
diff -pruN 0.3.0-3/debian/tests/python-ibm-db-sa 2.0.5-0ubuntu2/debian/tests/python-ibm-db-sa
--- 0.3.0-3/debian/tests/python-ibm-db-sa	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/debian/tests/python-ibm-db-sa	2014-05-01 17:29:23.000000000 +0000
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+set -e -u
+python tests.py
diff -pruN 0.3.0-3/.gitignore 2.0.5-0ubuntu2/.gitignore
--- 0.3.0-3/.gitignore	2013-02-27 17:30:18.000000000 +0000
+++ 2.0.5-0ubuntu2/.gitignore	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-*.pyc
-*.swp
-*.orig
-build
-tmp
-dist
-.venv
-ibm_db_sa.egg-info/
-.coverage
diff -pruN 0.3.0-3/ibm_db.c 2.0.5-0ubuntu2/ibm_db.c
--- 0.3.0-3/ibm_db.c	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db.c	2014-02-04 07:06:11.000000000 +0000
@@ -0,0 +1,10547 @@
+/*
++--------------------------------------------------------------------------+
+| Licensed Materials - Property of IBM                                     |
+|                                                                          |
+| (C) Copyright IBM Corporation 2006-2013                                  |
++--------------------------------------------------------------------------+ 
+| This module complies with SQLAlchemy 0.4 and is                          |
+| Licensed under the Apache License, Version 2.0 (the "License");          |
+| you may not use this file except in compliance with the License.         |
+| You may obtain a copy of the License at                                  |
+| http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
+| law or agreed to in writing, software distributed under the License is   |
+| distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
+| KIND, either express or implied. See the License for the specific        |
+| language governing permissions and limitations under the License.        |
++--------------------------------------------------------------------------+
+| Authors: Manas Dadarkar, Salvador Ledezma, Sushant Koduru,               |
+|   Lynh Nguyen, Kanchana Padmanabhan, Dan Scott, Helmut Tessarek,         |
+|   Sam Ruby, Kellen Bombardier, Tony Cairns, Abhigyan Agrawal,            |
+|   Tarun Pasrija, Rahul Priyadarshi                                       |
++--------------------------------------------------------------------------+
+*/
+
+#define MODULE_RELEASE "2.0.5"
+
+#include <Python.h>
+#include <datetime.h>
+#include "ibm_db.h"
+#include <ctype.h>
+#ifdef _WIN32
+#include <windows.h>
+#else
+#include <dlfcn.h>
+#endif
+
+/* True global resources - no need for thread safety here */
+static struct _ibm_db_globals *ibm_db_globals;
+
+static void _python_ibm_db_check_sql_errors( SQLHANDLE handle, SQLSMALLINT hType, int rc, int cpy_to_global, char* ret_str, int API, SQLSMALLINT recno );
+static int _python_ibm_db_assign_options( void* handle, int type, long opt_key, PyObject *data );
+static SQLWCHAR* getUnicodeDataAsSQLWCHAR(PyObject *pyobj, int *isNewBuffer);
+static PyObject* getSQLWCharAsPyUnicodeObject(SQLWCHAR* sqlwcharData, int sqlwcharBytesLen);
+
+const int _check_i = 1;
+#define is_bigendian() ( (*(char*)&_check_i) == 0 )
+static int is_systemi, is_informix;	  /* 1 == TRUE; 0 == FALSE; */
+#ifdef _WIN32
+#define DLOPEN LoadLibrary
+#define DLSYM GetProcAddress
+#define DLCLOSE FreeLibrary
+#define LIBDB2 "db2cli.dll"
+#elif _AIX
+#define DLOPEN dlopen
+#define DLSYM dlsym
+#define DLCLOSE dlclose
+#define LIBDB2 "libdb2.a"
+#else
+#define DLOPEN dlopen
+#define DLSYM dlsym
+#define DLCLOSE dlclose
+#define LIBDB2 "libdb2.so.1"
+#endif
+
+/* Defines a linked list structure for error messages */
+typedef struct _error_msg_node {
+	char err_msg[DB2_MAX_ERR_MSG_LEN];
+	struct _error_msg_node *next;
+} error_msg_node;
+
+/* Defines a linked list structure for caching param data */
+typedef struct _param_cache_node {
+	SQLSMALLINT data_type;			/* Datatype */
+	SQLUINTEGER param_size;			/* param size */
+	SQLSMALLINT nullable;			/* is Nullable */
+	SQLSMALLINT scale;			/* Decimal scale */
+	SQLUINTEGER file_options;		/* File options if PARAM_FILE */
+	SQLINTEGER	bind_indicator;		/* indicator variable for SQLBindParameter */
+	int		param_num;		/* param number in stmt */
+	int		param_type;		/* Type of param - INP/OUT/INP-OUT/FILE */
+	int		size;			/* Size of param */
+	char	*varname;			/* bound variable name */
+	PyObject  *var_pyvalue;			/* bound variable value */
+	SQLINTEGER	  ivalue;		/* Temp storage value */
+	double	fvalue;				/* Temp storage value */
+	char	  *svalue;			/* Temp storage value */
+	SQLWCHAR *uvalue;			/* Temp storage value */
+	DATE_STRUCT *date_value;		/* Temp storage value */
+	TIME_STRUCT *time_value;		/* Temp storage value */
+	TIMESTAMP_STRUCT *ts_value;		/* Temp storage value */
+	struct _param_cache_node *next;		/* Pointer to next node */
+} param_node;
+
+typedef struct _conn_handle_struct {
+	PyObject_HEAD
+	SQLHANDLE henv;
+	SQLHANDLE hdbc;
+	long auto_commit;
+	long c_bin_mode;
+	long c_case_mode;
+	long c_cursor_type;
+	int handle_active;
+	SQLSMALLINT error_recno_tracker;
+	SQLSMALLINT errormsg_recno_tracker;
+	int flag_pconnect; /* Indicates that this connection is persistent */
+} conn_handle;
+
+static void _python_ibm_db_free_conn_struct(conn_handle *handle);
+
+static PyTypeObject conn_handleType = {
+	  PyVarObject_HEAD_INIT(NULL, 0)
+	  "ibm_db.IBM_DBConnection",			 /*tp_name*/
+	  sizeof(conn_handle),					/*tp_basicsize*/
+	  0,									 /*tp_itemsize*/
+	  (destructor)_python_ibm_db_free_conn_struct, /*tp_dealloc*/
+	  0,									 /*tp_print*/
+	  0,									 /*tp_getattr*/
+	  0,									 /*tp_setattr*/
+	  0,									 /*tp_compare*/
+	  0,									 /*tp_repr*/
+	  0,									 /*tp_as_number*/
+	  0,									 /*tp_as_sequence*/
+	  0,									 /*tp_as_mapping*/
+	  0,									 /*tp_hash */
+	  0,									 /*tp_call*/
+	  0,									 /*tp_str*/
+	  0,									 /*tp_getattro*/
+	  0,									 /*tp_setattro*/
+	  0,									 /*tp_as_buffer*/
+	  Py_TPFLAGS_DEFAULT,					/*tp_flags*/
+	  "IBM DataServer connection object",	/* tp_doc */
+	  0,									 /* tp_traverse */
+	  0,									 /* tp_clear */
+	  0,									 /* tp_richcompare */
+	  0,									 /* tp_weaklistoffset */
+	  0,									 /* tp_iter */
+	  0,									 /* tp_iternext */
+	  0,									 /* tp_methods */
+	  0,									 /* tp_members */
+	  0,									 /* tp_getset */
+	  0,									 /* tp_base */
+	  0,									 /* tp_dict */
+	  0,									 /* tp_descr_get */
+	  0,									 /* tp_descr_set */
+	  0,									 /* tp_dictoffset */
+	  0,									 /* tp_init */
+};
+
+
+
+typedef union {
+	SQLINTEGER i_val;
+	SQLDOUBLE d_val;
+	SQLFLOAT f_val;
+	SQLSMALLINT s_val;
+	SQLCHAR *str_val;
+	SQLREAL r_val;
+	SQLWCHAR *w_val;
+	TIMESTAMP_STRUCT *ts_val;
+	DATE_STRUCT *date_val;
+	TIME_STRUCT *time_val;
+} ibm_db_row_data_type;
+
+
+typedef struct {
+	SQLINTEGER out_length;
+	ibm_db_row_data_type data;
+} ibm_db_row_type;
+
+typedef struct _ibm_db_result_set_info_struct {
+	SQLCHAR	*name;
+	SQLSMALLINT type;
+	SQLUINTEGER size;
+	SQLSMALLINT scale;
+	SQLSMALLINT nullable;
+	unsigned char *mem_alloc;  /* Mem free */
+} ibm_db_result_set_info;
+
+typedef struct _row_hash_struct {
+	PyObject *hash;
+} row_hash_struct;
+
+typedef struct _stmt_handle_struct {
+	PyObject_HEAD
+	SQLHANDLE hdbc;
+	SQLHANDLE hstmt;
+	long s_bin_mode;
+	long cursor_type;
+	long s_case_mode;
+	SQLSMALLINT error_recno_tracker;
+	SQLSMALLINT errormsg_recno_tracker;
+
+	/* Parameter Caching variables */
+	param_node *head_cache_list;
+	param_node *current_node;
+
+	int num_params;		  /* Number of Params */
+	int file_param;		  /* if option passed in is FILE_PARAM */
+	int num_columns;
+	ibm_db_result_set_info *column_info;
+	ibm_db_row_type *row_data;
+} stmt_handle;
+
+static void _python_ibm_db_free_stmt_struct(stmt_handle *handle);
+
+static PyTypeObject stmt_handleType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"ibm_db.IBM_DBStatement", /*tp_name			 */
+	sizeof(stmt_handle), /*tp_basicsize			 */
+	0,						 /*tp_itemsize		*/
+	(destructor)_python_ibm_db_free_stmt_struct, /*tp_dealloc	*/
+	0,						 /*tp_print			*/
+	0,						 /*tp_getattr		 */
+	0,						 /*tp_setattr		 */
+	0,						 /*tp_compare		 */
+	0,						 /*tp_repr			*/
+	0,						 /*tp_as_number		*/
+	0,						 /*tp_as_sequence	 */
+	0,						 /*tp_as_mapping	  */
+	0,						 /*tp_hash			*/
+	0,						 /*tp_call			*/
+	0,						 /*tp_str			 */
+	0,						 /*tp_getattro		*/
+	0,						 /*tp_setattro		*/
+	0,						 /*tp_as_buffer		*/
+	Py_TPFLAGS_DEFAULT,		/*tp_flags			*/
+	"IBM DataServer cursor object", /* tp_doc		*/
+	0,						 /* tp_traverse		*/
+	0,						 /* tp_clear		  */
+	0,						 /* tp_richcompare	*/
+	0,						 /* tp_weaklistoffset */
+	0,						 /* tp_iter			*/
+	0,						 /* tp_iternext		*/
+	0,						 /* tp_methods		*/
+	0,						 /* tp_members		*/
+	0,						 /* tp_getset		 */
+	0,						 /* tp_base			*/
+	0,						 /* tp_dict			*/
+	0,						 /* tp_descr_get	  */
+	0,						 /* tp_descr_set	  */
+	0,						 /* tp_dictoffset	 */
+	0,						 /* tp_init			*/
+};
+
+/* equivalent functions on different platforms */
+#ifdef _WIN32
+#define STRCASECMP stricmp
+#else
+#define STRCASECMP strcasecmp
+#endif
+
+static void python_ibm_db_init_globals(struct _ibm_db_globals *ibm_db_globals) {
+	/* env handle */
+	ibm_db_globals->bin_mode = 1;
+
+	memset(ibm_db_globals->__python_conn_err_msg, 0, DB2_MAX_ERR_MSG_LEN);
+	memset(ibm_db_globals->__python_stmt_err_msg, 0, DB2_MAX_ERR_MSG_LEN);
+	memset(ibm_db_globals->__python_conn_err_state, 0, SQL_SQLSTATE_SIZE + 1);
+	memset(ibm_db_globals->__python_stmt_err_state, 0, SQL_SQLSTATE_SIZE + 1);
+}
+
+static PyObject *persistent_list;
+
+char *estrdup(char *data) {
+	int len = strlen(data);
+	char *dup = ALLOC_N(char, len+1);
+	if ( dup == NULL ) {
+		PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		return NULL;
+	}
+	strcpy(dup, data);
+	return dup;
+} 
+	
+char *estrndup(char *data, int max) {
+	int len = strlen(data);
+	char *dup;
+	if (len > max){
+		len = max;
+	}
+	dup = ALLOC_N(char, len+1);
+	if ( dup == NULL ) {
+		PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		return NULL;
+	}
+	strcpy(dup, data);
+	return dup;
+} 
+	
+char *strtolower(char *data, int max) {
+	while (max--){
+		data[max] = tolower(data[max]);
+	}
+	return data;
+} 
+	
+char *strtoupper(char *data, int max) {
+	while (max--){
+		data[max] = toupper(data[max]);
+	}
+	return data;
+} 
+
+/*	static void _python_ibm_db_free_conn_struct */
+static void _python_ibm_db_free_conn_struct(conn_handle *handle) {
+
+	/* Disconnect from DB. If stmt is allocated, it is freed automatically */
+	if ( handle->handle_active && !handle->flag_pconnect) {
+		if(handle->auto_commit == 0){
+			SQLEndTran(SQL_HANDLE_DBC, (SQLHDBC)handle->hdbc, SQL_ROLLBACK);
+		}
+		SQLDisconnect((SQLHDBC)handle->hdbc);
+		SQLFreeHandle(SQL_HANDLE_DBC, handle->hdbc);
+		SQLFreeHandle(SQL_HANDLE_ENV, handle->henv);
+	}
+	Py_TYPE(handle)->tp_free((PyObject*)handle);
+}
+
+/*	static void _python_ibm_db_free_row_struct */
+/*
+ * static void _python_ibm_db_free_row_struct(row_hash_struct *handle) {
+ *  free(handle);
+ * }
+ */
+
+/*	static void _python_ibm_db_free_result_struct(stmt_handle* handle) */
+static void _python_ibm_db_free_result_struct(stmt_handle* handle) {
+	int i;
+	param_node *curr_ptr = NULL, *prev_ptr = NULL;
+
+	if ( handle != NULL ) {
+		/* Free param cache list */
+		curr_ptr = handle->head_cache_list;
+		prev_ptr = handle->head_cache_list;
+
+		while (curr_ptr != NULL) {
+			curr_ptr = curr_ptr->next;
+			if (prev_ptr->varname) {
+				PyMem_Del(prev_ptr->varname);
+				prev_ptr->varname = NULL;
+			}
+			if (prev_ptr->svalue){ 
+				PyMem_Del(prev_ptr->svalue);
+				prev_ptr->svalue = NULL;
+			}
+			if (prev_ptr->uvalue){ 
+				PyMem_Del(prev_ptr->uvalue);
+				prev_ptr->uvalue = NULL;
+			}
+			PyMem_Del(prev_ptr);
+
+			prev_ptr = curr_ptr;
+		}
+		handle->head_cache_list = NULL;
+		/* free row data cache */
+		if (handle->row_data) {
+			for (i = 0; i<handle->num_columns; i++) {
+				switch (handle->column_info[i].type) {
+					case SQL_CHAR:
+					case SQL_VARCHAR:
+					case SQL_LONGVARCHAR:
+					case SQL_WCHAR:
+					case SQL_WVARCHAR:
+					case SQL_GRAPHIC:
+					case SQL_VARGRAPHIC:
+					case SQL_LONGVARGRAPHIC:
+					case SQL_BIGINT:
+					case SQL_DECIMAL:
+					case SQL_NUMERIC:
+					case SQL_XML:
+					case SQL_DECFLOAT:
+						if ( handle->row_data[i].data.str_val != NULL ) {
+							PyMem_Del(handle->row_data[i].data.str_val);
+							handle->row_data[i].data.str_val = NULL;
+						}
+						if ( handle->row_data[i].data.w_val != NULL ) {
+							PyMem_Del(handle->row_data[i].data.w_val);
+							handle->row_data[i].data.w_val = NULL;
+						}
+						break;
+					case SQL_TYPE_TIMESTAMP:
+						if ( handle->row_data[i].data.ts_val != NULL ) {
+							PyMem_Del(handle->row_data[i].data.ts_val);
+							handle->row_data[i].data.ts_val = NULL;
+						}
+						break;
+					case SQL_TYPE_DATE:
+						if ( handle->row_data[i].data.date_val != NULL ) {
+							PyMem_Del(handle->row_data[i].data.date_val);
+							handle->row_data[i].data.date_val = NULL;
+						}
+						break;
+					case SQL_TYPE_TIME:
+						if ( handle->row_data[i].data.time_val != NULL ) {
+							PyMem_Del(handle->row_data[i].data.time_val);
+							handle->row_data[i].data.time_val = NULL;
+						}
+						break;
+				}
+			}
+			PyMem_Del(handle->row_data);
+			handle->row_data = NULL;
+		}
+
+		/* free column info cache */
+		if ( handle->column_info ) {
+			for (i = 0; i<handle->num_columns; i++) {
+				PyMem_Del(handle->column_info[i].name);
+				/* Mem free */
+				if(handle->column_info[i].mem_alloc){
+					PyMem_Del(handle->column_info[i].mem_alloc);
+				}
+			}
+			PyMem_Del(handle->column_info);
+			handle->column_info = NULL;
+			handle->num_columns = 0;
+		}
+	}
+}
+
+/* static stmt_handle *_ibm_db_new_stmt_struct(conn_handle* conn_res) */	
+static stmt_handle *_ibm_db_new_stmt_struct(conn_handle* conn_res) {
+	stmt_handle *stmt_res;
+
+	stmt_res = PyObject_NEW(stmt_handle, &stmt_handleType);
+	/* memset(stmt_res, 0, sizeof(stmt_handle)); */
+
+	/* Initialize stmt resource so parsing assigns updated options if needed */
+	stmt_res->hdbc = conn_res->hdbc;
+	stmt_res->s_bin_mode = conn_res->c_bin_mode;
+	stmt_res->cursor_type = conn_res->c_cursor_type;
+	stmt_res->s_case_mode = conn_res->c_case_mode;
+
+	stmt_res->head_cache_list = NULL;
+	stmt_res->current_node = NULL;
+
+	stmt_res->num_params = 0;
+	stmt_res->file_param = 0;
+
+	stmt_res->column_info = NULL;
+	stmt_res->num_columns = 0;
+
+	stmt_res->error_recno_tracker = 1;
+	stmt_res->errormsg_recno_tracker = 1;
+
+	stmt_res->row_data = NULL;
+
+	return stmt_res;
+}
+
+/*	static _python_ibm_db_free_stmt_struct */
+static void _python_ibm_db_free_stmt_struct(stmt_handle *handle) {
+	if ( handle->hstmt != -1 ) {
+		SQLFreeHandle( SQL_HANDLE_STMT, handle->hstmt);
+		if ( handle ) {
+			_python_ibm_db_free_result_struct(handle);
+		}
+	}
+	Py_TYPE(handle)->tp_free((PyObject*)handle);
+}
+
+/*	static void _python_ibm_db_init_error_info(stmt_handle *stmt_res) */
+static void _python_ibm_db_init_error_info(stmt_handle *stmt_res) {
+	stmt_res->error_recno_tracker = 1;
+	stmt_res->errormsg_recno_tracker = 1;
+}
+
+/*	static void _python_ibm_db_check_sql_errors( SQLHANDLE handle, SQLSMALLINT hType, int rc, int cpy_to_global, char* ret_str, int API SQLSMALLINT recno)
+*/
+/* Retrieve diagnostic record `recno` for `handle` (of type `hType`),
+ * format it as "<message> SQLCODE=<code>", and fan the result out to up
+ * to three sinks depending on the arguments:
+ *   - cpy_to_global != 0: raise the text as a Python exception, and on
+ *     rc == SQL_ERROR also cache sqlstate/message in the module-level
+ *     conn/stmt error buffers;
+ *   - API == DB2_ERR / DB2_ERRMSG with ret_str != NULL: copy the
+ *     sqlstate or the formatted message into ret_str and return early.
+ * NOTE(review): the PyErr_SetString below fires whenever a diagnostic
+ * record exists and cpy_to_global is set -- including non-error return
+ * codes such as SQL_SUCCESS_WITH_INFO; confirm this is intended.
+ */
+static void _python_ibm_db_check_sql_errors( SQLHANDLE handle, SQLSMALLINT hType, int rc, int cpy_to_global, char* ret_str, int API, SQLSMALLINT recno )
+{
+	SQLCHAR msg[SQL_MAX_MESSAGE_LENGTH + 1];
+	SQLCHAR sqlstate[SQL_SQLSTATE_SIZE + 1];
+	SQLCHAR errMsg[DB2_MAX_ERR_MSG_LEN];
+	SQLINTEGER sqlcode;
+	SQLSMALLINT length;
+	char *p;
+
+	memset(errMsg, '\0', DB2_MAX_ERR_MSG_LEN);
+	memset(msg, '\0', SQL_MAX_MESSAGE_LENGTH + 1);
+	if ( SQLGetDiagRec(hType, handle, recno, sqlstate, &sqlcode, msg,
+		SQL_MAX_MESSAGE_LENGTH + 1, &length ) == SQL_SUCCESS) {
+
+			/* truncate the CLI message at its first newline */
+			while ((p = strchr( (char *)msg, '\n' ))) {
+				*p = '\0';
+			}
+			sprintf((char*)errMsg, "%s SQLCODE=%d", (char*)msg, (int)sqlcode);
+			if (cpy_to_global != 0) {
+				PyErr_SetString(PyExc_Exception, (char *) errMsg);
+			}
+
+			switch (rc) {
+				case SQL_ERROR:
+					/* Need to copy the error msg and sqlstate into the symbol Table 
+					* to cache these results */
+					if ( cpy_to_global ) {
+						/* hType selects which global cache to fill */
+						switch (hType) {
+							case SQL_HANDLE_DBC:
+								strncpy(IBM_DB_G(__python_conn_err_state), (char*)sqlstate, SQL_SQLSTATE_SIZE+1);
+								strncpy(IBM_DB_G(__python_conn_err_msg), (char*)errMsg, DB2_MAX_ERR_MSG_LEN);
+								break;
+
+							case SQL_HANDLE_STMT:
+								strncpy(IBM_DB_G(__python_stmt_err_state), (char*)sqlstate, SQL_SQLSTATE_SIZE+1);
+								strncpy(IBM_DB_G(__python_stmt_err_msg), (char*)errMsg, DB2_MAX_ERR_MSG_LEN);
+								break;
+						}
+					}
+					/* This call was made from ibm_db_errmsg or ibm_db_error */
+					/* Check for error and return */
+					switch (API) {
+						case DB2_ERR:
+							if ( ret_str != NULL ) {
+								strncpy(ret_str, (char*)sqlstate, SQL_SQLSTATE_SIZE+1);
+							}
+							return;
+						case DB2_ERRMSG:
+							if ( ret_str != NULL ) {
+								strncpy(ret_str, (char*)errMsg, DB2_MAX_ERR_MSG_LEN);
+							}
+							return;
+						default:
+							break;
+					}
+					break;
+				default:
+					break;
+			}
+		}
+}
+
+/*	static int _python_ibm_db_assign_options( void *handle, int type, long opt_key, PyObject *data )
+ *
+ * Apply a single option (opt_key -> data) to a connection or statement
+ * handle.  ATTR_CASE (column-name case folding) is a driver-local
+ * attribute stored directly on the handle; every other key is forwarded
+ * to CLI via SQLSetStmtAttr(W) / SQLSetConnectAttr(W), converting
+ * string values to SQLWCHAR first.
+ * Returns 0 on success, -1 with a Python exception set on error.
+ */
+static int _python_ibm_db_assign_options( void *handle, int type, long opt_key, PyObject *data )
+{
+	int rc = 0;
+	long option_num = 0;
+	SQLWCHAR *option_str = NULL;
+	int isNewBuffer = 0;	/* BUGFIX: previously uninitialized */
+
+	/* First check to see if it is a non-cli attribute */
+	if (opt_key == ATTR_CASE) {
+		option_num = NUM2LONG(data);
+		if (type == SQL_HANDLE_STMT) {
+			switch (option_num) {
+				case CASE_LOWER:
+					((stmt_handle*)handle)->s_case_mode = CASE_LOWER;
+					break;
+				case CASE_UPPER:
+					((stmt_handle*)handle)->s_case_mode = CASE_UPPER;
+					break;
+				case CASE_NATURAL:
+					((stmt_handle*)handle)->s_case_mode = CASE_NATURAL;
+					break;
+				default:
+					PyErr_SetString(PyExc_Exception, "ATTR_CASE attribute must be one of CASE_LOWER, CASE_UPPER, or CASE_NATURAL");
+					return -1;
+			}
+		} else if (type == SQL_HANDLE_DBC) {
+			switch (option_num) {
+				case CASE_LOWER:
+					((conn_handle*)handle)->c_case_mode = CASE_LOWER;
+					break;
+				case CASE_UPPER:
+					((conn_handle*)handle)->c_case_mode = CASE_UPPER;
+					break;
+				case CASE_NATURAL:
+					((conn_handle*)handle)->c_case_mode = CASE_NATURAL;
+					break;
+				default:
+					PyErr_SetString(PyExc_Exception, "ATTR_CASE attribute must be one of CASE_LOWER, CASE_UPPER, or CASE_NATURAL");
+					return -1;
+			}
+		} else {
+			PyErr_SetString(PyExc_Exception, "Connection or statement handle must be passed in.");
+			return -1;
+		}
+	} else if (type == SQL_HANDLE_STMT) {
+		if (PyString_Check(data)|| PyUnicode_Check(data)) {
+			data = PyUnicode_FromObject(data);
+			option_str = getUnicodeDataAsSQLWCHAR(data, &isNewBuffer);
+			rc = SQLSetStmtAttrW((SQLHSTMT)((stmt_handle *)handle)->hstmt, opt_key, (SQLPOINTER)option_str, SQL_IS_INTEGER );
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors((SQLHSTMT)((stmt_handle *)handle)->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			}
+			if (isNewBuffer)
+				PyMem_Del(option_str);
+
+		} else {
+			option_num = NUM2LONG(data);
+			/* BUGFIX: the original cast this stmt_handle to conn_handle to
+			 * record SQL_ATTR_AUTOCOMMIT state here, scribbling on
+			 * unrelated statement fields; autocommit tracking belongs to
+			 * the SQL_HANDLE_DBC branch only. */
+			rc = SQLSetStmtAttr((SQLHSTMT)((stmt_handle *)handle)->hstmt, opt_key, (SQLPOINTER)option_num, SQL_IS_INTEGER );
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors((SQLHSTMT)((stmt_handle *)handle)->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			}
+		}
+	} else if (type == SQL_HANDLE_DBC) {
+		if (PyString_Check(data)|| PyUnicode_Check(data)) {
+			data = PyUnicode_FromObject(data);
+			option_str = getUnicodeDataAsSQLWCHAR(data, &isNewBuffer);
+			rc = SQLSetConnectAttrW((SQLHDBC)((conn_handle*)handle)->hdbc, opt_key, (SQLPOINTER)option_str, SQL_NTS);
+			if ( rc == SQL_ERROR ) {
+				/* BUGFIX: report diagnostics against the connection handle;
+				 * the original read hstmt off a bogus stmt_handle cast. */
+				_python_ibm_db_check_sql_errors((SQLHDBC)((conn_handle*)handle)->hdbc, SQL_HANDLE_DBC, rc, 1, NULL, -1, 1);
+			}
+			if (isNewBuffer)
+				PyMem_Del(option_str);
+
+		} else {
+			option_num = NUM2LONG(data);
+			/* Mirror the CLI autocommit state on the handle so
+			 * ibm_db.autocommit() can report it without a server call */
+			if (opt_key == SQL_ATTR_AUTOCOMMIT && option_num == SQL_AUTOCOMMIT_OFF) ((conn_handle*)handle)->auto_commit = 0;
+			else if (opt_key == SQL_ATTR_AUTOCOMMIT && option_num == SQL_AUTOCOMMIT_ON) ((conn_handle*)handle)->auto_commit = 1;
+			rc = SQLSetConnectAttrW((SQLHDBC)((conn_handle*)handle)->hdbc, opt_key, (SQLPOINTER)option_num, SQL_IS_INTEGER);
+			if ( rc == SQL_ERROR ) {
+				/* BUGFIX: connection handle + SQL_HANDLE_DBC here too */
+				_python_ibm_db_check_sql_errors((SQLHDBC)((conn_handle*)handle)->hdbc, SQL_HANDLE_DBC, rc, 1, NULL, -1, 1);
+			}
+		}
+	} else {
+		PyErr_SetString(PyExc_Exception, "Connection or statement handle must be passed in.");
+		return -1;
+	}
+	return 0;
+}
+
+/*	static int _python_ibm_db_parse_options( PyObject *options, int type, void *handle)
+ *
+ * Walk the user-supplied options dict and apply each key/value pair to
+ * the given handle via _python_ibm_db_assign_options().  The trusted-
+ * context password entry is held back and applied after all other
+ * options (original ordering preserved).
+ * Returns SQL_SUCCESS, or SQL_ERROR with a Python exception set.
+ */
+static int _python_ibm_db_parse_options ( PyObject *options, int type, void *handle )
+{
+	int numOpts = 0, i = 0;
+	PyObject *keys = NULL;
+	PyObject *key = NULL; /* Holds the Option Index Key */
+	PyObject *data = NULL;
+	PyObject *tc_pass = NULL;
+	int rc = 0;
+
+	if ( !NIL_P(options) ) {
+		keys = PyDict_Keys(options);
+		numOpts = PyList_Size(keys);
+
+		for ( i = 0; i < numOpts; i++) {
+			key = PyList_GetItem(keys, i);
+			data = PyDict_GetItem(options, key);
+
+			if(NUM2LONG(key) == SQL_ATTR_TRUSTED_CONTEXT_PASSWORD) {
+				/* defer so the password is assigned last */
+				tc_pass = data;
+			} else {
+				/* Assign options to handle. */
+				/* Sets the options in the handle with CLI/ODBC calls */
+				rc = _python_ibm_db_assign_options(handle, type, NUM2LONG(key), data);
+			}
+			if (rc) {
+				/* BUGFIX: PyDict_Keys returns a new reference that the
+				 * original leaked on every exit path */
+				Py_DECREF(keys);
+				return SQL_ERROR;
+			}
+		}
+		Py_DECREF(keys);
+		if (!NIL_P(tc_pass) ) {
+			rc = _python_ibm_db_assign_options(handle, type, SQL_ATTR_TRUSTED_CONTEXT_PASSWORD, tc_pass);
+		}
+		if (rc)
+			return SQL_ERROR;
+	}
+	return SQL_SUCCESS;
+}
+
+/*	static int _python_ibm_db_get_result_set_info(stmt_handle *stmt_res)
+initialize the result set information of each column. This must be done once.
+Returns 0 on success, -1 with a Python exception set on failure.
+*/
+static int _python_ibm_db_get_result_set_info(stmt_handle *stmt_res)
+{
+	int rc = -1, i;
+	SQLSMALLINT nResultCols = 0, name_length;
+	SQLCHAR tmp_name[BUFSIZ];
+
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLNumResultCols((SQLHSTMT)stmt_res->hstmt, &nResultCols);
+	Py_END_ALLOW_THREADS;
+
+	if ( rc == SQL_ERROR || nResultCols == 0) {
+		_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+			SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+		return -1;
+	}
+	stmt_res->num_columns = nResultCols;
+	stmt_res->column_info = ALLOC_N(ibm_db_result_set_info, nResultCols);
+	if ( stmt_res->column_info == NULL ) {
+		PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		return -1;
+	}
+	memset(stmt_res->column_info, 0, sizeof(ibm_db_result_set_info)*nResultCols);
+	/* return a set of attributes for a column */
+	for (i = 0 ; i < nResultCols; i++) {
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLDescribeCol((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT)(i + 1 ),
+			(SQLCHAR *)&tmp_name, BUFSIZ, &name_length,
+			&stmt_res->column_info[i].type,
+			&stmt_res->column_info[i].size,
+			&stmt_res->column_info[i].scale,
+			&stmt_res->column_info[i].nullable);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+				SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			return -1;
+		}
+		if ( name_length <= 0 ) {
+			/* anonymous column (e.g. an unnamed expression) */
+			stmt_res->column_info[i].name = (SQLCHAR *)estrdup("");
+			if ( stmt_res->column_info[i].name == NULL ) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return -1;
+			}
+
+		} else if (name_length >= BUFSIZ ) {
+			/* column name is longer than BUFSIZ; allocate an exact-size
+			 * buffer and describe the column again */
+			stmt_res->column_info[i].name = (SQLCHAR*)ALLOC_N(char, name_length+1);
+			if ( stmt_res->column_info[i].name == NULL ) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return -1;
+			}
+
+			Py_BEGIN_ALLOW_THREADS;
+			/* BUGFIX: pass the full buffer size (name_length+1); the
+			 * original passed name_length, which made SQLDescribeCol
+			 * truncate the final byte of the column name */
+			rc = SQLDescribeCol((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT)(i + 1),
+				stmt_res->column_info[i].name, name_length+1,
+				&name_length, &stmt_res->column_info[i].type,
+				&stmt_res->column_info[i].size,
+				&stmt_res->column_info[i].scale,
+				&stmt_res->column_info[i].nullable);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+					SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+				return -1;
+			}
+		} else {
+			stmt_res->column_info[i].name = (SQLCHAR*)estrdup((char*)tmp_name);
+			if ( stmt_res->column_info[i].name == NULL ) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return -1;
+			}
+
+		}
+	}
+	return 0;
+}
+
+/*	static int _python_ibm_db_bind_column_helper(stmt_handle *stmt_res)
+	bind columns to data, this must be done once.
+	Allocates a per-column output buffer sized from the describe info and
+	binds it with SQLBindCol; LOB/XML columns are deliberately left
+	unbound (fetched piecewise later).  Returns the last SQLBindCol rc,
+	or -1 with a Python exception set on allocation failure.
+*/
+static int _python_ibm_db_bind_column_helper(stmt_handle *stmt_res)
+{
+	SQLINTEGER in_length = 0;
+	SQLSMALLINT column_type;
+	ibm_db_row_data_type *row_data;
+	int i, rc = SQL_SUCCESS;
+
+	stmt_res->row_data = ALLOC_N(ibm_db_row_type, stmt_res->num_columns);
+	if ( stmt_res->row_data == NULL ) {
+		PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		return -1;
+	}
+	memset(stmt_res->row_data, 0, sizeof(ibm_db_row_type)*stmt_res->num_columns);
+
+	for (i = 0; i<stmt_res->num_columns; i++) {
+		column_type = stmt_res->column_info[i].type;
+		row_data = &stmt_res->row_data[i].data;
+		switch(column_type) {
+			case SQL_CHAR:
+			case SQL_VARCHAR:
+			case SQL_LONGVARCHAR:
+			case SQL_WCHAR:
+			case SQL_WVARCHAR:
+			case SQL_GRAPHIC:
+			case SQL_VARGRAPHIC:
+			case SQL_LONGVARGRAPHIC:
+				/* +1 for the terminating NUL */
+				in_length = stmt_res->column_info[i].size+1;
+				row_data->w_val = (SQLWCHAR *) ALLOC_N(SQLWCHAR, in_length);
+				/* BUGFIX: this allocation was previously unchecked */
+				if ( row_data->w_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				/* release the GIL around the CLI call, consistent with
+				 * every other case in this switch */
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_WCHAR, row_data->w_val, in_length * sizeof(SQLWCHAR),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL,
+						-1, 1);
+				}
+				break;
+
+			case SQL_BINARY:
+			case SQL_LONGVARBINARY:
+			case SQL_VARBINARY:
+				if ( stmt_res->s_bin_mode == CONVERT ) {
+					/* hex conversion: two chars per byte, plus NUL */
+					in_length = 2*(stmt_res->column_info[i].size)+1;
+					row_data->str_val = (SQLCHAR *)ALLOC_N(char, in_length);
+					if ( row_data->str_val == NULL ) {
+						PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+						return -1;
+					}
+
+					Py_BEGIN_ALLOW_THREADS;
+					rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+						SQL_C_CHAR, row_data->str_val, in_length,
+						(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+					Py_END_ALLOW_THREADS;
+
+					if ( rc == SQL_ERROR ) {
+						_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+							SQL_HANDLE_STMT, rc, 1, NULL,
+							-1, 1);
+					}
+				} else {
+					in_length = stmt_res->column_info[i].size+1;
+					row_data->str_val = (SQLCHAR *)ALLOC_N(char, in_length);
+					if ( row_data->str_val == NULL ) {
+						PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+						return -1;
+					}
+
+					Py_BEGIN_ALLOW_THREADS;
+					rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+						SQL_C_DEFAULT, row_data->str_val, in_length,
+						(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+					Py_END_ALLOW_THREADS;
+
+					if ( rc == SQL_ERROR ) {
+						_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+							SQL_HANDLE_STMT, rc, 1, NULL,
+							-1, 1);
+					}
+				}
+				break;
+
+			case SQL_BIGINT:
+			case SQL_DECFLOAT:
+				/* fetched as character data: +2 for sign and NUL */
+				in_length = stmt_res->column_info[i].size+2;
+				row_data->str_val = (SQLCHAR *)ALLOC_N(char, in_length);
+				if ( row_data->str_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_CHAR, row_data->str_val, in_length,
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+				}
+				break;
+
+			case SQL_TYPE_DATE:
+				row_data->date_val = ALLOC(DATE_STRUCT);
+				if ( row_data->date_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_TYPE_DATE, row_data->date_val, sizeof(DATE_STRUCT),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+				}
+				break;
+
+			case SQL_TYPE_TIME:
+				row_data->time_val = ALLOC(TIME_STRUCT);
+				if ( row_data->time_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_TYPE_TIME, row_data->time_val, sizeof(TIME_STRUCT),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+				}
+				break;
+
+			case SQL_TYPE_TIMESTAMP:
+				row_data->ts_val = ALLOC(TIMESTAMP_STRUCT);
+				if ( row_data->ts_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				/* BUGFIX: bind ts_val (the member just allocated); the
+				 * original passed row_data->time_val here */
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_TYPE_TIMESTAMP, row_data->ts_val, sizeof(TIMESTAMP_STRUCT),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+					return -1;
+				}
+				break;
+
+			case SQL_SMALLINT:
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_DEFAULT, &row_data->s_val,
+					sizeof(row_data->s_val),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_INTEGER:
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_DEFAULT, &row_data->i_val,
+					sizeof(row_data->i_val),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_REAL:
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_FLOAT, &row_data->r_val,
+					sizeof(row_data->r_val),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_FLOAT:
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_DEFAULT, &row_data->f_val,
+					sizeof(row_data->f_val),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_DOUBLE:
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_DEFAULT, &row_data->d_val,
+					sizeof(row_data->d_val),
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_DECIMAL:
+			case SQL_NUMERIC:
+				/* precision + scale digits, sign, decimal point, NUL */
+				in_length = stmt_res->column_info[i].size +
+					stmt_res->column_info[i].scale + 2 + 1;
+				row_data->str_val = (SQLCHAR *)ALLOC_N(char, in_length);
+				if ( row_data->str_val == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return -1;
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindCol((SQLHSTMT)stmt_res->hstmt, (SQLUSMALLINT)(i+1),
+					SQL_C_CHAR, row_data->str_val, in_length,
+					(SQLINTEGER *)(&stmt_res->row_data[i].out_length));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt,
+						SQL_HANDLE_STMT, rc, 1, NULL, -1,
+						1);
+				}
+				break;
+
+			case SQL_BLOB:
+			case SQL_CLOB:
+			case SQL_DBCLOB:
+			case SQL_XML:
+				/* LOB/XML columns are not bound; data is retrieved later */
+				stmt_res->row_data[i].out_length = 0;
+				break;
+
+			default:
+				break;
+		}
+	}
+	return rc;
+}
+
+/*	static void _python_ibm_db_clear_stmt_err_cache ()
+ * Wipe any statement-level diagnostics cached by a previous call. */
+static void _python_ibm_db_clear_stmt_err_cache(void)
+{
+	memset(IBM_DB_G(__python_stmt_err_state), 0, SQL_SQLSTATE_SIZE + 1);
+	memset(IBM_DB_G(__python_stmt_err_msg), 0, DB2_MAX_ERR_MSG_LEN);
+}
+
+/*	static int _python_ibm_db_connect_helper( argc, argv, isPersistent )
+ *
+ * Shared implementation of ibm_db.connect() / ibm_db.pconnect().
+ * Parses (database, uid, password[, options[, literal_replacement]]),
+ * reuses a cached persistent connection when one exists, otherwise
+ * allocates ENV/DBC handles and connects via SQLDriverConnectW (when
+ * the dsn contains '=') or SQLConnectW.
+ * Returns a new conn_handle object, or NULL with an exception set.
+ */
+static PyObject *_python_ibm_db_connect_helper( PyObject *self, PyObject *args, int isPersistent )
+{
+	PyObject *databaseObj = NULL;
+	PyObject *uidObj = NULL;
+	PyObject *passwordObj = NULL;
+	SQLWCHAR *database = NULL;
+	SQLWCHAR *uid = NULL;
+	SQLWCHAR *password = NULL;
+	PyObject *options = NULL;
+	PyObject *literal_replacementObj = NULL;
+	SQLINTEGER literal_replacement;
+	PyObject *equal = StringOBJ_FromASCII("=");
+	int rc = 0;
+	SQLINTEGER conn_alive;
+	conn_handle *conn_res = NULL;
+	int reused = 0;
+	PyObject *hKey = NULL;
+	PyObject *entry = NULL;
+	char server[2048];
+	/* BUGFIX: must start as 0 -- when a persistent connection is reused
+	 * the connect branch below is skipped and this flag was previously
+	 * read uninitialized at cleanup time */
+	int isNewBuffer = 0;
+
+	conn_alive = 1;
+
+	if (!PyArg_ParseTuple(args, "OOO|OO", &databaseObj, &uidObj, &passwordObj, &options, &literal_replacementObj)){
+		return NULL;
+	}
+	do {
+		databaseObj = PyUnicode_FromObject(databaseObj);
+		uidObj = PyUnicode_FromObject(uidObj);
+		passwordObj = PyUnicode_FromObject(passwordObj);
+
+		/* Check if we already have a connection for this userID & database
+		* combination
+		*/
+		if (isPersistent) {
+			hKey = PyUnicode_Concat(StringOBJ_FromASCII("__ibm_db_"), uidObj);
+			hKey = PyUnicode_Concat(hKey, databaseObj);
+			hKey = PyUnicode_Concat(hKey, passwordObj);
+
+			entry = PyDict_GetItem(persistent_list, hKey);
+
+			if (entry != NULL) {
+				Py_INCREF(entry);
+				conn_res = (conn_handle *)entry;
+#ifndef PASE /* i5/OS server mode is persistent */
+				/* Need to reinitialize connection? */
+				rc = SQLGetConnectAttr(conn_res->hdbc, SQL_ATTR_PING_DB,
+					(SQLPOINTER)&conn_alive, 0, NULL);
+				/* NOTE(review): diagnostics are fetched on the success path
+				 * here, and `reused` is also set unconditionally below --
+				 * confirm that is intended */
+				if ( (rc == SQL_SUCCESS) && conn_alive ) {
+					_python_ibm_db_check_sql_errors( conn_res->hdbc, SQL_HANDLE_DBC,
+						rc, 1, NULL, -1, 1);
+					reused = 1;
+				} /* else will re-connect since connection is dead */
+#endif /* PASE */
+				reused = 1;
+			}
+		} else {
+			/* Need to check for max pconnections? */
+		}
+
+		if ( !NIL_P(literal_replacementObj) ) {
+			literal_replacement = (SQLINTEGER) PyInt_AsLong(literal_replacementObj);
+		} else {
+			literal_replacement = SET_QUOTED_LITERAL_REPLACEMENT_OFF; /*QUOTED LITERAL replacement is OFF by default*/
+		}
+
+		if (conn_res == NULL) {
+			conn_res = PyObject_NEW(conn_handle, &conn_handleType);
+			conn_res->henv = 0;
+			conn_res->hdbc = 0;
+		}
+
+		/* We need to set this early, in case we get an error below,
+		so we know how to free the connection */
+		conn_res->flag_pconnect = isPersistent;
+		/* Allocate ENV handles if not present */
+		if ( !conn_res->henv ) {
+			rc = SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &(conn_res->henv));
+			if (rc != SQL_SUCCESS) {
+				_python_ibm_db_check_sql_errors( conn_res->henv, SQL_HANDLE_ENV, rc,
+					1, NULL, -1, 1);
+				break;
+			}
+			rc = SQLSetEnvAttr((SQLHENV)conn_res->henv, SQL_ATTR_ODBC_VERSION,
+				(void *)SQL_OV_ODBC3, 0);
+		}
+
+		if (!reused) {
+			/* Alloc CONNECT Handle */
+			rc = SQLAllocHandle(SQL_HANDLE_DBC, conn_res->henv, &(conn_res->hdbc));
+			if (rc != SQL_SUCCESS) {
+				_python_ibm_db_check_sql_errors(conn_res->henv, SQL_HANDLE_ENV, rc,
+					1, NULL, -1, 1);
+				break;
+			}
+		}
+
+		/* Set this after the connection handle has been allocated to avoid
+		unnecessary network flows. Initialize the structure to default values */
+		conn_res->auto_commit = SQL_AUTOCOMMIT_ON;
+		rc = SQLSetConnectAttr((SQLHDBC)conn_res->hdbc, SQL_ATTR_AUTOCOMMIT,
+			(SQLPOINTER)(conn_res->auto_commit), SQL_NTS);
+
+		conn_res->c_bin_mode = IBM_DB_G(bin_mode);
+		conn_res->c_case_mode = CASE_NATURAL;
+		conn_res->c_cursor_type = SQL_SCROLL_FORWARD_ONLY;
+
+		conn_res->error_recno_tracker = 1;
+		conn_res->errormsg_recno_tracker = 1;
+
+		/* handle not active as of yet */
+		conn_res->handle_active = 0;
+
+		/* Set Options */
+		if ( !NIL_P(options) ) {
+			if(!PyDict_Check(options)) {
+				/* BUGFIX: corrected "dictionay" typo in the error message */
+				PyErr_SetString(PyExc_Exception, "options Parameter must be of type dictionary");
+				return NULL;
+			}
+			rc = _python_ibm_db_parse_options( options, SQL_HANDLE_DBC, conn_res );
+			if (rc != SQL_SUCCESS) {
+				SQLFreeHandle(SQL_HANDLE_DBC, conn_res->hdbc);
+				SQLFreeHandle(SQL_HANDLE_ENV, conn_res->henv);
+				break;
+			}
+		}
+
+		if (! reused) {
+			/* Connect */
+			/* If the string contains a =, use SQLDriverConnect */
+			if (NIL_P(databaseObj)) {
+				PyErr_SetString(PyExc_Exception, "Supplied Parameter is invalid");
+				return NULL;
+			}
+			database = getUnicodeDataAsSQLWCHAR(databaseObj, &isNewBuffer);
+			if ( PyUnicode_Contains(databaseObj, equal) > 0 ) {
+				rc = SQLDriverConnectW((SQLHDBC)conn_res->hdbc, (SQLHWND)NULL,
+					database, SQL_NTS, NULL, 0, NULL,
+					SQL_DRIVER_NOPROMPT );
+			} else {
+				if (NIL_P(uidObj) || NIL_P(passwordObj)) {
+					PyErr_SetString(PyExc_Exception, "Supplied Parameter is invalid");
+					return NULL;
+				}
+				/* sharing one isNewBuffer flag across all three conversions
+				 * is safe: the flag depends only on sys.maxunicode */
+				uid = getUnicodeDataAsSQLWCHAR(uidObj, &isNewBuffer);
+				password = getUnicodeDataAsSQLWCHAR(passwordObj, &isNewBuffer);
+				rc = SQLConnectW((SQLHDBC)conn_res->hdbc,
+					database,
+					PyUnicode_GetSize(databaseObj),
+					uid,
+					PyUnicode_GetSize(uidObj),
+					password,
+					PyUnicode_GetSize(passwordObj));
+			}
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc,
+					1, NULL, -1, 1);
+				SQLFreeHandle(SQL_HANDLE_DBC, conn_res->hdbc);
+				SQLFreeHandle(SQL_HANDLE_ENV, conn_res->henv);
+				break;
+			}
+
+#ifdef CLI_DBC_SERVER_TYPE_DB2LUW
+#ifdef SQL_ATTR_DECFLOAT_ROUNDING_MODE
+			/**
+			* Code for setting SQL_ATTR_DECFLOAT_ROUNDING_MODE
+			* for implementation of Decfloat Datatype
+			*/
+
+			rc = _python_ibm_db_set_decfloat_rounding_mode_client(conn_res->hdbc);
+			if (rc != SQL_SUCCESS){
+				_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc,
+					1, NULL, -1, 1);
+			}
+#endif
+#endif
+
+			/* Get the server name */
+			memset(server, 0, sizeof(server));
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLGetInfo(conn_res->hdbc, SQL_DBMS_NAME, (SQLPOINTER)server,
+				2048, NULL);
+			Py_END_ALLOW_THREADS;
+
+			if (!strcmp(server, "AS")) is_systemi = 1;
+			if (!strncmp(server, "IDS", 3)) is_informix = 1;
+
+			/* Set SQL_ATTR_REPLACE_QUOTED_LITERALS connection attribute to
+			* enable CLI numeric literal feature. This is equivalent to
+			* PATCH2=71 in the db2cli.ini file
+			* Note, for backward compatibility with older CLI drivers having a
+			* different value for SQL_ATTR_REPLACE_QUOTED_LITERALS, we call
+			* SQLSetConnectAttr() with both the old and new value
+			*/
+			/* Only enable this feature if we are not connected to an Informix data
+			* server
+			*/
+			if (!is_informix && (literal_replacement == SET_QUOTED_LITERAL_REPLACEMENT_ON)) {
+				rc = SQLSetConnectAttr((SQLHDBC)conn_res->hdbc,
+					SQL_ATTR_REPLACE_QUOTED_LITERALS,
+					(SQLPOINTER) (ENABLE_NUMERIC_LITERALS),
+					SQL_IS_INTEGER);
+				if (rc != SQL_SUCCESS)
+					rc = SQLSetConnectAttr((SQLHDBC)conn_res->hdbc,
+					SQL_ATTR_REPLACE_QUOTED_LITERALS_OLDVALUE,
+					(SQLPOINTER)(ENABLE_NUMERIC_LITERALS),
+					SQL_IS_INTEGER);
+			}
+			if (rc != SQL_SUCCESS) {
+				_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc,
+					1, NULL, -1, 1);
+			}
+		}
+		Py_XDECREF(databaseObj);
+		Py_XDECREF(uidObj);
+		Py_XDECREF(passwordObj);
+		conn_res->handle_active = 1;
+	} while (0);
+
+	if (hKey != NULL) {
+		if (! reused && rc == SQL_SUCCESS) {
+			/* If we created a new persistent connection, add it to the
+			*  persistent_list
+			*/
+			PyDict_SetItem(persistent_list, hKey, (PyObject *)conn_res);
+		}
+		Py_DECREF(hKey);
+	}
+
+	if (isNewBuffer) {
+		/* uid/password stay NULL on the SQLDriverConnectW path;
+		 * PyMem_Del(NULL) is a documented no-op */
+		PyMem_Del(database);
+		PyMem_Del(uid);
+		PyMem_Del(password);
+	}
+
+	if ( rc != SQL_SUCCESS ) {
+		if (conn_res != NULL && conn_res->handle_active) {
+			rc = SQLFreeHandle(SQL_HANDLE_DBC, conn_res->hdbc);
+			rc = SQLFreeHandle(SQL_HANDLE_ENV, conn_res->henv);
+		}
+		if (conn_res != NULL) {
+			PyObject_Del(conn_res);
+		}
+		return NULL;
+	}
+	return (PyObject *)conn_res;
+}
+
+
+/**
+* This function takes a SQLWCHAR buffer (UCS-2) and returns back a PyUnicode
+* object of it that is in the correct current UCS encoding (either UCS2 or
+* UCS4) of the current executing python VM
+*
+* @sqlwcharBytesLen - the length of sqlwcharData in bytes (not characters)
+**/
+static PyObject* getSQLWCharAsPyUnicodeObject(SQLWCHAR* sqlwcharData, int sqlwcharBytesLen)
+{
+	PyObject *sysmodule = NULL, *maxuni = NULL;
+	long maxuniValue;
+	PyObject* u;
+	sysmodule = PyImport_ImportModule("sys");
+	maxuni = PyObject_GetAttrString(sysmodule, "maxunicode");
+	maxuniValue = PyInt_AsLong(maxuni);
+	/* BUGFIX: release the two references taken above; they previously
+	 * leaked on every call */
+	Py_XDECREF(maxuni);
+	Py_XDECREF(sysmodule);
+
+	if (maxuniValue <= 65536) {
+		/* this is UCS2 python.. nothing to do really */
+		return PyUnicode_FromUnicode((Py_UNICODE *)sqlwcharData, sqlwcharBytesLen / sizeof(SQLWCHAR));
+	}
+
+	/* UCS4 build: decode the UCS-2 bytes as UTF-16 in host byte order */
+	if (is_bigendian()) {
+		int bo = 1;
+		u = PyUnicode_DecodeUTF16((char *)sqlwcharData, sqlwcharBytesLen, "strict", &bo);
+	} else {
+		int bo = -1;
+		u = PyUnicode_DecodeUTF16((char *)sqlwcharData, sqlwcharBytesLen, "strict", &bo);
+	}
+	return u;
+}
+
+
+/**
+* This function takes a PyUnicode value and converts it to a SQLWCHAR
+* (UCS-2) buffer.  *isNewBuffer is set to 1 when the returned pointer
+* was freshly allocated and must be released with PyMem_Del by the
+* caller, 0 when it aliases the object's internal buffer.
+**/
+static SQLWCHAR* getUnicodeDataAsSQLWCHAR(PyObject *pyobj, int *isNewBuffer)
+{
+	PyObject *sysmodule = NULL, *maxuni = NULL;
+	long maxuniValue;
+	PyObject *pyUTFobj;
+	SQLWCHAR* pNewBuffer = NULL;
+	int nCharLen = PyUnicode_GET_SIZE(pyobj);
+
+	sysmodule = PyImport_ImportModule("sys");
+	maxuni = PyObject_GetAttrString(sysmodule, "maxunicode");
+	maxuniValue = PyInt_AsLong(maxuni);
+	/* BUGFIX: release the two references taken above; they previously
+	 * leaked on every call */
+	Py_XDECREF(maxuni);
+	Py_XDECREF(sysmodule);
+
+	if (maxuniValue <= 65536) {
+		/* UCS2 build: Py_UNICODE is already 2 bytes wide, reuse in place */
+		*isNewBuffer = 0;
+		return (SQLWCHAR*)PyUnicode_AS_UNICODE(pyobj);
+	}
+
+	/* UCS4 build: re-encode down to 2-byte units in host byte order */
+	*isNewBuffer = 1;
+	pNewBuffer = (SQLWCHAR *)ALLOC_N(SQLWCHAR, nCharLen + 1);
+	if (pNewBuffer == NULL) {
+		/* BUGFIX: the original memset/memcpy'd a NULL buffer on OOM */
+		*isNewBuffer = 0;
+		PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		return NULL;
+	}
+	memset(pNewBuffer, 0, sizeof(SQLWCHAR) * (nCharLen + 1));
+	if (is_bigendian()) {
+		pyUTFobj = PyCodec_Encode(pyobj, "utf-16-be", "strict");
+	} else {
+		pyUTFobj = PyCodec_Encode(pyobj, "utf-16-le", "strict");
+	}
+	memcpy(pNewBuffer, PyBytes_AsString(pyUTFobj), sizeof(SQLWCHAR) * (nCharLen) );
+	Py_DECREF(pyUTFobj);
+	return pNewBuffer;
+
+}
+
+
+#ifdef CLI_DBC_SERVER_TYPE_DB2LUW
+#ifdef SQL_ATTR_DECFLOAT_ROUNDING_MODE
+
+/**
+ * Function for implementation of DECFLOAT Datatype
+ *
+ * Description :
+ * This function retrieves the value of special register decflt_rounding
+ * from the database server which signifies the current rounding mode set
+ * on the server. For using decfloat, the rounding mode has to be in sync
+ * on the client as well as server. Thus we set here on the client, the
+ * same rounding mode as the server.
+ *
+ * @return: success or failure
+ * */
+static int _python_ibm_db_set_decfloat_rounding_mode_client(SQLHANDLE hdbc)
+{
+	SQLCHAR decflt_rounding[20];
+	SQLHANDLE hstmt;
+	int rc = 0;
+	/* BUGFIX: default the mode; it was previously read uninitialized
+	 * when the register held an unrecognized value */
+	int rounding_mode = ROUND_HALF_EVEN;
+
+	SQLCHAR *stmt = (SQLCHAR *)"values current decfloat rounding mode";
+
+	/* BUGFIX: zero the buffer so the strcmp()s below never scan
+	 * uninitialized bytes if the fetch fails */
+	memset(decflt_rounding, 0, sizeof(decflt_rounding));
+
+	/* Allocate a Statement Handle */
+	rc = SQLAllocHandle(SQL_HANDLE_STMT, hdbc, &hstmt);
+	if (rc == SQL_ERROR) {
+		_python_ibm_db_check_sql_errors(hdbc, SQL_HANDLE_DBC, rc, 1,
+			NULL, -1, 1);
+		return rc;
+	}
+
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLExecDirect((SQLHSTMT)hstmt, stmt, SQL_NTS);
+	Py_END_ALLOW_THREADS;
+
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors((SQLHSTMT)hstmt,
+			SQL_HANDLE_STMT, rc, 1, NULL,
+			-1, 1);
+		/* BUGFIX: free the statement handle on this error path (leaked) */
+		SQLFreeHandle(SQL_HANDLE_STMT, hstmt);
+		return rc;
+	}
+
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLBindCol((SQLHSTMT)hstmt, 1, SQL_C_DEFAULT, decflt_rounding, 20, NULL);
+	Py_END_ALLOW_THREADS;
+
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors((SQLHSTMT)hstmt,
+			SQL_HANDLE_STMT, rc, 1, NULL,
+			-1, 1);
+		/* BUGFIX: free the statement handle on this error path (leaked) */
+		SQLFreeHandle(SQL_HANDLE_STMT, hstmt);
+		return rc;
+	}
+
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLFetch(hstmt);
+	Py_END_ALLOW_THREADS;
+
+	rc = SQLFreeHandle(SQL_HANDLE_STMT, hstmt);
+
+	/* Now setting up the same rounding mode on the client */
+	if (strcmp((char *)decflt_rounding, "ROUND_HALF_EVEN") == 0) rounding_mode = ROUND_HALF_EVEN;
+	if (strcmp((char *)decflt_rounding, "ROUND_HALF_UP") == 0) rounding_mode = ROUND_HALF_UP;
+	if (strcmp((char *)decflt_rounding, "ROUND_DOWN") == 0) rounding_mode = ROUND_DOWN;
+	if (strcmp((char *)decflt_rounding, "ROUND_CEILING") == 0) rounding_mode = ROUND_CEILING;
+	if (strcmp((char *)decflt_rounding, "ROUND_FLOOR") == 0) rounding_mode = ROUND_FLOOR;
+
+#ifndef PASE
+	rc = SQLSetConnectAttr(hdbc, SQL_ATTR_DECFLOAT_ROUNDING_MODE, (SQLPOINTER)rounding_mode, SQL_NTS);
+#else
+	rc = SQLSetConnectAttr(hdbc, SQL_ATTR_DECFLOAT_ROUNDING_MODE, (SQLPOINTER)&rounding_mode, SQL_NTS);
+#endif
+
+	return rc;
+
+}
+#endif
+#endif
+
+/* static void _python_ibm_db_clear_conn_err_cache ()
+ * Drop any connection-level diagnostics left over from earlier calls. */
+static void _python_ibm_db_clear_conn_err_cache(void)
+{
+	memset(IBM_DB_G(__python_conn_err_state), 0, SQL_SQLSTATE_SIZE + 1);
+	memset(IBM_DB_G(__python_conn_err_msg), 0, DB2_MAX_ERR_MSG_LEN);
+}
+
+/*!#
+ * ibm_db.connect
+ * ibm_db.pconnect
+ * ibm_db.autocommit
+ * ibm_db.bind_param
+ * ibm_db.close
+ * ibm_db.column_privileges
+ * ibm_db.columns
+ * ibm_db.foreign_keys
+ * ibm_db.primary_keys
+ * ibm_db.procedure_columns
+ * ibm_db.procedures
+ * ibm_db.special_columns
+ * ibm_db.statistics
+ * ibm_db.table_privileges
+ * ibm_db.tables
+ * ibm_db.commit
+ * ibm_db.exec
+ * ibm_db.free_result
+ * ibm_db.prepare
+ * ibm_db.execute
+ * ibm_db.conn_errormsg
+ * ibm_db.stmt_errormsg
+ * ibm_db.conn_error
+ * ibm_db.stmt_error
+ * ibm_db.next_result
+ * ibm_db.num_fields
+ * ibm_db.num_rows
+ * ibm_db.get_num_result
+ * ibm_db.field_name
+ * ibm_db.field_display_size
+ * ibm_db.field_num
+ * ibm_db.field_precision
+ * ibm_db.field_scale
+ * ibm_db.field_type
+ * ibm_db.field_width
+ * ibm_db.cursor_type
+ * ibm_db.rollback
+ * ibm_db.free_stmt
+ * ibm_db.result
+ * ibm_db.fetch_row
+ * ibm_db.fetch_assoc
+ * ibm_db.fetch_array
+ * ibm_db.fetch_both
+ * ibm_db.set_option
+ * ibm_db.server_info
+ * ibm_db.client_info
+ * ibm_db.active
+ * ibm_db.get_option
+ */
+
+
+
+/*!# ibm_db.connect
+ *
+ * ===Description
+ *
+ *  --	Returns a connection to a database
+ * IBM_DBConnection ibm_db.connect (dsn=<..>, user=<..>, password=<..>,
+ *								  host=<..>, database=<..>, options=<..>)
+ *
+ * Creates a new connection to an IBM DB2 Universal Database, IBM Cloudscape,
+ * or Apache Derby database.
+ *
+ * ===Parameters
+ *
+ * ====dsn
+ *
+ * For an uncataloged connection to a database, database represents a complete
+ * connection string in the following format:
+ * DRIVER={IBM DB2 ODBC DRIVER};DATABASE=database;HOSTNAME=hostname;PORT=port;
+ * PROTOCOL=TCPIP;UID=username;PWD=password;
+ *	  where the parameters represent the following values:
+ *		hostname
+ *			The hostname or IP address of the database server.
+ *		port
+ *			The TCP/IP port on which the database is listening for requests.
+ *		username
+ *			The username with which you are connecting to the database.
+ *		password
+ *			The password with which you are connecting to the database.
+ *
+ * ====user
+ *
+ * The username with which you are connecting to the database.
+ * This is optional if username is specified in the "dsn" string
+ *
+ * ====password
+ *
+ * The password with which you are connecting to the database.
+ * This is optional if password is specified in the "dsn" string
+ *
+ * ====host
+ *
+ * The hostname or IP address of the database server.
+ * This is optional if hostname is specified in the "dsn" string
+ *
+ * ====database
+ *
+ * For a cataloged connection to a database, database represents the database
+ * alias in the DB2 client catalog.
+ * This is optional if database is specified in the "dsn" string
+ *
+ * ====options
+ *
+ *	  A dictionary of connection options that affect the behavior of the
+ *	  connection,
+ *	  where valid array keys include:
+ *		SQL_ATTR_AUTOCOMMIT
+ *			Passing the SQL_AUTOCOMMIT_ON value turns autocommit on for this
+ *			connection handle.
+ *			Passing the SQL_AUTOCOMMIT_OFF value turns autocommit off for this
+ *			connection handle.
+ *		ATTR_CASE
+ *			Passing the CASE_NATURAL value specifies that column names are
+ *			returned in natural case.
+ *			Passing the CASE_LOWER value specifies that column names are
+ *			returned in lower case.
+ *			Passing the CASE_UPPER value specifies that column names are
+ *			returned in upper case.
+ *		SQL_ATTR_CURSOR_TYPE
+ *			Passing the SQL_SCROLL_FORWARD_ONLY value specifies a forward-only
+ *			cursor for a statement resource.
+ *			This is the default cursor type and is supported on all database
+ *			servers.
+ *			Passing the SQL_CURSOR_KEYSET_DRIVEN value specifies a scrollable
+ *			cursor for a statement resource.
+ *			This mode enables random access to rows in a result set, but
+ *			currently is supported only by IBM DB2 Universal Database.
+ * ====set_replace_quoted_literal
+ *	  This variable indicates if the CLI Connection attribute SQL_ATTR_REPLACE_QUOTED_LITERAL is to be set or not
+ *	  To turn it ON pass  IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_ON
+ *	  To turn it OFF pass IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_OFF
+ *
+ *	  Default Setting: - IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_ON
+ *
+ * ===Return Values
+ *
+ *
+ * Returns a IBM_DBConnection connection object if the connection attempt is
+ * successful.
+ * If the connection attempt fails, ibm_db.connect() returns None.
+ *
+ */ 
+static PyObject *ibm_db_connect(PyObject *self, PyObject *args)
+{
+	PyObject *connection = NULL;
+
+	/* Drop any diagnostics cached by an earlier connection attempt. */
+	_python_ibm_db_clear_conn_err_cache();
+	/* Delegate to the shared helper; final 0 selects a non-persistent
+	 * connection (contrast ibm_db.pconnect, which passes 1). */
+	connection = _python_ibm_db_connect_helper( self, args, 0 );
+	return connection;
+}
+
+/*!# ibm_db.pconnect
+ *
+ * ===Description
+ *  --	Returns a persistent connection to a database
+ * resource ibm_db.pconnect ( string database, string username, string password
+ * [, array options] )
+ *
+ * Returns a persistent connection to an IBM DB2 Universal Database,
+ * IBM Cloudscape, Apache Derby or Informix Dynamic Server database.
+ *
+ * Calling ibm_db.close() on a persistent connection always returns TRUE, but
+ * the underlying DB2 client connection remains open and waiting to serve the
+ * next matching ibm_db.pconnect() request.
+ *
+ * ===Parameters
+ *
+ * ====database
+ *		The database alias in the DB2 client catalog.
+ *
+ * ====username
+ *		The username with which you are connecting to the database.
+ *
+ * ====password
+ *		The password with which you are connecting to the database.
+ *
+ * ====options
+ *		An associative array of connection options that affect the behavior of
+ * the connection,
+ *		where valid array keys include:
+ *
+ *		autocommit
+ *			 Passing the DB2_AUTOCOMMIT_ON value turns autocommit on for this
+ * connection handle.
+ *			 Passing the DB2_AUTOCOMMIT_OFF value turns autocommit off for
+ * this connection handle.
+ *
+ *		DB2_ATTR_CASE
+ *			 Passing the DB2_CASE_NATURAL value specifies that column names
+ * are returned in natural case.
+ *			 Passing the DB2_CASE_LOWER value specifies that column names are
+ * returned in lower case.
+ *			 Passing the DB2_CASE_UPPER value specifies that column names are
+ * returned in upper case.
+ *
+ *		CURSOR
+ *			 Passing the SQL_SCROLL_FORWARD_ONLY value specifies a
+ * forward-only cursor for a statement resource.  This is the default cursor
+ * type and is supported on all database servers.
+ *			 Passing the SQL_CURSOR_KEYSET_DRIVEN value specifies a scrollable
+ * cursor for a statement resource. This mode enables random access to rows in a
+ * result set, but currently is supported only by IBM DB2 Universal Database.
+ *
+ * ====set_replace_quoted_literal
+ *	  This variable indicates if the CLI Connection attribute SQL_ATTR_REPLACE_QUOTED_LITERAL is to be set or not
+ *	  To turn it ON pass  IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_ON
+ *	  To turn it OFF pass IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_OFF
+ *
+ *	  Default Setting: - IBM_DB::SET_QUOTED_LITERAL_REPLACEMENT_ON
+ *
+ * ===Return Values
+ *
+ * Returns a connection handle resource if the connection attempt is successful.
+ * ibm_db.pconnect() tries to reuse an existing connection resource that exactly
+ * matches the database, username, and password parameters. If the connection
+ * attempt fails, ibm_db.pconnect() returns FALSE.
+ */
+static PyObject *ibm_db_pconnect(PyObject *self, PyObject *args)
+{
+	PyObject *connection = NULL;
+
+	/* Drop any diagnostics cached by an earlier connection attempt. */
+	_python_ibm_db_clear_conn_err_cache();
+	/* Delegate to the shared helper; final 1 requests a persistent
+	 * (pooled/reusable) connection. */
+	connection = _python_ibm_db_connect_helper( self, args, 1 );
+	return connection;
+}
+
+/*
+ * static void _python_clear_local_var(PyObject *dbNameObj, SQLWCHAR *dbName, PyObject *codesetObj, SQLWCHAR *codeset, PyObject *modeObj, SQLWCHAR *mode, int isNewBuffer)
+ *
+ * Releases the Python references and (when getUnicodeDataAsSQLWCHAR
+ * allocated fresh buffers, i.e. isNewBuffer is true) the SQLWCHAR buffers
+ * produced while converting the dbName/codeset/mode arguments of
+ * createdb/dropdb. Pass NULL for any pair that was never converted.
+ *
+ * NOTE(review): a single isNewBuffer flag is applied to all three buffers;
+ * presumably every getUnicodeDataAsSQLWCHAR call in one request reports the
+ * same value — confirm against its implementation.
+ */
+static void _python_clear_local_var(PyObject *dbNameObj, SQLWCHAR *dbName, PyObject *codesetObj, SQLWCHAR *codeset, PyObject *modeObj, SQLWCHAR *mode, int isNewBuffer)
+{
+	if ( !NIL_P( dbNameObj ) ) {
+		Py_XDECREF( dbNameObj );
+		if ( isNewBuffer ) {
+			PyMem_Del( dbName );
+		}
+	}
+
+	if ( !NIL_P( codesetObj ) ) {
+		Py_XDECREF( codesetObj );
+		if ( isNewBuffer ) {
+			PyMem_Del( codeset );
+		}
+	}
+
+	if ( !NIL_P( modeObj ) ) {
+		Py_XDECREF( modeObj );
+		if ( isNewBuffer ) {
+			PyMem_Del( mode );
+		}
+	}
+}
+
+/*
+ * static int _python_ibm_db_createdb(conn_handle *conn_res, PyObject *dbNameObj, PyObject *codesetObj, PyObject *modeObj, int createNX)
+ *
+ * Creates a database via the CLI entry point SQLCreateDbW, which is looked
+ * up dynamically from the DB2 client library (only available from v97fp3,
+ * v97fp4 on Windows). codesetObj/modeObj are optional (may be NULL/None).
+ * When createNX is 1, SQLCODE -1005 ("database already exists") is treated
+ * as success. Returns 0 on success, -1 on failure (with a Python exception
+ * set, except on some conversion failures where the conversion itself set
+ * it). Not supported on macOS/AIX.
+ */
+static int _python_ibm_db_createdb(conn_handle *conn_res, PyObject *dbNameObj, PyObject *codesetObj, PyObject *modeObj, int createNX)
+{
+	SQLWCHAR *dbName = NULL;
+	SQLWCHAR *codeset = NULL;
+	SQLWCHAR *mode = NULL;
+	SQLINTEGER sqlcode;
+	SQLSMALLINT length;
+	SQLCHAR msg[SQL_MAX_MESSAGE_LENGTH + 1];
+	SQLCHAR sqlstate[SQL_SQLSTATE_SIZE + 1];
+	int isNewBuffer;
+	int rc = 0;
+#ifdef _WIN32
+	HINSTANCE cliLib = NULL;
+	FARPROC sqlcreatedb;
+#else
+	void *cliLib = NULL;
+	typedef int (*sqlcreatedbType)( SQLHDBC, SQLWCHAR *, SQLINTEGER, SQLWCHAR *, SQLINTEGER, SQLWCHAR *, SQLINTEGER );
+	sqlcreatedbType sqlcreatedb;
+#endif
+
+#if defined __APPLE__ || defined _AIX
+	PyErr_SetString( PyExc_Exception, "Not supported: This function is currently not supported on this platform" );
+	return -1;
+#else
+
+	if ( !NIL_P( conn_res ) ) {
+		if ( NIL_P( dbNameObj ) ) {
+			PyErr_SetString( PyExc_Exception, "Supplied database name Parameter is invalid" );
+			return -1;
+		}
+		/* Check to ensure the connection resource given is active */
+		if ( !conn_res->handle_active ) {
+			PyErr_SetString( PyExc_Exception, "Connection is not active" );
+			return -1;
+		}
+
+		/* Coerce each argument to unicode, then to a SQLWCHAR buffer. */
+		dbNameObj = PyUnicode_FromObject( dbNameObj );
+		if ( dbNameObj != NULL &&  dbNameObj != Py_None ) {
+			dbName = getUnicodeDataAsSQLWCHAR( dbNameObj, &isNewBuffer );
+		} else {
+			return -1;
+		}
+		
+		if ( !NIL_P( codesetObj ) ) {
+			codesetObj = PyUnicode_FromObject( codesetObj );
+			if ( codesetObj != NULL &&  codesetObj != Py_None ) {
+				codeset = getUnicodeDataAsSQLWCHAR( codesetObj, &isNewBuffer );
+			} else {
+				_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+				return -1;
+			}
+				
+		}
+		if ( !NIL_P( modeObj ) ) {
+			modeObj = PyUnicode_FromObject( modeObj );
+			/* Bug fix: validate the converted modeObj here, not codesetObj
+			 * as before — a failed mode conversion must not fall through. */
+			if ( modeObj != NULL &&  modeObj != Py_None ) {
+				mode = getUnicodeDataAsSQLWCHAR( modeObj, &isNewBuffer );	
+			} else {
+				_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, NULL, NULL, isNewBuffer );
+				return -1;
+			}
+		}
+
+		/* Resolve SQLCreateDbW at run time so the module still loads
+		 * against CLI versions that lack it. */
+#ifdef _WIN32
+		cliLib = DLOPEN( LIBDB2 );
+#else
+		cliLib = DLOPEN( LIBDB2, RTLD_LAZY );
+#endif
+		if ( !cliLib ) {
+			sprintf( (char *)msg, "Error in loading %s library file", LIBDB2 );
+			PyErr_SetString( PyExc_Exception,  (char *)msg );
+			_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, modeObj, mode, isNewBuffer );	
+			return -1;
+		}
+#ifdef _WIN32
+		sqlcreatedb =  DLSYM( cliLib, "SQLCreateDbW" );
+#else
+		sqlcreatedb = (sqlcreatedbType) DLSYM( cliLib, "SQLCreateDbW" );
+#endif
+		if ( sqlcreatedb == NULL )  {
+#ifdef _WIN32
+			sprintf( (char *)msg, "Not supported: This function is only supported from v97fp4 version of cli on window" );
+#else
+			sprintf( (char *)msg, "Not supported: This function is only supported from v97fp3 version of cli" );
+#endif
+			PyErr_SetString( PyExc_Exception, (char *)msg );
+			DLCLOSE( cliLib );
+			_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, modeObj, mode, isNewBuffer );
+			return -1;
+		}
+
+		/* Release the GIL around the (potentially slow) CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = (*sqlcreatedb)( (SQLHDBC)conn_res->hdbc, dbName, SQL_NTS, codeset, SQL_NTS, mode, SQL_NTS );
+		Py_END_ALLOW_THREADS;
+		
+		DLCLOSE( cliLib );		
+		if ( rc != SQL_SUCCESS ) {
+			if ( createNX == 1 ) {
+				/* createdbNX: "already exists" (SQLCODE -1005) is success. */
+				if ( SQLGetDiagRec( SQL_HANDLE_DBC, (SQLHDBC)conn_res->hdbc, 1, sqlstate, &sqlcode, msg, SQL_MAX_MESSAGE_LENGTH + 1, &length ) == SQL_SUCCESS ) {
+					if ( sqlcode == -1005 ) {
+						_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, modeObj, mode, isNewBuffer );
+						return 0;
+					}
+				}
+			}
+			_python_ibm_db_check_sql_errors( conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, NULL, -1, 1 );
+			_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, modeObj, mode, isNewBuffer );
+			return -1;
+		}
+		_python_clear_local_var( dbNameObj, dbName, codesetObj, codeset, modeObj, mode, isNewBuffer );
+		return 0;
+	} else {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return -1;
+	}
+#endif
+}
+
+/*
+ * static int _python_ibm_db_dropdb(conn_handle *conn_res, PyObject *dbNameObj, int recreate)
+ *
+ * Drops a database via the CLI entry point SQLDropDbW, looked up
+ * dynamically from the DB2 client library (only available from v97fp3,
+ * v97fp4 on Windows). When recreate is 1 (recreatedb path), SQLCODE -1013
+ * ("database not found") is treated as success. Returns 0 on success,
+ * -1 on failure with a Python exception set. Not supported on macOS/AIX.
+ */
+static int _python_ibm_db_dropdb(conn_handle *conn_res, PyObject *dbNameObj, int recreate)
+{
+	SQLWCHAR *dbName = NULL;
+	SQLINTEGER sqlcode;
+	SQLSMALLINT length;
+	SQLCHAR msg[SQL_MAX_MESSAGE_LENGTH + 1];
+	SQLCHAR sqlstate[SQL_SQLSTATE_SIZE + 1];
+	int isNewBuffer;
+	int rc = 0;
+#ifdef _WIN32
+	FARPROC sqldropdb;
+	HINSTANCE cliLib = NULL;
+#else
+	typedef int (*sqldropdbType)( SQLHDBC, SQLWCHAR *, SQLINTEGER );
+	sqldropdbType sqldropdb;	
+	void *cliLib;
+#endif
+
+#if defined __APPLE__ || defined _AIX
+	PyErr_SetString( PyExc_Exception, "Not supported: This function is currently not supported on this platform" );
+	return -1;
+#else
+
+	if ( !NIL_P( conn_res ) ) {
+		if ( NIL_P( dbNameObj ) ) {
+			PyErr_SetString( PyExc_Exception, "Supplied database name Parameter is invalid" );
+			return -1;
+		}
+		/* Check to ensure the connection resource given is active */
+		if ( !conn_res->handle_active ) {
+			PyErr_SetString( PyExc_Exception, "Connection is not active" );
+			return -1;
+		}
+
+		/* Coerce the database name to unicode, then to a SQLWCHAR buffer. */
+		dbNameObj = PyUnicode_FromObject( dbNameObj );
+		if ( dbNameObj != NULL &&  dbNameObj != Py_None ) {
+			dbName = getUnicodeDataAsSQLWCHAR( dbNameObj, &isNewBuffer );	
+		} else {
+			return -1;
+		}
+
+		/* Resolve SQLDropDbW at run time so the module still loads
+		 * against CLI versions that lack it. */
+#ifdef _WIN32
+		cliLib = DLOPEN( LIBDB2 );
+#else
+		cliLib = DLOPEN( LIBDB2, RTLD_LAZY );
+#endif
+		if ( !cliLib ) {
+			sprintf( (char *)msg, "Error in loading %s library file", LIBDB2 );
+			PyErr_SetString( PyExc_Exception, (char *)msg );
+			_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+			return -1;
+		}
+#ifdef _WIN32
+		sqldropdb = DLSYM( cliLib, "SQLDropDbW" );
+#else
+		sqldropdb = (sqldropdbType)DLSYM( cliLib, "SQLDropDbW" );
+#endif
+		if ( sqldropdb == NULL)  {
+#ifdef _WIN32
+			sprintf( (char *)msg, "Not supported: This function is only supported from v97fp4 version of cli on window" );
+#else
+			sprintf( (char *)msg, "Not supported: This function is only supported from v97fp3 version of cli" );
+#endif
+			PyErr_SetString( PyExc_Exception, (char *)msg );
+			DLCLOSE( cliLib );
+			_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+			return -1;
+		}
+
+		/* Release the GIL around the (potentially slow) CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = sqldropdb( conn_res->hdbc, dbName, SQL_NTS );
+		Py_END_ALLOW_THREADS;
+		
+		DLCLOSE( cliLib );
+		if ( rc != SQL_SUCCESS ) {
+			if ( recreate ) {
+				/* recreatedb: "database not found" (SQLCODE -1013) is OK. */
+				if ( SQLGetDiagRec( SQL_HANDLE_DBC, (SQLHDBC)conn_res->hdbc, 1, sqlstate, &sqlcode, msg, SQL_MAX_MESSAGE_LENGTH + 1, &length ) == SQL_SUCCESS ) {
+					if ( sqlcode == -1013 ) {
+						_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+						return 0;
+					}
+				}
+			}
+			_python_ibm_db_check_sql_errors( conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, NULL, -1, 1 );
+			/* Bug fix: this error path previously leaked dbNameObj/dbName
+			 * by returning without releasing them (every other exit path,
+			 * and _python_ibm_db_createdb, clean up here). */
+			_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+			return -1;
+		}
+		_python_clear_local_var( dbNameObj, dbName, NULL, NULL, NULL, NULL, isNewBuffer );
+		return 0;
+	} else {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return -1;
+	}
+#endif
+}
+
+/*!# ibm_db.createdb
+ *
+ * ===Description
+ *  True/None ibm_db.createdb ( IBM_DBConnection connection, string dbName [, codeSet, mode] )
+ *
+ * Creates a database by using the specified database name, code set, and mode
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *      A valid database server instance connection resource variable as returned from ibm_db.connect() by specifying the ATTACH keyword.
+ *
+ * ====dbName
+ *      Name of the database that is to be created.
+ *
+ * ====codeSet
+ *      Database code set information.
+ *      Note: If the value of the codeSet argument not specified, the database is created in the Unicode code page for DB2 data servers and in the UTF-8 code page for IDS data servers.
+ *
+ * ====mode
+ *      Database logging mode.
+ *      Note: This value is applicable only to IDS data servers.
+ *
+ * ===Return Value
+ *  Returns True on successful creation of database else return None
+ */
+PyObject *ibm_db_createdb(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *dbNameObj = NULL;
+	PyObject *codesetObj = NULL;
+	PyObject *modeObj = NULL;
+
+	/* connection and dbName are required; codeSet and mode are optional */
+	if ( !PyArg_ParseTuple( args, "OO|OO", &py_conn_res, &dbNameObj, &codesetObj, &modeObj ) ) {
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	/* createNX = 0: an already-existing database is reported as an error. */
+	if ( _python_ibm_db_createdb((conn_handle *)py_conn_res, dbNameObj, codesetObj, modeObj, 0) != 0 ) {
+		return NULL;
+	}
+	Py_RETURN_TRUE;
+}
+
+/*!# ibm_db.dropdb
+ *
+ * ===Description
+ *  True/None ibm_db.dropdb ( IBM_DBConnection connection, string dbName )
+ *
+ * Drops the specified database
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *      A valid database server instance connection resource variable as returned from ibm_db.connect() by specifying the ATTACH keyword.
+ *
+ * ====dbName
+ *      Name of the database that is to be dropped.
+ *
+ * ===Return Value
+ *  Returns True if the specified database was dropped successfully, else None
+ */
+PyObject *ibm_db_dropdb(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *dbNameObj = NULL;
+
+	/* Both the connection object and the database name are required. */
+	if ( !PyArg_ParseTuple( args, "OO", &py_conn_res, &dbNameObj ) ) {
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	/* recreate = 0: a missing database is reported as an error. */
+	if ( _python_ibm_db_dropdb( (conn_handle *)py_conn_res, dbNameObj, 0 ) != 0 ) {
+		return NULL;
+	}
+	Py_RETURN_TRUE;
+}
+
+/*ibm_db.recreatedb
+ *
+ * ===Description
+ *  True/None ibm_db.recreatedb ( IBM_DBConnection connection, string dbName [, codeSet, mode] )
+ *
+ * Drop and then recreates a database by using the specified database name, code set, and mode
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *      A valid database server instance connection resource variable as returned from ibm_db.connect() by specifying the ATTACH keyword.
+ *
+ * ====dbName
+ *      Name of the database that is to be created.
+ *
+ * ====codeSet
+ *      Database code set information.
+ *      Note: If the value of the codeSet argument not specified, the database is created in the Unicode code page for DB2 data servers and in the UTF-8 code page for IDS data servers.
+ *
+ * ====mode
+ *      Database logging mode.
+ *      Note: This value is applicable only to IDS data servers.
+ *
+ * ===Return Value
+ *  Returns True if specified database created successfully else return None
+ */
+PyObject *ibm_db_recreatedb(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *dbNameObj = NULL;
+	PyObject *codesetObj = NULL;
+	PyObject *modeObj = NULL;
+
+	/* connection and dbName are required; codeSet and mode are optional */
+	if ( !PyArg_ParseTuple( args, "OO|OO", &py_conn_res, &dbNameObj, &codesetObj, &modeObj ) ) {
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	/* Drop first (recreate = 1 tolerates "database not found"), then create. */
+	if ( _python_ibm_db_dropdb((conn_handle *)py_conn_res, dbNameObj, 1 ) != 0 ) {
+		return NULL;
+	}
+	if ( _python_ibm_db_createdb((conn_handle *)py_conn_res, dbNameObj, codesetObj, modeObj, 0) != 0 ) {
+		return NULL;
+	}
+	Py_RETURN_TRUE;
+}
+
+/*!# ibm_db.createdbNX 
+ *
+ * ===Description
+ *  True/None ibm_db.createdbNX ( IBM_DBConnection connection, string dbName [, codeSet, mode] )
+ *
+ * Creates the database if not exist by using the specified database name, code set, and mode
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *      A valid database server instance connection resource variable as returned from ibm_db.connect() by specifying the ATTACH keyword.
+ *
+ * ====dbName
+ *      Name of the database that is to be created.
+ *
+ * ====codeSet
+ *      Database code set information.
+ *      Note: If the value of the codeSet argument not specified, the database is created in the Unicode code page for DB2 data servers and in the UTF-8 code page for IDS data servers.
+ *
+ * ====mode
+ *      Database logging mode.
+ *      Note: This value is applicable only to IDS data servers.
+ *
+ * ===Return Value
+ *  Returns True if the database already exists or was created successfully, else returns None
+ */
+PyObject *ibm_db_createdbNX(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *dbNameObj = NULL;
+	PyObject *codesetObj = NULL;
+	PyObject *modeObj = NULL;
+
+	/* connection and dbName are required; codeSet and mode are optional */
+	if ( !PyArg_ParseTuple( args, "OO|OO", &py_conn_res, &dbNameObj, &codesetObj, &modeObj ) ) {
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	/* createNX = 1: an already-existing database counts as success. */
+	if ( _python_ibm_db_createdb((conn_handle *)py_conn_res, dbNameObj, codesetObj, modeObj, 1) != 0 ) {
+		return NULL;
+	}
+	Py_RETURN_TRUE;
+}
+
+/*!# ibm_db.autocommit
+ *
+ * ===Description
+ *
+ * mixed ibm_db.autocommit ( resource connection [, bool value] )
+ *
+ * Returns or sets the AUTOCOMMIT behavior of the specified connection resource.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *	A valid database connection resource variable as returned from connect()
+ * or pconnect().
+ *
+ * ====value
+ *	One of the following constants:
+ *	SQL_AUTOCOMMIT_OFF
+ *		  Turns AUTOCOMMIT off.
+ *	SQL_AUTOCOMMIT_ON
+ *		  Turns AUTOCOMMIT on.
+ *
+ * ===Return Values
+ *
+ * When ibm_db.autocommit() receives only the connection parameter, it returns
+ * the current state of AUTOCOMMIT for the requested connection as an integer
+ * value. A value of 0 indicates that AUTOCOMMIT is off, while a value of 1
+ * indicates that AUTOCOMMIT is on.
+ *
+ * When ibm_db.autocommit() receives both the connection parameter and
+ * autocommit parameter, it attempts to set the AUTOCOMMIT state of the
+ * requested connection to the corresponding state.
+ *
+ * Returns TRUE on success or FALSE on failure.
+ */ 
+/*
+ * Get or set the AUTOCOMMIT state of a connection.
+ * One argument: returns the cached autocommit flag as an int (0 or 1).
+ * Two arguments: sets SQL_ATTR_AUTOCOMMIT on the CLI handle (if the value
+ * differs from the cached one) and returns True.
+ */
+static PyObject *ibm_db_autocommit(PyObject *self, PyObject *args)			
+{
+	PyObject *py_autocommit = NULL;
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res = NULL;
+	int rc;
+	SQLINTEGER autocommit = -1;
+
+	if (!PyArg_ParseTuple(args, "O|O", &py_conn_res, &py_autocommit)){
+		return NULL;
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		/* The optional second argument must be an integer. */
+		if (!NIL_P(py_autocommit)) {
+			if (PyInt_Check(py_autocommit)) { 
+				autocommit = (SQLINTEGER)PyInt_AsLong(py_autocommit);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+	  /* If value in handle is different from value passed in */
+		if (PyTuple_Size(args) == 2) {
+			if(autocommit != (conn_res->auto_commit)) {
+				/* On PASE (IBM i) the attribute is passed by pointer;
+				 * elsewhere the value is cast into the pointer itself. */
+#ifndef PASE
+				rc = SQLSetConnectAttr((SQLHDBC)conn_res->hdbc, SQL_ATTR_AUTOCOMMIT, (SQLPOINTER) (autocommit == 0 ? SQL_AUTOCOMMIT_OFF : SQL_AUTOCOMMIT_ON), SQL_IS_INTEGER);
+#else
+				rc = SQLSetConnectAttr((SQLHDBC)conn_res->hdbc, SQL_ATTR_AUTOCOMMIT, (SQLPOINTER)&autocommit, SQL_IS_INTEGER);
+#endif
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, 
+												rc, 1, NULL, -1, 1);
+				}
+				/* NOTE(review): the cached flag is updated even when
+				 * SQLSetConnectAttr returned SQL_ERROR, so the cache can
+				 * drift from the real CLI state — confirm intended. */
+				conn_res->auto_commit = autocommit;
+			}
+			Py_INCREF(Py_True);
+			return Py_True;
+		} else {
+			/* Getter form: report the cached state, not a fresh CLI query. */
+			return PyInt_FromLong(conn_res->auto_commit);
+		}
+	}
+	return NULL;
+}
+
+/*	static void _python_ibm_db_add_param_cache( stmt_handle *stmt_res, int param_no, PyObject *var_pyvalue, int param_type, int size, SQLSMALLINT data_type, SQLUINTEGER precision, SQLSMALLINT scale, SQLSMALLINT nullable )
+ *
+ *	Inserts (or updates in place) the descriptor for parameter param_no in
+ *	the statement's singly linked parameter cache, which is kept sorted by
+ *	param_num. The actual CLI bind happens later, at execute time.
+ */
+static void _python_ibm_db_add_param_cache( stmt_handle *stmt_res, int param_no, PyObject *var_pyvalue, int param_type, int size, SQLSMALLINT data_type, SQLUINTEGER precision, SQLSMALLINT scale, SQLSMALLINT nullable )
+{
+	param_node *tmp_curr = NULL, *prev = stmt_res->head_cache_list, *curr = stmt_res->head_cache_list;
+
+	/* Walk the sorted list to the insertion point for param_no. */
+	while ( (curr != NULL) && (curr->param_num < param_no) ) {
+		prev = curr;
+		curr = curr->next;
+	}
+
+	if ( curr == NULL || curr->param_num != param_no ) {
+		/* Allocate memory and make new node to be added */
+		tmp_curr = ALLOC(param_node);
+		memset(tmp_curr, 0, sizeof(param_node));
+
+		/* assign values */
+		tmp_curr->data_type = data_type;
+		tmp_curr->param_size = precision;
+		tmp_curr->nullable = nullable;
+		tmp_curr->scale = scale;
+		tmp_curr->param_num = param_no;
+		tmp_curr->file_options = SQL_FILE_READ;
+		tmp_curr->param_type = param_type;
+		tmp_curr->size = size;
+
+		/* Set this flag in stmt_res if a FILE INPUT is present */
+		if ( param_type == PARAM_FILE) {
+			stmt_res->file_param = 1;
+		}
+
+		if ( var_pyvalue != NULL) {
+			tmp_curr->var_pyvalue = var_pyvalue;
+		}
+
+		/* link pointers for the list */
+		if ( prev == NULL ) {
+			stmt_res->head_cache_list = tmp_curr;
+		} else {
+			prev->next = tmp_curr;
+		}
+		tmp_curr->next = curr;
+
+		/* Increment num params added */
+		stmt_res->num_params++;
+	} else {
+		/* Both the nodes are for the same param no */
+		/* Replace Information */
+		curr->data_type = data_type;
+		curr->param_size = precision;
+		curr->nullable = nullable;
+		curr->scale = scale;
+		curr->param_num = param_no;
+		curr->file_options = SQL_FILE_READ;
+		curr->param_type = param_type;
+		curr->size = size;
+
+		/* Set this flag in stmt_res if a FILE INPUT is present */
+		if ( param_type == PARAM_FILE) {
+			stmt_res->file_param = 1;
+		}
+
+		if ( var_pyvalue != NULL) {
+			curr->var_pyvalue = var_pyvalue;
+		}
+
+	}
+}
+
+/*
+ * static PyObject *_python_ibm_db_bind_param_helper(int argc, stmt_handle *stmt_res, SQLUSMALLINT param_no, PyObject *var_pyvalue, long param_type,
+ * 					 long data_type, long precision, long scale, long size)
+ *
+ * Shared worker for ibm_db.bind_param. argc is the number of positional
+ * arguments the caller passed; each extra argument overrides one more
+ * field of the parameter description:
+ *   argc 3   - input parameter, everything described by the server
+ *   argc 4   - caller-supplied param_type, rest described by the server
+ *   argc 5   - also caller-supplied data_type
+ *   argc 6   - also caller-supplied precision
+ *   argc 7/8 - fully caller-supplied (no SQLDescribeParam round trip)
+ * The description is cached; the CLI bind itself happens at execute time.
+ * Returns Py_True on success, NULL (with exception set) on failure.
+ */
+static PyObject *_python_ibm_db_bind_param_helper(int argc, stmt_handle *stmt_res, SQLUSMALLINT param_no, PyObject *var_pyvalue, long param_type, long data_type, long precision, long scale, long size)
+{
+	SQLSMALLINT sql_data_type = 0;
+	SQLUINTEGER sql_precision = 0;
+	SQLSMALLINT sql_scale = 0;
+	SQLSMALLINT sql_nullable = SQL_NO_NULLS;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	int rc = 0;
+	
+	/* Check for Param options */
+	switch (argc) {
+		/* if argc == 3, then the default value for param_type will be used */
+		case 3:
+			param_type = SQL_PARAM_INPUT;
+			/* fall through: cases 3-6 share the describe-then-cache path */
+		case 4:
+		case 5:
+		case 6:
+			/* Ask the server to describe the parameter's type/precision/
+			 * scale/nullability, releasing the GIL around the CLI call. */
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLDescribeParam((SQLHSTMT)stmt_res->hstmt, 
+								(SQLUSMALLINT)param_no, &sql_data_type, 
+								&sql_precision, &sql_scale, &sql_nullable);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT,
+												rc, 1, NULL, -1, 1);
+				sprintf(error, "Describe Param Failed: %s", 
+						IBM_DB_G(__python_stmt_err_msg));
+				PyErr_SetString(PyExc_Exception, error);
+				return NULL;
+			}
+			/* Caller-supplied values (when given) override the described
+			 * ones, exactly as the former per-case duplicates did. */
+			if ( argc >= 5 ) {
+				sql_data_type = (SQLSMALLINT)data_type;
+			}
+			if ( argc >= 6 ) {
+				sql_precision = (SQLUINTEGER)precision;
+			}
+			/* Add to cache */
+			_python_ibm_db_add_param_cache(stmt_res, param_no, var_pyvalue,
+											param_type, size, 
+											sql_data_type, sql_precision, 
+											sql_scale, sql_nullable );
+			break;
+
+		case 7:
+		case 8:
+			/* Cache param data passed 
+			* I am using a linked list of nodes here because I don't know 
+			* before hand how many params are being passed in/bound. 
+			* To determine this, a call to SQLNumParams is necessary. 
+			* This is take away any advantages an array would have over 
+			* linked list access 
+			* Data is being copied over to the correct types for subsequent 
+			* CLI call because this might cause problems on other platforms 
+			* such as AIX 
+			*/
+			sql_data_type = (SQLSMALLINT)data_type;
+			sql_precision = (SQLUINTEGER)precision;
+			sql_scale = (SQLSMALLINT)scale;
+			_python_ibm_db_add_param_cache(stmt_res, param_no, var_pyvalue,
+											param_type, size, 
+											sql_data_type, sql_precision, 
+											sql_scale, sql_nullable );
+			break;
+	
+		default:
+			/* WRONG_PARAM_COUNT; */
+			return NULL;
+	}
+	/* end Switch */
+
+	/* We bind data with DB2 CLI in ibm_db.execute() */
+	/* This will save network flow if we need to override params in it */
+
+	Py_INCREF(Py_True);
+	return Py_True;
+}
+
+/*!# ibm_db.bind_param
+ *
+ * ===Description
+ * Py_True/Py_None ibm_db.bind_param (resource stmt, int parameter-number,
+ *									string variable [, int parameter-type
+ *									[, int data-type [, int precision
+ *									[, int scale [, int size[]]]]]] )
+ *
+ * Binds a Python variable to an SQL statement parameter in a IBM_DBStatement
+ * resource returned by ibm_db.prepare().
+ * This function gives you more control over the parameter type, data type,
+ * precision, and scale for the parameter than simply passing the variable as
+ * part of the optional input array to ibm_db.execute().
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *
+ *	A prepared statement returned from ibm_db.prepare().
+ *
+ * ====parameter-number
+ *
+ *	Specifies the 1-indexed position of the parameter in the prepared
+ * statement.
+ *
+ * ====variable
+ *
+ *	A Python variable to bind to the parameter specified by parameter-number.
+ *
+ * ====parameter-type
+ *
+ *	A constant specifying whether the Python variable should be bound to the
+ * SQL parameter as an input parameter (SQL_PARAM_INPUT), an output parameter
+ * (SQL_PARAM_OUTPUT), or as a parameter that accepts input and returns output
+ * (SQL_PARAM_INPUT_OUTPUT). To avoid memory overhead, you can also specify
+ * PARAM_FILE to bind the Python variable to the name of a file that contains
+ * large object (BLOB, CLOB, or DBCLOB) data.
+ *
+ * ====data-type
+ *
+ *	A constant specifying the SQL data type that the Python variable should be
+ * bound as: one of SQL_BINARY, DB2_CHAR, DB2_DOUBLE, or DB2_LONG .
+ *
+ * ====precision
+ *
+ *	Specifies the precision that the variable should be bound to the database.
+ *
+ * ====scale
+ *
+ *	  Specifies the scale that the variable should be bound to the database.
+ *
+ * ====size
+ *
+ *	  Specifies the size that should be retrieved from an INOUT/OUT parameter.
+ *
+ * ===Return Values
+ *
+ *	Returns Py_True on success or NULL on failure.
+ */
+static PyObject *ibm_db_bind_param(PyObject *self, PyObject *args)			
+{
+	PyObject *var_pyvalue = NULL;
+	PyObject *py_param_type = NULL;
+	PyObject *py_data_type = NULL;
+	PyObject *py_precision = NULL;
+	PyObject *py_scale = NULL;
+	PyObject *py_size = NULL;
+	PyObject *py_param_no = NULL;
+	PyObject *py_stmt_res = NULL;
+
+	/* Default: bind as an input parameter unless the caller overrides it. */
+	long param_type = SQL_PARAM_INPUT;
+	/* LONG types used for data being passed in */
+	SQLUSMALLINT param_no = 0;
+	long data_type = 0;
+	long precision = 0;
+	long scale = 0;
+	long size = 0;
+	stmt_handle *stmt_res;
+	
+	/* stmt, param number and value are required; the four type/precision/
+	 * scale/size descriptors are optional (see the docblock above). */
+	if (!PyArg_ParseTuple(args, "OOO|OOOOO", &py_stmt_res, &py_param_no, 
+						&var_pyvalue, &py_param_type, 
+						&py_data_type, &py_precision, 
+						&py_scale, &py_size)) {
+	
+		return NULL;
+	}
+
+	/* param_no must be an integer; it is narrowed to SQLUSMALLINT for CLI. */
+	if (!NIL_P(py_param_no)) {
+		if (PyInt_Check(py_param_no)) {
+			param_no = (SQLUSMALLINT) PyInt_AsLong(py_param_no);
+		} else {
+			PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+			return NULL;
+		}
+	}
+	/* The optional descriptors are silently ignored unless they are plain
+	 * integers; non-int values leave the defaults above in place. */
+	if (py_param_type != NULL && py_param_type != Py_None && 
+		TYPE(py_param_type) == PYTHON_FIXNUM) {
+		param_type = PyInt_AS_LONG(py_param_type);
+	}
+
+	if (py_data_type != NULL && py_data_type != Py_None && 
+		TYPE(py_data_type) == PYTHON_FIXNUM) {
+		data_type = PyInt_AS_LONG(py_data_type);
+	}
+
+	if (py_precision != NULL && py_precision != Py_None && 
+		TYPE(py_precision) == PYTHON_FIXNUM) {
+		precision = PyInt_AS_LONG(py_precision);
+	}
+
+	if (py_scale != NULL && py_scale != Py_None && 
+		TYPE(py_scale) == PYTHON_FIXNUM) {
+		scale = PyInt_AS_LONG(py_scale);
+	}
+
+	if (py_size != NULL && py_size != Py_None && 
+		TYPE(py_size) == PYTHON_FIXNUM) {
+		size = PyInt_AS_LONG(py_size);
+	}
+
+	/* Validate the statement handle, then delegate the real caching/binding
+	 * work to the helper; PyTuple_Size(args) tells it how many of the
+	 * optional descriptors were actually supplied. */
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		return _python_ibm_db_bind_param_helper(PyTuple_Size(args), stmt_res, param_no, var_pyvalue, param_type, data_type, precision, scale, size);
+	} else {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+		return NULL;
+	}
+}
+
+
+/*!# ibm_db.close
+ *
+ * ===Description
+ *
+ * bool ibm_db.close ( resource connection )
+ *
+ * This function closes a DB2 client connection created with ibm_db.connect()
+ * and returns the corresponding resources to the database server.
+ *
+ * If you attempt to close a persistent DB2 client connection created with
+ * ibm_db.pconnect(), the close request returns TRUE and the persistent IBM Data
+ * Server client connection remains available for the next caller.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *	Specifies an active DB2 client connection.
+ *
+ * ===Return Values
+ * Returns TRUE on success or FALSE on failure.
+ */
+static PyObject *ibm_db_close(PyObject *self, PyObject *args)			
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res = NULL;
+	int rc;
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		/* Check to see if it's a persistent connection; 
+		 * if so, just return true 
+		*/
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+		if ( conn_res->handle_active && !conn_res->flag_pconnect ) {
+			/* Disconnect from DB. If stmt is allocated, 
+			* it is freed automatically 
+			*/
+			/* With autocommit off there may be an open transaction; roll it
+			 * back explicitly before disconnecting. */
+			if (conn_res->auto_commit == 0) {
+				rc = SQLEndTran(SQL_HANDLE_DBC, (SQLHDBC)conn_res->hdbc, 
+								SQL_ROLLBACK);
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, 
+													rc, 1, NULL, -1, 1);
+					return NULL;
+				}
+			}
+			rc = SQLDisconnect((SQLHDBC)conn_res->hdbc);
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 
+												1, NULL, -1, 1);
+				return NULL;
+			}
+
+			/* Freeing the handle can block; release the GIL around it. */
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLFreeHandle(SQL_HANDLE_DBC, conn_res->hdbc);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 
+												1, NULL, -1, 1);
+
+				/* Still attempt to free the environment handle so it is not
+				 * leaked, but report the DBC failure to the caller. */
+				rc = SQLFreeHandle(SQL_HANDLE_ENV, conn_res->henv);
+				return NULL;
+			}
+
+			rc = SQLFreeHandle(SQL_HANDLE_ENV, conn_res->henv);
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(conn_res->henv, SQL_HANDLE_ENV, rc, 
+												1, NULL, -1, 1);
+				return NULL;
+			}
+
+			conn_res->handle_active = 0;
+			Py_INCREF(Py_True);
+			return Py_True;
+		} else if ( conn_res->flag_pconnect ) {
+			/* Do we need to call FreeStmt or something to close cursors? */
+			/* Persistent connection: report success and keep it open for
+			 * the next caller. */
+			Py_INCREF(Py_True);
+			return Py_True;
+		} else {
+			/* Defensive: unreachable given the handle_active check above. */
+			return NULL;
+		}
+	} else {
+		return NULL;
+	}
+}
+
+/*!# ibm_db.column_privileges
+ *
+ * ===Description
+ * resource ibm_db.column_privileges ( resource connection [, string qualifier
+ * [, string schema [, string table-name [, string column-name]]]] )
+ *
+ * Returns a result set listing the columns and associated privileges for a
+ * table.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables. To match all schemas, pass NULL
+ * or an empty string.
+ *
+ * ====table-name
+ *		The name of the table or view. To match all tables in the database,
+ * pass NULL or an empty string.
+ *
+ * ====column-name
+ *		The name of the column. To match all columns in the table, pass NULL
+ * or an empty string.
+ *
+ * ===Return Values
+ * Returns a statement resource with a result set containing rows describing
+ * the column privileges for columns matching the specified parameters. The rows
+ * are composed of the following columns:
+ *
+ * TABLE_CAT:: Name of the catalog. The value is NULL if this table does not
+ * have catalogs.
+ * TABLE_SCHEM:: Name of the schema.
+ * TABLE_NAME:: Name of the table or view.
+ * COLUMN_NAME:: Name of the column.
+ * GRANTOR:: Authorization ID of the user who granted the privilege.
+ * GRANTEE:: Authorization ID of the user to whom the privilege was granted.
+ * PRIVILEGE:: The privilege for the column.
+ * IS_GRANTABLE:: Whether the GRANTEE is permitted to grant this privilege to
+ * other users.
+ */
+static PyObject *ibm_db_column_privileges(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	SQLWCHAR *column_name = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	PyObject *py_column_name = NULL;
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	int rc;
+	/* Bug fix: must start at 0 -- getUnicodeDataAsSQLWCHAR() only sets this
+	 * flag when it allocates a new buffer, so the free check below read an
+	 * indeterminate value when every catalog argument was None. */
+	int isNewBuffer = 0;
+
+	/* Only the connection is required; the four catalog filters default
+	 * to NULL, which the CLI treats as "match all". */
+	if (!PyArg_ParseTuple(args, "O|OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name, &py_column_name))
+		return NULL;
+	
+	/* Each filter, when supplied, must be str/unicode; it is normalized to
+	 * a unicode object (new reference, released before every return). */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (py_column_name != NULL && py_column_name != Py_None) {
+		/* Bug fix: the type check previously tested py_table_name (copy-paste
+		 * error), letting a non-string column_name through. */
+		if (PyString_Check(py_column_name) || PyUnicode_Check(py_column_name)){
+			py_column_name = PyUnicode_FromObject(py_column_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "column_name must be a string");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+		/* Convert the unicode filters to SQLWCHAR buffers for the W API. */
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+		/* Bug fix: column_name was never converted, so SQLColumnPrivilegesW
+		 * always received NULL and the column filter was silently ignored
+		 * (ibm_db_columns performs this conversion). */
+		if (py_column_name && py_column_name != Py_None )
+			column_name = getUnicodeDataAsSQLWCHAR(py_column_name, &isNewBuffer);
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLColumnPrivilegesW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS,
+							owner, SQL_NTS, table_name, SQL_NTS, column_name,
+							SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(table_name) PyMem_Del(table_name);
+			if(column_name) PyMem_Del(column_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt, 
+							SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_column_name);
+
+		/* The statement handle owns the open result set. */
+		return (PyObject *)stmt_res;
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_column_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.columns
+ * ===Description
+ * resource ibm_db.columns ( resource connection [, string qualifier
+ * [, string schema [, string table-name [, string column-name]]]] )
+ *
+ * Returns a result set listing the columns and associated metadata for a table.
+ *
+ * ===Parameters
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables. To match all schemas, pass '%'.
+ *
+ * ====table-name
+ *		The name of the table or view. To match all tables in the database,
+ * pass NULL or an empty string.
+ *
+ * ====column-name
+ *		The name of the column. To match all columns in the table, pass NULL or
+ * an empty string.
+ *
+ * ===Return Values
+ * Returns a statement resource with a result set containing rows describing the
+ * columns matching the specified parameters.
+ * The rows are composed of the following columns:
+ *
+ * TABLE_CAT:: Name of the catalog. The value is NULL if this table does not
+ * have catalogs.
+ * TABLE_SCHEM:: Name of the schema.
+ * TABLE_NAME:: Name of the table or view.
+ * COLUMN_NAME:: Name of the column.
+ * DATA_TYPE:: The SQL data type for the column represented as an integer value.
+ * TYPE_NAME:: A string representing the data type for the column.
+ * COLUMN_SIZE:: An integer value representing the size of the column.
+ * BUFFER_LENGTH:: Maximum number of bytes necessary to store data from this
+ * column.
+ * DECIMAL_DIGITS:: The scale of the column, or NULL where scale is not
+ * applicable.
+ * NUM_PREC_RADIX:: An integer value of either 10 (representing an exact numeric
+ * data type), 2 (representing an approximate numeric data type), or NULL
+ * (representing a data type for which radix is not applicable).
+ * NULLABLE:: An integer value representing whether the column is nullable or
+ * not.
+ * REMARKS:: Description of the column.
+ * COLUMN_DEF:: Default value for the column.
+ * SQL_DATA_TYPE:: An integer value representing the size of the column.
+ * SQL_DATETIME_SUB:: Returns an integer value representing a datetime subtype
+ * code, or NULL for SQL data types to which this does not apply.
+ * CHAR_OCTET_LENGTH::	Maximum length in octets for a character data type
+ * column, which matches COLUMN_SIZE for single-byte character set data, or
+ * NULL for non-character data types.
+ * ORDINAL_POSITION:: The 1-indexed position of the column in the table.
+ * IS_NULLABLE:: A string value where 'YES' means that the column is nullable
+ * and 'NO' means that the column is not nullable.
+ */
+static PyObject *ibm_db_columns(PyObject *self, PyObject *args)			
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	SQLWCHAR *column_name = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	PyObject *py_column_name = NULL;
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	int rc;
+	/* Bug fix: must start at 0 -- getUnicodeDataAsSQLWCHAR() only sets this
+	 * flag when it allocates a new buffer, so the free check below read an
+	 * indeterminate value when every catalog argument was None. */
+	int isNewBuffer = 0;
+
+	/* Only the connection is required; the four catalog filters default
+	 * to NULL, which the CLI treats as "match all". */
+	if (!PyArg_ParseTuple(args, "O|OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name, &py_column_name))
+		return NULL;
+
+	/* Each filter, when supplied, must be str/unicode; it is normalized to
+	 * a unicode object (new reference, released before every return). */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (py_column_name != NULL && py_column_name != Py_None) {
+		/* Bug fix: the type check previously tested py_table_name (copy-paste
+		 * error), letting a non-string column_name through. */
+		if (PyString_Check(py_column_name) || PyUnicode_Check(py_column_name)){
+			py_column_name = PyUnicode_FromObject(py_column_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "column_name must be a string");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+
+		/* Convert the unicode filters to SQLWCHAR buffers for the W API. */
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+		if (py_column_name && py_column_name != Py_None )
+			column_name = getUnicodeDataAsSQLWCHAR(py_column_name, &isNewBuffer);
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLColumnsW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS,
+			owner,SQL_NTS, table_name, SQL_NTS, column_name, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+		    if(qualifier) PyMem_Del(qualifier);
+		    if(owner) PyMem_Del(owner);
+		    if(table_name) PyMem_Del(table_name);
+		    if(column_name) PyMem_Del(column_name);
+		}	
+				
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt, 
+				SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_column_name);
+
+		/* The statement handle owns the open result set. */
+		return (PyObject *)stmt_res;				  
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_column_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.foreign_keys
+ *
+ * ===Description
+ * resource ibm_db.foreign_keys ( resource connection, string pk_qualifier,
+ * string pk_schema, string pk_table-name, string fk_qualifier
+ * string fk_schema, string fk_table-name )
+ *
+ * Returns a result set listing the foreign keys for a table.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====pk_qualifier
+ *		A qualifier for the pk_table-name argument for the DB2 databases 
+ * running on OS/390 or z/OS servers. For other databases, pass NULL or an empty 
+ * string. 
+ *
+ * ====pk_schema
+ *		The schema for the pk_table-name argument which contains the tables. If 
+ * schema is NULL, ibm_db.foreign_keys() matches the schema for the current 
+ * connection. 
+ *
+ * ====pk_table-name
+ *		The name of the table which contains the primary key.
+ *
+ * ====fk_qualifier
+ *		A qualifier for the fk_table-name argument for the DB2 databases
+ * running on OS/390 or z/OS servers. For other databases, pass NULL or an empty
+ * string.
+ *
+ * ====fk_schema
+ *		The schema for the fk_table-name argument which contains the tables. If
+ * schema is NULL, ibm_db.foreign_keys() matches the schema for the current
+ * connection.
+ *
+ * ====fk_table-name
+ *		The name of the table which contains the foreign key.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing the
+ * foreign keys for the specified table. The result set is composed of the
+ * following columns:
+ *
+ * Column name::	Description
+ * PKTABLE_CAT:: Name of the catalog for the table containing the primary key.
+ * The value is NULL if this table does not have catalogs.
+ * PKTABLE_SCHEM:: Name of the schema for the table containing the primary key.
+ * PKTABLE_NAME:: Name of the table containing the primary key.
+ * PKCOLUMN_NAME:: Name of the column containing the primary key.
+ * FKTABLE_CAT:: Name of the catalog for the table containing the foreign key.
+ * The value is NULL if this table does not have catalogs.
+ * FKTABLE_SCHEM:: Name of the schema for the table containing the foreign key.
+ * FKTABLE_NAME:: Name of the table containing the foreign key.
+ * FKCOLUMN_NAME:: Name of the column containing the foreign key.
+ * KEY_SEQ:: 1-indexed position of the column in the key.
+ * UPDATE_RULE:: Integer value representing the action applied to the foreign
+ * key when the SQL operation is UPDATE.
+ * DELETE_RULE:: Integer value representing the action applied to the foreign
+ * key when the SQL operation is DELETE.
+ * FK_NAME:: The name of the foreign key.
+ * PK_NAME:: The name of the primary key.
+ * DEFERRABILITY:: An integer value representing whether the foreign key
+ * deferrability is SQL_INITIALLY_DEFERRED, SQL_INITIALLY_IMMEDIATE, or
+ * SQL_NOT_DEFERRABLE.
+ */
+static PyObject *ibm_db_foreign_keys(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *pk_qualifier = NULL;
+	SQLWCHAR *pk_owner = NULL;
+	SQLWCHAR *pk_table_name = NULL;
+	SQLWCHAR *fk_qualifier = NULL;
+	SQLWCHAR *fk_owner = NULL;
+	SQLWCHAR *fk_table_name = NULL;
+	int rc;
+	conn_handle *conn_res = NULL;
+	stmt_handle *stmt_res;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_pk_qualifier = NULL;
+	PyObject *py_pk_owner = NULL;
+	PyObject *py_pk_table_name = NULL;
+	PyObject *py_fk_qualifier = NULL;
+	PyObject *py_fk_owner = NULL;
+	PyObject *py_fk_table_name = NULL;
+	/* getUnicodeDataAsSQLWCHAR only sets this when it allocates; the
+	 * initializer keeps the free check below well-defined. */
+	int isNewBuffer = 0;
+
+	/* Connection plus the three pk_* arguments are required; the fk_*
+	 * arguments are optional. */
+	if (!PyArg_ParseTuple(args, "OOOO|OOO", &py_conn_res, &py_pk_qualifier, 
+		&py_pk_owner, &py_pk_table_name, &py_fk_qualifier,
+		&py_fk_owner, &py_fk_table_name))
+		return NULL;
+
+	/* Each argument, when supplied, must be str/unicode; it is normalized
+	 * to a unicode object (new reference, released before every return). */
+	if (py_pk_qualifier != NULL && py_pk_qualifier != Py_None) {
+		if (PyString_Check(py_pk_qualifier) || PyUnicode_Check(py_pk_qualifier)){
+			py_pk_qualifier = PyUnicode_FromObject(py_pk_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, 
+				"qualifier for table containing primary key must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_pk_owner != NULL && py_pk_owner != Py_None) {
+		if (PyString_Check(py_pk_owner) || PyUnicode_Check(py_pk_owner)){
+			py_pk_owner = PyUnicode_FromObject(py_pk_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception,  
+				"owner of table containing primary key must be a string or unicode");
+			Py_XDECREF(py_pk_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_pk_table_name != NULL && py_pk_table_name != Py_None) {
+		if (PyString_Check(py_pk_table_name) || PyUnicode_Check(py_pk_table_name)){
+			py_pk_table_name = PyUnicode_FromObject(py_pk_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, 
+				"name of the table that contains primary key must be a string or unicode");
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			return NULL;
+		}
+	}
+
+	if (py_fk_qualifier != NULL && py_fk_qualifier != Py_None) {
+		if (PyString_Check(py_fk_qualifier) || PyUnicode_Check(py_fk_qualifier)){
+			py_fk_qualifier = PyUnicode_FromObject(py_fk_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, 
+				"qualifier for table containing the foreign key must be a string or unicode");
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			return NULL;
+		}
+	}
+
+	if (py_fk_owner != NULL && py_fk_owner != Py_None) {
+		if (PyString_Check(py_fk_owner) || PyUnicode_Check(py_fk_owner)){
+			py_fk_owner = PyUnicode_FromObject(py_fk_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, 
+				"owner of table containing the foreign key must be a string or unicode");
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			Py_XDECREF(py_fk_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_fk_table_name != NULL && py_fk_table_name != Py_None) {
+		if (PyString_Check(py_fk_table_name) || PyUnicode_Check(py_fk_table_name)){
+			py_fk_table_name = PyUnicode_FromObject(py_fk_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, 
+				"name of the table that contains foreign key must be a string or unicode");
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			Py_XDECREF(py_fk_qualifier);
+			Py_XDECREF(py_fk_owner);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			Py_XDECREF(py_fk_qualifier);
+			Py_XDECREF(py_fk_owner);
+			Py_XDECREF(py_fk_table_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		Py_END_ALLOW_THREADS;
+
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			Py_XDECREF(py_fk_qualifier);
+			Py_XDECREF(py_fk_owner);
+			Py_XDECREF(py_fk_table_name);
+
+			Py_RETURN_FALSE;
+		}
+
+		/* Convert the unicode arguments to SQLWCHAR buffers for the W API. */
+		if(py_pk_qualifier && py_pk_qualifier != Py_None)
+			pk_qualifier = getUnicodeDataAsSQLWCHAR(py_pk_qualifier, &isNewBuffer);
+		if(py_pk_owner && py_pk_owner != Py_None)
+			pk_owner = getUnicodeDataAsSQLWCHAR(py_pk_owner, &isNewBuffer);
+		if(py_pk_table_name && py_pk_table_name != Py_None)
+			pk_table_name = getUnicodeDataAsSQLWCHAR(py_pk_table_name, &isNewBuffer);
+		if(py_fk_qualifier && py_fk_qualifier != Py_None)
+			fk_qualifier = getUnicodeDataAsSQLWCHAR(py_fk_qualifier, &isNewBuffer);
+		if(py_fk_owner && py_fk_owner != Py_None)
+			fk_owner = getUnicodeDataAsSQLWCHAR(py_fk_owner, &isNewBuffer);
+		if(py_fk_table_name && py_fk_table_name != Py_None)
+			fk_table_name = getUnicodeDataAsSQLWCHAR(py_fk_table_name, &isNewBuffer);
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLForeignKeysW((SQLHSTMT)stmt_res->hstmt, pk_qualifier, SQL_NTS,
+						pk_owner, SQL_NTS, pk_table_name, SQL_NTS, fk_qualifier, SQL_NTS,
+						fk_owner, SQL_NTS, fk_table_name, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+		    if(pk_qualifier) PyMem_Del(pk_qualifier);
+		    if(pk_owner) PyMem_Del(pk_owner);
+		    if(pk_table_name) PyMem_Del(pk_table_name);
+		    if(fk_qualifier) PyMem_Del(fk_qualifier);
+		    if(fk_owner) PyMem_Del(fk_owner);
+		    if(fk_table_name) PyMem_Del(fk_table_name);
+		}	
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+											1, NULL, -1, 1);
+			Py_XDECREF(py_pk_qualifier);
+			Py_XDECREF(py_pk_owner);
+			Py_XDECREF(py_pk_table_name);
+			Py_XDECREF(py_fk_qualifier);
+			Py_XDECREF(py_fk_owner);
+			Py_XDECREF(py_fk_table_name);
+			/* Error details were recorded by the check above; clear the
+			 * Python exception so the caller sees a False return instead. */
+			PyErr_Clear( );
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_pk_qualifier);
+		Py_XDECREF(py_pk_owner);
+		Py_XDECREF(py_pk_table_name);
+		Py_XDECREF(py_fk_qualifier);
+		Py_XDECREF(py_fk_owner);
+		Py_XDECREF(py_fk_table_name);
+		/* The statement handle owns the open result set. */
+		return (PyObject *)stmt_res;				  
+
+	} else {
+		Py_XDECREF(py_pk_qualifier);
+		Py_XDECREF(py_pk_owner);
+		Py_XDECREF(py_pk_table_name);
+		Py_XDECREF(py_fk_qualifier);
+		Py_XDECREF(py_fk_owner);
+		Py_XDECREF(py_fk_table_name);
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.primary_keys
+ *
+ * ===Description
+ * resource ibm_db.primary_keys ( resource connection, string qualifier,
+ * string schema, string table-name )
+ *
+ * Returns a result set listing the primary keys for a table.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables. If schema is NULL,
+ * ibm_db.primary_keys() matches the schema for the current connection.
+ *
+ * ====table-name
+ *		The name of the table.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing the
+ * primary keys for the specified table.
+ * The result set is composed of the following columns:
+ *
+ * Column name:: Description
+ * TABLE_CAT:: Name of the catalog for the table containing the primary key.
+ * The value is NULL if this table does not have catalogs.
+ * TABLE_SCHEM:: Name of the schema for the table containing the primary key.
+ * TABLE_NAME:: Name of the table containing the primary key.
+ * COLUMN_NAME:: Name of the column containing the primary key.
+ * KEY_SEQ:: 1-indexed position of the column in the key.
+ * PK_NAME:: The name of the primary key.
+ */
+static PyObject *ibm_db_primary_keys(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	int rc;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	/* Bug fix: must start at 0 -- getUnicodeDataAsSQLWCHAR() only sets this
+	 * flag when it allocates a new buffer, so the free check below read an
+	 * indeterminate value when every catalog argument was None (the
+	 * ibm_db_foreign_keys implementation already initializes it). */
+	int isNewBuffer = 0;
+
+	/* All four arguments are required, though qualifier/owner/table_name
+	 * may be passed as None. */
+	if (!PyArg_ParseTuple(args, "OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name))
+		return NULL;
+
+	/* Each argument, when not None, must be str/unicode; it is normalized
+	 * to a unicode object (new reference, released before every return). */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		/* Convert the unicode arguments to SQLWCHAR buffers for the W API. */
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+
+		Py_BEGIN_ALLOW_THREADS;	
+		rc = SQLPrimaryKeysW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS,
+			owner, SQL_NTS, table_name, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+		    if(qualifier) PyMem_Del(qualifier);
+		    if(owner) PyMem_Del(owner);
+		    if(table_name) PyMem_Del(table_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		/* The statement handle owns the open result set. */
+		return (PyObject *)stmt_res;			 
+
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.procedure_columns
+ *
+ * ===Description
+ * resource ibm_db.procedure_columns ( resource connection, string qualifier,
+ * string schema, string procedure, string parameter )
+ *
+ * Returns a result set listing the parameters for one or more stored procedures
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the procedures. This parameter accepts a
+ * search pattern containing _ and % as wildcards.
+ *
+ * ====procedure
+ *		The name of the procedure. This parameter accepts a search pattern
+ * containing _ and % as wildcards.
+ *
+ * ====parameter
+ *		The name of the parameter. This parameter accepts a search pattern
+ * containing _ and % as wildcards.
+ *		If this parameter is NULL, all parameters for the specified stored
+ * procedures are returned.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing the
+ * parameters for the stored procedures matching the specified parameters. The
+ * rows are composed of the following columns:
+ *
+ * Column name::	Description
+ * PROCEDURE_CAT:: The catalog that contains the procedure. The value is NULL
+ * if this table does not have catalogs.
+ * PROCEDURE_SCHEM:: Name of the schema that contains the stored procedure.
+ * PROCEDURE_NAME:: Name of the procedure.
+ * COLUMN_NAME:: Name of the parameter.
+ * COLUMN_TYPE:: An integer value representing the type of the parameter:
+ *					  Return value:: Parameter type
+ *					  1:: (SQL_PARAM_INPUT)	Input (IN) parameter.
+ *					  2:: (SQL_PARAM_INPUT_OUTPUT) Input/output (INOUT)
+ *						  parameter.
+ *					  3:: (SQL_PARAM_OUTPUT) Output (OUT) parameter.
+ * DATA_TYPE:: The SQL data type for the parameter represented as an integer
+ * value.
+ * TYPE_NAME:: A string representing the data type for the parameter.
+ * COLUMN_SIZE:: An integer value representing the size of the parameter.
+ * BUFFER_LENGTH:: Maximum number of bytes necessary to store data for this
+ * parameter.
+ * DECIMAL_DIGITS:: The scale of the parameter, or NULL where scale is not
+ * applicable.
+ * NUM_PREC_RADIX:: An integer value of either 10 (representing an exact numeric
+ * data type), 2 (representing an approximate numeric data type), or NULL
+ * (representing a data type for which radix is not applicable).
+ * NULLABLE:: An integer value representing whether the parameter is nullable or
+ * not.
+ * REMARKS:: Description of the parameter.
+ * COLUMN_DEF:: Default value for the parameter.
+ * SQL_DATA_TYPE:: An integer value representing the size of the parameter.
+ * SQL_DATETIME_SUB:: Returns an integer value representing a datetime subtype
+ * code, or NULL for SQL data types to which this does not apply.
+ * CHAR_OCTET_LENGTH:: Maximum length in octets for a character data type
+ * parameter, which matches COLUMN_SIZE for single-byte character set data, or
+ * NULL for non-character data types.
+ * ORDINAL_POSITION:: The 1-indexed position of the parameter in the CALL
+ * statement.
+ * IS_NULLABLE:: A string value where 'YES' means that the parameter accepts or
+ * returns NULL values and 'NO' means that the parameter does not accept or
+ * return NULL values.
+ */
+/* C entry point for ibm_db.procedure_columns (documented in the block above).
+ * Wraps SQLProcedureColumnsW(): validates the optional qualifier/schema/
+ * procedure/parameter-name arguments, allocates a statement handle on the
+ * supplied connection, and returns the statement resource (Py_False on
+ * SQL error, NULL with an exception set on bad arguments).
+ */
+static PyObject *ibm_db_procedure_columns(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *proc_name = NULL;
+	SQLWCHAR *column_name = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_proc_name = NULL;
+	PyObject *py_column_name = NULL;
+	PyObject *py_conn_res = NULL;
+	int rc = 0;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	/* Must be initialized: getUnicodeDataAsSQLWCHAR() is the only writer,
+	 * and it is never called when every name argument is None/omitted, so
+	 * an uninitialized flag would be read at the cleanup below. */
+	int isNewBuffer = 0;
+
+	if (!PyArg_ParseTuple(args, "O|OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_proc_name, &py_column_name))
+		return NULL;
+
+	/* Normalize each optional catalog-name argument to a unicode object.
+	 * NOTE(review): when an argument is Py_None, the uniform Py_XDECREFs
+	 * at the exits release a reference this function never acquired;
+	 * benign for the None singleton but worth tightening file-wide. */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_proc_name != NULL && py_proc_name != Py_None) {
+		if (PyString_Check(py_proc_name) || PyUnicode_Check(py_proc_name)){
+			py_proc_name = PyUnicode_FromObject(py_proc_name);
+		}
+		else {
+			/* Fixed copy-paste: this argument is the procedure, not a table. */
+			PyErr_SetString(PyExc_Exception, "procedure name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (py_column_name != NULL && py_column_name != Py_None) {
+		if (PyString_Check(py_column_name) || PyUnicode_Check(py_column_name)){
+			py_column_name = PyUnicode_FromObject(py_column_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "column_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			Py_XDECREF(py_column_name);
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			Py_XDECREF(py_column_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			/* NOTE(review): stmt_res is not released on this path — confirm
+			 * whether _ibm_db_new_stmt_struct() output needs a DECREF here. */
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_proc_name && py_proc_name != Py_None )
+			proc_name = getUnicodeDataAsSQLWCHAR(py_proc_name, &isNewBuffer);
+		if (py_column_name && py_column_name != Py_None )
+			column_name = getUnicodeDataAsSQLWCHAR(py_column_name, &isNewBuffer);
+
+		/* Release the GIL around the blocking CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLProcedureColumnsW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS, 
+			owner, SQL_NTS, proc_name, SQL_NTS, column_name, 
+			SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(proc_name) PyMem_Del(proc_name);
+			if(column_name) PyMem_Del(column_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			Py_XDECREF(py_column_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_proc_name);
+		Py_XDECREF(py_column_name);
+
+		return (PyObject *)stmt_res;			
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_proc_name);
+		Py_XDECREF(py_column_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.procedures
+ *
+ * ===Description
+ * resource ibm_db.procedures ( resource connection, string qualifier,
+ * string schema, string procedure )
+ *
+ * Returns a result set listing the stored procedures registered in a database.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the procedures. This parameter accepts a
+ * search pattern containing _ and % as wildcards.
+ *
+ * ====procedure
+ *		The name of the procedure. This parameter accepts a search pattern
+ * containing _ and % as wildcards.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing the
+ * stored procedures matching the specified parameters. The rows are composed of
+ * the following columns:
+ *
+ * Column name:: Description
+ * PROCEDURE_CAT:: The catalog that contains the procedure. The value is NULL if
+ * this table does not have catalogs.
+ * PROCEDURE_SCHEM:: Name of the schema that contains the stored procedure.
+ * PROCEDURE_NAME:: Name of the procedure.
+ * NUM_INPUT_PARAMS:: Number of input (IN) parameters for the stored procedure.
+ * NUM_OUTPUT_PARAMS:: Number of output (OUT) parameters for the stored
+ * procedure.
+ * NUM_RESULT_SETS:: Number of result sets returned by the stored procedure.
+ * REMARKS:: Any comments about the stored procedure.
+ * PROCEDURE_TYPE:: Always returns 1, indicating that the stored procedure does
+ * not return a return value.
+ */
+/* C entry point for ibm_db.procedures (documented in the block above).
+ * Wraps SQLProceduresW(): validates qualifier/schema/procedure arguments,
+ * allocates a statement handle on the supplied connection, and returns the
+ * statement resource (Py_False on SQL error, NULL on bad arguments).
+ */
+static PyObject *ibm_db_procedures(PyObject *self, PyObject *args)			
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *proc_name = NULL;
+	int rc = 0;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_proc_name = NULL;
+	/* Must be initialized: only getUnicodeDataAsSQLWCHAR() writes it, and
+	 * it is skipped when every name argument is None, so the cleanup below
+	 * would otherwise read an uninitialized flag. */
+	int isNewBuffer = 0;
+
+	if (!PyArg_ParseTuple(args, "OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_proc_name))
+		return NULL;
+
+	/* Normalize each name argument to a unicode object (None passes through). */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_proc_name != NULL && py_proc_name != Py_None) {
+		if (PyString_Check(py_proc_name) || PyUnicode_Check(py_proc_name)){
+			py_proc_name = PyUnicode_FromObject(py_proc_name);
+		}
+		else {
+			/* Fixed copy-paste: this argument is the procedure, not a table. */
+			PyErr_SetString(PyExc_Exception, "procedure name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_proc_name && py_proc_name != Py_None )
+			proc_name = getUnicodeDataAsSQLWCHAR(py_proc_name, &isNewBuffer);
+
+		/* Release the GIL around the blocking CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLProceduresW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS, owner,
+			SQL_NTS, proc_name, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(proc_name) PyMem_Del(proc_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_proc_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_proc_name);
+		return (PyObject *)stmt_res;				
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_proc_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.special_columns
+ *
+ * ===Description
+ * resource ibm_db.special_columns ( resource connection, string qualifier,
+ * string schema, string table_name, int scope )
+ *
+ * Returns a result set listing the unique row identifier columns for a table.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables.
+ *
+ * ====table_name
+ *		The name of the table.
+ *
+ * ====scope
+ *		Integer value representing the minimum duration for which the unique
+ * row identifier is valid. This can be one of the following values:
+ *
+ *		0: Row identifier is valid only while the cursor is positioned on the
+ * row. (SQL_SCOPE_CURROW)
+ *		1: Row identifier is valid for the duration of the transaction.
+ * (SQL_SCOPE_TRANSACTION)
+ *		2: Row identifier is valid for the duration of the connection.
+ * (SQL_SCOPE_SESSION)
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows with unique
+ * row identifier information for a table.
+ * The rows are composed of the following columns:
+ *
+ * Column name:: Description
+ *
+ * SCOPE:: Integer value representing the minimum duration for which the unique
+ * row identifier is valid.
+ *
+ *			 0: Row identifier is valid only while the cursor is positioned on
+ * the row. (SQL_SCOPE_CURROW)
+ *
+ *			 1: Row identifier is valid for the duration of the transaction.
+ * (SQL_SCOPE_TRANSACTION)
+ *
+ *			 2: Row identifier is valid for the duration of the connection.
+ * (SQL_SCOPE_SESSION)
+ *
+ * COLUMN_NAME:: Name of the unique column.
+ *
+ * DATA_TYPE:: SQL data type for the column.
+ *
+ * TYPE_NAME:: Character string representation of the SQL data type for the
+ * column.
+ *
+ * COLUMN_SIZE:: An integer value representing the size of the column.
+ *
+ * BUFFER_LENGTH:: Maximum number of bytes necessary to store data from this
+ * column.
+ *
+ * DECIMAL_DIGITS:: The scale of the column, or NULL where scale is not
+ * applicable.
+ *
+ * NUM_PREC_RADIX:: An integer value of either 10 (representing an exact numeric
+ * data type), 2 (representing an approximate numeric data type), or NULL
+ * (representing a data type for which radix is not applicable).
+ *
+ * PSEUDO_COLUMN:: Always returns 1.
+ */
+/* C entry point for ibm_db.special_columns (documented in the block above).
+ * Wraps SQLSpecialColumnsW() with SQL_BEST_ROWID/SQL_NULLABLE: returns a
+ * statement resource listing the unique row-identifier columns of a table
+ * (Py_False on SQL error, NULL with an exception set on bad arguments).
+ */
+static PyObject *ibm_db_special_columns(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	int scope = 0;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	int rc;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_scope = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	/* Must be initialized: only getUnicodeDataAsSQLWCHAR() writes it, and
+	 * it is skipped when every name argument is None, so the cleanup below
+	 * would otherwise read an uninitialized flag. */
+	int isNewBuffer = 0;
+
+	if (!PyArg_ParseTuple(args, "OOOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name, &py_scope))
+		return NULL;
+
+	/* scope must be an int (SQL_SCOPE_CURROW/TRANSACTION/SESSION); checked
+	 * before any conversions so no references need releasing here. */
+	if (!NIL_P(py_scope)) {
+		if (PyInt_Check(py_scope)) {
+			scope = (int) PyInt_AsLong(py_scope);
+		} else {
+			PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+			return NULL;
+		}
+	}
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+
+		/* Release the GIL around the blocking CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLSpecialColumnsW((SQLHSTMT)stmt_res->hstmt, SQL_BEST_ROWID, 
+			qualifier, SQL_NTS, owner, SQL_NTS, table_name,
+			SQL_NTS, scope, SQL_NULLABLE);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(table_name) PyMem_Del(table_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		return (PyObject *)stmt_res;
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		Py_RETURN_FALSE;
+
+	}
+}
+
+/*!# ibm_db.statistics
+ *
+ * ===Description
+ * resource ibm_db.statistics ( resource connection, string qualifier,
+ * string schema, string table-name, bool unique )
+ *
+ * Returns a result set listing the index and statistics for a table.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema that contains the targeted table. If this parameter is NULL,
+ * the statistics and indexes are returned for the schema of the current user.
+ *
+ * ====table_name
+ *		The name of the table.
+ *
+ * ====unique
+ *		A boolean value representing the type of index information to return.
+ *
+ *		False	 Return only the information for unique indexes on the table.
+ *
+ *		True	  Return the information for all indexes on the table.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing the
+ * statistics and indexes for the base tables matching the specified parameters.
+ * The rows are composed of the following columns:
+ *
+ * Column name:: Description
+ * TABLE_CAT:: The catalog that contains the table. The value is NULL if this
+ * table does not have catalogs.
+ * TABLE_SCHEM:: Name of the schema that contains the table.
+ * TABLE_NAME:: Name of the table.
+ * NON_UNIQUE:: An integer value representing whether the index prohibits unique
+ * values, or whether the row represents statistics on the table itself:
+ *
+ *					 Return value:: Parameter type
+ *					 0 (SQL_FALSE):: The index allows duplicate values.
+ *					 1 (SQL_TRUE):: The index values must be unique.
+ *					 NULL:: This row is statistics information for the table
+ *					 itself.
+ *
+ * INDEX_QUALIFIER:: A string value representing the qualifier that would have
+ * to be prepended to INDEX_NAME to fully qualify the index.
+ * INDEX_NAME:: A string representing the name of the index.
+ * TYPE:: An integer value representing the type of information contained in
+ * this row of the result set:
+ *
+ *			Return value:: Parameter type
+ *			0 (SQL_TABLE_STAT):: The row contains statistics about the table
+ *								 itself.
+ *			1 (SQL_INDEX_CLUSTERED):: The row contains information about a
+ *									  clustered index.
+ *			2 (SQL_INDEX_HASH):: The row contains information about a hashed
+ *								 index.
+ *			3 (SQL_INDEX_OTHER):: The row contains information about a type of
+ * index that is neither clustered nor hashed.
+ *
+ * ORDINAL_POSITION:: The 1-indexed position of the column in the index. NULL if
+ * the row contains statistics information about the table itself.
+ * COLUMN_NAME:: The name of the column in the index. NULL if the row contains
+ * statistics information about the table itself.
+ * ASC_OR_DESC:: A if the column is sorted in ascending order, D if the column
+ * is sorted in descending order, NULL if the row contains statistics
+ * information about the table itself.
+ * CARDINALITY:: If the row contains information about an index, this column
+ * contains an integer value representing the number of unique values in the
+ * index. If the row contains information about the table itself, this column
+ * contains an integer value representing the number of rows in the table.
+ * PAGES:: If the row contains information about an index, this column contains
+ * an integer value representing the number of pages used to store the index. If
+ * the row contains information about the table itself, this column contains an
+ * integer value representing the number of pages used to store the table.
+ * FILTER_CONDITION:: Always returns NULL.
+ */ 
+/* C entry point for ibm_db.statistics (documented in the block above).
+ * Wraps SQLStatisticsW() with SQL_QUICK: returns a statement resource
+ * describing indexes/statistics for a table; `unique` (bool) selects
+ * SQL_INDEX_ALL vs unique-only semantics via the sql_unique flag.
+ */
+static PyObject *ibm_db_statistics(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	int unique = 0;
+	int rc = 0;
+	SQLUSMALLINT sql_unique;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	PyObject *py_unique = NULL;
+	/* Must be initialized: only getUnicodeDataAsSQLWCHAR() writes it, and
+	 * it is skipped when every name argument is None, so the cleanup below
+	 * would otherwise read an uninitialized flag. */
+	int isNewBuffer = 0;
+
+	if (!PyArg_ParseTuple(args, "OOOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name, &py_unique))
+		return NULL;
+
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (py_unique != NULL && py_unique != Py_None) {
+		if (PyBool_Check(py_unique)) {
+			if (py_unique == Py_True)
+				unique = 1;
+			else
+				unique = 0;
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "unique must be a boolean");
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+		sql_unique = unique;
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+
+		/* Release the GIL around the blocking CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLStatisticsW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS, owner,
+			SQL_NTS, table_name, SQL_NTS, sql_unique, SQL_QUICK);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(table_name) PyMem_Del(table_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		return (PyObject *)stmt_res;
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.table_privileges
+ *
+ * ===Description
+ * resource ibm_db.table_privileges ( resource connection [, string qualifier
+ * [, string schema [, string table_name]]] )
+ *
+ * Returns a result set listing the tables and associated privileges in a
+ * database.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables. This parameter accepts a search
+ * pattern containing _ and % as wildcards.
+ *
+ * ====table_name
+ *		The name of the table. This parameter accepts a search pattern
+ * containing _ and % as wildcards.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing
+ * the privileges for the tables that match the specified parameters. The rows
+ * are composed of the following columns:
+ *
+ * Column name:: Description
+ * TABLE_CAT:: The catalog that contains the table. The value is NULL if this
+ * table does not have catalogs.
+ * TABLE_SCHEM:: Name of the schema that contains the table.
+ * TABLE_NAME:: Name of the table.
+ * GRANTOR:: Authorization ID of the user who granted the privilege.
+ * GRANTEE:: Authorization ID of the user to whom the privilege was granted.
+ * PRIVILEGE:: The privilege that has been granted. This can be one of ALTER,
+ * CONTROL, DELETE, INDEX, INSERT, REFERENCES, SELECT, or UPDATE.
+ * IS_GRANTABLE:: A string value of "YES" or "NO" indicating whether the grantee
+ * can grant the privilege to other users.
+ */
+/* C entry point for ibm_db.table_privileges (documented in the block above).
+ * Wraps SQLTablePrivilegesW(): returns a statement resource listing table
+ * privileges matching the optional qualifier/schema/table-name patterns
+ * (Py_False on SQL error, NULL with an exception set on bad arguments).
+ */
+static PyObject *ibm_db_table_privileges(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	int rc;
+	PyObject *py_conn_res = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	/* Must be initialized: only getUnicodeDataAsSQLWCHAR() writes it, and
+	 * it is skipped when every name argument is None/omitted, so the
+	 * cleanup below would otherwise read an uninitialized flag. */
+	int isNewBuffer = 0;
+
+	if (!PyArg_ParseTuple(args, "O|OOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name))
+		return NULL;
+
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			/* Release the unicode objects created above (previously leaked). */
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		/* The TypeCheck above guarantees conn_res is non-NULL here, so the
+		 * old `if (!conn_res)` test (which ran AFTER dereferencing
+		 * conn_res->handle_active) was unreachable and has been removed. */
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewBuffer);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewBuffer);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewBuffer);
+
+		/* Release the GIL around the blocking CLI call. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLTablePrivilegesW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS, 
+			owner, SQL_NTS, table_name, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if (isNewBuffer) {
+			if(qualifier) PyMem_Del(qualifier);
+			if(owner) PyMem_Del(owner);
+			if(table_name) PyMem_Del(table_name);
+		}
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		return (PyObject *)stmt_res;
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.tables
+ *
+ * ===Description
+ * resource ibm_db.tables ( resource connection [, string qualifier [, string
+ * schema [, string table-name [, string table-type]]]] )
+ *
+ * Returns a result set listing the tables and associated metadata in a database
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid connection to an IBM DB2, Cloudscape, or Apache Derby database.
+ *
+ * ====qualifier
+ *		A qualifier for DB2 databases running on OS/390 or z/OS servers. For
+ * other databases, pass NULL or an empty string.
+ *
+ * ====schema
+ *		The schema which contains the tables. This parameter accepts a search
+ * pattern containing _ and % as wildcards.
+ *
+ * ====table-name
+ *		The name of the table. This parameter accepts a search pattern
+ * containing _ and % as wildcards.
+ *
+ * ====table-type
+ *		A list of comma-delimited table type identifiers. To match all table
+ * types, pass NULL or an empty string.
+ *		Valid table type identifiers include: ALIAS, HIERARCHY TABLE,
+ * INOPERATIVE VIEW, NICKNAME, MATERIALIZED QUERY TABLE, SYSTEM TABLE, TABLE,
+ * TYPED TABLE, TYPED VIEW, and VIEW.
+ *
+ * ===Return Values
+ *
+ * Returns a statement resource with a result set containing rows describing
+ * the tables that match the specified parameters.
+ * The rows are composed of the following columns:
+ *
+ * Column name:: Description
+ * TABLE_CAT:: The catalog that contains the table. The value is NULL if this
+ * table does not have catalogs.
+ * TABLE_SCHEMA:: Name of the schema that contains the table.
+ * TABLE_NAME:: Name of the table.
+ * TABLE_TYPE:: Table type identifier for the table.
+ * REMARKS:: Description of the table.
+ */
+static PyObject *ibm_db_tables(PyObject *self, PyObject *args)
+{
+	SQLWCHAR *qualifier = NULL;
+	SQLWCHAR *owner = NULL;
+	SQLWCHAR *table_name = NULL;
+	SQLWCHAR *table_type = NULL;
+	PyObject *py_qualifier = NULL;
+	PyObject *py_owner = NULL;
+	PyObject *py_table_name = NULL;
+	PyObject *py_table_type = NULL;
+	PyObject *py_conn_res;
+	conn_handle *conn_res;
+	stmt_handle *stmt_res;
+	int rc;
+	/* One "buffer was newly allocated" flag per SQLWCHAR buffer.  A single
+	 * shared flag (as before) is overwritten by every call to
+	 * getUnicodeDataAsSQLWCHAR and is read uninitialized when all catalog
+	 * arguments are None, so buffers could leak or be freed spuriously. */
+	int isNewQualifier = 0;
+	int isNewOwner = 0;
+	int isNewTableName = 0;
+	int isNewTableType = 0;
+
+	if (!PyArg_ParseTuple(args, "O|OOOO", &py_conn_res, &py_qualifier, &py_owner,
+		&py_table_name, &py_table_type))
+		return NULL;
+
+	/* Each optional catalog argument must be a string; convert it to a
+	 * unicode object (new reference, released before every return below). */
+	if (py_qualifier != NULL && py_qualifier != Py_None) {
+		if (PyString_Check(py_qualifier) || PyUnicode_Check(py_qualifier)){
+			py_qualifier = PyUnicode_FromObject(py_qualifier);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "qualifier must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (py_owner != NULL && py_owner != Py_None) {
+		if (PyString_Check(py_owner) || PyUnicode_Check(py_owner)){
+			py_owner = PyUnicode_FromObject(py_owner);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "owner must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			return NULL;
+		}
+	}
+
+	if (py_table_name != NULL && py_table_name != Py_None) {
+		if (PyString_Check(py_table_name) || PyUnicode_Check(py_table_name)){
+			py_table_name = PyUnicode_FromObject(py_table_name);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table_name must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			return NULL;
+		}
+	}
+
+	if (py_table_type != NULL && py_table_type != Py_None) {
+		if (PyString_Check(py_table_type) || PyUnicode_Check(py_table_type)){
+			py_table_type = PyUnicode_FromObject(py_table_type);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "table type must be a string or unicode");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_table_type);
+			return NULL;
+		}
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_table_type);
+			
+			Py_RETURN_FALSE;
+		}
+		if (py_qualifier && py_qualifier != Py_None )
+			qualifier = getUnicodeDataAsSQLWCHAR(py_qualifier, &isNewQualifier);
+		if (py_owner &&  py_owner != Py_None )
+			owner = getUnicodeDataAsSQLWCHAR(py_owner, &isNewOwner);
+		if (py_table_name && py_table_name != Py_None )
+			table_name = getUnicodeDataAsSQLWCHAR(py_table_name, &isNewTableName);
+		if (py_table_type && py_table_type != Py_None)
+			table_type = getUnicodeDataAsSQLWCHAR(py_table_type, &isNewTableType);
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLTablesW((SQLHSTMT)stmt_res->hstmt, qualifier, SQL_NTS, owner,
+			SQL_NTS, table_name, SQL_NTS, table_type, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		/* Release only the buffers getUnicodeDataAsSQLWCHAR allocated. */
+		if (isNewQualifier && qualifier) PyMem_Del(qualifier);
+		if (isNewOwner && owner) PyMem_Del(owner);
+		if (isNewTableName && table_name) PyMem_Del(table_name);
+		if (isNewTableType && table_type) PyMem_Del(table_type);
+		
+		if (rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+				1, NULL, -1, 1);
+			Py_XDECREF(py_qualifier);
+			Py_XDECREF(py_owner);
+			Py_XDECREF(py_table_name);
+			Py_XDECREF(py_table_type);
+
+			Py_RETURN_FALSE;
+		}
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_table_type);
+		
+		return (PyObject *)stmt_res;
+	} else {
+		Py_XDECREF(py_qualifier);
+		Py_XDECREF(py_owner);
+		Py_XDECREF(py_table_name);
+		Py_XDECREF(py_table_type);
+
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.commit
+ * ===Description
+ * bool ibm_db.commit ( resource connection )
+ *
+ * Commits an in-progress transaction on the specified connection resource and
+ * begins a new transaction.
+ * Python applications normally default to AUTOCOMMIT mode, so ibm_db.commit()
+ * is not necessary unless AUTOCOMMIT has been turned off for the connection
+ * resource.
+ *
+ * Note: If the specified connection resource is a persistent connection, all
+ * transactions in progress for all applications using that persistent
+ * connection will be committed. For this reason, persistent connections are
+ * not recommended for use in applications that require transactions.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid database connection resource variable as returned from
+ * ibm_db.connect() or ibm_db.pconnect().
+ *
+ * ===Return Values
+ *
+ * Returns TRUE on success or FALSE on failure.
+ */
+static PyObject *ibm_db_commit(PyObject *self, PyObject *args)			
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res = NULL;
+	int rc;
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	/* No usable connection object supplied: report failure with FALSE. */
+	if (NIL_P(py_conn_res)) {
+		Py_RETURN_FALSE;
+	}
+
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	conn_res = (conn_handle *)py_conn_res;
+
+	if (!conn_res->handle_active) {
+		PyErr_SetString(PyExc_Exception, "Connection is not active");
+		return NULL;
+	}
+
+	/* Commit the in-flight transaction on this connection handle. */
+	rc = SQLEndTran(SQL_HANDLE_DBC, conn_res->hdbc, SQL_COMMIT);
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+										 NULL, -1, 1);
+		Py_RETURN_FALSE;
+	}
+	Py_RETURN_TRUE;
+}
+
+/* static int _python_ibm_db_do_prepare(SQLHANDLE hdbc, SQLWCHAR *stmt, int stmt_size, stmt_handle *stmt_res, PyObject *options)
+ *
+ * Allocates a statement handle on hdbc, applies any statement options and
+ * prepares the wide-char SQL text on it.  Returns an SQL return code; any
+ * value below SQL_SUCCESS means failure, with a Python exception set or
+ * the DB2 diagnostics recorded via _python_ibm_db_check_sql_errors.
+ */
+static int _python_ibm_db_do_prepare(SQLHANDLE hdbc, SQLWCHAR *stmt, int stmt_size, stmt_handle *stmt_res, PyObject *options)
+{
+	int rc;
+
+	/* alloc handle and return only if it errors */
+	rc = SQLAllocHandle(SQL_HANDLE_STMT, hdbc, &(stmt_res->hstmt));
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+										1, NULL, -1, 1);
+		return rc;
+	}
+
+	/* A NULL statement cannot be prepared.  Return SQL_ERROR explicitly:
+	 * the previous code returned rc here, which at this point is a success
+	 * code, so callers carried on despite the pending Python exception. */
+	if (NIL_P(stmt)) {
+		PyErr_SetString(PyExc_Exception, 
+			"Supplied statement parameter is invalid");
+		return SQL_ERROR;
+	}
+
+	/* Catches non-SQL_ERROR failures from SQLAllocHandle above
+	 * (e.g. SQL_INVALID_HANDLE, which is also negative). */
+	if ( rc < SQL_SUCCESS ) {
+		_python_ibm_db_check_sql_errors(hdbc, SQL_HANDLE_DBC, rc, 1, NULL, -1, 1);
+		PyErr_SetString(PyExc_Exception, "Statement prepare Failed: ");
+		return rc;
+	}
+
+	if (!NIL_P(options)) {
+		rc = _python_ibm_db_parse_options( options, SQL_HANDLE_STMT, stmt_res );
+		if ( rc == SQL_ERROR ) {
+			return rc;
+		}
+	}
+
+	/* Prepare the stmt. The cursor type requested has already been set in 
+	* _python_ibm_db_assign_options 
+	*/
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLPrepareW((SQLHSTMT)stmt_res->hstmt, stmt, 
+				stmt_size);
+	Py_END_ALLOW_THREADS;
+
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+										1, NULL, -1, 1);
+	}
+	return rc;
+}
+
+/*!# ibm_db.exec
+ *
+ * ===Description
+ * stmt_handle ibm_db.exec ( IBM_DBConnection connection, string statement
+ *								[, array options] )
+ *
+ * Prepares and executes an SQL statement.
+ *
+ * If you plan to interpolate Python variables into the SQL statement,
+ * understand that this is one of the more common security exposures. Consider
+ * calling ibm_db.prepare() to prepare an SQL statement with parameter markers
+ * for input values. Then you can call ibm_db.execute() to pass in the input
+ * values and avoid SQL injection attacks.
+ *
+ * If you plan to repeatedly issue the same SQL statement with different
+ * parameters, consider calling ibm_db.prepare() and ibm_db.execute() to
+ * enable the database server to reuse its access plan and increase the
+ * efficiency of your database access.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *
+ *		A valid database connection resource variable as returned from
+ * ibm_db.connect() or ibm_db.pconnect().
+ *
+ * ====statement
+ *
+ *		An SQL statement. The statement cannot contain any parameter markers.
+ *
+ * ====options
+ *
+ *		A dictionary containing statement options. You can use this parameter
+ * to request a scrollable cursor on database servers that support this
+ * functionality.
+ *
+ *		SQL_ATTR_CURSOR_TYPE
+ *			 Passing the SQL_SCROLL_FORWARD_ONLY value requests a forward-only
+ *			 cursor for this SQL statement. This is the default type of
+ *			 cursor, and it is supported by all database servers. It is also
+ *			 much faster than a scrollable cursor.
+ *
+ *			 Passing the SQL_CURSOR_KEYSET_DRIVEN value requests a scrollable
+ *			 cursor for this SQL statement. This type of cursor enables you to
+ *			 fetch rows non-sequentially from the database server. However, it
+ *			 is only supported by DB2 servers, and is much slower than
+ *			 forward-only cursors.
+ *
+ * ===Return Values
+ *
+ * Returns a stmt_handle resource if the SQL statement was issued
+ * successfully, or FALSE if the database failed to execute the SQL statement.
+ */
+static PyObject *ibm_db_exec(PyObject *self, PyObject *args)			
+{
+	PyObject *options = NULL;
+	PyObject *py_conn_res = NULL;
+	stmt_handle *stmt_res;
+	conn_handle *conn_res;
+	int rc;
+	int isNewBuffer = 0;	/* must start at 0: never written when py_stmt is None */
+	char* return_str = NULL; /* This variable is used by 
+							 * _python_ibm_db_check_sql_errors to return err 
+							 * strings 
+							 */
+	SQLWCHAR *stmt = NULL;
+	PyObject *py_stmt = NULL;
+
+	/* This function basically is a wrap of the _python_ibm_db_do_prepare and 
+	* _python_ibm_db_Execute_stmt 
+	* After completing statement execution, it returns the statement resource 
+	*/
+
+	if (!PyArg_ParseTuple(args, "OO|O", &py_conn_res, &py_stmt,  &options))
+		return NULL;
+
+	if (py_stmt != NULL && py_stmt != Py_None) {
+		if (PyString_Check(py_stmt) || PyUnicode_Check(py_stmt)){
+			py_stmt = PyUnicode_FromObject(py_stmt);
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "statement must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			Py_XDECREF(py_stmt);
+			return NULL;
+		}
+
+		return_str = ALLOC_N(char, DB2_MAX_ERR_MSG_LEN);
+		if ( return_str == NULL ) {
+			PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+			Py_XDECREF(py_stmt);
+			return NULL;
+		}
+
+		memset(return_str, 0, DB2_MAX_ERR_MSG_LEN);
+
+		_python_ibm_db_clear_stmt_err_cache();
+
+		stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+		/* Allocates the stmt handle */
+		/* returns the stat_handle back to the calling function */
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, conn_res->hdbc, &(stmt_res->hstmt));
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1,
+				NULL, -1, 1);
+			PyMem_Del(return_str);
+			Py_XDECREF(py_stmt);
+			return NULL;
+		}
+
+		if (!NIL_P(options)) {
+			rc = _python_ibm_db_parse_options(options, SQL_HANDLE_STMT, stmt_res);
+			if ( rc == SQL_ERROR ) {
+				/* this path previously leaked return_str and the stmt handle */
+				SQLFreeHandle( SQL_HANDLE_STMT, stmt_res->hstmt );
+				PyMem_Del(return_str);
+				Py_XDECREF(py_stmt);
+				return NULL;
+			}
+		}
+		if (py_stmt != NULL && py_stmt != Py_None){
+			stmt = getUnicodeDataAsSQLWCHAR(py_stmt, &isNewBuffer);
+		}
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLExecDirectW((SQLHSTMT)stmt_res->hstmt, stmt, SQL_NTS);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc < SQL_SUCCESS ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, -1, 
+				1, return_str, DB2_ERRMSG, 
+				stmt_res->errormsg_recno_tracker);
+			SQLFreeHandle( SQL_HANDLE_STMT, stmt_res->hstmt );
+			/* TODO: Object freeing */
+			/* free(stmt_res); */
+			if (isNewBuffer) {
+				if(stmt) PyMem_Del(stmt);
+			}
+			Py_XDECREF(py_stmt);
+			PyMem_Del(return_str);
+			return NULL;
+		}
+		if (isNewBuffer) {
+			if(stmt) PyMem_Del(stmt);
+		}	
+		PyMem_Del(return_str);
+		Py_XDECREF(py_stmt);
+		return (PyObject *)stmt_res;				 
+	}
+	/* NIL connection: raise explicitly; a bare NULL return here would
+	 * surface as SystemError ("NULL without setting an error"). */
+	PyErr_SetString(PyExc_Exception, "Supplied connection object Parameter is invalid");
+	Py_XDECREF(py_stmt);
+	return NULL;
+}
+
+/*!# ibm_db.free_result
+ *
+ * ===Description
+ * bool ibm_db.free_result ( resource stmt )
+ *
+ * Frees the system and database resources that are associated with a result
+ * set. These resources are freed implicitly when a script finishes, but you
+ * can call ibm_db.free_result() to explicitly free the result set resources
+ * before the end of the script.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns TRUE on success or FALSE on failure.
+ */
+static PyObject *ibm_db_free_result(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res = NULL;
+	int rc = 0;
+
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	/* Validate the statement resource up front (guard clauses). */
+	if (NIL_P(py_stmt_res)) {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	}
+	stmt_res = (stmt_handle *)py_stmt_res;
+
+	if (stmt_res->hstmt) {
+		/* Close any cursor that a previous call to SQLExecute may have
+		 * left open on this handle. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLFreeStmt((SQLHSTMT)stmt_res->hstmt, SQL_CLOSE);
+		Py_END_ALLOW_THREADS;
+
+		if (rc == SQL_ERROR) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+											rc, 1, NULL, -1, 1);
+			PyErr_Clear( );	
+			Py_RETURN_FALSE;
+		}
+	}
+	_python_ibm_db_free_result_struct(stmt_res);
+	Py_RETURN_TRUE;
+}
+
+/*
+ * static PyObject *_python_ibm_db_prepare_helper(conn_handle *conn_res, PyObject *py_stmt, PyObject *options)
+ *
+ * Common implementation behind ibm_db.prepare(): validates the statement
+ * text, allocates a new statement resource on the connection and prepares
+ * the SQL on it.  Returns the stmt_handle object, or NULL with a Python
+ * exception set on failure.
+ */
+static PyObject *_python_ibm_db_prepare_helper(conn_handle *conn_res, PyObject *py_stmt, PyObject *options)
+{
+	stmt_handle *stmt_res;
+	int rc;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	SQLWCHAR *stmt = NULL;
+	int stmt_size = 0;
+	int isNewBuffer = 0;	/* must start at 0: never written when py_stmt is None */
+
+	if (!conn_res->handle_active) {
+		PyErr_SetString(PyExc_Exception, "Connection is not active");
+		return NULL;
+	}
+
+	if (py_stmt != NULL && py_stmt != Py_None) {
+		if (PyString_Check(py_stmt) || PyUnicode_Check(py_stmt)) {
+			py_stmt = PyUnicode_FromObject(py_stmt);
+			if (py_stmt != NULL &&  py_stmt != Py_None) {
+				stmt_size = PyUnicode_GetSize(py_stmt);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Error occure during processing of statement");
+				return NULL;	
+			}
+		}
+		else {
+			PyErr_SetString(PyExc_Exception, "statement must be a string or unicode");
+			return NULL;
+		}
+	}
+
+	_python_ibm_db_clear_stmt_err_cache();
+
+	/* Initialize stmt resource members with default values. */
+	/* Parsing will update options if needed */
+	stmt_res = _ibm_db_new_stmt_struct(conn_res);
+
+	/* Allocates the stmt handle, prepares the statement and returns the
+	 * stmt_handle back to the calling function. */
+	if( py_stmt && py_stmt != Py_None)
+		stmt = getUnicodeDataAsSQLWCHAR(py_stmt, &isNewBuffer);
+		
+	rc = _python_ibm_db_do_prepare(conn_res->hdbc, stmt, stmt_size, stmt_res, options);
+	if (isNewBuffer) {
+		if(stmt) PyMem_Del(stmt);
+	}
+	
+	if ( rc < SQL_SUCCESS ) {
+		sprintf(error, "Statement Prepare Failed: %s", IBM_DB_G(__python_stmt_err_msg));
+		/* The message used to be formatted into `error` and then silently
+		 * discarded, so failures could surface as SystemError (NULL return
+		 * with no exception).  Raise it unless a more specific error is
+		 * already pending from _python_ibm_db_do_prepare. */
+		if (!PyErr_Occurred()) {
+			PyErr_SetString(PyExc_Exception, error);
+		}
+		Py_XDECREF(py_stmt);
+		return NULL;
+	}
+	Py_XDECREF(py_stmt);
+	return (PyObject *)stmt_res;		
+}
+
+/*!# ibm_db.prepare
+ *
+ * ===Description
+ * IBMDB_Statement ibm_db.prepare ( IBM_DBConnection connection,
+ *								  string statement [, array options] )
+ *
+ * ibm_db.prepare() creates a prepared SQL statement which can include 0 or
+ * more parameter markers (? characters) representing parameters for input,
+ * output, or input/output. You can pass parameters to the prepared statement
+ * using ibm_db.bind_param(), or for input values only, as an array passed to
+ * ibm_db.execute().
+ *
+ * There are three main advantages to using prepared statements in your
+ * application:
+ *		* Performance: when you prepare a statement, the database server
+ *		 creates an optimized access plan for retrieving data with that
+ *		 statement. Subsequently issuing the prepared statement with
+ *		 ibm_db.execute() enables the statements to reuse that access plan
+ *		 and avoids the overhead of dynamically creating a new access plan
+ *		 for every statement you issue.
+ *		* Security: when you prepare a statement, you can include parameter
+ *		 markers for input values. When you execute a prepared statement
+ *		 with input values for placeholders, the database server checks each
+ *		 input value to ensure that the type matches the column definition or
+ *		 parameter definition.
+ *		* Advanced functionality: Parameter markers not only enable you to
+ *		 pass input values to prepared SQL statements, they also enable you
+ *		 to retrieve OUT and INOUT parameters from stored procedures using
+ *		 ibm_db.bind_param().
+ *
+ * ===Parameters
+ * ====connection
+ *
+ *		A valid database connection resource variable as returned from
+ *		ibm_db.connect() or ibm_db.pconnect().
+ *
+ * ====statement
+ *
+ *		An SQL statement, optionally containing one or more parameter markers.
+ *
+ * ====options
+ *
+ *		A dictionary containing statement options. You can use this parameter
+ *		to request a scrollable cursor on database servers that support this
+ *		functionality.
+ *
+ *		SQL_ATTR_CURSOR_TYPE
+ *			 Passing the SQL_SCROLL_FORWARD_ONLY value requests a forward-only
+ *			 cursor for this SQL statement. This is the default type of
+ *			 cursor, and it is supported by all database servers. It is also
+ *			 much faster than a scrollable cursor.
+ *			 Passing the SQL_CURSOR_KEYSET_DRIVEN value requests a scrollable  
+ *			 cursor for this SQL statement. This type of cursor enables you
+ *			 to fetch rows non-sequentially from the database server. However, 
+ *			 it is only supported by DB2 servers, and is much slower than
+ *			 forward-only cursors.
+ *
+ * ===Return Values
+ * Returns a IBM_DBStatement object if the SQL statement was successfully
+ * parsed and prepared by the database server. Returns FALSE if the database
+ * server returned an error. You can determine which error was returned by
+ * calling ibm_db.stmt_error() or ibm_db.stmt_errormsg().
+ */					 
+static PyObject *ibm_db_prepare(PyObject *self, PyObject *args)			
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *options = NULL;
+	PyObject *py_stmt = NULL;
+	conn_handle *conn_res;
+	
+	if (!PyArg_ParseTuple(args, "OO|O", &py_conn_res, &py_stmt, &options))
+		return NULL;
+
+	/* NIL connection: raise explicitly; the old code fell through to a bare
+	 * `return NULL` with no exception set, which raises SystemError. */
+	if (NIL_P(py_conn_res)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+		PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+		return NULL;
+	}
+	conn_res = (conn_handle *)py_conn_res;
+
+	/* All real work is shared with other entry points via the helper. */
+	return _python_ibm_db_prepare_helper(conn_res, py_stmt, options);	 
+}
+
+/*	static param_node* build_list( stmt_res, param_no, data_type, precision, scale, nullable )
+ *
+ *	Appends a freshly initialized param_node describing parameter
+ *	`param_no` to the statement's parameter cache list and returns it.
+ */
+static param_node* build_list( stmt_handle *stmt_res, int param_no, SQLSMALLINT data_type, SQLUINTEGER precision, SQLSMALLINT scale, SQLSMALLINT nullable )
+{
+	param_node *node = NULL;
+	param_node *tail = NULL;
+
+	/* Allocate a zeroed node and fill in the parameter description. */
+	node = ALLOC(param_node);
+	memset(node, 0, sizeof(param_node));
+	node->data_type = data_type;
+	node->param_size = precision;
+	node->nullable = nullable;
+	node->scale = scale;
+	node->param_num = param_no;
+	node->file_options = SQL_FILE_READ;
+	node->param_type = SQL_PARAM_INPUT;
+	node->next = NULL;
+
+	/* Link it at the tail of the (possibly empty) cache list. */
+	if (stmt_res->head_cache_list == NULL) {
+		stmt_res->head_cache_list = node;
+	} else {
+		tail = stmt_res->head_cache_list;
+		while (tail->next != NULL) {
+			tail = tail->next;
+		}
+		tail->next = node;
+	}
+
+	return node;
+}
+
+/*	static int _python_ibm_db_bind_data( stmt_handle *stmt_res, param_node *curr, PyObject *bind_data )
+*/
+static int _python_ibm_db_bind_data( stmt_handle *stmt_res, param_node *curr, PyObject *bind_data)
+{
+	int rc;
+	SQLSMALLINT valueType = 0;
+	SQLPOINTER	paramValuePtr;
+#if  PY_MAJOR_VERSION < 3
+	Py_ssize_t buffer_len = 0;
+#endif
+	int param_length;
+	
+	/* Have to use SQLBindFileToParam if PARAM is type PARAM_FILE */
+	/*** Need to fix this***/
+	if ( curr->param_type == PARAM_FILE) {
+		PyObject *FileNameObj = NULL;
+		/* Only string types can be bound */
+		if (PyString_Check(bind_data)) {
+			if (PyUnicode_Check(bind_data)) {
+				FileNameObj = PyUnicode_AsASCIIString(bind_data);
+				if (FileNameObj == NULL) {
+					return SQL_ERROR;
+				}
+			}
+		} else {
+			return SQL_ERROR;
+		}
+		curr->bind_indicator = 0;
+		if(curr->svalue != NULL) {
+			PyMem_Del(curr->svalue);
+			curr->svalue = NULL;
+		}
+		if (FileNameObj != NULL) {
+			curr->svalue = PyBytes_AsString(FileNameObj);
+		} else {
+			curr->svalue = PyBytes_AsString(bind_data);
+		}
+		curr->ivalue = strlen(curr->svalue);
+		curr->svalue = memcpy(PyMem_Malloc((sizeof(char))*(curr->ivalue+1)), curr->svalue, curr->ivalue);
+		curr->svalue[curr->ivalue] = '\0';
+		Py_XDECREF(FileNameObj);
+		valueType = (SQLSMALLINT) curr->ivalue;
+		/* Bind file name string */
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLBindFileToParam((SQLHSTMT)stmt_res->hstmt, curr->param_num,
+					curr->data_type, (SQLCHAR*)curr->svalue,
+					(SQLSMALLINT*)&(curr->ivalue), &(curr->file_options), 
+					(SQLSMALLINT) curr->ivalue, &(curr->bind_indicator));
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+											 rc, 1, NULL, -1, 1);
+		}
+		return rc;
+	}
+	
+	switch(TYPE(bind_data)) {
+		case PYTHON_FIXNUM:
+			if(curr->data_type == SQL_BIGINT || curr->data_type == SQL_DECIMAL ){
+				PyObject *tempobj = NULL;
+#if  PY_MAJOR_VERSION >= 3
+			       	PyObject *tempobj2 = NULL;
+#endif
+				tempobj = PyObject_Str(bind_data);
+#if  PY_MAJOR_VERSION >= 3
+				tempobj2 = PyUnicode_AsASCIIString(tempobj);
+				Py_XDECREF(tempobj);
+				tempobj = tempobj2;
+#endif	
+				curr->svalue = PyBytes_AsString(tempobj);
+				curr->ivalue = strlen(curr->svalue);
+				curr->svalue = memcpy(PyMem_Malloc((sizeof(char))*(curr->ivalue+1)), curr->svalue, curr->ivalue);
+				curr->svalue[curr->ivalue] = '\0'; 
+				curr->bind_indicator = curr->ivalue;
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+					curr->param_type, SQL_C_CHAR, curr->data_type,
+					curr->param_size, curr->scale, curr->svalue, curr->param_size, NULL);
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ){
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+				}
+				Py_XDECREF(tempobj);
+			}
+			else{
+				curr->ivalue = (SQLINTEGER) PyLong_AsLong(bind_data);
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+							curr->param_type, SQL_C_LONG, curr->data_type,
+							curr->param_size, curr->scale, &curr->ivalue, 0, NULL);
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+				}
+				curr->data_type = SQL_C_LONG;
+			}
+			break;
+
+		/* Convert BOOLEAN types to LONG for DB2 / Cloudscape */
+		case PYTHON_FALSE:
+			curr->ivalue = 0;
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+						curr->param_type, SQL_C_LONG, curr->data_type, curr->param_size,
+						curr->scale, &curr->ivalue, 0, NULL);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+													rc, 1, NULL, -1, 1);
+			}
+			curr->data_type = SQL_C_LONG;
+			break;
+
+		case PYTHON_TRUE:
+			curr->ivalue = 1;
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+			curr->param_type, SQL_C_LONG, curr->data_type, curr->param_size,
+						curr->scale, &curr->ivalue, 0, NULL);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+			}
+			curr->data_type = SQL_C_LONG;
+			break;
+
+		case PYTHON_FLOAT:
+			curr->fvalue = PyFloat_AsDouble(bind_data);
+			
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+					curr->param_type, SQL_C_DOUBLE, curr->data_type, curr->param_size,
+					curr->scale, &curr->fvalue, 0, NULL);
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+			}
+			curr->data_type = SQL_C_DOUBLE;
+			break;
+
+		case PYTHON_UNICODE:
+			{
+				int isNewBuffer;
+				if(PyObject_CheckBuffer(bind_data) && (curr->data_type == SQL_BLOB || curr->data_type == SQL_BINARY || curr->data_type == SQL_VARBINARY)) {
+#if  PY_MAJOR_VERSION >= 3
+					Py_buffer tmp_buffer;
+					PyObject_GetBuffer(bind_data, &tmp_buffer, PyBUF_SIMPLE);
+					curr->uvalue = tmp_buffer.buf;
+					curr->ivalue = tmp_buffer.len;
+#else					
+					PyObject_AsReadBuffer(bind_data, (const void **) &(curr->uvalue), &buffer_len);
+					curr->ivalue = buffer_len;
+#endif
+				} else {
+					if(curr->uvalue != NULL) {
+						PyMem_Del(curr->uvalue);
+						curr->uvalue = NULL;
+					}
+					curr->uvalue = getUnicodeDataAsSQLWCHAR(bind_data, &isNewBuffer);
+					curr->ivalue = PyUnicode_GetSize(bind_data);
+					curr->ivalue = curr->ivalue * sizeof(SQLWCHAR);
+				}
+				param_length = curr->ivalue;
+				if (curr->size != 0) {
+					curr->ivalue = (curr->size + 1) * sizeof(SQLWCHAR);
+				}
+
+				if (curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+					if (curr->size == 0) {
+						if ((curr->data_type == SQL_BLOB) || (curr->data_type == SQL_CLOB) || (curr->data_type == SQL_BINARY)
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+								|| (curr->data_type == SQL_LONGVARBINARY)
+#endif /* PASE */
+								|| (curr->data_type == SQL_VARBINARY) || (curr->data_type == SQL_XML)) {
+							if (curr->ivalue <= curr->param_size) {
+								curr->ivalue = curr->param_size + sizeof(SQLWCHAR);
+							}
+						} else {
+							if (curr->ivalue <= (curr->param_size * sizeof(SQLWCHAR))) {
+								curr->ivalue = (curr->param_size + 1) * sizeof(SQLWCHAR);
+							}
+						}
+					}
+				}
+				
+				if (isNewBuffer == 0 ){
+					/* actually make a copy, since this will uvalue will be freed explicitly */
+					SQLWCHAR* tmp = (SQLWCHAR*)ALLOC_N(SQLWCHAR, curr->ivalue + 1);
+					memcpy(tmp, curr->uvalue, (param_length + sizeof(SQLWCHAR)));
+					curr->uvalue = tmp;
+				} else if (param_length <= curr->param_size) {
+					SQLWCHAR* tmp = (SQLWCHAR*)ALLOC_N(SQLWCHAR, curr->ivalue + 1);
+					memcpy(tmp, curr->uvalue, (param_length + sizeof(SQLWCHAR)));
+					PyMem_Del(curr->uvalue);
+					curr->uvalue = tmp;
+				}
+				
+				switch( curr->data_type){
+					case SQL_CLOB:
+					case SQL_DBCLOB:
+						if(curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT){
+							curr->bind_indicator =  param_length;
+							paramValuePtr = (SQLPOINTER)curr->uvalue;
+						} else {
+							curr->bind_indicator = SQL_DATA_AT_EXEC;	
+#ifndef PASE
+							paramValuePtr = (SQLPOINTER)(curr);
+#else
+							paramValuePtr = (SQLPOINTER)&(curr);
+#endif
+						}
+						valueType = SQL_C_WCHAR;
+						break;
+					
+					case SQL_BLOB:
+						if (curr->param_type == SQL_PARAM_OUTPUT ||curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->bind_indicator = param_length;
+							paramValuePtr = (SQLPOINTER)curr;
+						} else {
+							curr->bind_indicator = SQL_DATA_AT_EXEC;
+#ifndef PASE
+							paramValuePtr = (SQLPOINTER)(curr);
+#else
+							paramValuePtr = (SQLPOINTER)&(curr);
+#endif
+						}
+						valueType = SQL_C_BINARY;
+						break;
+					
+					case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+					case SQL_LONGVARBINARY:
+#endif /* PASE */
+					case SQL_VARBINARY:
+						/* account for bin_mode settings as well */
+						curr->bind_indicator = param_length;
+						valueType = SQL_C_BINARY;
+						paramValuePtr = (SQLPOINTER)curr->uvalue;
+						break;
+
+					case SQL_XML:
+						curr->bind_indicator = param_length;
+						paramValuePtr = (SQLPOINTER)curr->uvalue;
+						valueType = SQL_C_WCHAR;
+						break;
+					case SQL_TYPE_TIMESTAMP:
+						valueType = SQL_C_WCHAR;
+						curr->bind_indicator = SQL_NTS;
+						if(curr->uvalue[10] == 'T'){
+							curr->uvalue[10] = ' ';
+						}
+						paramValuePtr = (SQLPOINTER)(curr->uvalue);
+						break;
+					default:
+						valueType = SQL_C_WCHAR;
+						curr->bind_indicator = param_length;
+						paramValuePtr = (SQLPOINTER)(curr->uvalue);
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindParameter(stmt_res->hstmt, curr->param_num, curr->param_type, valueType, curr->data_type, curr->param_size, curr->scale, paramValuePtr, curr->ivalue, &(curr->bind_indicator));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+				}
+				curr->data_type = valueType;
+			}
+			break;
+		
+		case PYTHON_STRING:
+			{
+				char* tmp;
+				if (PyObject_CheckBuffer(bind_data) && (curr->data_type == SQL_BLOB || curr->data_type == SQL_BINARY
+										 || curr->data_type == SQL_VARBINARY) ) {
+#if  PY_MAJOR_VERSION >= 3
+					Py_buffer tmp_buffer;
+					PyObject_GetBuffer(bind_data, &tmp_buffer, PyBUF_SIMPLE);
+					curr->svalue = tmp_buffer.buf;
+					curr->ivalue = tmp_buffer.len;
+#else
+					PyObject_AsReadBuffer(bind_data, (const void **) &(curr->svalue), &buffer_len);
+					curr->ivalue = buffer_len;
+#endif
+				} else {
+					if(curr->svalue != NULL) {
+						PyMem_Del(curr->svalue);
+						curr->svalue = NULL;
+					}
+					curr->svalue = PyBytes_AsString(bind_data);   /** It is PyString_AsString() in PY_MAJOR_VERSION<3, and code execution will not come here in PY_MAJOR_VERSION>=3 **/
+					curr->ivalue = strlen(curr->svalue);
+				}
+				param_length = curr->ivalue;
+				/*
+				* An extra parameter is given by the client to pick the size of the 
+				* string returned. The string is then truncated to that size.	
+				* If no size is given then use BUFSIZ to return the string.
+				*/
+				if (curr->size != 0) {
+					curr->ivalue = curr->size;
+				}
+	            
+				if (curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+					if (curr->size == 0) {
+						if (curr->ivalue <= curr->param_size) {
+							curr->ivalue = curr->param_size + 1;
+						}
+					}
+				}
+				tmp = ALLOC_N(char, curr->ivalue+1);
+				curr->svalue = memcpy(tmp, curr->svalue, param_length);
+				curr->svalue[param_length] = '\0';
+	
+				switch ( curr->data_type ) {
+					case SQL_CLOB:
+					case SQL_DBCLOB:
+						if (curr->param_type == SQL_PARAM_OUTPUT || 
+							curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->bind_indicator = param_length;
+							paramValuePtr = (SQLPOINTER)curr->svalue;
+						} else {
+							curr->bind_indicator = SQL_DATA_AT_EXEC;
+							/* The correct dataPtr will be set during SQLPutData with 
+							* the len from this struct 
+							*/
+#ifndef PASE
+							paramValuePtr = (SQLPOINTER)(curr);
+#else
+							paramValuePtr = (SQLPOINTER)&(curr);
+#endif
+						}
+						valueType = SQL_C_CHAR;
+						break;
+
+					case SQL_BLOB:
+						if (curr->param_type == SQL_PARAM_OUTPUT || 
+							curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->ivalue = curr->ivalue -1;
+							curr->bind_indicator = param_length;
+							paramValuePtr = (SQLPOINTER)curr;
+						} else {
+							curr->bind_indicator = SQL_DATA_AT_EXEC;
+#ifndef PASE
+							paramValuePtr = (SQLPOINTER)(curr);
+#else
+							paramValuePtr = (SQLPOINTER)&(curr);
+#endif
+						}
+						valueType = SQL_C_BINARY;
+						break;
+	
+					case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+					case SQL_LONGVARBINARY:
+#endif /* PASE */
+					case SQL_VARBINARY:
+					case SQL_XML:
+						/* account for bin_mode settings as well */
+						curr->bind_indicator = curr->ivalue;
+						if (curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->ivalue = curr->ivalue - 1;
+							curr->bind_indicator = param_length;
+						}
+						
+						valueType = SQL_C_BINARY;
+						paramValuePtr = (SQLPOINTER)curr->svalue;
+						break;
+
+						/* This option should handle most other types such as DATE, 
+						* VARCHAR etc 
+						*/
+					case SQL_TYPE_TIMESTAMP:
+						valueType = SQL_C_CHAR;
+						curr->bind_indicator = curr->ivalue;
+						if (curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->bind_indicator = SQL_NTS;
+						}
+						if(curr->svalue[10] == 'T'){
+							curr->svalue[10] = ' ';
+						}
+						paramValuePtr = (SQLPOINTER)(curr->svalue);
+						break;
+					default:
+						valueType = SQL_C_CHAR;
+						curr->bind_indicator = curr->ivalue;
+						if (curr->param_type == SQL_PARAM_OUTPUT || curr->param_type == SQL_PARAM_INPUT_OUTPUT) {
+							curr->bind_indicator = SQL_NTS;
+						}
+						paramValuePtr = (SQLPOINTER)(curr->svalue);
+				}
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+					curr->param_type, valueType, curr->data_type, curr->param_size,
+					curr->scale, paramValuePtr, curr->ivalue, &(curr->bind_indicator));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+				}
+				curr->data_type = valueType;
+			}
+			break;
+
+		case PYTHON_DECIMAL:
+			if (curr->data_type == SQL_DECIMAL || curr->data_type == SQL_DECFLOAT) {
+				PyObject *tempobj = NULL;
+#if  PY_MAJOR_VERSION >= 3
+			        PyObject *tempobj2 = NULL;
+#endif
+				if(curr->svalue != NULL) {
+					PyMem_Del(curr->svalue);
+					curr->svalue = NULL;
+				}
+				tempobj = PyObject_Str(bind_data);
+#if PY_MAJOR_VERSION >= 3
+				tempobj2 = PyUnicode_AsASCIIString(tempobj);
+				Py_XDECREF(tempobj);
+				tempobj = tempobj2;
+#endif
+				curr->svalue = PyBytes_AsString(tempobj);
+				curr->ivalue = strlen(curr->svalue);
+				curr->svalue = estrdup(curr->svalue);
+				curr->svalue[curr->ivalue] = '\0'; 
+				valueType = SQL_C_CHAR;
+				paramValuePtr = (SQLPOINTER)(curr->svalue);
+				curr->bind_indicator = curr->ivalue;
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLBindParameter(stmt_res->hstmt, curr->param_num, curr->param_type, valueType, curr->data_type, curr->param_size, curr->scale, paramValuePtr, curr->ivalue, &(curr->bind_indicator));
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT,rc, 1, NULL, -1, 1);
+				}
+				curr->data_type = valueType;
+				Py_XDECREF(tempobj);
+				break;
+			}
+
+
+		case PYTHON_DATE:
+			curr->date_value = ALLOC(DATE_STRUCT);
+			curr->date_value->year = PyDateTime_GET_YEAR(bind_data);
+			curr->date_value->month = PyDateTime_GET_MONTH(bind_data);
+			curr->date_value->day = PyDateTime_GET_DAY(bind_data);
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+				curr->param_type, SQL_C_TYPE_DATE, curr->data_type, curr->param_size,
+				curr->scale, curr->date_value, curr->ivalue, &(curr->bind_indicator));
+			Py_END_ALLOW_THREADS;
+			break;
+
+		case PYTHON_TIME:
+			curr->time_value = ALLOC(TIME_STRUCT);
+			curr->time_value->hour = PyDateTime_TIME_GET_HOUR(bind_data);
+			curr->time_value->minute = PyDateTime_TIME_GET_MINUTE(bind_data);
+			curr->time_value->second = PyDateTime_TIME_GET_SECOND(bind_data);
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+				curr->param_type, SQL_C_TYPE_TIME, curr->data_type, curr->param_size,
+				curr->scale, curr->time_value, curr->ivalue, &(curr->bind_indicator));
+			Py_END_ALLOW_THREADS;
+			break;
+
+		case PYTHON_TIMESTAMP:
+			curr->ts_value = ALLOC(TIMESTAMP_STRUCT);
+			curr->ts_value->year = PyDateTime_GET_YEAR(bind_data);
+			curr->ts_value->month = PyDateTime_GET_MONTH(bind_data);
+			curr->ts_value->day = PyDateTime_GET_DAY(bind_data);
+			curr->ts_value->hour = PyDateTime_DATE_GET_HOUR(bind_data);
+			curr->ts_value->minute = PyDateTime_DATE_GET_MINUTE(bind_data);
+			curr->ts_value->second = PyDateTime_DATE_GET_SECOND(bind_data);
+			curr->ts_value->fraction = PyDateTime_DATE_GET_MICROSECOND(bind_data) * 1000;
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+				curr->param_type, SQL_C_TYPE_TIMESTAMP, curr->data_type, curr->param_size,
+				curr->scale, curr->ts_value, curr->ivalue, &(curr->bind_indicator));
+			Py_END_ALLOW_THREADS;
+			break;
+
+		case PYTHON_NIL:
+			curr->ivalue = SQL_NULL_DATA;
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLBindParameter(stmt_res->hstmt, curr->param_num,
+				curr->param_type, SQL_C_DEFAULT, curr->data_type, curr->param_size,
+				curr->scale, &curr->ivalue, 0, (SQLLEN *)&(curr->ivalue));
+			Py_END_ALLOW_THREADS;
+
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+											rc, 1, NULL, -1, 1);
+			}
+			break;
+			
+		default:
+			return SQL_ERROR;
+	}
+	return rc;
+}
+
+/* static int _python_ibm_db_execute_helper2(stmt_handle *stmt_res,
+ *		PyObject *data, int bind_cmp_list, int bind_params)
+ *
+ * Binds parameter values to the prepared statement before execution.
+ *
+ * bind_cmp_list != 0: bind the complete cached parameter list
+ *	(stmt_res->head_cache_list, populated by ibm_db.bind_param)
+ *	sequentially; `data` is ignored.
+ * bind_cmp_list == 0: bind only `data`.  When bind_params != 0 the
+ *	parameter has not been described yet, so SQLDescribeParam is called
+ *	and a fresh cache node is built; otherwise the value is bound to
+ *	stmt_res->current_node and that pointer is advanced.
+ *
+ * Returns an SQL return code (SQL_SUCCESS/SQL_ERROR), or -1 when a
+ * cached node carries no Python value.  On error a Python exception is
+ * set as well.
+ */
+static int _python_ibm_db_execute_helper2(stmt_handle *stmt_res, PyObject *data, int bind_cmp_list, int bind_params)
+{
+	int rc = SQL_SUCCESS;
+	param_node *curr = NULL;	/* To traverse the list */
+	PyObject *bind_data;		 /* Python value to bind for the current node */
+	char error[DB2_MAX_ERR_MSG_LEN];
+
+	/* Used in call to SQLDescribeParam if needed */
+	SQLSMALLINT param_no;
+	SQLSMALLINT data_type;
+	SQLUINTEGER precision;
+	SQLSMALLINT scale;
+	SQLSMALLINT nullable;
+
+	/* This variable means that we bind the complete list of params cached */
+	/* The values used are fetched from the active symbol table */
+	/* TODO: Enhance this part to check for stmt_res->file_param */
+	/* If this flag is set, then use SQLBindParam, else use SQLExtendedBind */
+	if ( bind_cmp_list ) {
+		/* Bind the complete list sequentially */
+		/* Used when no parameters array is passed in */
+		curr = stmt_res->head_cache_list;
+
+		while (curr != NULL ) {
+			/* Fetch data from symbol table */
+			/* NOTE(review): both branches below are identical; the
+			 * PARAM_FILE special case appears vestigial here. */
+			if (curr->param_type == PARAM_FILE)
+				bind_data = curr->var_pyvalue;
+			else {
+				bind_data = curr->var_pyvalue;
+			}
+			/* A node without an attached value cannot be bound */
+			if (bind_data == NULL)
+				return -1;
+				
+			rc = _python_ibm_db_bind_data( stmt_res, curr, bind_data);
+			if ( rc == SQL_ERROR ) {
+				sprintf(error, "Binding Error 1: %s", 
+						IBM_DB_G(__python_stmt_err_msg));
+				PyErr_SetString(PyExc_Exception, error);
+				return rc;
+			}
+			curr = curr->next;
+		}
+		return 0;
+	} else {
+		/* Bind only the data value passed in to the Current Node */
+		if ( data != NULL ) {
+			if ( bind_params ) {
+				/* This condition applies if the parameter has not been
+				* bound using ibm_db.bind_param. Need to describe the
+				* parameter and then bind it.
+				*/
+				param_no = ++stmt_res->num_params;
+
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLDescribeParam((SQLHSTMT)stmt_res->hstmt, param_no,
+					(SQLSMALLINT*)&data_type, &precision, (SQLSMALLINT*)&scale,
+					(SQLSMALLINT*)&nullable);
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT,
+												rc, 1, NULL, -1, 1);
+					sprintf(error, "Describe Param Failed: %s", 
+							IBM_DB_G(__python_stmt_err_msg));
+					PyErr_SetString(PyExc_Exception, error);
+					return rc;
+				}
+
+				/* Cache the described parameter, then bind the value to it */
+				curr = build_list(stmt_res, param_no, data_type, precision, 
+							  scale, nullable);
+				rc = _python_ibm_db_bind_data( stmt_res, curr, data);
+				if ( rc == SQL_ERROR ) {
+					sprintf(error, "Binding Error 2: %s", 
+							IBM_DB_G(__python_stmt_err_msg));
+					PyErr_SetString(PyExc_Exception, error);
+					return rc;
+				}
+			} else {
+				/* This is always at least the head_cache_node -- assigned in
+				* ibm_db.execute(), if params have been bound.
+				*/
+				curr = stmt_res->current_node;
+				if ( curr != NULL ) {
+					rc = _python_ibm_db_bind_data( stmt_res, curr, data);
+					if ( rc == SQL_ERROR ) {
+						sprintf(error, "Binding Error 2: %s", 
+							IBM_DB_G(__python_stmt_err_msg));
+						PyErr_SetString(PyExc_Exception, error);
+						return rc;
+					}
+					/* Advance so the next call binds the next parameter */
+					stmt_res->current_node = curr->next;
+				}
+			}
+			return rc;
+		}
+	}
+	return rc;
+}
+
+/*
+ * static PyObject *_python_ibm_db_execute_helper1(stmt_handle *stmt_res, PyObject *parameters_tuple)
+ *
+ * Drives the actual execution for ibm_db.execute():
+ *  - closes any open cursor left from a previous SQLExecute,
+ *  - binds parameters, either from parameters_tuple or from values
+ *    previously cached via ibm_db.bind_param,
+ *  - calls SQLExecute, streaming any SQL_DATA_AT_EXEC values through
+ *    the SQLParamData/SQLPutData loop,
+ *  - frees the parameter cache when it was built dynamically here.
+ * Returns Py_True on success, or NULL with a Python exception set.
+ */ 
+static PyObject *_python_ibm_db_execute_helper1(stmt_handle *stmt_res, PyObject *parameters_tuple)
+{
+	int rc, numOpts, i, bind_params = 0;
+	SQLSMALLINT num;
+	SQLPOINTER valuePtr;
+	PyObject *data;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	/* This is used to loop over the param cache */
+	param_node *prev_ptr, *curr_ptr;
+	/* Free any cursors that might have been allocated in a previous call to 
+	* SQLExecute 
+	*/
+	Py_BEGIN_ALLOW_THREADS;
+	SQLFreeStmt((SQLHSTMT)stmt_res->hstmt, SQL_CLOSE);
+	Py_END_ALLOW_THREADS;
+
+	/* This ensures that each call to ibm_db.execute start from scratch */
+	stmt_res->current_node = stmt_res->head_cache_list;
+	
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLNumParams((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT*)&num);
+	Py_END_ALLOW_THREADS;
+	
+	if ( num != 0 ) {
+		/* Parameter Handling */
+		if ( !NIL_P(parameters_tuple) ) {
+			/* Make sure ibm_db.bind_param has been called */
+			/* If the param list is NULL -- ERROR */
+			/* bind_params == 1 means no cached list exists, so parameters
+			 * will be described and cached dynamically, then freed below */
+			if ( stmt_res->head_cache_list == NULL ) {
+				bind_params = 1;
+			}
+
+			if (!PyTuple_Check(parameters_tuple)) {
+				PyErr_SetString(PyExc_Exception, "Param is not a tuple");
+				return NULL;
+			}
+
+			numOpts = PyTuple_Size(parameters_tuple);
+	
+			if (numOpts > num) {
+				/* More are passed in -- Warning - Use the max number present */
+				/* NOTE(review): an exception is set here but execution
+				 * continues with stmt_res->num_params -- confirm intended. */
+				sprintf(error, "%d params bound not matching %d required", 
+						numOpts, num);
+				PyErr_SetString(PyExc_Exception, error);
+				numOpts = stmt_res->num_params;
+			} else if (numOpts < num) {
+				/* If there are less params passed in, than are present 
+				* -- Error 
+				*/
+				sprintf(error, "%d params bound not matching %d required", 
+						numOpts, num);
+				PyErr_SetString(PyExc_Exception, error);
+				return NULL;
+			}
+
+			for ( i = 0; i < numOpts; i++) {
+				/* Bind values from the parameters_tuple to params */
+				data = PyTuple_GetItem(parameters_tuple, i);
+
+				/* The 0 denotes that you work only with the current node.
+				* The 4th argument specifies whether the data passed in
+				* has been described. So we need to call SQLDescribeParam
+				* before binding depending on this.
+				*/
+				rc = _python_ibm_db_execute_helper2(stmt_res, data, 0, bind_params);
+				if ( rc == SQL_ERROR) {
+					sprintf(error, "Binding Error: %s", IBM_DB_G(__python_stmt_err_msg));
+					PyErr_SetString(PyExc_Exception, error);
+					return NULL;
+				}
+			}
+		} else {
+			/* No additional params passed in. Use values already bound. */
+			if ( num > stmt_res->num_params ) {
+				/* More parameters than we expected */
+				/* NOTE(review): exception is set but execution continues
+				 * into the binding below -- confirm intended. */
+				sprintf(error, "%d params bound not matching %d required", 
+						stmt_res->num_params, num);
+				PyErr_SetString(PyExc_Exception, error);
+			} else if ( num < stmt_res->num_params ) {
+				/* Fewer parameters than we expected */
+				sprintf(error, "%d params bound not matching %d required", 
+						stmt_res->num_params, num);
+				PyErr_SetString(PyExc_Exception, error);
+				return NULL;
+			}
+			
+			/* Param cache node list is empty -- No params bound */
+			if ( stmt_res->head_cache_list == NULL ) {
+				PyErr_SetString(PyExc_Exception, "Parameters not bound");
+				return NULL;
+			} else {
+				/* The 1 denotes that you work with the whole list 
+				 * And bind sequentially 				
+				 */
+				rc = _python_ibm_db_execute_helper2(stmt_res, NULL, 1, 0);
+				if ( rc == SQL_ERROR ) {
+					sprintf(error, "Binding Error 3: %s", IBM_DB_G(__python_stmt_err_msg));
+					PyErr_SetString(PyExc_Exception, error);
+					return NULL;
+				}
+			}
+		}
+	} else {
+		/* No Parameters 
+		 * We just execute the statement. No additional work needed. 
+		 */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLExecute((SQLHSTMT)stmt_res->hstmt);
+		Py_END_ALLOW_THREADS;
+		
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			sprintf(error, "Statement Execute Failed: %s", IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+		Py_INCREF(Py_True);
+		return Py_True;
+	}
+		
+	/* Execute Stmt -- All parameters bound */
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLExecute((SQLHSTMT)stmt_res->hstmt);
+	Py_END_ALLOW_THREADS;
+	
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+		sprintf(error, "Statement Execute Failed: %s", IBM_DB_G(__python_stmt_err_msg));
+		PyErr_SetString(PyExc_Exception, error);
+		return NULL;
+	}
+		
+	/* SQL_NEED_DATA: the driver is asking for deferred (LOB) parameter
+	 * data; SQLParamData returns the param_node bound as the dataPtr and
+	 * SQLPutData sends its svalue/uvalue buffer. */
+	if ( rc == SQL_NEED_DATA ) {
+		rc = SQLParamData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER *)&valuePtr);
+		while ( rc == SQL_NEED_DATA ) {
+			/* passing data value for a parameter */
+			if ( !NIL_P(((param_node*)valuePtr)->svalue)) {
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLPutData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER)(((param_node*)valuePtr)->svalue), ((param_node*)valuePtr)->ivalue);
+				Py_END_ALLOW_THREADS;
+			} else {
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLPutData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER)(((param_node*)valuePtr)->uvalue), ((param_node*)valuePtr)->ivalue);
+				Py_END_ALLOW_THREADS;
+			}
+			
+			if ( rc == SQL_ERROR ) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT,
+						rc, 1, NULL, -1, 1);
+				sprintf(error, "Sending data failed: %s", 
+						IBM_DB_G(__python_stmt_err_msg));
+				PyErr_SetString(PyExc_Exception, error);
+				return NULL;
+			}
+
+			rc = SQLParamData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER *)&valuePtr);
+		}
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			sprintf(error, "Sending data failed: %s", IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+	}
+		
+	/* cleanup dynamic bindings if present */
+	if ( bind_params == 1 ) {
+		/* Free param cache list */
+		curr_ptr = stmt_res->head_cache_list;
+		prev_ptr = stmt_res->head_cache_list;
+			
+		while (curr_ptr != NULL) {
+			curr_ptr = curr_ptr->next;
+			
+			/* Free Values */
+			/* NOTE(review): only svalue is released here; uvalue is not
+			 * freed on this path -- verify whether that can leak. */
+			if ( prev_ptr->svalue) {
+				PyMem_Del(prev_ptr->svalue);
+			}
+			PyMem_Del(prev_ptr);
+			prev_ptr = curr_ptr;
+       		}
+		
+		stmt_res->head_cache_list = NULL;
+		stmt_res->num_params = 0;
+	}
+	
+	if ( rc != SQL_ERROR ) {
+		Py_INCREF(Py_True);
+		return Py_True;
+	}
+	return NULL;
+}
+
+/*!# ibm_db.execute
+ *
+ * ===Description
+ * Py_True/Py_False ibm_db.execute ( IBM_DBStatement stmt [, tuple parameters] )
+ *
+ * ibm_db.execute() executes an SQL statement that was prepared by
+ * ibm_db.prepare().
+ *
+ * If the SQL statement returns a result set, for example, a SELECT statement
+ * or a CALL to a stored procedure that returns one or more result sets, you
+ * can retrieve a row as a tuple/dictionary from the stmt resource using
+ * ibm_db.fetch_assoc(), ibm_db.fetch_both(), or ibm_db.fetch_tuple().
+ * Alternatively, you can use ibm_db.fetch_row() to move the result set pointer
+ * to the next row and fetch a column at a time from that row with
+ * ibm_db.result().
+ *
+ * Refer to ibm_db.prepare() for a brief discussion of the advantages of using
+ * ibm_db.prepare() and ibm_db.execute() rather than ibm_db.exec().
+ *
+ * ===Parameters
+ * ====stmt
+ *
+ *		A prepared statement returned from ibm_db.prepare().
+ *
+ * ====parameters
+ *
+ *		A tuple of input parameters matching any parameter markers contained
+ * in the prepared statement.
+ *
+ * ===Return Values
+ *
+ * Returns Py_True on success or Py_False on failure.
+ */
+static PyObject *ibm_db_execute(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *parameters_tuple = NULL;
+	stmt_handle *stmt_res;
+	/* args: a statement handle plus an optional tuple of parameter values */
+	if (!PyArg_ParseTuple(args, "O|O", &py_stmt_res, &parameters_tuple))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		/* Validate the handle type before casting it */
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		/* All parameter binding and execution is delegated to the helper */
+		return _python_ibm_db_execute_helper1(stmt_res, parameters_tuple);
+	} else {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+		return NULL;
+	}
+	
+}
+
+
+/*!# ibm_db.conn_errormsg
+ *
+ * ===Description
+ * string ibm_db.conn_errormsg ( [resource connection] )
+ *
+ * ibm_db.conn_errormsg() returns an error message and SQLCODE value
+ * representing the reason the last database connection attempt failed.
+ * As ibm_db.connect() returns FALSE in the event of a failed connection
+ * attempt, do not pass any parameters to ibm_db.conn_errormsg() to retrieve
+ * the associated error message and SQLCODE value.
+ *
+ * If, however, the connection was successful but becomes invalid over time,
+ * you can pass the connection parameter to retrieve the associated error
+ * message and SQLCODE value for a specific connection.
+ * ===Parameters
+ *
+ * ====connection
+ *		A connection resource associated with a connection that initially
+ * succeeded, but which over time became invalid.
+ *
+ * ===Return Values
+ *
+ * Returns a string containing the error message and SQLCODE value resulting
+ * from a failed connection attempt. If there is no error associated with the
+ * last connection attempt, ibm_db.conn_errormsg() returns an empty string.
+ */
+static PyObject *ibm_db_conn_errormsg(PyObject *self, PyObject *args)
+{
+	conn_handle *conn_res = NULL;
+	PyObject *py_conn_res = NULL;
+	PyObject *retVal = NULL;
+	char* return_str = NULL;	/* Buffer filled by
+					* _python_ibm_db_check_sql_errors with the
+					* error message text
+					*/
+
+	/* args: optional connection handle; without one the last global
+	 * connection error message is returned */
+	if (!PyArg_ParseTuple(args, "|O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			/* NOTE(review): an exception is set here but execution falls
+			 * through and still fetches diagnostics -- confirm intended. */
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+		}
+
+		return_str = ALLOC_N(char, DB2_MAX_ERR_MSG_LEN);
+
+		memset(return_str, 0, DB2_MAX_ERR_MSG_LEN);
+
+		/* rc = -1, API = 0: fetch diagnostics only, do not log a new error */
+		_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, -1, 0, 
+										  return_str, DB2_ERRMSG, 
+										  conn_res->errormsg_recno_tracker);
+		/* Keep the errormsg/error record trackers walking the same
+		 * diagnostic record sequence */
+		if(conn_res->errormsg_recno_tracker - conn_res->error_recno_tracker >= 1)
+			 conn_res->error_recno_tracker = conn_res->errormsg_recno_tracker;
+		conn_res->errormsg_recno_tracker++;
+
+		retVal =  StringOBJ_FromASCII(return_str);
+		if(return_str != NULL) {
+			PyMem_Del(return_str);
+			return_str = NULL;
+		}
+		return retVal;
+	} else {
+		/* No handle supplied: return the last global connection error */
+		return StringOBJ_FromASCII(IBM_DB_G(__python_conn_err_msg));
+	}
+}
+
+/*!# ibm_db.stmt_errormsg
+ *
+ * ===Description
+ * string ibm_db.stmt_errormsg ( [resource stmt] )
+ *
+ * Returns a string containing the last SQL statement error message.
+ *
+ * If you do not pass a statement resource as an argument to
+ * ibm_db.stmt_errormsg(), the driver returns the error message associated with
+ * the last attempt to return a statement resource, for example, from
+ * ibm_db.prepare() or ibm_db.exec().
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns a string containing the error message and SQLCODE value for the last
+ * error that occurred issuing an SQL statement.
+ */
+static PyObject *ibm_db_stmt_errormsg(PyObject *self, PyObject *args)
+{
+	stmt_handle *stmt_res = NULL;
+	PyObject *py_stmt_res = NULL;
+	PyObject *retVal = NULL;
+	char* return_str = NULL;	/* Buffer filled by
+					* _python_ibm_db_check_sql_errors with the
+					* error message text
+					*/
+
+	/* args: optional statement handle; without one the last global
+	 * statement error message is returned */
+	if (!PyArg_ParseTuple(args, "|O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		return_str = ALLOC_N(char, DB2_MAX_ERR_MSG_LEN);
+
+		memset(return_str, 0, DB2_MAX_ERR_MSG_LEN);
+
+		/* rc = -1, API = 0: fetch diagnostics only, do not log a new error */
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, -1, 0, 
+										return_str, DB2_ERRMSG, 
+										stmt_res->errormsg_recno_tracker);
+		/* Keep the errormsg/error record trackers walking the same
+		 * diagnostic record sequence */
+		if(stmt_res->errormsg_recno_tracker - stmt_res->error_recno_tracker >= 1)
+			stmt_res->error_recno_tracker = stmt_res->errormsg_recno_tracker;
+		stmt_res->errormsg_recno_tracker++;
+
+		retVal = StringOBJ_FromASCII(return_str);
+		if(return_str != NULL) {
+			PyMem_Del(return_str);
+			return_str = NULL;
+		}
+		return retVal;
+	} else {
+		/* No handle supplied: return the last global statement error */
+		return StringOBJ_FromASCII(IBM_DB_G(__python_stmt_err_msg));
+	}
+}
+
+/*!# ibm_db.conn_error
+ * ===Description
+ * string ibm_db.conn_error ( [resource connection] )
+ *
+ * ibm_db.conn_error() returns an SQLSTATE value representing the reason the
+ * last attempt to connect to a database failed. As ibm_db.connect() returns
+ * FALSE in the event of a failed connection attempt, you do not pass any
+ * parameters to ibm_db.conn_error() to retrieve the SQLSTATE value.
+ *
+ * If, however, the connection was successful but becomes invalid over time, you
+ * can pass the connection parameter to retrieve the SQLSTATE value for a
+ * specific connection.
+ *
+ * To learn what the SQLSTATE value means, you can issue the following command
+ * at a DB2 Command Line Processor prompt: db2 '? sqlstate-value'. You can also
+ * call ibm_db.conn_errormsg() to retrieve an explicit error message and the
+ * associated SQLCODE value.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A connection resource associated with a connection that initially
+ * succeeded, but which over time became invalid.
+ *
+ * ===Return Values
+ *
+ * Returns the SQLSTATE value resulting from a failed connection attempt.
+ * Returns an empty string if there is no error associated with the last
+ * connection attempt.
+ */
+static PyObject *ibm_db_conn_error(PyObject *self, PyObject *args)			
+{
+	conn_handle *conn_res = NULL;
+	PyObject *py_conn_res = NULL;
+	PyObject *retVal = NULL;
+	char *return_str = NULL; /* Buffer filled by
+							 * _python_ibm_db_check_sql_errors with the
+							 * SQLSTATE value */
+
+	/* args: optional connection handle; without one the last global
+	 * connection SQLSTATE is returned */
+	if (!PyArg_ParseTuple(args, "|O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		/* DB2_ERR fetches only the SQLSTATE, so a small buffer suffices */
+		return_str = ALLOC_N(char, SQL_SQLSTATE_SIZE + 1);
+
+		memset(return_str, 0, SQL_SQLSTATE_SIZE + 1);
+
+		/* rc = -1, API = 0: fetch diagnostics only, do not log a new error */
+		_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, -1, 0, 
+										return_str, DB2_ERR, 
+										conn_res->error_recno_tracker);
+		/* Keep the error/errormsg record trackers walking the same
+		 * diagnostic record sequence */
+		if (conn_res->error_recno_tracker-conn_res->errormsg_recno_tracker >= 1) {
+			conn_res->errormsg_recno_tracker = conn_res->error_recno_tracker;
+		}
+		conn_res->error_recno_tracker++;
+
+		retVal = StringOBJ_FromASCII(return_str);
+		if(return_str != NULL) {
+			PyMem_Del(return_str);
+			return_str = NULL;
+		}
+		return retVal;
+	} else {
+		/* No handle supplied: return the last global connection SQLSTATE */
+		return StringOBJ_FromASCII(IBM_DB_G(__python_conn_err_state));
+	}
+}
+
+/*!# ibm_db.stmt_error
+ *
+ * ===Description
+ * string ibm_db.stmt_error ( [resource stmt] )
+ *
+ * Returns a string containing the SQLSTATE value returned by an SQL statement.
+ *
+ * If you do not pass a statement resource as an argument to
+ * ibm_db.stmt_error(), the driver returns the SQLSTATE value associated with
+ * the last attempt to return a statement resource, for example, from
+ * ibm_db.prepare() or ibm_db.exec().
+ *
+ * To learn what the SQLSTATE value means, you can issue the following command
+ * at a DB2 Command Line Processor prompt: db2 '? sqlstate-value'. You can also
+ * call ibm_db.stmt_errormsg() to retrieve an explicit error message and the
+ * associated SQLCODE value.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns a string containing an SQLSTATE value.
+ */
+static PyObject *ibm_db_stmt_error(PyObject *self, PyObject *args)
+{
+	stmt_handle *stmt_res = NULL;
+	PyObject *py_stmt_res = NULL;
+	PyObject *retVal = NULL;
+	char* return_str = NULL; /* Buffer filled by
+							 * _python_ibm_db_check_sql_errors with the
+							 * SQLSTATE value
+							 */
+
+	/* args: optional statement handle; without one the last global
+	 * statement SQLSTATE is returned */
+	if (!PyArg_ParseTuple(args, "|O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		/* NOTE(review): allocates DB2_MAX_ERR_MSG_LEN although DB2_ERR
+		 * fetches only the SQLSTATE; ibm_db_conn_error uses
+		 * SQL_SQLSTATE_SIZE + 1 -- inconsistent but harmless. */
+		return_str = ALLOC_N(char, DB2_MAX_ERR_MSG_LEN);
+
+		memset(return_str, 0, DB2_MAX_ERR_MSG_LEN);
+
+		/* rc = -1, API = 0: fetch diagnostics only, do not log a new error */
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, -1, 0, 
+										return_str, DB2_ERR, 
+										stmt_res->error_recno_tracker);
+
+		/* Keep the error/errormsg record trackers walking the same
+		 * diagnostic record sequence */
+		if (stmt_res->error_recno_tracker-stmt_res->errormsg_recno_tracker >= 1) {
+			stmt_res->errormsg_recno_tracker = stmt_res->error_recno_tracker;
+		}
+		stmt_res->error_recno_tracker++;
+
+		retVal = StringOBJ_FromASCII(return_str);
+		if(return_str != NULL) {
+			PyMem_Del(return_str);
+			return_str = NULL;
+		}
+		return retVal;
+	} else {
+		/* No handle supplied: return the last global statement SQLSTATE */
+		return StringOBJ_FromASCII(IBM_DB_G(__python_stmt_err_state));
+	}
+}
+
+/*!# ibm_db.next_result
+ *
+ * ===Description
+ * resource ibm_db.next_result ( resource stmt )
+ *
+ * Requests the next result set from a stored procedure.
+ *
+ * A stored procedure can return zero or more result sets. While you handle the
+ * first result set in exactly the same way you would handle the results
+ * returned by a simple SELECT statement, to fetch the second and subsequent
+ * result sets from a stored procedure you must call the ibm_db.next_result()
+ * function and return the result to a uniquely named Python variable.
+ *
+ * ===Parameters
+ * ====stmt
+ *		A prepared statement returned from ibm_db.exec() or ibm_db.execute().
+ *
+ * ===Return Values
+ *
+ * Returns a new statement resource containing the next result set if the stored
+ * procedure returned another result set. Returns FALSE if the stored procedure
+ * did not return another result set.
+ */
+static PyObject *ibm_db_next_result(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res, *new_stmt_res = NULL;
+	int rc = 0;
+	SQLHANDLE new_hstmt;
+
+	/* args: the statement handle whose next result set is requested */
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		_python_ibm_db_clear_stmt_err_cache();
+
+		/* alloc handle and return only if it errors */
+		rc = SQLAllocHandle(SQL_HANDLE_STMT, stmt_res->hdbc, &new_hstmt);
+		if ( rc < SQL_SUCCESS ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			Py_INCREF(Py_False);
+			return Py_False;
+		}
+
+		/* Associate the next result set of the stored procedure with the
+		 * freshly allocated statement handle */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLNextResult((SQLHSTMT)stmt_res->hstmt, (SQLHSTMT)new_hstmt);
+		Py_END_ALLOW_THREADS;
+		
+		if( rc != SQL_SUCCESS ) {
+			if(rc < SQL_SUCCESS) {
+				_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, 
+												rc, 1, NULL, -1, 1);
+			}
+			/* No further result set (or error): free the handle and
+			 * return False rather than raising */
+			SQLFreeHandle(SQL_HANDLE_STMT, new_hstmt);
+			Py_INCREF(Py_False);
+			return Py_False;
+		}
+
+		/* Initialize stmt resource members with default values. */
+		/* Parsing will update options if needed */
+		/* NOTE(review): PyObject_NEW result is not checked for NULL
+		 * before the fields below are written -- confirm acceptable. */
+		new_stmt_res = PyObject_NEW(stmt_handle, &stmt_handleType);
+		new_stmt_res->s_bin_mode = stmt_res->s_bin_mode;
+		new_stmt_res->cursor_type = stmt_res->cursor_type;
+		new_stmt_res->s_case_mode = stmt_res->s_case_mode;
+		new_stmt_res->head_cache_list = NULL;
+		new_stmt_res->current_node = NULL;
+		new_stmt_res->num_params = 0;
+		new_stmt_res->file_param = 0;
+		new_stmt_res->column_info = NULL;
+		new_stmt_res->num_columns = 0;
+		new_stmt_res->row_data = NULL;
+		new_stmt_res->hstmt = new_hstmt;
+		new_stmt_res->hdbc = stmt_res->hdbc;
+
+		return (PyObject *)new_stmt_res;		
+	} else {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+		return NULL;
+	}
+}
+
+/*!# ibm_db.num_fields
+ *
+ * ===Description
+ * int ibm_db.num_fields ( resource stmt )
+ *
+ * Returns the number of fields contained in a result set. This is most useful
+ * for handling the result sets returned by dynamically generated queries, or
+ * for result sets returned by stored procedures, where your application cannot
+ * otherwise know how to retrieve and use the results.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid statement resource containing a result set.
+ *
+ * ===Return Values
+ *
+ * Returns an integer value representing the number of fields in the result set
+ * associated with the specified statement resource. Returns FALSE if the
+ * statement resource is not a valid input value.
+ */
+static PyObject *ibm_db_num_fields(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res;
+	int rc = 0;
+	SQLSMALLINT indx = 0;
+	char error[DB2_MAX_ERR_MSG_LEN];
+
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLNumResultCols((SQLHSTMT)stmt_res->hstmt, &indx);
+		Py_END_ALLOW_THREADS;
+		
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+											1, NULL, -1, 1);
+			/* snprintf: the CLI error text length is unbounded, so
+			 * never overflow the fixed-size error buffer. */
+			snprintf(error, sizeof(error), "SQLNumResultCols failed: %s", 
+					IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);	
+			return NULL;
+		}
+		return PyInt_FromLong(indx);
+	}
+	/* NULL statement object supplied. */
+	PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+	return NULL;
+}
+
+/*!# ibm_db.num_rows
+ *
+ * ===Description
+ * int ibm_db.num_rows ( resource stmt )
+ *
+ * Returns the number of rows deleted, inserted, or updated by an SQL statement.
+ *
+ * To determine the number of rows that will be returned by a SELECT statement,
+ * issue SELECT COUNT(*) with the same predicates as your intended SELECT
+ * statement and retrieve the value. If your application logic checks the number
+ * of rows returned by a SELECT statement and branches if the number of rows is
+ * 0, consider modifying your application to attempt to return the first row
+ * with one of ibm_db.fetch_assoc(), ibm_db.fetch_both(), ibm_db.fetch_array(),
+ * or ibm_db.fetch_row(), and branch if the fetch function returns FALSE.
+ *
+ * Note: If you issue a SELECT statement using a scrollable cursor,
+ * ibm_db.num_rows() returns the number of rows returned by the SELECT
+ * statement. However, the overhead associated with scrollable cursors
+ * significantly degrades the performance of your application, so if this is the
+ * only reason you are considering using scrollable cursors, you should use a
+ * forward-only cursor and either call SELECT COUNT(*) or rely on the boolean
+ * return value of the fetch functions to achieve the equivalent functionality
+ * with much better performance.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid stmt resource containing a result set.
+ *
+ * ===Return Values
+ *
+ * Returns the number of rows affected by the last SQL statement issued by the
+ * specified statement handle.
+ */
+static PyObject *ibm_db_num_rows(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res;
+	int rc = 0;
+	SQLINTEGER count = 0;
+	char error[DB2_MAX_ERR_MSG_LEN];
+
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLRowCount((SQLHSTMT)stmt_res->hstmt, &count);
+		Py_END_ALLOW_THREADS;
+		
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 
+											1, NULL, -1, 1);
+			/* snprintf: the CLI error text length is unbounded, so
+			 * never overflow the fixed-size error buffer. */
+			snprintf(error, sizeof(error), "SQLRowCount failed: %s", 
+					IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+		return PyInt_FromLong(count);
+	}
+	/* NULL statement object supplied. */
+	PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+	return NULL;
+}
+
+/*!# ibm_db.get_num_result 
+ *
+ * ===Description
+ * int ibm_db.get_num_result ( resource stmt )
+ *
+ * Returns the number of rows in a current open non-dynamic scrollable cursor.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid stmt resource containing a result set.
+ *
+ * ===Return Values
+ *
+ * True on success or False on failure.
+ */
+static PyObject *ibm_db_get_num_result(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res;
+	int rc = 0;
+	SQLINTEGER count = 0;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	SQLSMALLINT strLenPtr;
+
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+
+		/* Read the cursor row count from the statement's diagnostics
+		 * area (record 0, SQL_DIAG_CURSOR_ROW_COUNT). */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetDiagField(SQL_HANDLE_STMT, stmt_res->hstmt, 0,
+								SQL_DIAG_CURSOR_ROW_COUNT, &count, SQL_IS_INTEGER,
+								&strLenPtr);
+		Py_END_ALLOW_THREADS;
+		
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc,
+											1, NULL, -1, 1);
+			/* snprintf: the CLI error text length is unbounded, so
+			 * never overflow the fixed-size error buffer. */
+			snprintf(error, sizeof(error), "SQLGetDiagField failed: %s",
+					IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+		return PyInt_FromLong(count);
+	}
+	/* NULL statement object supplied. */
+	PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+	return NULL;
+}
+
+/* static int _python_ibm_db_get_column_by_name(stmt_handle *stmt_res, char *col_name, int col)
+ */
+/* Resolve a column reference to a 0-based column index.
+ * When col_name is non-NULL it is matched (exact strcmp) against the
+ * cached column headers; otherwise col is validated as a positional
+ * index. Returns the index on success, -1 when the column is unknown
+ * or the result-set metadata cannot be loaded. */
+static int _python_ibm_db_get_column_by_name(stmt_handle *stmt_res, char *col_name, int col)
+{
+	int idx;
+
+	/* Lazily populate the column metadata cache on first use. */
+	if ( stmt_res->column_info == NULL &&
+		 _python_ibm_db_get_result_set_info(stmt_res) < 0 ) {
+		return -1;
+	}
+
+	if ( col_name == NULL ) {
+		/* Positional lookup: accept only indexes inside the set. */
+		return (col >= 0 && col < stmt_res->num_columns) ? col : -1;
+	}
+
+	/* Name lookup: linear scan over the cached headers. */
+	for (idx = 0; idx < stmt_res->num_columns; idx++) {
+		if (strcmp((char*)stmt_res->column_info[idx].name, col_name) == 0) {
+			return idx;
+		}
+	}
+	return -1;
+}
+
+/*!# ibm_db.field_name
+ *
+ * ===Description
+ * string ibm_db.field_name ( resource stmt, mixed column )
+ *
+ * Returns the name of the specified column in the result set.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns a string containing the name of the specified column. If the
+ * specified column does not exist in the result set, ibm_db.field_name()
+ * returns FALSE.
+ */
+static PyObject *ibm_db_field_name(PyObject *self, PyObject *args)			
+{
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	PyObject *py_stmt_res = NULL;
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	int col = -1;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	} 
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	/* col_name is no longer referenced; safe to drop the temp bytes. */
+	if (col_name_py3_tmp != NULL) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_INCREF(Py_False);
+		return Py_False;
+	}
+	return StringOBJ_FromASCII((char*)stmt_res->column_info[col].name);
+}
+
+/*!# ibm_db.field_display_size
+ *
+ * ===Description
+ * int ibm_db.field_display_size ( resource stmt, mixed column )
+ *
+ * Returns the maximum number of bytes required to display a column in a result
+ * set.
+ *
+ * ===Parameters
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns an integer value with the maximum number of bytes required to display
+ * the specified column.
+ * If the column does not exist in the result set, ibm_db.field_display_size()
+ * returns FALSE.
+ */
+static PyObject *ibm_db_field_display_size(PyObject *self, PyObject *args) 
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	int col = -1;
+	char *col_name = NULL;
+	stmt_handle *stmt_res = NULL;
+	int rc;
+	SQLINTEGER colDataDisplaySize;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}	
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_RETURN_FALSE;
+	}
+
+	/* CLI column numbers are 1-based, hence col+1. */
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLColAttributes((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT)col+1,
+		 SQL_DESC_DISPLAY_SIZE, NULL, 0, NULL, &colDataDisplaySize);
+	Py_END_ALLOW_THREADS;
+	
+	if ( rc < SQL_SUCCESS ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, 
+									  NULL, -1, 1);
+		Py_INCREF(Py_False);
+		return Py_False;
+	}
+	return PyInt_FromLong(colDataDisplaySize);
+}
+/*!# ibm_db.field_nullable
+ *
+ * ===Description
+ * bool ibm_db.field_nullable ( resource stmt, mixed column )
+ *
+ * Returns True/False based on indicated column in result set is nullable or not.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *              Specifies a statement resource containing a result set. 
+ *
+ * ====column
+ *              Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns TRUE if indicated column is nullable else returns FALSE.
+ * If the specified column does not exist in the result set, ibm_db.field_nullable() returns FALSE
+ */
+static PyObject *ibm_db_field_nullable(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	int col = -1;
+	int rc;
+	SQLINTEGER nullableCol;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		 Py_RETURN_FALSE;
+	}
+
+	/* CLI column numbers are 1-based, hence col+1. */
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLColAttributes((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT)col+1,
+		SQL_DESC_NULLABLE, NULL, 0, NULL, &nullableCol);
+	Py_END_ALLOW_THREADS;
+
+	/* True only for SQL_NULLABLE; errors and SQL_NO_NULLS give False. */
+	if ( rc < SQL_SUCCESS ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1,
+					NULL, -1, 1);
+		Py_RETURN_FALSE;
+	} else if ( nullableCol == SQL_NULLABLE ) {
+		Py_RETURN_TRUE;
+	} else {
+		Py_RETURN_FALSE;
+	}
+}
+/*!# ibm_db.field_num
+ *
+ * ===Description
+ * int ibm_db.field_num ( resource stmt, mixed column )
+ *
+ * Returns the position of the named column in a result set.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns an integer containing the 0-indexed position of the named column in
+ * the result set. If the specified column does not exist in the result set,
+ * ibm_db.field_num() returns FALSE.
+ */
+static PyObject *ibm_db_field_num(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	int col = -1;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_INCREF(Py_False);
+		return Py_False;
+	}
+	return PyInt_FromLong(col);
+}
+
+/*!# ibm_db.field_precision
+ *
+ * ===Description
+ * int ibm_db.field_precision ( resource stmt, mixed column )
+ *
+ * Returns the precision of the indicated column in a result set.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns an integer containing the precision of the specified column. If the
+ * specified column does not exist in the result set, ibm_db.field_precision()
+ * returns FALSE.
+ */
+static PyObject *ibm_db_field_precision(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	int col = -1;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	} 
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_RETURN_FALSE;
+	}
+	/* Served from the cached column metadata; no CLI round trip. */
+	return PyInt_FromLong(stmt_res->column_info[col].size);
+
+}
+
+/*!# ibm_db.field_scale
+ *
+ * ===Description
+ * int ibm_db.field_scale ( resource stmt, mixed column )
+ *
+ * Returns the scale of the indicated column in a result set.
+ *
+ * ===Parameters
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns an integer containing the scale of the specified column. If the
+ * specified column does not exist in the result set, ibm_db.field_scale()
+ * returns FALSE.
+ */
+static PyObject *ibm_db_field_scale(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	int col = -1;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_RETURN_FALSE;
+	}
+	/* Served from the cached column metadata; no CLI round trip. */
+	return PyInt_FromLong(stmt_res->column_info[col].scale);
+}
+
+/*!# ibm_db.field_type
+ *
+ * ===Description
+ * string ibm_db.field_type ( resource stmt, mixed column )
+ *
+ * Returns the data type of the indicated column in a result set.
+ *
+ * ===Parameters
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ====Return Values
+ *
+ * Returns a string containing the defined data type of the specified column.
+ * If the specified column does not exist in the result set, ibm_db.field_type()
+ * returns FALSE.
+ */
+static PyObject *ibm_db_field_type(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	stmt_handle* stmt_res = NULL;
+	char *col_name = NULL;
+	char *str_val = NULL;
+	int col = -1;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_RETURN_FALSE;
+	}
+	/* Map the cached SQL type code to a coarse type-name string;
+	 * anything unrecognized (CHAR/VARCHAR etc.) reports as "string". */
+	switch (stmt_res->column_info[col].type) {
+		case SQL_SMALLINT:
+		case SQL_INTEGER:
+			str_val = "int";
+			break;
+		case SQL_BIGINT:
+			str_val = "bigint";
+			break;
+		case SQL_REAL:
+		case SQL_FLOAT:
+		case SQL_DOUBLE:
+		case SQL_DECFLOAT:
+			str_val = "real";
+			break;
+		case SQL_DECIMAL:
+		case SQL_NUMERIC:
+			str_val = "decimal";
+			break;
+		case SQL_CLOB:
+			str_val = "clob";
+			break;
+		case SQL_DBCLOB:
+			str_val = "dbclob";
+			break;	
+		case SQL_BLOB:
+			str_val = "blob";
+			break;
+		case SQL_XML:
+			str_val = "xml";
+			break;
+		case SQL_TYPE_DATE:
+			str_val = "date";
+			break;
+		case SQL_TYPE_TIME:
+			str_val = "time";
+			break;
+		case SQL_TYPE_TIMESTAMP:
+			str_val = "timestamp";
+			break;
+		default:
+			str_val = "string";
+			break;
+	}
+	return StringOBJ_FromASCII(str_val);
+}
+
+/*!# ibm_db.field_width
+ *
+ * ===Description
+ * int ibm_db.field_width ( resource stmt, mixed column )
+ *
+ * Returns the width of the current value of the indicated column in a result
+ * set. This is the maximum width of the column for a fixed-length data type, or
+ * the actual width of the column for a variable-length data type.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		Specifies a statement resource containing a result set.
+ *
+ * ====column
+ *		Specifies the column in the result set. This can either be an integer
+ * representing the 0-indexed position of the column, or a string containing the
+ * name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns an integer containing the width of the specified character or binary
+ * data type column in a result set. If the specified column does not exist in
+ * the result set, ibm_db.field_width() returns FALSE.
+ */
+static PyObject *ibm_db_field_width(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	/* Owned bytes object produced when a py3 str name is supplied. */
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	int col = -1;
+	char *col_name = NULL;
+	stmt_handle *stmt_res = NULL;
+	int rc;
+	SQLINTEGER colDataSize;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+	/* column may be a 0-based index (int) or a column name (string). */
+	if ( TYPE(column) == PYTHON_FIXNUM ) {
+		col = PyInt_AsLong(column);
+	} else if (PyString_Check(column)) {
+#if  PY_MAJOR_VERSION >= 3
+		col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+		if (col_name_py3_tmp == NULL) {
+			return NULL;
+		}
+		column = col_name_py3_tmp;
+#endif
+		col_name = PyBytes_AsString(column);
+	} else {
+		/* Column argument has to be either an integer or string */
+		Py_RETURN_FALSE;
+	}
+	col = _python_ibm_db_get_column_by_name(stmt_res, col_name, col);
+#if  PY_MAJOR_VERSION >= 3
+	if ( col_name_py3_tmp != NULL ) {
+		Py_XDECREF(col_name_py3_tmp);
+	}
+#endif
+	if ( col < 0 ) {
+		Py_RETURN_FALSE;
+	}
+	
+	/* CLI column numbers are 1-based, hence col + 1. */
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLColAttributes((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT)col + 1,
+		SQL_DESC_LENGTH, NULL, 0, NULL, &colDataSize);
+	Py_END_ALLOW_THREADS;
+	
+	/* NOTE(review): != SQL_SUCCESS also rejects SQL_SUCCESS_WITH_INFO,
+	 * unlike field_display_size which uses rc < SQL_SUCCESS — confirm
+	 * whether this stricter check is intentional. */
+	if ( rc != SQL_SUCCESS ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, 
+			NULL, -1, 1);
+		PyErr_Clear();
+		Py_RETURN_FALSE;
+	}
+	return PyInt_FromLong(colDataSize);
+}
+
+/*!# ibm_db.cursor_type
+ *
+ * ===Description
+ * int ibm_db.cursor_type ( resource stmt )
+ *
+ * Returns the cursor type used by a statement resource. Use this to determine
+ * if you are working with a forward-only cursor or scrollable cursor.
+ *
+ * ===Parameters
+ * ====stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns either SQL_SCROLL_FORWARD_ONLY if the statement resource uses a
+ * forward-only cursor or SQL_CURSOR_KEYSET_DRIVEN if the statement resource
+ * uses a scrollable cursor.
+ */
+static PyObject *ibm_db_cursor_type(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res = NULL;
+
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	/* NOTE(review): despite the doc comment above, this returns 0 for a
+	 * forward-only cursor and 1 for anything else — not the SQL_* cursor
+	 * constants themselves. */
+	return PyInt_FromLong(stmt_res->cursor_type != SQL_SCROLL_FORWARD_ONLY);
+}
+
+/*!# ibm_db.rollback
+ *
+ * ===Description
+ * bool ibm_db.rollback ( resource connection )
+ *
+ * Rolls back an in-progress transaction on the specified connection resource
+ * and begins a new transaction. Python applications normally default to
+ * AUTOCOMMIT mode, so ibm_db.rollback() normally has no effect unless
+ * AUTOCOMMIT has been turned off for the connection resource.
+ *
+ * Note: If the specified connection resource is a persistent connection, all
+ * transactions in progress for all applications using that persistent
+ * connection will be rolled back. For this reason, persistent connections are
+ * not recommended for use in applications that require transactions.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		A valid database connection resource variable as returned from
+ * ibm_db.connect() or ibm_db.pconnect().
+ *
+ * ===Return Values
+ *
+ * Returns TRUE on success or FALSE on failure.
+ */
+static PyObject *ibm_db_rollback(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res;
+	int rc;
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+		/* Release the GIL while the (potentially long-running) rollback
+		 * executes, consistent with every other blocking CLI call here. */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLEndTran(SQL_HANDLE_DBC, conn_res->hdbc, SQL_ROLLBACK);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+				NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			Py_RETURN_TRUE;
+		}
+	}
+	Py_RETURN_FALSE;
+}
+
+/*!# ibm_db.free_stmt
+ *
+ * ===Description
+ * bool ibm_db.free_stmt ( resource stmt )
+ *
+ * Frees the system and database resources that are associated with a statement
+ * resource. These resources are freed implicitly when a script finishes, but
+ * you can call ibm_db.free_stmt() to explicitly free the statement resources
+ * before the end of the script.
+ *
+ * ===Parameters
+ * ====stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns TRUE on success or FALSE on failure.
+ *
+ * DEPRECATED
+ */
+static PyObject *ibm_db_free_stmt(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *handle;
+	SQLRETURN rc;
+	if (!PyArg_ParseTuple(args, "O", &py_stmt_res))
+		return NULL;
+	if (!NIL_P(py_stmt_res)) {
+		if (PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			handle = (stmt_handle *)py_stmt_res;
+			/* hstmt == -1 is the sentinel for "already freed". */
+			if (handle->hstmt != -1) {
+				rc = SQLFreeHandle( SQL_HANDLE_STMT, handle->hstmt);
+				if ( rc == SQL_ERROR ){ 
+					_python_ibm_db_check_sql_errors(handle->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+					Py_RETURN_FALSE;
+				}
+				 _python_ibm_db_free_result_struct(handle);
+				handle->hstmt = -1;
+				Py_RETURN_TRUE;
+			}
+		}
+	}
+	/* Invalid argument or already-freed handle: quietly return None. */
+	Py_RETURN_NONE;
+}
+
+/*	static RETCODE _python_ibm_db_get_data(stmt_handle *stmt_res, int col_num, short ctype, void *buff, int in_length, SQLINTEGER *out_length) */
+/* Thin wrapper over SQLGetData: fetches col_num of the current row into
+ * buff (as C type ctype, capacity in_length; actual length reported via
+ * out_length), releasing the GIL for the duration of the call. On
+ * SQL_ERROR the CLI diagnostics are recorded; the raw rc is returned
+ * either way for the caller to interpret. */
+static RETCODE _python_ibm_db_get_data(stmt_handle *stmt_res, int col_num, short ctype, void *buff, int in_length, SQLINTEGER *out_length)
+{
+	RETCODE rc = SQL_SUCCESS;
+	
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLGetData((SQLHSTMT)stmt_res->hstmt, col_num, ctype, buff, in_length, 
+		out_length);
+	Py_END_ALLOW_THREADS;
+	
+	if ( rc == SQL_ERROR ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, 
+			NULL, -1, 1);
+	}
+	return rc;
+}
+
+/*!# ibm_db.result
+ *
+ * ===Description
+ * mixed ibm_db.result ( resource stmt, mixed column )
+ *
+ * Returns a single column from a row in the result set
+ *
+ * Use ibm_db.result() to return the value of a specified column in the
+ * current row of a result set. You must call ibm_db.fetch_row() before calling
+ * ibm_db.result() to set the location of the result set pointer.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid stmt resource.
+ *
+ * ====column
+ *		Either an integer mapping to the 0-indexed field in the result set,
+ * or a string matching the name of the column.
+ *
+ * ===Return Values
+ *
+ * Returns the value of the requested field if the field exists in the result
+ * set. Returns NULL if the field does not exist, and issues a warning.
+ */
+static PyObject *ibm_db_result(PyObject *self, PyObject *args)
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *column = NULL;
+#if  PY_MAJOR_VERSION >= 3
+	PyObject *col_name_py3_tmp = NULL;
+#endif
+	PyObject *retVal = NULL;
+	stmt_handle *stmt_res;
+	long col_num;
+	RETCODE rc;
+	void	*out_ptr;
+	DATE_STRUCT *date_ptr;
+	TIME_STRUCT *time_ptr;
+	TIMESTAMP_STRUCT *ts_ptr;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	SQLINTEGER in_length, out_length = -10; /* Initialize out_length to some
+						* meaningless value
+						* */
+	SQLSMALLINT column_type, targetCType = SQL_C_CHAR, len_terChar = 0 ;
+	double double_val;
+	SQLINTEGER long_val;
+	PyObject *return_value = NULL;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_stmt_res, &column))
+		return NULL;
+
+	if (!NIL_P(py_stmt_res)) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+
+		/* The column may be given either as a 0-based ordinal or as a name. */
+		if ( TYPE(column) == PYTHON_FIXNUM ) {
+			col_num = PyLong_AsLong(column);
+			
+		} else if (PyString_Check(column)) { 
+#if  PY_MAJOR_VERSION >= 3
+			col_name_py3_tmp = PyUnicode_AsASCIIString(column);
+			if (col_name_py3_tmp == NULL) {
+				return NULL;
+			}
+			column = col_name_py3_tmp;
+#endif
+			col_num = _python_ibm_db_get_column_by_name(stmt_res, PyBytes_AsString(column), -1);
+#if  PY_MAJOR_VERSION >= 3
+			if ( col_name_py3_tmp != NULL ) {
+				Py_XDECREF(col_name_py3_tmp);
+			}
+#endif
+		} else {
+			/* Column argument has to be either an integer or string */
+			Py_RETURN_FALSE;
+		}
+
+	/* get column header info */
+	if ( stmt_res->column_info == NULL ) {
+		if (_python_ibm_db_get_result_set_info(stmt_res)<0) {
+			sprintf(error, "Column information cannot be retrieved: %s", 
+					IBM_DB_G(__python_stmt_err_msg));
+			strcpy(IBM_DB_G(__python_stmt_err_msg), error);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		}
+	}
+
+	if(col_num < 0 || col_num >= stmt_res->num_columns) {
+		strcpy(IBM_DB_G(__python_stmt_err_msg), "Column ordinal out of range");
+		PyErr_Clear();
+		Py_RETURN_NONE;
+	}
+
+	/* get the data */
+	column_type = stmt_res->column_info[col_num].type;
+	switch(column_type) {
+		case SQL_CHAR:
+		case SQL_VARCHAR:
+		case SQL_WCHAR:
+		case SQL_WVARCHAR:
+		case SQL_GRAPHIC:
+		case SQL_VARGRAPHIC:
+#ifndef PASE /* i5/OS SQL_LONGVARCHAR is SQL_VARCHAR */
+		case SQL_LONGVARCHAR:
+		case SQL_LONGVARGRAPHIC:
+#endif /* PASE */
+		case SQL_BIGINT:
+		case SQL_DECIMAL:
+		case SQL_NUMERIC:
+		case SQL_DECFLOAT:
+			/* DECIMAL/NUMERIC need room for sign, decimal point and NUL
+			 * in addition to precision + scale digits. */
+			if (column_type == SQL_DECIMAL || column_type == SQL_NUMERIC){
+				in_length = stmt_res->column_info[col_num].size + 
+							stmt_res->column_info[col_num].scale + 2 + 1;
+			}
+			else{
+				in_length = stmt_res->column_info[col_num].size+1;
+			}
+			out_ptr = (SQLPOINTER)ALLOC_N(Py_UNICODE, in_length);
+
+			if ( out_ptr == NULL ) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return NULL;
+			}
+
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_WCHAR, 
+						 out_ptr, in_length * sizeof(Py_UNICODE), &out_length);
+
+			if ( rc == SQL_ERROR ) {
+				if(out_ptr != NULL) {
+					PyMem_Del(out_ptr);
+					out_ptr = NULL;
+				}
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+			if (out_length == SQL_NULL_DATA) {
+				Py_INCREF(Py_None);
+				return_value = Py_None;
+			} else if (column_type == SQL_BIGINT){
+				/* NOTE(review): PyLong_FromString expects a narrow (char *)
+				 * string, but out_ptr was fetched as SQL_C_WCHAR here —
+				 * TODO confirm this parses correctly for the driver's
+				 * wide-char layout. */
+				return_value = PyLong_FromString(out_ptr, NULL, 0);
+			} else {
+				return_value = getSQLWCharAsPyUnicodeObject(out_ptr, out_length);
+			}
+			PyMem_Del(out_ptr);
+			out_ptr = NULL;
+			return return_value;
+
+		case SQL_TYPE_DATE:
+			date_ptr = ALLOC(DATE_STRUCT);
+			if (date_ptr == NULL) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return NULL;
+			}
+
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_TYPE_DATE,
+						date_ptr, sizeof(DATE_STRUCT), &out_length);
+
+			if ( rc == SQL_ERROR ) {
+				if(date_ptr != NULL) {
+					PyMem_Del(date_ptr);
+					date_ptr = NULL;
+				}
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+			if (out_length == SQL_NULL_DATA) {
+				PyMem_Del(date_ptr);
+				date_ptr = NULL;
+				Py_RETURN_NONE;
+			} else {
+				return_value = PyDate_FromDate(date_ptr->year, date_ptr->month, date_ptr->day);
+				PyMem_Del(date_ptr);
+				date_ptr = NULL;
+				return return_value;
+			}
+			break;
+
+		case SQL_TYPE_TIME:
+			time_ptr = ALLOC(TIME_STRUCT);
+			if (time_ptr == NULL) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return NULL;
+			}
+
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_TYPE_TIME,
+						time_ptr, sizeof(TIME_STRUCT), &out_length);
+
+			if ( rc == SQL_ERROR ) {
+				if(time_ptr != NULL) {
+					PyMem_Del(time_ptr);
+					time_ptr = NULL;
+				}
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+
+			if (out_length == SQL_NULL_DATA) {
+				PyMem_Del(time_ptr);
+				time_ptr = NULL;
+				Py_RETURN_NONE;
+			} else {
+				return_value = PyTime_FromTime(time_ptr->hour, time_ptr->minute, time_ptr->second, 0);
+				PyMem_Del(time_ptr);
+				time_ptr = NULL;
+				return return_value;
+			}
+			break;
+
+		case SQL_TYPE_TIMESTAMP:
+			ts_ptr = ALLOC(TIMESTAMP_STRUCT);
+			if (ts_ptr == NULL) {
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return NULL;
+			}
+
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_TYPE_TIMESTAMP,
+						ts_ptr, sizeof(TIMESTAMP_STRUCT), &out_length);
+
+			if ( rc == SQL_ERROR ) {
+				if(ts_ptr != NULL) {
+					PyMem_Del(ts_ptr);
+					/* Fix: was `time_ptr = NULL;` (copy/paste from the TIME
+					 * case), which left ts_ptr dangling after the free. */
+					ts_ptr = NULL;
+				}
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+
+			if (out_length == SQL_NULL_DATA) {
+				PyMem_Del(ts_ptr);
+				ts_ptr = NULL;
+				Py_RETURN_NONE;
+			} else {
+				return_value = PyDateTime_FromDateAndTime(ts_ptr->year, ts_ptr->month, ts_ptr->day, ts_ptr->hour, ts_ptr->minute, ts_ptr->second, ts_ptr->fraction / 1000);
+				PyMem_Del(ts_ptr);
+				ts_ptr = NULL;
+				return return_value;
+			}
+			break;
+
+		case SQL_SMALLINT:
+		case SQL_INTEGER:
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_LONG, 
+						 &long_val, sizeof(long_val), 
+						 &out_length);
+			if ( rc == SQL_ERROR ) {
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+			if (out_length == SQL_NULL_DATA) {
+				Py_RETURN_NONE;
+			} else {
+				return PyInt_FromLong(long_val);
+			}
+			break;
+
+		case SQL_REAL:
+		case SQL_FLOAT:
+		case SQL_DOUBLE:
+			rc = _python_ibm_db_get_data(stmt_res, col_num+1, SQL_C_DOUBLE, 
+						 &double_val, sizeof(double_val), 
+						 &out_length);
+			if ( rc == SQL_ERROR ) {
+				PyErr_Clear();
+				Py_RETURN_FALSE;
+			}
+			if (out_length == SQL_NULL_DATA) {
+				Py_RETURN_NONE;
+			} else {
+				return PyFloat_FromDouble(double_val);
+			}
+			break;
+
+		case SQL_BLOB:
+		case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARCHAR is SQL_VARCHAR */
+		case SQL_LONGVARBINARY:
+#endif /* PASE */
+		case SQL_VARBINARY:
+			switch (stmt_res->s_bin_mode) {
+				case PASSTHRU:
+					return PyBytes_FromStringAndSize("", 0);
+					break;
+					/* returns here */
+				case CONVERT:
+					targetCType = SQL_C_CHAR;
+					len_terChar = sizeof(char);
+					break;
+				case BINARY:
+					targetCType = SQL_C_BINARY;
+					len_terChar = 0;
+					break;
+				default:
+					Py_RETURN_FALSE;
+			}
+			/* deliberate fall-through into the LOB retrieval below */
+		case SQL_XML:
+		case SQL_CLOB:
+		case SQL_DBCLOB:
+			if (column_type == SQL_CLOB || column_type == SQL_DBCLOB || column_type == SQL_XML) {
+				len_terChar = sizeof(SQLWCHAR);
+				targetCType = SQL_C_WCHAR;
+			}
+			out_ptr = ALLOC_N(char, INIT_BUFSIZ + len_terChar);
+			if ( out_ptr == NULL ) {
+				 PyErr_SetString(PyExc_Exception,
+						"Failed to Allocate Memory for XML Data");
+				return NULL;
+			}
+			/* First pass with a fixed buffer; SQL_SUCCESS_WITH_INFO means
+			 * truncation, and out_length then holds the remaining size. */
+			rc = _python_ibm_db_get_data(stmt_res, col_num + 1, targetCType, out_ptr, 
+					INIT_BUFSIZ + len_terChar, &out_length);
+			if ( rc == SQL_SUCCESS_WITH_INFO ) {
+				void *tmp_out_ptr = NULL;
+
+				tmp_out_ptr = ALLOC_N(char, out_length + INIT_BUFSIZ + len_terChar);
+				memcpy(tmp_out_ptr, out_ptr, INIT_BUFSIZ);
+				PyMem_Del(out_ptr);
+				out_ptr = tmp_out_ptr;
+
+				rc = _python_ibm_db_get_data(stmt_res, col_num + 1, targetCType, (char *)out_ptr + INIT_BUFSIZ,
+					out_length + len_terChar, &out_length);
+				if (rc == SQL_ERROR) {
+					PyMem_Del(out_ptr);
+					out_ptr = NULL;
+					return NULL;
+				}
+				if (len_terChar == sizeof(SQLWCHAR)) {
+					retVal = getSQLWCharAsPyUnicodeObject(out_ptr, INIT_BUFSIZ + out_length);
+				} else {
+					retVal = PyBytes_FromStringAndSize((char *)out_ptr, INIT_BUFSIZ + out_length);
+				}
+			} else if ( rc == SQL_ERROR ) {
+				PyMem_Del(out_ptr);
+				out_ptr = NULL;
+				Py_RETURN_FALSE;
+			} else {
+				if (out_length == SQL_NULL_DATA) {
+					Py_INCREF(Py_None);
+					retVal = Py_None;
+				} else {
+					if (len_terChar == 0) {
+						retVal = PyBytes_FromStringAndSize((char *)out_ptr, out_length);
+					} else {
+						retVal = getSQLWCharAsPyUnicodeObject(out_ptr, out_length);
+					}
+				}
+
+			}
+			if (out_ptr != NULL) {
+				PyMem_Del(out_ptr);
+				out_ptr = NULL;
+			}
+			return retVal;
+		default:
+			break;
+		}
+	} else {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+	}
+	Py_RETURN_FALSE;
+}
+
+/* static void _python_ibm_db_bind_fetch_helper(INTERNAL_FUNCTION_PARAMETERS, 
+												int op)
+*/
+/* Core fetch routine shared by ibm_db.fetch_assoc / fetch_array / fetch_both.
+ * Fetches the next (or, when row_number is supplied, an absolute) row on the
+ * statement and converts each bound column into a Python value.  `op`
+ * selects the container: FETCH_ASSOC builds a dict keyed by column name,
+ * FETCH_INDEX a tuple keyed by position, FETCH_BOTH a dict keyed by both. */
+static PyObject *_python_ibm_db_bind_fetch_helper(PyObject *args, int op)
+{
+	int rc = -1;
+	int column_number;
+	SQLINTEGER row_number = -1;
+	stmt_handle *stmt_res = NULL;
+	SQLSMALLINT column_type ;
+	ibm_db_row_data_type *row_data;
+	SQLINTEGER out_length, tmp_length = 0;
+	void *out_ptr = NULL;
+	SQLWCHAR *wout_ptr = NULL;
+	int len_terChar = 0;
+	SQLSMALLINT targetCType = SQL_C_CHAR;
+	PyObject *py_stmt_res = NULL;
+	PyObject *return_value = NULL;
+	PyObject *key = NULL;
+	PyObject *value = NULL;
+	PyObject *py_row_number = NULL;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	
+	if (!PyArg_ParseTuple(args, "O|O", &py_stmt_res, &py_row_number))
+		return NULL;
+	
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString(PyExc_Exception, "Supplied statement object parameter is invalid");
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	if (!NIL_P(py_row_number)) {
+		if (PyInt_Check(py_row_number)) {
+			row_number = (SQLINTEGER) PyInt_AsLong(py_row_number);
+		} else {
+			PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+			return NULL;
+		}
+	}
+	_python_ibm_db_init_error_info(stmt_res);
+
+	/* get column header info */
+	if ( stmt_res->column_info == NULL ) {
+		if (_python_ibm_db_get_result_set_info(stmt_res)<0) {
+			sprintf(error, "Column information cannot be retrieved: %s", 
+				IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+	}
+	/* bind the data */
+	if ( stmt_res->row_data == NULL ) {
+		rc = _python_ibm_db_bind_column_helper(stmt_res);
+		if ( rc != SQL_SUCCESS && rc != SQL_SUCCESS_WITH_INFO ) {
+			sprintf(error, "Column binding cannot be done: %s", 
+				IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;
+		}
+	}
+	/* check if row_number is present */
+	if (PyTuple_Size(args) == 2 && row_number > 0) {
+#ifndef PASE /* i5/OS problem with SQL_FETCH_ABSOLUTE (temporary until fixed) */
+		if (is_systemi) {
+
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_FIRST, 
+				row_number);
+			Py_END_ALLOW_THREADS;
+			
+			/* NOTE: Py_BEGIN/END_ALLOW_THREADS expand to a matched { ... }
+			 * pair, so the un-braced `if` below controls the whole compound
+			 * statement — fragile-looking but correct. */
+			if (row_number>1 && (rc == SQL_SUCCESS || rc == SQL_SUCCESS_WITH_INFO))
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_RELATIVE, 
+				row_number-1);
+				Py_END_ALLOW_THREADS;
+		} else {
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_ABSOLUTE, 
+				row_number);
+			Py_END_ALLOW_THREADS;
+		}
+#else /* PASE */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_FIRST, 
+			row_number);
+		Py_END_ALLOW_THREADS;
+
+		if (row_number>1 && (rc == SQL_SUCCESS || rc == SQL_SUCCESS_WITH_INFO))
+			Py_BEGIN_ALLOW_THREADS;
+
+			rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_RELATIVE, 
+			row_number-1);
+
+			Py_END_ALLOW_THREADS;
+#endif /* PASE */
+	} else if (PyTuple_Size(args) == 2 && row_number < 0) {
+		PyErr_SetString(PyExc_Exception, 
+			"Requested row number must be a positive value");
+		return NULL;
+	} else {
+		/* row_number is NULL or 0; just fetch next row */
+		Py_BEGIN_ALLOW_THREADS;
+
+		rc = SQLFetch((SQLHSTMT)stmt_res->hstmt);
+
+		Py_END_ALLOW_THREADS;
+	}
+
+	if (rc == SQL_NO_DATA_FOUND) {
+		Py_INCREF(Py_False);
+		return Py_False;
+	} else if ( rc != SQL_SUCCESS && rc != SQL_SUCCESS_WITH_INFO) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, 
+			NULL, -1, 1);
+		sprintf(error, "Fetch Failure: %s", IBM_DB_G(__python_stmt_err_msg));
+		PyErr_SetString(PyExc_Exception, error);
+		return NULL;
+	}
+	/* copy the data over return_value */
+	if ( op & FETCH_ASSOC ) {
+		return_value = PyDict_New();
+	} else if ( op == FETCH_INDEX ) {
+		return_value = PyTuple_New(stmt_res->num_columns);
+	}
+
+	for (column_number = 0; column_number < stmt_res->num_columns; column_number++) {
+		column_type = stmt_res->column_info[column_number].type;
+		row_data = &stmt_res->row_data[column_number].data;
+		out_length = stmt_res->row_data[column_number].out_length;
+
+		/* Apply the configured case folding to the column name before it is
+		 * used as a dict key. */
+		switch(stmt_res->s_case_mode) {
+			case CASE_LOWER:
+				stmt_res->column_info[column_number].name = 
+					(SQLCHAR*)strtolower((char*)stmt_res->column_info[column_number].name, 
+					strlen((char*)stmt_res->column_info[column_number].name));
+				break;
+			case CASE_UPPER:
+				stmt_res->column_info[column_number].name = 
+					(SQLCHAR*)strtoupper((char*)stmt_res->column_info[column_number].name, 
+					strlen((char*)stmt_res->column_info[column_number].name));
+				break;
+			case CASE_NATURAL:
+			default:
+					break;
+		}
+		if (out_length == SQL_NULL_DATA) {
+			Py_INCREF(Py_None);
+			value = Py_None;
+		} else {
+			switch(column_type) {
+				case SQL_CHAR:
+				case SQL_VARCHAR:
+				case SQL_WCHAR:
+				case SQL_WVARCHAR:
+				case SQL_GRAPHIC:
+				case SQL_VARGRAPHIC:
+				case SQL_LONGVARGRAPHIC:
+					tmp_length = stmt_res->column_info[column_number].size;
+					value = getSQLWCharAsPyUnicodeObject(row_data->w_val, out_length);
+					break;
+
+#ifndef PASE /* i5/OS SQL_LONGVARCHAR is SQL_VARCHAR */
+				case SQL_LONGVARCHAR:
+				case SQL_WLONGVARCHAR:
+
+#endif /* PASE */
+					/* i5/OS will xlate from EBCIDIC to ASCII (via SQLGetData) */
+					tmp_length = stmt_res->column_info[column_number].size;
+
+					wout_ptr = (SQLWCHAR *)ALLOC_N(SQLWCHAR, tmp_length + 1);
+					if ( wout_ptr == NULL ) {
+						PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+						return NULL;
+					}
+					
+					/*  _python_ibm_db_get_data null terminates all output. */
+					/* NOTE(review): the byte count below looks like it was
+					 * meant to be (tmp_length + 1) * sizeof(SQLWCHAR) —
+					 * TODO confirm against the driver's expectations. */
+					rc = _python_ibm_db_get_data(stmt_res, column_number + 1, SQL_C_WCHAR, wout_ptr,
+						(tmp_length * sizeof(SQLWCHAR) + 1), &out_length);
+					if ( rc == SQL_ERROR ) {
+						/* NOTE(review): wout_ptr (and return_value) leak on
+						 * this early return. */
+						return NULL;
+					}
+					if (out_length == SQL_NULL_DATA) {
+						Py_INCREF(Py_None);
+						value = Py_None;
+					} else {
+						value = getSQLWCharAsPyUnicodeObject(wout_ptr, out_length);
+					}
+					if (wout_ptr != NULL) {
+						PyMem_Del(wout_ptr);
+						wout_ptr = NULL;
+					}
+					break;
+				
+				case SQL_DECIMAL:
+				case SQL_NUMERIC:
+				case SQL_DECFLOAT:
+					value = StringOBJ_FromASCII((char *)row_data->str_val);
+					break;
+
+				case SQL_TYPE_DATE:
+					value = PyDate_FromDate(row_data->date_val->year, row_data->date_val->month, row_data->date_val->day);
+					break;
+
+				case SQL_TYPE_TIME:
+					value = PyTime_FromTime(row_data->time_val->hour, row_data->time_val->minute, row_data->time_val->second, 0);
+					break;
+
+				case SQL_TYPE_TIMESTAMP:
+					/* TIMESTAMP fraction is in nanoseconds; Python wants
+					 * microseconds. */
+					value = PyDateTime_FromDateAndTime(row_data->ts_val->year, row_data->ts_val->month, row_data->ts_val->day,
+									row_data->ts_val->hour, row_data->ts_val->minute, row_data->ts_val->second,
+									row_data->ts_val->fraction / 1000);
+					break;
+
+				case SQL_BIGINT:
+					value = PyLong_FromString((char *)row_data->str_val, NULL, 10);
+					break;
+
+				case SQL_SMALLINT:
+					value = PyInt_FromLong(row_data->s_val);
+					break;
+
+				case SQL_INTEGER:
+					value = PyInt_FromLong(row_data->i_val);
+					break;
+
+				case SQL_REAL:
+					value = PyFloat_FromDouble(row_data->r_val);
+					break;
+
+				case SQL_FLOAT:
+					value = PyFloat_FromDouble(row_data->f_val);
+					break;
+
+				case SQL_DOUBLE:
+					value = PyFloat_FromDouble(row_data->d_val);
+					break;
+
+				case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+				case SQL_LONGVARBINARY:
+#endif /* PASE */
+				case SQL_VARBINARY:
+					if ( stmt_res->s_bin_mode == PASSTHRU ) {
+						value = PyBytes_FromStringAndSize("", 0);
+					} else {
+						value = PyBytes_FromStringAndSize((char *)row_data->str_val, out_length);
+					}
+					break;
+
+				case SQL_BLOB:
+					switch (stmt_res->s_bin_mode) {
+						case PASSTHRU:
+							/* NOTE(review): this returns None for the whole
+							 * row from inside the column loop, abandoning
+							 * return_value — presumably `value` was intended;
+							 * verify against the BINARY case above. */
+							Py_RETURN_NONE;
+							break;
+						case CONVERT:
+							len_terChar = sizeof(char);
+							targetCType = SQL_C_CHAR;
+							break;
+						case BINARY:
+							len_terChar = 0;
+							targetCType = SQL_C_BINARY;
+							break;
+						default:
+							len_terChar = -1;
+							break;
+					}
+					/* deliberate fall-through into the LOB retrieval below */
+				case SQL_XML:
+				case SQL_CLOB:
+				case SQL_DBCLOB:
+					if (column_type == SQL_CLOB || column_type == SQL_DBCLOB || column_type == SQL_XML) {
+						len_terChar = sizeof(SQLWCHAR);
+						targetCType = SQL_C_WCHAR;
+					} else if (len_terChar == -1) {
+						break;
+					}
+					out_ptr = (void *)ALLOC_N(char, INIT_BUFSIZ + len_terChar);
+					if (out_ptr == NULL) {
+						PyErr_SetString(PyExc_Exception,
+							"Failed to Allocate Memory for LOB Data");
+						return NULL;
+					}
+					/* First pass with a fixed buffer; SQL_SUCCESS_WITH_INFO
+					 * signals truncation and out_length then holds the
+					 * remaining size, fetched by the second call. */
+					rc = _python_ibm_db_get_data(stmt_res, column_number + 1, targetCType, out_ptr,
+						INIT_BUFSIZ + len_terChar, &out_length);
+					if (rc == SQL_SUCCESS_WITH_INFO) {
+						void *tmp_out_ptr = NULL;
+
+						tmp_out_ptr = (void *)ALLOC_N(char, out_length + INIT_BUFSIZ + len_terChar);
+						memcpy(tmp_out_ptr, out_ptr, INIT_BUFSIZ);
+						PyMem_Del(out_ptr);
+						out_ptr = tmp_out_ptr;
+
+						rc = _python_ibm_db_get_data(stmt_res, column_number + 1, targetCType, (char *)out_ptr + INIT_BUFSIZ,
+							out_length + len_terChar, &out_length);
+						if (rc == SQL_ERROR) {
+							if (out_ptr != NULL) {
+								PyMem_Del(out_ptr);
+								out_ptr = NULL;
+							}
+							sprintf(error, "Failed to fetch LOB Data: %s",
+								IBM_DB_G(__python_stmt_err_msg));
+							PyErr_SetString(PyExc_Exception, error);
+							return NULL;
+						}
+						
+						if (len_terChar == sizeof(SQLWCHAR)) {
+							value = getSQLWCharAsPyUnicodeObject(out_ptr, INIT_BUFSIZ + out_length);
+						} else {
+							value = PyBytes_FromStringAndSize((char*)out_ptr, INIT_BUFSIZ + out_length);
+						}
+					} else if ( rc == SQL_ERROR ) {
+						PyMem_Del(out_ptr);
+						out_ptr = NULL;
+						sprintf(error, "Failed to LOB Data: %s", 
+							IBM_DB_G(__python_stmt_err_msg));
+						PyErr_SetString(PyExc_Exception, error);
+						return NULL;
+					} else {
+						if (out_length == SQL_NULL_DATA) {
+							Py_INCREF(Py_None);
+							value = Py_None;
+						} else {
+							if (len_terChar == sizeof(SQLWCHAR)) {
+								value =  getSQLWCharAsPyUnicodeObject(out_ptr, out_length);
+							} else {
+								value = PyBytes_FromStringAndSize((char*)out_ptr, out_length);
+							}
+						}
+					}
+					if (out_ptr != NULL) {
+						PyMem_Del(out_ptr);
+						out_ptr = NULL;
+					}
+					break;
+
+				default:
+					Py_INCREF(Py_None);
+					value = Py_None;
+					break;
+				}
+		}
+		if (op & FETCH_ASSOC) {
+			key = StringOBJ_FromASCII((char*)stmt_res->column_info[column_number].name);
+			PyDict_SetItem(return_value, key, value);
+			Py_DECREF(key);
+		}
+		if (op == FETCH_INDEX) {
+			/* No need to call Py_DECREF as PyTuple_SetItem steals the reference */
+			PyTuple_SetItem(return_value, column_number, value);
+		} else {
+			if (op == FETCH_BOTH) {
+				key = PyInt_FromLong(column_number);
+				PyDict_SetItem(return_value, key, value);
+				Py_DECREF(key);
+			}
+			/* PyDict_SetItem does not steal; drop our reference to value. */
+			Py_DECREF(value);
+		}
+	}
+	return return_value;
+}
+
+/*!# ibm_db.fetch_row
+ *
+ * ===Description
+ * bool ibm_db.fetch_row ( resource stmt [, int row_number] )
+ *
+ * Sets the result set pointer to the next row or requested row
+ *
+ * Use ibm_db.fetch_row() to iterate through a result set, or to point to a
+ * specific row in a result set if you requested a scrollable cursor.
+ *
+ * To retrieve individual fields from the result set, call the ibm_db.result()
+ * function. Rather than calling ibm_db.fetch_row() and ibm_db.result(), most
+ * applications will call one of ibm_db.fetch_assoc(), ibm_db.fetch_both(), or
+ * ibm_db.fetch_array() to advance the result set pointer and return a complete
+ * row as an array.
+ *
+ * ===Parameters
+ * ====stmt
+ *		A valid stmt resource.
+ *
+ * ====row_number
+ *		With scrollable cursors, you can request a specific row number in the
+ * result set. Row numbering is 1-indexed.
+ *
+ * ===Return Values
+ *
+ * Returns TRUE if the requested row exists in the result set. Returns FALSE if
+ * the requested row does not exist in the result set.
+ */
+/* Advance the result-set cursor to the next row, or to an absolute row when
+ * row_number is supplied.  Returns True when a row is available, False when
+ * the result set is exhausted or the requested row does not exist. */
+static PyObject *ibm_db_fetch_row(PyObject *self, PyObject *args)			
+{
+	PyObject *py_stmt_res = NULL;
+	PyObject *py_row_number = NULL;
+	SQLINTEGER row_number = -1;
+	stmt_handle* stmt_res = NULL;
+	int rc;
+	char error[DB2_MAX_ERR_MSG_LEN];
+
+	if (!PyArg_ParseTuple(args, "O|O", &py_stmt_res, &py_row_number))
+		return NULL;
+
+	if (NIL_P(py_stmt_res) || (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType))) {
+		PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+		return NULL;
+	} else {
+		stmt_res = (stmt_handle *)py_stmt_res;
+	}
+
+	if (!NIL_P(py_row_number)) {
+		if (PyInt_Check(py_row_number)) {
+			row_number = (SQLINTEGER) PyInt_AsLong(py_row_number);
+		} else {
+			PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+			return NULL;
+		}
+	}
+	/* get column header info */
+	if ( stmt_res->column_info == NULL ) {
+		if (_python_ibm_db_get_result_set_info(stmt_res)<0) {
+			sprintf(error, "Column information cannot be retrieved: %s", 
+				 IBM_DB_G(__python_stmt_err_msg));
+			PyErr_SetString(PyExc_Exception, error);
+			return NULL;	
+		}
+	}
+
+	/* check if row_number is present */
+	if (PyTuple_Size(args) == 2 && row_number > 0) { 
+#ifndef PASE /* i5/OS problem with SQL_FETCH_ABSOLUTE */
+		/* Release the GIL around the driver call, consistent with every
+		 * other fetch call site in this module (previously this was the
+		 * only unwrapped one). */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_ABSOLUTE, 
+						  row_number);
+		Py_END_ALLOW_THREADS;
+#else /* PASE */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_FIRST, 
+						  row_number);
+		Py_END_ALLOW_THREADS;
+
+		/* Py_BEGIN/END_ALLOW_THREADS expand to a matched { ... } pair, so
+		 * this un-braced `if` controls the whole compound statement. */
+		if (row_number>1 && (rc == SQL_SUCCESS || rc == SQL_SUCCESS_WITH_INFO))
+			Py_BEGIN_ALLOW_THREADS;
+			rc = SQLFetchScroll((SQLHSTMT)stmt_res->hstmt, SQL_FETCH_RELATIVE, 
+							 row_number-1);
+			Py_END_ALLOW_THREADS;
+#endif /* PASE */
+	} else if (PyTuple_Size(args) == 2 && row_number < 0) {
+		PyErr_SetString(PyExc_Exception, 
+				  "Requested row number must be a positive value");
+		return NULL;
+	} else {
+		/* row_number is NULL or 0; just fetch next row */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLFetch((SQLHSTMT)stmt_res->hstmt);
+		Py_END_ALLOW_THREADS;
+	}
+
+	if (rc == SQL_SUCCESS || rc == SQL_SUCCESS_WITH_INFO) {
+		Py_RETURN_TRUE;
+	} else if (rc == SQL_NO_DATA_FOUND) {
+		Py_RETURN_FALSE;
+	} else {
+		/* Record diagnostics but report failure as False, not an exception. */
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1,
+								  NULL, -1, 1);
+		PyErr_Clear();
+		Py_RETURN_FALSE;
+	}
+}
+
+/*!# ibm_db.fetch_assoc
+ *
+ * ===Description
+ * dictionary ibm_db.fetch_assoc ( resource stmt [, int row_number] )
+ *
+ * Returns a dictionary, indexed by column name, representing a row in a result
+ * set.
+ *
+ * ===Parameters
+ * ====stmt
+ *		A valid stmt resource containing a result set.
+ *
+ * ====row_number
+ *
+ *		Requests a specific 1-indexed row from the result set. Passing this
+ * parameter results in a
+ *		Python warning if the result set uses a forward-only cursor.
+ *
+ * ===Return Values
+ *
+ * Returns an associative array with column values indexed by the column name
+ * representing the next
+ * or requested row in the result set. Returns FALSE if there are no rows left
+ * in the result set,
+ * or if the row requested by row_number does not exist in the result set.
+ */
+static PyObject *ibm_db_fetch_assoc(PyObject *self, PyObject *args)			
+{
+	/* Delegate to the common fetch helper in name-keyed (dict) mode. */
+	return _python_ibm_db_bind_fetch_helper(args, FETCH_ASSOC);
+}
+
+
+/*
+ * ibm_db.fetch_object --	Returns an object with properties representing columns in the fetched row
+ * 
+ * ===Description
+ * object ibm_db.fetch_object ( resource stmt [, int row_number] )
+ * 
+ * Returns an object in which each property represents a column returned in the row fetched from a result set.
+ * 
+ * ===Parameters
+ * 
+ * stmt
+ *		A valid stmt resource containing a result set. 
+ * 
+ * row_number
+ *		Requests a specific 1-indexed row from the result set. Passing this parameter results in a
+ *		Python warning if the result set uses a forward-only cursor. 
+ * 
+ * ===Return Values
+ * 
+ * Returns an object representing a single row in the result set. The properties of the object map
+ * to the names of the columns in the result set.
+ * 
+ * The IBM DB2, Cloudscape, and Apache Derby database servers typically fold column names to upper-case,
+ * so the object properties will reflect that case.
+ * 
+ * If your SELECT statement calls a scalar function to modify the value of a column, the database servers
+ * return the column number as the name of the column in the result set. If you prefer a more
+ * descriptive column name and object property, you can use the AS clause to assign a name
+ * to the column in the result set.
+ * 
+ * Returns FALSE if no row was retrieved. 
+ */
+/*
+PyObject *ibm_db_fetch_object(int argc, PyObject **argv, PyObject *self)
+{
+	row_hash_struct *row_res;
+
+	row_res = ALLOC(row_hash_struct);
+	row_res->hash = _python_ibm_db_bind_fetch_helper(argc, argv, FETCH_ASSOC);
+
+	if (RTEST(row_res->hash)) {
+	  return Data_Wrap_Struct(le_row_struct,
+			_python_ibm_db_mark_row_struct, _python_ibm_db_free_row_struct,
+			row_res);
+	} else {
+	  free(row_res);
+	  return Py_False;
+	}
+}
+*/
+
+/*!# ibm_db.fetch_array
+ *
+ * ===Description
+ *
+ * array ibm_db.fetch_array ( resource stmt [, int row_number] )
+ *
+ * Returns a tuple, indexed by column position, representing a row in a result
+ * set. The columns are 0-indexed.
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid stmt resource containing a result set.
+ *
+ * ====row_number
+ *		Requests a specific 1-indexed row from the result set. Passing this
+ * parameter results in a warning if the result set uses a forward-only cursor.
+ *
+ * ===Return Values
+ *
+ * Returns a 0-indexed tuple with column values indexed by the column position
+ * representing the next or requested row in the result set. Returns FALSE if
+ * there are no rows left in the result set, or if the row requested by
+ * row_number does not exist in the result set.
+ */
+static PyObject *ibm_db_fetch_array(PyObject *self, PyObject *args)
+{
+	/* Delegate to the common fetch helper in positional (tuple) mode. */
+	return _python_ibm_db_bind_fetch_helper(args, FETCH_INDEX);
+}
+
+/*!# ibm_db.fetch_both
+ *
+ * ===Description
+ * dictionary ibm_db.fetch_both ( resource stmt [, int row_number] )
+ *
+ * Returns a dictionary, indexed by both column name and position, representing
+ * a row in a result set. Note that the row returned by ibm_db.fetch_both()
+ * requires more memory than the single-indexed dictionaries/arrays returned by
+ * ibm_db.fetch_assoc() or ibm_db.fetch_tuple().
+ *
+ * ===Parameters
+ *
+ * ====stmt
+ *		A valid stmt resource containing a result set.
+ *
+ * ====row_number
+ *		Requests a specific 1-indexed row from the result set. Passing this
+ * parameter results in a warning if the result set uses a forward-only cursor.
+ *
+ * ===Return Values
+ *
+ * Returns a dictionary with column values indexed by both the column name and
+ * 0-indexed column number.
+ * The dictionary represents the next or requested row in the result set.
+ * Returns FALSE if there are no rows left in the result set, or if the row
+ * requested by row_number does not exist in the result set.
+ */
+static PyObject *ibm_db_fetch_both(PyObject *self, PyObject *args)
+{
+	/* Delegate to the common fetch helper keying by both name and position. */
+	return _python_ibm_db_bind_fetch_helper(args, FETCH_BOTH);
+}
+
+/*!# ibm_db.set_option
+ *
+ * ===Description
+ * bool ibm_db.set_option ( resource resc, array options, int type )
+ *
+ * Sets options for a connection or statement resource. You cannot set options
+ * for result set resources.
+ *
+ * ===Parameters
+ *
+ * ====resc
+ *		A valid connection or statement resource.
+ *
+ * ====options
+ *		The options to be set
+ *
+ * ====type
+ *		A field that specifies the resource type (1 = Connection,
+ * NON-1 = Statement)
+ *
+ * ===Return Values
+ *
+ * Returns TRUE on success or FALSE on failure
+ */
+/* Apply a dict of options to either a connection (type == 1) or a statement
+ * (any other type value) handle.  Returns True on success, False when the
+ * first argument is None; raises on invalid handle objects or bad options. */
+static PyObject *ibm_db_set_option(PyObject *self, PyObject *args)
+{
+	PyObject *conn_or_stmt = NULL;
+	PyObject *options = NULL;
+	PyObject *py_type = NULL;
+	stmt_handle *stmt_res = NULL;
+	conn_handle *conn_res;
+	int rc = 0;
+	long type = 0;
+
+	if (!PyArg_ParseTuple(args, "OOO", &conn_or_stmt, &options, &py_type))
+		return NULL;
+
+	if (!NIL_P(conn_or_stmt)) {
+		if (!NIL_P(py_type)) {
+			if (PyInt_Check(py_type)) {
+				type = (int) PyInt_AsLong(py_type);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		if ( type == 1 ) {
+			/* Connection resource: validate the handle, then parse the
+			 * options against the DBC handle. */
+			if (!PyObject_TypeCheck(conn_or_stmt, &conn_handleType)) {
+				PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+				return NULL;
+			}
+			conn_res = (conn_handle *)conn_or_stmt;
+
+			if ( !NIL_P(options) ) {
+				rc = _python_ibm_db_parse_options(options, SQL_HANDLE_DBC, 
+					conn_res);
+				if (rc == SQL_ERROR) {
+					PyErr_SetString(PyExc_Exception, 
+						"Options Array must have string indexes");
+					return NULL;
+				}
+			}
+		} else {
+			/* Any non-1 type is treated as a statement resource. */
+			if (!PyObject_TypeCheck(conn_or_stmt, &stmt_handleType)) {
+				PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+				return NULL;
+			}
+			stmt_res = (stmt_handle *)conn_or_stmt;				  
+
+			if ( !NIL_P(options) ) {
+				rc = _python_ibm_db_parse_options(options, SQL_HANDLE_STMT, 
+					stmt_res);
+				if (rc == SQL_ERROR) {
+					PyErr_SetString(PyExc_Exception, 
+						"Options Array must have string indexes");
+					return NULL;
+				}
+			}
+		}
+		Py_INCREF(Py_True);
+		return Py_True;
+	} else {
+		Py_INCREF(Py_False);
+		return Py_False;
+	}
+}
+
+/* Query a single SQLGetInfo attribute (selected by the integer `option`)
+ * from an active connection and return it as a string.  Returns False on
+ * driver error or when the connection argument is None. */
+static PyObject *ibm_db_get_db_info(PyObject *self, PyObject *args) 
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *return_value = NULL;
+	PyObject *py_option = NULL;
+	SQLINTEGER option = 0;
+	conn_handle *conn_res;
+	int rc = 0;
+	SQLCHAR *value=NULL;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_conn_res, &py_option))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		if (!NIL_P(py_option)) {
+			if (PyInt_Check(py_option)) {
+				option = (SQLINTEGER) PyInt_AsLong(py_option);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+		value = (SQLCHAR*)ALLOC_N(char, ACCTSTR_LEN + 1);
+		/* Fix: the allocation result was previously used unchecked; every
+		 * other allocation site in this module guards against NULL. */
+		if ( value == NULL ) {
+			PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+			return NULL;
+		}
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, (SQLSMALLINT)option, (SQLPOINTER)value, 
+						ACCTSTR_LEN, NULL);
+		Py_END_ALLOW_THREADS;
+		
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			if(value != NULL) {
+				PyMem_Del(value);
+				value = NULL;
+			}
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value = StringOBJ_FromASCII((char *)value);
+			if(value != NULL) {
+				PyMem_Del(value);
+				value = NULL;
+			}
+			return return_value;
+		}
+	}
+	Py_INCREF(Py_False);
+	return Py_False;
+}
+
+/*!# ibm_db.server_info
+ *
+ * ===Description
+ * object ibm_db.server_info ( resource connection )
+ *
+ * This function returns a read-only object with information about the IBM DB2
+ * or Informix Dynamic Server.
+ * The following table lists the database server properties:
+ *
+ * ====Table 1. Database server properties
+ * Property name:: Description (Return type)
+ *
+ * DBMS_NAME:: The name of the database server to which you are connected. For
+ * DB2 servers this is a combination of DB2 followed by the operating system on
+ * which the database server is running. (string)
+ *
+ * DBMS_VER:: The version of the database server, in the form of a string
+ * "MM.mm.uuuu" where MM is the major version, mm is the minor version, and
+ * uuuu is the update. For example, "08.02.0001" represents major version 8,
+ * minor version 2, update 1. (string)
+ *
+ * DB_CODEPAGE:: The code page of the database to which you are connected. (int)
+ *
+ * DB_NAME:: The name of the database to which you are connected. (string)
+ *
+ * DFT_ISOLATION:: The default transaction isolation level supported by the
+ * server: (string)
+ *
+ *						 UR:: Uncommitted read: changes are immediately
+ * visible by all concurrent transactions.
+ *
+ *						 CS:: Cursor stability: a row read by one transaction
+ * can be altered and committed by a second concurrent transaction.
+ *
+ *						 RS:: Read stability: a transaction can add or remove
+ * rows matching a search condition or a pending transaction.
+ *
+ *						 RR:: Repeatable read: data affected by pending
+ * transaction is not available to other transactions.
+ *
+ *						 NC:: No commit: any changes are visible at the end of
+ * a successful operation. Explicit commits and rollbacks are not allowed.
+ *
+ * IDENTIFIER_QUOTE_CHAR:: The character used to delimit an identifier. (string)
+ *
+ * INST_NAME:: The instance on the database server that contains the database.
+ * (string)
+ *
+ * ISOLATION_OPTION:: An array of the isolation options supported by the
+ * database server. The isolation options are described in the DFT_ISOLATION
+ * property. (array)
+ *
+ * KEYWORDS:: An array of the keywords reserved by the database server. (array)
+ *
+ * LIKE_ESCAPE_CLAUSE:: TRUE if the database server supports the use of % and _
+ * wildcard characters. FALSE if the database server does not support these
+ * wildcard characters. (bool)
+ *
+ * MAX_COL_NAME_LEN:: Maximum length of a column name supported by the database
+ * server, expressed in bytes. (int)
+ *
+ * MAX_IDENTIFIER_LEN:: Maximum length of an SQL identifier supported by the
+ * database server, expressed in characters. (int)
+ *
+ * MAX_INDEX_SIZE:: Maximum size of columns combined in an index supported by
+ * the database server, expressed in bytes. (int)
+ *
+ * MAX_PROC_NAME_LEN:: Maximum length of a procedure name supported by the
+ * database server, expressed in bytes. (int)
+ *
+ * MAX_ROW_SIZE:: Maximum length of a row in a base table supported by the
+ * database server, expressed in bytes. (int)
+ *
+ * MAX_SCHEMA_NAME_LEN:: Maximum length of a schema name supported by the
+ * database server, expressed in bytes. (int)
+ *
+ * MAX_STATEMENT_LEN:: Maximum length of an SQL statement supported by the
+ * database server, expressed in bytes. (int)
+ *
+ * MAX_TABLE_NAME_LEN:: Maximum length of a table name supported by the
+ * database server, expressed in bytes. (int)
+ *
+ * NON_NULLABLE_COLUMNS:: TRUE if the database server supports columns that can
+ * be defined as NOT NULL, FALSE if the database server does not support columns
+ * defined as NOT NULL. (bool)
+ *
+ * PROCEDURES:: TRUE if the database server supports the use of the CALL
+ * statement to call stored procedures, FALSE if the database server does not
+ * support the CALL statement. (bool)
+ *
+ * SPECIAL_CHARS:: A string containing all of the characters other than a-Z,
+ * 0-9, and underscore that can be used in an identifier name. (string)
+ *
+ * SQL_CONFORMANCE:: The level of conformance to the ANSI/ISO SQL-92
+ * specification offered by the database server: (string)
+ *
+ *							ENTRY:: Entry-level SQL-92 compliance.
+ *
+ *							FIPS127:: FIPS-127-2 transitional compliance.
+ *
+ *							FULL:: Full level SQL-92 compliance.
+ *
+ *							INTERMEDIATE:: Intermediate level SQL-92
+ *											compliance.
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *		Specifies an active DB2 client connection.
+ *
+ * ===Return Values
+ *
+ * Returns an object on a successful call. Returns FALSE on failure.
+ */
+/* Builds the le_server_info object returned by ibm_db.server_info() by
+ * issuing one SQLGetInfo() call per documented property (see the comment
+ * block above for the property list).  Any SQL_ERROR short-circuits the
+ * whole call and returns False. */
+static PyObject *ibm_db_server_info(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res;
+	int rc = 0;
+	/* Scratch buffers reused across the SQLGetInfo calls below; sized for
+	 * short codes, names, and the keyword list respectively. */
+	char buffer11[11];
+	char buffer255[255];
+	char buffer2k[2048];
+	SQLSMALLINT bufferint16;
+	SQLUINTEGER bufferint32;
+	SQLINTEGER bitmask;
+
+	/* NOTE(review): return_value is allocated before the arguments are even
+	 * parsed; every early "return NULL" / Py_RETURN_FALSE below appears to
+	 * leak it (and the property objects already attached) — TODO confirm
+	 * against server_infoType's dealloc and fix upstream. */
+	le_server_info *return_value = PyObject_NEW(le_server_info,
+										 &server_infoType);
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+		/* DBMS_NAME */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DBMS_NAME, (SQLPOINTER)buffer255, 
+						sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DBMS_NAME = StringOBJ_FromASCII(buffer255);
+		}
+
+		/* DBMS_VER */
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DBMS_VER, (SQLPOINTER)buffer11, 
+					sizeof(buffer11), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+				return_value->DBMS_VER = StringOBJ_FromASCII(buffer11);
+		}
+
+#ifndef PASE	  /* i5/OS DB_CODEPAGE handled natively */
+		/* DB_CODEPAGE */
+		bufferint32 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DATABASE_CODEPAGE, &bufferint32, 
+						sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DB_CODEPAGE = PyInt_FromLong(bufferint32);
+		}
+#endif /* PASE */
+
+		/* DB_NAME */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DATABASE_NAME, (SQLPOINTER)buffer255, 
+						sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			/* NOTE(review): unlike every other error path in this function,
+			 * this one neither calls PyErr_Clear() nor uses Py_RETURN_FALSE —
+			 * TODO confirm whether leaving the error indicator set here is
+			 * intentional. */
+			Py_INCREF(Py_False);
+			return Py_False;
+		} else {
+			return_value->DB_NAME = StringOBJ_FromASCII(buffer255);
+		}
+
+#ifndef PASE	  /* i5/OS INST_NAME handled natively */
+		/* INST_NAME */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_SERVER_NAME, (SQLPOINTER)buffer255, 
+					sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->INST_NAME = StringOBJ_FromASCII(buffer255);
+		}
+
+		/* SPECIAL_CHARS */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_SPECIAL_CHARACTERS, 
+					(SQLPOINTER)buffer255, sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->SPECIAL_CHARS = StringOBJ_FromASCII(buffer255);
+		}
+#endif /* PASE */
+
+		/* KEYWORDS */
+		memset(buffer2k, 0, sizeof(buffer2k));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_KEYWORDS, (SQLPOINTER)buffer2k, 
+						sizeof(buffer2k), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* Split the comma-separated keyword list returned by SQLGetInfo
+			 * into a tuple of strings: first count commas to size the tuple,
+			 * then NUL-terminate each segment in place and wrap it. */
+			char *keyword, *last;
+			PyObject *karray;
+			int numkw = 0;
+			int count = 0;
+		
+			for (last = buffer2k; *last; last++) {
+				if (*last == ',') {
+					numkw++;		 
+				}
+			}
+			karray = PyTuple_New(numkw+1);
+
+			for (keyword = last = buffer2k; *last; last++) {
+				if (*last == ',') {
+					*last = '\0';
+					PyTuple_SetItem(karray, count, StringOBJ_FromASCII(keyword));
+					keyword = last+1;
+					count++;
+				}
+			}
+			/* Trailing keyword after the final comma (if any). */
+			if (*keyword) 
+				PyTuple_SetItem(karray, count, StringOBJ_FromASCII(keyword));
+			return_value->KEYWORDS = karray;
+		}
+
+		/* DFT_ISOLATION */
+		bitmask = 0;
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DEFAULT_TXN_ISOLATION, &bitmask, 
+					sizeof(bitmask), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* Each strcpy overwrites the previous one, so the LAST matching
+			 * bit in this fixed order wins. */
+			if( bitmask & SQL_TXN_READ_UNCOMMITTED ) {
+				strcpy((char *)buffer11, "UR");
+			}
+			if( bitmask & SQL_TXN_READ_COMMITTED ) {
+				strcpy((char *)buffer11, "CS");
+			}
+			if( bitmask & SQL_TXN_REPEATABLE_READ ) {
+				strcpy((char *)buffer11, "RS");
+			}
+			if( bitmask & SQL_TXN_SERIALIZABLE ) {
+				strcpy((char *)buffer11, "RR");
+			}
+			if( bitmask & SQL_TXN_NOCOMMIT ) {
+				strcpy((char *)buffer11, "NC");
+			}
+			return_value->DFT_ISOLATION = StringOBJ_FromASCII(buffer11);
+		}
+
+#ifndef PASE	  /* i5/OS ISOLATION_OPTION handled natively */
+		/* ISOLATION_OPTION */
+		bitmask = 0;
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_TXN_ISOLATION_OPTION, &bitmask, 
+						sizeof(bitmask), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* Over-allocate for all five levels, then shrink the tuple to
+			 * the number of bits actually set. */
+			PyObject *array;
+			int count = 0;
+
+			array = PyTuple_New(5);
+
+			if( bitmask & SQL_TXN_READ_UNCOMMITTED ) {
+				PyTuple_SetItem(array, count, StringOBJ_FromASCII("UR"));
+				count++;
+			}
+			if( bitmask & SQL_TXN_READ_COMMITTED ) {
+				PyTuple_SetItem(array, count, StringOBJ_FromASCII("CS"));
+				count++;
+			}
+			if( bitmask & SQL_TXN_REPEATABLE_READ ) {
+				PyTuple_SetItem(array, count, StringOBJ_FromASCII("RS"));
+				count++;
+			}
+			if( bitmask & SQL_TXN_SERIALIZABLE ) {
+				PyTuple_SetItem(array, count, StringOBJ_FromASCII("RR"));
+				count++;
+			}
+			if( bitmask & SQL_TXN_NOCOMMIT ) {
+				PyTuple_SetItem(array, count, StringOBJ_FromASCII("NC"));
+				count++;
+			}
+			_PyTuple_Resize(&array, count);
+
+			return_value->ISOLATION_OPTION = array;
+		}
+#endif /* PASE */
+
+		/* SQL_CONFORMANCE */
+		bufferint32 = 0;
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_ODBC_SQL_CONFORMANCE, &bufferint32, 
+					sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* Unrecognized conformance codes fall through to an empty
+			 * string (buffer255 was just zeroed). */
+			switch (bufferint32) {
+				case SQL_SC_SQL92_ENTRY:
+					strcpy((char *)buffer255, "ENTRY");
+					break;
+				case SQL_SC_FIPS127_2_TRANSITIONAL:
+					strcpy((char *)buffer255, "FIPS127");
+					break;
+				case SQL_SC_SQL92_FULL:
+					strcpy((char *)buffer255, "FULL");
+					break;
+				case SQL_SC_SQL92_INTERMEDIATE:
+					strcpy((char *)buffer255, "INTERMEDIATE");
+					break;
+				default:
+					break;
+			}
+			return_value->SQL_CONFORMANCE = StringOBJ_FromASCII(buffer255);
+		}
+
+		/* PROCEDURES */
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_PROCEDURES, (SQLPOINTER)buffer11, 
+						sizeof(buffer11), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* SQLGetInfo reports "Y"/"N"; expose it as a Python bool. */
+			if( strcmp((char *)buffer11, "Y") == 0 ) {
+				Py_INCREF(Py_True);
+				return_value->PROCEDURES = Py_True;
+			} else {
+				Py_INCREF(Py_False);
+				return_value->PROCEDURES = Py_False;
+			}
+		}
+
+		/* IDENTIFIER_QUOTE_CHAR */
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_IDENTIFIER_QUOTE_CHAR, 
+				(SQLPOINTER)buffer11, sizeof(buffer11), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->IDENTIFIER_QUOTE_CHAR = StringOBJ_FromASCII(buffer11);
+		}
+
+		/* LIKE_ESCAPE_CLAUSE */
+		memset(buffer11, 0, sizeof(buffer11));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_LIKE_ESCAPE_CLAUSE, 
+						(SQLPOINTER)buffer11, sizeof(buffer11), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			if( strcmp(buffer11, "Y") == 0 ) {
+				Py_INCREF(Py_True);
+				return_value->LIKE_ESCAPE_CLAUSE = Py_True;
+			} else {
+				Py_INCREF(Py_False);
+				return_value->LIKE_ESCAPE_CLAUSE = Py_False;
+			}
+		}
+
+		/* MAX_COL_NAME_LEN */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_COLUMN_NAME_LEN, &bufferint16, 
+					sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_COL_NAME_LEN = PyInt_FromLong(bufferint16);
+		}
+
+		/* MAX_ROW_SIZE */
+		bufferint32 = 0;
+		
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_ROW_SIZE, &bufferint32, 
+						sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_ROW_SIZE = PyInt_FromLong(bufferint32);
+		}
+
+#ifndef PASE	  /* i5/OS MAX_IDENTIFIER_LEN handled natively */
+		/* MAX_IDENTIFIER_LEN */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_IDENTIFIER_LEN, &bufferint16, 
+					sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_IDENTIFIER_LEN = PyInt_FromLong(bufferint16);
+		}
+
+		/* MAX_INDEX_SIZE */
+		bufferint32 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_INDEX_SIZE, &bufferint32, 
+					sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_INDEX_SIZE = PyInt_FromLong(bufferint32);
+		}
+
+		/* MAX_PROC_NAME_LEN */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_PROCEDURE_NAME_LEN, &bufferint16, 
+						sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_PROC_NAME_LEN = PyInt_FromLong(bufferint16);
+		}
+#endif /* PASE */
+
+		/* MAX_SCHEMA_NAME_LEN */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_SCHEMA_NAME_LEN, &bufferint16, 
+						sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_SCHEMA_NAME_LEN = PyInt_FromLong(bufferint16);
+		}
+
+		/* MAX_STATEMENT_LEN */
+		bufferint32 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_STATEMENT_LEN, &bufferint32, 
+						sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+										NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_STATEMENT_LEN = PyInt_FromLong(bufferint32);
+		}
+
+		/* MAX_TABLE_NAME_LEN */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_MAX_TABLE_NAME_LEN, &bufferint16, 
+						sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->MAX_TABLE_NAME_LEN = PyInt_FromLong(bufferint16);
+		}
+
+		/* NON_NULLABLE_COLUMNS */
+		bufferint16 = 0;
+
+		Py_BEGIN_ALLOW_THREADS;
+
+		rc = SQLGetInfo(conn_res->hdbc, SQL_NON_NULLABLE_COLUMNS, &bufferint16, 
+					sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* NOTE(review): for an unrecognized code, rv stays NULL and is
+			 * stored as-is — TODO confirm the attribute is allowed to be
+			 * NULL at the Python layer. */
+			PyObject *rv = NULL;
+			switch (bufferint16) {
+				case SQL_NNC_NON_NULL:
+					Py_INCREF(Py_True);
+					rv = Py_True;
+					break;
+				case SQL_NNC_NULL:
+					Py_INCREF(Py_False);
+					rv = Py_False;
+					break;
+				default:
+					break;
+			}
+			return_value->NON_NULLABLE_COLUMNS = rv;
+		}
+		return (PyObject *)return_value;
+	}
+	Py_RETURN_FALSE;
+}
+
+/*!# ibm_db.client_info
+ *
+ * ===Description
+ * object ibm_db.client_info ( resource connection )
+ *
+ * This function returns a read-only object with information about the IBM Data
+ * Server database client. The following table lists the client properties:
+ *
+ * ====IBM Data Server client properties
+ *
+ * APPL_CODEPAGE:: The application code page.
+ *
+ * CONN_CODEPAGE:: The code page for the current connection.
+ *
+ * DATA_SOURCE_NAME:: The data source name (DSN) used to create the current
+ * connection to the database.
+ *
+ * DRIVER_NAME:: The name of the library that implements the Call Level
+ * Interface (CLI) specification.
+ *
+ * DRIVER_ODBC_VER:: The version of ODBC that the IBM Data Server client
+ * supports. This returns a string "MM.mm" where MM is the major version and mm
+ * is the minor version. The IBM Data Server client always returns "03.51".
+ *
+ * DRIVER_VER:: The version of the client, in the form of a string "MM.mm.uuuu"
+ * where MM is the major version, mm is the minor version, and uuuu is the
+ * update. For example, "08.02.0001" represents major version 8, minor version
+ * 2, update 1. (string)
+ *
+ * ODBC_SQL_CONFORMANCE:: There are three levels of ODBC SQL grammar supported
+ * by the client: MINIMAL (Supports the minimum ODBC SQL grammar), CORE
+ * (Supports the core ODBC SQL grammar), EXTENDED (Supports extended ODBC SQL
+ * grammar).
+ *
+ * ODBC_VER:: The version of ODBC that the ODBC driver manager supports. This
+ * returns a string "MM.mm.rrrr" where MM is the major version, mm is the minor
+ * version, and rrrr is the release. The client always returns "03.01.0000".
+ *
+ * ===Parameters
+ *
+ * ====connection
+ *
+ *	  Specifies an active IBM Data Server client connection.
+ *
+ * ===Return Values
+ *
+ * Returns an object on a successful call. Returns FALSE on failure.
+ */
+/* Builds the le_client_info object returned by ibm_db.client_info() with
+ * one SQLGetInfo() call per documented client property (see the comment
+ * block above).  Any SQL_ERROR short-circuits and returns False. */
+static PyObject *ibm_db_client_info(PyObject *self, PyObject *args)			
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res = NULL;
+	int rc = 0;
+	/* Scratch buffer reused by every string-valued SQLGetInfo call below. */
+	char buffer255[255];
+	SQLSMALLINT bufferint16;
+	SQLUINTEGER bufferint32;
+
+	/* NOTE(review): return_value is allocated before argument validation;
+	 * the early "return NULL" / Py_RETURN_FALSE paths below appear to leak
+	 * it — TODO confirm against client_infoType's dealloc. */
+	le_client_info *return_value = PyObject_NEW(le_client_info, 
+										 &client_infoType);
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");
+			return NULL;
+		}
+
+		/* DRIVER_NAME */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DRIVER_NAME, (SQLPOINTER)buffer255, 
+					  sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DRIVER_NAME = StringOBJ_FromASCII(buffer255);
+		}
+	
+		/* DRIVER_VER */
+		memset(buffer255, 0, sizeof(buffer255));
+	
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DRIVER_VER, (SQLPOINTER)buffer255, 
+						  sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DRIVER_VER = StringOBJ_FromASCII(buffer255);
+		}
+	
+		/* DATA_SOURCE_NAME */
+		memset(buffer255, 0, sizeof(buffer255));
+	
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DATA_SOURCE_NAME, 
+					(SQLPOINTER)buffer255, sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+										 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DATA_SOURCE_NAME = StringOBJ_FromASCII(buffer255);
+		}
+
+		/* DRIVER_ODBC_VER */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_DRIVER_ODBC_VER, 
+					  (SQLPOINTER)buffer255, sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+
+		if ( rc == SQL_ERROR ) {
+		        _python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+													NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->DRIVER_ODBC_VER = StringOBJ_FromASCII(buffer255);
+		}
+
+#ifndef PASE	  /* i5/OS ODBC_VER handled natively */
+		/* ODBC_VER */
+		memset(buffer255, 0, sizeof(buffer255));
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_ODBC_VER, (SQLPOINTER)buffer255, 
+						  sizeof(buffer255), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->ODBC_VER = StringOBJ_FromASCII(buffer255);
+		}
+#endif /* PASE */
+
+		/* ODBC_SQL_CONFORMANCE */
+		bufferint16 = 0;
+		memset(buffer255, 0, sizeof(buffer255));
+	
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_ODBC_SQL_CONFORMANCE, &bufferint16, 
+					  sizeof(bufferint16), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			/* Unrecognized codes fall through to an empty string (buffer255
+			 * was just zeroed). */
+			switch (bufferint16) {
+				case SQL_OSC_MINIMUM:
+					strcpy((char *)buffer255, "MINIMUM");
+					break;
+				case SQL_OSC_CORE:
+					strcpy((char *)buffer255, "CORE");
+					break;
+				case SQL_OSC_EXTENDED:
+					strcpy((char *)buffer255, "EXTENDED");
+					break;
+				default:
+					break;
+			}
+			return_value->ODBC_SQL_CONFORMANCE = StringOBJ_FromASCII(buffer255);
+		}
+	
+#ifndef	PASE	  /* i5/OS APPL_CODEPAGE handled natively */
+		/* APPL_CODEPAGE */
+		bufferint32 = 0;
+	
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_APPLICATION_CODEPAGE, &bufferint32, 
+					  sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+											 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->APPL_CODEPAGE = PyInt_FromLong(bufferint32);
+		}
+	
+		/* CONN_CODEPAGE */
+		bufferint32 = 0;
+	
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetInfo(conn_res->hdbc, SQL_CONNECT_CODEPAGE, &bufferint32, 
+						  sizeof(bufferint32), NULL);
+		Py_END_ALLOW_THREADS;
+	
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1, 
+										 NULL, -1, 1);
+			PyErr_Clear();
+			Py_RETURN_FALSE;
+		} else {
+			return_value->CONN_CODEPAGE = PyInt_FromLong(bufferint32);
+		}
+#endif /* PASE */
+
+		return (PyObject *)return_value;
+	}
+	PyErr_Clear();
+	Py_RETURN_FALSE;
+}
+
+/*!# ibm_db.active
+ *
+ * ===Description
+ * Py_True/Py_False ibm_db.active(resource connection)
+ *
+ * Checks if the specified connection resource is active
+ *
+ * Returns Py_True if the given connection resource is active
+ *
+ * ===Parameters
+ * ====connection
+ *		The connection resource to be validated.
+ *
+ * ===Return Values
+ *
+ * Returns Py_True if the given connection resource is active, otherwise it will
+ * return Py_False
+ */
+/* Reports whether the supplied connection resource is alive by querying
+ * the SQL_ATTR_PING_DB connection attribute (skipped on PASE/i5). */
+static PyObject *ibm_db_active(PyObject *self, PyObject *args)			
+{
+	PyObject *py_conn_res = NULL;
+	conn_handle *conn_res = NULL;
+	int rc = 0;
+	/* Ping time reported by the driver; stays zero when unreachable. */
+	SQLINTEGER ping_time = 0;
+
+	if (!PyArg_ParseTuple(args, "O", &py_conn_res))
+		return NULL;
+
+	if (!NIL_P(py_conn_res) && (py_conn_res != Py_None)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		}
+		conn_res = (conn_handle *)py_conn_res;
+#ifndef PASE
+		rc = SQLGetConnectAttr(conn_res->hdbc, SQL_ATTR_PING_DB, 
+			(SQLPOINTER)&ping_time, 0, NULL);
+		if ( rc == SQL_ERROR ) {
+			/* Record diagnostics but report the failure as False rather
+			 * than raising. */
+			_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, rc, 1,
+				NULL, -1, 1);
+			PyErr_Clear();
+		}
+#endif /* PASE */
+	}
+	/*
+	 * SQLGetConnectAttr with SQL_ATTR_PING_DB yields 0 on failure and the
+	 * ping time on success; collapse that to a plain True/False answer.
+	 */
+	if (ping_time != 0) {
+		Py_RETURN_TRUE;
+	}
+	Py_RETURN_FALSE;
+}
+
+/*!# ibm_db.get_option
+ *
+ * ===Description
+ * mixed ibm_db.get_option ( resource resc, int options, int type )
+ *
+ * Returns a value, that is the current setting of a connection or statement
+ * attribute.
+ *
+ * ===Parameters
+ *
+ * ====resc
+ *		A valid connection or statement resource containing a result set.
+ *
+ * ====options
+ *		The options to be retrieved
+ *
+ * ====type
+ *		A field that specifies the resource type (1 = Connection,
+ *		non - 1 = Statement)
+ *
+ * ===Return Values
+ *
+ * Returns the current setting of the resource attribute provided.
+ */
+/* ibm_db.get_option(resc, options, type)
+ *
+ * Retrieves the current setting of a connection attribute (type == 1, via
+ * SQLGetConnectAttr) or a statement attribute (any other type, via
+ * SQLGetStmtAttr — only SQL_ATTR_CURSOR_TYPE is supported).  Returns the
+ * value, False on a CLI error, or raises Exception on bad arguments.
+ */
+static PyObject *ibm_db_get_option(PyObject *self, PyObject *args)
+{
+	PyObject *conn_or_stmt = NULL;
+	PyObject *retVal = NULL;
+	PyObject *py_op_integer = NULL;
+	PyObject *py_type = NULL;
+	SQLCHAR *value = NULL;
+	SQLINTEGER value_int = 0;
+	conn_handle *conn_res = NULL;
+	stmt_handle *stmt_res = NULL;
+	SQLINTEGER op_integer = 0;
+	long type = 0;
+	int rc;
+
+	if (!PyArg_ParseTuple(args, "OOO", &conn_or_stmt, &py_op_integer, &py_type))
+		return NULL;
+
+	if (!NIL_P(conn_or_stmt)) {
+		if (!NIL_P(py_op_integer)) {
+			if (PyInt_Check(py_op_integer)) {
+				op_integer = (SQLINTEGER) PyInt_AsLong(py_op_integer);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		if (!NIL_P(py_type)) {
+			if (PyInt_Check(py_type)) {
+				type = PyInt_AsLong(py_type);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		/* Checking to see if we are getting a connection option (1) or a 
+		* statement option (non - 1) 
+		*/
+		if (type == 1) {
+			if (!PyObject_TypeCheck(conn_or_stmt, &conn_handleType)) {
+				PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+				return NULL;
+			}
+			conn_res = (conn_handle *)conn_or_stmt;
+
+			/* Check to ensure the connection resource given is active */
+			if (!conn_res->handle_active) {
+				PyErr_SetString(PyExc_Exception, "Connection is not active");
+				return NULL;
+			}
+			/* Check that the option given is not null */
+			if (!NIL_P(py_op_integer)) {
+				/* ACCTSTR_LEN is the largest possible length of the options to 
+				* retrieve 
+				*/
+				value = (SQLCHAR*)ALLOC_N(char, ACCTSTR_LEN + 1);
+				if ( value == NULL ) {
+					PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+					return NULL;
+				}
+
+				rc = SQLGetConnectAttr((SQLHDBC)conn_res->hdbc, op_integer, 
+					(SQLPOINTER)value, ACCTSTR_LEN, NULL);
+				if (rc == SQL_ERROR) {
+					_python_ibm_db_check_sql_errors(conn_res->hdbc, SQL_HANDLE_DBC, 
+						rc, 1, NULL, -1, 1);
+					if(value != NULL) {
+						PyMem_Del(value);
+						value = NULL;
+					}
+					PyErr_Clear();
+					Py_RETURN_FALSE;
+				}
+				retVal = StringOBJ_FromASCII((char *)value);
+				if(value != NULL) {
+					PyMem_Del(value);
+					value = NULL;
+				}
+				return retVal;
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+			/* At this point we know we are to retreive a statement option */
+		} else {
+			/* Fix: validate the resource really is a statement handle before
+			 * casting — the original cast blindly, unlike the connection
+			 * branch above, and would dereference garbage for a wrong type. */
+			if (!PyObject_TypeCheck(conn_or_stmt, &stmt_handleType)) {
+				PyErr_SetString( PyExc_Exception, "Supplied statement object Parameter is invalid" );
+				return NULL;
+			}
+			stmt_res = (stmt_handle *)conn_or_stmt;
+
+			/* Check that the option given is not null */
+			if (!NIL_P(py_op_integer)) {
+				/* Checking that the option to get is the cursor type because that 
+				* is what we support here 
+				*/
+				if (op_integer == SQL_ATTR_CURSOR_TYPE) {
+					rc = SQLGetStmtAttr((SQLHSTMT)stmt_res->hstmt, op_integer, 
+						&value_int, SQL_IS_INTEGER, NULL);
+					if (rc == SQL_ERROR) {
+						_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+						PyErr_Clear();
+						Py_RETURN_FALSE;
+					}
+					return PyInt_FromLong(value_int);
+				} else {
+					PyErr_SetString(PyExc_Exception,"Supplied parameter is invalid");
+					return NULL;
+				}
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+	}
+	PyErr_Clear();
+	Py_RETURN_FALSE;
+}
+
+/* Toggles DB2 CLI statement chaining on/off for execute_many support.
+ *
+ * flag is SQL_ATTR_CHAINING_BEGIN or SQL_ATTR_CHAINING_END.  On END, any
+ * server-side diagnostics plus the client_err_cnt messages collected in
+ * error_list are combined into one Exception listing every failed row.
+ * Returns the SQLSetStmtAttrW() return code; a Python exception is set on
+ * failure.
+ */
+static int _ibm_db_chaining_flag(stmt_handle *stmt_res, SQLINTEGER flag, error_msg_node *error_list, int client_err_cnt) {
+	int rc;
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLSetStmtAttrW((SQLHSTMT)stmt_res->hstmt, flag, (SQLPOINTER)SQL_TRUE, SQL_IS_INTEGER);
+	Py_END_ALLOW_THREADS;
+	if ( flag == SQL_ATTR_CHAINING_BEGIN ) {
+		if ( rc == SQL_ERROR ) {
+			_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+			PyErr_SetString(PyExc_Exception, IBM_DB_G(__python_stmt_err_msg));
+		}
+	} else {
+		if ( (rc != SQL_SUCCESS) || (client_err_cnt != 0) ) {
+			SQLINTEGER errNo = 0;
+			PyObject *errTuple = NULL;
+			SQLINTEGER err_cnt = 0;
+			PyObject *err_msg = NULL, *err_fmtObj = NULL;
+			char *err_fmt = NULL;
+			char *fmt_tail = NULL;
+			size_t fmt_cap = 0;
+			if ( rc != SQL_SUCCESS ) {
+				SQLGetDiagField(SQL_HANDLE_STMT, (SQLHSTMT)stmt_res->hstmt, 0, SQL_DIAG_NUMBER, (SQLPOINTER) &err_cnt, SQL_IS_POINTER, NULL);
+			}
+			errTuple = PyTuple_New(err_cnt + client_err_cnt);
+			/* Fix: the old code sprintf()'d err_fmt into itself (undefined
+			 * behaviour per C99 7.19.6.6) and sized the buffer without room
+			 * for the NUL terminator or multi-digit error numbers.  Size
+			 * one "\nError NNN: %s \n" segment generously per error and
+			 * append through a tail pointer instead. */
+			fmt_cap = (strlen("\nError %d: %s \n") + 16) * (size_t)(err_cnt + client_err_cnt) + 1;
+			err_fmt = (char *)PyMem_Malloc(fmt_cap);
+			if ( err_fmt == NULL ) {
+				Py_XDECREF(errTuple);
+				PyErr_NoMemory();
+				return rc;
+			}
+			fmt_tail = err_fmt;
+			*fmt_tail = '\0';
+			errNo = 1;
+			/* Client-side errors first, in list order. */
+			while( error_list != NULL ) {
+				fmt_tail += sprintf(fmt_tail, "\nError %d: %s", (int)errNo, "%s \n");
+				PyTuple_SetItem(errTuple, errNo - 1, StringOBJ_FromASCII(error_list->err_msg));
+				error_list = error_list->next;
+				errNo++;
+			}
+			/* Then the server-side diagnostics, one record at a time. */
+			for ( errNo = client_err_cnt + 1; errNo <= (err_cnt + client_err_cnt); errNo++ ) {
+				fmt_tail += sprintf(fmt_tail, "\nError %d: %s", (int)errNo, "%s \n");
+				_python_ibm_db_check_sql_errors((SQLHSTMT)stmt_res->hstmt, SQL_HANDLE_STMT, SQL_ERROR, 1, NULL, -1, (errNo - client_err_cnt));
+				PyTuple_SetItem(errTuple, errNo - 1, StringOBJ_FromASCII(IBM_DB_G(__python_stmt_err_msg)));
+			}
+			err_fmtObj = StringOBJ_FromASCII(err_fmt);
+			err_msg = StringObj_Format(err_fmtObj, errTuple);
+			if ( err_fmtObj != NULL ) { Py_XDECREF(err_fmtObj); }
+			if ( err_fmt != NULL ) { PyMem_Free(err_fmt); }
+			PyErr_SetObject(PyExc_Exception, err_msg);
+		}
+	}
+	return rc;
+}
+
+/* Append a copy of err_msg to the tail of the client-side error list whose
+ * dummy head node is head_error_list.  The head node itself carries no
+ * message; real entries start at head_error_list->next. */
+static void _build_client_err_list(error_msg_node *head_error_list, char *err_msg) {
+	error_msg_node *tmp_err = NULL, *curr_err = head_error_list->next, *prv_err = NULL;
+	tmp_err = ALLOC(error_msg_node);
+	memset(tmp_err, 0, sizeof(error_msg_node));
+	/* NOTE(review): unbounded strcpy into the node's fixed err_msg field --
+	 * assumes callers keep messages within the buffer size; verify. */
+	strcpy(tmp_err->err_msg, err_msg);
+	tmp_err->next = NULL;
+	/* Walk to the current tail; prv_err ends as the last node, if any. */
+	while( curr_err != NULL ) {
+		prv_err = curr_err;
+		curr_err = curr_err->next;
+	}
+
+	if ( head_error_list->next == NULL ) {
+		head_error_list->next = tmp_err;
+	} else {
+		prv_err->next = tmp_err;
+	}	
+} 
+
+/*
+ * ibm_db.execute_many -- can be used to execute an SQL with multiple values of parameter marker.
+ * ===Description
+ * int ibm_db.execute_many(IBM_DBStatement, Parameters[, Options])
+ * Returns number of inserted/updated/deleted rows if batch executed successfully.
+ * Returns NULL if the batch fully or partially fails (all rows are executed except those for which an error occurs).
+ */
+/* Execute a prepared statement once per tuple in `params`, using CLI
+ * statement chaining to batch the round trips.  Rows with client-side
+ * problems (wrong arity, non-tuple, non-homogeneous types) are recorded in a
+ * local error list and skipped; server-side errors surface when chaining
+ * ends.  Returns the affected row count, or NULL with an exception set. */
+static PyObject* ibm_db_execute_many (PyObject *self, PyObject *args) {
+	PyObject *options = NULL;
+	PyObject *params = NULL;
+	PyObject *py_stmt_res = NULL;
+	stmt_handle *stmt_res = NULL;
+	char error[DB2_MAX_ERR_MSG_LEN];
+	PyObject *data = NULL;
+	error_msg_node *head_error_list = NULL;
+	int err_count = 0;
+
+	int rc;
+	int i = 0;
+	SQLSMALLINT numOpts = 0;
+	int numOfRows = 0;
+	int numOfParam = 0;
+	SQLINTEGER row_cnt = 0;
+	int chaining_start = 0;
+
+	SQLSMALLINT *data_type;
+	SQLUINTEGER precision;
+	SQLSMALLINT scale;
+	SQLSMALLINT nullable;
+	SQLSMALLINT *ref_data_type;
+
+	/* Get the parameters 
+	 *  	1. statement handler Object
+	 *  	2. Parameters
+	 *  	3. Options (optional) */
+	if ( !PyArg_ParseTuple(args, "OO|O", &py_stmt_res, &params, &options) )
+		return NULL;
+	
+	if ( !NIL_P(py_stmt_res) ) {
+		if (!PyObject_TypeCheck(py_stmt_res, &stmt_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied statement object parameter is invalid" );
+			return NULL;
+		} else {
+			stmt_res = (stmt_handle *)py_stmt_res;
+		}
+		/* Free any cursors that might have been allocated in a previous call to SQLExecute */
+		Py_BEGIN_ALLOW_THREADS;
+		SQLFreeStmt((SQLHSTMT)stmt_res->hstmt, SQL_CLOSE);
+		Py_END_ALLOW_THREADS;
+		
+		_python_ibm_db_clear_stmt_err_cache();
+		stmt_res->head_cache_list = NULL;
+		stmt_res->current_node = NULL;
+
+		/* Bind parameters */
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLNumParams((SQLHSTMT)stmt_res->hstmt, (SQLSMALLINT*)&numOpts);
+		Py_END_ALLOW_THREADS;
+		
+		/* data_type[i] caches the server-described SQL type of marker i+1;
+		 * ref_data_type[i] remembers the Python type seen for that marker in
+		 * the first row (-1 = not yet established), so later rows can be
+		 * checked for homogeneity.
+		 * NOTE(review): neither array is ever freed on any return path in
+		 * this function -- memory leak; verify against ALLOC_N's contract. */
+		data_type = (SQLSMALLINT*)ALLOC_N(SQLSMALLINT, numOpts);
+		ref_data_type = (SQLSMALLINT*)ALLOC_N(SQLSMALLINT, numOpts);
+		for ( i = 0; i < numOpts; i++) {
+			ref_data_type[i] = -1;
+		}
+		if ( numOpts != 0 ) {
+			/* Describe each marker once so per-row binding can reuse the
+			 * server-reported type/precision/scale. */
+			for ( i = 0; i < numOpts; i++) {
+				Py_BEGIN_ALLOW_THREADS;
+				rc = SQLDescribeParam((SQLHSTMT)stmt_res->hstmt, i + 1,
+					(SQLSMALLINT*)(data_type + i), &precision, (SQLSMALLINT*)&scale,
+					(SQLSMALLINT*)&nullable);
+				Py_END_ALLOW_THREADS;
+
+				if ( rc == SQL_ERROR ) {
+					_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT,
+												rc, 1, NULL, -1, 1);
+					PyErr_SetString(PyExc_Exception, IBM_DB_G(__python_stmt_err_msg));
+					return NULL;
+				}
+
+				build_list(stmt_res, i + 1, data_type[i], precision, 
+							  scale, nullable);
+			}
+		}
+
+		/* Execute SQL for all set of parameters */
+		numOfRows = PyTuple_Size(params);
+		/* Dummy head node for the client-side error list (see
+		 * _build_client_err_list). */
+		head_error_list = ALLOC(error_msg_node);
+		memset(head_error_list, 0, sizeof(error_msg_node));
+		head_error_list->next = NULL;
+		if ( numOfRows > 0 ) {
+			for ( i = 0; i < numOfRows; i++ ) {
+				int j = 0;
+				param_node *curr = NULL;
+				PyObject *param = PyTuple_GET_ITEM(params, i);
+				/* error[] doubles as a "this row failed client-side" flag:
+				 * non-empty means skip execution for this row. */
+				error[0] = '\0';
+				if ( !PyTuple_Check(param) ) {
+					sprintf(error, "Value parameter: %d is not a tuple", i + 1);
+					_build_client_err_list(head_error_list, error);
+					err_count++;
+					continue;
+				}
+				
+				numOfParam = PyTuple_Size(param);
+				if ( numOpts < numOfParam ) {
+					/* More are passed in -- Warning - Use the max number present */
+					sprintf(error, "Value parameter tuple: %d has more no of param", i + 1);
+					_build_client_err_list(head_error_list, error);
+					err_count++;
+					continue;
+				} else if ( numOpts > numOfParam ) {
+					/* If there are less params passed in, than are present 
+					* -- Error 
+					*/
+					sprintf(error, "Value parameter tuple: %d has less no of param", i + 1);
+					_build_client_err_list(head_error_list, error);
+					err_count++;
+					continue;
+				}
+
+				/* Bind values from the parameters_tuple to params */
+				curr = stmt_res->head_cache_list;
+
+				while ( curr != NULL ) {
+					data = PyTuple_GET_ITEM(param, j);
+					if ( data == NULL ) {
+						sprintf(error, "NULL value passed for value parameter: %d", i + 1);
+						_build_client_err_list(head_error_list, error);
+						err_count++;
+						break;
+					}
+
+					if ( chaining_start ) {
+						/* After chaining starts, every non-None value must
+						 * match the Python type recorded for this marker. */
+						if ( ( TYPE(data) != PYTHON_NIL ) && ( ref_data_type[curr->param_num - 1] != TYPE(data) ) ) {
+							sprintf(error, "Value parameters array %d is not homogeneous with privious parameters array", i + 1);
+							_build_client_err_list(head_error_list, error);
+							err_count++;
+							break;
+						}
+					} else {
+						if ( TYPE(data) != PYTHON_NIL ) {
+							ref_data_type[curr->param_num -1] = TYPE(data);
+						} else {
+							/* First row has None here: scan ahead for the
+							 * first non-None value in this column to pin the
+							 * reference type; all-None columns stay NIL. */
+							int i_tmp;
+							PyObject *param_tmp = NULL;
+							PyObject *data_tmp = NULL;
+							i_tmp = i + 1;
+							for ( i_tmp = i + 1; i_tmp < numOfRows; i_tmp++ ) {
+								param_tmp = PyTuple_GET_ITEM(params, i_tmp);
+								if ( !PyTuple_Check(param_tmp) ) {
+									continue;
+								}
+								data_tmp = PyTuple_GET_ITEM(param_tmp, j);
+								if ( TYPE(data_tmp) != PYTHON_NIL ) {
+									ref_data_type[curr->param_num -1] = TYPE(data_tmp);
+									break;
+								} else {
+									continue;
+								}	
+							}
+							if ( ref_data_type[curr->param_num -1] == -1 ) {
+								ref_data_type[curr->param_num -1] = PYTHON_NIL;
+							}
+						}
+						
+					}
+
+					curr->data_type = data_type[curr->param_num - 1];
+					if ( TYPE(data) != PYTHON_NIL ) {
+						rc = _python_ibm_db_bind_data(stmt_res, curr, data);
+					} else {
+						/* None: bind an SQL NULL, choosing the C type that
+						 * matches the column's reference Python type. */
+						SQLSMALLINT valueType = 0;
+						switch( ref_data_type[curr->param_num -1] ) {
+							case PYTHON_FIXNUM:
+								if(curr->data_type == SQL_BIGINT || curr->data_type == SQL_DECIMAL ) {
+									valueType = SQL_C_CHAR;
+								} else {
+									valueType = SQL_C_LONG;
+								}
+								break;
+							case PYTHON_FALSE:
+							case PYTHON_TRUE:
+								valueType = SQL_C_LONG;
+								break;
+							case PYTHON_FLOAT:
+								valueType = SQL_C_DOUBLE;
+								break;
+							case PYTHON_UNICODE:
+								switch( curr->data_type ) {
+									case SQL_BLOB:
+									case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+									case SQL_LONGVARBINARY:
+#endif /* PASE */
+									case SQL_VARBINARY:
+										valueType = SQL_C_BINARY;
+										break;
+									default:
+										valueType = SQL_C_WCHAR;
+								}
+								break;
+							case PYTHON_STRING:
+								switch( curr->data_type ) {
+									case SQL_BLOB:
+									case SQL_BINARY:
+#ifndef PASE /* i5/OS SQL_LONGVARBINARY is SQL_VARBINARY */
+									case SQL_LONGVARBINARY:
+#endif /* PASE */
+									case SQL_VARBINARY:
+										valueType = SQL_C_BINARY;
+										break;
+									default:
+										valueType = SQL_C_CHAR;
+								}
+								break;
+							case PYTHON_DATE:
+								valueType = SQL_C_TYPE_DATE;
+								break;
+							case PYTHON_TIME:
+								valueType = SQL_C_TYPE_TIME;
+								break;
+							case PYTHON_TIMESTAMP:
+								valueType = SQL_C_TYPE_TIMESTAMP;
+								break;
+							case PYTHON_DECIMAL:
+								valueType = SQL_C_CHAR;
+								break;
+							case PYTHON_NIL:
+								valueType = SQL_C_DEFAULT;
+								break;
+						}
+						curr->ivalue = SQL_NULL_DATA;
+
+						Py_BEGIN_ALLOW_THREADS;
+						rc = SQLBindParameter(stmt_res->hstmt, curr->param_num, curr->param_type, valueType, curr->data_type, curr->param_size, curr->scale, &curr->ivalue, 0, (SQLLEN *)&(curr->ivalue));
+						Py_END_ALLOW_THREADS;
+						if ( rc == SQL_ERROR ) {
+							_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+						}
+					}	
+					if ( rc != SQL_SUCCESS ) {
+						sprintf(error, "Binding Error 1: %s", 
+								IBM_DB_G(__python_stmt_err_msg));
+						_build_client_err_list(head_error_list, error);
+						err_count++;
+						break;
+					}
+					curr = curr->next;
+					j++;
+				}
+
+				/* Begin chaining once, on the first row that bound cleanly. */
+				if ( !chaining_start && ( error[0] == '\0' ) ) {
+					/* Set statement attribute SQL_ATTR_CHAINING_BEGIN */
+					rc = _ibm_db_chaining_flag(stmt_res, SQL_ATTR_CHAINING_BEGIN, NULL, 0);
+					chaining_start = 1;
+					if ( rc != SQL_SUCCESS ) {
+						/* NOTE(review): head_error_list (and the type arrays)
+						 * leak on this early return; verify. */
+						return NULL;
+					}
+				}
+
+				if ( error[0] == '\0' ) {
+					Py_BEGIN_ALLOW_THREADS;
+					rc = SQLExecute((SQLHSTMT)stmt_res->hstmt);
+					Py_END_ALLOW_THREADS;
+
+					/* Stream data-at-exec (LOB) parameter values on demand. */
+					if ( rc == SQL_NEED_DATA ) {
+						SQLPOINTER valuePtr;
+						rc = SQLParamData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER *)&valuePtr);
+						while ( rc == SQL_NEED_DATA ) {
+							/* passing data value for a parameter */
+							if ( !NIL_P(((param_node*)valuePtr)->svalue)) {
+								Py_BEGIN_ALLOW_THREADS;
+								rc = SQLPutData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER)(((param_node*)valuePtr)->svalue), ((param_node*)valuePtr)->ivalue);
+								Py_END_ALLOW_THREADS;
+							} else {
+								Py_BEGIN_ALLOW_THREADS;
+								rc = SQLPutData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER)(((param_node*)valuePtr)->uvalue), ((param_node*)valuePtr)->ivalue);
+								Py_END_ALLOW_THREADS;
+							}
+							if ( rc == SQL_ERROR ) {
+								_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+								sprintf(error, "Sending data failed: %s", IBM_DB_G(__python_stmt_err_msg));
+								_build_client_err_list(head_error_list, error);
+								err_count++;
+								break;
+							}
+							rc = SQLParamData((SQLHSTMT)stmt_res->hstmt, (SQLPOINTER *)&valuePtr);
+						}
+					}
+				}
+			}
+		} else {
+			/* Empty batch: nothing to do, zero rows affected. */
+			return PyInt_FromLong(0);
+			
+		}
+		
+		/* Set statement attribute SQL_ATTR_CHAINING_END */
+		rc = _ibm_db_chaining_flag(stmt_res, SQL_ATTR_CHAINING_END, head_error_list->next, err_count);
+		/* Free the whole error list, including the dummy head node. */
+		if ( head_error_list != NULL ) {
+			error_msg_node *tmp_err = NULL;
+			while ( head_error_list != NULL ) {
+				tmp_err = head_error_list;
+				head_error_list = head_error_list->next;
+				PyMem_Del(tmp_err);
+			}
+		}
+		if ( rc != SQL_SUCCESS || err_count != 0 ) {
+			/* _ibm_db_chaining_flag already set the exception above. */
+			return NULL;
+		}
+	} else {
+		PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+		return NULL;
+	
+	}
+
+	Py_BEGIN_ALLOW_THREADS;
+	rc = SQLRowCount((SQLHSTMT)stmt_res->hstmt, &row_cnt);
+	Py_END_ALLOW_THREADS;
+	
+	if ( (rc == SQL_ERROR) && (stmt_res != NULL) ) {
+		_python_ibm_db_check_sql_errors(stmt_res->hstmt, SQL_HANDLE_STMT, rc,1, NULL, -1, 1);
+		sprintf(error, "SQLRowCount failed: %s",IBM_DB_G(__python_stmt_err_msg));
+		PyErr_SetString(PyExc_Exception, error);
+		return NULL;
+	}
+	return PyInt_FromLong(row_cnt);
+}
+
+/*
+ * ===Description
+ *  ibm_db.callproc( conn_handle conn_res, char *procName, (In/INOUT/OUT parameters tuple) )
+ *
+ * Returns resultset and INOUT/OUT parameters
+ * 
+ * ===Parameters
+ * =====  conn_handle
+ *		a valid connection resource
+ * ===== procedure Name
+ *		a valid procedure name
+ *
+ * ===== parameters tuple
+ *		parameters tuple containing In/OUT/INOUT  variables, 
+ *
+ * ===Returns Values
+ * ===== stmt_res
+ *		statement resource containing the result set
+ *
+ * ==== INOUT/OUT variables tuple
+ *		tuple containing all INOUT/OUT variables
+ * 
+ * If the procedure is not found, NULL is returned
+ */
+/* Call a stored procedure: builds "CALL name( ?, ?, ... )" (or "CALL name( )"
+ * when no parameters tuple is given), prepares and executes it, then returns
+ * either the bare statement resource or a tuple of (stmt_res, out_param...)
+ * with INOUT/OUT values converted to Python objects.  Returns NULL with an
+ * exception set on failure. */
+static PyObject* ibm_db_callproc(PyObject *self, PyObject *args){
+	PyObject *py_conn_res = NULL;
+	PyObject *parameters_tuple = NULL;
+	PyObject *outTuple = NULL, *pyprocName = NULL, *data = NULL;
+	conn_handle *conn_res = NULL;
+	stmt_handle *stmt_res = NULL;
+	param_node *tmp_curr = NULL;
+	int numOfParam = 0;
+
+	if (!PyArg_ParseTuple(args, "OO|O", &py_conn_res, &pyprocName, &parameters_tuple)) {
+		return NULL;
+	}
+	
+	if (!NIL_P(py_conn_res) && pyprocName != Py_None) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		if (StringObj_Size(pyprocName) == 0) {
+			PyErr_SetString(PyExc_Exception, "Empty Procedure Name");
+			return NULL;
+		}
+		
+		if (!NIL_P(parameters_tuple) ) {
+			/* Build "CALL <name>(  ?, ?, ... )" with one marker per element. */
+			PyObject *subsql1 = NULL;
+			PyObject *subsql2 = NULL;
+			char *strsubsql = NULL;
+			PyObject *sql = NULL;
+			int i=0;
+			if (!PyTuple_Check(parameters_tuple)) {
+				PyErr_SetString(PyExc_Exception, "Param is not a tuple");
+				return NULL;
+			}
+			numOfParam = PyTuple_Size(parameters_tuple);
+			subsql1 = StringOBJ_FromASCII("CALL ");
+			subsql2 = PyUnicode_Concat(subsql1, pyprocName);
+			Py_XDECREF(subsql1);
+			strsubsql = (char *)PyMem_Malloc(sizeof(char)*((strlen("(  )") + strlen(", ?")*numOfParam) + 2));
+			if (strsubsql == NULL) {
+				/* NOTE(review): subsql2 is leaked on this failure path. */
+				PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+				return NULL;
+			}
+			strsubsql[0] = '\0';
+			strcat(strsubsql, "( ");
+			for (i = 0; i < numOfParam; i++) {
+				if (i == 0) {
+					strcat(strsubsql, " ?");
+				} else {
+					strcat(strsubsql, ", ?");
+				}	
+			}
+			strcat(strsubsql, " )");
+			subsql1 = StringOBJ_FromASCII(strsubsql);
+			sql = PyUnicode_Concat(subsql2, subsql1);
+			Py_XDECREF(subsql1);
+			Py_XDECREF(subsql2);
+			stmt_res = (stmt_handle *)_python_ibm_db_prepare_helper(conn_res, sql, NULL);
+			PyMem_Del(strsubsql);
+			Py_XDECREF(sql);
+			if(NIL_P(stmt_res)) {
+				return NULL;
+			}
+			/* Bind values from the parameters_tuple to params */
+			/* Every marker is bound SQL_PARAM_INPUT_OUTPUT so OUT values can
+			 * be read back after execution. */
+			for (i = 0; i < numOfParam; i++ ) {	
+				PyObject *bind_result = NULL;
+				data = PyTuple_GET_ITEM(parameters_tuple, i);
+				bind_result = _python_ibm_db_bind_param_helper(4, stmt_res, i+1, data, SQL_PARAM_INPUT_OUTPUT, 0, 0, 0, 0);
+				if (NIL_P(bind_result)){
+					return NULL;
+				}
+			}
+		} else {
+			/* No parameters: build "CALL <name>( )". */
+			PyObject *subsql1 = NULL;
+			PyObject *subsql2 = NULL;
+			PyObject *sql = NULL;
+			subsql1 = StringOBJ_FromASCII("CALL ");
+			subsql2 = PyUnicode_Concat(subsql1, pyprocName);
+			Py_XDECREF(subsql1);
+			subsql1 = StringOBJ_FromASCII("( )");
+			sql = PyUnicode_Concat(subsql2, subsql1);
+			Py_XDECREF(subsql1);
+			Py_XDECREF(subsql2);
+			stmt_res = (stmt_handle *)_python_ibm_db_prepare_helper(conn_res, sql, NULL);
+			Py_XDECREF(sql);
+			if(NIL_P(stmt_res)) {
+				return NULL;
+			}
+		}
+	
+		if (!NIL_P(_python_ibm_db_execute_helper1(stmt_res, NULL))) {
+			tmp_curr = stmt_res->head_cache_list;
+			if(numOfParam != 0 && tmp_curr != NULL) {
+				int paramCount = 1;
+				/* Slot 0 holds the statement resource; slots 1..numOfParam
+				 * hold the post-execution parameter values.  PyTuple_SetItem
+				 * steals the reference to each stored object. */
+				outTuple = PyTuple_New(numOfParam + 1);
+				PyTuple_SetItem(outTuple, 0, (PyObject*)stmt_res); 
+				while(tmp_curr != NULL && (paramCount <= numOfParam)) {
+					/* NULL/NO_TOTAL indicators map to Py_None below. */
+					if ( (tmp_curr->bind_indicator != SQL_NULL_DATA && tmp_curr->bind_indicator != SQL_NO_TOTAL )) {
+						switch (tmp_curr->data_type) {
+							case SQL_SMALLINT:
+							case SQL_INTEGER:
+								PyTuple_SetItem(outTuple, paramCount, PyInt_FromLong(tmp_curr->ivalue));
+								paramCount++;
+								break;
+							case SQL_REAL:
+							case SQL_FLOAT:
+							case SQL_DOUBLE:
+								PyTuple_SetItem(outTuple, paramCount, PyFloat_FromDouble(tmp_curr->fvalue));
+								paramCount++;
+								break;
+							case SQL_TYPE_DATE:
+								PyTuple_SetItem(outTuple, paramCount, PyDate_FromDate(tmp_curr->date_value->year,
+									tmp_curr->date_value->month, tmp_curr->date_value->day));
+								paramCount++;
+								break;
+							case SQL_TYPE_TIME:
+								PyTuple_SetItem(outTuple, paramCount, PyTime_FromTime(tmp_curr->time_value->hour,
+									tmp_curr->time_value->minute, tmp_curr->time_value->second, 0));
+								paramCount++;
+								break;
+							case SQL_TYPE_TIMESTAMP:
+								/* CLI reports fraction in nanoseconds; Python
+								 * datetime takes microseconds, hence /1000. */
+								PyTuple_SetItem(outTuple, paramCount, PyDateTime_FromDateAndTime(tmp_curr->ts_value->year,
+									tmp_curr->ts_value->month, tmp_curr->ts_value->day, tmp_curr->ts_value->hour,
+									tmp_curr->ts_value->minute, tmp_curr->ts_value->second, tmp_curr->ts_value->fraction / 1000));
+								paramCount++;
+								break;
+							case SQL_BIGINT:
+								PyTuple_SetItem(outTuple, paramCount, PyLong_FromString(tmp_curr->svalue, NULL, 0));
+								paramCount++;
+								break;
+							default:
+								if (!NIL_P(tmp_curr->svalue)) {
+									PyTuple_SetItem(outTuple, paramCount, StringOBJ_FromASCII(tmp_curr->svalue));
+									paramCount++;
+								} else if (!NIL_P(tmp_curr->uvalue)) {
+									PyTuple_SetItem(outTuple, paramCount, getSQLWCharAsPyUnicodeObject(tmp_curr->uvalue, tmp_curr->bind_indicator));
+									paramCount++;
+								} else {
+									Py_INCREF(Py_None);
+									PyTuple_SetItem(outTuple, paramCount, Py_None);
+									paramCount++;
+								}
+								break;
+						}
+					} else {
+						Py_INCREF(Py_None);
+						PyTuple_SetItem(outTuple, paramCount, Py_None);
+						paramCount++;
+					}
+					tmp_curr = tmp_curr->next;
+				}
+			} else {
+				/* No OUT parameters to report: hand back the statement. */
+				outTuple = (PyObject *)stmt_res;
+			}
+		} else {
+			return NULL;
+		}
+		return outTuple;
+	} else {
+		PyErr_SetString(PyExc_Exception, "Connection Resource invalid or procedure name is NULL");
+		return NULL;
+	}
+}
+
+
+/*
+ * ibm_db.check_function_support-- can be used to query whether a  DB2 CLI or ODBC function is supported
+ * ===Description
+ * int ibm_db.check_function_support(ConnectionHandle, FunctionId)
+ * Returns Py_True if a DB2 CLI or ODBC function is supported
+ * return Py_False if a DB2 CLI or ODBC function is not supported
+ */
+/* ibm_db.check_function_support(conn, function_id) -> bool
+ * Asks the driver via SQLGetFunctions whether the given CLI/ODBC function id
+ * is supported on this connection.  Returns Py_True/Py_False; NULL with an
+ * exception set for bad arguments or an inactive connection. */
+static PyObject* ibm_db_check_function_support(PyObject *self, PyObject *args)
+{
+	PyObject *py_conn_res = NULL;
+	PyObject *py_funtion_id = NULL;
+	int funtion_id = 0;
+	conn_handle *conn_res = NULL;
+	int supported = 0;
+	int rc = 0;
+
+	if (!PyArg_ParseTuple(args, "OO", &py_conn_res, &py_funtion_id)) {
+		return NULL;
+	}
+
+	if (!NIL_P(py_conn_res)) {
+		if (!PyObject_TypeCheck(py_conn_res, &conn_handleType)) {
+			PyErr_SetString( PyExc_Exception, "Supplied connection object Parameter is invalid" );
+			return NULL;
+		} else {
+			conn_res = (conn_handle *)py_conn_res;
+		}
+		if (!NIL_P(py_funtion_id)) {
+			if (PyInt_Check(py_funtion_id)){
+				funtion_id = (int) PyInt_AsLong(py_funtion_id);
+			} else {
+				PyErr_SetString(PyExc_Exception, "Supplied parameter is invalid");
+				return NULL;
+			}
+		}
+		/* Check to ensure the connection resource given is active */
+		if (!conn_res->handle_active) {
+			PyErr_SetString(PyExc_Exception, "Connection is not active");				
+			return NULL;
+		 }
+
+		Py_BEGIN_ALLOW_THREADS;
+		rc = SQLGetFunctions(conn_res->hdbc, (SQLUSMALLINT) funtion_id, (SQLUSMALLINT*) &supported);
+		Py_END_ALLOW_THREADS;
+
+		/* A CLI error is treated as "not supported" rather than raised. */
+		if (rc == SQL_ERROR) {
+			Py_RETURN_FALSE;
+		}
+		else {
+			if(supported == SQL_TRUE) {
+				Py_RETURN_TRUE;
+			}
+			else {
+				Py_RETURN_FALSE;
+			}
+		}
+	
+	}
+	/* NOTE(review): reached when py_conn_res is None -- returning NULL
+	 * without setting an exception raises SystemError in CPython; an
+	 * explicit PyErr_SetString would be correct here. */
+	return NULL;
+}
+
+/*
+ * ibm_db.get_last_serial_value --	Gets the last inserted serial value from IDS
+ *
+ * ===Description
+ * string ibm_db.get_last_serial_value ( resource stmt )
+ *
+ * Returns a string, that is the last inserted value for a serial column for IDS. 
+ * The last inserted value could be auto-generated or entered explicitly by the user
+ * This function is valid for IDS (Informix Dynamic Server only)
+ *
+ * ===Parameters
+ *
+ * stmt
+ *		A valid statement resource.
+ *
+ * ===Return Values
+ *
+ * Returns a string representation of last inserted serial value on a successful call. 
+ * Returns FALSE on failure.
+ */
+/*
+PyObject *ibm_db_get_last_serial_value(int argc, PyObject **argv, PyObject *self)
+{
+	PyObject *stmt = NULL;
+	SQLCHAR *value = NULL;
+	stmt_handle *stmt_res;
+	int rc = 0;
+	
+	rb_scan_args(argc, argv, "1", &stmt);
+
+	if (!NIL_P(stmt)) {
+	  Data_Get_Struct(stmt, stmt_handle, stmt_res);
+
+	  / * We allocate a buffer of size 31 as per recommendations from the CLI IDS team * /
+	  value = ALLOC_N(char, 31);
+	  if ( value == NULL ) {
+		 PyErr_SetString(PyExc_Exception, "Failed to Allocate Memory");
+		 return Py_False;
+	  }
+
+	  rc = SQLGetStmtAttr((SQLHSTMT)stmt_res->hstmt, SQL_ATTR_GET_GENERATED_VALUE, (SQLPOINTER)value, 31, NULL);
+	  if ( rc == SQL_ERROR ) {
+		 _python_ibm_db_check_sql_errors( (SQLHSTMT)stmt_res->hstmt, SQL_HANDLE_STMT, rc, 1, NULL, -1, 1);
+		 return Py_False;
+	  }
+	  return INT2NUM(atoi(value));
+	}
+	else {
+	  PyErr_SetString(PyExc_Exception, "Supplied statement handle is invalid");
+	  return Py_False;
+	}
+}
+*/
+
+/* Map a Python object to this module's PYTHON_* type tag (0 = unrecognised).
+ * Check order matters: bool before int (bools pass PyInt_Check), datetime
+ * before time/date (datetime instances also satisfy the date check), and
+ * the concrete numeric checks before the catch-all PyNumber_Check, which
+ * here classifies remaining numbers (e.g. Decimal) as PYTHON_DECIMAL. */
+static int _python_get_variable_type(PyObject *variable_value)
+{
+	if (PyBool_Check(variable_value) && (variable_value == Py_True)){
+		return PYTHON_TRUE;
+	}
+	else if (PyBool_Check(variable_value) && (variable_value == Py_False)){
+		return PYTHON_FALSE;
+	}
+	else if (PyInt_Check(variable_value) || PyLong_Check(variable_value)){
+		return PYTHON_FIXNUM;
+	}
+	else if (PyFloat_Check(variable_value)){
+		return PYTHON_FLOAT;
+	}
+	else if (PyUnicode_Check(variable_value)){
+		return PYTHON_UNICODE;
+	}
+	else if (PyString_Check(variable_value) || PyBytes_Check(variable_value)){
+		return PYTHON_STRING;
+	}
+	else if (PyDateTime_Check(variable_value)){
+		return PYTHON_TIMESTAMP;
+	}
+	else if (PyTime_Check(variable_value)){
+		return PYTHON_TIME;
+	}
+	else if (PyDate_Check(variable_value)){
+		return PYTHON_DATE;
+	}
+	else if (PyComplex_Check(variable_value)){
+		return PYTHON_COMPLEX;
+	}
+	else if (PyNumber_Check(variable_value)){
+		return PYTHON_DECIMAL;
+	}
+	else if (variable_value == Py_None){
+		return PYTHON_NIL;
+	}
+	else return 0;
+}
+
+/* Listing of ibm_db module functions: */
+static PyMethodDef ibm_db_Methods[] = {
+	/* name, function, argument type, docstring */
+	/* Each entry exposes the C function under the given name at the ibm_db
+	 * module level; all take positional args (METH_VARARGS), and the two
+	 * connect variants additionally accept keywords. */
+	{"connect", (PyCFunction)ibm_db_connect, METH_VARARGS | METH_KEYWORDS, "Connect to the database"},
+	{"pconnect", (PyCFunction)ibm_db_pconnect, METH_VARARGS | METH_KEYWORDS, "Returns a persistent connection to a database"},
+	{"exec_immediate", (PyCFunction)ibm_db_exec, METH_VARARGS, "Prepares and executes an SQL statement."},
+	{"prepare", (PyCFunction)ibm_db_prepare, METH_VARARGS, "Prepares an SQL statement."},
+	{"bind_param", (PyCFunction)ibm_db_bind_param, METH_VARARGS, "Binds a Python variable to an SQL statement parameter"},
+	{"execute", (PyCFunction)ibm_db_execute, METH_VARARGS, "Executes an SQL statement that was prepared by ibm_db.prepare()"},
+	{"fetch_tuple", (PyCFunction)ibm_db_fetch_array, METH_VARARGS, "Returns an tuple, indexed by column position, representing a row in a result set"},
+	{"fetch_assoc", (PyCFunction)ibm_db_fetch_assoc, METH_VARARGS, "Returns a dictionary, indexed by column name, representing a row in a result set"},
+	{"fetch_both", (PyCFunction)ibm_db_fetch_both, METH_VARARGS, "Returns a dictionary, indexed by both column name and position, representing a row in a result set"},
+	{"fetch_row", (PyCFunction)ibm_db_fetch_row, METH_VARARGS, "Sets the result set pointer to the next row or requested row"},
+	{"result", (PyCFunction)ibm_db_result, METH_VARARGS, "Returns a single column from a row in the result set"},
+	{"active", (PyCFunction)ibm_db_active, METH_VARARGS, "Checks if the specified connection resource is active"},
+	{"autocommit", (PyCFunction)ibm_db_autocommit, METH_VARARGS, "Returns or sets the AUTOCOMMIT state for a database connection"},
+	{"callproc", (PyCFunction)ibm_db_callproc, METH_VARARGS, "Returns a tuple containing OUT/INOUT variable value"},
+	{"check_function_support", (PyCFunction)ibm_db_check_function_support, METH_VARARGS, "return true if fuction is supported otherwise return false"},
+	{"close", (PyCFunction)ibm_db_close, METH_VARARGS, "Close a database connection"},
+	{"conn_error", (PyCFunction)ibm_db_conn_error, METH_VARARGS, "Returns a string containing the SQLSTATE returned by the last connection attempt"},
+	{"conn_errormsg", (PyCFunction)ibm_db_conn_errormsg, METH_VARARGS, "Returns an error message and SQLCODE value representing the reason the last database connection attempt failed"},
+	{"client_info", (PyCFunction)ibm_db_client_info, METH_VARARGS, "Returns a read-only object with information about the DB2 database client"},
+	{"column_privileges", (PyCFunction)ibm_db_column_privileges, METH_VARARGS, "Returns a result set listing the columns and associated privileges for a table."},
+	{"columns", (PyCFunction)ibm_db_columns, METH_VARARGS, "Returns a result set listing the columns and associated metadata for a table"},
+	{"commit", (PyCFunction)ibm_db_commit, METH_VARARGS, "Commits a transaction"},
+	{"createdb", (PyCFunction)ibm_db_createdb, METH_VARARGS, "Create db"},
+	{"createdbNX", (PyCFunction)ibm_db_createdbNX, METH_VARARGS, "createdbNX" },
+	{"cursor_type", (PyCFunction)ibm_db_cursor_type, METH_VARARGS, "Returns the cursor type used by a statement resource"},
+	{"dropdb", (PyCFunction)ibm_db_dropdb, METH_VARARGS, "Drop db"},
+	{"execute_many", (PyCFunction)ibm_db_execute_many, METH_VARARGS, "Execute SQL with multiple rows."},
+	{"field_display_size", (PyCFunction)ibm_db_field_display_size, METH_VARARGS, "Returns the maximum number of bytes required to display a column"},
+	{"field_name", (PyCFunction)ibm_db_field_name, METH_VARARGS, "Returns the name of the column in the result set"},
+	{"field_nullable", (PyCFunction)ibm_db_field_nullable, METH_VARARGS, "Returns indicated column can contain nulls or not"},
+	{"field_num", (PyCFunction)ibm_db_field_num, METH_VARARGS, "Returns the position of the named column in a result set"},
+	{"field_precision", (PyCFunction)ibm_db_field_precision, METH_VARARGS, "Returns the precision of the indicated column in a result set"},
+	{"field_scale", (PyCFunction)ibm_db_field_scale , METH_VARARGS, "Returns the scale of the indicated column in a result set"},
+	{"field_type", (PyCFunction)ibm_db_field_type, METH_VARARGS, "Returns the data type of the indicated column in a result set"},
+	{"field_width", (PyCFunction)ibm_db_field_width, METH_VARARGS, "Returns the width of the indicated column in a result set"},
+	{"foreign_keys", (PyCFunction)ibm_db_foreign_keys, METH_VARARGS, "Returns a result set listing the foreign keys for a table"},
+	{"free_result", (PyCFunction)ibm_db_free_result, METH_VARARGS, "Frees resources associated with a result set"},
+	{"free_stmt", (PyCFunction)ibm_db_free_stmt, METH_VARARGS, "Frees resources associated with the indicated statement resource"},
+	{"get_option", (PyCFunction)ibm_db_get_option, METH_VARARGS, "Gets the specified option in the resource."},
+	{"next_result", (PyCFunction)ibm_db_next_result, METH_VARARGS, "Requests the next result set from a stored procedure"},
+	{"num_fields", (PyCFunction)ibm_db_num_fields, METH_VARARGS, "Returns the number of fields contained in a result set"},
+	{"num_rows", (PyCFunction)ibm_db_num_rows, METH_VARARGS, "Returns the number of rows affected by an SQL statement"},
+	{"get_num_result", (PyCFunction)ibm_db_get_num_result, METH_VARARGS, "Returns the number of rows in a current open non-dynamic scrollable cursor"},
+	{"primary_keys", (PyCFunction)ibm_db_primary_keys, METH_VARARGS, "Returns a result set listing primary keys for a table"},
+	{"procedure_columns", (PyCFunction)ibm_db_procedure_columns, METH_VARARGS, "Returns a result set listing the parameters for one or more stored procedures."},
+	{"procedures", (PyCFunction)ibm_db_procedures, METH_VARARGS, "Returns a result set listing the stored procedures registered in a database"},
+	{"recreatedb", (PyCFunction)ibm_db_recreatedb, METH_VARARGS, "recreate db"},
+	{"rollback", (PyCFunction)ibm_db_rollback, METH_VARARGS, "Rolls back a transaction"},
+	{"server_info", (PyCFunction)ibm_db_server_info, METH_VARARGS, "Returns an object with properties that describe the DB2 database server"},
+	{"get_db_info", (PyCFunction)ibm_db_get_db_info, METH_VARARGS, "Returns an object with properties that describe the DB2 database server according to the option passed"},
+	{"set_option", (PyCFunction)ibm_db_set_option, METH_VARARGS, "Sets the specified option in the resource"},
+	{"special_columns", (PyCFunction)ibm_db_special_columns, METH_VARARGS, "Returns a result set listing the unique row identifier columns for a table"},
+	{"statistics", (PyCFunction)ibm_db_statistics, METH_VARARGS, "Returns a result set listing the index and statistics for a table"},
+	{"stmt_error", (PyCFunction)ibm_db_stmt_error, METH_VARARGS, "Returns a string containing the SQLSTATE returned by an SQL statement"},
+	{"stmt_errormsg", (PyCFunction)ibm_db_stmt_errormsg, METH_VARARGS, "Returns a string containing the last SQL statement error message"},
+	{"table_privileges", (PyCFunction)ibm_db_table_privileges, METH_VARARGS, "Returns a result set listing the tables and associated privileges in a database"},
+	{"tables", (PyCFunction)ibm_db_tables, METH_VARARGS, "Returns a result set listing the tables and associated metadata in a database"},	
+	/* An end-of-listing sentinel: */ 
+	{NULL, NULL, 0, NULL}
+};
+	
+#ifndef PyMODINIT_FUNC	/* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+/* Python 3 module definition: module name "ibm_db", the method table above,
+ * and m_size -1 (no per-module state; globals are used instead). */
+static struct PyModuleDef moduledef = {
+		PyModuleDef_HEAD_INIT,
+		"ibm_db",
+		"IBM DataServer Driver for Python.",
+		-1,
+		ibm_db_Methods,
+	};
+#endif
+
+/* Module initialization function */
+PyMODINIT_FUNC
+INIT_ibm_db(void) {
+	PyObject* m;
+
+	PyDateTime_IMPORT;
+	ibm_db_globals = ALLOC(struct _ibm_db_globals);
+	memset(ibm_db_globals, 0, sizeof(struct _ibm_db_globals));
+	python_ibm_db_init_globals(ibm_db_globals);
+
+	persistent_list = PyDict_New();
+
+	conn_handleType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&conn_handleType) < 0)
+		return MOD_RETURN_ERROR;
+
+	stmt_handleType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&stmt_handleType) < 0)
+		return MOD_RETURN_ERROR;
+
+	client_infoType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&client_infoType) < 0)
+		return MOD_RETURN_ERROR;
+
+	server_infoType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&server_infoType) < 0)
+		return MOD_RETURN_ERROR;
+
+#if PY_MAJOR_VERSION < 3
+	m = Py_InitModule3("ibm_db", ibm_db_Methods,  "IBM DataServer Driver for Python.");
+#else
+	m = PyModule_Create(&moduledef);
+#endif
+
+	Py_INCREF(&conn_handleType);
+	PyModule_AddObject(m, "IBM_DBConnection", (PyObject *)&conn_handleType);
+
+	PyModule_AddIntConstant(m, "SQL_AUTOCOMMIT_ON", SQL_AUTOCOMMIT_ON);
+	PyModule_AddIntConstant(m, "SQL_AUTOCOMMIT_OFF", SQL_AUTOCOMMIT_OFF);
+	PyModule_AddIntConstant(m, "SQL_ATTR_AUTOCOMMIT", SQL_ATTR_AUTOCOMMIT);
+	PyModule_AddIntConstant(m, "ATTR_CASE", ATTR_CASE);
+	PyModule_AddIntConstant(m, "CASE_NATURAL", CASE_NATURAL);
+	PyModule_AddIntConstant(m, "CASE_LOWER", CASE_LOWER);
+	PyModule_AddIntConstant(m, "CASE_UPPER", CASE_UPPER);
+	PyModule_AddIntConstant(m, "SQL_ATTR_CURSOR_TYPE", SQL_ATTR_CURSOR_TYPE);
+	PyModule_AddIntConstant(m, "SQL_CURSOR_FORWARD_ONLY", SQL_CURSOR_FORWARD_ONLY);
+	PyModule_AddIntConstant(m, "SQL_CURSOR_KEYSET_DRIVEN", SQL_CURSOR_KEYSET_DRIVEN);
+	PyModule_AddIntConstant(m, "SQL_CURSOR_DYNAMIC", SQL_CURSOR_DYNAMIC);
+	PyModule_AddIntConstant(m, "SQL_CURSOR_STATIC", SQL_CURSOR_STATIC);
+	PyModule_AddIntConstant(m, "SQL_PARAM_INPUT", SQL_PARAM_INPUT);
+	PyModule_AddIntConstant(m, "SQL_PARAM_OUTPUT", SQL_PARAM_OUTPUT);
+	PyModule_AddIntConstant(m, "SQL_PARAM_INPUT_OUTPUT", SQL_PARAM_INPUT_OUTPUT);
+	PyModule_AddIntConstant(m, "PARAM_FILE", PARAM_FILE);
+
+	PyModule_AddIntConstant(m, "SQL_BIGINT", SQL_BIGINT);
+	PyModule_AddIntConstant(m, "SQL_BINARY", SQL_BINARY);
+	PyModule_AddIntConstant(m, "SQL_BLOB", SQL_BLOB);
+	PyModule_AddIntConstant(m, "SQL_BLOB_LOCATOR", SQL_BLOB_LOCATOR);
+	PyModule_AddIntConstant(m, "SQL_CHAR", SQL_CHAR);
+	PyModule_AddIntConstant(m, "SQL_TINYINT", SQL_TINYINT);
+	PyModule_AddIntConstant(m, "SQL_BINARY", SQL_BINARY);
+	PyModule_AddIntConstant(m, "SQL_BIT", SQL_BIT);
+	PyModule_AddIntConstant(m, "SQL_CLOB", SQL_CLOB);
+	PyModule_AddIntConstant(m, "SQL_CLOB_LOCATOR", SQL_CLOB_LOCATOR);
+	PyModule_AddIntConstant(m, "SQL_TYPE_DATE", SQL_TYPE_DATE);
+	PyModule_AddIntConstant(m, "SQL_DBCLOB", SQL_DBCLOB);
+	PyModule_AddIntConstant(m, "SQL_DBCLOB_LOCATOR", SQL_DBCLOB_LOCATOR);
+	PyModule_AddIntConstant(m, "SQL_DECIMAL", SQL_DECIMAL);
+	PyModule_AddIntConstant(m, "SQL_DECFLOAT", SQL_DECFLOAT);
+	PyModule_AddIntConstant(m, "SQL_DOUBLE", SQL_DOUBLE);
+	PyModule_AddIntConstant(m, "SQL_FLOAT", SQL_FLOAT);
+	PyModule_AddIntConstant(m, "SQL_GRAPHIC", SQL_GRAPHIC);
+	PyModule_AddIntConstant(m, "SQL_INTEGER", SQL_INTEGER);
+	PyModule_AddIntConstant(m, "SQL_LONGVARCHAR", SQL_LONGVARCHAR);
+	PyModule_AddIntConstant(m, "SQL_LONGVARBINARY", SQL_LONGVARBINARY);
+	PyModule_AddIntConstant(m, "SQL_LONGVARGRAPHIC", SQL_LONGVARGRAPHIC);
+	PyModule_AddIntConstant(m, "SQL_WLONGVARCHAR", SQL_WLONGVARCHAR);
+	PyModule_AddIntConstant(m, "SQL_NUMERIC", SQL_NUMERIC);
+	PyModule_AddIntConstant(m, "SQL_REAL", SQL_REAL);
+	PyModule_AddIntConstant(m, "SQL_SMALLINT", SQL_SMALLINT);
+	PyModule_AddIntConstant(m, "SQL_TYPE_TIME", SQL_TYPE_TIME);
+	PyModule_AddIntConstant(m, "SQL_TYPE_TIMESTAMP", SQL_TYPE_TIMESTAMP);
+	PyModule_AddIntConstant(m, "SQL_VARBINARY", SQL_VARBINARY);
+	PyModule_AddIntConstant(m, "SQL_VARCHAR", SQL_VARCHAR);
+	PyModule_AddIntConstant(m, "SQL_VARBINARY", SQL_VARBINARY);
+	PyModule_AddIntConstant(m, "SQL_VARGRAPHIC", SQL_VARGRAPHIC);
+	PyModule_AddIntConstant(m, "SQL_WVARCHAR", SQL_WVARCHAR);
+	PyModule_AddIntConstant(m, "SQL_WCHAR", SQL_WCHAR);
+	PyModule_AddIntConstant(m, "SQL_XML", SQL_XML);
+	PyModule_AddIntConstant(m, "SQL_FALSE", SQL_FALSE);
+	PyModule_AddIntConstant(m, "SQL_TRUE", SQL_TRUE);
+	PyModule_AddIntConstant(m, "SQL_TABLE_STAT", SQL_TABLE_STAT);
+	PyModule_AddIntConstant(m, "SQL_INDEX_CLUSTERED", SQL_INDEX_CLUSTERED);
+	PyModule_AddIntConstant(m, "SQL_INDEX_OTHER", SQL_INDEX_OTHER);
+	PyModule_AddIntConstant(m, "SQL_ATTR_CURRENT_SCHEMA", SQL_ATTR_CURRENT_SCHEMA);
+	PyModule_AddIntConstant(m, "SQL_ATTR_INFO_USERID", SQL_ATTR_INFO_USERID);
+	PyModule_AddIntConstant(m, "SQL_ATTR_INFO_WRKSTNNAME", SQL_ATTR_INFO_WRKSTNNAME);
+	PyModule_AddIntConstant(m, "SQL_ATTR_INFO_ACCTSTR", SQL_ATTR_INFO_ACCTSTR);
+	PyModule_AddIntConstant(m, "SQL_ATTR_INFO_APPLNAME", SQL_ATTR_INFO_APPLNAME);
+	PyModule_AddIntConstant(m, "SQL_ATTR_USE_TRUSTED_CONTEXT", SQL_ATTR_USE_TRUSTED_CONTEXT);
+	PyModule_AddIntConstant(m, "SQL_ATTR_TRUSTED_CONTEXT_USERID", SQL_ATTR_TRUSTED_CONTEXT_USERID);
+	PyModule_AddIntConstant(m, "SQL_ATTR_TRUSTED_CONTEXT_PASSWORD", SQL_ATTR_TRUSTED_CONTEXT_PASSWORD);
+	PyModule_AddIntConstant(m, "SQL_DBMS_NAME", SQL_DBMS_NAME);
+	PyModule_AddIntConstant(m, "SQL_DBMS_VER", SQL_DBMS_VER);
+	PyModule_AddIntConstant(m, "SQL_ATTR_ROWCOUNT_PREFETCH", SQL_ATTR_ROWCOUNT_PREFETCH);
+	PyModule_AddIntConstant(m, "SQL_ROWCOUNT_PREFETCH_ON", SQL_ROWCOUNT_PREFETCH_ON);
+	PyModule_AddIntConstant(m, "SQL_ROWCOUNT_PREFETCH_OFF", SQL_ROWCOUNT_PREFETCH_OFF);
+	PyModule_AddIntConstant(m, "SQL_API_SQLROWCOUNT", SQL_API_SQLROWCOUNT);
+	PyModule_AddIntConstant(m, "QUOTED_LITERAL_REPLACEMENT_ON", SET_QUOTED_LITERAL_REPLACEMENT_ON);
+	PyModule_AddIntConstant(m, "QUOTED_LITERAL_REPLACEMENT_OFF", SET_QUOTED_LITERAL_REPLACEMENT_OFF);
+	PyModule_AddIntConstant(m, "SQL_ATTR_INFO_PROGRAMNAME", SQL_ATTR_INFO_PROGRAMNAME);
+	PyModule_AddStringConstant(m, "__version__", MODULE_RELEASE);	
+
+	Py_INCREF(&stmt_handleType);
+	PyModule_AddObject(m, "IBM_DBStatement", (PyObject *)&stmt_handleType);
+
+	Py_INCREF(&client_infoType);
+	PyModule_AddObject(m, "IBM_DBClientInfo", (PyObject *)&client_infoType);
+
+	Py_INCREF(&server_infoType);
+	PyModule_AddObject(m, "IBM_DBServerInfo", (PyObject *)&server_infoType);
+	return MOD_RETURN_VAL(m);
+}
diff -pruN 0.3.0-3/ibm_db_dbi.py 2.0.5-0ubuntu2/ibm_db_dbi.py
--- 0.3.0-3/ibm_db_dbi.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_dbi.py	2014-01-24 06:53:53.000000000 +0000
@@ -0,0 +1,1525 @@
+# +--------------------------------------------------------------------------+
+# |  Licensed Materials - Property of IBM                                    |
+# |                                                                          |
+# | (C) Copyright IBM Corporation 2007-2013                                  |
+# +--------------------------------------------------------------------------+
+# | This module complies with SQLAlchemy and is                              |
+# | Licensed under the Apache License, Version 2.0 (the "License");          |
+# | you may not use this file except in compliance with the License.         |
+# | You may obtain a copy of the License at                                  |
+# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
+# | law or agreed to in writing, software distributed under the License is   |
+# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
+# | KIND, either express or implied. See the License for the specific        |
+# | language governing permissions and limitations under the License.        |
+# +--------------------------------------------------------------------------+
+# | Authors: Swetha Patel, Abhigyan Agrawal, Tarun Pasrija, Rahul Priyadarshi|
+# +--------------------------------------------------------------------------+
+
+"""
+This module implements the Python DB API Specification v2.0 for DB2 database.
+"""
+
+import types, string, time, datetime, decimal, sys
+import weakref
+
# Compatibility shims: Python 3 removed the ``buffer`` builtin and the
# ``exceptions`` module, so alias equivalents to keep the rest of the
# module version-agnostic.
if sys.version_info >= (3, ):
   buffer = memoryview
if sys.version_info < (3, ):
   import exceptions
   exception = exceptions.StandardError
else:
   exception = Exception

import ibm_db
__version__ = ibm_db.__version__

# Constants for specifying database connection options, re-exported
# from the underlying ibm_db C driver module.
SQL_ATTR_AUTOCOMMIT = ibm_db.SQL_ATTR_AUTOCOMMIT
SQL_ATTR_CURRENT_SCHEMA = ibm_db.SQL_ATTR_CURRENT_SCHEMA
SQL_AUTOCOMMIT_OFF = ibm_db.SQL_AUTOCOMMIT_OFF
SQL_AUTOCOMMIT_ON = ibm_db.SQL_AUTOCOMMIT_ON
ATTR_CASE = ibm_db.ATTR_CASE
CASE_NATURAL = ibm_db.CASE_NATURAL
CASE_LOWER = ibm_db.CASE_LOWER
CASE_UPPER = ibm_db.CASE_UPPER
SQL_FALSE = ibm_db.SQL_FALSE
SQL_TRUE = ibm_db.SQL_TRUE
SQL_TABLE_STAT = ibm_db.SQL_TABLE_STAT
SQL_INDEX_CLUSTERED = ibm_db.SQL_INDEX_CLUSTERED
SQL_INDEX_OTHER = ibm_db.SQL_INDEX_OTHER
SQL_DBMS_VER = ibm_db.SQL_DBMS_VER
SQL_DBMS_NAME = ibm_db.SQL_DBMS_NAME

# DB API 2.0 (PEP 249) module globals: API level, thread-safety level
# (0 = threads may not share the module) and parameter-marker style.
apilevel = '2.0'
threadsafety = 0
paramstyle = 'qmark'
+
+
class Error(exception):
    """Root of the exception hierarchy raised by this module.

    Catching this single class is sufficient to trap every error the
    module can raise.
    """

    def __init__(self, message):
        """Store the string describing the error."""
        self._message = message

    def __str__(self):
        """Render as 'ibm_db_dbi::<ClassName>: <message>'."""
        return 'ibm_db_dbi::%s: %s' % (str(self.__class__.__name__),
                                       str(self._message))
+
+
class Warning(exception):
    """Signals important non-fatal conditions, e.g. data truncation
    during an insert or fetch.
    """

    def __init__(self, message):
        """Store the string describing the warning."""
        self._message = message

    def __str__(self):
        """Render as 'ibm_db_dbi::<ClassName>: <message>'."""
        return 'ibm_db_dbi::%s: %s' % (str(self.__class__.__name__),
                                       str(self._message))
+
+
class InterfaceError(Error):
    """Raised when this module's interface is used incorrectly, for
    example when an argument of the wrong type is supplied.

    """
    pass
+
+
class DatabaseError(Error):
    """Raised for errors reported by the database itself; base class of
    the more specific database exception types below."""
    pass
+
+
class InternalError(DatabaseError):
    """Raised when an internal database error occurs, such as a cursor
    that is no longer valid.

    """
    pass
+
+
class OperationalError(DatabaseError):
    """Raised for database errors outside the programmer's control,
    such as an unexpected disconnect or an out-of-memory condition.

    """
    pass
+
+
class ProgrammingError(DatabaseError):
    """Raised for programming errors, such as referencing a table that
    does not exist or an SQL syntax error.

    """
    pass
+
class IntegrityError(DatabaseError):
    """Raised when the relational integrity of the database is
    violated, such as a failing foreign-key check.

    """
    pass
+
+
class  DataError(DatabaseError):
    """Raised for errors caused by the processed data itself, such as
    a division by zero or a value out of range.

    """
    pass
+
+
class NotSupportedError(DatabaseError):
    """Raised when a method of this module or a database API feature is
    not supported.

    """
    pass
+
+
def Date(year, month, day):
    """Build a ``datetime.date`` from integer components, suitable for
    binding against a DATE column.
    """
    return datetime.date(year=year, month=month, day=day)
+
def Time(hour, minute, second):
    """Build a ``datetime.time`` from integer components, suitable for
    binding against a TIME column.
    """
    return datetime.time(hour=hour, minute=minute, second=second)
+
def Timestamp(year, month, day, hour, minute, second):
    """Build a ``datetime.datetime`` from integer components, suitable
    for binding against a TIMESTAMP column.
    """
    return datetime.datetime(year=year, month=month, day=day,
                             hour=hour, minute=minute, second=second)
+
def DateFromTicks(ticks):
    """Build a ``datetime.date`` from a POSIX timestamp (seconds),
    interpreted in local time, suitable for a DATE column.
    """
    year, month, day = time.localtime(ticks)[:3]
    return datetime.date(year, month, day)
+
def TimeFromTicks(ticks):
    """Build a ``datetime.time`` from a POSIX timestamp (seconds),
    interpreted in local time, suitable for a TIME column.
    """
    hour, minute, second = time.localtime(ticks)[3:6]
    return datetime.time(hour, minute, second)
+
def TimestampFromTicks(ticks):
    """Build a ``datetime.datetime`` from a POSIX timestamp (seconds),
    interpreted in local time, suitable for a TIMESTAMP column.
    """
    tt = time.localtime(ticks)
    return datetime.datetime(*tt[:6])
+
def Binary(string):
    """Wrap *string* in a buffer object for insertion into a
    binary-typed column in the database.

    Raises InterfaceError when the argument is not a supported
    string/binary type.

    Fix: the previous code always checked against ``types.StringType``
    and ``types.BufferType``, which do not exist on Python 3 (where the
    module aliases ``buffer = memoryview`` above), so every py3 call
    raised AttributeError instead of performing the check.
    """
    if sys.version_info < (3, ):
        # Python 2: original behaviour — accept str and buffer objects.
        valid_types = (types.StringType, types.BufferType)
    else:
        # Python 3: accept the binary types that memoryview can wrap.
        valid_types = (bytes, bytearray, memoryview)
    if not isinstance(string, valid_types):
        raise InterfaceError("Binary function expects type string argument.")
    return buffer(string)
+
+
class DBAPITypeObject(frozenset):
    """Set-like object describing a group of database column types.

    An instance compares equal to any of the column-type names passed
    to its constructor, which lets callers decide which Python type to
    supply in the parameter sequence of ``execute``.
    """

    def __new__(cls, col_types):
        return frozenset.__new__(cls, col_types)

    def __init__(self, col_types):
        """Remember the tuple of database column-type names."""
        self.col_types = col_types

    def __cmp__(self, cmp):
        """Python 2 three-way compare: 0 when *cmp* is one of our
        column-type names, otherwise an arbitrary ordering.
        """
        if cmp in self.col_types:
            return 0
        if sys.version_info < (3, ):
            return 1 if cmp < self.col_types else -1
        return 1

    def __eq__(self, cmp):
        """True when *cmp* names one of the column types in this group."""
        return cmp in self.col_types

    def __ne__(self, cmp):
        """True when *cmp* names none of the column types in this group."""
        return cmp not in self.col_types

    def __hash__(self):
        # Identity hash: keeps instances hashable even though __eq__
        # compares against plain strings.
        return id(self)
+
# The user can use these objects to compare the database column types
# with in order to determine the python type to provide in the
# parameter sequence argument of the execute method.  Each constant
# groups the DB2 type names that map to one Python type (PEP 249
# "type objects").
STRING = DBAPITypeObject(("CHARACTER", "CHAR", "VARCHAR", 
                          "CHARACTER VARYING", "CHAR VARYING", "STRING",))

TEXT = DBAPITypeObject(("CLOB", "CHARACTER LARGE OBJECT", "CHAR LARGE OBJECT",))

XML = DBAPITypeObject(("XML",))

BINARY = DBAPITypeObject(("BLOB", "BINARY LARGE OBJECT",))

NUMBER = DBAPITypeObject(("INTEGER", "INT", "SMALLINT",))

BIGINT = DBAPITypeObject(("BIGINT",))

FLOAT = DBAPITypeObject(("FLOAT", "REAL", "DOUBLE", "DECFLOAT"))

DECIMAL = DBAPITypeObject(("DECIMAL", "DEC", "NUMERIC", "NUM",))

DATE = DBAPITypeObject(("DATE",))

TIME = DBAPITypeObject(("TIME",))

DATETIME = DBAPITypeObject(("TIMESTAMP",))

# Empty group: DB2 has no distinct ROWID column type here.
ROWID = DBAPITypeObject(())
+
def _get_exception(inst):
    """Map a driver exception *inst* to the appropriate DB-API
    exception instance (Warning, DataError, OperationalError, ...).

    The mapping is done first by the SQLSTATE code (or its 2-digit
    class prefix) embedded in the driver message, then by matching
    known driver message fragments, and falls back to the generic
    Error.

    Fixes relative to the previous revision: the SQLSTATE presence
    check used bitwise '&' where logical 'and' was meant, and the
    fallback message misspelled "occurred".
    """
    # These tuples store the SQLSTATE codes and SQLSTATE class codes
    # (the 2-digit prefix of the SQLSTATE code) for each exception type.
    warning_error_tuple = ('01', )
    data_error_tuple = ('02', '22', '10601', '10603', '10605', '10901', '10902', 
                                                               '38552', '54')

    operational_error_tuple = ( '08', '09', '10502', '10000', '10611', '38501', 
                          '38503', '38553', '38H01', '38H02', '38H03', '38H04',
                                   '38H05', '38H06', '38H07', '38H09', '38H0A')

    integrity_error_tuple = ('23', )

    internal_error_tuple = ('24', '25', '26', '2D', '51', '57')

    # NOTE(review): 'OD'/'OF'/'OK'/'ON' use the letter O; SQLSTATE
    # classes are normally '0D'/'0F'/... with a zero — confirm intent.
    programming_error_tuple = ('08002', '07', 'OD', 'OF','OK','ON','10', '27',
                               '28', '2E', '34', '36', '38', '39', '56', '42',
                               '3B', '40', '44', '53', '55', '58', '5U', '21')

    not_supported_error_tuple = ('0A', '10509')

    # These tuples are used to recognize exceptions raised by the
    # driver module itself (no SQLSTATE in the message).
    interface_exceptions = (                  "Supplied parameter is invalid",
                                        "ATTR_CASE attribute must be one of "
                                    "CASE_LOWER, CASE_UPPER, or CASE_NATURAL",
                          "Connection or statement handle must be passed in.",
                                                       "Param is not a tuple")

    programming_exceptions = (                     "Connection is not active", 
                                                 "qualifier must be a string",
                                                   "unique must be a boolean",
                                                       "Parameters not bound",
                                                     "owner must be a string",
                                                "table_name must be a string",
                                                "table type must be a string", 
                                               "column_name must be a string", 
                                                "Column ordinal out of range", 
                                            "procedure name must be a string",
                              "Requested row number must be a positive value", 
                                     "Options Array must have string indexes")

    database_exceptions = (                                   "Binding Error", 
                                   "Column information cannot be retrieved: ", 
                                            "Column binding cannot be done: ",
                                             "Failed to Determine XML Size: ")

    statement_exceptions = (                     "Statement Execute Failed: ",
                                                    "Describe Param Failed: ",
                                                      "Sending data failed: ",
                                                            "Fetch Failure: ",
                                                  "SQLNumResultCols failed: ",
                                                       "SQLRowCount failed: ",
                                                   "SQLGetDiagField failed: ",
                                                 "Statement prepare Failed: ")

    operational_exceptions = (          "Connection Resource cannot be found", 
                                                  "Failed to Allocate Memory",
                                                    "Describe Param Failed: ",
                                                 "Statement Execute Failed: ",
                                                      "Sending data failed: ", 
                                     "Failed to Allocate Memory for XML Data",
                                     "Failed to Allocate Memory for LOB Data")

    # First check if the exception is from the database.  If it is,
    # determine the SQLSTATE code which is used further to determine
    # the exception type.  If not, check if the exception is thrown
    # by the driver and return the appropriate exception type.  If it
    # is not possible to determine the type of exception generated,
    # return the generic Error exception.
    if inst is not None:
        message = repr(inst)
        # Strip the "Exception('...',)" wrapper that repr() adds.
        if message.startswith("Exception('") and message.endswith("',)"):
            message = message[11:]
            message = message[:len(message)-3]

        index = message.find('SQLSTATE=')
        if (message != '') and (index != -1):
            error_code = message[(index+9):(index+14)]
            prefix_code = error_code[:2]
        else:
            for key in interface_exceptions:
                if message.find(key) != -1:
                    return InterfaceError(message)
            for key in programming_exceptions:
                if message.find(key) != -1:
                    return ProgrammingError(message)
            for key in operational_exceptions:
                if message.find(key) != -1:
                    return OperationalError(message)
            for key in database_exceptions:
                if message.find(key) != -1:
                    return DatabaseError(message)  
            for key in statement_exceptions:
                if message.find(key) != -1:
                    return DatabaseError(message)
            return Error(message)
    else:
        return Error('An error has occurred')

    # First check if the SQLSTATE is in the tuples, if not check
    # if the SQLSTATE class code is in the tuples to determine the
    # exception type. 
    if ( error_code in warning_error_tuple or 
         prefix_code in warning_error_tuple ):
        return Warning(message)
    if ( error_code in data_error_tuple or 
         prefix_code in data_error_tuple ):
        return DataError(message)
    if ( error_code in operational_error_tuple or 
         prefix_code in operational_error_tuple ):
        return OperationalError(message)
    if ( error_code in integrity_error_tuple or 
         prefix_code in integrity_error_tuple ):
        return IntegrityError(message)
    if ( error_code in internal_error_tuple or
         prefix_code in internal_error_tuple ):
        return InternalError(message)
    if ( error_code in programming_error_tuple or
         prefix_code in programming_error_tuple ):
        return ProgrammingError(message)
    if ( error_code in not_supported_error_tuple or
         prefix_code in not_supported_error_tuple ):
        return NotSupportedError(message)
    return DatabaseError(message)
+
def _server_connect(dsn, user='', password='', host=''):
    """Open an instance-level ("attach=true") connection used by the
    create/drop database helpers below.

    *dsn* may be a full connection string (contains '=') or a catalog
    DSN name; missing HOSTNAME/UID/PWD parts are appended from the
    keyword arguments.  Raises InterfaceError on bad argument types,
    or a mapped driver exception when the connect fails.
    """
    
    if dsn is None:
        raise InterfaceError("dsn value should not be None")
    
    if (not isinstance(dsn, basestring)) | \
       (not isinstance(user, basestring)) | \
       (not isinstance(password, basestring)) | \
       (not isinstance(host, basestring)):
        raise InterfaceError("Arguments should be of type string or unicode")
    
    # If the dsn does not contain port and protocol, adding database
    # and hostname is no good.  Add these when required, that is,
    # if there is a '=' in the dsn.  Else the dsn string is taken to be
    # a DSN entry.
    if dsn.find('=') != -1:
        if dsn[len(dsn) - 1] != ';':
            dsn = dsn + ";"
        if host != '' and dsn.find('HOSTNAME=') == -1:
            dsn = dsn + "HOSTNAME=" + host + ";"
    else:
        dsn = "DSN=" + dsn + ";"

    # attach=true connects to the instance (not a database), which is
    # what CREATE/DROP DATABASE require.
    if dsn.find('attach=') == -1:
        dsn = dsn + "attach=true;"
    if user != '' and dsn.find('UID=') == -1:
        dsn = dsn + "UID=" + user + ";"
    if password != '' and dsn.find('PWD=') == -1:
        dsn = dsn + "PWD=" + password + ";"
    try:    
        conn = ibm_db.connect(dsn, '', '')
    except Exception, inst:
        raise _get_exception(inst)
    
    return conn
+    
def createdb(database, dsn, user='', password='', host='', codeset='', mode=''):
    """Create database *database* with the given code set and mode,
    using an instance-level connection built from *dsn*/*user*/
    *password*/*host*.  Returns the driver's result value.
    """
    
    if database is None:
        raise InterfaceError("createdb expects a not None database name value")
    if (not isinstance(database, basestring)) | \
       (not isinstance(codeset, basestring)) | \
       (not isinstance(mode, basestring)):
        raise InterfaceError("Arguments sould be string or unicode")
        
    conn = _server_connect(dsn, user=user, password=password, host=host)
    try:
        return_value = ibm_db.createdb(conn, database, codeset, mode)
    except Exception, inst:
        raise _get_exception(inst)
    finally:
        # NOTE(review): if close() raises here it masks the original
        # exception from createdb — confirm this is acceptable.
        try:
            ibm_db.close(conn)
        except Exception, inst:
            raise _get_exception(inst)
        
    return return_value
+    
def dropdb(database, dsn, user='', password='', host=''):
    """Drop database *database*, using an instance-level connection
    built from *dsn*/*user*/*password*/*host*.  Returns the driver's
    result value.
    """
    
    if database is None:
        raise InterfaceError("dropdb expects a not None database name value")
    if (not isinstance(database, basestring)):
        raise InterfaceError("Arguments sould be string or unicode")
        
    conn = _server_connect(dsn, user=user, password=password, host=host)
    try:
        return_value = ibm_db.dropdb(conn, database)
    except Exception, inst:
        raise _get_exception(inst)
    finally:
        # NOTE(review): a close() failure here masks the dropdb error.
        try:
            ibm_db.close(conn)
        except Exception, inst:
            raise _get_exception(inst)
        
    return return_value
+    
def recreatedb(database, dsn, user='', password='', host='', codeset='', mode=''):
    """Drop and re-create database *database* with the given code set
    and mode, using an instance-level connection built from *dsn*/
    *user*/*password*/*host*.  Returns the driver's result value.
    """
    
    if database is None:
        raise InterfaceError("recreatedb expects a not None database name value")
    if (not isinstance(database, basestring)) | \
       (not isinstance(codeset, basestring)) | \
       (not isinstance(mode, basestring)):
        raise InterfaceError("Arguments sould be string or unicode")
        
    conn = _server_connect(dsn, user=user, password=password, host=host)
    try:
        return_value = ibm_db.recreatedb(conn, database, codeset, mode)
    except Exception, inst:
        raise _get_exception(inst)
    finally:
        # NOTE(review): a close() failure here masks the recreatedb error.
        try:
            ibm_db.close(conn)
        except Exception, inst:
            raise _get_exception(inst)
        
    return return_value
+    
def createdbNX(database, dsn, user='', password='', host='', codeset='', mode=''):
    """Create database *database* only if it does not already exist
    ("NX" = not exists), with the given code set and mode.  Returns
    the driver's result value.
    """
    
    if database is None:
        raise InterfaceError("createdbNX expects a not None database name value")
    if (not isinstance(database, basestring)) | \
       (not isinstance(codeset, basestring)) | \
       (not isinstance(mode, basestring)):
        raise InterfaceError("Arguments sould be string or unicode")
        
    conn = _server_connect(dsn, user=user, password=password, host=host)
    try:
        return_value = ibm_db.createdbNX(conn, database, codeset, mode)
    except Exception, inst:
        raise _get_exception(inst)
    finally:
        # NOTE(review): a close() failure here masks the createdbNX error.
        try:
            ibm_db.close(conn)
        except Exception, inst:
            raise _get_exception(inst)
        
    return return_value
+    
def connect(dsn, user='', password='', host='', database='', conn_options=None):
    """Create a non-persistent connection to the database and return
    an ibm_db_dbi.Connection object.

    *dsn* may be a full connection string (contains '=') or a catalog
    DSN name; DATABASE/HOSTNAME/UID/PWD parts are appended from the
    other arguments when missing.  *conn_options* is an optional dict
    of ibm_db connection options; autocommit defaults to OFF, as the
    DB API requires.
    """
    
    if dsn is None:
        raise InterfaceError("connect expects a not None dsn value") 
    
    if (not isinstance(dsn, basestring)) | \
       (not isinstance(user, basestring)) | \
       (not isinstance(password, basestring)) | \
       (not isinstance(host, basestring)) | \
       (not isinstance(database, basestring)):
        raise InterfaceError("connect expects the first five arguments to"
                                                      " be of type string or unicode")
    if conn_options is not None:
        if not isinstance(conn_options, dict):
            raise InterfaceError("connect expects the sixth argument"
                                 " (conn_options) to be of type dict")
        if not SQL_ATTR_AUTOCOMMIT in conn_options:
            conn_options[SQL_ATTR_AUTOCOMMIT] = SQL_AUTOCOMMIT_OFF
    else:
        conn_options = {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}

    # If the dsn does not contain port and protocol, adding database
    # and hostname is no good.  Add these when required, that is,
    # if there is a '=' in the dsn.  Else the dsn string is taken to be
    # a DSN entry.
    if dsn.find('=') != -1:
        if dsn[len(dsn) - 1] != ';':
            dsn = dsn + ";"
        if database != '' and dsn.find('DATABASE=') == -1:
            dsn = dsn + "DATABASE=" + database + ";"
        if host != '' and dsn.find('HOSTNAME=') == -1:
            dsn = dsn + "HOSTNAME=" + host + ";"
    else:
        dsn = "DSN=" + dsn + ";"

    if user != '' and dsn.find('UID=') == -1:
        dsn = dsn + "UID=" + user + ";"
    if password != '' and dsn.find('PWD=') == -1:
        dsn = dsn + "PWD=" + password + ";"
    try:    
        conn = ibm_db.connect(dsn, '', '', conn_options)
        # NOTE(review): CURRENT SCHEMA is set to *user* even when user
        # is '' — confirm the driver treats an empty schema as a no-op.
        ibm_db.set_option(conn, {SQL_ATTR_CURRENT_SCHEMA : user}, 1)
    except Exception, inst:
        raise _get_exception(inst)

    return Connection(conn)
+
def pconnect(dsn, user='', password='', host='', database='', conn_options=None):
    """Create a persistent (pooled) connection to the database and
    return an ibm_db_dbi.Connection object.

    Identical to connect() except that the underlying handle is
    obtained via ibm_db.pconnect and is reused across calls.
    """
    
    if dsn is None:
        raise InterfaceError("connect expects a not None dsn value") 
    
    if (not isinstance(dsn, basestring)) | \
       (not isinstance(user, basestring)) | \
       (not isinstance(password, basestring)) | \
       (not isinstance(host, basestring)) | \
       (not isinstance(database, basestring)):
        raise InterfaceError("connect expects the first five arguments to"
                                                      " be of type string or unicode")
    if conn_options is not None:
        if not isinstance(conn_options, dict):
            raise InterfaceError("connect expects the sixth argument"
                                 " (conn_options) to be of type dict")
        if not SQL_ATTR_AUTOCOMMIT in conn_options:
            conn_options[SQL_ATTR_AUTOCOMMIT] = SQL_AUTOCOMMIT_OFF
    else:
        conn_options = {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}

    # If the dsn does not contain port and protocol, adding database
    # and hostname is no good.  Add these when required, that is,
    # if there is a '=' in the dsn.  Else the dsn string is taken to be
    # a DSN entry.
    if dsn.find('=') != -1:
        if dsn[len(dsn) - 1] != ';':
            dsn = dsn + ";"
        if database != '' and dsn.find('DATABASE=') == -1:
            dsn = dsn + "DATABASE=" + database + ";"
        if host != '' and dsn.find('HOSTNAME=') == -1:
            dsn = dsn + "HOSTNAME=" + host + ";"
    else:
        dsn = "DSN=" + dsn + ";"

    if user != '' and dsn.find('UID=') == -1:
        dsn = dsn + "UID=" + user + ";"
    if password != '' and dsn.find('PWD=') == -1:
        dsn = dsn + "PWD=" + password + ";"
    try:    
        conn = ibm_db.pconnect(dsn, '', '', conn_options)
        # NOTE(review): CURRENT SCHEMA is set to *user* even when user
        # is '' — confirm the driver treats an empty schema as a no-op.
        ibm_db.set_option(conn, {SQL_ATTR_CURRENT_SCHEMA : user}, 1)
    except Exception, inst:
        raise _get_exception(inst)

    return Connection(conn)
+
+class Connection(object):
+    """This class object represents a connection between the database 
+    and the application.
+
+    """
    def __init__(self, conn_handler):
        """Constructor for Connection object.  It takes an ibm_db
        connection handle as its only argument.
        """
        self.conn_handler = conn_handler

        # Used to identify closed cursors for generating exceptions
        # after the connection is closed.  Entries appear to be weak
        # references (they are called like weakrefs in close()) —
        # populated by cursor creation outside this view.
        self._cursor_list = []
        self.__dbms_name = ibm_db.get_db_info(conn_handler, SQL_DBMS_NAME)
        self.__dbms_ver = ibm_db.get_db_info(conn_handler, SQL_DBMS_VER)
+
    # Accessor for the DBMS product name captured at connect time.
    def __get_dbms_name( self ):
        return self.__dbms_name

    # Read-only attribute exposing the DBMS product name
    # (e.g. "DB2/LINUXX8664").
    dbms_name = property(__get_dbms_name, None, None, "")
+
+    # This method is used to get the DBMS_ver 
+    def __get_dbms_ver( self ):
+        return self.__dbms_ver
+
+    # This attribute specifies the DBMS_ver
+    # It is a read only attribute. 
+    dbms_ver = property(__get_dbms_ver, None, None, "")
+
+    def close(self):
+        """This method closes the Database connection associated with
+        the Connection object.  It takes no arguments.
+
+        """
+        self.rollback()
+        try:
+            if self.conn_handler is None:
+                raise ProgrammingError("Connection cannot be closed; "
+                                     "connection is no longer active.")
+            else:
+                return_value = ibm_db.close(self.conn_handler)
+        except Exception, inst:
+            raise _get_exception(inst)
+        self.conn_handler = None
+        for index in range(len(self._cursor_list)):
+            if (self._cursor_list[index]() != None):
+                tmp_cursor =  self._cursor_list[index]()
+                tmp_cursor.conn_handler = None
+                tmp_cursor.stmt_handler = None
+                tmp_cursor._all_stmt_handlers = None
+        self._cursor_list = []
+        return return_value
+
+    def commit(self):
+        """This method commits the transaction associated with the
+        Connection object.  It takes no arguments.
+
+        """
+        try:
+            return_value = ibm_db.commit(self.conn_handler)
+        except Exception, inst:
+            raise _get_exception(inst)
+        return return_value
+
+    def rollback(self):
+        """This method rollbacks the transaction associated with the
+        Connection object.  It takes no arguments.
+
+        """
+        try:
+            return_value = ibm_db.rollback(self.conn_handler)
+        except Exception, inst:
+            raise _get_exception(inst)
+        return return_value
+
+    def cursor(self):
+        """This method returns a Cursor object associated with the 
+        Connection.  It takes no arguments.
+
+        """
+        if self.conn_handler is None:
+            raise ProgrammingError("Cursor cannot be returned; "
+                               "connection is no longer active.")
+        cursor = Cursor(self.conn_handler, self)
+        self._cursor_list.append(weakref.ref(cursor))
+        return cursor
+
+    # Sets connection attribute values
+    def set_option(self, attr_dict):
+        """Input: connection attribute dictionary
+           Return: True on success or False on failure
+        """
+        return ibm_db.set_option(self.conn_handler, attr_dict, 1)
+
+    # Retrieves connection attribute values
+    def get_option(self, attr_key):
+        """Input: connection attribute key
+           Return: current setting of the resource attribute requested
+        """
+        return ibm_db.get_option(self.conn_handler, attr_key, 1)
+
+    # Sets connection AUTOCOMMIT attribute
+    def set_autocommit(self, is_on):
+        """Input: connection attribute: true if AUTOCOMMIT ON, false otherwise (i.e. OFF)
+           Return: True on success or False on failure
+        """
+        try:
+          if is_on:
+            is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_ON}, 1)
+          else:
+            is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}, 1)
+        except Exception, inst:
+          raise _get_exception(inst)
+        return is_set
+
+    # Sets connection attribute values
+    def set_current_schema(self, schema_name):
+        """Input: connection attribute dictionary
+           Return: True on success or False on failure
+        """
+        self.current_schema = schema_name
+        try:
+          is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_CURRENT_SCHEMA : schema_name}, 1)
+        except Exception, inst:
+          raise _get_exception(inst)
+        return is_set
+
+    # Retrieves connection attribute values
+    def get_current_schema(self):
+        """Return: current setting of the schema attribute
+        """
+        try:
+          conn_schema = ibm_db.get_option(self.conn_handler, SQL_ATTR_CURRENT_SCHEMA, 1)
+          if conn_schema is not None and conn_schema != '':
+            self.current_schema = conn_schema
+        except Exception, inst:
+          raise _get_exception(inst)
+        return self.current_schema
+
+    # Retrieves the IBM Data Server version for a given Connection object
+    def server_info(self):
+        """Return: tuple (DBMS_NAME, DBMS_VER)
+        """
+        try:
+          server_info = []
+          server_info.append(self.dbms_name)
+          server_info.append(self.dbms_ver)
+        except Exception, inst:
+          raise _get_exception(inst)
+        return tuple(server_info)
+    
+    def set_case(self, server_type, str_value):
+        return str_value.upper()
+
+    # Retrieves the tables for a specified schema (and/or given table name)
+    def tables(self, schema_name=None, table_name=None):
+        """Input: connection - ibm_db.IBM_DBConnection object
+           Return: sequence of table metadata dicts for the specified schema
+        """
+            
+        result = []
+        if schema_name is not None:
+            schema_name = self.set_case("DB2_LUW", schema_name)
+        if table_name is not None:
+            table_name = self.set_case("DB2_LUW", table_name)
+
+        try:      
+          stmt = ibm_db.tables(self.conn_handler, None, schema_name, table_name)
+          row = ibm_db.fetch_assoc(stmt)
+          i = 0
+          while (row):
+              result.append( row )
+              i += 1    
+              row = ibm_db.fetch_assoc(stmt)
+          ibm_db.free_result(stmt)
+        except Exception, inst:
+          raise _get_exception(inst)
+
+        return result
+
+    # Retrieves metadata pertaining to index for specified schema (and/or table name)
+    def indexes(self, unique=True, schema_name=None, table_name=None):
+        """Input: connection - ibm_db.IBM_DBConnection object
+           Return: sequence of index metadata dicts for the specified table
+        Example:
+           Index metadata retrieved from schema 'PYTHONIC.TEST_TABLE' table
+           {
+           'TABLE_SCHEM':       'PYTHONIC',              'TABLE_CAT':          None, 
+           'TABLE_NAME':        'ENGINE_USERS',          'PAGES':              None, 
+           'COLUMN_NAME':       'USER_ID'                'FILTER_CONDITION':   None, 
+           'INDEX_NAME':        'SQL071201150750170',    'CARDINALITY':        None,
+           'ORDINAL_POSITION':   1,                      'INDEX_QUALIFIER':   'SYSIBM', 
+           'TYPE':               3, 
+           'NON_UNIQUE':         0, 
+           'ASC_OR_DESC':       'A'
+           }
+        """
+        result = []
+        if schema_name is not None:
+            schema_name = self.set_case("DB2_LUW", schema_name)
+        if table_name is not None:
+            table_name = self.set_case("DB2_LUW", table_name)
+
+        try:
+          stmt = ibm_db.statistics(self.conn_handler, None, schema_name, table_name, unique)
+          row = ibm_db.fetch_assoc(stmt)
+          i = 0
+          while (row):
+              if row['TYPE'] == SQL_INDEX_OTHER:
+                  result.append( row )
+              i += 1    
+              row = ibm_db.fetch_assoc(stmt)
+          ibm_db.free_result(stmt)
+        except Exception, inst:
+          raise _get_exception(inst)
+
+        return result        
+
+    # Retrieves metadata pertaining to primary keys for specified schema (and/or table name)
+    def primary_keys(self, unique=True, schema_name=None, table_name=None):
+        """Input: connection - ibm_db.IBM_DBConnection object
+           Return: sequence of PK metadata dicts for the specified table
+        Example:
+           PK metadata retrieved from 'PYTHONIC.ORDERS' table
+           {  
+           'TABLE_SCHEM':  'PYTHONIC',                 'TABLE_CAT': None, 
+           'TABLE_NAME':   'ORDERS', 
+           'COLUMN_NAME':  'ORDER_ID'
+           'PK_NAME':      'SQL071128122038680', 
+           'KEY_SEQ':       1
+           }
+        """
+        result = []
+        if schema_name is not None:
+            schema_name = self.set_case("DB2_LUW", schema_name)
+        if table_name is not None:
+            table_name = self.set_case("DB2_LUW", table_name)
+
+        try:
+          stmt = ibm_db.primary_keys(self.conn_handler, None, schema_name, table_name)
+          row = ibm_db.fetch_assoc(stmt)
+          i = 0
+          while (row):
+              result.append( row )
+              i += 1    
+              row = ibm_db.fetch_assoc(stmt)
+          ibm_db.free_result(stmt)
+        except Exception, inst:
+          raise _get_exception(inst)
+
+        return result        
+
+    # Retrieves metadata pertaining to foreign keys for specified schema (and/or table name)
+    def foreign_keys(self, unique=True, schema_name=None, table_name=None):
+        """Input: connection - ibm_db.IBM_DBConnection object
+           Return: sequence of FK metadata dicts for the specified table
+        Example:
+           FK metadata retrieved from 'PYTHONIC.ENGINE_EMAIL_ADDRESSES' table
+           {  
+           'PKTABLE_SCHEM': 'PYTHONIC',                 'PKTABLE_CAT':    None, 
+           'PKTABLE_NAME':  'ENGINE_USERS',             'FKTABLE_CAT':    None,
+           'PKCOLUMN_NAME': 'USER_ID',                  'UPDATE_RULE':    3,
+           'PK_NAME':       'SQL071205090958680',       'DELETE_RULE':    3
+           'KEY_SEQ':        1,                         'DEFERRABILITY':  7, 
+           'FK_NAME':       'SQL071205091000160', 
+           'FKCOLUMN_NAME': 'REMOTE_USER_ID', 
+           'FKTABLE_NAME':  'ENGINE_EMAIL_ADDRESSES', 
+           'FKTABLE_SCHEM': 'PYTHONIC' 
+           }
+        """
+        result = []
+        if schema_name is not None:
+            schema_name = self.set_case("DB2_LUW", schema_name)
+        if table_name is not None:
+            table_name = self.set_case("DB2_LUW", table_name)
+
+        try:
+          stmt = ibm_db.foreign_keys(self.conn_handler, None, None, None, None, schema_name, table_name)
+          row = ibm_db.fetch_assoc(stmt)
+          i = 0
+          while (row):
+              result.append( row )
+              i += 1    
+              row = ibm_db.fetch_assoc(stmt)
+          ibm_db.free_result(stmt)
+        except Exception, inst:
+          raise _get_exception(inst)
+
+        return result        
+    
+    # Retrieves the columns for a specified schema (and/or table name and column name)
+    def columns(self, schema_name=None, table_name=None, column_names=None):
+        """Input: connection - ibm_db.IBM_DBConnection object
+           Return: sequence of column metadata dicts for the specified schema
+        Example:
+           Column metadata retrieved from schema 'PYTHONIC.FOO' table, column 'A'
+           {
+           'TABLE_NAME':        'FOO',        'NULLABLE':           1, 
+           'ORDINAL_POSITION':   2L,          'REMARKS':            None, 
+           'COLUMN_NAME':       'A',          'BUFFER_LENGTH':      30L, 
+           'TYPE_NAME':         'VARCHAR',    'SQL_DATETIME_SUB':   None, 
+           'COLUMN_DEF':         None,        'DATA_TYPE':          12, 
+           'IS_NULLABLE':       'YES',        'SQL_DATA_TYPE':      12, 
+           'COLUMN_SIZE':        30L,         'TABLE_CAT':          None, 
+           'CHAR_OCTET_LENGTH':  30L,         'TABLE_SCHEM':       'PYTHONIC',
+           'NUM_PREC_RADIX':     None,
+           'DECIMAL_DIGITS':     None
+           }
+        """
+        result = []
+        if schema_name is not None:
+          schema_name = self.set_case("DB2_LUW", schema_name)
+        if table_name is not None:
+          table_name = self.set_case("DB2_LUW", table_name)
+
+        try:
+          stmt = ibm_db.columns(self.conn_handler, None, schema_name, table_name)
+          row = ibm_db.fetch_assoc(stmt)
+          i = 0
+          while (row):
+            result.append( row )
+            i += 1    
+            row = ibm_db.fetch_assoc(stmt)
+          ibm_db.free_result(stmt)
+
+          col_names_lower = []
+          if column_names is not None:
+            for name in column_names:
+              col_names_lower.append(name.lower())
+            include_columns = []
+            if column_names and column_names != '':
+              for column in result:
+                if column['COLUMN_NAME'].lower() in col_names_lower:
+                  column['COLUMN_NAME'] = column['COLUMN_NAME'].lower()
+                  include_columns.append(column)
+              result = include_columns
+        except Exception, inst:
+          raise _get_exception(inst)
+
+        return result
+
+
+# Defines a cursor for the driver connection
+class Cursor(object):
+    """This class represents a cursor of the connection.  It can be
+    used to process an SQL statement.
+    """
+    
+    # This method is used to get the description attribute.
+    def __get_description(self):
+        """ If this method has already been called, after executing a select statement,
+            return the stored information in the self.__description.
+        """
+        if self.__description is not None:
+            return self.__description 
+
+        if self.stmt_handler is None:
+            return None
+        self.__description = []
+        
+        try:
+            num_columns = ibm_db.num_fields(self.stmt_handler)
+            """ If the execute statement did not produce a result set return None.
+            """
+            if num_columns == False:
+                self.__description = None
+                return None
+            for column_index in range(num_columns):
+                column_desc = []
+                column_desc.append(ibm_db.field_name(self.stmt_handler,
+                                                          column_index))
+                type = ibm_db.field_type(self.stmt_handler, column_index)
+                type = type.upper()
+                if STRING == type:
+                    column_desc.append(STRING)
+                elif TEXT == type:
+                    column_desc.append(TEXT)
+                elif XML == type:
+                    column_desc.append(XML)
+                elif BINARY == type:
+                    column_desc.append(BINARY)
+                elif NUMBER == type:
+                    column_desc.append(NUMBER)
+                elif BIGINT == type:
+                    column_desc.append(BIGINT) 
+                elif FLOAT == type:
+                    column_desc.append(FLOAT)                
+                elif DECIMAL == type:
+                    column_desc.append(DECIMAL)
+                elif DATE == type:
+                    column_desc.append(DATE)
+                elif TIME == type:
+                    column_desc.append(TIME)
+                elif DATETIME == type:
+                    column_desc.append(DATETIME)
+                elif ROWID == type:
+                    column_desc.append(ROWID)
+
+                column_desc.append(ibm_db.field_display_size(
+                                             self.stmt_handler, column_index))
+
+                column_desc.append(ibm_db.field_display_size(
+                                             self.stmt_handler, column_index))
+                
+                column_desc.append(ibm_db.field_precision(
+                                             self.stmt_handler, column_index))
+
+                column_desc.append(ibm_db.field_scale(self.stmt_handler,
+                                                                column_index))
+                                                                
+                column_desc.append(ibm_db.field_nullable(
+                                             self.stmt_handler, column_index))
+                                             
+                self.__description.append(column_desc)
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+
+        return self.__description
+
+    # This attribute provides the metadata information of the columns  
+    # in the result set produced by the last execute function.  It is
+    # a read only attribute.
+    description = property(fget = __get_description)
+
+    # This method is used to get the rowcount attribute. 
+    def __get_rowcount( self ):
+        return self.__rowcount
+
+    def __iter__( self ):
+        return self
+        
+    def next( self ):
+        row = self.fetchone()
+        if row == None:
+            raise StopIteration
+        return row
+        
+    # This attribute specifies the number of rows the last executeXXX()
+    # produced or affected.  It is a read only attribute. 
+    rowcount = property(__get_rowcount, None, None, "")
+    
+    # This method is used to get the Connection object
+    def __get_connection( self ):
+        return self.__connection
+    
+    # This attribute specifies the connection object.
+    # It is a read only attribute. 
+    connection = property(__get_connection, None, None, "")
+
+    def __init__(self, conn_handler, conn_object=None):
+        """Constructor for Cursor object. It takes ibm_db connection
+        handler as an argument.
+        """
+        
+        # This attribute is used to determine the fetch size for fetchmany
+        # operation. It is a read/write attribute
+        self.arraysize = 1
+        self.__rowcount = -1
+        self._result_set_produced = False
+        self.__description = None
+        self.conn_handler = conn_handler
+        self.stmt_handler = None
+        self._is_scrollable_cursor = False
+        self.__connection = conn_object
+        self.messages = []
+    
+    # This method closes the statement associated with the cursor object.
+    # It takes no argument.
+    def close(self):
+        """This method closes the cursor object.  After this method is 
+        called the cursor object is no longer usable.  It takes no
+        arguments.
+
+        """
+        messages = []
+        if self.conn_handler is None:
+            self.messages.append(ProgrammingError("Cursor cannot be closed; connection is no longer active."))
+            raise self.messages[len(self.messages) - 1]
+        try:
+            return_value = ibm_db.free_stmt(self.stmt_handler)
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+        self.stmt_handler = None
+        self.conn_handler = None
+        self._all_stmt_handlers = None
+        if self.__connection is not None:
+            try:
+                self.__connection._cursor_list.remove(weakref.ref(self))
+            except:
+                pass
+        return return_value
+
+    # helper for calling procedure
+    def _callproc_helper(self, procname, parameters=None):
+        if parameters is not None:
+            buff = []
+            CONVERT_STR = (buffer)
+            # Convert date/time and binary objects to string for 
+            # inserting into the database. 
+            for param in parameters:
+                if isinstance(param, CONVERT_STR):
+                    param = str(param)
+                buff.append(param)
+            parameters = tuple(buff)
+            
+            try:
+                result = ibm_db.callproc(self.conn_handler, procname,parameters)
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+        else:
+            try:
+                result = ibm_db.callproc(self.conn_handler, procname)
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+        return result
+       
+
+    def callproc(self, procname, parameters=None):
+        """This method can be used to execute a stored procedure.  
+        It takes the name of the stored procedure and the parameters to
+        the stored procedure as arguments. 
+
+        """
+        self.messages = []
+        if not isinstance(procname, basestring):
+            self.messages.append(InterfaceError("callproc expects the first argument to be of type String or Unicode."))
+            raise self.messages[len(self.messages) - 1]
+        if parameters is not None:
+            if not isinstance(parameters, (types.ListType, types.TupleType)):
+                self.messages.append(InterfaceError("callproc expects the second argument to be of type list or tuple."))
+                raise self.messages[len(self.messages) - 1]
+        result = self._callproc_helper(procname, parameters)
+        return_value = None
+        self.__description = None
+        self._all_stmt_handlers = []
+        if isinstance(result, types.TupleType):
+            self.stmt_handler = result[0]
+            return_value = result[1:]
+        else:
+            self.stmt_handler = result
+        self._result_set_produced = True
+        return return_value
+
+    # Helper for preparing an SQL statement. 
+    def _prepare_helper(self, operation, parameters=None):
+        try:
+            ibm_db.free_stmt(self.stmt_handler)
+        except:
+            pass
+
+        try:
+            self.stmt_handler = ibm_db.prepare(self.conn_handler, operation)
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+
+    # Helper for preparing an SQL statement.
+    def _set_cursor_helper(self):
+        if (ibm_db.get_option(self.stmt_handler, ibm_db.SQL_ATTR_CURSOR_TYPE, 0) != ibm_db.SQL_CURSOR_FORWARD_ONLY):
+            self._is_scrollable_cursor = True
+        else:
+            self._is_scrollable_cursor = False
+        self._result_set_produced = False
+        try:
+            num_columns = ibm_db.num_fields(self.stmt_handler)
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+        if not num_columns:
+            return True
+        self._result_set_produced = True
+
+        return True
+
+    # Helper for executing an SQL statement.
+    def _execute_helper(self, parameters=None):
+        if parameters is not None:
+            buff = []
+            CONVERT_STR = (buffer)
+            # Convert date/time and binary objects to string for 
+            # inserting into the database. 
+            for param in parameters:
+                if isinstance(param, CONVERT_STR):
+                    param = str(param)
+                buff.append(param)
+            parameters = tuple(buff)
+            try:                
+                return_value = ibm_db.execute(self.stmt_handler, parameters)
+                if not return_value:
+                    if ibm_db.conn_errormsg() is not None:
+                        self.messages.append(Error(str(ibm_db.conn_errormsg())))
+                        raise self.messages[len(self.messages) - 1]
+                    if ibm_db.stmt_errormsg() is not None:
+                        self.messages.append(Error(str(ibm_db.stmt_errormsg())))
+                        raise self.messages[len(self.messages) - 1]
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+        else:
+            try:
+                return_value = ibm_db.execute(self.stmt_handler)
+                if not return_value:
+                    if ibm_db.conn_errormsg() is not None:
+                        self.messages.append(Error(str(ibm_db.conn_errormsg())))
+                        raise self.messages[len(self.messages) - 1]
+                    if ibm_db.stmt_errormsg() is not None:
+                        self.messages.append(Error(str(ibm_db.stmt_errormsg())))
+                        raise self.messages[len(self.messages) - 1]
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+        return return_value
+
+    # This method is used to set the rowcount after executing an SQL 
+    # statement. 
+    def _set_rowcount(self):
+        self.__rowcount = -1
+        if not self._result_set_produced:
+            try:
+                counter = ibm_db.num_rows(self.stmt_handler)
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+            self.__rowcount = counter
+        elif self._is_scrollable_cursor:
+            try:
+                counter = ibm_db.get_num_result(self.stmt_handler)
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                raise self.messages[len(self.messages) - 1]
+            if counter >= 0:
+                self.__rowcount = counter
+        return True
+
+    # Retrieves the last generated identity value from the DB2 catalog
+    def _get_last_identity_val(self):
+        """
+        The result of the IDENTITY_VAL_LOCAL function is not affected by the following:
+         - A single row INSERT statement with a VALUES clause for a table without an
+        identity column
+         - A multiple row INSERT statement with a VALUES clause
+         - An INSERT statement with a fullselect
+
+        """
+        operation = 'SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1'
+        try:
+            stmt_handler = ibm_db.prepare(self.conn_handler, operation)
+            if ibm_db.execute(stmt_handler):
+                row = ibm_db.fetch_assoc(stmt_handler)
+                if row['1'] is not None:
+                  identity_val = int(row['1'])
+                else:
+                  identity_val = None
+            else:
+                if ibm_db.conn_errormsg() is not None:
+                    self.messages.append(Error(str(ibm_db.conn_errormsg())))
+                    raise self.messages[len(self.messages) - 1]
+                if ibm_db.stmt_errormsg() is not None:
+                    self.messages.append(Error(str(ibm_db.stmt_errormsg())))
+                    raise self.messages[len(self.messages) - 1]
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+        return identity_val
+    last_identity_val = property(_get_last_identity_val, None, None, "")
+
+    def execute(self, operation, parameters=None):
+        """
+        This method can be used to prepare and execute an SQL 
+        statement.  It takes the SQL statement(operation) and a 
+        sequence of values to substitute for the parameter markers in  
+        the SQL statement as arguments.
+        """
+        self.messages = []
+        if not isinstance(operation, basestring):
+            self.messages.append(InterfaceError("execute expects the first argument [%s] to be of type String or Unicode." % operation ))
+            raise self.messages[len(self.messages) - 1]
+        if parameters is not None:
+            if not isinstance(parameters, (types.ListType, types.TupleType, types.DictType)):
+                self.messages.append(InterfaceError("execute parameters argument should be sequence."))
+                raise self.messages[len(self.messages) - 1]
+        self.__description = None
+        self._all_stmt_handlers = []
+        self._prepare_helper(operation)
+        self._set_cursor_helper()
+        self._execute_helper(parameters)
+        return self._set_rowcount()
+
+    def executemany(self, operation, seq_parameters):
+        """
+        This method can be used to prepare, and then execute an SQL 
+        statement many times.  It takes the SQL statement(operation) 
+        and sequence of sequence of values to substitute for the 
+        parameter markers in the SQL statement as its argument.
+        """
+        self.messages = []
+        if not isinstance(operation, basestring):
+            self.messages.append(InterfaceError("executemany expects the first argument to be of type String or Unicode."))
+            raise self.messages[len(self.messages) - 1]
+        if seq_parameters is None:
+            self.messages.append(InterfaceError("executemany expects a not None seq_parameters value"))
+            raise self.messages[len(self.messages) - 1]
+
+        if not isinstance(seq_parameters, (types.ListType, types.TupleType)):
+            self.messages.append(InterfaceError("executemany expects the second argument to be of type list or tuple of sequence."))
+            raise self.messages[len(self.messages) - 1]
+        
+        CONVERT_STR = (buffer)
+        # Convert date/time and binary objects to string for
+        # inserting into the database.
+        buff = []
+        seq_buff = []
+        for index in range(len(seq_parameters)):
+            buff = []
+            for param in seq_parameters[index]:
+                if isinstance(param, CONVERT_STR):
+                    param = str(param)
+                buff.append(param)
+            seq_buff.append(tuple(buff))
+        seq_parameters = tuple(seq_buff)
+        self.__description = None
+        self._all_stmt_handlers = []
+        self.__rowcount = -1
+        self._prepare_helper(operation)
+        try:
+            autocommit = ibm_db.autocommit(self.conn_handler)
+            if autocommit !=  0:
+                ibm_db.autocommit(self.conn_handler, 0)
+            self.__rowcount = ibm_db.execute_many(self.stmt_handler, seq_parameters)
+            if autocommit != 0:
+                ibm_db.commit(self.conn_handler)
+                ibm_db.autocommit(self.conn_handler, autocommit)
+            if self.__rowcount == -1:
+                if ibm_db.conn_errormsg() is not None:
+                    self.messages.append(Error(str(ibm_db.conn_errormsg())))
+                    raise self.messages[len(self.messages) - 1]
+                if ibm_db.stmt_errormsg() is not None:
+                    self.messages.append(Error(str(ibm_db.stmt_errormsg())))
+                    raise self.messages[len(self.messages) - 1]   
+        except Exception, inst:
+            self._set_rowcount()
+            self.messages.append(Error(inst))
+            raise self.messages[len(self.messages) - 1]
+        return True
+
+    def _fetch_helper(self, fetch_size=-1):
+        """
+        This method is a helper function for fetching fetch_size number of 
+        rows, after executing an SQL statement which produces a result set.
+        It takes the number of rows to fetch as an argument.
+        If this is not provided it fetches all the remaining rows.
+        """
+        if self.stmt_handler is None:
+            self.messages.append(ProgrammingError("Please execute an SQL statement in order to get a row from result set."))
+            raise self.messages[len(self.messages) - 1]
+        if self._result_set_produced == False:
+            self.messages.append(ProgrammingError("The last call to execute did not produce any result set."))
+            raise  self.messages[len(self.messages) - 1]
+        row_list = []
+        rows_fetched = 0
+        while (fetch_size == -1) or \
+              (fetch_size != -1 and rows_fetched < fetch_size):
+            try:
+                row = ibm_db.fetch_tuple(self.stmt_handler)
+            except Exception, inst:
+                self.messages.append(_get_exception(inst))
+                if len(row_list) == 0:
+                    raise self.messages[len(self.messages) - 1]
+                else:
+                    return row_list
+            
+            if row != False:
+                row_list.append(self._fix_return_data_type(row))
+            else:
+                return row_list
+            rows_fetched = rows_fetched + 1
+        return row_list
+
+    def fetchone(self):
+        """This method fetches one row from the database, after 
+        executing an SQL statement which produces a result set.
+        
+        """
+        row_list = self._fetch_helper(1)
+        if len(row_list) == 0:
+            return None
+        else:
+            return row_list[0]
+
+    def fetchmany(self, size=0):
+        """This method fetches size number of rows from the database,
+        after executing an SQL statement which produces a result set.
+        It takes the number of rows to fetch as an argument.  If this 
+        is not provided it fetches self.arraysize number of rows. 
+        """
+        if not isinstance(size, (int, long)):
+            self.messages.append(InterfaceError( "fetchmany expects argument type int or long."))
+            raise self.messages[len(self.messages) - 1]
+        if size == 0:
+            size = self.arraysize
+        if size < -1:
+            self.messages.append(ProgrammingError("fetchmany argument size expected to be positive."))
+            raise self.messages[len(self.messages) - 1]
+
+        return self._fetch_helper(size)
+
+    def fetchall(self):
+        """This method fetches all remaining rows from the database,
+        after executing an SQL statement which produces a result set.
+        """
+        return self._fetch_helper()
+
+    def nextset(self):
+        """This method can be used to get the next result set after 
+        executing a stored procedure, which produces multiple result sets.
+        """
+        self.messages = []
+        if self.stmt_handler is None:
+            self.messages.append(ProgrammingError("Please execute an SQL statement in order to get result sets."))
+            raise self.messages[len(self.messages) - 1]
+        if self._result_set_produced == False:
+            self.messages.append(ProgrammingError("The last call to execute did not produce any result set."))
+            raise self.messages[len(self.messages) - 1]
+        try:
+            # Store all the stmt handler that were created.  The 
+            # handler was the one created by the execute method.  It 
+            # should be used to get next result set. 
+            self.__description = None
+            self._all_stmt_handlers.append(self.stmt_handler)
+            self.stmt_handler = ibm_db.next_result(self._all_stmt_handlers[0])
+        except Exception, inst:
+            self.messages.append(_get_exception(inst))
+            raise self.messages[len(self.messages) - 1]
+
+        if self.stmt_handler == False:
+            self.stmt_handler = None
+        if self.stmt_handler == None:
+            return None 
+        return True
+
+    def setinputsizes(self, sizes):
+        """This method currently does nothing."""
+        pass
+
+    def setoutputsize(self, size, column=-1):
+        """This method currently does nothing."""
+        pass
+
+    # This method is used to convert a string representing decimal 
+    # and binary data in a row tuple fetched from the database 
+    # to decimal and binary objects, for returning it to the user.
+    def _fix_return_data_type(self, row):
+        row_list = None
+        for index in range(len(row)):
+            if row[index] is not None:
+                type = ibm_db.field_type(self.stmt_handler, index)
+                type = type.upper()
+
+                try:
+                    if type == 'BLOB':
+                        if row_list is None:
+                            row_list = list(row)
+                        row_list[index] = buffer(row[index])
+
+                    elif type == 'DECIMAL':
+                        if row_list is None:
+                            row_list = list(row)
+                        row_list[index] = decimal.Decimal(str(row[index]).replace(",", "."))    
+
+                except Exception, inst:
+                    self.messages.append(DataError("Data type format error: "+ str(inst)))
+                    raise self.messages[len(self.messages) - 1]
+        if row_list is None:
+            return row
+        else:
+            return tuple(row_list)
diff -pruN 0.3.0-3/ibm_db.h 2.0.5-0ubuntu2/ibm_db.h
--- 0.3.0-3/ibm_db.h	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db.h	2013-10-23 18:30:34.000000000 +0000
@@ -0,0 +1,374 @@
+/*
++----------------------------------------------------------------------+
+|  Licensed Materials - Property of IBM                                |
+|                                                                      |
+| (C) Copyright IBM Corporation 2006-2013.                             |
++----------------------------------------------------------------------+
+| Authors: Manas Dadarkar, Abhigyan Agrawal, Rahul Priyadarshi         |
+|                                                                      | 
++----------------------------------------------------------------------+
+*/
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sqlcli1.h>
+#include <Python.h> 
+#include <structmember.h>
+
+/*
+ * Combability changes for Python 3
+ */
+
+/* defining string methods */
+#if  PY_MAJOR_VERSION < 3
+#define PyBytes_Check			PyString_Check
+#define StringOBJ_FromASCII(str)	PyString_FromString(str)
+#define PyBytes_AsString		PyString_AsString
+#define PyBytes_FromStringAndSize	PyString_FromStringAndSize
+#define StringObj_Format		PyString_Format
+#define StringObj_Size			PyString_Size
+#define PyObject_CheckBuffer		PyObject_CheckReadBuffer
+#define PyVarObject_HEAD_INIT(type, size) \
+					PyObject_HEAD_INIT(type) size,
+#define Py_TYPE(ob)			(((PyObject*)(ob))->ob_type)
+#define MOD_RETURN_ERROR		
+#define MOD_RETURN_VAL(mod)			
+#define INIT_ibm_db			initibm_db
+#else
+#define PyInt_Check			PyLong_Check
+#define PyInt_FromLong          	PyLong_FromLong
+#define PyInt_AsLong            	PyLong_AsLong
+#define PyInt_AS_LONG			PyLong_AsLong
+#define StringOBJ_FromASCII(str)	PyUnicode_DecodeASCII(str, strlen(str), NULL)
+#define PyString_Check			PyUnicode_Check
+#define StringObj_Format		PyUnicode_Format
+#define StringObj_Size			PyUnicode_GET_SIZE
+#define MOD_RETURN_ERROR		NULL
+#define MOD_RETURN_VAL(mod)		mod
+#define INIT_ibm_db PyInit_ibm_db
+#endif
+
+#define NUM2LONG(data) PyInt_AsLong(data)
+#define STR2CSTR(data) PyString_AsString(data)
+#define NIL_P(ptr) (ptr == NULL)
+#define ALLOC_N(type, n) PyMem_New(type, n)
+#define ALLOC(type) PyMem_New(type, 1)
+/*
+#define Qtrue Py_INCREF(Py_True); return Py_True
+#define Qfalse Py_INCREF(Py_False); return Py_False
+*/
+#define TYPE(data) _python_get_variable_type(data)
+
+/* Python types */
+#define PYTHON_FIXNUM 1
+#define PYTHON_TRUE 2
+#define PYTHON_FALSE 3
+#define PYTHON_FLOAT 4
+#define PYTHON_STRING 5
+#define PYTHON_NIL 6
+#define PYTHON_UNICODE 7
+#define PYTHON_DECIMAL 8
+#define PYTHON_COMPLEX 9
+#define PYTHON_DATE 10
+#define PYTHON_TIME 11
+#define PYTHON_TIMESTAMP 12
+
+#define ENABLE_NUMERIC_LITERALS 1 /* Enable CLI numeric literals */
+
+#ifndef SQL_XML
+#define SQL_XML -370
+#endif
+
+#ifndef SQL_DECFLOAT
+#define SQL_DECFLOAT -360
+#endif
+
+#ifndef SQL_ATTR_REPLACE_QUOTED_LITERALS
+#define SQL_ATTR_REPLACE_QUOTED_LITERALS 2586
+#endif
+
+/* needed for backward compatibility (SQL_ATTR_ROWCOUNT_PREFETCH not defined prior to DB2 9.5.0.3) */
+#ifndef SQL_ATTR_ROWCOUNT_PREFETCH
+#define SQL_ATTR_ROWCOUNT_PREFETCH 2592
+#define SQL_ROWCOUNT_PREFETCH_OFF   0
+#define SQL_ROWCOUNT_PREFETCH_ON    1
+#endif
+
+#ifndef SQL_ATTR_USE_TRUSTED_CONTEXT
+#define SQL_ATTR_USE_TRUSTED_CONTEXT 2561
+#define SQL_ATTR_TRUSTED_CONTEXT_USERID 2562
+#define SQL_ATTR_TRUSTED_CONTEXT_PASSWORD 2563
+#endif
+
+/* CLI v9.1 FP3 and below has a SQL_ATTR_REPLACE_QUOTED_LITERALS value of 116
+* We need to support both the new and old values for compatibility with older
+* versions of CLI. CLI v9.1 FP4 and beyond changed this value to 2586
+*/
+#define SQL_ATTR_REPLACE_QUOTED_LITERALS_OLDVALUE 116
+
+/* If using a DB2 CLI version which doesn't support this functionality, 
+* explicitly define this. We will rely on DB2 CLI to throw an error when 
+* SQLGetStmtAttr is called.
+*/
+
+#ifndef SQL_ATTR_GET_GENERATED_VALUE 
+#define SQL_ATTR_GET_GENERATED_VALUE 2578
+#endif
+
+/* strlen(" SQLCODE=") added in */
+#define DB2_MAX_ERR_MSG_LEN (SQL_MAX_MESSAGE_LENGTH + SQL_SQLSTATE_SIZE + 10)
+
+/* Default initail LOB buffer size */
+#define INIT_BUFSIZ 10240
+ 
+/* Used in _python_parse_options */
+#define DB2_ERRMSG 1
+#define DB2_ERR 2
+
+/*Used to decide if LITERAL REPLACEMENT should be turned on or not*/
+#define SET_QUOTED_LITERAL_REPLACEMENT_ON  1
+#define SET_QUOTED_LITERAL_REPLACEMENT_OFF 0
+
+/* DB2 instance environment variable */
+#define DB2_VAR_INSTANCE "DB2INSTANCE="
+
+/******** Makes code compatible with the options used by the user */
+#define BINARY 1
+#define CONVERT 2
+#define PASSTHRU 3
+#define PARAM_FILE 11
+
+#ifdef PASE
+#define SQL_IS_INTEGER 0
+#define SQL_BEST_ROWID 0
+#define SQLLEN long
+#define SQLFLOAT double
+#endif
+
+/* fetch */
+#define FETCH_INDEX	0x01
+#define FETCH_ASSOC	0x02
+#define FETCH_BOTH	0x03
+
+/* Change column case */
+#define ATTR_CASE 3271982
+#define CASE_NATURAL 0
+#define CASE_LOWER 1
+#define CASE_UPPER 2
+
+/* maximum sizes */
+#define USERID_LEN 16
+#define ACCTSTR_LEN 200
+#define APPLNAME_LEN 32
+#define WRKSTNNAME_LEN 18
+
+/*
+ *  * Enum for Decfloat Rounding Modes
+ *   * */
+enum
+{
+        ROUND_HALF_EVEN = 0,
+        ROUND_HALF_UP,
+        ROUND_DOWN,
+        ROUND_CEILING,
+        ROUND_FLOOR
+}ROUNDING_MODE;
+
+/*
+* Declare any global variables you may need between the BEGIN
+* and END macros here:
+*/
+struct _ibm_db_globals {
+	int  bin_mode;
+	char __python_conn_err_msg[DB2_MAX_ERR_MSG_LEN];
+	char __python_conn_err_state[SQL_SQLSTATE_SIZE + 1];
+	char __python_stmt_err_msg[DB2_MAX_ERR_MSG_LEN];
+	char __python_stmt_err_state[SQL_SQLSTATE_SIZE + 1];
+#ifdef PASE /* i5/OS ease of use turn off commit */
+	long i5_allow_commit;
+#endif /* PASE */
+};
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *DRIVER_NAME;
+	PyObject *DRIVER_VER;
+	PyObject *DATA_SOURCE_NAME;
+	PyObject *DRIVER_ODBC_VER;
+	PyObject *ODBC_VER;
+	PyObject *ODBC_SQL_CONFORMANCE;
+	PyObject *APPL_CODEPAGE;
+	PyObject *CONN_CODEPAGE;
+} le_client_info;
+
+static PyMemberDef le_client_info_members[] = {
+	{"DRIVER_NAME", T_OBJECT_EX, offsetof(le_client_info, DRIVER_NAME), 0, "Driver Name"},
+	{"DRIVER_VER", T_OBJECT_EX, offsetof(le_client_info, DRIVER_VER), 0, "Driver Version"},
+	{"DATA_SOURCE_NAME", T_OBJECT_EX, offsetof(le_client_info, DATA_SOURCE_NAME), 0, "Data Source Name"},
+	{"DRIVER_ODBC_VER", T_OBJECT_EX, offsetof(le_client_info, DRIVER_ODBC_VER), 0, "Driver ODBC Version"},
+	{"ODBC_VER", T_OBJECT_EX, offsetof(le_client_info, ODBC_VER), 0, "ODBC Version"},
+	{"ODBC_SQL_CONFORMANCE", T_OBJECT_EX, offsetof(le_client_info, ODBC_SQL_CONFORMANCE), 0, "ODBC SQL Conformance"},
+	{"APPL_CODEPAGE", T_OBJECT_EX, offsetof(le_client_info, APPL_CODEPAGE), 0, "Application Codepage"},
+	{"CONN_CODEPAGE", T_OBJECT_EX, offsetof(le_client_info, CONN_CODEPAGE), 0, "Connection Codepage"},
+	{NULL} /* Sentinel */
+};
+
+static PyTypeObject client_infoType = {
+		PyVarObject_HEAD_INIT(NULL, 0)
+		"ibm_db.IBM_DBClientInfo", /*tp_name*/
+		sizeof(le_client_info), /*tp_basicsize*/
+		0,                                     /*tp_itemsize*/
+		0,                                     /*tp_dealloc*/
+		0,                                     /*tp_print*/
+		0,                                     /*tp_getattr*/
+		0,                                     /*tp_setattr*/
+		0,                                     /*tp_compare*/
+		0,                                     /*tp_repr*/
+		0,                                     /*tp_as_number*/
+		0,                                     /*tp_as_sequence */
+		0,                                     /*tp_as_mapping  */
+		0,                                     /*tp_hash */
+		0,                                     /*tp_call*/
+		0,                                     /*tp_str*/
+		0,                                     /*tp_getattro    */
+		0,                                     /*tp_setattro    */
+		0,                                     /*tp_as_buffer   */
+		Py_TPFLAGS_DEFAULT,            /*tp_flags                   */
+		"IBM DataServer Client Information object", /* tp_doc       */
+		0,                                     /* tp_traverse       */
+		0,                                     /* tp_clear          */
+		0,                                     /* tp_richcompare    */
+		0,                                     /* tp_weaklistoffset */
+		0,                                     /* tp_iter           */
+		0,                                     /* tp_iternext       */
+		0,   /* tp_methods            */
+		le_client_info_members,                /* tp_members        */
+		0,                                     /* tp_getset         */
+		0,                                     /* tp_base           */
+		0,                                     /* tp_dict           */
+		0,                                     /* tp_descr_get      */
+		0,                                     /* tp_descr_set      */
+		0,                                     /* tp_dictoffset     */
+		0,                                     /* tp_init           */
+};
+
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *DBMS_NAME;
+	PyObject *DBMS_VER;
+	PyObject *DB_CODEPAGE;
+	PyObject *DB_NAME;
+	PyObject *INST_NAME;
+	PyObject *SPECIAL_CHARS;
+	PyObject *KEYWORDS;
+	PyObject *DFT_ISOLATION;
+	PyObject *ISOLATION_OPTION;
+	PyObject *SQL_CONFORMANCE;
+	PyObject *PROCEDURES;
+	PyObject *IDENTIFIER_QUOTE_CHAR;
+	PyObject *LIKE_ESCAPE_CLAUSE;
+	PyObject *MAX_COL_NAME_LEN;
+	PyObject *MAX_IDENTIFIER_LEN;
+	PyObject *MAX_INDEX_SIZE;
+	PyObject *MAX_PROC_NAME_LEN;
+	PyObject *MAX_ROW_SIZE;
+	PyObject *MAX_SCHEMA_NAME_LEN;
+	PyObject *MAX_STATEMENT_LEN;
+	PyObject *MAX_TABLE_NAME_LEN;
+	PyObject *NON_NULLABLE_COLUMNS;
+} le_server_info;
+
+
+static PyMemberDef le_server_info_members[] = {
+	{"DBMS_NAME", T_OBJECT_EX, offsetof(le_server_info, DBMS_NAME), 0, "Database Server Name"},
+	{"DBMS_VER", T_OBJECT_EX, offsetof(le_server_info, DBMS_VER), 0, "Database Server Version"},
+	{"DB_CODEPAGE", T_OBJECT_EX, offsetof(le_server_info, DB_CODEPAGE), 0, "Database Codepage"},
+	{"DB_NAME", T_OBJECT_EX, offsetof(le_server_info, DB_NAME), 0, "Database Name"},
+	{"INST_NAME", T_OBJECT_EX, offsetof(le_server_info, INST_NAME), 0, "Database Server Instance Name"},
+	{"SPECIAL_CHARS", T_OBJECT_EX, offsetof(le_server_info, SPECIAL_CHARS), 0, "Characters that can be used in an identifier"},
+	{"KEYWORDS", T_OBJECT_EX, offsetof(le_server_info, KEYWORDS), 0, "Reserved words"},
+	{"DFT_ISOLATION", T_OBJECT_EX, offsetof(le_server_info, DFT_ISOLATION), 0, "Default Server Isolation"},
+	{"ISOLATION_OPTION", T_OBJECT_EX, offsetof(le_server_info, ISOLATION_OPTION), 0, "Supported Isolation Levels "},
+	{"SQL_CONFORMANCE", T_OBJECT_EX, offsetof(le_server_info, SQL_CONFORMANCE), 0, "ANSI/ISO SQL-92 Specification Conformance"},
+	{"PROCEDURES", T_OBJECT_EX, offsetof(le_server_info, PROCEDURES), 0, "True if CALL statement is supported by database server"},
+	{"IDENTIFIER_QUOTE_CHAR", T_OBJECT_EX, offsetof(le_server_info, IDENTIFIER_QUOTE_CHAR), 0, "Character to quote an identifier"},
+	{"LIKE_ESCAPE_CLAUSE", T_OBJECT_EX, offsetof(le_server_info, LIKE_ESCAPE_CLAUSE), 0, "TRUE if the database server supports the use of % and _ wildcard characters"},
+	{"MAX_COL_NAME_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_COL_NAME_LEN), 0, "Maximum length of column name supported by the database server in bytes"},
+	{"MAX_IDENTIFIER_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_IDENTIFIER_LEN), 0, "Maximum length of an SQL identifier supported by the database server, expressed in characters"},
+	{"MAX_INDEX_SIZE", T_OBJECT_EX, offsetof(le_server_info, MAX_INDEX_SIZE), 0, "Maximum size of columns combined in an index supported by the database server, expressed in bytes"},
+	{"MAX_PROC_NAME_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_PROC_NAME_LEN), 0, "Maximum length of a procedure name supported by the database server, expressed in bytes"},
+	{"MAX_ROW_SIZE", T_OBJECT_EX, offsetof(le_server_info, MAX_ROW_SIZE), 0, "Maximum length of a row in a base table supported by the database server, expressed in bytes"},
+	{"MAX_SCHEMA_NAME_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_SCHEMA_NAME_LEN), 0, "Maximum length of a schema name supported by the database server, expressed in bytes"},
+	{"MAX_STATEMENT_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_STATEMENT_LEN), 0, "Maximum length of an SQL statement supported by the database server, expressed in bytes"},
+	{"MAX_TABLE_NAME_LEN", T_OBJECT_EX, offsetof(le_server_info, MAX_TABLE_NAME_LEN), 0, "Maximum length of a table name supported by the database server, expressed in bytes"},
+	{"NON_NULLABLE_COLUMNS", T_OBJECT_EX, offsetof(le_server_info, NON_NULLABLE_COLUMNS), 0, "Connectionf the database server supports columns that can be defined as NOT NULL "},
+	{NULL} /* Sentinel */
+};
+
+static PyTypeObject server_infoType = {
+		PyVarObject_HEAD_INIT(NULL, 0)
+		"ibm_db.IBM_DBServerInfo", /*tp_name*/
+		sizeof(le_server_info), /*tp_basicsize*/
+		0,                                     /*tp_itemsize*/
+		0,                                     /*tp_dealloc*/
+		0,                                     /*tp_print*/
+		0,                                     /*tp_getattr*/
+		0,                                     /*tp_setattr*/
+		0,                                     /*tp_compare*/
+		0,                                     /*tp_repr*/
+		0,                                     /*tp_as_number*/
+		0,                                     /*tp_as_sequence */
+		0,                                     /*tp_as_mapping  */
+		0,                                     /*tp_hash */
+		0,                                     /*tp_call*/
+		0,                                     /*tp_str*/
+		0,                                     /*tp_getattro    */
+		0,                                     /*tp_setattro    */
+		0,                                     /*tp_as_buffer   */
+		Py_TPFLAGS_DEFAULT,            /*tp_flags                   */
+		"IBM DataServer Information object", /* tp_doc       */
+		0,                                     /* tp_traverse       */
+		0,                                     /* tp_clear          */
+		0,                                     /* tp_richcompare    */
+		0,                                     /* tp_weaklistoffset */
+		0,                                     /* tp_iter           */
+		0,                                     /* tp_iternext       */
+		0,   /* tp_methods            */
+		le_server_info_members,                /* tp_members        */
+		0,                                     /* tp_getset         */
+		0,                                     /* tp_base           */
+		0,                                     /* tp_dict           */
+		0,                                     /* tp_descr_get      */
+		0,                                     /* tp_descr_set      */
+		0,                                     /* tp_dictoffset     */
+		0,                                     /* tp_init           */
+};
+
+
+
+/*
+* TODO: make this threadsafe
+*/
+
+#define IBM_DB_G(v) (ibm_db_globals->v)
+
+static void _python_ibm_db_clear_stmt_err_cache(void);
+static void _python_ibm_db_clear_conn_err_cache(void);
+static int _python_get_variable_type(PyObject *variable_value);
+
+#ifdef CLI_DBC_SERVER_TYPE_DB2LUW
+#ifdef SQL_ATTR_DECFLOAT_ROUNDING_MODE
+/* Declare _python_ibm_db_set_decfloat_rounding_mode_client() */
+static int _python_ibm_db_set_decfloat_rounding_mode_client(SQLHANDLE hdbc);
+#endif
+#endif
+
+/* For compatibility with python < 2.5 */
+#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+typedef int Py_ssize_t;
+#define PY_SSIZE_T_MAX INT_MAX
+#define PY_SSIZE_T_MIN INT_MIN
+#endif
+
diff -pruN 0.3.0-3/ibm_db_sa/base.py 2.0.5-0ubuntu2/ibm_db_sa/base.py
--- 0.3.0-3/ibm_db_sa/base.py	2013-02-28 12:20:17.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/base.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,646 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi               |
-# | Contributors: Jaimy Azle, Mike Bayer                                     |
-# +--------------------------------------------------------------------------+
-"""Support for IBM DB2 database
-
-"""
-import datetime, re
-from sqlalchemy import types as sa_types
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import util
-from sqlalchemy.sql import compiler
-from sqlalchemy.engine import default
-
-from . import reflection as ibm_reflection
-
-from sqlalchemy.types import BLOB, CHAR, CLOB, DATE, DATETIME, INTEGER,\
-    SMALLINT, BIGINT, DECIMAL, NUMERIC, REAL, TIME, TIMESTAMP,\
-    VARCHAR
-
-
-# as documented from:
-# http://publib.boulder.ibm.com/infocenter/db2luw/v9/index.jsp?topic=/com.ibm.db2.udb.doc/admin/r0001095.htm
-RESERVED_WORDS = set(
-   ['activate', 'disallow', 'locale', 'result', 'add', 'disconnect', 'localtime',
-    'result_set_locator', 'after', 'distinct', 'localtimestamp', 'return', 'alias',
-    'do', 'locator', 'returns', 'all', 'double', 'locators', 'revoke', 'allocate', 'drop',
-    'lock', 'right', 'allow', 'dssize', 'lockmax', 'rollback', 'alter', 'dynamic',
-    'locksize', 'routine', 'and', 'each', 'long', 'row', 'any', 'editproc', 'loop',
-    'row_number', 'as', 'else', 'maintained', 'rownumber', 'asensitive', 'elseif',
-    'materialized', 'rows', 'associate', 'enable', 'maxvalue', 'rowset', 'asutime',
-    'encoding', 'microsecond', 'rrn', 'at', 'encryption', 'microseconds', 'run',
-    'attributes', 'end', 'minute', 'savepoint', 'audit', 'end-exec', 'minutes', 'schema',
-    'authorization', 'ending', 'minvalue', 'scratchpad', 'aux', 'erase', 'mode', 'scroll',
-    'auxiliary', 'escape', 'modifies', 'search', 'before', 'every', 'month', 'second',
-    'begin', 'except', 'months', 'seconds', 'between', 'exception', 'new', 'secqty',
-    'binary', 'excluding', 'new_table', 'security', 'bufferpool', 'exclusive',
-    'nextval', 'select', 'by', 'execute', 'no', 'sensitive', 'cache', 'exists', 'nocache',
-    'sequence', 'call', 'exit', 'nocycle', 'session', 'called', 'explain', 'nodename',
-    'session_user', 'capture', 'external', 'nodenumber', 'set', 'cardinality',
-    'extract', 'nomaxvalue', 'signal', 'cascaded', 'fenced', 'nominvalue', 'simple',
-    'case', 'fetch', 'none', 'some', 'cast', 'fieldproc', 'noorder', 'source', 'ccsid',
-    'file', 'normalized', 'specific', 'char', 'final', 'not', 'sql', 'character', 'for',
-    'null', 'sqlid', 'check', 'foreign', 'nulls', 'stacked', 'close', 'free', 'numparts',
-    'standard', 'cluster', 'from', 'obid', 'start', 'collection', 'full', 'of', 'starting',
-    'collid', 'function', 'old', 'statement', 'column', 'general', 'old_table', 'static',
-    'comment', 'generated', 'on', 'stay', 'commit', 'get', 'open', 'stogroup', 'concat',
-    'global', 'optimization', 'stores', 'condition', 'go', 'optimize', 'style', 'connect',
-    'goto', 'option', 'substring', 'connection', 'grant', 'or', 'summary', 'constraint',
-    'graphic', 'order', 'synonym', 'contains', 'group', 'out', 'sysfun', 'continue',
-    'handler', 'outer', 'sysibm', 'count', 'hash', 'over', 'sysproc', 'count_big',
-    'hashed_value', 'overriding', 'system', 'create', 'having', 'package',
-    'system_user', 'cross', 'hint', 'padded', 'table', 'current', 'hold', 'pagesize',
-    'tablespace', 'current_date', 'hour', 'parameter', 'then', 'current_lc_ctype',
-    'hours', 'part', 'time', 'current_path', 'identity', 'partition', 'timestamp',
-    'current_schema', 'if', 'partitioned', 'to', 'current_server', 'immediate',
-    'partitioning', 'transaction', 'current_time', 'in', 'partitions', 'trigger',
-    'current_timestamp', 'including', 'password', 'trim', 'current_timezone',
-    'inclusive', 'path', 'type', 'current_user', 'increment', 'piecesize', 'undo',
-    'cursor', 'index', 'plan', 'union', 'cycle', 'indicator', 'position', 'unique', 'data',
-    'inherit', 'precision', 'until', 'database', 'inner', 'prepare', 'update',
-    'datapartitionname', 'inout', 'prevval', 'usage', 'datapartitionnum',
-    'insensitive', 'primary', 'user', 'date', 'insert', 'priqty', 'using', 'day',
-    'integrity', 'privileges', 'validproc', 'days', 'intersect', 'procedure', 'value',
-    'db2general', 'into', 'program', 'values', 'db2genrl', 'is', 'psid', 'variable',
-    'db2sql', 'isobid', 'query', 'variant', 'dbinfo', 'isolation', 'queryno', 'vcat',
-    'dbpartitionname', 'iterate', 'range', 'version', 'dbpartitionnum', 'jar', 'rank',
-    'view', 'deallocate', 'java', 'read', 'volatile', 'declare', 'join', 'reads', 'volumes',
-    'default', 'key', 'recovery', 'when', 'defaults', 'label', 'references', 'whenever',
-    'definition', 'language', 'referencing', 'where', 'delete', 'lateral', 'refresh',
-    'while', 'dense_rank', 'lc_ctype', 'release', 'with', 'denserank', 'leave', 'rename',
-    'without', 'describe', 'left', 'repeat', 'wlm', 'descriptor', 'like', 'reset', 'write',
-    'deterministic', 'linktype', 'resignal', 'xmlelement', 'diagnostics', 'local',
-    'restart', 'year', 'disable', 'localdate', 'restrict', 'years', '', 'abs', 'grouping',
-    'regr_intercept', 'are', 'int', 'regr_r2', 'array', 'integer', 'regr_slope',
-    'asymmetric', 'intersection', 'regr_sxx', 'atomic', 'interval', 'regr_sxy', 'avg',
-    'large', 'regr_syy', 'bigint', 'leading', 'rollup', 'blob', 'ln', 'scope', 'boolean',
-    'lower', 'similar', 'both', 'match', 'smallint', 'ceil', 'max', 'specifictype',
-    'ceiling', 'member', 'sqlexception', 'char_length', 'merge', 'sqlstate',
-    'character_length', 'method', 'sqlwarning', 'clob', 'min', 'sqrt', 'coalesce', 'mod',
-    'stddev_pop', 'collate', 'module', 'stddev_samp', 'collect', 'multiset',
-    'submultiset', 'convert', 'national', 'sum', 'corr', 'natural', 'symmetric',
-    'corresponding', 'nchar', 'tablesample', 'covar_pop', 'nclob', 'timezone_hour',
-    'covar_samp', 'normalize', 'timezone_minute', 'cube', 'nullif', 'trailing',
-    'cume_dist', 'numeric', 'translate', 'current_default_transform_group',
-    'octet_length', 'translation', 'current_role', 'only', 'treat',
-    'current_transform_group_for_type', 'overlaps', 'true', 'dec', 'overlay',
-    'uescape', 'decimal', 'percent_rank', 'unknown', 'deref', 'percentile_cont',
-    'unnest', 'element', 'percentile_disc', 'upper', 'exec', 'power', 'var_pop', 'exp',
-    'real', 'var_samp', 'false', 'recursive', 'varchar', 'filter', 'ref', 'varying',
-    'float', 'regr_avgx', 'width_bucket', 'floor', 'regr_avgy', 'window', 'fusion',
-    'regr_count', 'within'])
-
-
-class _IBM_Boolean(sa_types.Boolean):
-
-    def result_processor(self, dialect, coltype):
-        def process(value):
-            if value is None:
-                return None
-            else:
-                return bool(value)
-        return process
-
-    def bind_processor(self, dialect):
-        def process(value):
-            if value is None:
-                return None
-            elif bool(value):
-                return '1'
-            else:
-                return '0'
-        return process
-
-class _IBM_Date(sa_types.Date):
-
-    def result_processor(self, dialect, coltype):
-        def process(value):
-            if value is None:
-                return None
-            if isinstance(value, datetime.datetime):
-                value = datetime.date(value.year, value.month, value.day)
-            return value
-        return process
-
-    def bind_processor(self, dialect):
-        def process(value):
-            if value is None:
-                return None
-            if isinstance(value, datetime.datetime):
-                value = datetime.date(value.year, value.month, value.day)
-            return str(value)
-        return process
-
-class DOUBLE(sa_types.Numeric):
-    __visit_name__ = 'DOUBLE'
-
-class LONGVARCHAR(sa_types.VARCHAR):
-    __visit_name_ = 'LONGVARCHAR'
-
-class DBCLOB(sa_types.CLOB):
-    __visit_name__ = "DBCLOB"
-
-class GRAPHIC(sa_types.CHAR):
-    __visit_name__ = "GRAPHIC"
-
-class VARGRAPHIC(sa_types.Unicode):
-    __visit_name__ = "VARGRAPHIC"
-
-
-class LONGVARGRAPHIC(sa_types.UnicodeText):
-    __visit_name__ = "LONGVARGRAPHIC"
-
-class XML(sa_types.Text):
-    __visit_name__ = "XML"
-
-colspecs = {
-    sa_types.Boolean: _IBM_Boolean,
-    sa_types.Date: _IBM_Date,
-# really ?
-#    sa_types.Unicode: DB2VARGRAPHIC
-}
-
-ischema_names = {
-    'BLOB': BLOB,
-    'CHAR': CHAR,
-    'CHARACTER': CHAR,
-    'CLOB': CLOB,
-    'DATE': DATE,
-    'DATETIME': DATETIME,
-    'INTEGER': INTEGER,
-    'SMALLINT': SMALLINT,
-    'BIGINT': BIGINT,
-    'DECIMAL': DECIMAL,
-    'NUMERIC': NUMERIC,
-    'REAL': REAL,
-    'DOUBLE': DOUBLE,
-    'TIME': TIME,
-    'TIMESTAMP': TIMESTAMP,
-    'VARCHAR': VARCHAR,
-    'LONGVARCHAR': LONGVARCHAR,
-    'XML': XML,
-    'GRAPHIC': GRAPHIC,
-    'VARGRAPHIC': VARGRAPHIC,
-    'LONGVARGRAPHIC': LONGVARGRAPHIC,
-    'DBCLOB': DBCLOB
-}
-
-
-class DB2TypeCompiler(compiler.GenericTypeCompiler):
-
-
-    def visit_TIMESTAMP(self, type_):
-        return "TIMESTAMP"
-
-    def visit_DATE(self, type_):
-        return "DATE"
-
-    def visit_TIME(self, type_):
-        return "TIME"
-
-    def visit_DATETIME(self, type_):
-        return self.visit_TIMESTAMP(type_)
-
-    def visit_SMALLINT(self, type_):
-        return "SMALLINT"
-
-    def visit_INT(self, type_):
-        return "INT"
-
-    def visit_BIGINT(self, type_):
-        return "BIGINT"
-
-    def visit_FLOAT(self, type_):
-        return "FLOAT" if type_.precision is None else \
-                "FLOAT(%(precision)s)" % {'precision': type_.precision}
-
-    def visit_XML(self, type_):
-        return "XML"
-
-    def visit_CLOB(self, type_):
-        return "CLOB"
-
-    def visit_BLOB(self, type_):
-        return "BLOB(1M)" if type_.length in (None, 0) else \
-                "BLOB(%(length)s)" % {'length': type_.length}
-
-    def visit_DBCLOB(self, type_):
-        return "DBCLOB(1M)" if type_.length in (None, 0) else \
-                "DBCLOB(%(length)s)" % {'length': type_.length}
-
-    def visit_VARCHAR(self, type_):
-        return "VARCHAR(%(length)s)" % {'length': type_.length}
-
-    def visit_LONGVARCHAR(self, type_):
-        return "LONG VARCHAR"
-
-    def visit_VARGRAPHIC(self, type_):
-        return "VARGRAPHIC(%(length)s)" % {'length': type_.length}
-
-    def visit_LONGVARGRAPHIC(self, type_):
-        return "LONG VARGRAPHIC"
-
-    def visit_CHAR(self, type_):
-        return "CHAR" if type_.length in (None, 0) else \
-                "CHAR(%(length)s)" % {'length': type_.length}
-
-    def visit_GRAPHIC(self, type_):
-        return "GRAPHIC" if type_.length in (None, 0) else \
-                "GRAPHIC(%(length)s)" % {'length': type_.length}
-
-    def visit_DECIMAL(self, type_):
-        if not type_.precision:
-            return "DECIMAL(31, 0)"
-        elif not type_.scale:
-            return "DECIMAL(%(precision)s, 0)" % {'precision': type_.precision}
-        else:
-            return "DECIMAL(%(precision)s, %(scale)s)" % {
-                            'precision': type_.precision, 'scale': type_.scale}
-
-
-    def visit_numeric(self, type_):
-        return self.visit_DECIMAL(type_)
-
-    def visit_datetime(self, type_):
-        return self.visit_TIMESTAMP(type_)
-
-    def visit_date(self, type_):
-        return self.visit_DATE(type_)
-
-    def visit_time(self, type_):
-        return self.visit_TIME(type_)
-
-    def visit_integer(self, type_):
-        return self.visit_INT(type_)
-
-    def visit_boolean(self, type_):
-        return self.visit_SMALLINT(type_)
-
-    def visit_float(self, type_):
-        return self.visit_FLOAT(type_)
-
-    def visit_unicode(self, type_):
-        return self.visit_VARGRAPHIC(type_)
-
-    def visit_unicode_text(self, type_):
-        return self.visit_LONGVARGRAPHIC(type_)
-
-    def visit_string(self, type_):
-        return self.visit_VARCHAR(type_)
-
-    def visit_TEXT(self, type_):
-        return self.visit_CLOB(type_)
-
-    def visit_large_binary(self, type_):
-        return self.visit_BLOB(type_)
-
-
-class DB2Compiler(compiler.SQLCompiler):
-
-    def visit_now_func(self, fn, **kw):
-        return "CURRENT_TIMESTAMP"
-
-    def visit_mod_binary(self, binary, operator, **kw):
-        return "mod(%s, %s)" % (self.process(binary.left),
-                                                self.process(binary.right))
-
-    def limit_clause(self, select):
-        if (select._limit is not None) and (select._offset is None):
-            return " FETCH FIRST %s ROWS ONLY" % select._limit
-        else:
-            return ""
-        
-    def visit_select(self, select, **kwargs):
-        limit, offset = select._limit, select._offset
-        sql_ori = compiler.SQLCompiler.visit_select(self, select, **kwargs)
-        if offset is not None:
-            __rownum = 'Z.__ROWNUM'
-            sql_split = re.split("[\s+]FROM ", sql_ori, 1)
-            sql_sec = ""
-            sql_sec = " \nFROM %s " % ( sql_split[1] )
-                
-            dummyVal = "Z.__db2_"
-            sql_pri = ""
-            
-            sql_sel = "SELECT "
-            if select._distinct:
-                sql_sel = "SELECT DISTINCT "
-
-            sql_select_token = sql_split[0].split( "," )
-            i = 0
-            while ( i < len( sql_select_token ) ):
-                if sql_select_token[i].count( "TIMESTAMP(DATE(SUBSTR(CHAR(" ) == 1:
-                    sql_sel = "%s \"%s%d\"," % ( sql_sel, dummyVal, i + 1 )
-                    sql_pri = '%s %s,%s,%s,%s AS "%s%d",' % ( 
-                                    sql_pri,
-                                    sql_select_token[i],
-                                    sql_select_token[i + 1],
-                                    sql_select_token[i + 2],
-                                    sql_select_token[i + 3],
-                                    dummyVal, i + 1 )
-                    i = i + 4
-                    continue
-                
-                if sql_select_token[i].count( " AS " ) == 1:
-                    temp_col_alias = sql_select_token[i].split( " AS " )
-                    sql_pri = '%s %s,' % ( sql_pri, sql_select_token[i] )
-                    sql_sel = "%s %s," % ( sql_sel, temp_col_alias[1] )
-                    i = i + 1
-                    continue
-            
-                sql_pri = '%s %s AS "%s%d",' % ( sql_pri, sql_select_token[i], dummyVal, i + 1 )
-                sql_sel = "%s \"%s%d\"," % ( sql_sel, dummyVal, i + 1 )
-                i = i + 1
-
-            sql_pri = sql_pri[:len( sql_pri ) - 1]
-            sql_pri = "%s%s" % ( sql_pri, sql_sec )
-            sql_sel = sql_sel[:len( sql_sel ) - 1]
-            sql = '%s, ( ROW_NUMBER() OVER() ) AS "%s" FROM ( %s ) AS M' % ( sql_sel, __rownum, sql_pri )
-            sql = '%s FROM ( %s ) Z WHERE' % ( sql_sel, sql )
-            
-            if offset is not 0:
-                sql = '%s "%s" > %d' % ( sql, __rownum, offset )
-            if offset is not 0 and limit is not None:
-                sql = '%s AND ' % ( sql )
-            if limit is not None:
-                sql = '%s "%s" <= %d' % ( sql, __rownum, offset + limit )
-            return "( %s )" % ( sql, )
-        else:
-            return sql_ori
-    
-    def visit_sequence(self, sequence):
-        return "NEXT VALUE FOR %s" % sequence.name
-
-    def default_from(self):
-        # DB2 uses SYSIBM.SYSDUMMY1 table for row count
-        return  " FROM SYSIBM.SYSDUMMY1"
-    
-    def visit_function(self, func, result_map=None, **kwargs):
-        if func.name.upper() == "AVG":
-            return "AVG(DOUBLE(%s))" % (self.function_argspec(func, **kwargs))
-        else:
-            return compiler.SQLCompiler.visit_function(self, func, **kwargs)        
-    # TODO: this is wrong but need to know what DB2 is expecting here
-    #    if func.name.upper() == "LENGTH":
-    #        return "LENGTH('%s')" % func.compile().params[func.name + '_1']
-    #    else:
-    #        return compiler.SQLCompiler.visit_function(self, func, **kwargs)
-
-
-    def visit_cast(self, cast, **kw):
-        type_ = cast.typeclause.type
-
-        # TODO: verify that CAST shouldn't be called with
-        # other types, I was able to CAST against VARCHAR
-        # for example
-        if isinstance(type_, (
-                    sa_types.DateTime, sa_types.Date, sa_types.Time,
-                    sa_types.DECIMAL)):
-            return super(DB2Compiler, self).visit_cast(cast, **kw)
-        else:
-            return self.process(cast.clause)
-
-    def get_select_precolumns(self, select):
-        if isinstance(select._distinct, basestring):
-            return select._distinct.upper() + " "
-        elif select._distinct:
-            return "DISTINCT "
-        else:
-            return ""
-
-    def visit_join(self, join, asfrom=False, **kwargs):
-        # NOTE: this is the same method as that used in mysql/base.py
-        # to render INNER JOIN
-        return ''.join(
-            (self.process(join.left, asfrom=True, **kwargs),
-             (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "),
-             self.process(join.right, asfrom=True, **kwargs),
-             " ON ",
-             self.process(join.onclause, **kwargs)))
-    
-    def visit_savepoint(self, savepoint_stmt):
-        return "SAVEPOINT %(sid)s ON ROLLBACK RETAIN CURSORS" % {'sid':self.preparer.format_savepoint(savepoint_stmt)}
-
-    def visit_rollback_to_savepoint(self, savepoint_stmt):
-        return 'ROLLBACK TO SAVEPOINT %(sid)s'% {'sid':self.preparer.format_savepoint(savepoint_stmt)}
-       
-    def visit_release_savepoint(self, savepoint_stmt):
-        return 'RELEASE TO SAVEPOINT %(sid)s'% {'sid':self.preparer.format_savepoint(savepoint_stmt)}
-        
-class DB2DDLCompiler(compiler.DDLCompiler):
-
-    def get_column_specification(self, column, **kw):
-        col_spec = [self.preparer.format_column(column)]
-        col_spec.append(self.dialect.type_compiler.process(column.type))
-
-
-        # column-options: "NOT NULL"
-        if not column.nullable or column.primary_key:
-            col_spec.append('NOT NULL')
-
-        # default-clause:
-        default = self.get_column_default_string(column)
-        if default is not None:
-            col_spec.append('WITH DEFAULT')
-            col_spec.append(default)
-
-        if column is column.table._autoincrement_column:
-            col_spec.append('GENERATED BY DEFAULT')
-            col_spec.append('AS IDENTITY')
-            col_spec.append('(START WITH 1)')
-
-        column_spec = ' '.join(col_spec)
-        return column_spec
-
-    def define_constraint_cascades(self, constraint):
-        text = ""
-        if constraint.ondelete is not None:
-            text += " ON DELETE %s" % constraint.ondelete
-
-        if constraint.onupdate is not None:
-            util.warn(
-                "DB2 does not support UPDATE CASCADE for foreign keys.")
-
-        return text
-
-    def visit_drop_index(self, drop, **kw):
-        return "\nDROP INDEX %s" % (
-                        self.preparer.quote(
-                                    self._index_identifier(drop.element.name),
-                                    drop.element.quote)
-                        )
-
-    def visit_drop_constraint(self, drop, **kw):
-        constraint = drop.element
-        if isinstance(constraint, sa_schema.ForeignKeyConstraint):
-                qual = "FOREIGN KEY "
-                const = self.preparer.format_constraint(constraint)
-        elif isinstance(constraint, sa_schema.PrimaryKeyConstraint):
-                qual = "PRIMARY KEY "
-                const = ""
-        elif isinstance(constraint, sa_schema.UniqueConstraint):
-                qual = "INDEX "
-                const = self.preparer.format_constraint(constraint)
-        else:
-                qual = ""
-                const = self.preparer.format_constraint(constraint)
-        return "ALTER TABLE %s DROP %s%s" % \
-                                (self.preparer.format_table(constraint.table),
-                                qual, const)
-
-class DB2IdentifierPreparer(compiler.IdentifierPreparer):
-
-    reserved_words = RESERVED_WORDS
-    illegal_initial_characters = set(xrange(0, 10)).union(["_", "$"])
-
-
-class DB2ExecutionContext(default.DefaultExecutionContext):
-    def fire_sequence(self, seq, type_):
-        return self._execute_scalar("SELECT NEXTVAL FOR " +
-                    self.dialect.identifier_preparer.format_sequence(seq) +
-                    " FROM SYSIBM.SYSDUMMY1", type_)
-                    
-class _SelectLastRowIDMixin(object):
-    _select_lastrowid = False
-    _lastrowid = None
-
-
-    def getlastrowid(self):
-        return self._lastrowid
-
-    def pre_exec(self):
-        if self.isinsert:
-            tbl = self.compiled.statement.table
-            seq_column = tbl._autoincrement_column
-            insert_has_sequence = seq_column is not None
-
-            self._select_lastrowid = insert_has_sequence and \
-                                        not self.compiled.returning and \
-                                        not self.compiled.inline
-
-    def post_exec(self):
-        conn = self.root_connection
-        if self._select_lastrowid:
-            conn._cursor_execute(self.cursor,
-                    "SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1",
-                    (), self)
-            row = self.cursor.fetchall()[0]
-            if row[0] is not None:
-                self._lastrowid = int(row[0])
-
-
-class DB2Dialect(default.DefaultDialect):
-
-    name = 'ibm_db_sa'
-    max_identifier_length = 128
-    encoding = 'utf-8'
-    default_paramstyle = 'named'
-    colspecs = colspecs
-    ischema_names = ischema_names
-    supports_char_length = False
-    supports_unicode_statements = False
-    supports_unicode_binds = False
-    returns_unicode_strings = False
-    postfetch_lastrowid = True
-    supports_sane_rowcount = True
-    supports_sane_multi_rowcount = True
-    supports_native_decimal = True
-    preexecute_sequences = False
-    supports_alter = True
-    supports_sequences = True
-    sequences_optional = True
-
-    requires_name_normalize = True
-
-    supports_default_values = False
-    supports_empty_insert = False
-
-    two_phase_transactions = False
-    savepoints =  True 
-
-    statement_compiler = DB2Compiler
-    ddl_compiler = DB2DDLCompiler
-    type_compiler = DB2TypeCompiler
-    preparer = DB2IdentifierPreparer
-    execution_ctx_cls = DB2ExecutionContext
-
-    _reflector_cls = ibm_reflection.DB2Reflector
-
-    def __init__(self, **kw):
-        super(DB2Dialect, self).__init__(**kw)
-
-        self._reflector = self._reflector_cls(self)
-
-    # reflection: these all defer to an BaseDB2Reflector
-    # object which selects between DB2 and AS/400 schemas
-
-    def normalize_name(self, name):
-        return self._reflector.normalize_name(name)
-
-    def denormalize_name(self, name):
-        return self._reflector.denormalize_name(name)
-
-    def _get_default_schema_name(self, connection):
-        return self._reflector._get_default_schema_name(connection)
-
-    def has_table(self, connection, table_name, schema=None):
-        return self._reflector.has_table(connection, table_name, schema=schema)
-
-    def has_sequence(self, connection, sequence_name, schema=None):
-        return self._reflector.has_sequence(connection, sequence_name,
-                        schema=schema)
-
-    def get_schema_names(self, connection, **kw):
-        return self._reflector.get_schema_names(connection, **kw)
-
-
-    def get_table_names(self, connection, schema=None, **kw):
-        return self._reflector.get_table_names(connection, schema=schema, **kw)
-
-    def get_view_names(self, connection, schema=None, **kw):
-        return self._reflector.get_view_names(connection, schema=schema, **kw)
-
-    def get_view_definition(self, connection, viewname, schema=None, **kw):
-        return self._reflector.get_view_definition(
-                                connection, viewname, schema=schema, **kw)
-
-    def get_columns(self, connection, table_name, schema=None, **kw):
-        return self._reflector.get_columns(
-                                connection, table_name, schema=schema, **kw)
-
-    def get_primary_keys(self, connection, table_name, schema=None, **kw):
-        return self._reflector.get_primary_keys(
-                                connection, table_name, schema=schema, **kw)
-
-    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        return self._reflector.get_foreign_keys(
-                                connection, table_name, schema=schema, **kw)
-
-    def get_indexes(self, connection, table_name, schema=None, **kw):
-        return self._reflector.get_indexes(
-                                connection, table_name, schema=schema, **kw)
-
-
-# legacy naming
-IBM_DBCompiler = DB2Compiler
-IBM_DBDDLCompiler = DB2DDLCompiler
-IBM_DBIdentifierPreparer = DB2IdentifierPreparer
-IBM_DBExecutionContext = DB2ExecutionContext
-IBM_DBDialect = DB2Dialect
-
-dialect = DB2Dialect
diff -pruN 0.3.0-3/ibm_db_sa/ibm_db.py 2.0.5-0ubuntu2/ibm_db_sa/ibm_db.py
--- 0.3.0-3/ibm_db_sa/ibm_db.py	2013-02-28 12:18:53.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/ibm_db.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,108 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi               |
-# | Contributors: Jaimy Azle, Mike Bayer                                     |
-# +--------------------------------------------------------------------------+
-
-from .base import DB2ExecutionContext, DB2Dialect
-
-from sqlalchemy import processors, types as sa_types, util
-
-class _IBM_Numeric_ibm_db(sa_types.Numeric):
-    def result_processor(self, dialect, coltype):
-        if self.asdecimal:
-            return None
-        else:
-            return processors.to_float
-
-
-class DB2ExecutionContext_ibm_db(DB2ExecutionContext):
-
-    def get_lastrowid(self):
-        return self.cursor.last_identity_val
-
-class DB2Dialect_ibm_db(DB2Dialect):
-
-    driver = 'ibm_db_sa'
-    supports_unicode_statements = False
-    supports_sane_rowcount = True
-    supports_sane_multi_rowcount = False
-    supports_native_decimal = False
-    supports_char_length = True
-    execution_ctx_cls = DB2ExecutionContext_ibm_db
-
-    colspecs = util.update_copy(
-        DB2Dialect.colspecs,
-        {
-            sa_types.Numeric: _IBM_Numeric_ibm_db
-        }
-    )
-
-    @classmethod
-    def dbapi(cls):
-        """ Returns: the underlying DBAPI driver module
-        """
-        import ibm_db_dbi as module
-        return module
-
-    def _get_server_version_info(self, connection):
-        return connection.connection.server_info()
-
-    def create_connect_args(self, url):
-        # DSN support through CLI configuration (../cfg/db2cli.ini),
-        # while 2 connection attributes are mandatory: database alias
-        # and UID (in support to current schema), all the other
-        # connection attributes (protocol, hostname, servicename) are
-        # provided through db2cli.ini database catalog entry. Example
-        # 1: ibm_db_sa:///<database_alias>?UID=db2inst1 or Example 2:
-        # ibm_db_sa:///?DSN=<database_alias>;UID=db2inst1
-        if not url.host:
-            dsn = url.database
-            uid = url.username
-            pwd = url.password
-            return ((dsn, uid, pwd, '', ''), {})
-        else:
-            # Full URL string support for connection to remote data servers
-            dsn_param = ['DRIVER={IBM DB2 ODBC DRIVER}']
-            dsn_param.append('DATABASE=%s' % url.database)
-            dsn_param.append('HOSTNAME=%s' % url.host)
-            dsn_param.append('PROTOCOL=TCPIP')
-            if url.port:
-                dsn_param.append('PORT=%s' % url.port)
-            if url.username:
-                dsn_param.append('UID=%s' % url.username)
-            if url.password:
-                dsn_param.append('PWD=%s' % url.password)
-            dsn = ';'.join(dsn_param)
-            dsn += ';'
-            return ((dsn, url.username, '', '', ''), {})
-
-    # Retrieves current schema for the specified connection object
-    def _get_default_schema_name(self, connection):
-        return self.normalize_name(connection.connection.get_current_schema())
-
-
-    # Checks if the DB_API driver error indicates an invalid connection
-    def is_disconnect(self, ex, connection, cursor):
-        if isinstance(ex, (self.dbapi.ProgrammingError,
-                                             self.dbapi.OperationalError)):
-            return 'Connection is not active' in str(ex) or \
-                        'connection is no longer active' in str(ex) or \
-                        'Connection Resource cannot be found' in str(ex)
-        else:
-            return False
-
-dialect = DB2Dialect_ibm_db
diff -pruN 0.3.0-3/ibm_db_sa/__init__.py 2.0.5-0ubuntu2/ibm_db_sa/__init__.py
--- 0.3.0-3/ibm_db_sa/__init__.py	2013-02-28 12:18:11.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,38 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi               |
-# | Contributors: Jaimy Azle, Mike Bayer                                     |
-# +--------------------------------------------------------------------------+
-
-__version__ = '0.3.0'
-
-from . import ibm_db, pyodbc, base   # zxjdbc
-
-
-# default dialect
-base.dialect = ibm_db.dialect
-
-from .base import \
-    BIGINT, BLOB, CHAR, CLOB, DATE, DATETIME, \
-    DECIMAL, DOUBLE, DECIMAL,\
-    GRAPHIC, INTEGER, INTEGER, LONGVARCHAR, \
-    NUMERIC, SMALLINT, REAL, TIME, TIMESTAMP, \
-    VARCHAR, VARGRAPHIC, dialect
-
-#__all__ = (
-    # TODO: (put types here)
-#    'dialect'
-#)
diff -pruN 0.3.0-3/ibm_db_sa/pyodbc.py 2.0.5-0ubuntu2/ibm_db_sa/pyodbc.py
--- 0.3.0-3/ibm_db_sa/pyodbc.py	2013-02-28 12:19:14.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/pyodbc.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,102 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Authors: Jaimy Azle, Rahul Priyadarshi                                   |
-# | Contributors: Mike Bayer                                                 |
-# +--------------------------------------------------------------------------+
-from sqlalchemy import util
-import urllib
-from sqlalchemy.connectors.pyodbc import PyODBCConnector
-from .base import _SelectLastRowIDMixin, DB2ExecutionContext, DB2Dialect
-
-
-
-class DB2ExecutionContext_pyodbc(_SelectLastRowIDMixin, DB2ExecutionContext):
-    pass
-
-class DB2Dialect_pyodbc(PyODBCConnector, DB2Dialect):
-
-    supports_unicode_statements = False
-    supports_native_decimal = True
-    supports_char_length = True
-    supports_native_decimal = False
-
-    execution_ctx_cls = DB2ExecutionContext_pyodbc
-
-    pyodbc_driver_name = "IBM DB2 ODBC DRIVER"
-
-    def create_connect_args(self, url):
-        opts = url.translate_connect_args(username='user')
-        opts.update(url.query)
-
-        keys = opts
-        query = url.query
-
-        connect_args = {}
-        for param in ('ansi', 'unicode_results', 'autocommit'):
-            if param in keys:
-                connect_args[param] = util.asbool(keys.pop(param))
-
-        if 'odbc_connect' in keys:
-            connectors = [urllib.unquote_plus(keys.pop('odbc_connect'))]
-        else:
-            dsn_connection = 'dsn' in keys or \
-                                    ('host' in keys and 'database' not in keys)
-            if dsn_connection:
-                connectors = ['dsn=%s' % (keys.pop('host', '') or \
-                                            keys.pop('dsn', ''))]
-            else:
-                port = ''
-                if 'port' in keys and not 'port' in query:
-                    port = '%d' % int(keys.pop('port'))
-
-                database = keys.pop('database', '')
-
-                connectors = ["DRIVER={%s}" %
-                                keys.pop('driver', self.pyodbc_driver_name),
-                            'hostname=%s;port=%s' % (keys.pop('host', ''), port),
-                            'database=%s' % database]
-
-                user = keys.pop("user", None)
-                if user:
-                    connectors.append("uid=%s" % user)
-                    connectors.append("pwd=%s" % keys.pop('password', ''))
-                else:
-                    connectors.append("trusted_connection=yes")
-
-                # if set to 'yes', the odbc layer will try to automagically
-                # convert textual data from your database encoding to your
-                # client encoding.    this should obviously be set to 'no' if
-                # you query a cp1253 encoded database from a latin1 client...
-                if 'odbc_autotranslate' in keys:
-                    connectors.append("autotranslate=%s" %
-                                            keys.pop("odbc_autotranslate"))
-
-                connectors.extend(['%s=%s' % (k, v)
-                                        for k, v in keys.iteritems()])
-        return [[";".join(connectors)], connect_args]
-
-class AS400Dialect_pyodbc(PyODBCConnector, DB2Dialect):
-
-    supports_unicode_statements = False
-    supports_sane_rowcount = False
-    supports_sane_multi_rowcount = False
-    supports_native_decimal = True
-    supports_char_length = True
-    supports_native_decimal = False
-
-    pyodbc_driver_name = "IBM DB2 ODBC DRIVER"
-
-
diff -pruN 0.3.0-3/ibm_db_sa/reflection.py 2.0.5-0ubuntu2/ibm_db_sa/reflection.py
--- 0.3.0-3/ibm_db_sa/reflection.py	2013-02-28 12:20:03.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/reflection.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,605 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi               |
-# | Contributors: Jaimy Azle, Mike Bayer                                     |
-# +--------------------------------------------------------------------------+
-
-from sqlalchemy import types as sa_types
-from sqlalchemy import sql, util
-from sqlalchemy import Table, MetaData, Column
-from sqlalchemy.engine import reflection
-import re
-
-
-
-class CoerceUnicode(sa_types.TypeDecorator):
-    impl = sa_types.Unicode
-
-    def process_bind_param(self, value, dialect):
-        if isinstance(value, str):
-            value = value.decode(dialect.encoding)
-        return value
-
-class BaseReflector(object):
-    def __init__(self, dialect):
-        self.dialect = dialect
-        self.ischema_names = dialect.ischema_names
-        self.identifier_preparer = dialect.identifier_preparer
-
-    def normalize_name(self, name):
-        if isinstance(name, str):
-            name = name.decode(self.dialect.encoding)
-        elif name != None:
-            return name.lower() if name.upper() == name and \
-               not self.identifier_preparer._requires_quotes(name.lower()) \
-               else name
-        return name
-
-    def denormalize_name(self, name):
-        if name is None:
-            return None
-        elif name.lower() == name and \
-                not self.identifier_preparer._requires_quotes(name.lower()):
-            name = name.upper()
-        if not self.dialect.supports_unicode_binds:
-            name = name.encode(self.dialect.encoding)
-        else:
-            name = unicode(name)
-        return name
-
-    def _get_default_schema_name(self, connection):
-        """Return: current setting of the schema attribute"""
-        default_schema_name = connection.execute(
-                    u'SELECT CURRENT_SCHEMA FROM SYSIBM.SYSDUMMY1').scalar()
-        if isinstance(default_schema_name, str):
-            default_schema_name = default_schema_name.strip()
-        return self.normalize_name(default_schema_name)
-
-    @property
-    def default_schema_name(self):
-        return self.dialect.default_schema_name
-
-class DB2Reflector(BaseReflector):
-    ischema = MetaData()
-
-    sys_schemas = Table("SCHEMATA", ischema,
-      Column("SCHEMANAME", CoerceUnicode, key="schemaname"),
-      Column("OWNER", CoerceUnicode, key="owner"),
-      Column("OWNERTYPE", CoerceUnicode, key="ownertype"),
-      Column("DEFINER", CoerceUnicode, key="definer"),
-      Column("DEFINERTYPE", CoerceUnicode, key="definertype"),
-      Column("REMARK", CoerceUnicode, key="remark"),
-      schema="SYSCAT")
-
-    sys_tables = Table("TABLES", ischema,
-      Column("TABSCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABNAME", CoerceUnicode, key="tabname"),
-      Column("OWNER", CoerceUnicode, key="owner"),
-      Column("OWNERTYPE", CoerceUnicode, key="ownertype"),
-      Column("TYPE", CoerceUnicode, key="type"),
-      Column("STATUS", CoerceUnicode, key="status"),
-      schema="SYSCAT")
-
-    sys_indexes = Table("INDEXES", ischema,
-      Column("TABSCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABNAME", CoerceUnicode, key="tabname"),
-      Column("INDNAME", CoerceUnicode, key="indname"),
-      Column("COLNAMES", CoerceUnicode, key="colnames"),
-      Column("UNIQUERULE", CoerceUnicode, key="uniquerule"),
-      schema="SYSCAT")
-
-    sys_foreignkeys = Table("SQLFOREIGNKEYS", ischema,
-      Column("FK_NAME", CoerceUnicode, key="fkname"),
-      Column("FKTABLE_SCHEM", CoerceUnicode, key="fktabschema"),
-      Column("FKTABLE_NAME", CoerceUnicode, key="fktabname"),
-      Column("FKCOLUMN_NAME", CoerceUnicode, key="fkcolname"),
-      Column("PK_NAME", CoerceUnicode, key="pkname"),
-      Column("PKTABLE_SCHEM", CoerceUnicode, key="pktabschema"),
-      Column("PKTABLE_NAME", CoerceUnicode, key="pktabname"),
-      Column("PKCOLUMN_NAME", CoerceUnicode, key="pkcolname"),
-      Column("KEY_SEQ", sa_types.Integer, key="colno"),
-      schema="SYSIBM")
-
-    sys_columns = Table("COLUMNS", ischema,
-      Column("TABSCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABNAME", CoerceUnicode, key="tabname"),
-      Column("COLNAME", CoerceUnicode, key="colname"),
-      Column("COLNO", sa_types.Integer, key="colno"),
-      Column("TYPENAME", CoerceUnicode, key="typename"),
-      Column("LENGTH", sa_types.Integer, key="length"),
-      Column("SCALE", sa_types.Integer, key="scale"),
-      Column("DEFAULT", CoerceUnicode, key="defaultval"),
-      Column("NULLS", CoerceUnicode, key="nullable"),
-      schema="SYSCAT")
-
-    sys_views = Table("VIEWS", ischema,
-      Column("VIEWSCHEMA", CoerceUnicode, key="viewschema"),
-      Column("VIEWNAME", CoerceUnicode, key="viewname"),
-      Column("TEXT", CoerceUnicode, key="text"),
-      schema="SYSCAT")
-
-    sys_sequences = Table("SEQUENCES", ischema,
-      Column("SEQSCHEMA", CoerceUnicode, key="seqschema"),
-      Column("SEQNAME", CoerceUnicode, key="seqname"),
-      schema="SYSCAT")
-
-    def has_table(self, connection, table_name, schema=None):
-        current_schema = self.denormalize_name(
-                            schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        if current_schema:
-            whereclause = sql.and_(self.sys_tables.c.tabschema == current_schema,
-                                   self.sys_tables.c.tabname == table_name)
-        else:
-            whereclause = self.sys_tables.c.tabname == table_name
-        s = sql.select([self.sys_tables.c.tabname], whereclause)
-        c = connection.execute(s)
-        return c.first() is not None
-
-    def has_sequence(self, connection, sequence_name, schema=None):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        sequence_name = self.denormalize_name(sequence_name)
-        if current_schema:
-            whereclause = sql.and_(self.sys_sequences.c.seqschema == current_schema,
-                                   self.sys_sequences.c.seqname == sequence_name)
-        else:
-            whereclause = self.sys_sequences.c.seqname == sequence_name
-        s = sql.select([self.sys_sequences.c.seqname], whereclause)
-        c = connection.execute(s)
-        return c.first() is not None
-
-    def get_schema_names(self, connection, **kw):
-        sysschema = self.sys_schemas
-        query = sql.select([sysschema.c.schemaname],
-            sql.not_(sysschema.c.schemaname.like('SYS%')),
-            order_by=[sysschema.c.schemaname]
-        )
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-
-    @reflection.cache
-    def get_table_names(self, connection, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        systbl = self.sys_tables
-        query = sql.select([systbl.c.tabname]).\
-                    where(systbl.c.type == 'T').\
-                    where(systbl.c.tabschema == current_schema).\
-                    order_by(systbl.c.tabname)
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-    @reflection.cache
-    def get_view_names(self, connection, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-
-        query = sql.select([self.sys_views.c.viewname]).\
-            where(self.sys_views.c.viewschema == current_schema).\
-            order_by(self.sys_views.c.viewname)
-            
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-    @reflection.cache
-    def get_view_definition(self, connection, viewname, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        viewname = self.denormalize_name(viewname)
-
-        query = sql.select([self.sys_views.c.text]).\
-            where(self.sys_views.c.viewschema == current_schema).\
-            where(self.sys_views.c.viewname == viewname)
-            
-        return connection.execute(query).scalar()
-
-    @reflection.cache
-    def get_columns(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        syscols = self.sys_columns
-
-        query = sql.select([syscols.c.colname, syscols.c.typename,
-                            syscols.c.defaultval, syscols.c.nullable,
-                            syscols.c.length, syscols.c.scale],
-              sql.and_(
-                  syscols.c.tabschema == current_schema,
-                  syscols.c.tabname == table_name
-                ),
-              order_by=[syscols.c.colno]
-            )
-        sa_columns = []
-        for r in connection.execute(query):
-            coltype = r[1].upper()
-            if coltype in ['DECIMAL', 'NUMERIC']:
-                coltype = self.ischema_names.get(coltype)(int(r[4]), int(r[5]))
-            elif coltype in ['CHARACTER', 'CHAR', 'VARCHAR',
-                            'GRAPHIC', 'VARGRAPHIC']:
-                coltype = self.ischema_names.get(coltype)(int(r[4]))
-            else:
-                try:
-                    coltype = self.ischema_names[coltype]
-                except KeyError:
-                    util.warn("Did not recognize type '%s' of column '%s'" %
-                            (coltype, r[0]))
-                    coltype = coltype = sa_types.NULLTYPE
-
-            sa_columns.append({
-                    'name': self.normalize_name(r[0]),
-                    'type': coltype,
-                    'nullable': r[3] == 'Y',
-                    'default': r[2] or None,
-                })
-        return sa_columns
-
-    @reflection.cache
-    def get_primary_keys(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysindexes = self.sys_indexes
-        col_finder = re.compile("(\w+)")
-        query = sql.select([sysindexes.c.colnames],
-              sql.and_(
-                  sysindexes.c.tabschema == current_schema,
-                  sysindexes.c.tabname == table_name,
-                  sysindexes.c.uniquerule == 'P'
-                ),
-              order_by=[sysindexes.c.tabschema, sysindexes.c.tabname]
-            )
-        pk_columns = []
-        for r in connection.execute(query):
-            cols = col_finder.findall(r[0])
-            pk_columns.extend(cols)
-        return [self.normalize_name(col) for col in pk_columns]
-
-    @reflection.cache
-    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysfkeys = self.sys_foreignkeys
-        query = sql.select([sysfkeys.c.fkname, sysfkeys.c.fktabschema, \
-                            sysfkeys.c.fktabname, sysfkeys.c.fkcolname, \
-                            sysfkeys.c.pkname, sysfkeys.c.pktabschema, \
-                            sysfkeys.c.pktabname, sysfkeys.c.pkcolname],
-            sql.and_(
-              sysfkeys.c.fktabschema == current_schema,
-              sysfkeys.c.fktabname == table_name
-            ),
-            order_by=[sysfkeys.c.colno]
-          )
-
-        fschema = {}
-        for r in connection.execute(query):
-            if not fschema.has_key(r[0]):
-                referred_schema = self.normalize_name(r[5])
-
-                # if no schema specified and referred schema here is the
-                # default, then set to None
-                if schema is None and \
-                    referred_schema == self.default_schema_name:
-                    referred_schema = None
-
-                fschema[r[0]] = {
-                    'name': self.normalize_name(r[0]),
-                  'constrained_columns': [self.normalize_name(r[3])],
-                  'referred_schema': referred_schema,
-                  'referred_table': self.normalize_name(r[6]),
-                  'referred_columns': [self.normalize_name(r[7])]}
-            else:
-                fschema[r[0]]['constrained_columns'].append(self.normalize_name(r[3]))
-                fschema[r[0]]['referred_columns'].append(self.normalize_name(r[7]))
-        return [value for key, value in fschema.iteritems()]
-
-
-    @reflection.cache
-    def get_indexes(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysidx = self.sys_indexes
-        query = sql.select([sysidx.c.indname, sysidx.c.colnames, sysidx.c.uniquerule],
-            sql.and_(
-              sysidx.c.tabschema == current_schema,
-              sysidx.c.tabname == table_name
-            ),
-            order_by=[sysidx.c.tabname]
-          )
-        indexes = []
-        col_finder = re.compile("(\w+)")
-        for r in connection.execute(query):
-            if r[2] != 'P':
-                indexes.append({
-                        'name': self.normalize_name(r[0]),
-                        'column_names': [self.normalize_name(col)
-                                        for col in col_finder.findall(r[1])],
-                        'unique': r[2] == 'U'
-                    })
-        return indexes
-
-class AS400Reflector(BaseReflector):
-
-    ischema = MetaData()
-
-    sys_schemas = Table("SQLSCHEMAS", ischema,
-      Column("TABLE_SCHEM", CoerceUnicode, key="schemaname"),
-      schema="SYSIBM")
-
-    sys_tables = Table("SYSTABLES", ischema,
-      Column("TABLE_SCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="tabname"),
-      Column("TABLE_TYPE", CoerceUnicode, key="tabtype"),
-      schema="QSYS2")
-
-    sys_table_constraints = Table("SYSCST", ischema,
-      Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="conschema"),
-      Column("CONSTRAINT_NAME", CoerceUnicode, key="conname"),
-      Column("CONSTRAINT_TYPE", CoerceUnicode, key="contype"),
-      Column("TABLE_SCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="tabname"),
-      Column("TABLE_TYPE", CoerceUnicode, key="tabtype"),
-      schema="QSYS2")
-
-    sys_key_constraints = Table("SYSKEYCST", ischema,
-      Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="conschema"),
-      Column("CONSTRAINT_NAME", CoerceUnicode, key="conname"),
-      Column("TABLE_SCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="tabname"),
-      Column("COLUMN_NAME", CoerceUnicode, key="colname"),
-      Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
-      schema="QSYS2")
-
-    sys_columns = Table("SYSCOLUMNS", ischema,
-      Column("TABLE_SCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="tabname"),
-      Column("COLUMN_NAME", CoerceUnicode, key="colname"),
-      Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
-      Column("DATA_TYPE", CoerceUnicode, key="typename"),
-      Column("LENGTH", sa_types.Integer, key="length"),
-      Column("NUMERIC_SCALE", sa_types.Integer, key="scale"),
-      Column("IS_NULLABLE", sa_types.Integer, key="nullable"),
-      Column("COLUMN_DEFAULT", CoerceUnicode, key="defaultval"),
-      Column("HAS_DEFAULT", CoerceUnicode, key="hasdef"),
-      schema="QSYS2")
-
-    sys_indexes = Table("SYSINDEXES", ischema,
-      Column("TABLE_SCHEMA", CoerceUnicode, key="tabschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="tabname"),
-      Column("INDEX_SCHEMA", CoerceUnicode, key="indschema"),
-      Column("INDEX_NAME", CoerceUnicode, key="indname"),
-      Column("IS_UNIQUE", CoerceUnicode, key="uniquerule"),
-      schema="QSYS2")
-
-    sys_keys = Table("SYSKEYS", ischema,
-      Column("INDEX_SCHEMA", CoerceUnicode, key="indschema"),
-      Column("INDEX_NAME", CoerceUnicode, key="indname"),
-      Column("COLUMN_NAME", CoerceUnicode, key="colname"),
-      Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
-      Column("ORDERING", CoerceUnicode, key="ordering"),
-      schema="QSYS2")
-
-    sys_foreignkeys = Table("SQLFOREIGNKEYS", ischema,
-      Column("FK_NAME", CoerceUnicode, key="fkname"),
-      Column("FKTABLE_SCHEM", CoerceUnicode, key="fktabschema"),
-      Column("FKTABLE_NAME", CoerceUnicode, key="fktabname"),
-      Column("FKCOLUMN_NAME", CoerceUnicode, key="fkcolname"),
-      Column("PK_NAME", CoerceUnicode, key="pkname"),
-      Column("PKTABLE_SCHEM", CoerceUnicode, key="pktabschema"),
-      Column("PKTABLE_NAME", CoerceUnicode, key="pktabname"),
-      Column("PKCOLUMN_NAME", CoerceUnicode, key="pkcolname"),
-      Column("KEY_SEQ", sa_types.Integer, key="colno"),
-      schema="SYSIBM")
-
-    sys_views = Table("SYSVIEWS", ischema,
-      Column("TABLE_SCHEMA", CoerceUnicode, key="viewschema"),
-      Column("TABLE_NAME", CoerceUnicode, key="viewname"),
-      Column("VIEW_DEFINITION", CoerceUnicode, key="text"),
-      schema="QSYS2")
-
-    sys_sequences = Table("SYSSEQUENCES", ischema,
-      Column("SEQUENCE_SCHEMA", CoerceUnicode, key="seqschema"),
-      Column("SEQUENCE_NAME", CoerceUnicode, key="seqname"),
-      schema="QSYS2")
-
-    def has_table(self, connection, table_name, schema=None):
-        current_schema = self.denormalize_name(
-                                schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        if current_schema:
-                whereclause = sql.and_(
-                            self.sys_tables.c.tabschema == current_schema,
-                            self.sys_tables.c.tabname == table_name)
-        else:
-                whereclause = self.sys_tables.c.tabname == table_name
-        s = sql.select([self.sys_tables], whereclause)
-        c = connection.execute(s)
-        return c.first() is not None
-
-    def has_sequence(self, connection, sequence_name, schema=None):
-        current_schema = self.denormalize_name(
-                                schema or self.default_schema_name)
-        sequence_name = self.denormalize_name(sequence_name)
-        if current_schema:
-                whereclause = sql.and_(
-                            self.sys_sequences.c.seqschema == current_schema,
-                            self.sys_sequences.c.seqname == sequence_name)
-        else:
-                whereclause = self.sys_sequences.c.seqname == sequence_name
-        s = sql.select([self.sys_sequences.c.seqname], whereclause)
-        c = connection.execute(s)
-        return c.first() is not None
-
-    @reflection.cache
-    def get_schema_names(self, connection, **kw):
-        sysschema = self.sys_schemas
-        query = sql.select([sysschema.c.schemaname],
-                sql.not_(sysschema.c.schemaname.like('SYS%')),
-                sql.not_(sysschema.c.schemaname.like('Q%')),
-                order_by=[sysschema.c.schemaname]
-        )
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-    # Retrieves a list of table names for a given schema
-    @reflection.cache
-    def get_table_names(self, connection, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                            schema or self.default_schema_name)
-        systbl = self.sys_tables
-        query = sql.select([systbl.c.tabname],
-                systbl.c.tabschema == current_schema,
-                order_by=[systbl.c.tabname]
-            )
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-    @reflection.cache
-    def get_view_names(self, connection, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                schema or self.default_schema_name)
-
-        query = sql.select([self.sys_views.c.viewname],
-                self.sys_views.c.viewschema == current_schema,
-                order_by=[self.sys_views.c.viewname]
-            )
-        return [self.normalize_name(r[0]) for r in connection.execute(query)]
-
-    @reflection.cache
-    def get_view_definition(self, connection, viewname, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                schema or self.default_schema_name)
-        viewname = self.denormalize_name(viewname)
-
-        query = sql.select([self.sys_views.c.text],
-                self.sys_views.c.viewschema == current_schema,
-                self.sys_views.c.viewname == viewname
-            )
-        return connection.execute(query).scalar()
-
-    @reflection.cache
-    def get_columns(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        syscols = self.sys_columns
-
-        query = sql.select([syscols.c.colname,
-                                syscols.c.typename,
-                                syscols.c.defaultval, syscols.c.nullable,
-                                syscols.c.length, syscols.c.scale],
-                    sql.and_(
-                            syscols.c.tabschema == current_schema,
-                            syscols.c.tabname == table_name
-                        ),
-                    order_by=[syscols.c.tabschema, syscols.c.tabname,
-                                    syscols.c.colname, syscols.c.colno]
-                )
-        sa_columns = []
-        for r in connection.execute(query):
-            coltype = r[1].upper()
-            if coltype in ['DECIMAL', 'NUMERIC']:
-                coltype = self.ischema_names.get(coltype)(int(r[4]), int(r[5]))
-            elif coltype in ['CHARACTER', 'CHAR', 'VARCHAR',
-                                'GRAPHIC', 'VARGRAPHIC']:
-                coltype = self.ischema_names.get(coltype)(int(r[4]))
-            else:
-                try:
-                    coltype = self.ischema_names[coltype]
-                except KeyError:
-                    util.warn("Did not recognize type '%s' of column '%s'" %
-                                    (coltype, r[0]))
-                    coltype = coltype = sa_types.NULLTYPE
-
-            sa_columns.append({
-                    'name': self.normalize_name(r[0]),
-                    'type': coltype,
-                    'nullable': r[3] == 'Y',
-                    'default': r[2],
-                    'autoincrement': r[2] is None,
-                })
-        return sa_columns
-
-    @reflection.cache
-    def get_primary_keys(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                    schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysconst = self.sys_table_constraints
-        syskeyconst = self.sys_key_constraints
-
-        query = sql.select([syskeyconst.c.colname, sysconst.c.tabname],
-                sql.and_(
-                    syskeyconst.c.conschema == sysconst.c.conschema,
-                    syskeyconst.c.conname == sysconst.c.conname,
-                    sysconst.c.tabschema == current_schema,
-                    sysconst.c.tabname == table_name,
-                    sysconst.c.contype == 'PRIMARY KEY'
-            ), order_by=[syskeyconst.c.colno])
-
-        return [self.normalize_name(key[0])
-                    for key in connection.execute(query)]
-
-    @reflection.cache
-    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                    schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysfkeys = self.sys_foreignkeys
-        query = sql.select([sysfkeys.c.fkname, sysfkeys.c.fktabschema, \
-                                sysfkeys.c.fktabname, sysfkeys.c.fkcolname, \
-                                sysfkeys.c.pkname, sysfkeys.c.pktabschema, \
-                                sysfkeys.c.pktabname, sysfkeys.c.pkcolname],
-                sql.and_(
-                    sysfkeys.c.fktabschema == current_schema,
-                    sysfkeys.c.fktabname == table_name
-                ),
-                order_by=[sysfkeys.c.colno]
-            )
-        fschema = {}
-        for r in connection.execute(query):
-            if not fschema.has_key(r[0]):
-                fschema[r[0]] = {'name': self.normalize_name(r[0]),
-                            'constrained_columns': [self.normalize_name(r[3])],
-                            'referred_schema': self.normalize_name(r[5]),
-                            'referred_table': self.normalize_name(r[6]),
-                            'referred_columns': [self.normalize_name(r[7])]}
-            else:
-                fschema[r[0]]['constrained_columns'].append(
-                                                    self.normalize_name(r[3]))
-                fschema[r[0]]['referred_columns'].append(
-                                                    self.normalize_name(r[7]))
-        return [value for key, value in fschema.iteritems()]
-
-    # Retrieves a list of index names for a given schema
-    @reflection.cache
-    def get_indexes(self, connection, table_name, schema=None, **kw):
-        current_schema = self.denormalize_name(
-                                    schema or self.default_schema_name)
-        table_name = self.denormalize_name(table_name)
-        sysidx = self.sys_indexes
-        syskey = self.sys_keys
-
-        query = sql.select([sysidx.c.indname,
-                            sysidx.c.uniquerule, syskey.c.colname], sql.and_(
-                    syskey.c.indschema == sysidx.c.indschema,
-                    syskey.c.indname == sysidx.c.indname,
-                    sysidx.c.tabschema == current_schema,
-                    sysidx.c.tabname == table_name
-                ), order_by=[syskey.c.indname, syskey.c.colno]
-            )
-        indexes = {}
-        for r in connection.execute(query):
-            key = r[0].upper()
-            if key in indexes:
-                indexes[key]['column_names'].append(self.normalize_name(r[2]))
-            else:
-                indexes[key] = {
-                                'name': self.normalize_name(r[0]),
-                                'column_names': [self.normalize_name(r[2])],
-                                'unique': r[1] == 'Y'
-                        }
-        return [value for key, value in indexes.iteritems()]
diff -pruN 0.3.0-3/ibm_db_sa/requirements.py 2.0.5-0ubuntu2/ibm_db_sa/requirements.py
--- 0.3.0-3/ibm_db_sa/requirements.py	2013-02-28 06:29:09.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/requirements.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,78 +0,0 @@
-"""requirements.py
-
-
-This file is used by the SQLAlchemy 0.8 testing suite to mark various
-optional behaviors as non-supported.
-
-"""
-from sqlalchemy.testing.requirements import SuiteRequirements
-
-from sqlalchemy.testing import exclusions
-
-class Requirements(SuiteRequirements):
-
-    @property
-    def on_update_cascade(self):
-        """"target database must support ON UPDATE..CASCADE behavior in
-        foreign keys."""
-
-        return exclusions.closed()
-
-    @property
-    def datetime_microseconds(self):
-        """target dialect supports representation of Python
-        datetime.datetime() with microsecond objects."""
-
-        return exclusions.closed()
-
-    @property
-    def time_microseconds(self):
-        """target dialect supports representation of Python
-        datetime.time() with microsecond objects."""
-
-        return exclusions.closed()
-
-    @property
-    def unbounded_varchar(self):
-        """Target database must support VARCHAR with no length"""
-
-        return exclusions.closed()
-
-    #@property
-    #def offset(self):
-    #    return exclusions.closed()
-
-    @property
-    def window_functions(self):
-        """Target database must support window functions."""
-        return exclusions.open()
-
-    @property
-    def precision_numerics_enotation_small(self):
-        """target backend supports Decimal() objects using E notation
-        to represent very small values."""
-        return exclusions.open()
-
-    @property
-    def precision_numerics_enotation_large(self):
-        """target backend supports Decimal() objects using E notation
-        to represent very large values."""
-        return exclusions.closed()
-
-    @property
-    def precision_numerics_many_significant_digits(self):
-        """target backend supports values with many digits on both sides,
-        such as 319438950232418390.273596, 87673.594069654243
-
-        """
-        return exclusions.fails_if(lambda: True,
-                    "Throws error SQL0604N, regarding Decimal(38, 12)"
-            )
-
-    @property
-    def precision_numerics_retains_significant_digits(self):
-        """A precision numeric type will return empty significant digits,
-        i.e. a value such as 10.000 will come back in Decimal form with
-        the .000 maintained."""
-
-        return exclusions.open()
diff -pruN 0.3.0-3/ibm_db_sa/zxjdbc.py 2.0.5-0ubuntu2/ibm_db_sa/zxjdbc.py
--- 0.3.0-3/ibm_db_sa/zxjdbc.py	2013-02-28 12:19:45.000000000 +0000
+++ 2.0.5-0ubuntu2/ibm_db_sa/zxjdbc.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,190 +0,0 @@
-# +--------------------------------------------------------------------------+
-# |  Licensed Materials - Property of IBM                                    |
-# |                                                                          |
-# | (C) Copyright IBM Corporation 2008, 2013.                                |
-# +--------------------------------------------------------------------------+
-# | This module complies with SQLAlchemy 0.8 and is                          |
-# | Licensed under the Apache License, Version 2.0 (the "License");          |
-# | you may not use this file except in compliance with the License.         |
-# | You may obtain a copy of the License at                                  |
-# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
-# | law or agreed to in writing, software distributed under the License is   |
-# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
-# | KIND, either express or implied. See the License for the specific        |
-# | language governing permissions and limitations under the License.        |
-# +--------------------------------------------------------------------------+
-# | Author: Jaimy Azle                                                       |
-# | Contributor: Mike Bayer                                                  |
-# +--------------------------------------------------------------------------+
-
-raise NotImplementedError(
-        "The zxjdbc dialect is not implemented at this time.")
-
-
-# NOTE: it appears that to use zxjdbc, the "RETURNING" syntax
-# must be installed in DB2, which appears to be optional.  It would
-# be best if the RETURNING support were built into the base dialect
-# and not be local to zxjdbc here.
-
-from decimal import Decimal as _python_Decimal
-from sqlalchemy import sql, util
-from sqlalchemy import types as sa_types
-from sqlalchemy.engine.base import FullyBufferedResultProxy, ResultProxy
-from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from .base import DB2Dialect, DB2ExecutionContext, DB2Compiler
-
-
-
-class ReturningResultProxy(FullyBufferedResultProxy):
-
-    """ResultProxy backed by the RETURNING ResultSet results."""
-
-    def __init__(self, context, returning_row):
-        self._returning_row = returning_row
-        super(ReturningResultProxy, self).__init__(context)
-
-    def _cursor_description(self):
-        ret = []
-        for c in self.context.compiled.returning_cols:
-            if hasattr(c, 'name'):
-                ret.append((c.name, c.type))
-            else:
-                ret.append((c.anon_label, c.type))
-        return ret
-
-    def _buffer_rows(self):
-        return [self._returning_row]
-
-class ReturningParam(object):
-
-    """A bindparam value representing a RETURNING parameter.
-
-    """
-
-    def __init__(self, type):
-        self.type = type
-
-    def __eq__(self, other):
-        if isinstance(other, ReturningParam):
-            return self.type == other.type
-        return NotImplemented
-
-    def __ne__(self, other):
-        if isinstance(other, ReturningParam):
-            return self.type != other.type
-        return NotImplemented
-
-    def __repr__(self):
-        kls = self.__class__
-        return '<%s.%s object at 0x%x type=%s>' % (
-                kls.__module__, kls.__name__, id(self),
-                                                   self.type)
-
-class DB2ExecutionContext_zxjdbc(DB2ExecutionContext):
-
-    def pre_exec(self):
-        if hasattr(self.compiled, 'returning_parameters'):
-            self.statement = self.cursor.prepare(self.statement)
-
-    def get_result_proxy(self):
-        if hasattr(self.compiled, 'returning_parameters'):
-            rrs = None
-            try:
-                try:
-                    rrs = self.statement.__statement__.getReturnResultSet()
-                    rrs.next()
-                except SQLException, sqle:
-                    msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
-                    if sqle.getSQLState() is not None:
-                        msg += ' [SQLState: %s]' % sqle.getSQLState()
-                    raise zxJDBC.Error(msg)
-                else:
-                    row = tuple(self.cursor.datahandler.getPyObject(rrs, index, dbtype)
-                                for index, dbtype in self.compiled.returning_parameters)
-                    return ReturningResultProxy(self, row)
-            finally:
-                if rrs is not None:
-                    try:
-                        rrs.close()
-                    except SQLException:
-                        pass
-                self.statement.close()
-
-        return ResultProxy(self)
-
-    def create_cursor(self):
-        cursor = self._dbapi_connection.cursor()
-        cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
-        return cursor
-
-class DB2Compiler_zxjdbc(DB2Compiler):
-
-    def returning_clause(self, stmt, returning_cols):
-        self.returning_cols = list(expression._select_iterables(returning_cols))
-
-        # within_columns_clause=False so that labels (foo AS bar) don't render
-        columns = [self.process(c, within_columns_clause=False, result_map=self.result_map)
-                   for c in self.returning_cols]
-
-        if not hasattr(self, 'returning_parameters'):
-            self.returning_parameters = []
-
-        binds = []
-        for i, col in enumerate(self.returning_cols):
-            dbtype = col.type.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
-            self.returning_parameters.append((i + 1, dbtype))
-
-            bindparam = sql.bindparam("ret_%d" % i, value=ReturningParam(dbtype))
-            self.binds[bindparam.key] = bindparam
-            binds.append(self.bindparam_string(self._truncate_bindparam(bindparam)))
-
-        return 'RETURNING ' + ', '.join(columns) +  " INTO " + ", ".join(binds)
-
-class DB2Dialect_zxjdbc(ZxJDBCConnector, DB2Dialect):
-
-    supports_unicode_statements = supports_unicode_binds = \
-    returns_unicode_strings = supports_unicode = False
-    supports_sane_rowcount = False
-    supports_sane_multi_rowcount = False
-
-    jdbc_db_name = 'db2'
-    jdbc_driver_name = 'com.ibm.db2.jcc.DB2Driver'
-
-    statement_compiler = DB2Compiler_zxjdbc
-    execution_ctx_cls = DB2ExecutionContext_zxjdbc
-
-    @classmethod
-    def dbapi(cls):
-
-        global SQLException, zxJDBC
-        from java.sql import SQLException, Types as java_Types
-        from com.ziclix.python.sql import zxJDBC
-        from com.ziclix.python.sql import FilterDataHandler
-
-        # TODO: this should be somewhere else
-        class IBM_DB2DataHandler(FilterDataHandler):
-
-            def setJDBCObject(self, statement, index, object, dbtype=None):
-                if type(object) is ReturningParam:
-                    statement.registerReturnParameter(index, object.type)
-                elif dbtype is None:
-                    if (isinstance(object, int)):
-                        statement.setObject(index, str(object), java_Types.BIGINT)
-                    elif (isinstance(object, _python_Decimal)):
-                        statement.setObject(index, str(object), java_Types.DECIMAL)
-                    else:
-                        statement.setObject(index, object)
-                else:
-                    FilterDataHandler.setJDBCObject(self, statement, index, object, dbtype)
-
-        cls.DataHandler = IBM_DB2DataHandler
-        return zxJDBC
-
-
-class AS400Dialect_zxjdbc(DB2Dialect_zxjdbc):
-    jdbc_db_name = 'as400'
-    jdbc_driver_name = 'com.ibm.as400.access.AS400JDBCDriver'
-
-
-
-
diff -pruN 0.3.0-3/MANIFEST.in 2.0.5-0ubuntu2/MANIFEST.in
--- 0.3.0-3/MANIFEST.in	2013-02-27 17:30:18.000000000 +0000
+++ 2.0.5-0ubuntu2/MANIFEST.in	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-recursive-include test *.py
-
-include README* LICENSE distribute_setup.py CHANGES* run_tests.py
-
diff -pruN 0.3.0-3/README 2.0.5-0ubuntu2/README
--- 0.3.0-3/README	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/README	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,149 @@
+Python Driver and DBI Wrapper for IBM Databases
+-----------------------------------------------
+The ibm_db driver and ibm_db_dbi wrapper provide a Python interface for IBM 
+databases.  The wrapper conforms to the DBI specification located here:
+  http://www.python.org/dev/peps/pep-0249/
+
+The driver and wrapper have been developed and will be supported by IBM. 
+
+
+Python ibm_db Driver Build and Installation
+-------------------------------------------
+1. Linux 
+    
+  Preparations (specific commands dependent on shell):
+    If you are compiling against a DB2 instance, do the following
+    (paths depend on system):
+      . <path_to_sqllib>/db2profile
+      export IBM_DB_HOME=<path_to_sqllib>
+  
+  Installation Commands for Linux:
+    python setup.py build
+    python setup.py install 
+  
+2. Windows 
+
+  Preparations:
+    Install Python from:
+      http://python.org
+    Install Visual C++ 2003, Platform SDK (latest), .NET SDK Version
+    1.1 from:
+      http://wiki.tcl.tk/11431
+    Set envrionment:
+      CALL "C:\Program Files\Microsoft Platform SDK for Windows
+        Server 2003 R2\SetEnv.Cmd"
+      CALL "C:\Program Files\Microsoft Visual C++ Toolkit
+        2003\vcvars32.bat"
+      SET LIB=%LIB%;C:\Program Files\Microsoft Visual Studio .NET
+        2003\Vc7\lib 
+      (Use notepad to edit config.py to your DB2 settings)
+      set IBM_DB_HOME=<path_to_sqllib>
+
+  Installation Commands for Windows:
+    setup.py build
+    setup.yp install
+
+
+Python ibm_db_dbi Wrapper Build and Installation
+------------------------------------------------
+There is no special build required to use the ibm_db_wrapper. However, the 
+wrapper internally uses the driver. So the driver needs to be built and 
+installed before you use the wrapper.
+
+
+Setup to use ibm_db or ibm_db_dbi
+---------------------------------
+For Unix based systems:
+Depending on your shell, source the db2profile or db2chsrc. Be sure to source 
+the profile for the DB2 instance and not the profile under the DB2 product 
+installation. For example:
+  . <path_to_sqllib>/db2profile
+                 or
+  source <path_to_sqllib>/db2chsrc
+
+Import ibm_db or ibm_db_dbi inside your Python application to use the driver or 
+wrapper functionality
+  import ibm_db
+       or
+  import ibm_db_dbi
+
+Note: Please make sure that the directory containing the ibm_db_dbi.py file is 
+added to the PYTHONPATH variable. This will ensure that the DBI wrapper can be 
+accessed in your python applications.
+
+
+List of Operating Systems tested
+--------------------------------
+SuSE Linux Server 9 32 bit
+Ubuntu Linux 7.04 32 bit
+Windows 32 bit
+
+
+Supported Databases
+-------------------
+IBM DB2 Database on Linux, Unix and Windows 8.2 and onwards
+Informix(IDS) Cheetah version 11.10 onwards
+Remote connections to i5/OS (iSeries)
+Remote connections to z/OS (DB2 UDB for zOS)
+DB2 on Mac
+
+Future Supported Databases
+--------------------------
+IBM Cloudscape
+Apache Derby
+Native DB2 for i5/OS (iSeries)
+Native DB2 UDB for zOS
+
+
+Testing
+-------
+Tests displaying Python ibm_db driver code examples are located in the tests 
+directory.  These tests can be run by using 'python tests.py' on Unix based 
+systems, and 'tests.py' on Windows based systems. Use vi to edit config.py to 
+your DB2 settings.
+
+The default config.py contains:
+
+test_dir =      'tests'         # Location of testsuite file (relative to current directory)
+
+database =      'test'          # Database to connect to
+user     =      'db2inst1'      # User ID to connect with
+password =      'password'      # Password for given User ID
+hostname =      'localhost'     # Hostname
+port     =      50000           # Port Number
+
+Point the database to mydatabase as created by the following command.
+
+The tests that ibm_db driver uses depends on a UTF-8 database.  This can be 
+created by running:
+    CREATE DATABASE mydatabase USING CODESET UTF-8 TERRITORY US
+Some of the tests utilize XML functionality only available in version 9 or 
+later of DB2.  While DB2 v8.x is fully supported, two of the tests 
+(test_195.py and test_52949.py) utilize XML functionality.  These tests will 
+fail on version 8.x of DB2.
+
+Running the driver testsuite on Linux
+  In order to run the entire python driver testsuite on Linux, run this 
+  command at the command prompt:
+    python tests.py
+  To run a single test, set the environment variable, SINGLE_PYTHON_TEST, to 
+  the test filename you would like to run, followed by the previous command.
+    
+Running the driver testsuite on Windows
+  In order to run the entire python driver testsuite on Windows, run this 
+  command at the command prompt:
+    tests.py
+  To run a single test, set the environment variable, SINGLE_PYTHON_TEST, to 
+  the test filename you would like to run, followed by the previous command.
+
+
+Known Limitations for the Python driver
+---------------------------------------
+If trusted context is not set up, there will be two failures related to trusted context. When thick client has been used than additioanl three failures related to create, recreate DB.
+
+
+Known Limitations for the Python wrapper
+----------------------------------------
+1. The rowcount for select statements can not be generated.
+2. Some warnings from the drivers are not caught by the wrapper.
+   As such these might go unnoticed.
diff -pruN 0.3.0-3/README.rst 2.0.5-0ubuntu2/README.rst
--- 0.3.0-3/README.rst	2013-03-01 10:37:17.000000000 +0000
+++ 2.0.5-0ubuntu2/README.rst	1970-01-01 00:00:00.000000000 +0000
@@ -1,83 +0,0 @@
-IBM_DB_SA
-=========
-
-The IBM_DB_SA adapter provides the Python/SQLAlchemy interface to IBM Data Servers.
-
-Version
---------
-0.3.0 (2013/03/01)
-
-This version is all new for version 0.8 of SQLAlchemy and will also work with version 0.7.
-
-Prerequisites
---------------
-1. Python 2.5.x or Jython 2.5.x .
-2. SQLAlchemy o.7.3 or above.
-3. IBM_DB driver and IBM_DB_DBI wrapper 1.0.1 or higher
-
-Install and Configuration
-=========================
-The IBM_DB_SA Python Egg component (.egg) can be installed using the standard setuptools provided by the Python Easy Install through Python Entreprise 
-Application Kit community portal:
-  http://peak.telecommunity.com/DevCenter/EasyInstall
-
-Please follow the steps provided to Install "Easy Install" in the link above and follow up with these additional steps to install IBM_DB_SA:
-
-  1. To install IBM_DB_SA egg component available in the remote repositories
-  (pypi.python.org or code.google.com):
-    Windows:
-      > easy_install ibm_db_sa
-    Linux/Unix:
-      $ sudo easy_install ibm_db_sa
-  
-  2. To install IBM_DB_SA egg component from the downloaded .egg file
-    Windows:
-      > easy_install ibm_db_sa-x.x.x-pyx.x.egg
-    Linux/Unix:
-      $ sudo easy_install ibm_db_sa-x.x.x-pyx.x.egg
-  
-  3. To install IBM_DB_SA from source
-    Standard Python setup should be used::
-        python setup.py install
-
-Connecting
-----------
-A TCP/IP connection can be specified as the following::
-
-	from sqlalchemy import create_engine
-
-	e = create_engine("db2+ibm_db://user:pass@host[:port]/database")
-
-For a local socket connection, exclude the "host" and "port" portions::
-
-	from sqlalchemy import create_engine
-
-	e = create_engine("db2+ibm_db://user:pass@/database")
-
-Supported Databases
--------------------
-- IBM DB2 Universal Database for Linux/Unix/Windows versions 9.7 onwards
-- Remote connections to i5/OS (iSeries)
-- Remote connections to z/OS (DB2 UDB for zOS), only by default ibm_db drivers
-
-Known Limitations in ibm_db_sa adapter for DB2 databases
--------------------------------------------------------------
-1) Non-standard SQL queries are not supported. e.g. "SELECT ? FROM TAB1"
-2) For updations involving primary/foreign key references, the entries should be made in correct order. Integrity check is always on and thus the primary keys referenced by the foreign keys in the referencing tables should always exist in the parent table.
-3) Unique key which contains nullable column not supported
-4) UPDATE CASCADE for foreign keys not supported
-5) DEFERRABLE INITIALLY deferred not supported
-6) Subquery in ON clause of LEFT OUTER JOIN not supported
-
-Not Supported / Not Tested
----------------------------
-- Python 3 has not yet been tested.
-- pyodbc support has not been tested.
-- zxjdbc/Jython support is not fully implemented.
-
-Credits
--------
-ibm_db_sa for SQLAlchemy was first produced by IBM Inc., targeting version 0.4.
-The library was ported for version 0.6 and 0.7 by Jaimy Azle.
-Port for version 0.8 and modernization of test suite by Mike Bayer.
-
diff -pruN 0.3.0-3/run_tests.py 2.0.5-0ubuntu2/run_tests.py
--- 0.3.0-3/run_tests.py	2013-02-27 17:30:18.000000000 +0000
+++ 2.0.5-0ubuntu2/run_tests.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-from sqlalchemy.dialects import registry
-
-registry.register("db2", "ibm_db_sa.ibm_db", "DB2Dialect_ibm_db")
-registry.register("db2.ibm_db", "ibm_db_sa.ibm_db", "DB2Dialect_ibm_db")
-registry.register("db2.pyodbc", "ibm_db_sa.pyodbc", "DB2Dialect_pyodbc")
-registry.register("db2.zxjdbc", "ibm_db_sa.zxjdbc", "DB2Dialect_zxjdbc")
-registry.register("db2.pyodbc400", "ibm_db_sa.pyodbc", "AS400Dialect_pyodbc")
-registry.register("db2.zxjdbc400", "ibm_db_sa.zxjdbc", "AS400Dialect_zxjdbc")
-
-from sqlalchemy.testing import runner
-
-runner.main()
-
diff -pruN 0.3.0-3/setup.cfg 2.0.5-0ubuntu2/setup.cfg
--- 0.3.0-3/setup.cfg	2013-02-28 11:06:19.000000000 +0000
+++ 2.0.5-0ubuntu2/setup.cfg	1970-01-01 00:00:00.000000000 +0000
@@ -1,18 +0,0 @@
-[egg_info]
-tag_build =
-
-[nosetests]
-with-sqla_testing = true
-where = test
-cover-package = ibm_db
-with-coverage = 1
-cover-erase = 1
-
-[sqla_testing]
-requirement_cls=ibm_db_sa.requirements:Requirements
-profile_file=.profiles.txt
-
-[db]
-default=db2+ibm_db://db2inst2:db2inst2@/test
-sqlite=sqlite:///:memory:
-
diff -pruN 0.3.0-3/setup.py 2.0.5-0ubuntu2/setup.py
--- 0.3.0-3/setup.py	2013-02-28 10:01:43.000000000 +0000
+++ 2.0.5-0ubuntu2/setup.py	2014-01-24 10:40:44.000000000 +0000
@@ -1,55 +1,102 @@
-#!/usr/bin/env python
-
-from setuptools import setup
 import os
-import re
-
-
-v = open(os.path.join(os.path.dirname(__file__), 'ibm_db_sa', '__init__.py'))
-VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
-v.close()
-
-readme = os.path.join(os.path.dirname(__file__), 'README.rst')
-
-setup(
-         name='ibm_db_sa',
-         version=VERSION,
-         license='Apache License 2.0',
-         description='SQLAlchemy support for IBM Data Servers',
-         author='IBM Application Development Team',
-         author_email='opendev@us.ibm.com',
-         url='http://pypi.python.org/pypi/ibm_db_sa/',
-         download_url='http://code.google.com/p/ibm-db/downloads/list',
-         keywords='sqlalchemy database interface IBM Data Servers DB2 Informix IDS',
-         classifiers=[
-            'Development Status :: 4 - Beta',
-            'Intended Audience :: Developers',
-            'License :: OSI Approved :: Apache License 2.0',
-            'Operating System :: OS Independent',
-            'Topic :: Databases :: Front-end, middle-tier'
-        ],
-         long_description=open(readme).read(),
-         platforms='All',
-         install_requires=['sqlalchemy>=0.7.3'],
-         packages=['ibm_db_sa'],
-        entry_points={
-         'sqlalchemy.dialects': [
-                     'db2=ibm_db_sa.ibm_db:DB2Dialect_ibm_db',
-                     'db2.ibm_db=ibm_db_sa.ibm_db:DB2Dialect_ibm_db',
-                     'db2.zxjdbc=ibm_db_sa.zxjdbc:DB2Dialect_zxjdbc',
-                     'db2.pyodbc=ibm_db_sa.pyodbc:DB2Dialect_pyodbc',
-                     'db2.zxjdbc400=ibm_db_sa.zxjdbc:AS400Dialect_zxjdbc',
-                     'db2.pyodbc400=ibm_db_sa.pyodbc:AS400Dialect_pyodbc',
-
-                     # older "ibm_db_sa://" style for backwards
-                     # compatibility
-                     'ibm_db_sa=ibm_db_sa.ibm_db:DB2Dialect_ibm_db',
-                     'ibm_db_sa.zxjdbc=ibm_db_sa.zxjdbc:DB2Dialect_zxjdbc',
-                     'ibm_db_sa.pyodbc=ibm_db_sa.pyodbc:DB2Dialect_pyodbc',
-                     'ibm_db_sa.zxjdbc400=ibm_db_sa.zxjdbc:AS400Dialect_zxjdbc',
-                     'ibm_db_sa.pyodbc400=ibm_db_sa.pyodbc:AS400Dialect_pyodbc',
-                    ]
-       },
-       zip_safe=False,
-       tests_require=['nose >= 0.11'],
+import sys
+import struct
+import warnings
+
+from setuptools import setup, find_packages
+from distutils.core import setup, Extension
+
+PACKAGE = 'ibm_db'
+VERSION = '2.0.5'
+LICENSE = 'Apache License 2.0'
+
+machine_bits =  8 * struct.calcsize("P")
+is64Bit = True
+libDir = ''
+ibm_db_home = ''
+ibm_db_dir = ''
+ibm_db_lib = ''
+
+if machine_bits == 64:
+    is64Bit = True
+    libDir = 'lib64'
+    sys.stdout.write("Detected 64-bit Python\n")
+else:
+    is64Bit = False
+    libDir = 'lib32'
+    sys.stdout.write("Detected 32-bit Python\n")
+    
+try:
+    ibm_db_home = os.environ['IBM_DB_HOME']
+    ibm_db_dir = ibm_db_home
+    ibm_db_lib = os.path.join(ibm_db_dir, libDir)
+except (KeyError):   
+    try:
+        ibm_db_dir = os.environ['IBM_DB_DIR']
+        ibm_db_lib = os.path.join(ibm_db_dir, libDir)
+    except (KeyError):
+        sys.stdout.write("Environment variable IBM_DB_HOME is not set. Set it to your DB2/IBM_Data_Server_Driver installation directory and retry ibm_db module install.\n")
+        sys.exit()
+
+if not os.path.isdir(ibm_db_lib):
+    ibm_db_lib = os.path.join(ibm_db_dir, 'lib')
+    if not os.path.isdir(ibm_db_lib):
+        sys.stdout.write("Cannot find %s directory. Check if you have set the IBM_DB_HOME environment variable's value correctly\n " %(ibm_db_lib))
+        sys.exit()
+    notifyString  = "Detected usage of IBM Data Server Driver package. Ensure you have downloaded "
+    if is64Bit:
+        notifyString = notifyString + "64-bit package "
+    else:
+        notifyString = notifyString + "32-bit package "
+    notifyString = notifyString + "of IBM_Data_Server_Driver and retry the ibm_db module install\n "
+    warnings.warn(notifyString)
+ibm_db_include = os.path.join(ibm_db_dir, 'include')
+if not os.path.isdir(ibm_db_include):
+    sys.stdout.write(" %s/include folder not found. Check if you have set the IBM_DB_HOME environment variable's value correctly\n " %(ibm_db_dir))
+    sys.exit()
+    
+library = ['db2']
+if (sys.platform[0:3] == 'win'):
+  library = ['db2cli']
+
+ibm_db = Extension('ibm_db',
+                    include_dirs = [ibm_db_dir + '/include'],
+                    libraries = library,
+                    library_dirs = [ibm_db_lib],
+                    sources = ['ibm_db.c'])
+
+modules = ['config', 'ibm_db_dbi', 'testfunctions', 'tests']
+extra = {}
+if sys.version_info >= (3, ):
+    extra['use_2to3'] = True
+
+setup( name    = PACKAGE, 
+       version = VERSION,
+       license = LICENSE,
+       description      = 'Python DBI driver for DB2 (LUW, zOS, i5) and IDS',
+       author           = 'IBM Application Development Team',
+       author_email     = 'opendev@us.ibm.com',
+       url              = 'http://pypi.python.org/pypi/ibm_db/',
+       download_url     = 'http://code.google.com/p/ibm-db/downloads/list',
+       keywords         = 'database DB-API interface IBM Data Servers DB2 Informix IDS',
+       classifiers  = [(sys.version_info >= (3, )) and 'Development Status :: 4 - Beta' or 'Development Status :: 5 - Production/Stable',
+                      'Intended Audience :: Developers',
+                      'License :: OSI Approved :: Apache Software License',
+                      'Operating System :: Microsoft :: Windows :: Windows NT/2000/XP',
+                      'Operating System :: Unix',
+                      'Topic :: Database :: Front-Ends'],
+
+       long_description = '''
+                      This extension is the implementation of Python Database API Specification v2.0
+                      The extension supports DB2 (LUW, zOS, i5) and IDS (Informix Dynamic Server)''',
+       platforms = 'LinuxIA32, Win32',
+       ext_modules  = [ibm_db],
+       py_modules   = modules,
+       packages     = find_packages(),
+       package_data = { 'tests': [ '*.png', '*.jpg']},
+       data_files=[ ('', ['./README']),
+                    ('', ['./CHANGES']),
+                    ('', ['./LICENSE']) ],
+       include_package_data = True,
+       **extra
      )
diff -pruN 0.3.0-3/test/test_suite.py 2.0.5-0ubuntu2/test/test_suite.py
--- 0.3.0-3/test/test_suite.py	2013-02-27 17:30:18.000000000 +0000
+++ 2.0.5-0ubuntu2/test/test_suite.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,2 +0,0 @@
-from sqlalchemy.testing.suite import *
-
diff -pruN 0.3.0-3/testfunctions.py 2.0.5-0ubuntu2/testfunctions.py
--- 0.3.0-3/testfunctions.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/testfunctions.py	2014-01-31 10:49:40.000000000 +0000
@@ -0,0 +1,113 @@
+import os
+import sys
+import unittest
+import StringIO
+import re
+import glob
+import inspect
+
+import ibm_db
+import config
+
+class IbmDbTestFunctions(unittest.TestCase):
+  prepconn = ibm_db.connect(config.database, config.user, config.password)
+  server = ibm_db.server_info(prepconn)
+  ibm_db.close(prepconn)
+  
+  # See the tests.py comments for this function.
+  def setUp(self):
+    pass
+ 
+  # This function captures the output of the current test file.
+  def capture(self, func):
+    buffer = StringIO.StringIO()
+    sys.stdout = buffer
+    func()
+    sys.stdout = sys.__stdout__
+    var = buffer.getvalue()
+    var = var.replace('\n', '').replace('\r', '')
+    return var
+  
+  # This function grabs the expected output of the current test function for LUW,
+  #   located at the bottom of the current test file.
+  def expected_LUW(self, fileName):
+    fileHandle = open(fileName, 'r')
+    fileInput = fileHandle.read().split('#__LUW_EXPECTED__')[-1].split('#__ZOS_EXPECTED__')[0].replace('\n', '').replace('#', '')
+    fileHandle.close()
+    return fileInput
+
+  # This function grabs the expected output of the current test function for IDS,
+  #   located at the bottom of the current test file.
+  def expected_IDS(self, fileName):
+    fileHandle = open(fileName, 'r')
+    fileInput = fileHandle.read().split('#__IDS_EXPECTED__')[-1].replace('\n', '').replace('#', '')
+    fileHandle.close()
+    return fileInput
+
+  # This function grabs the expected output of the current test function for zOS,
+  #   located at the bottom of the current test file.
+  def expected_ZOS(self, fileName):
+    fileHandle = open(fileName, 'r')
+    fileInput = fileHandle.read().split('#__ZOS_EXPECTED__')[-1].split('#__SYSTEMI_EXPECTED__')[0].replace('\n', '').replace('#', '')
+    fileHandle.close()
+    return fileInput
+
+  # This function grabs the expected output of the current test function for zOS,
+  #   located at the bottom of the current test file.
+  def expected_AS(self, fileName):
+    fileHandle = open(fileName, 'r')
+    fileInput = fileHandle.read().split('#__SYSTEMI_EXPECTED__')[-1].split('#__IDS_EXPECTED__')[0].replace('\n', '').replace('#', '')
+    fileHandle.close()
+    return fileInput
+    
+  # This function compares the captured outout with the expected out of
+  #   the current test file.
+  def assert_expect(self, testFuncName):
+    callstack = inspect.stack(0)
+    try:
+      if (self.server.DBMS_NAME[0:2] == "AS"):
+          self.assertEqual(self.capture(testFuncName), self.expected_AS(callstack[1][1]))
+      elif (self.server.DBMS_NAME == "DB2"):
+          self.assertEqual(self.capture(testFuncName), self.expected_ZOS(callstack[1][1]))
+      elif (self.server.DBMS_NAME[0:3] == "IDS"):
+          self.assertEqual(self.capture(testFuncName), self.expected_IDS(callstack[1][1]))
+      else:
+          self.assertEqual(self.capture(testFuncName), self.expected_LUW(callstack[1][1]))
+      
+    finally:
+      del callstack
+
+  # This function will compare using Regular Expressions
+  # based on the servre
+  def assert_expectf(self, testFuncName):
+    callstack = inspect.stack(0)
+    try:
+      if (self.server.DBMS_NAME[0:2] == "AS"):
+          pattern = self.expected_AS(callstack[1][1])
+      elif (self.server.DBMS_NAME == "DB2"):
+          pattern = self.expected_ZOS(callstack[1][1])
+      elif (self.server.DBMS_NAME[0:3] == "IDS"):
+          pattern = self.expected_IDS(callstack[1][1])
+      else:
+          pattern = self.expected_LUW(callstack[1][1])
+      
+      sym = ['\[','\]','\(','\)']
+      for chr in sym:
+          pattern = re.sub(chr, '\\' + chr, pattern)
+
+      pattern = re.sub('%s', '.*?', pattern)
+      pattern = re.sub('%d', '\\d+', pattern)
+
+      result = re.match(pattern, self.capture(testFuncName))
+      self.assertNotEqual(result, None)
+    finally:
+      del callstack
+      
+  #def assert_throw_blocks(self, testFuncName):
+  #  callstack = inspect.stack(0)
+  #  try:
+
+  # This function needs to be declared here, regardless of if there 
+  #   is any body to this function
+  def runTest(self):
+    pass
diff -pruN 0.3.0-3/tests/__init__.py 2.0.5-0ubuntu2/tests/__init__.py
--- 0.3.0-3/tests/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/__init__.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,2 @@
+
+
Binary files 0.3.0-3/tests/pic1.jpg and 2.0.5-0ubuntu2/tests/pic1.jpg differ
Binary files 0.3.0-3/tests/spook.png and 2.0.5-0ubuntu2/tests/spook.png differ
diff -pruN 0.3.0-3/tests/test_000_PrepareDb.py 2.0.5-0ubuntu2/tests/test_000_PrepareDb.py
--- 0.3.0-3/tests/test_000_PrepareDb.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_000_PrepareDb.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,555 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys, os
+import ibm_db
+#need to add this line below to each file to make the connect parameters available to all the test files
+import config
+from testfunctions import IbmDbTestFunctions
+
+name = 'name'
+picture = 'picture'
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_000_PrepareDb(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_000)
+
+  def run_test_000(self):
+    # Make a connection
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    # Get the server type
+    server = ibm_db.server_info( conn )
+
+    # Drop the animal table, in case it exists
+    drop = 'DROP TABLE animals'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the animal table
+    create = 'CREATE TABLE animals (id INTEGER, breed VARCHAR(32), name CHAR(16), weight DECIMAL(7,2))'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the animal table
+    animals = (\
+    	(0, 'cat',        'Pook',         3.2),\
+	(1, 'dog',        'Peaches',      12.3),\
+	(2, 'horse',      'Smarty',       350.0),\
+	(3, 'gold fish',  'Bubbles',      0.1),\
+	(4, 'budgerigar', 'Gizmo',        0.2),\
+	(5, 'goat',       'Rickety Ride', 9.7),\
+	(6, 'llama',      'Sweater',      150)\
+	)
+    insert = 'INSERT INTO animals (id, breed, name, weight) VALUES (?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for animal in animals:
+        result = ibm_db.execute(stmt, animal)
+
+    # Drop the test view, in case it exists
+    drop = 'DROP VIEW anime_cat'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create test view
+    ibm_db.exec_immediate(conn, """CREATE VIEW anime_cat AS
+      SELECT name, breed FROM animals
+      WHERE id = 0""")
+
+    # Drop the animal_pics table
+    drop = 'DROP TABLE animal_pics'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the animal_pics table
+    create = 'CREATE TABLE animal_pics (name VARCHAR(32), picture BLOB)'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the view table
+    animals = (\
+      ('Spook', 'spook.png'),\
+      ('Helmut', 'pic1.jpg'),\
+    )
+    insert = 'INSERT INTO animal_pics (name, picture) VALUES (?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if (not stmt):
+      print "Attempt to prepare statement failed."
+      return 0
+    for animal in animals:
+      name = animal[0]
+      fileHandle = open(os.path.dirname(os.path.abspath(__file__)) + '/' + animal[1], 'rb')
+      picture = fileHandle.read()
+      if (not picture):
+        print "Could not retrieve picture '%s'." % animal[1]
+        return 0
+      ibm_db.bind_param(stmt, 1, name, ibm_db.SQL_PARAM_INPUT)
+      ibm_db.bind_param(stmt, 2, picture, ibm_db.SQL_PARAM_INPUT)
+      result = ibm_db.execute(stmt)
+
+    # Drop the department table, in case it exists
+    drop = 'DROP TABLE department'
+    try:
+        result = ibm_db.exec_immediate(conn, drop) 
+    except:
+        pass
+    # Create the department table
+    create = 'CREATE TABLE department (deptno CHAR(3) NOT NULL, deptname VARCHAR(29) NOT NULL, mgrno CHAR(6), admrdept CHAR(3) NOT NULL, location CHAR(16))'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the department table
+    department = (\
+      ('A00', 'SPIFFY COMPUTER SERVICE DIV.', '000010', 'A00', None),\
+      ('B01', 'PLANNING',                     '000020', 'A00', None),\
+      ('C01', 'INFORMATION CENTER',           '000030', 'A00', None),\
+      ('D01', 'DEVELOPMENT CENTER',           None,     'A00', None),\
+      ('D11', 'MANUFACTURING SYSTEMS',        '000060', 'D01', None),\
+      ('D21', 'ADMINISTRATION SYSTEMS',       '000070', 'D01', None),\
+      ('E01', 'SUPPORT SERVICES',             '000050', 'A00', None),\
+      ('E11', 'OPERATIONS',                   '000090', 'E01', None),\
+      ('E21', 'SOFTWARE SUPPORT',             '000100', 'E01', None)\
+    )
+    insert = 'INSERT INTO department (deptno, deptname, mgrno, admrdept, location) VALUES (?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for dept in department:
+        result = ibm_db.execute(stmt, dept)
+
+    # Drop the emp_act table, in case it exists
+    drop = 'DROP TABLE emp_act'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the emp_act table
+    create = 'CREATE TABLE emp_act (empno CHAR(6) NOT NULL, projno CHAR(6) NOT NULL, actno SMALLINT NOT NULL, emptime DECIMAL(5,2), emstdate DATE, emendate DATE)'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the emp_act table
+    emp_act = (\
+      ('000010', 'MA2100',   10,   0.50,  '1982-01-01',  '1982-11-01'),\
+      ('000010', 'MA2110',   10,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000010', 'AD3100',   10,   0.50,  '1982-01-01',  '1982-07-01'),\
+      ('000020', 'PL2100',   30,   1.00,  '1982-01-01',  '1982-09-15'),\
+      ('000030', 'IF1000',   10,   0.50,  '1982-06-01',  '1983-01-01'),\
+      ('000030', 'IF2000',   10,   0.50,  '1982-01-01',  '1983-01-01'),\
+      ('000050', 'OP1000',   10,   0.25,  '1982-01-01',  '1983-02-01'),\
+      ('000050', 'OP2010',   10,   0.75,  '1982-01-01',  '1983-02-01'),\
+      ('000070', 'AD3110',   10,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000090', 'OP1010',   10,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000100', 'OP2010',   10,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000110', 'MA2100',   20,   1.00,  '1982-01-01',  '1982-03-01'),\
+      ('000130', 'IF1000',   90,   1.00,  '1982-01-01',  '1982-10-01'),\
+      ('000130', 'IF1000',  100,   0.50,  '1982-10-01',  '1983-01-01'),\
+      ('000140', 'IF1000',   90,   0.50,  '1982-10-01',  '1983-01-01'),\
+      ('000140', 'IF2000',  100,   1.00,  '1982-01-01',  '1982-03-01'),\
+      ('000140', 'IF2000',  100,   0.50,  '1982-03-01',  '1982-07-01'),\
+      ('000140', 'IF2000',  110,   0.50,  '1982-03-01',  '1982-07-01'),\
+      ('000140', 'IF2000',  110,   0.50,  '1982-10-01',  '1983-01-01'),\
+      ('000150', 'MA2112',   60,   1.00,  '1982-01-01',  '1982-07-15'),\
+      ('000150', 'MA2112',  180,   1.00,  '1982-07-15',  '1983-02-01'),\
+      ('000160', 'MA2113',   60,   1.00,  '1982-07-15',  '1983-02-01'),\
+      ('000170', 'MA2112',   60,   1.00,  '1982-01-01',  '1983-06-01'),\
+      ('000170', 'MA2112',   70,   1.00,  '1982-06-01',  '1983-02-01'),\
+      ('000170', 'MA2113',   80,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000180', 'MA2113',   70,   1.00,  '1982-04-01',  '1982-06-15'),\
+      ('000190', 'MA2112',   70,   1.00,  '1982-02-01',  '1982-10-01'),\
+      ('000190', 'MA2112',   80,   1.00,  '1982-10-01',  '1983-10-01'),\
+      ('000200', 'MA2111',   50,   1.00,  '1982-01-01',  '1982-06-15'),\
+      ('000200', 'MA2111',   60,   1.00,  '1982-06-15',  '1983-02-01'),\
+      ('000210', 'MA2113',   80,   0.50,  '1982-10-01',  '1983-02-01'),\
+      ('000210', 'MA2113',  180,   0.50,  '1982-10-01',  '1983-02-01'),\
+      ('000220', 'MA2111',   40,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000230', 'AD3111',   60,   1.00,  '1982-01-01',  '1982-03-15'),\
+      ('000230', 'AD3111',   60,   0.50,  '1982-03-15',  '1982-04-15'),\
+      ('000230', 'AD3111',   70,   0.50,  '1982-03-15',  '1982-10-15'),\
+      ('000230', 'AD3111',   80,   0.50,  '1982-04-15',  '1982-10-15'),\
+      ('000230', 'AD3111',  180,   1.00,  '1982-10-15',  '1983-01-01'),\
+      ('000240', 'AD3111',   70,   1.00,  '1982-02-15',  '1982-09-15'),\
+      ('000240', 'AD3111',   80,   1.00,  '1982-09-15',  '1983-01-01'),\
+      ('000250', 'AD3112',   60,   1.00,  '1982-01-01',  '1982-02-01'),\
+      ('000250', 'AD3112',   60,   0.50,  '1982-02-01',  '1982-03-15'),\
+      ('000250', 'AD3112',   60,   0.50,  '1982-12-01',  '1983-01-01'),\
+      ('000250', 'AD3112',   60,   1.00,  '1983-01-01',  '1983-02-01'),\
+      ('000250', 'AD3112',   70,   0.50,  '1982-02-01',  '1982-03-15'),\
+      ('000250', 'AD3112',   70,   1.00,  '1982-03-15',  '1982-08-15'),\
+      ('000250', 'AD3112',   70,   0.25,  '1982-08-15',  '1982-10-15'),\
+      ('000250', 'AD3112',   80,   0.25,  '1982-08-15',  '1982-10-15'),\
+      ('000250', 'AD3112',   80,   0.50,  '1982-10-15',  '1982-12-01'),\
+      ('000250', 'AD3112',  180,   0.50,  '1982-08-15',  '1983-01-01'),\
+      ('000260', 'AD3113',   70,   0.50,  '1982-06-15',  '1982-07-01'),\
+      ('000260', 'AD3113',   70,   1.00,  '1982-07-01',  '1983-02-01'),\
+      ('000260', 'AD3113',   80,   1.00,  '1982-01-01',  '1982-03-01'),\
+      ('000260', 'AD3113',   80,   0.50,  '1982-03-01',  '1982-04-15'),\
+      ('000260', 'AD3113',  180,   0.50,  '1982-03-01',  '1982-04-15'),\
+      ('000260', 'AD3113',  180,   1.00,  '1982-04-15',  '1982-06-01'),\
+      ('000260', 'AD3113',  180,   0.50,  '1982-06-01',  '1982-07-01'),\
+      ('000270', 'AD3113',   60,   0.50,  '1982-03-01',  '1982-04-01'),\
+      ('000270', 'AD3113',   60,   1.00,  '1982-04-01',  '1982-09-01'),\
+      ('000270', 'AD3113',   60,   0.25,  '1982-09-01',  '1982-10-15'),\
+      ('000270', 'AD3113',   70,   0.75,  '1982-09-01',  '1982-10-15'),\
+      ('000270', 'AD3113',   70,   1.00,  '1982-10-15',  '1983-02-01'),\
+      ('000270', 'AD3113',   80,   1.00,  '1982-01-01',  '1982-03-01'),\
+      ('000270', 'AD3113',   80,   0.50,  '1982-03-01',  '1982-04-01'),\
+      ('000280', 'OP1010',  130,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000290', 'OP1010',  130,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000300', 'OP1010',  130,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000310', 'OP1010',  130,   1.00,  '1982-01-01',  '1983-02-01'),\
+      ('000320', 'OP2011',  140,   0.75,  '1982-01-01',  '1983-02-01'),\
+      ('000320', 'OP2011',  150,   0.25,  '1982-01-01',  '1983-02-01'),\
+      ('000330', 'OP2012',  140,   0.25,  '1982-01-01',  '1983-02-01'),\
+      ('000330', 'OP2012',  160,   0.75,  '1982-01-01',  '1983-02-01'),\
+      ('000340', 'OP2013',  140,   0.50,  '1982-01-01',  '1983-02-01'),\
+      ('000340', 'OP2013',  170,   0.50,  '1982-01-01',  '1983-02-01'),\
+      ('000020', 'PL2100',   30,   1.00,  '1982-01-01',  '1982-09-15')\
+    )
+    insert = 'INSERT INTO emp_act (empno, projno, actno, emptime, emstdate, emendate) VALUES (?, ?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for emp in emp_act:
+        result = ibm_db.execute(stmt, emp)
+
+    # Drop the employee table, in case it exists
+    drop = 'DROP TABLE employee'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the employee table
+    create = 'CREATE TABLE employee (empno CHAR(6) NOT NULL, firstnme VARCHAR(12) NOT NULL, midinit CHAR(1) NOT NULL, lastname VARCHAR(15) NOT NULL, workdept CHAR(3), phoneno CHAR(4), hiredate DATE, job CHAR(8), edlevel SMALLINT NOT NULL, sex CHAR(1), birthdate DATE, salary DECIMAL(9,2), bonus DECIMAL(9,2), comm DECIMAL(9,2))'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the employee table
+    employee = (
+      ('000010', 'CHRISTINE', 'I', 'HAAS',       'A00', '3978', '1965-01-01', 'PRES',     18, 'F', '1933-08-24', 52750.00, 1000, 4220),
+      ('000020', 'MICHAEL',   'L', 'THOMPSON',   'B01', '3476', '1973-10-10', 'MANAGER',  18, 'M' ,'1948-02-02', 41250.00,  800, 3300),
+      ('000030', 'SALLY',     'A', 'KWAN',       'C01', '4738', '1975-04-05', 'MANAGER',  20, 'F' ,'1941-05-11', 38250.00,  800, 3060),
+      ('000050', 'JOHN',      'B', 'GEYER',      'E01', '6789', '1949-08-17', 'MANAGER',  16, 'M' ,'1925-09-15', 40175.00,  800, 3214),
+      ('000060', 'IRVING',    'F', 'STERN',      'D11', '6423', '1973-09-14', 'MANAGER',  16, 'M' ,'1945-07-07', 32250.00,  500, 2580),
+      ('000070', 'EVA',       'D', 'PULASKI',    'D21', '7831', '1980-09-30', 'MANAGER',  16, 'F' ,'1953-05-26', 36170.00,  700, 2893),
+      ('000090', 'EILEEN',    'W', 'HENDERSON',  'E11', '5498', '1970-08-15', 'MANAGER',  16, 'F' ,'1941-05-15', 29750.00,  600, 2380),
+      ('000100', 'THEODORE',  'Q', 'SPENSER',    'E21', '0972', '1980-06-19', 'MANAGER',  14, 'M' ,'1956-12-18', 26150.00,  500, 2092),
+      ('000110', 'VINCENZO',  'G', 'LUCCHESSI',  'A00', '3490', '1958-05-16', 'SALESREP', 19, 'M' ,'1929-11-05', 46500.00,  900, 3720),
+      ('000120', 'SEAN',      '' , 'OCONNELL',   'A00', '2167', '1963-12-05', 'CLERK',    14, 'M' ,'1942-10-18', 29250.00,  600, 2340),
+      ('000130', 'DOLORES',   'M', 'QUINTANA',   'C01', '4578', '1971-07-28', 'ANALYST',  16, 'F' ,'1925-09-15', 23800.00,  500, 1904),
+      ('000140', 'HEATHER',   'A', 'NICHOLLS',   'C01', '1793', '1976-12-15', 'ANALYST',  18, 'F' ,'1946-01-19', 28420.00,  600, 2274),
+      ('000150', 'BRUCE',     '' , 'ADAMSON',    'D11', '4510', '1972-02-12', 'DESIGNER', 16, 'M' ,'1947-05-17', 25280.00,  500, 2022),
+      ('000160', 'ELIZABETH', 'R', 'PIANKA',     'D11', '3782', '1977-10-11', 'DESIGNER', 17, 'F' ,'1955-04-12', 22250.00,  400, 1780),
+      ('000170', 'MASATOSHI', 'J', 'YOSHIMURA',  'D11', '2890', '1978-09-15', 'DESIGNER', 16, 'M' ,'1951-01-05', 24680.00,  500, 1974),
+      ('000180', 'MARILYN',   'S', 'SCOUTTEN',   'D11', '1682', '1973-07-07', 'DESIGNER', 17, 'F' ,'1949-02-21', 21340.00,  500, 1707),
+      ('000190', 'JAMES',     'H', 'WALKER',     'D11', '2986', '1974-07-26', 'DESIGNER', 16, 'M' ,'1952-06-25', 20450.00,  400, 1636),
+      ('000200', 'DAVID',     '' , 'BROWN',      'D11', '4501', '1966-03-03', 'DESIGNER', 16, 'M' ,'1941-05-29', 27740.00,  600, 2217),
+      ('000210', 'WILLIAM',   'T', 'JONES',      'D11', '0942', '1979-04-11', 'DESIGNER', 17, 'M' ,'1953-02-23', 18270.00,  400, 1462),
+      ('000220', 'JENNIFER',  'K', 'LUTZ',       'D11', '0672', '1968-08-29', 'DESIGNER', 18, 'F' ,'1948-03-19', 29840.00,  600, 2387),
+      ('000230', 'JAMES',     'J', 'JEFFERSON',  'D21', '2094', '1966-11-21', 'CLERK',    14, 'M' ,'1935-05-30', 22180.00,  400, 1774),
+      ('000240', 'SALVATORE', 'M', 'MARINO',     'D21', '3780', '1979-12-05', 'CLERK',    17, 'M' ,'1954-03-31', 28760.00,  600, 2301),
+      ('000250', 'DANIEL',    'S', 'SMITH',      'D21', '0961', '1969-10-30', 'CLERK',    15, 'M' ,'1939-11-12', 19180.00,  400, 1534),
+      ('000260', 'SYBIL',     'P', 'JOHNSON',    'D21', '8953', '1975-09-11', 'CLERK',    16, 'F' ,'1936-10-05', 17250.00,  300, 1380),
+      ('000270', 'MARIA',     'L', 'PEREZ',      'D21', '9001', '1980-09-30', 'CLERK',    15, 'F' ,'1953-05-26', 27380.00,  500, 2190),
+      ('000280', 'ETHEL',     'R', 'SCHNEIDER',  'E11', '8997', '1967-03-24', 'OPERATOR', 17, 'F' ,'1936-03-28', 26250.00,  500, 2100),
+      ('000290', 'JOHN',      'R', 'PARKER',     'E11', '4502', '1980-05-30', 'OPERATOR', 12, 'M' ,'1946-07-09', 15340.00,  300, 1227),
+      ('000300', 'PHILIP',    'X', 'SMITH',      'E11', '2095', '1972-06-19', 'OPERATOR', 14, 'M' ,'1936-10-27', 17750.00,  400, 1420),
+      ('000310', 'MAUDE',     'F', 'SETRIGHT',   'E11', '3332', '1964-09-12', 'OPERATOR', 12, 'F' ,'1931-04-21', 15900.00,  300, 1272),
+      ('000320', 'RAMLAL',    'V', 'MEHTA',      'E21', '9990', '1965-07-07', 'FIELDREP', 16, 'M' ,'1932-08-11', 19950.00,  400, 1596),
+      ('000330', 'WING',      '' , 'LEE',        'E21', '2103', '1976-02-23', 'FIELDREP', 14, 'M' ,'1941-07-18', 25370.00,  500, 2030),
+      ('000340', 'JASON',     'R', 'GOUNOT',     'E21', '5698', '1947-05-05', 'FIELDREP', 16, 'M' ,'1926-05-17', 23840.00,  500, 1907)
+    )
+    insert = 'INSERT INTO employee (empno, firstnme, midinit, lastname, workdept, phoneno, hiredate, job, edlevel, sex, birthdate, salary, bonus, comm) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for emp in employee:
+        result = ibm_db.execute(stmt, emp)
+
+    # Drop the emp_photo table, in case it exists
+    drop = 'DROP TABLE emp_photo'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the emp_photo table
+    create = 'CREATE TABLE emp_photo (empno CHAR(6) NOT NULL, photo_format VARCHAR(10) NOT NULL, picture BLOB, PRIMARY KEY(empno, photo_format))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    # Populate the emp_photo table
+    emp_photo = (\
+      ('000130', 'jpg', 'pic1.jpg'),\
+      ('000130', 'png', 'spook.png'),\
+      ('000140', 'jpg', 'pic1.jpg'),\
+      ('000140', 'png', 'spook.png'),\
+      ('000150', 'jpg', 'pic1.jpg'),\
+      ('000150', 'png', 'spook.png'),\
+      ('000190', 'jpg', 'pic1.jpg'),\
+      ('000190', 'png', 'spook.png')\
+    )
+    insert = 'INSERT INTO emp_photo (empno, photo_format, picture) VALUES (?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for photo in emp_photo:
+        empno = photo[0]
+        photo_format = photo[1]
+        fileHandler = open(os.path.dirname(os.path.abspath(__file__)) + '/' + photo[2], 'rb')
+        picture = fileHandler.read()
+        ibm_db.bind_param(stmt, 1, empno, ibm_db.SQL_PARAM_INPUT)
+        ibm_db.bind_param(stmt, 2, photo_format, ibm_db.SQL_PARAM_INPUT)
+        ibm_db.bind_param(stmt, 3, picture, ibm_db.SQL_PARAM_INPUT)
+        result = ibm_db.execute(stmt)
+
+    # Drop the org table, in case it exists
+    drop = 'DROP TABLE org'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the org table
+    create = 'CREATE TABLE org (deptnumb SMALLINT NOT NULL, deptname VARCHAR(14), manager SMALLINT, division VARCHAR(10), location VARCHAR(13))'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the org table
+    org = (\
+      (10, 'Head Office',    160, 'Corporate', 'New York'),\
+      (15, 'New England',    50,  'Eastern',   'Boston'),\
+      (20, 'Mid Atlantic',   10,  'Eastern',   'Washington'),\
+      (38, 'South Atlantic', 30,  'Eastern',   'Atlanta'),\
+      (42, 'Great Lakes',    100, 'Midwest',   'Chicago'),\
+      (51, 'Plains',         140, 'Midwest',   'Dallas'),\
+      (66, 'Pacific',        270, 'Western',   'San Francisco'),\
+      (84, 'Mountain',       290, 'Western',   'Denver')\
+    )
+    insert = 'INSERT INTO org (deptnumb, deptname, manager, division, location) VALUES (?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for orgpart in org:
+        result = ibm_db.execute(stmt, orgpart)
+
+    # Drop the project table, in case it exists
+    drop = 'DROP TABLE project'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the project table
+    create = 'CREATE TABLE project (projno CHAR(6) NOT NULL, projname VARCHAR(24) NOT NULL, deptno CHAR(3) NOT NULL, respemp CHAR(6) NOT NULL, prstaff DECIMAL(5,2), prstdate DATE, prendate DATE, majproj CHAR(6))'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the project table
+    project = (\
+      ('AD3100', 'ADMIN SERVICES',        'D01', '000010', 6.5, '1982-01-01', '1983-02-01', ''),\
+      ('AD3110', 'GENERAL ADMIN SYSTEMS', 'D21', '000070',   6, '1982-01-01', '1983-02-01', 'AD3100'),\
+      ('AD3111', 'PAYROLL PROGRAMMING',   'D21', '000230',   2, '1982-01-01', '1983-02-01', 'AD3110'),\
+      ('AD3112', 'PERSONNEL PROGRAMMING', 'D21', '000250',   1, '1982-01-01', '1983-02-01', 'AD3110'),\
+      ('AD3113', 'ACCOUNT PROGRAMMING',   'D21', '000270',   2, '1982-01-01', '1983-02-01', 'AD3110'),\
+      ('IF1000', 'QUERY SERVICES',        'C01', '000030',   2, '1982-01-01', '1983-02-01', None),\
+      ('IF2000', 'USER EDUCATION',        'C01', '000030',   1, '1982-01-01', '1983-02-01', None),\
+      ('MA2100', 'WELD LINE AUTOMATION',  'D01', '000010',  12, '1982-01-01', '1983-02-01', None),\
+      ('MA2110', 'W L PROGRAMMING',       'D11', '000060',   9, '1982-01-01', '1983-02-01', 'MA2100'),\
+      ('MA2111', 'W L PROGRAM DESIGN',    'D11', '000220',   2, '1982-01-01', '1982-12-01', 'MA2110'),\
+      ('MA2112', 'W L ROBOT DESIGN',      'D11', '000150',   3, '1982-01-01', '1982-12-01', 'MA2110'),\
+      ('MA2113', 'W L PROD CONT PROGS',   'D11', '000160',   3, '1982-02-15', '1982-12-01', 'MA2110'),\
+      ('OP1000', 'OPERATION SUPPORT',     'E01', '000050',   6, '1982-01-01', '1983-02-01', None),\
+      ('OP1010', 'OPERATION',             'E11', '000090',   5, '1982-01-01', '1983-02-01', 'OP1000'),\
+      ('OP2000', 'GEN SYSTEMS SERVICES',  'E01', '000050',   5, '1982-01-01', '1983-02-01', None),\
+      ('OP2010', 'SYSTEMS SUPPORT',       'E21', '000100',   4, '1982-01-01', '1983-02-01', 'OP2000'),\
+      ('OP2011', 'SCP SYSTEMS SUPPORT',   'E21', '000320',   1, '1982-01-01', '1983-02-01', 'OP2010'),\
+      ('OP2012', 'APPLICATIONS SUPPORT',  'E21', '000330',   1, '1982-01-01', '1983-02-01', 'OP2010'),\
+      ('OP2013', 'DB/DC SUPPORT',         'E21', '000340',   1, '1982-01-01', '1983-02-01', 'OP2010'),\
+      ('PL2100', 'WELD LINE PLANNING',    'B01', '000020',   1, '1982-01-01', '1982-09-15', 'MA2100')\
+    )
+    insert = 'INSERT INTO project (projno, projname, deptno, respemp, prstaff, prstdate, prendate, majproj) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for proj in project:
+        result = ibm_db.execute(stmt, proj)
+
+    # Drop the sales table, in case it exists
+    drop = 'DROP TABLE sales'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the sales table
+    create = 'CREATE TABLE sales (sales_date DATE, sales_person VARCHAR(15), region VARCHAR(15), sales INT)'
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the sales table
+    sales = (\
+      ('1995-12-31', 'LUCCHESSI',   'Ontario-South',  1),\
+      ('1995-12-31', 'LEE',         'Ontario-South',  3),\
+      ('1995-12-31', 'LEE',         'Quebec',         1),\
+      ('1995-12-31', 'LEE',         'Manitoba',       2),\
+      ('1995-12-31', 'GOUNOT',      'Quebec',         1),\
+      ('1996-03-29', 'LUCCHESSI',   'Ontario-South',  3),\
+      ('1996-03-29', 'LUCCHESSI',   'Quebec',         1),\
+      ('1996-03-29', 'LEE',         'Ontario-South',  2),\
+      ('1996-03-29', 'LEE',         'Ontario-North',  2),\
+      ('1996-03-29', 'LEE',         'Quebec',         3),\
+      ('1996-03-29', 'LEE',         'Manitoba',       5),\
+      ('1996-03-29', 'GOUNOT',      'Ontario-South',  3),\
+      ('1996-03-29', 'GOUNOT',      'Quebec',         1),\
+      ('1996-03-29', 'GOUNOT',      'Manitoba',       7),\
+      ('1996-03-30', 'LUCCHESSI',   'Ontario-South',  1),\
+      ('1996-03-30', 'LUCCHESSI',   'Quebec',         2),\
+      ('1996-03-30', 'LUCCHESSI',   'Manitoba',       1),\
+      ('1996-03-30', 'LEE',         'Ontario-South',  7),\
+      ('1996-03-30', 'LEE',         'Ontario-North',  3),\
+      ('1996-03-30', 'LEE',         'Quebec',         7),\
+      ('1996-03-30', 'LEE',         'Manitoba',       4),\
+      ('1996-03-30', 'GOUNOT',      'Ontario-South',  2),\
+      ('1996-03-30', 'GOUNOT',      'Quebec',        18),\
+      ('1996-03-30', 'GOUNOT',      'Manitoba',       1),\
+      ('1996-03-31', 'LUCCHESSI',   'Manitoba',       1),\
+      ('1996-03-31', 'LEE',         'Ontario-South', 14),\
+      ('1996-03-31', 'LEE',         'Ontario-North',  3),\
+      ('1996-03-31', 'LEE',         'Quebec',         7),\
+      ('1996-03-31', 'LEE',         'Manitoba',       3),\
+      ('1996-03-31', 'GOUNOT',      'Ontario-South',  2),\
+      ('1996-03-31', 'GOUNOT',      'Quebec',         1),\
+      ('1996-04-01', 'LUCCHESSI',   'Ontario-South',  3),\
+      ('1996-04-01', 'LUCCHESSI',   'Manitoba',       1),\
+      ('1996-04-01', 'LEE',         'Ontario-South',  8),\
+      ('1996-04-01', 'LEE',         'Ontario-North', None),\
+      ('1996-04-01', 'LEE',         'Quebec',         8),\
+      ('1996-04-01', 'LEE',         'Manitoba',       9),\
+      ('1996-04-01', 'GOUNOT',      'Ontario-South',  3),\
+      ('1996-04-01', 'GOUNOT',      'Ontario-North',  1),\
+      ('1996-04-01', 'GOUNOT',      'Quebec',         3),\
+      ('1996-04-01', 'GOUNOT',      'Manitoba',       7)\
+    )
+    insert = 'INSERT INTO sales (sales_date, sales_person, region, sales) VALUES (?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for sale in sales:
+        result = ibm_db.execute(stmt, sale)
+
+    # Drop the stored procedure, in case it exists
+    drop = 'DROP PROCEDURE match_animal'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+
+    # Create the stored procedure
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.exec_immediate(conn, """
+      CREATE PROCEDURE match_animal(first_name VARCHAR(128), INOUT second_name VARCHAR(128), OUT animal_weight DOUBLE PRECISION )
+       DEFINE match_name INT;
+       LET match_name = 0;
+
+       FOREACH c1 FOR
+             SELECT COUNT(*) INTO match_name FROM animals
+                   WHERE name IN (second_name)
+       IF (match_name > 0)
+          THEN LET second_name = 'TRUE';
+       END IF;
+       END FOREACH;
+
+       FOREACH c2 FOR
+             SELECT SUM(weight) INTO animal_weight FROM animals
+                   WHERE name in (first_name, second_name)
+       END FOREACH;
+      END PROCEDURE;""")
+    else:
+      result = ibm_db.exec_immediate(conn, """
+      CREATE PROCEDURE match_animal(IN first_name VARCHAR(128), INOUT second_name VARCHAR(128), OUT animal_weight DOUBLE)
+      DYNAMIC RESULT SETS 1
+      LANGUAGE SQL
+      BEGIN
+       DECLARE match_name INT DEFAULT 0;
+       DECLARE c1 CURSOR FOR
+        SELECT COUNT(*) FROM animals
+        WHERE name IN (second_name);
+
+       DECLARE c2 CURSOR FOR
+        SELECT SUM(weight) FROM animals
+        WHERE name in (first_name, second_name);
+
+       DECLARE c3 CURSOR WITH RETURN FOR
+        SELECT name, breed, weight FROM animals
+        WHERE name BETWEEN first_name AND second_name
+        ORDER BY name;
+
+       OPEN c1;
+       FETCH c1 INTO match_name;
+       IF (match_name > 0)
+        THEN SET second_name = 'TRUE';
+       END IF;
+       CLOSE c1;
+
+       OPEN c2;
+       FETCH c2 INTO animal_weight;
+       CLOSE c2;
+
+       OPEN c3;
+      END""")
+    result = None
+
+    # Drop the staff table, in case it exists
+    drop = 'DROP TABLE staff'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+    # Create the staff table
+    create = 'CREATE TABLE staff (id SMALLINT NOT NULL, name VARCHAR(9), dept SMALLINT, job CHAR(5), years SMALLINT, salary DECIMAL(7,2), comm DECIMAL(7,2))';
+    result = ibm_db.exec_immediate(conn, create)
+    # Populate the staff table
+    staff = (\
+      (10, 'Sanders',    20, 'Mgr',   7,    18357.50, None),\
+      (20, 'Pernal',     20, 'Sales', 8,    18171.25, 612.45),\
+      (30, 'Marenghi',   38, 'Mgr',   5,    17506.75, None),\
+      (40, 'OBrien',     38, 'Sales', 6,    18006.00, 846.55),\
+      (50, 'Hanes',      15, 'Mgr',   10,   20659.80, None),\
+      (60, 'Quigley',    38, 'Sales', None,  16808.30, 650.25),\
+      (70, 'Rothman',    15, 'Sales', 7,    16502.83, 1152.00),\
+      (80, 'James',      20, 'Clerk', None,  13504.60, 128.20),\
+      (90, 'Koonitz',    42, 'Sales', 6,    18001.75, 1386.70),\
+      (100, 'Plotz',     42, 'Mgr'  , 7,    18352.80, None),\
+      (110, 'Ngan',      15, 'Clerk', 5,    12508.20, 206.60),\
+      (120, 'Naughton',  38, 'Clerk', None,  12954.75, 180.00),\
+      (130, 'Yamaguchi', 42, 'Clerk', 6,    10505.90, 75.60),\
+      (140, 'Fraye',     51, 'Mgr'  , 6,    21150.00, None),\
+      (150, 'Williams',  51, 'Sales', 6,    19456.50, 637.65),\
+      (160, 'Molinare',  10, 'Mgr'  , 7,    22959.20, None),\
+      (170, 'Kermisch',  15, 'Clerk', 4,    12258.50, 110.10),\
+      (180, 'Abrahams',  38, 'Clerk', 3,    12009.75, 236.50),\
+      (190, 'Sneider',   20, 'Clerk', 8,    14252.75, 126.50),\
+      (200, 'Scoutten',  42, 'Clerk', None,  11508.60, 84.20),\
+      (210, 'Lu',        10, 'Mgr'  , 10,   20010.00, None),\
+      (220, 'Smith',     51, 'Sales', 7,    17654.50, 992.80),\
+      (230, 'Lundquist', 51, 'Clerk', 3,    13369.80, 189.65),\
+      (240, 'Daniels',   10, 'Mgr'  , 5,    19260.25, None),\
+      (250, 'Wheeler',   51, 'Clerk', 6,    14460.00, 513.30),\
+      (260, 'Jones',     10, 'Mgr'  , 12,   21234.00, None),\
+      (270, 'Lea',       66, 'Mgr'  , 9,    18555.50, None),\
+      (280, 'Wilson',    66, 'Sales', 9,    18674.50, 811.50),\
+      (290, 'Quill',     84, 'Mgr'  , 10,   19818.00, None),\
+      (300, 'Davis',     84, 'Sales', 5,    15454.50, 806.10),\
+      (310, 'Graham',    66, 'Sales', 13,   21000.00, 200.30),\
+      (320, 'Gonzales',  66, 'Sales', 4,    16858.20, 844.00),\
+      (330, 'Burke',     66, 'Clerk', 1,    10988.00, 55.50),\
+      (340, 'Edwards',   84, 'Sales', 7,    17844.00, 1285.00),\
+      (350, 'Gafney',    84, 'Clerk', 5,    13030.50, 188.00)\
+    )
+    insert = 'INSERT INTO staff (id, name, dept, job, years, salary, comm) VALUES (?, ?, ?, ?, ?, ?, ?)'
+    stmt = ibm_db.prepare(conn, insert)
+    if stmt:
+      for emp in staff:
+        result = ibm_db.execute(stmt, emp)
+
+    try:
+      result = ibm_db.exec_immediate(conn, 'DROP TABLE t_string')
+    except:
+      pass
+    result = ibm_db.exec_immediate(conn, 'CREATE TABLE t_string(a INTEGER, b DOUBLE PRECISION, c VARCHAR(100))')
+
+    print "Preperation complete"
+
+#__END__
+#__LUW_EXPECTED__
+#Preperation complete
+#__ZOS_EXPECTED__
+#Preperation complete
+#__SYSTEMI_EXPECTED__
+#Preperation complete
+#__IDS_EXPECTED__
+#Preperation complete
diff -pruN 0.3.0-3/tests/test_001_ConnDb.py 2.0.5-0ubuntu2/tests/test_001_ConnDb.py
--- 0.3.0-3/tests/test_001_ConnDb.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_001_ConnDb.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,35 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_001_ConnDb(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_001)
+
+  def run_test_001(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      print "Connection succeeded."
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Connection succeeded.
+#__ZOS_EXPECTED__
+#Connection succeeded.
+#__SYSTEMI_EXPECTED__
+#Connection succeeded.
+#__IDS_EXPECTED__
+#Connection succeeded.
diff -pruN 0.3.0-3/tests/test_002_ConnDbUncatalogedConn.py 2.0.5-0ubuntu2/tests/test_002_ConnDbUncatalogedConn.py
--- 0.3.0-3/tests/test_002_ConnDbUncatalogedConn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_002_ConnDbUncatalogedConn.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,36 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_002_ConnDbUncatalogedConn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_002)
+
+  def run_test_002(self):
+    conn_str = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.database, config.hostname, config.port, config.user, config.password)
+    conn = ibm_db.connect(conn_str, '', '')
+      
+    if conn:
+      print "Connection succeeded."
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Connection succeeded.
+#__ZOS_EXPECTED__
+#Connection succeeded.
+#__SYSTEMI_EXPECTED__
+#Connection succeeded.
+#__IDS_EXPECTED__
+#Connection succeeded.
diff -pruN 0.3.0-3/tests/test_003_NumAffectedRows.py 2.0.5-0ubuntu2/tests/test_003_NumAffectedRows.py
--- 0.3.0-3/tests/test_003_NumAffectedRows.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_003_NumAffectedRows.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,39 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_003_NumAffectedRows(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_003)
+    
+  def run_test_003(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      sql = 'UPDATE animals SET id = 9'
+      res = ibm_db.exec_immediate(conn, sql)
+      print "Number of affected rows: %d" % ibm_db.num_rows(res)
+      ibm_db.rollback(conn)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: 7
+#__ZOS_EXPECTED__
+#Number of affected rows: 7
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: 7
+#__IDS_EXPECTED__
+#Number of affected rows: 7
diff -pruN 0.3.0-3/tests/test_004_ConnWrongUserPwd.py 2.0.5-0ubuntu2/tests/test_004_ConnWrongUserPwd.py
--- 0.3.0-3/tests/test_004_ConnWrongUserPwd.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_004_ConnWrongUserPwd.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_004_ConnWrongUserPwd(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_004)
+
+  def run_test_004(self):
+    try:
+      conn = ibm_db.connect("sample", "not_a_user", "inv_pass")
+    except:
+      print "connect failed, test succeeded"
+      return -1
+    print "connect succeeded? Test failed"
+
+#__END__
+#__LUW_EXPECTED__
+#connect failed, test succeeded
+#__ZOS_EXPECTED__
+#connect failed, test succeeded
+#__SYSTEMI_EXPECTED__
+#connect failed, test succeeded
+#__IDS_EXPECTED__
+#connect failed, test succeeded
diff -pruN 0.3.0-3/tests/test_005_ConnBadUserBadPwd.py 2.0.5-0ubuntu2/tests/test_005_ConnBadUserBadPwd.py
--- 0.3.0-3/tests/test_005_ConnBadUserBadPwd.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_005_ConnBadUserBadPwd.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,37 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_005_ConnBadUserBadPwd(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_005)
+
+  def run_test_005(self):
+    baduser = "non_user"
+    badpass = "invalid_password"
+    dsn = "DATABASE=" + config.database + ";UID=" + baduser + ";PWD=" + badpass + ";"
+    try:
+      conn = ibm_db.connect(dsn, "", "")
+      print "odd, ibm_db.connect succeeded with an invalid user / password"
+      ibm_db.close(conn)
+    except: 
+      print "Ooops"
+
+#__END__
+#__LUW_EXPECTED__
+#Ooops
+#__ZOS_EXPECTED__
+#Ooops
+#__SYSTEMI_EXPECTED__
+#Ooops
+#__IDS_EXPECTED__
+#Ooops
diff -pruN 0.3.0-3/tests/test_006_ConnPassingOpts.py 2.0.5-0ubuntu2/tests/test_006_ConnPassingOpts.py
--- 0.3.0-3/tests/test_006_ConnPassingOpts.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_006_ConnPassingOpts.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,91 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+  
+  def test_006_ConnPassingOpts(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_006)
+	  
+  def run_test_006(self):    
+
+    options1 = {ibm_db.SQL_ATTR_CURSOR_TYPE:  ibm_db.SQL_CURSOR_KEYSET_DRIVEN}
+    options2 = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_FORWARD_ONLY}
+      
+    conn = ibm_db.connect(config.database, config.user, config.password)
+  
+    if conn:
+      serverinfo = ibm_db.server_info( conn )
+
+      if (serverinfo.DBMS_NAME[0:3] == 'IDS'):
+        options1 = options2
+
+      stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", options2)
+      ibm_db.execute(stmt)
+      data = ibm_db.fetch_both(stmt)
+      while ( data ):
+        print data[0]
+        data = ibm_db.fetch_both(stmt)
+      
+      print ""
+
+      stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", options1)
+      ibm_db.execute(stmt)
+      data = ibm_db.fetch_both(stmt)
+      while ( data ):
+        print data[0]
+        data = ibm_db.fetch_both(stmt)
+    
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__ZOS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__SYSTEMI_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__IDS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
diff -pruN 0.3.0-3/tests/test_007_pConnPassingOpts.py 2.0.5-0ubuntu2/tests/test_007_pConnPassingOpts.py
--- 0.3.0-3/tests/test_007_pConnPassingOpts.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_007_pConnPassingOpts.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,89 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+  
+  def test_007_pConnPassingOpts(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_007)
+
+  def run_test_007(self):
+    options1 = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN}
+    options2 = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_FORWARD_ONLY}
+      
+    conn = ibm_db.pconnect(config.database, config.user, config.password)
+      
+    if conn:
+      serverinfo = ibm_db.server_info( conn )
+      if (serverinfo.DBMS_NAME[0:3] == 'IDS'):
+        options1 = options2
+
+      stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", options2)
+      ibm_db.execute(stmt)
+      data = ibm_db.fetch_both(stmt)
+      while ( data ):
+        print data[0].strip()
+        data = ibm_db.fetch_both(stmt)
+
+      print ""
+      
+      stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", options1)
+      ibm_db.execute(stmt)
+      data = ibm_db.fetch_both(stmt)
+      while ( data ):
+        print data[0].strip()
+        data = ibm_db.fetch_both(stmt)
+    
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#__ZOS_EXPECTED__
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#__SYSTEMI_EXPECTED__
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#__IDS_EXPECTED__
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
+#
+#Pook
+#Bubbles
+#Gizmo
+#Rickety Ride
diff -pruN 0.3.0-3/tests/test_008_ColumnInfo.py 2.0.5-0ubuntu2/tests/test_008_ColumnInfo.py
--- 0.3.0-3/tests/test_008_ColumnInfo.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_008_ColumnInfo.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,144 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_008_ColumnInfo(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_008)
+
+  def run_test_008(self):
+    op = {ibm_db.ATTR_CASE: ibm_db.CASE_NATURAL}
+    conn = ibm_db.connect(config.database, config.user, config.password, op)
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.columns(conn,None,None,"employee")
+    else:
+      result = ibm_db.columns(conn,None,None,"EMPLOYEE")
+    row = ibm_db.fetch_both(result)
+    value1 = None
+    value2 = None
+    value3 = None
+    value4 = None
+    if (row.has_key('TABLE_NAME')):
+      value1 = row['TABLE_NAME']
+    if (row.has_key('COLUMN_NAME')):
+      value2 = row['COLUMN_NAME']
+    if (row.has_key('table_name')):
+      value3 = row['table_name']
+    if (row.has_key('column_name')):
+      value4 = row['column_name']
+    print value1
+    print value2
+    print value3
+    print value4
+
+    op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+    ibm_db.set_option(conn, op, 1)
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.columns(conn,None,None,"employee")
+    else:
+      result = ibm_db.columns(conn,None,None,"EMPLOYEE")
+    row = ibm_db.fetch_both(result)
+    value1 = None
+    value2 = None
+    value3 = None
+    value4 = None
+    if (row.has_key('TABLE_NAME')):
+      value1 = row['TABLE_NAME']
+    if (row.has_key('COLUMN_NAME')):
+      value2 = row['COLUMN_NAME']
+    if (row.has_key('table_name')):
+      value3 = row['table_name']
+    if (row.has_key('column_name')):
+      value4 = row['column_name']
+    print value1
+    print value2
+    print value3
+    print value4
+    
+    op = {ibm_db.ATTR_CASE: ibm_db.CASE_LOWER}
+    ibm_db.set_option(conn, op, 1)
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.columns(conn,None,None,"employee")
+    else:
+      result = ibm_db.columns(conn,None,None,"EMPLOYEE")
+    row = ibm_db.fetch_both(result)
+    value1 = None
+    value2 = None
+    value3 = None
+    value4 = None
+    if (row.has_key('TABLE_NAME')):
+      value1 = row['TABLE_NAME']
+    if (row.has_key('COLUMN_NAME')):
+      value2 = row['COLUMN_NAME']
+    if (row.has_key('table_name')):
+      value3 = row['table_name']
+    if (row.has_key('column_name')):
+      value4 = row['column_name']
+    print value1
+    print value2
+    print value3
+    print value4
+
+#__END__
+#__LUW_EXPECTED__
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#__ZOS_EXPECTED__
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#__SYSTEMI_EXPECTED__
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#None
+#None
+#None
+#None
+#EMPLOYEE
+#EMPNO
+#__IDS_EXPECTED__
+#None
+#None
+#employee
+#empno
+#employee
+#empno
+#None
+#None
+#None
+#None
+#employee
+#empno
diff -pruN 0.3.0-3/tests/test_010_UpdateRowCount.py 2.0.5-0ubuntu2/tests/test_010_UpdateRowCount.py
--- 0.3.0-3/tests/test_010_UpdateRowCount.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_010_UpdateRowCount.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,38 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+  
+  def test_010_UpdateRowCount(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_010)
+
+  def run_test_010(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+     
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      stmt = ibm_db.exec_immediate(conn, "UPDATE animals SET name = 'flyweight' WHERE weight < 10.0")
+      print "Number of affected rows: %d" % ibm_db.num_rows( stmt )
+      ibm_db.rollback(conn)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: 4
+#__ZOS_EXPECTED__
+#Number of affected rows: 4
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: 4
+#__IDS_EXPECTED__
+#Number of affected rows: 4
diff -pruN 0.3.0-3/tests/test_011_DeleteRowCount.py 2.0.5-0ubuntu2/tests/test_011_DeleteRowCount.py
--- 0.3.0-3/tests/test_011_DeleteRowCount.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_011_DeleteRowCount.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,38 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_011_DeleteRowCount(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_011)
+
+  def run_test_011(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      stmt = ibm_db.exec_immediate(conn, "DELETE FROM animals WHERE weight > 10.0")
+      print "Number of affected rows: %d" % ibm_db.num_rows( stmt )
+      ibm_db.rollback(conn)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: 3
+#__ZOS_EXPECTED__
+#Number of affected rows: 3
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: 3
+#__IDS_EXPECTED__
+#Number of affected rows: 3
diff -pruN 0.3.0-3/tests/test_012_KeysetDrivenCursorSelect01.py 2.0.5-0ubuntu2/tests/test_012_KeysetDrivenCursorSelect01.py
--- 0.3.0-3/tests/test_012_KeysetDrivenCursorSelect01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_012_KeysetDrivenCursorSelect01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,56 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_012_KeysetDrivenCursorSelect01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_012)
+
+  def run_test_012(self):
+      conn = ibm_db.connect(config.database, config.user, config.password)
+      
+      if conn:
+        serverinfo = ibm_db.server_info( conn )
+        if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+          stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+        else:
+          stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0")
+        ibm_db.execute(stmt)
+        data = ibm_db.fetch_both( stmt )
+        while (data):
+          print data[0]
+          data = ibm_db.fetch_both( stmt)
+        ibm_db.close(conn)
+      else:
+        print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__ZOS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__SYSTEMI_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__IDS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
diff -pruN 0.3.0-3/tests/test_013_KeysetDrivenCursorSelect02.py 2.0.5-0ubuntu2/tests/test_013_KeysetDrivenCursorSelect02.py
--- 0.3.0-3/tests/test_013_KeysetDrivenCursorSelect02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_013_KeysetDrivenCursorSelect02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,56 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_013_KeysetDrivenCursorSelect02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_013)
+
+  def run_test_013(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      serverinfo = ibm_db.server_info( conn )
+      if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+        stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        stmt = ibm_db.prepare(conn, "SELECT name FROM animals WHERE weight < 10.0")
+      ibm_db.execute(stmt)
+      data = ibm_db.fetch_both( stmt )
+      while (data):
+        print data[0]
+        data = ibm_db.fetch_both( stmt )
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__ZOS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__SYSTEMI_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
+#__IDS_EXPECTED__
+#Pook            
+#Bubbles         
+#Gizmo           
+#Rickety Ride    
diff -pruN 0.3.0-3/tests/test_014_KeysetDrivenCursorNegativeRow.py 2.0.5-0ubuntu2/tests/test_014_KeysetDrivenCursorNegativeRow.py
--- 0.3.0-3/tests/test_014_KeysetDrivenCursorNegativeRow.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_014_KeysetDrivenCursorNegativeRow.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,79 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_014_KeysetDrivenCursorNegativeRow(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_014)
+
+  def run_test_014(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+
+    query = 'SELECT * FROM animals ORDER BY name'
+
+    if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+      stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+    else:
+      stmt = ibm_db.prepare(conn, query)
+    ibm_db.execute(stmt)
+    data = ibm_db.fetch_both( stmt )
+    while ( data ):
+      print "%s : %s : %s : %s\n" % (data[0], data[1], data[2], data[3])
+      data = ibm_db.fetch_both( stmt )
+    try:
+      stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE:  ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      ibm_db.execute(stmt)
+      rc = ibm_db.fetch_row(stmt, -1)
+      print "Fetch row -1: %s" % str(rc)
+    except:
+      print "Requested row number must be a positive value"
+
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#1 : dog : Peaches          : 12.30
+#0 : cat : Pook             : 3.20
+#5 : goat : Rickety Ride     : 9.70
+#2 : horse : Smarty           : 350.00
+#6 : llama : Sweater          : 150.00
+#Requested row number must be a positive value
+#__ZOS_EXPECTED__
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#1 : dog : Peaches          : 12.30
+#0 : cat : Pook             : 3.20
+#5 : goat : Rickety Ride     : 9.70
+#2 : horse : Smarty           : 350.00
+#6 : llama : Sweater          : 150.00
+#Requested row number must be a positive value
+#__SYSTEMI_EXPECTED__
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#1 : dog : Peaches          : 12.30
+#0 : cat : Pook             : 3.20
+#5 : goat : Rickety Ride     : 9.70
+#2 : horse : Smarty           : 350.00
+#6 : llama : Sweater          : 150.00
+#Requested row number must be a positive value
+#__IDS_EXPECTED__
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#1 : dog : Peaches          : 12.30
+#0 : cat : Pook             : 3.20
+#5 : goat : Rickety Ride     : 9.70
+#2 : horse : Smarty           : 350.00
+#6 : llama : Sweater          : 150.00
+#Requested row number must be a positive value
diff -pruN 0.3.0-3/tests/test_015_InsertDeleteRowCount_01.py 2.0.5-0ubuntu2/tests/test_015_InsertDeleteRowCount_01.py
--- 0.3.0-3/tests/test_015_InsertDeleteRowCount_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_015_InsertDeleteRowCount_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,62 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_015_InsertDeleteRowCount_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_015)
+
+  def run_test_015(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if conn:
+      result = ibm_db.exec_immediate(conn,"insert into t_string values(123,1.222333,'one to one')")
+      if result:
+        cols = ibm_db.num_fields(result)
+        # NOTE: Removed '\n' from the following and a few more prints here (refer to ruby test_015.rb)
+        print "col:", cols
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      result = ibm_db.exec_immediate(conn,"delete from t_string where a=123")
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col:", cols
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      ibm_db.close(conn)
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__ZOS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__SYSTEMI_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__IDS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
diff -pruN 0.3.0-3/tests/test_016_InsertDeleteRowCount_02.py 2.0.5-0ubuntu2/tests/test_016_InsertDeleteRowCount_02.py
--- 0.3.0-3/tests/test_016_InsertDeleteRowCount_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_016_InsertDeleteRowCount_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,61 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_016_InsertDeleteRowCount_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_016)
+
+  def run_test_016(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if conn:
+      result = ibm_db.exec_immediate(conn,"insert into t_string values(123,1.222333,'one to one')")
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col:", cols
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      result = ibm_db.exec_immediate(conn,"delete from t_string where a=123")
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col:", cols
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      ibm_db.close(conn)
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__ZOS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__SYSTEMI_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__IDS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
diff -pruN 0.3.0-3/tests/test_017_selectRowcountPrefetchSTMTOpt.py 2.0.5-0ubuntu2/tests/test_017_selectRowcountPrefetchSTMTOpt.py
--- 0.3.0-3/tests/test_017_selectRowcountPrefetchSTMTOpt.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_017_selectRowcountPrefetchSTMTOpt.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,71 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_017_selectRowcountPrefetchSTMTOpt(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_017)
+
+  def run_test_017(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if conn:
+      result = ibm_db.exec_immediate(conn,"SELECT * from animals WHERE weight < 10.0", { ibm_db.SQL_ATTR_CURSOR_TYPE : ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      if result:
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      result = ibm_db.exec_immediate(conn,"SELECT * from animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_CURSOR_TYPE : ibm_db.SQL_CURSOR_FORWARD_ONLY})
+      if result:
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      result = ibm_db.exec_immediate(conn,"SELECT * from animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON})
+      if result:
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      result = ibm_db.exec_immediate(conn,"SELECT * from animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_OFF})
+      if result:
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+
+
+      ibm_db.close(conn)
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#affected row: -1
+#__ZOS_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#affected row: -1
+#__SYSTEMI_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#affected row: -1
+#__IDS_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#affected row: -1
diff -pruN 0.3.0-3/tests/test_018_selectRowcountPrefetchSetOpt.py 2.0.5-0ubuntu2/tests/test_018_selectRowcountPrefetchSetOpt.py
--- 0.3.0-3/tests/test_018_selectRowcountPrefetchSetOpt.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_018_selectRowcountPrefetchSetOpt.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,69 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_018_selectRowcountPrefetchSetOpt(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_018)
+
+  def run_test_018(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_ON)
+    if conn:
+      stmt = ibm_db.prepare(conn, "SELECT * from animals WHERE weight < 10.0" )
+      ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON}, 2)
+      result = ibm_db.execute(stmt)
+      if result:
+        rows = ibm_db.num_rows(stmt)
+        print "affected row:", rows
+        ibm_db.free_result(stmt)
+      else:
+        print ibm_db.stmt_errormsg()
+
+      ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_OFF}, 2)
+      result = ibm_db.execute(stmt)
+      if result:
+        rows = ibm_db.num_rows(stmt)
+        print "affected row:", rows
+        ibm_db.free_result(stmt)
+      else:
+        print ibm_db.stmt_errormsg()
+
+      ibm_db.set_option(stmt, {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON}, 2)
+      result = ibm_db.execute(stmt)
+      if result:
+        rows = ibm_db.num_rows(stmt)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+
+      ibm_db.close(conn)
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#__ZOS_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#__SYSTEMI_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
+#__IDS_EXPECTED__
+#affected row: 4
+#affected row: -1
+#affected row: 4
diff -pruN 0.3.0-3/tests/test_019_selectRowcountPrefetchPrepOpt.py 2.0.5-0ubuntu2/tests/test_019_selectRowcountPrefetchPrepOpt.py
--- 0.3.0-3/tests/test_019_selectRowcountPrefetchPrepOpt.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_019_selectRowcountPrefetchPrepOpt.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,43 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_019_selectRowcountPrefetchPrepOpt(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_019)
+
+  def run_test_019(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_ON)
+    if conn:
+      stmt = ibm_db.prepare(conn, "SELECT * from animals WHERE weight < 10.0", {ibm_db.SQL_ATTR_ROWCOUNT_PREFETCH : ibm_db.SQL_ROWCOUNT_PREFETCH_ON} )
+      result = ibm_db.execute(stmt)
+      if result:
+        rows = ibm_db.num_rows(stmt)
+        print "affected row:", rows
+        ibm_db.free_result(stmt)
+      else:
+        print ibm_db.stmt_errormsg()
+
+      ibm_db.close(conn)
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#affected row: 4
+#__ZOS_EXPECTED__
+#affected row: 4
+#__SYSTEMI_EXPECTED__
+#affected row: 4
+#__IDS_EXPECTED__
+#affected row: 4
diff -pruN 0.3.0-3/tests/test_020_RollbackDelete.py 2.0.5-0ubuntu2/tests/test_020_RollbackDelete.py
--- 0.3.0-3/tests/test_020_RollbackDelete.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_020_RollbackDelete.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,67 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_020_RollbackDelete(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_020)
+
+  def run_test_020(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+        
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+      
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      ac = ibm_db.autocommit(conn)
+      if ac != 0:
+        print "Cannot set ibm_db.SQL_AUTOCOMMIT_OFF\nCannot run test"
+        #continue 
+      
+      ibm_db.exec_immediate(conn, "DELETE FROM animals")
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+       
+      ibm_db.rollback(conn)
+       
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#7
+#0
+#7
+#__ZOS_EXPECTED__
+#7
+#0
+#7
+#__SYSTEMI_EXPECTED__
+#7
+#0
+#7
+#__IDS_EXPECTED__
+#7
+#0
+#7
diff -pruN 0.3.0-3/tests/test_021_CommitDelete.py 2.0.5-0ubuntu2/tests/test_021_CommitDelete.py
--- 0.3.0-3/tests/test_021_CommitDelete.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_021_CommitDelete.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,83 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_021_CommitDelete(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_021)
+
+  def run_test_021(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+        
+      ibm_db.autocommit(conn, 0)
+      ac = ibm_db.autocommit(conn)
+      if ac != 0:
+        print "Cannot set ibm_db.AUTOCOMMIT_OFF\nCannot run test"
+        #continue
+        
+      ibm_db.exec_immediate(conn, "DELETE FROM animals")
+        
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+        
+      ibm_db.commit(conn)
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+
+      # Populate the animal table
+      animals = (
+        (0, 'cat',        'Pook',         3.2),
+        (1, 'dog',        'Peaches',      12.3),
+        (2, 'horse',      'Smarty',       350.0),
+        (3, 'gold fish',  'Bubbles',      0.1),
+        (4, 'budgerigar', 'Gizmo',        0.2),
+        (5, 'goat',       'Rickety Ride', 9.7),
+        (6, 'llama',      'Sweater',      150)
+      )
+      insert = 'INSERT INTO animals (id, breed, name, weight) VALUES (?, ?, ?, ?)'
+      stmt = ibm_db.prepare(conn, insert)
+      if stmt:
+        for animal in animals:
+          result = ibm_db.execute(stmt, animal)
+      ibm_db.commit(conn)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+      
+#__END__
+#__LUW_EXPECTED__
+#7
+#0
+#0
+#__ZOS_EXPECTED__
+#7
+#0
+#0
+#__SYSTEMI_EXPECTED__
+#7
+#0
+#0
+#__IDS_EXPECTED__
+#7
+#0
+#0
diff -pruN 0.3.0-3/tests/test_022_RollbackInsert.py 2.0.5-0ubuntu2/tests/test_022_RollbackInsert.py
--- 0.3.0-3/tests/test_022_RollbackInsert.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_022_RollbackInsert.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,66 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_022_RollbackInsert(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_022)
+
+  def run_test_022(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+        
+      ibm_db.autocommit(conn, 0)
+      ac = ibm_db.autocommit(conn)
+      if ac != 0:
+        print "Cannot set ibm_db.AUTOCOMMIT_OFF\nCannot run test"
+        #continue
+        
+      ibm_db.exec_immediate(conn, "INSERT INTO animals values (7,'bug','Brain Bug',10000.1)")
+        
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+        
+      ibm_db.rollback(conn)
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT count(*) FROM animals")
+      res = ibm_db.fetch_tuple(stmt)
+      rows = res[0]
+      print rows
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#7
+#8
+#7
+#__ZOS_EXPECTED__
+#7
+#8
+#7
+#__SYSTEMI_EXPECTED__
+#7
+#8
+#7
+#__IDS_EXPECTED__
+#7
+#8
+#7
diff -pruN 0.3.0-3/tests/test_023_ColumnPrivileges.py 2.0.5-0ubuntu2/tests/test_023_ColumnPrivileges.py
--- 0.3.0-3/tests/test_023_ColumnPrivileges.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_023_ColumnPrivileges.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,77 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+#
+# NOTE: IDS will not return any rows from column_privileges unless
+#       there have been privileges granted to another user other
+#       than the user that is running the script.  This test assumes
+#       that no other user has been granted permission and therefore
+#       will return no rows.
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+  
+  def test_023_ColumnPrivileges(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_023)
+
+  def run_test_023(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if (conn != 0):
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.column_privileges(conn, None, config.user, 'animals')
+      else:
+        stmt = ibm_db.column_privileges(conn, None, None, 'ANIMALS')
+      row = ibm_db.fetch_tuple(stmt)
+      if row:
+        print row[0]
+        print row[1]
+        print row[2]
+        print row[3]
+        print row[4]
+        print row[5]
+        print row[6]
+        print row[7]
+      ibm_db.close(conn)
+    else:
+      print ibm_db.conn_errormsg()
+      print "Connection failed\n\n"
+
+#__END__
+#__LUW_EXPECTED__
+#%s
+#%s
+#ANIMALS
+#BREED
+#SYSIBM
+#%s
+#%s
+#YES
+#__ZOS_EXPECTED__
+#%s
+#%s
+#ANIMALS
+#BREED
+#%s
+#%s
+#%s
+#YES
+#__SYSTEMI_EXPECTED__
+#%s
+#%s
+#ANIMALS
+#BREED
+#None
+#%s
+#%s
+#YES
+#__IDS_EXPECTED__
diff -pruN 0.3.0-3/tests/test_024_ForeignKeys.py 2.0.5-0ubuntu2/tests/test_024_ForeignKeys.py
--- 0.3.0-3/tests/test_024_ForeignKeys.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_024_ForeignKeys.py	2014-01-30 10:43:10.000000000 +0000
@@ -0,0 +1,190 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_024_ForeignKeys(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_024)
+
+  def run_test_024(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+     
+    if conn != 0:
+      drop = 'DROP TABLE test_primary_keys'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      drop = 'DROP TABLE test_keys'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      drop = 'DROP TABLE test_foreign_keys'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+
+      statement = 'CREATE TABLE test_primary_keys (id INTEGER NOT NULL, PRIMARY KEY(id))'
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = "INSERT INTO test_primary_keys VALUES (1)"
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = 'CREATE TABLE test_keys (name VARCHAR(30) NOT NULL, idf INTEGER NOT NULL, FOREIGN KEY(idf) REFERENCES test_primary_keys(id), \
+                   PRIMARY KEY(name))'
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = "INSERT INTO test_keys VALUES ('vince', 1)"
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = 'CREATE TABLE test_foreign_keys (namef VARCHAR(30) NOT NULL, id INTEGER NOT NULL, FOREIGN KEY(namef) REFERENCES test_keys(name))'
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = "INSERT INTO test_foreign_keys VALUES ('vince', 1)"
+      result = ibm_db.exec_immediate(conn, statement)
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.foreign_keys(conn, None, config.user, 'test_primary_keys')
+      else:
+        stmt = ibm_db.foreign_keys(conn, None, None, 'TEST_PRIMARY_KEYS')
+      row = ibm_db.fetch_tuple(stmt)
+      print row[2]
+      print row[3]
+      print row[6]
+      print row[7]
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.foreign_keys(conn, None, None, None, None, config.user, 'test_keys')
+      else:
+        stmt = ibm_db.foreign_keys(conn, None, None, None, None, None, 'TEST_KEYS')
+      row = ibm_db.fetch_tuple(stmt)
+      print row[2]
+      print row[3]
+      print row[6]
+      print row[7]
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.foreign_keys(conn, None, config.user, 'test_keys', None, None, None)
+      else:
+        stmt = ibm_db.foreign_keys(conn, None, None, 'TEST_KEYS', None, None, None)
+      row = ibm_db.fetch_tuple(stmt)
+      print row[2]
+      print row[3]
+      print row[6]
+      print row[7]
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.foreign_keys(conn, None, config.user, 'test_keys', None, config.user, 'test_foreign_keys')
+      else:
+        stmt = ibm_db.foreign_keys(conn, None, None, 'TEST_KEYS', None, None, 'TEST_FOREIGN_KEYS')
+      row = ibm_db.fetch_tuple(stmt)
+      print row[2]
+      print row[3]
+      print row[6]
+      print row[7]
+
+      try:
+        stmt = ibm_db.foreign_keys(conn, None, None, None, None, None, None)
+        row = ibm_db.fetch_tuple(stmt)
+      except:
+        if (not stmt):
+          print ibm_db.stmt_errormsg()
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.foreign_keys(conn, None, config.user, 'test_keys', None, 'dummy_schema')
+      else:
+        stmt = ibm_db.foreign_keys(conn, None, None, 'TEST_KEYS', None, 'dummy_schema')
+      row = ibm_db.fetch_tuple(stmt)
+      if(not row):
+        print "No Data Found"
+      else:
+        print row
+      ibm_db.close(conn)
+    else:
+      print ibm_db.conn_errormsg()
+      print "Connection failed\n"
+
+#__END__
+#__LUW_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#[IBM][CLI Driver] CLI0124E  Invalid argument value. SQLSTATE=HY009 SQLCODE=-99999
+#No Data Found
+#__ZOS_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#[IBM][CLI Driver] CLI0124E  Invalid argument value. SQLSTATE=HY009 SQLCODE=-99999
+#No Data Found
+#__SYSTEMI_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_PRIMARY_KEYS
+#ID
+#TEST_KEYS
+#IDF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#TEST_KEYS
+#NAME
+#TEST_FOREIGN_KEYS
+#NAMEF
+#[IBM][CLI Driver] CLI0124E  Invalid argument value. SQLSTATE=HY009 SQLCODE=-99999
+#__IDS_EXPECTED__
+#test_primary_keys
+#id
+#test_keys
+#idf
+#test_primary_keys
+#id
+#test_keys
+#idf
+#test_keys
+#name
+#test_foreign_keys
+#namef
+#test_keys
+#name
+#test_foreign_keys
+#namef
+#[IBM][CLI Driver] CLI0124E  Invalid argument value. SQLSTATE=HY009 SQLCODE=-99999
+#No Data Found
diff -pruN 0.3.0-3/tests/test_025_PrimaryKeys.py 2.0.5-0ubuntu2/tests/test_025_PrimaryKeys.py
--- 0.3.0-3/tests/test_025_PrimaryKeys.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_025_PrimaryKeys.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,72 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_025_PrimaryKeys(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_025)
+
+  def run_test_025(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+      
+    if (conn != 0):
+      drop = 'DROP TABLE test_primary_keys'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      drop = 'DROP TABLE test_foreign_keys'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      statement = 'CREATE TABLE test_primary_keys (id INTEGER NOT NULL, PRIMARY KEY(id))'
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = "INSERT INTO test_primary_keys VALUES (1)"
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = 'CREATE TABLE test_foreign_keys (idf INTEGER NOT NULL, FOREIGN KEY(idf) REFERENCES test_primary_keys(id))'
+      result = ibm_db.exec_immediate(conn, statement)
+      statement = "INSERT INTO test_foreign_keys VALUES (1)"
+      result = ibm_db.exec_immediate(conn, statement)
+      
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        stmt = ibm_db.primary_keys(conn, None, config.user, 'test_primary_keys')
+      else:
+        stmt = ibm_db.primary_keys(conn, None, None, 'TEST_PRIMARY_KEYS')
+      row = ibm_db.fetch_tuple(stmt)
+      print row[2]
+      print row[3]
+      print row[4]
+      ibm_db.close(conn)
+    else:
+      print ibm_db.conn_errormsg()
+      print "Connection failed\n"
+
+#__END__
+#__LUW_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#1
+#__ZOS_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#1
+#__SYSTEMI_EXPECTED__
+#TEST_PRIMARY_KEYS
+#ID
+#1
+#__IDS_EXPECTED__
+#test_primary_keys
+#id
+#1
diff -pruN 0.3.0-3/tests/test_030_Result.py 2.0.5-0ubuntu2/tests/test_030_Result.py
--- 0.3.0-3/tests/test_030_Result.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_030_Result.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,50 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_030_Result(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_030)
+
+  def run_test_030(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+
+      while (ibm_db.fetch_row(stmt)):
+        breed = ibm_db.result(stmt, 1)
+        print "string(%d) \"%s\"" % (len(breed), breed)
+        if (server.DBMS_NAME[0:3] == 'IDS'):
+          name = ibm_db.result(stmt, "name")
+        else:
+          name = ibm_db.result(stmt, "NAME")
+        print "string(%d) \"%s\"" % (len(name), name)
+      ibm_db.close(conn)
+        
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#__ZOS_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#__SYSTEMI_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#__IDS_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
diff -pruN 0.3.0-3/tests/test_031_ResultIndexPosition.py 2.0.5-0ubuntu2/tests/test_031_ResultIndexPosition.py
--- 0.3.0-3/tests/test_031_ResultIndexPosition.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_031_ResultIndexPosition.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,57 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_031_ResultIndexPosition(self):
+     obj = IbmDbTestFunctions()
+     obj.assert_expect(self.run_test_031)
+
+  def run_test_031(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+        
+      while (ibm_db.fetch_row(stmt)):
+        id = ibm_db.result(stmt, 0)
+        print "int(%d)" % id
+        breed = ibm_db.result(stmt, 1)
+        print "string(%d) \"%s\"" % (len(breed), breed)
+        name = ibm_db.result(stmt, 2)
+        print "string(%d) \"%s\"" % (len(name), name)
+        weight = ibm_db.result(stmt, 3)
+        print "string(%d) \"%s\"" % (len(str(weight)), weight)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#int(0)
+#string(3) "cat"
+#string(16) "Pook            "
+#string(4) "3.20"
+#__ZOS_EXPECTED__
+#int(0)
+#string(3) "cat"
+#string(16) "Pook            "
+#string(4) "3.20"
+#__SYSTEMI_EXPECTED__
+#int(0)
+#string(3) "cat"
+#string(16) "Pook            "
+#string(4) "3.20"
+#__IDS_EXPECTED__
+#int(0)
+#string(3) "cat"
+#string(16) "Pook            "
+#string(4) "3.20"
diff -pruN 0.3.0-3/tests/test_032_ResultIndexName.py 2.0.5-0ubuntu2/tests/test_032_ResultIndexName.py
--- 0.3.0-3/tests/test_032_ResultIndexName.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_032_ResultIndexName.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,64 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_032_ResultIndexName(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_032)
+
+  def run_test_032(self):
+      conn = ibm_db.connect(config.database, config.user, config.password)
+      server = ibm_db.server_info( conn )
+
+      if conn:
+        stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 6")
+        
+        while (ibm_db.fetch_row(stmt)):
+          if (server.DBMS_NAME[0:3] == 'IDS'):
+            id = ibm_db.result(stmt, "id")
+            breed = ibm_db.result(stmt, "breed")
+            name = ibm_db.result(stmt, "name")
+            weight = ibm_db.result(stmt, "weight")
+          else:
+            id = ibm_db.result(stmt, "ID")
+            breed = ibm_db.result(stmt, "BREED")
+            name = ibm_db.result(stmt, "NAME")
+            weight = ibm_db.result(stmt, "WEIGHT")
+          print "int(%d)" % id
+          print "string(%d) \"%s\"" % (len(breed), breed)
+          print "string(%d) \"%s\"" % (len(name), name)
+          print "string(%d) \"%s\"" % (len(str(weight)), weight)
+        ibm_db.close(conn)
+      else:
+        print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#int(6)
+#string(5) "llama"
+#string(16) "Sweater         "
+#string(6) "150.00"
+#__ZOS_EXPECTED__
+#int(6)
+#string(5) "llama"
+#string(16) "Sweater         "
+#string(6) "150.00"
+#__SYSTEMI_EXPECTED__
+#int(6)
+#string(5) "llama"
+#string(16) "Sweater         "
+#string(6) "150.00"
+#__IDS_EXPECTED__
+#int(6)
+#string(5) "llama"
+#string(16) "Sweater         "
+#string(6) "150.00"
diff -pruN 0.3.0-3/tests/test_033_ResultOutSequenceColumn.py 2.0.5-0ubuntu2/tests/test_033_ResultOutSequenceColumn.py
--- 0.3.0-3/tests/test_033_ResultOutSequenceColumn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_033_ResultOutSequenceColumn.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,55 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_033_ResultOutSequenceColumn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_033)
+	  
+  def run_test_033(self): 
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+      
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+        
+      while (ibm_db.fetch_row(stmt)):
+        weight = ibm_db.result(stmt, 3)
+        print "string(%d) \"%s\"" % (len(str(weight)), weight)
+        breed = ibm_db.result(stmt, 1)
+        print "string(%d) \"%s\"" % (len(breed), breed)
+        if (server.DBMS_NAME[0:3] == 'IDS'):
+          name = ibm_db.result(stmt, "name")
+        else:
+          name = ibm_db.result(stmt, "NAME")
+        print "string(%d) \"%s\"" % (len(name), name)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(4) "3.20"
+#string(3) "cat"
+#string(16) "Pook            "
+#__ZOS_EXPECTED__
+#string(4) "3.20"
+#string(3) "cat"
+#string(16) "Pook            "
+#__SYSTEMI_EXPECTED__
+#string(4) "3.20"
+#string(3) "cat"
+#string(16) "Pook            "
+#__IDS_EXPECTED__
+#string(4) "3.20"
+#string(3) "cat"
+#string(16) "Pook            "
diff -pruN 0.3.0-3/tests/test_034_FetchAssoc.py 2.0.5-0ubuntu2/tests/test_034_FetchAssoc.py
--- 0.3.0-3/tests/test_034_FetchAssoc.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_034_FetchAssoc.py	2014-01-30 10:48:30.000000000 +0000
@@ -0,0 +1,49 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_034_FetchAssoc(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_034)
+
+  def run_test_034(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+    row = ibm_db.fetch_assoc(result)
+    if( row ):
+      #printf("%5d  ",row['ID'])
+      #printf("%-10s ",row['NAME'])
+      #printf("%5d ",row['DEPT'])
+      #printf("%-7s ",row['JOB'])
+      #printf("%5d ", row['YEARS'])
+      #printf("%15s ", row['SALARY'])
+      #printf("%10s ", row['COMM'])
+      #puts ""
+      print "%5d %-10s %5d %-7s %5d %15s %10s" % (row['ID'], row['NAME'], row['DEPT'], row['JOB'], row['YEARS'], row['SALARY'], row['COMM'])
+    
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#   10 Sanders       20 Mgr         7        18357.50       None
+#__ZOS_EXPECTED__
+#   10 Sanders       20 Mgr         7        18357.50       None
+#__SYSTEMI_EXPECTED__
+#   10 Sanders       20 Mgr         7        18357.50       None
+#__IDS_EXPECTED__
+#   10 Sanders       20 Mgr         7        18357.50       None
diff -pruN 0.3.0-3/tests/test_035_FetchRow_01.py 2.0.5-0ubuntu2/tests/test_035_FetchRow_01.py
--- 0.3.0-3/tests/test_035_FetchRow_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_035_FetchRow_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,175 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_035_FetchRow_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_035)
+
+  def run_test_035(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+    i=0
+    
+    row = ibm_db.fetch_row(result)
+    while ( row ):
+      print "%d, " % i
+      i+=1
+      row = ibm_db.fetch_row(result)
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#0, 
+#1, 
+#2, 
+#3, 
+#4, 
+#5, 
+#6, 
+#7, 
+#8, 
+#9, 
+#10, 
+#11, 
+#12, 
+#13, 
+#14, 
+#15, 
+#16, 
+#17, 
+#18, 
+#19, 
+#20, 
+#21, 
+#22, 
+#23, 
+#24, 
+#25, 
+#26, 
+#27, 
+#28, 
+#29, 
+#30, 
+#31, 
+#32, 
+#33, 
+#34, 
+#__ZOS_EXPECTED__
+#0, 
+#1, 
+#2, 
+#3, 
+#4, 
+#5, 
+#6, 
+#7, 
+#8, 
+#9, 
+#10, 
+#11, 
+#12, 
+#13, 
+#14, 
+#15, 
+#16, 
+#17, 
+#18, 
+#19, 
+#20, 
+#21, 
+#22, 
+#23, 
+#24, 
+#25, 
+#26, 
+#27, 
+#28, 
+#29, 
+#30, 
+#31, 
+#32, 
+#33, 
+#34, 
+#__SYSTEMI_EXPECTED__
+#0, 
+#1, 
+#2, 
+#3, 
+#4, 
+#5, 
+#6, 
+#7, 
+#8, 
+#9, 
+#10, 
+#11, 
+#12, 
+#13, 
+#14, 
+#15, 
+#16, 
+#17, 
+#18, 
+#19, 
+#20, 
+#21, 
+#22, 
+#23, 
+#24, 
+#25, 
+#26, 
+#27, 
+#28, 
+#29, 
+#30, 
+#31, 
+#32, 
+#33, 
+#34, 
+#__IDS_EXPECTED__
+#0, 
+#1, 
+#2, 
+#3, 
+#4, 
+#5, 
+#6, 
+#7, 
+#8, 
+#9, 
+#10, 
+#11, 
+#12, 
+#13, 
+#14, 
+#15, 
+#16, 
+#17, 
+#18, 
+#19, 
+#20, 
+#21, 
+#22, 
+#23, 
+#24, 
+#25, 
+#26, 
+#27, 
+#28, 
+#29, 
+#30, 
+#31, 
+#32, 
+#33, 
+#34, 
diff -pruN 0.3.0-3/tests/test_036_FetchRow_02.py 2.0.5-0ubuntu2/tests/test_036_FetchRow_02.py
--- 0.3.0-3/tests/test_036_FetchRow_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_036_FetchRow_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,182 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_036_FetchRow_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_036)
+      
+  def run_test_036(self):      
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+    i=0
+    row = ibm_db.fetch_row(result)
+    
+    while ( row ):
+       result2 = ibm_db.exec_immediate(conn, "select * from staff")
+       j=0
+       row2 = ibm_db.fetch_row(result2) 
+       while ( row2 ):
+          print "%d)%d," % (i, j)
+          j+=1
+          row2 = ibm_db.fetch_row(result2)
+       print "%d, " % i
+       i+=1
+       row = ibm_db.fetch_row(result)
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#0)0,0)1,0)2,0)3,0)4,0)5,0)6,0)7,0)8,0)9,0)10,0)11,0)12,0)13,0)14,0)15,0)16,0)17,0)18,0)19,0)20,0)21,0)22,0)23,0)24,0)25,0)26,0)27,0)28,0)29,0)30,0)31,0)32,0)33,0)34,0, 
+#1)0,1)1,1)2,1)3,1)4,1)5,1)6,1)7,1)8,1)9,1)10,1)11,1)12,1)13,1)14,1)15,1)16,1)17,1)18,1)19,1)20,1)21,1)22,1)23,1)24,1)25,1)26,1)27,1)28,1)29,1)30,1)31,1)32,1)33,1)34,1, 
+#2)0,2)1,2)2,2)3,2)4,2)5,2)6,2)7,2)8,2)9,2)10,2)11,2)12,2)13,2)14,2)15,2)16,2)17,2)18,2)19,2)20,2)21,2)22,2)23,2)24,2)25,2)26,2)27,2)28,2)29,2)30,2)31,2)32,2)33,2)34,2, 
+#3)0,3)1,3)2,3)3,3)4,3)5,3)6,3)7,3)8,3)9,3)10,3)11,3)12,3)13,3)14,3)15,3)16,3)17,3)18,3)19,3)20,3)21,3)22,3)23,3)24,3)25,3)26,3)27,3)28,3)29,3)30,3)31,3)32,3)33,3)34,3, 
+#4)0,4)1,4)2,4)3,4)4,4)5,4)6,4)7,4)8,4)9,4)10,4)11,4)12,4)13,4)14,4)15,4)16,4)17,4)18,4)19,4)20,4)21,4)22,4)23,4)24,4)25,4)26,4)27,4)28,4)29,4)30,4)31,4)32,4)33,4)34,4, 
+#5)0,5)1,5)2,5)3,5)4,5)5,5)6,5)7,5)8,5)9,5)10,5)11,5)12,5)13,5)14,5)15,5)16,5)17,5)18,5)19,5)20,5)21,5)22,5)23,5)24,5)25,5)26,5)27,5)28,5)29,5)30,5)31,5)32,5)33,5)34,5, 
+#6)0,6)1,6)2,6)3,6)4,6)5,6)6,6)7,6)8,6)9,6)10,6)11,6)12,6)13,6)14,6)15,6)16,6)17,6)18,6)19,6)20,6)21,6)22,6)23,6)24,6)25,6)26,6)27,6)28,6)29,6)30,6)31,6)32,6)33,6)34,6, 
+#7)0,7)1,7)2,7)3,7)4,7)5,7)6,7)7,7)8,7)9,7)10,7)11,7)12,7)13,7)14,7)15,7)16,7)17,7)18,7)19,7)20,7)21,7)22,7)23,7)24,7)25,7)26,7)27,7)28,7)29,7)30,7)31,7)32,7)33,7)34,7, 
+#8)0,8)1,8)2,8)3,8)4,8)5,8)6,8)7,8)8,8)9,8)10,8)11,8)12,8)13,8)14,8)15,8)16,8)17,8)18,8)19,8)20,8)21,8)22,8)23,8)24,8)25,8)26,8)27,8)28,8)29,8)30,8)31,8)32,8)33,8)34,8, 
+#9)0,9)1,9)2,9)3,9)4,9)5,9)6,9)7,9)8,9)9,9)10,9)11,9)12,9)13,9)14,9)15,9)16,9)17,9)18,9)19,9)20,9)21,9)22,9)23,9)24,9)25,9)26,9)27,9)28,9)29,9)30,9)31,9)32,9)33,9)34,9, 
+#10)0,10)1,10)2,10)3,10)4,10)5,10)6,10)7,10)8,10)9,10)10,10)11,10)12,10)13,10)14,10)15,10)16,10)17,10)18,10)19,10)20,10)21,10)22,10)23,10)24,10)25,10)26,10)27,10)28,10)29,10)30,10)31,10)32,10)33,10)34,10, 
+#11)0,11)1,11)2,11)3,11)4,11)5,11)6,11)7,11)8,11)9,11)10,11)11,11)12,11)13,11)14,11)15,11)16,11)17,11)18,11)19,11)20,11)21,11)22,11)23,11)24,11)25,11)26,11)27,11)28,11)29,11)30,11)31,11)32,11)33,11)34,11, 
+#12)0,12)1,12)2,12)3,12)4,12)5,12)6,12)7,12)8,12)9,12)10,12)11,12)12,12)13,12)14,12)15,12)16,12)17,12)18,12)19,12)20,12)21,12)22,12)23,12)24,12)25,12)26,12)27,12)28,12)29,12)30,12)31,12)32,12)33,12)34,12, 
+#13)0,13)1,13)2,13)3,13)4,13)5,13)6,13)7,13)8,13)9,13)10,13)11,13)12,13)13,13)14,13)15,13)16,13)17,13)18,13)19,13)20,13)21,13)22,13)23,13)24,13)25,13)26,13)27,13)28,13)29,13)30,13)31,13)32,13)33,13)34,13, 
+#14)0,14)1,14)2,14)3,14)4,14)5,14)6,14)7,14)8,14)9,14)10,14)11,14)12,14)13,14)14,14)15,14)16,14)17,14)18,14)19,14)20,14)21,14)22,14)23,14)24,14)25,14)26,14)27,14)28,14)29,14)30,14)31,14)32,14)33,14)34,14, 
+#15)0,15)1,15)2,15)3,15)4,15)5,15)6,15)7,15)8,15)9,15)10,15)11,15)12,15)13,15)14,15)15,15)16,15)17,15)18,15)19,15)20,15)21,15)22,15)23,15)24,15)25,15)26,15)27,15)28,15)29,15)30,15)31,15)32,15)33,15)34,15, 
+#16)0,16)1,16)2,16)3,16)4,16)5,16)6,16)7,16)8,16)9,16)10,16)11,16)12,16)13,16)14,16)15,16)16,16)17,16)18,16)19,16)20,16)21,16)22,16)23,16)24,16)25,16)26,16)27,16)28,16)29,16)30,16)31,16)32,16)33,16)34,16, 
+#17)0,17)1,17)2,17)3,17)4,17)5,17)6,17)7,17)8,17)9,17)10,17)11,17)12,17)13,17)14,17)15,17)16,17)17,17)18,17)19,17)20,17)21,17)22,17)23,17)24,17)25,17)26,17)27,17)28,17)29,17)30,17)31,17)32,17)33,17)34,17, 
+#18)0,18)1,18)2,18)3,18)4,18)5,18)6,18)7,18)8,18)9,18)10,18)11,18)12,18)13,18)14,18)15,18)16,18)17,18)18,18)19,18)20,18)21,18)22,18)23,18)24,18)25,18)26,18)27,18)28,18)29,18)30,18)31,18)32,18)33,18)34,18, 
+#19)0,19)1,19)2,19)3,19)4,19)5,19)6,19)7,19)8,19)9,19)10,19)11,19)12,19)13,19)14,19)15,19)16,19)17,19)18,19)19,19)20,19)21,19)22,19)23,19)24,19)25,19)26,19)27,19)28,19)29,19)30,19)31,19)32,19)33,19)34,19, 
+#20)0,20)1,20)2,20)3,20)4,20)5,20)6,20)7,20)8,20)9,20)10,20)11,20)12,20)13,20)14,20)15,20)16,20)17,20)18,20)19,20)20,20)21,20)22,20)23,20)24,20)25,20)26,20)27,20)28,20)29,20)30,20)31,20)32,20)33,20)34,20, 
+#21)0,21)1,21)2,21)3,21)4,21)5,21)6,21)7,21)8,21)9,21)10,21)11,21)12,21)13,21)14,21)15,21)16,21)17,21)18,21)19,21)20,21)21,21)22,21)23,21)24,21)25,21)26,21)27,21)28,21)29,21)30,21)31,21)32,21)33,21)34,21, 
+#22)0,22)1,22)2,22)3,22)4,22)5,22)6,22)7,22)8,22)9,22)10,22)11,22)12,22)13,22)14,22)15,22)16,22)17,22)18,22)19,22)20,22)21,22)22,22)23,22)24,22)25,22)26,22)27,22)28,22)29,22)30,22)31,22)32,22)33,22)34,22, 
+#23)0,23)1,23)2,23)3,23)4,23)5,23)6,23)7,23)8,23)9,23)10,23)11,23)12,23)13,23)14,23)15,23)16,23)17,23)18,23)19,23)20,23)21,23)22,23)23,23)24,23)25,23)26,23)27,23)28,23)29,23)30,23)31,23)32,23)33,23)34,23, 
+#24)0,24)1,24)2,24)3,24)4,24)5,24)6,24)7,24)8,24)9,24)10,24)11,24)12,24)13,24)14,24)15,24)16,24)17,24)18,24)19,24)20,24)21,24)22,24)23,24)24,24)25,24)26,24)27,24)28,24)29,24)30,24)31,24)32,24)33,24)34,24, 
+#25)0,25)1,25)2,25)3,25)4,25)5,25)6,25)7,25)8,25)9,25)10,25)11,25)12,25)13,25)14,25)15,25)16,25)17,25)18,25)19,25)20,25)21,25)22,25)23,25)24,25)25,25)26,25)27,25)28,25)29,25)30,25)31,25)32,25)33,25)34,25, 
+#26)0,26)1,26)2,26)3,26)4,26)5,26)6,26)7,26)8,26)9,26)10,26)11,26)12,26)13,26)14,26)15,26)16,26)17,26)18,26)19,26)20,26)21,26)22,26)23,26)24,26)25,26)26,26)27,26)28,26)29,26)30,26)31,26)32,26)33,26)34,26, 
+#27)0,27)1,27)2,27)3,27)4,27)5,27)6,27)7,27)8,27)9,27)10,27)11,27)12,27)13,27)14,27)15,27)16,27)17,27)18,27)19,27)20,27)21,27)22,27)23,27)24,27)25,27)26,27)27,27)28,27)29,27)30,27)31,27)32,27)33,27)34,27, 
+#28)0,28)1,28)2,28)3,28)4,28)5,28)6,28)7,28)8,28)9,28)10,28)11,28)12,28)13,28)14,28)15,28)16,28)17,28)18,28)19,28)20,28)21,28)22,28)23,28)24,28)25,28)26,28)27,28)28,28)29,28)30,28)31,28)32,28)33,28)34,28, 
+#29)0,29)1,29)2,29)3,29)4,29)5,29)6,29)7,29)8,29)9,29)10,29)11,29)12,29)13,29)14,29)15,29)16,29)17,29)18,29)19,29)20,29)21,29)22,29)23,29)24,29)25,29)26,29)27,29)28,29)29,29)30,29)31,29)32,29)33,29)34,29, 
+#30)0,30)1,30)2,30)3,30)4,30)5,30)6,30)7,30)8,30)9,30)10,30)11,30)12,30)13,30)14,30)15,30)16,30)17,30)18,30)19,30)20,30)21,30)22,30)23,30)24,30)25,30)26,30)27,30)28,30)29,30)30,30)31,30)32,30)33,30)34,30, 
+#31)0,31)1,31)2,31)3,31)4,31)5,31)6,31)7,31)8,31)9,31)10,31)11,31)12,31)13,31)14,31)15,31)16,31)17,31)18,31)19,31)20,31)21,31)22,31)23,31)24,31)25,31)26,31)27,31)28,31)29,31)30,31)31,31)32,31)33,31)34,31, 
+#32)0,32)1,32)2,32)3,32)4,32)5,32)6,32)7,32)8,32)9,32)10,32)11,32)12,32)13,32)14,32)15,32)16,32)17,32)18,32)19,32)20,32)21,32)22,32)23,32)24,32)25,32)26,32)27,32)28,32)29,32)30,32)31,32)32,32)33,32)34,32, 
+#33)0,33)1,33)2,33)3,33)4,33)5,33)6,33)7,33)8,33)9,33)10,33)11,33)12,33)13,33)14,33)15,33)16,33)17,33)18,33)19,33)20,33)21,33)22,33)23,33)24,33)25,33)26,33)27,33)28,33)29,33)30,33)31,33)32,33)33,33)34,33, 
+#34)0,34)1,34)2,34)3,34)4,34)5,34)6,34)7,34)8,34)9,34)10,34)11,34)12,34)13,34)14,34)15,34)16,34)17,34)18,34)19,34)20,34)21,34)22,34)23,34)24,34)25,34)26,34)27,34)28,34)29,34)30,34)31,34)32,34)33,34)34,34, 
+#__ZOS_EXPECTED__
+#0)0,0)1,0)2,0)3,0)4,0)5,0)6,0)7,0)8,0)9,0)10,0)11,0)12,0)13,0)14,0)15,0)16,0)17,0)18,0)19,0)20,0)21,0)22,0)23,0)24,0)25,0)26,0)27,0)28,0)29,0)30,0)31,0)32,0)33,0)34,0, 
+#1)0,1)1,1)2,1)3,1)4,1)5,1)6,1)7,1)8,1)9,1)10,1)11,1)12,1)13,1)14,1)15,1)16,1)17,1)18,1)19,1)20,1)21,1)22,1)23,1)24,1)25,1)26,1)27,1)28,1)29,1)30,1)31,1)32,1)33,1)34,1, 
+#2)0,2)1,2)2,2)3,2)4,2)5,2)6,2)7,2)8,2)9,2)10,2)11,2)12,2)13,2)14,2)15,2)16,2)17,2)18,2)19,2)20,2)21,2)22,2)23,2)24,2)25,2)26,2)27,2)28,2)29,2)30,2)31,2)32,2)33,2)34,2, 
+#3)0,3)1,3)2,3)3,3)4,3)5,3)6,3)7,3)8,3)9,3)10,3)11,3)12,3)13,3)14,3)15,3)16,3)17,3)18,3)19,3)20,3)21,3)22,3)23,3)24,3)25,3)26,3)27,3)28,3)29,3)30,3)31,3)32,3)33,3)34,3, 
+#4)0,4)1,4)2,4)3,4)4,4)5,4)6,4)7,4)8,4)9,4)10,4)11,4)12,4)13,4)14,4)15,4)16,4)17,4)18,4)19,4)20,4)21,4)22,4)23,4)24,4)25,4)26,4)27,4)28,4)29,4)30,4)31,4)32,4)33,4)34,4, 
+#5)0,5)1,5)2,5)3,5)4,5)5,5)6,5)7,5)8,5)9,5)10,5)11,5)12,5)13,5)14,5)15,5)16,5)17,5)18,5)19,5)20,5)21,5)22,5)23,5)24,5)25,5)26,5)27,5)28,5)29,5)30,5)31,5)32,5)33,5)34,5, 
+#6)0,6)1,6)2,6)3,6)4,6)5,6)6,6)7,6)8,6)9,6)10,6)11,6)12,6)13,6)14,6)15,6)16,6)17,6)18,6)19,6)20,6)21,6)22,6)23,6)24,6)25,6)26,6)27,6)28,6)29,6)30,6)31,6)32,6)33,6)34,6, 
+#7)0,7)1,7)2,7)3,7)4,7)5,7)6,7)7,7)8,7)9,7)10,7)11,7)12,7)13,7)14,7)15,7)16,7)17,7)18,7)19,7)20,7)21,7)22,7)23,7)24,7)25,7)26,7)27,7)28,7)29,7)30,7)31,7)32,7)33,7)34,7, 
+#8)0,8)1,8)2,8)3,8)4,8)5,8)6,8)7,8)8,8)9,8)10,8)11,8)12,8)13,8)14,8)15,8)16,8)17,8)18,8)19,8)20,8)21,8)22,8)23,8)24,8)25,8)26,8)27,8)28,8)29,8)30,8)31,8)32,8)33,8)34,8, 
+#9)0,9)1,9)2,9)3,9)4,9)5,9)6,9)7,9)8,9)9,9)10,9)11,9)12,9)13,9)14,9)15,9)16,9)17,9)18,9)19,9)20,9)21,9)22,9)23,9)24,9)25,9)26,9)27,9)28,9)29,9)30,9)31,9)32,9)33,9)34,9, 
+#10)0,10)1,10)2,10)3,10)4,10)5,10)6,10)7,10)8,10)9,10)10,10)11,10)12,10)13,10)14,10)15,10)16,10)17,10)18,10)19,10)20,10)21,10)22,10)23,10)24,10)25,10)26,10)27,10)28,10)29,10)30,10)31,10)32,10)33,10)34,10, 
+#11)0,11)1,11)2,11)3,11)4,11)5,11)6,11)7,11)8,11)9,11)10,11)11,11)12,11)13,11)14,11)15,11)16,11)17,11)18,11)19,11)20,11)21,11)22,11)23,11)24,11)25,11)26,11)27,11)28,11)29,11)30,11)31,11)32,11)33,11)34,11, 
+#12)0,12)1,12)2,12)3,12)4,12)5,12)6,12)7,12)8,12)9,12)10,12)11,12)12,12)13,12)14,12)15,12)16,12)17,12)18,12)19,12)20,12)21,12)22,12)23,12)24,12)25,12)26,12)27,12)28,12)29,12)30,12)31,12)32,12)33,12)34,12, 
+#13)0,13)1,13)2,13)3,13)4,13)5,13)6,13)7,13)8,13)9,13)10,13)11,13)12,13)13,13)14,13)15,13)16,13)17,13)18,13)19,13)20,13)21,13)22,13)23,13)24,13)25,13)26,13)27,13)28,13)29,13)30,13)31,13)32,13)33,13)34,13, 
+#14)0,14)1,14)2,14)3,14)4,14)5,14)6,14)7,14)8,14)9,14)10,14)11,14)12,14)13,14)14,14)15,14)16,14)17,14)18,14)19,14)20,14)21,14)22,14)23,14)24,14)25,14)26,14)27,14)28,14)29,14)30,14)31,14)32,14)33,14)34,14, 
+#15)0,15)1,15)2,15)3,15)4,15)5,15)6,15)7,15)8,15)9,15)10,15)11,15)12,15)13,15)14,15)15,15)16,15)17,15)18,15)19,15)20,15)21,15)22,15)23,15)24,15)25,15)26,15)27,15)28,15)29,15)30,15)31,15)32,15)33,15)34,15, 
+#16)0,16)1,16)2,16)3,16)4,16)5,16)6,16)7,16)8,16)9,16)10,16)11,16)12,16)13,16)14,16)15,16)16,16)17,16)18,16)19,16)20,16)21,16)22,16)23,16)24,16)25,16)26,16)27,16)28,16)29,16)30,16)31,16)32,16)33,16)34,16, 
+#17)0,17)1,17)2,17)3,17)4,17)5,17)6,17)7,17)8,17)9,17)10,17)11,17)12,17)13,17)14,17)15,17)16,17)17,17)18,17)19,17)20,17)21,17)22,17)23,17)24,17)25,17)26,17)27,17)28,17)29,17)30,17)31,17)32,17)33,17)34,17, 
+#18)0,18)1,18)2,18)3,18)4,18)5,18)6,18)7,18)8,18)9,18)10,18)11,18)12,18)13,18)14,18)15,18)16,18)17,18)18,18)19,18)20,18)21,18)22,18)23,18)24,18)25,18)26,18)27,18)28,18)29,18)30,18)31,18)32,18)33,18)34,18, 
+#19)0,19)1,19)2,19)3,19)4,19)5,19)6,19)7,19)8,19)9,19)10,19)11,19)12,19)13,19)14,19)15,19)16,19)17,19)18,19)19,19)20,19)21,19)22,19)23,19)24,19)25,19)26,19)27,19)28,19)29,19)30,19)31,19)32,19)33,19)34,19, 
+#20)0,20)1,20)2,20)3,20)4,20)5,20)6,20)7,20)8,20)9,20)10,20)11,20)12,20)13,20)14,20)15,20)16,20)17,20)18,20)19,20)20,20)21,20)22,20)23,20)24,20)25,20)26,20)27,20)28,20)29,20)30,20)31,20)32,20)33,20)34,20, 
+#21)0,21)1,21)2,21)3,21)4,21)5,21)6,21)7,21)8,21)9,21)10,21)11,21)12,21)13,21)14,21)15,21)16,21)17,21)18,21)19,21)20,21)21,21)22,21)23,21)24,21)25,21)26,21)27,21)28,21)29,21)30,21)31,21)32,21)33,21)34,21, 
+#22)0,22)1,22)2,22)3,22)4,22)5,22)6,22)7,22)8,22)9,22)10,22)11,22)12,22)13,22)14,22)15,22)16,22)17,22)18,22)19,22)20,22)21,22)22,22)23,22)24,22)25,22)26,22)27,22)28,22)29,22)30,22)31,22)32,22)33,22)34,22, 
+#23)0,23)1,23)2,23)3,23)4,23)5,23)6,23)7,23)8,23)9,23)10,23)11,23)12,23)13,23)14,23)15,23)16,23)17,23)18,23)19,23)20,23)21,23)22,23)23,23)24,23)25,23)26,23)27,23)28,23)29,23)30,23)31,23)32,23)33,23)34,23, 
+#24)0,24)1,24)2,24)3,24)4,24)5,24)6,24)7,24)8,24)9,24)10,24)11,24)12,24)13,24)14,24)15,24)16,24)17,24)18,24)19,24)20,24)21,24)22,24)23,24)24,24)25,24)26,24)27,24)28,24)29,24)30,24)31,24)32,24)33,24)34,24, 
+#25)0,25)1,25)2,25)3,25)4,25)5,25)6,25)7,25)8,25)9,25)10,25)11,25)12,25)13,25)14,25)15,25)16,25)17,25)18,25)19,25)20,25)21,25)22,25)23,25)24,25)25,25)26,25)27,25)28,25)29,25)30,25)31,25)32,25)33,25)34,25, 
+#26)0,26)1,26)2,26)3,26)4,26)5,26)6,26)7,26)8,26)9,26)10,26)11,26)12,26)13,26)14,26)15,26)16,26)17,26)18,26)19,26)20,26)21,26)22,26)23,26)24,26)25,26)26,26)27,26)28,26)29,26)30,26)31,26)32,26)33,26)34,26, 
+#27)0,27)1,27)2,27)3,27)4,27)5,27)6,27)7,27)8,27)9,27)10,27)11,27)12,27)13,27)14,27)15,27)16,27)17,27)18,27)19,27)20,27)21,27)22,27)23,27)24,27)25,27)26,27)27,27)28,27)29,27)30,27)31,27)32,27)33,27)34,27, 
+#28)0,28)1,28)2,28)3,28)4,28)5,28)6,28)7,28)8,28)9,28)10,28)11,28)12,28)13,28)14,28)15,28)16,28)17,28)18,28)19,28)20,28)21,28)22,28)23,28)24,28)25,28)26,28)27,28)28,28)29,28)30,28)31,28)32,28)33,28)34,28, 
+#29)0,29)1,29)2,29)3,29)4,29)5,29)6,29)7,29)8,29)9,29)10,29)11,29)12,29)13,29)14,29)15,29)16,29)17,29)18,29)19,29)20,29)21,29)22,29)23,29)24,29)25,29)26,29)27,29)28,29)29,29)30,29)31,29)32,29)33,29)34,29, 
+#30)0,30)1,30)2,30)3,30)4,30)5,30)6,30)7,30)8,30)9,30)10,30)11,30)12,30)13,30)14,30)15,30)16,30)17,30)18,30)19,30)20,30)21,30)22,30)23,30)24,30)25,30)26,30)27,30)28,30)29,30)30,30)31,30)32,30)33,30)34,30, 
+#31)0,31)1,31)2,31)3,31)4,31)5,31)6,31)7,31)8,31)9,31)10,31)11,31)12,31)13,31)14,31)15,31)16,31)17,31)18,31)19,31)20,31)21,31)22,31)23,31)24,31)25,31)26,31)27,31)28,31)29,31)30,31)31,31)32,31)33,31)34,31, 
+#32)0,32)1,32)2,32)3,32)4,32)5,32)6,32)7,32)8,32)9,32)10,32)11,32)12,32)13,32)14,32)15,32)16,32)17,32)18,32)19,32)20,32)21,32)22,32)23,32)24,32)25,32)26,32)27,32)28,32)29,32)30,32)31,32)32,32)33,32)34,32, 
+#33)0,33)1,33)2,33)3,33)4,33)5,33)6,33)7,33)8,33)9,33)10,33)11,33)12,33)13,33)14,33)15,33)16,33)17,33)18,33)19,33)20,33)21,33)22,33)23,33)24,33)25,33)26,33)27,33)28,33)29,33)30,33)31,33)32,33)33,33)34,33, 
+#34)0,34)1,34)2,34)3,34)4,34)5,34)6,34)7,34)8,34)9,34)10,34)11,34)12,34)13,34)14,34)15,34)16,34)17,34)18,34)19,34)20,34)21,34)22,34)23,34)24,34)25,34)26,34)27,34)28,34)29,34)30,34)31,34)32,34)33,34)34,34, 
+#__SYSTEMI_EXPECTED__
+#0)0,0)1,0)2,0)3,0)4,0)5,0)6,0)7,0)8,0)9,0)10,0)11,0)12,0)13,0)14,0)15,0)16,0)17,0)18,0)19,0)20,0)21,0)22,0)23,0)24,0)25,0)26,0)27,0)28,0)29,0)30,0)31,0)32,0)33,0)34,0, 
+#1)0,1)1,1)2,1)3,1)4,1)5,1)6,1)7,1)8,1)9,1)10,1)11,1)12,1)13,1)14,1)15,1)16,1)17,1)18,1)19,1)20,1)21,1)22,1)23,1)24,1)25,1)26,1)27,1)28,1)29,1)30,1)31,1)32,1)33,1)34,1, 
+#2)0,2)1,2)2,2)3,2)4,2)5,2)6,2)7,2)8,2)9,2)10,2)11,2)12,2)13,2)14,2)15,2)16,2)17,2)18,2)19,2)20,2)21,2)22,2)23,2)24,2)25,2)26,2)27,2)28,2)29,2)30,2)31,2)32,2)33,2)34,2, 
+#3)0,3)1,3)2,3)3,3)4,3)5,3)6,3)7,3)8,3)9,3)10,3)11,3)12,3)13,3)14,3)15,3)16,3)17,3)18,3)19,3)20,3)21,3)22,3)23,3)24,3)25,3)26,3)27,3)28,3)29,3)30,3)31,3)32,3)33,3)34,3, 
+#4)0,4)1,4)2,4)3,4)4,4)5,4)6,4)7,4)8,4)9,4)10,4)11,4)12,4)13,4)14,4)15,4)16,4)17,4)18,4)19,4)20,4)21,4)22,4)23,4)24,4)25,4)26,4)27,4)28,4)29,4)30,4)31,4)32,4)33,4)34,4, 
+#5)0,5)1,5)2,5)3,5)4,5)5,5)6,5)7,5)8,5)9,5)10,5)11,5)12,5)13,5)14,5)15,5)16,5)17,5)18,5)19,5)20,5)21,5)22,5)23,5)24,5)25,5)26,5)27,5)28,5)29,5)30,5)31,5)32,5)33,5)34,5, 
+#6)0,6)1,6)2,6)3,6)4,6)5,6)6,6)7,6)8,6)9,6)10,6)11,6)12,6)13,6)14,6)15,6)16,6)17,6)18,6)19,6)20,6)21,6)22,6)23,6)24,6)25,6)26,6)27,6)28,6)29,6)30,6)31,6)32,6)33,6)34,6, 
+#7)0,7)1,7)2,7)3,7)4,7)5,7)6,7)7,7)8,7)9,7)10,7)11,7)12,7)13,7)14,7)15,7)16,7)17,7)18,7)19,7)20,7)21,7)22,7)23,7)24,7)25,7)26,7)27,7)28,7)29,7)30,7)31,7)32,7)33,7)34,7, 
+#8)0,8)1,8)2,8)3,8)4,8)5,8)6,8)7,8)8,8)9,8)10,8)11,8)12,8)13,8)14,8)15,8)16,8)17,8)18,8)19,8)20,8)21,8)22,8)23,8)24,8)25,8)26,8)27,8)28,8)29,8)30,8)31,8)32,8)33,8)34,8, 
+#9)0,9)1,9)2,9)3,9)4,9)5,9)6,9)7,9)8,9)9,9)10,9)11,9)12,9)13,9)14,9)15,9)16,9)17,9)18,9)19,9)20,9)21,9)22,9)23,9)24,9)25,9)26,9)27,9)28,9)29,9)30,9)31,9)32,9)33,9)34,9, 
+#10)0,10)1,10)2,10)3,10)4,10)5,10)6,10)7,10)8,10)9,10)10,10)11,10)12,10)13,10)14,10)15,10)16,10)17,10)18,10)19,10)20,10)21,10)22,10)23,10)24,10)25,10)26,10)27,10)28,10)29,10)30,10)31,10)32,10)33,10)34,10, 
+#11)0,11)1,11)2,11)3,11)4,11)5,11)6,11)7,11)8,11)9,11)10,11)11,11)12,11)13,11)14,11)15,11)16,11)17,11)18,11)19,11)20,11)21,11)22,11)23,11)24,11)25,11)26,11)27,11)28,11)29,11)30,11)31,11)32,11)33,11)34,11, 
+#12)0,12)1,12)2,12)3,12)4,12)5,12)6,12)7,12)8,12)9,12)10,12)11,12)12,12)13,12)14,12)15,12)16,12)17,12)18,12)19,12)20,12)21,12)22,12)23,12)24,12)25,12)26,12)27,12)28,12)29,12)30,12)31,12)32,12)33,12)34,12, 
+#13)0,13)1,13)2,13)3,13)4,13)5,13)6,13)7,13)8,13)9,13)10,13)11,13)12,13)13,13)14,13)15,13)16,13)17,13)18,13)19,13)20,13)21,13)22,13)23,13)24,13)25,13)26,13)27,13)28,13)29,13)30,13)31,13)32,13)33,13)34,13, 
+#14)0,14)1,14)2,14)3,14)4,14)5,14)6,14)7,14)8,14)9,14)10,14)11,14)12,14)13,14)14,14)15,14)16,14)17,14)18,14)19,14)20,14)21,14)22,14)23,14)24,14)25,14)26,14)27,14)28,14)29,14)30,14)31,14)32,14)33,14)34,14, 
+#15)0,15)1,15)2,15)3,15)4,15)5,15)6,15)7,15)8,15)9,15)10,15)11,15)12,15)13,15)14,15)15,15)16,15)17,15)18,15)19,15)20,15)21,15)22,15)23,15)24,15)25,15)26,15)27,15)28,15)29,15)30,15)31,15)32,15)33,15)34,15, 
+#16)0,16)1,16)2,16)3,16)4,16)5,16)6,16)7,16)8,16)9,16)10,16)11,16)12,16)13,16)14,16)15,16)16,16)17,16)18,16)19,16)20,16)21,16)22,16)23,16)24,16)25,16)26,16)27,16)28,16)29,16)30,16)31,16)32,16)33,16)34,16, 
+#17)0,17)1,17)2,17)3,17)4,17)5,17)6,17)7,17)8,17)9,17)10,17)11,17)12,17)13,17)14,17)15,17)16,17)17,17)18,17)19,17)20,17)21,17)22,17)23,17)24,17)25,17)26,17)27,17)28,17)29,17)30,17)31,17)32,17)33,17)34,17, 
+#18)0,18)1,18)2,18)3,18)4,18)5,18)6,18)7,18)8,18)9,18)10,18)11,18)12,18)13,18)14,18)15,18)16,18)17,18)18,18)19,18)20,18)21,18)22,18)23,18)24,18)25,18)26,18)27,18)28,18)29,18)30,18)31,18)32,18)33,18)34,18, 
+#19)0,19)1,19)2,19)3,19)4,19)5,19)6,19)7,19)8,19)9,19)10,19)11,19)12,19)13,19)14,19)15,19)16,19)17,19)18,19)19,19)20,19)21,19)22,19)23,19)24,19)25,19)26,19)27,19)28,19)29,19)30,19)31,19)32,19)33,19)34,19, 
+#20)0,20)1,20)2,20)3,20)4,20)5,20)6,20)7,20)8,20)9,20)10,20)11,20)12,20)13,20)14,20)15,20)16,20)17,20)18,20)19,20)20,20)21,20)22,20)23,20)24,20)25,20)26,20)27,20)28,20)29,20)30,20)31,20)32,20)33,20)34,20, 
+#21)0,21)1,21)2,21)3,21)4,21)5,21)6,21)7,21)8,21)9,21)10,21)11,21)12,21)13,21)14,21)15,21)16,21)17,21)18,21)19,21)20,21)21,21)22,21)23,21)24,21)25,21)26,21)27,21)28,21)29,21)30,21)31,21)32,21)33,21)34,21, 
+#22)0,22)1,22)2,22)3,22)4,22)5,22)6,22)7,22)8,22)9,22)10,22)11,22)12,22)13,22)14,22)15,22)16,22)17,22)18,22)19,22)20,22)21,22)22,22)23,22)24,22)25,22)26,22)27,22)28,22)29,22)30,22)31,22)32,22)33,22)34,22, 
+#23)0,23)1,23)2,23)3,23)4,23)5,23)6,23)7,23)8,23)9,23)10,23)11,23)12,23)13,23)14,23)15,23)16,23)17,23)18,23)19,23)20,23)21,23)22,23)23,23)24,23)25,23)26,23)27,23)28,23)29,23)30,23)31,23)32,23)33,23)34,23, 
+#24)0,24)1,24)2,24)3,24)4,24)5,24)6,24)7,24)8,24)9,24)10,24)11,24)12,24)13,24)14,24)15,24)16,24)17,24)18,24)19,24)20,24)21,24)22,24)23,24)24,24)25,24)26,24)27,24)28,24)29,24)30,24)31,24)32,24)33,24)34,24, 
+#25)0,25)1,25)2,25)3,25)4,25)5,25)6,25)7,25)8,25)9,25)10,25)11,25)12,25)13,25)14,25)15,25)16,25)17,25)18,25)19,25)20,25)21,25)22,25)23,25)24,25)25,25)26,25)27,25)28,25)29,25)30,25)31,25)32,25)33,25)34,25, 
+#26)0,26)1,26)2,26)3,26)4,26)5,26)6,26)7,26)8,26)9,26)10,26)11,26)12,26)13,26)14,26)15,26)16,26)17,26)18,26)19,26)20,26)21,26)22,26)23,26)24,26)25,26)26,26)27,26)28,26)29,26)30,26)31,26)32,26)33,26)34,26, 
+#27)0,27)1,27)2,27)3,27)4,27)5,27)6,27)7,27)8,27)9,27)10,27)11,27)12,27)13,27)14,27)15,27)16,27)17,27)18,27)19,27)20,27)21,27)22,27)23,27)24,27)25,27)26,27)27,27)28,27)29,27)30,27)31,27)32,27)33,27)34,27, 
+#28)0,28)1,28)2,28)3,28)4,28)5,28)6,28)7,28)8,28)9,28)10,28)11,28)12,28)13,28)14,28)15,28)16,28)17,28)18,28)19,28)20,28)21,28)22,28)23,28)24,28)25,28)26,28)27,28)28,28)29,28)30,28)31,28)32,28)33,28)34,28, 
+#29)0,29)1,29)2,29)3,29)4,29)5,29)6,29)7,29)8,29)9,29)10,29)11,29)12,29)13,29)14,29)15,29)16,29)17,29)18,29)19,29)20,29)21,29)22,29)23,29)24,29)25,29)26,29)27,29)28,29)29,29)30,29)31,29)32,29)33,29)34,29, 
+#30)0,30)1,30)2,30)3,30)4,30)5,30)6,30)7,30)8,30)9,30)10,30)11,30)12,30)13,30)14,30)15,30)16,30)17,30)18,30)19,30)20,30)21,30)22,30)23,30)24,30)25,30)26,30)27,30)28,30)29,30)30,30)31,30)32,30)33,30)34,30, 
+#31)0,31)1,31)2,31)3,31)4,31)5,31)6,31)7,31)8,31)9,31)10,31)11,31)12,31)13,31)14,31)15,31)16,31)17,31)18,31)19,31)20,31)21,31)22,31)23,31)24,31)25,31)26,31)27,31)28,31)29,31)30,31)31,31)32,31)33,31)34,31, 
+#32)0,32)1,32)2,32)3,32)4,32)5,32)6,32)7,32)8,32)9,32)10,32)11,32)12,32)13,32)14,32)15,32)16,32)17,32)18,32)19,32)20,32)21,32)22,32)23,32)24,32)25,32)26,32)27,32)28,32)29,32)30,32)31,32)32,32)33,32)34,32, 
+#33)0,33)1,33)2,33)3,33)4,33)5,33)6,33)7,33)8,33)9,33)10,33)11,33)12,33)13,33)14,33)15,33)16,33)17,33)18,33)19,33)20,33)21,33)22,33)23,33)24,33)25,33)26,33)27,33)28,33)29,33)30,33)31,33)32,33)33,33)34,33, 
+#34)0,34)1,34)2,34)3,34)4,34)5,34)6,34)7,34)8,34)9,34)10,34)11,34)12,34)13,34)14,34)15,34)16,34)17,34)18,34)19,34)20,34)21,34)22,34)23,34)24,34)25,34)26,34)27,34)28,34)29,34)30,34)31,34)32,34)33,34)34,34, 
+#__IDS_EXPECTED__
+#0)0,0)1,0)2,0)3,0)4,0)5,0)6,0)7,0)8,0)9,0)10,0)11,0)12,0)13,0)14,0)15,0)16,0)17,0)18,0)19,0)20,0)21,0)22,0)23,0)24,0)25,0)26,0)27,0)28,0)29,0)30,0)31,0)32,0)33,0)34,0, 
+#1)0,1)1,1)2,1)3,1)4,1)5,1)6,1)7,1)8,1)9,1)10,1)11,1)12,1)13,1)14,1)15,1)16,1)17,1)18,1)19,1)20,1)21,1)22,1)23,1)24,1)25,1)26,1)27,1)28,1)29,1)30,1)31,1)32,1)33,1)34,1, 
+#2)0,2)1,2)2,2)3,2)4,2)5,2)6,2)7,2)8,2)9,2)10,2)11,2)12,2)13,2)14,2)15,2)16,2)17,2)18,2)19,2)20,2)21,2)22,2)23,2)24,2)25,2)26,2)27,2)28,2)29,2)30,2)31,2)32,2)33,2)34,2, 
+#3)0,3)1,3)2,3)3,3)4,3)5,3)6,3)7,3)8,3)9,3)10,3)11,3)12,3)13,3)14,3)15,3)16,3)17,3)18,3)19,3)20,3)21,3)22,3)23,3)24,3)25,3)26,3)27,3)28,3)29,3)30,3)31,3)32,3)33,3)34,3, 
+#4)0,4)1,4)2,4)3,4)4,4)5,4)6,4)7,4)8,4)9,4)10,4)11,4)12,4)13,4)14,4)15,4)16,4)17,4)18,4)19,4)20,4)21,4)22,4)23,4)24,4)25,4)26,4)27,4)28,4)29,4)30,4)31,4)32,4)33,4)34,4, 
+#5)0,5)1,5)2,5)3,5)4,5)5,5)6,5)7,5)8,5)9,5)10,5)11,5)12,5)13,5)14,5)15,5)16,5)17,5)18,5)19,5)20,5)21,5)22,5)23,5)24,5)25,5)26,5)27,5)28,5)29,5)30,5)31,5)32,5)33,5)34,5, 
+#6)0,6)1,6)2,6)3,6)4,6)5,6)6,6)7,6)8,6)9,6)10,6)11,6)12,6)13,6)14,6)15,6)16,6)17,6)18,6)19,6)20,6)21,6)22,6)23,6)24,6)25,6)26,6)27,6)28,6)29,6)30,6)31,6)32,6)33,6)34,6, 
+#7)0,7)1,7)2,7)3,7)4,7)5,7)6,7)7,7)8,7)9,7)10,7)11,7)12,7)13,7)14,7)15,7)16,7)17,7)18,7)19,7)20,7)21,7)22,7)23,7)24,7)25,7)26,7)27,7)28,7)29,7)30,7)31,7)32,7)33,7)34,7, 
+#8)0,8)1,8)2,8)3,8)4,8)5,8)6,8)7,8)8,8)9,8)10,8)11,8)12,8)13,8)14,8)15,8)16,8)17,8)18,8)19,8)20,8)21,8)22,8)23,8)24,8)25,8)26,8)27,8)28,8)29,8)30,8)31,8)32,8)33,8)34,8, 
+#9)0,9)1,9)2,9)3,9)4,9)5,9)6,9)7,9)8,9)9,9)10,9)11,9)12,9)13,9)14,9)15,9)16,9)17,9)18,9)19,9)20,9)21,9)22,9)23,9)24,9)25,9)26,9)27,9)28,9)29,9)30,9)31,9)32,9)33,9)34,9, 
+#10)0,10)1,10)2,10)3,10)4,10)5,10)6,10)7,10)8,10)9,10)10,10)11,10)12,10)13,10)14,10)15,10)16,10)17,10)18,10)19,10)20,10)21,10)22,10)23,10)24,10)25,10)26,10)27,10)28,10)29,10)30,10)31,10)32,10)33,10)34,10, 
+#11)0,11)1,11)2,11)3,11)4,11)5,11)6,11)7,11)8,11)9,11)10,11)11,11)12,11)13,11)14,11)15,11)16,11)17,11)18,11)19,11)20,11)21,11)22,11)23,11)24,11)25,11)26,11)27,11)28,11)29,11)30,11)31,11)32,11)33,11)34,11, 
+#12)0,12)1,12)2,12)3,12)4,12)5,12)6,12)7,12)8,12)9,12)10,12)11,12)12,12)13,12)14,12)15,12)16,12)17,12)18,12)19,12)20,12)21,12)22,12)23,12)24,12)25,12)26,12)27,12)28,12)29,12)30,12)31,12)32,12)33,12)34,12, 
+#13)0,13)1,13)2,13)3,13)4,13)5,13)6,13)7,13)8,13)9,13)10,13)11,13)12,13)13,13)14,13)15,13)16,13)17,13)18,13)19,13)20,13)21,13)22,13)23,13)24,13)25,13)26,13)27,13)28,13)29,13)30,13)31,13)32,13)33,13)34,13, 
+#14)0,14)1,14)2,14)3,14)4,14)5,14)6,14)7,14)8,14)9,14)10,14)11,14)12,14)13,14)14,14)15,14)16,14)17,14)18,14)19,14)20,14)21,14)22,14)23,14)24,14)25,14)26,14)27,14)28,14)29,14)30,14)31,14)32,14)33,14)34,14, 
+#15)0,15)1,15)2,15)3,15)4,15)5,15)6,15)7,15)8,15)9,15)10,15)11,15)12,15)13,15)14,15)15,15)16,15)17,15)18,15)19,15)20,15)21,15)22,15)23,15)24,15)25,15)26,15)27,15)28,15)29,15)30,15)31,15)32,15)33,15)34,15, 
+#16)0,16)1,16)2,16)3,16)4,16)5,16)6,16)7,16)8,16)9,16)10,16)11,16)12,16)13,16)14,16)15,16)16,16)17,16)18,16)19,16)20,16)21,16)22,16)23,16)24,16)25,16)26,16)27,16)28,16)29,16)30,16)31,16)32,16)33,16)34,16, 
+#17)0,17)1,17)2,17)3,17)4,17)5,17)6,17)7,17)8,17)9,17)10,17)11,17)12,17)13,17)14,17)15,17)16,17)17,17)18,17)19,17)20,17)21,17)22,17)23,17)24,17)25,17)26,17)27,17)28,17)29,17)30,17)31,17)32,17)33,17)34,17, 
+#18)0,18)1,18)2,18)3,18)4,18)5,18)6,18)7,18)8,18)9,18)10,18)11,18)12,18)13,18)14,18)15,18)16,18)17,18)18,18)19,18)20,18)21,18)22,18)23,18)24,18)25,18)26,18)27,18)28,18)29,18)30,18)31,18)32,18)33,18)34,18, 
+#19)0,19)1,19)2,19)3,19)4,19)5,19)6,19)7,19)8,19)9,19)10,19)11,19)12,19)13,19)14,19)15,19)16,19)17,19)18,19)19,19)20,19)21,19)22,19)23,19)24,19)25,19)26,19)27,19)28,19)29,19)30,19)31,19)32,19)33,19)34,19, 
+#20)0,20)1,20)2,20)3,20)4,20)5,20)6,20)7,20)8,20)9,20)10,20)11,20)12,20)13,20)14,20)15,20)16,20)17,20)18,20)19,20)20,20)21,20)22,20)23,20)24,20)25,20)26,20)27,20)28,20)29,20)30,20)31,20)32,20)33,20)34,20, 
+#21)0,21)1,21)2,21)3,21)4,21)5,21)6,21)7,21)8,21)9,21)10,21)11,21)12,21)13,21)14,21)15,21)16,21)17,21)18,21)19,21)20,21)21,21)22,21)23,21)24,21)25,21)26,21)27,21)28,21)29,21)30,21)31,21)32,21)33,21)34,21, 
+#22)0,22)1,22)2,22)3,22)4,22)5,22)6,22)7,22)8,22)9,22)10,22)11,22)12,22)13,22)14,22)15,22)16,22)17,22)18,22)19,22)20,22)21,22)22,22)23,22)24,22)25,22)26,22)27,22)28,22)29,22)30,22)31,22)32,22)33,22)34,22, 
+#23)0,23)1,23)2,23)3,23)4,23)5,23)6,23)7,23)8,23)9,23)10,23)11,23)12,23)13,23)14,23)15,23)16,23)17,23)18,23)19,23)20,23)21,23)22,23)23,23)24,23)25,23)26,23)27,23)28,23)29,23)30,23)31,23)32,23)33,23)34,23, 
+#24)0,24)1,24)2,24)3,24)4,24)5,24)6,24)7,24)8,24)9,24)10,24)11,24)12,24)13,24)14,24)15,24)16,24)17,24)18,24)19,24)20,24)21,24)22,24)23,24)24,24)25,24)26,24)27,24)28,24)29,24)30,24)31,24)32,24)33,24)34,24, 
+#25)0,25)1,25)2,25)3,25)4,25)5,25)6,25)7,25)8,25)9,25)10,25)11,25)12,25)13,25)14,25)15,25)16,25)17,25)18,25)19,25)20,25)21,25)22,25)23,25)24,25)25,25)26,25)27,25)28,25)29,25)30,25)31,25)32,25)33,25)34,25, 
+#26)0,26)1,26)2,26)3,26)4,26)5,26)6,26)7,26)8,26)9,26)10,26)11,26)12,26)13,26)14,26)15,26)16,26)17,26)18,26)19,26)20,26)21,26)22,26)23,26)24,26)25,26)26,26)27,26)28,26)29,26)30,26)31,26)32,26)33,26)34,26, 
+#27)0,27)1,27)2,27)3,27)4,27)5,27)6,27)7,27)8,27)9,27)10,27)11,27)12,27)13,27)14,27)15,27)16,27)17,27)18,27)19,27)20,27)21,27)22,27)23,27)24,27)25,27)26,27)27,27)28,27)29,27)30,27)31,27)32,27)33,27)34,27, 
+#28)0,28)1,28)2,28)3,28)4,28)5,28)6,28)7,28)8,28)9,28)10,28)11,28)12,28)13,28)14,28)15,28)16,28)17,28)18,28)19,28)20,28)21,28)22,28)23,28)24,28)25,28)26,28)27,28)28,28)29,28)30,28)31,28)32,28)33,28)34,28, 
+#29)0,29)1,29)2,29)3,29)4,29)5,29)6,29)7,29)8,29)9,29)10,29)11,29)12,29)13,29)14,29)15,29)16,29)17,29)18,29)19,29)20,29)21,29)22,29)23,29)24,29)25,29)26,29)27,29)28,29)29,29)30,29)31,29)32,29)33,29)34,29, 
+#30)0,30)1,30)2,30)3,30)4,30)5,30)6,30)7,30)8,30)9,30)10,30)11,30)12,30)13,30)14,30)15,30)16,30)17,30)18,30)19,30)20,30)21,30)22,30)23,30)24,30)25,30)26,30)27,30)28,30)29,30)30,30)31,30)32,30)33,30)34,30, 
+#31)0,31)1,31)2,31)3,31)4,31)5,31)6,31)7,31)8,31)9,31)10,31)11,31)12,31)13,31)14,31)15,31)16,31)17,31)18,31)19,31)20,31)21,31)22,31)23,31)24,31)25,31)26,31)27,31)28,31)29,31)30,31)31,31)32,31)33,31)34,31, 
+#32)0,32)1,32)2,32)3,32)4,32)5,32)6,32)7,32)8,32)9,32)10,32)11,32)12,32)13,32)14,32)15,32)16,32)17,32)18,32)19,32)20,32)21,32)22,32)23,32)24,32)25,32)26,32)27,32)28,32)29,32)30,32)31,32)32,32)33,32)34,32, 
+#33)0,33)1,33)2,33)3,33)4,33)5,33)6,33)7,33)8,33)9,33)10,33)11,33)12,33)13,33)14,33)15,33)16,33)17,33)18,33)19,33)20,33)21,33)22,33)23,33)24,33)25,33)26,33)27,33)28,33)29,33)30,33)31,33)32,33)33,33)34,33, 
+#34)0,34)1,34)2,34)3,34)4,34)5,34)6,34)7,34)8,34)9,34)10,34)11,34)12,34)13,34)14,34)15,34)16,34)17,34)18,34)19,34)20,34)21,34)22,34)23,34)24,34)25,34)26,34)27,34)28,34)29,34)30,34)31,34)32,34)33,34)34,34, 
diff -pruN 0.3.0-3/tests/test_037_FetchRowIndexPos.py 2.0.5-0ubuntu2/tests/test_037_FetchRowIndexPos.py
--- 0.3.0-3/tests/test_037_FetchRowIndexPos.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_037_FetchRowIndexPos.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,445 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_037_FetchRowIndexPos(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_037)
+
+  def run_test_037(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+
+    result = ibm_db.exec_immediate(conn, "SELECT * FROM staff WHERE id < 101")
+    
+    row = ibm_db.fetch_row(result)
+    while ( row ):
+      if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+        result2 = ibm_db.prepare(conn, "SELECT * FROM staff WHERE id < 101", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        result2 = ibm_db.prepare(conn, "SELECT * FROM staff WHERE id < 101")
+      ibm_db.execute(result2)
+      row2 = ibm_db.fetch_row(result2)
+      while ( row2 ):
+        print "%s : %s : %s : %s : %s" % (ibm_db.result(result2, 0), \
+                                          ibm_db.result(result2, 1), \
+                                          ibm_db.result(result2, 2), \
+                                          ibm_db.result(result2, 3), \
+                                          ibm_db.result(result2, 5))
+        row2 = ibm_db.fetch_row(result2)
+      row = ibm_db.fetch_row(result)
+
+#__END__
+#__LUW_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__ZOS_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__SYSTEMI_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__IDS_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
diff -pruN 0.3.0-3/tests/test_038_FetchRowIndexPosNested_01.py 2.0.5-0ubuntu2/tests/test_038_FetchRowIndexPosNested_01.py
--- 0.3.0-3/tests/test_038_FetchRowIndexPosNested_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_038_FetchRowIndexPosNested_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,448 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_038_FetchRowIndexPosNested_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_038)
+
+  def run_test_038(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+
+    if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+      result = ibm_db.exec_immediate(conn, "SELECT * FROM staff WHERE id < 101", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+    else:
+      result = ibm_db.exec_immediate(conn, "SELECT * FROM staff WHERE id < 101")
+
+    row = ibm_db.fetch_row(result)
+    while ( row ):
+      if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+        result2 = ibm_db.prepare(conn, "SELECT * FROM staff WHERE id < 101", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        result2 = ibm_db.prepare(conn, "SELECT * FROM staff WHERE id < 101")
+      ibm_db.execute(result2)
+      row2 = ibm_db.fetch_row(result2)
+      while ( row2 ):
+        print "%s : %s : %s : %s : %s\n" % (ibm_db.result(result2, 0), \
+                                            ibm_db.result(result2, 1), \
+                                            ibm_db.result(result2, 2), \
+                                            ibm_db.result(result2, 3), \
+                                            ibm_db.result(result2, 5))
+        row2 = ibm_db.fetch_row(result2)
+      row = ibm_db.fetch_row(result)
+
+#__END__
+#__LUW_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__ZOS_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__SYSTEMI_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#__IDS_EXPECTED__
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
+#10 : Sanders : 20 : Mgr   : 18357.50
+#20 : Pernal : 20 : Sales : 18171.25
+#30 : Marenghi : 38 : Mgr   : 17506.75
+#40 : OBrien : 38 : Sales : 18006.00
+#50 : Hanes : 15 : Mgr   : 20659.80
+#60 : Quigley : 38 : Sales : 16808.30
+#70 : Rothman : 15 : Sales : 16502.83
+#80 : James : 20 : Clerk : 13504.60
+#90 : Koonitz : 42 : Sales : 18001.75
+#100 : Plotz : 42 : Mgr   : 18352.80
diff -pruN 0.3.0-3/tests/test_039_FetchRowIndexPosNested_02.py 2.0.5-0ubuntu2/tests/test_039_FetchRowIndexPosNested_02.py
--- 0.3.0-3/tests/test_039_FetchRowIndexPosNested_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_039_FetchRowIndexPosNested_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,241 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_039_FetchRowIndexPosNested_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_039)
+
+  def run_test_039(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+
+    if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+      result = ibm_db.prepare(conn, "SELECT * FROM animals", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+    else:
+      result = ibm_db.prepare(conn, "SELECT * FROM animals")
+    ibm_db.execute(result)
+    row = ibm_db.fetch_row(result)
+    while ( row ):
+      if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+        result2 = ibm_db.prepare(conn, "SELECT * FROM animals", {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        result2 = ibm_db.prepare(conn, "SELECT * FROM animals")
+      ibm_db.execute(result2)
+      while (ibm_db.fetch_row(result2)):
+        print "%s : %s : %s : %s" % (ibm_db.result(result2, 0), \
+                                     ibm_db.result(result2, 1), \
+                                     ibm_db.result(result2, 2), \
+                                     ibm_db.result(result2, 3))
+      row = ibm_db.fetch_row(result)
+
+#__END__
+#__LUW_EXPECTED__
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#__ZOS_EXPECTED__
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#__SYSTEMI_EXPECTED__
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#__IDS_EXPECTED__
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
+#0 : cat : Pook             : 3.20
+#1 : dog : Peaches          : 12.30
+#2 : horse : Smarty           : 350.00
+#3 : gold fish : Bubbles          : 0.10
+#4 : budgerigar : Gizmo            : 0.20
+#5 : goat : Rickety Ride     : 9.70
+#6 : llama : Sweater          : 150.00
diff -pruN 0.3.0-3/tests/test_03a_ResultNonExistCol.py 2.0.5-0ubuntu2/tests/test_03a_ResultNonExistCol.py
--- 0.3.0-3/tests/test_03a_ResultNonExistCol.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_03a_ResultNonExistCol.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,61 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_03a_ResultNonExistCol(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_03a)
+
+  def run_test_03a(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+      
+      while ( ibm_db.fetch_row(stmt) ):
+         breed = ibm_db.result(stmt, 1)
+         print "string(%d) \"%s\"" % (len(breed), breed)
+         if (server.DBMS_NAME[0:3] == 'IDS'):
+            name = ibm_db.result(stmt, "name")
+         else:
+            name = ibm_db.result(stmt, "NAME")
+         print "string(%d) \"%s\"" % (len(name), name)
+    
+         # following field does not exist in result set
+         if (server.DBMS_NAME[0:3] == 'IDS'):
+           name = ibm_db.result(stmt, "passport")
+         else:
+           name = ibm_db.result(stmt, "PASSPORT")
+         print name
+      ibm_db.close(conn)
+      
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#None
+#__ZOS_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#None
+#__SYSTEMI_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#None
+#__IDS_EXPECTED__
+#string(3) "cat"
+#string(16) "Pook            "
+#None
diff -pruN 0.3.0-3/tests/test_040_FetchTuple.py 2.0.5-0ubuntu2/tests/test_040_FetchTuple.py
--- 0.3.0-3/tests/test_040_FetchTuple.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_040_FetchTuple.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,67 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_040_FetchTuple(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_040)
+  
+  def run_test_040(self): 
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      
+    # Drop the test table, in case it exists
+    drop = 'DROP TABLE animals'
+    try:
+      result = ibm_db.exec_immediate(conn, drop)
+    except:
+      pass
+      
+    # Create the test table
+    create = 'CREATE TABLE animals (id INTEGER, breed VARCHAR(32), name CHAR(16), weight DECIMAL(7,2))'
+    result = ibm_db.exec_immediate(conn, create)
+      
+    insert = "INSERT INTO animals values (0, 'cat', 'Pook', 3.2)"
+      
+    ibm_db.exec_immediate(conn, insert)
+      
+    stmt = ibm_db.exec_immediate(conn, "select * from animals")
+    
+    onerow = ibm_db.fetch_tuple(stmt)
+     
+    for element in onerow:
+      print element
+
+    ibm_db.rollback(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_041_FetchTupleMany_01.py 2.0.5-0ubuntu2/tests/test_041_FetchTupleMany_01.py
--- 0.3.0-3/tests/test_041_FetchTupleMany_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_041_FetchTupleMany_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,76 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_041_FetchTupleMany_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_041)
+
+  def run_test_041(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "select * from animals order by breed")
+      
+      i = 0
+      
+      cols = ibm_db.fetch_tuple( stmt )
+      while( cols ):
+        print "%s %s %s %s " % (cols[0], cols[1], cols[2], cols[3])
+        i+=1
+        cols = ibm_db.fetch_tuple( stmt )
+      
+      print "\nNumber of rows: %d" % i
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#4 budgerigar Gizmo            0.20 
+#0 cat Pook             3.20 
+#1 dog Peaches          12.30 
+#5 goat Rickety Ride     9.70 
+#3 gold fish Bubbles          0.10 
+#2 horse Smarty           350.00 
+#6 llama Sweater          150.00 
+#
+#Number of rows: 7
+#__ZOS_EXPECTED__
+#4 budgerigar Gizmo            0.20 
+#0 cat Pook             3.20 
+#1 dog Peaches          12.30 
+#5 goat Rickety Ride     9.70 
+#3 gold fish Bubbles          0.10 
+#2 horse Smarty           350.00 
+#6 llama Sweater          150.00 
+#
+#Number of rows: 7
+#__SYSTEMI_EXPECTED__
+#4 budgerigar Gizmo            0.20 
+#0 cat Pook             3.20 
+#1 dog Peaches          12.30 
+#5 goat Rickety Ride     9.70 
+#3 gold fish Bubbles          0.10 
+#2 horse Smarty           350.00 
+#6 llama Sweater          150.00 
+#
+#Number of rows: 7
+#__IDS_EXPECTED__
+#4 budgerigar Gizmo            0.20 
+#0 cat Pook             3.20 
+#1 dog Peaches          12.30 
+#5 goat Rickety Ride     9.70 
+#3 gold fish Bubbles          0.10 
+#2 horse Smarty           350.00 
+#6 llama Sweater          150.00 
+#
+#Number of rows: 7
diff -pruN 0.3.0-3/tests/test_042_FetchTupleMany_02.py 2.0.5-0ubuntu2/tests/test_042_FetchTupleMany_02.py
--- 0.3.0-3/tests/test_042_FetchTupleMany_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_042_FetchTupleMany_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,59 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_042_FetchTupleMany_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_042)
+
+  def run_test_042(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    #if ({}['EMPNO'] != None):
+    #  result = ibm_db.exec_immediate(conn, "select photo_format, picture, length(picture) from emp_photo where photo_format='jpg' and empno='" + {}['EMPNO'] + "'")
+    #  row = ibm_db.fetch_array(result)
+    #  if row:
+    #    # We'll be outputting a         
+    #    header('Content-type: image/' + row[0])
+    #    header('Content-Length: ' + row[2])
+    #    print row[1]
+    #  else:
+    #    print ibm_db.error()
+    #  continue
+    #else:
+    result = ibm_db.exec_immediate(conn, "select EMPNO, PHOTO_FORMAT from emp_photo where photo_format='jpg'")
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      print "<a href='test_042.php?EMPNO=%s' target=_blank>%s (%s)</a><br>" % (row[0], row[0], row[1])
+      row = ibm_db.fetch_tuple(result)
+
+#__END__
+#__LUW_EXPECTED__
+#<a href='test_042.php?EMPNO=000130' target=_blank>000130 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000140' target=_blank>000140 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000150' target=_blank>000150 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000190' target=_blank>000190 (jpg)</a><br>
+#__ZOS_EXPECTED__
+#<a href='test_042.php?EMPNO=000130' target=_blank>000130 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000140' target=_blank>000140 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000150' target=_blank>000150 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000190' target=_blank>000190 (jpg)</a><br>
+#__SYSTEMI_EXPECTED__
+#<a href='test_042.php?EMPNO=000130' target=_blank>000130 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000140' target=_blank>000140 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000150' target=_blank>000150 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000190' target=_blank>000190 (jpg)</a><br>
+#__IDS_EXPECTED__
+#<a href='test_042.php?EMPNO=000130' target=_blank>000130 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000140' target=_blank>000140 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000150' target=_blank>000150 (jpg)</a><br>
+#<a href='test_042.php?EMPNO=000190' target=_blank>000190 (jpg)</a><br>
diff -pruN 0.3.0-3/tests/test_043_FetchTupleMany_03.py 2.0.5-0ubuntu2/tests/test_043_FetchTupleMany_03.py
--- 0.3.0-3/tests/test_043_FetchTupleMany_03.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_043_FetchTupleMany_03.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,184 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_043_FetchTupleMany_03(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_043)
+
+  def run_test_043(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+    
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      #printf("%5d  ",row[0])
+      #printf("%-10s ",row[1])
+      #printf("%5d ",row[2])
+      #printf("%-7s ",row[3])
+      #printf("%5d ", row[4])
+      #printf("%15s ", row[5])
+      #printf("%10s ", row[6])
+      #print
+      if (row[4] == None):
+        row = row[0:4] + (0,) + row[5:]
+      if (row[6] == None):
+        row = row[0:6] + (' ',)
+      print "%5s  %-10s %5s %-7s %5s %15s %10s " % (row[0], row[1], row[2], row[3], row[4], row[5], row[6])
+      row = ibm_db.fetch_tuple(result)
+
+#__END__
+#__LUW_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__ZOS_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__SYSTEMI_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__IDS_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
diff -pruN 0.3.0-3/tests/test_044_FetchTupleMany_04.py 2.0.5-0ubuntu2/tests/test_044_FetchTupleMany_04.py
--- 0.3.0-3/tests/test_044_FetchTupleMany_04.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_044_FetchTupleMany_04.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,203 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_044_FetchTupleMany_04(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_044)
+
+  def run_test_044(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+     
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      #printf("%-10s ",row[0])
+      #printf("%-15s ",row[1])
+      #printf("%-15s ",row[2])
+      #printf("%4s",row[3])
+      #print
+      if (row[3] == None):
+        row = row[0:3] + (' ',)
+      print "%-10s %-15s %-15s %4s" % (row[0], row[1], row[2], row[3])
+      row = ibm_db.fetch_tuple(result)
+
+#__END__
+#__LUW_EXPECTED__
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__ZOS_EXPECTED__
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__SYSTEMI_EXPECTED__
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__IDS_EXPECTED__
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
diff -pruN 0.3.0-3/tests/test_045_FetchTupleBinaryData_01.py 2.0.5-0ubuntu2/tests/test_045_FetchTupleBinaryData_01.py
--- 0.3.0-3/tests/test_045_FetchTupleBinaryData_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_045_FetchTupleBinaryData_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,40 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_045_FetchTupleBinaryData_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_045)
+    
+  def run_test_045(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    fp = open("tests/pic1_out.jpg", "wb")
+    result = ibm_db.exec_immediate(conn, "SELECT picture FROM animal_pics WHERE name = 'Helmut'")
+    row = ibm_db.fetch_tuple(result)
+    if row:
+      fp.write(row[0])
+    else:
+      print ibm_db.stmt_errormsg()
+    fp.close()
+    cmp = (open('tests/pic1_out.jpg', 'rb').read() == open('tests/pic1.jpg', 'rb').read())
+    print 'Are the files the same:', cmp
+
+
+#__END__
+#__LUW_EXPECTED__
+#Are the files the same: True
+#__ZOS_EXPECTED__
+#Are the files the same: True
+#__SYSTEMI_EXPECTED__
+#Are the files the same: True
+#__IDS_EXPECTED__
+#Are the files the same: True
diff -pruN 0.3.0-3/tests/test_046_FetchTupleMany_05.py 2.0.5-0ubuntu2/tests/test_046_FetchTupleMany_05.py
--- 0.3.0-3/tests/test_046_FetchTupleMany_05.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_046_FetchTupleMany_05.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,68 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_046_FetchTupleMany_05(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_046)
+
+  def run_test_046(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.exec_immediate(conn, "SELECT empno, photo_format, photo_format FROM emp_photo") 
+    else:
+      result = ibm_db.exec_immediate(conn, "SELECT empno, photo_format, length(picture) FROM emp_photo")
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      if row[1] != 'xwd':
+        print "<a href='test_046.php?EMPNO=%s&FORMAT=%s' target=_blank>%s - %s - %s bytes</a><br>" % (row[0], row[1], row[0], row[1], row[2])
+      row = ibm_db.fetch_tuple(result)
+
+#__END__
+#__LUW_EXPECTED__
+#<a href='test_046.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a><br>
+#__ZOS_EXPECTED__
+#<a href='test_046.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a><br>
+#__SYSTEMI_EXPECTED__
+#<a href='test_046.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a><br>
+#__IDS_EXPECTED__
+#<a href='test_046.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - jpg bytes</a><br>
+#<a href='test_046.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - png bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - jpg bytes</a><br>
+#<a href='test_046.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - png bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - jpg bytes</a><br>
+#<a href='test_046.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - png bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - jpg bytes</a><br>
+#<a href='test_046.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - png bytes</a><br>
diff -pruN 0.3.0-3/tests/test_047_FetchTupleMany_06.py 2.0.5-0ubuntu2/tests/test_047_FetchTupleMany_06.py
--- 0.3.0-3/tests/test_047_FetchTupleMany_06.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_047_FetchTupleMany_06.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,73 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_047_FetchTupleMany_06(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_047)
+
+  def run_test_047(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.exec_immediate(conn, "SELECT empno, photo_format, photo_format from emp_photo")
+    else:
+      result = ibm_db.exec_immediate(conn, "SELECT empno, photo_format, length(PICTURE) from emp_photo")
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      if (row[1] == 'gif'):
+        print "<img src='test_047.php?EMPNO=%s&FORMAT=%s'><br>\n" % (row[0], row[1])
+      if (row[1] != 'xwd'):
+        print "<a href='test_047.php?EMPNO=%s&FORMAT=%s' target=_blank>%s - %s - %s bytes</a>\n<br>" % (row[0], row[1], row[0], row[1], row[2])
+      row = ibm_db.fetch_tuple(result)
+#__END__
+#__LUW_EXPECTED__
+#<a href='test_047.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a>
+#<br>
+#__ZOS_EXPECTED__
+#<a href='test_047.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg -153988 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a>
+#<br>
+#__SYSTEMI_EXPECTED__
+#<a href='test_047.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - 10291 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - 15398 bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - 10291 bytes</a>
+#<br>
+#__IDS_EXPECTED__
+#<a href='test_047.php?EMPNO=000130&FORMAT=jpg' target=_blank>000130 - jpg - jpg bytes</a>
+#<br><a href='test_047.php?EMPNO=000130&FORMAT=png' target=_blank>000130 - png - png bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=jpg' target=_blank>000140 - jpg - jpg bytes</a>
+#<br><a href='test_047.php?EMPNO=000140&FORMAT=png' target=_blank>000140 - png - png bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=jpg' target=_blank>000150 - jpg - jpg bytes</a>
+#<br><a href='test_047.php?EMPNO=000150&FORMAT=png' target=_blank>000150 - png - png bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=jpg' target=_blank>000190 - jpg - jpg bytes</a>
+#<br><a href='test_047.php?EMPNO=000190&FORMAT=png' target=_blank>000190 - png - png bytes</a>
+#<br>
diff -pruN 0.3.0-3/tests/test_048_FetchTupleBinaryData_02.py 2.0.5-0ubuntu2/tests/test_048_FetchTupleBinaryData_02.py
--- 0.3.0-3/tests/test_048_FetchTupleBinaryData_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_048_FetchTupleBinaryData_02.py	2014-01-30 10:56:43.000000000 +0000
@@ -0,0 +1,50 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_048_FetchTupleBinaryData_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_048)
+
+  def run_test_048(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if (not conn):
+      print "Could not make a connection." 
+      return 0
+    server = ibm_db.server_info( conn )
+    
+    fp = open("tests/spook_out.png", "wb")
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.exec_immediate(conn, "SELECT picture FROM animal_pics WHERE name = 'Spook'")
+    else:
+        result = ibm_db.exec_immediate(conn, "SELECT picture, LENGTH(picture) FROM animal_pics WHERE name = 'Spook'")
+    if (not result):
+      print "Could not execute SELECT statement."
+      return 0
+    row = ibm_db.fetch_tuple(result)
+    if row:
+      fp.write(row[0])
+    else:
+      print ibm_db.stmt_errormsg()
+    fp.close()
+    cmp = (open('tests/spook_out.png', "rb").read() == open('tests/spook.png', "rb").read())
+    print "Are the files the same:", cmp
+
+#__END__
+#__LUW_EXPECTED__
+#Are the files the same: True
+#__ZOS_EXPECTED__
+#Are the files the same: True
+#__SYSTEMI_EXPECTED__
+#Are the files the same: True
+#__IDS_EXPECTED__
+#Are the files the same: True
diff -pruN 0.3.0-3/tests/test_049_InsertNoneParam.py 2.0.5-0ubuntu2/tests/test_049_InsertNoneParam.py
--- 0.3.0-3/tests/test_049_InsertNoneParam.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_049_InsertNoneParam.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,61 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_049_InsertNoneParam(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_049)
+	  
+  def run_test_049(self):      
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      
+    insert = "INSERT INTO animals (id, breed, name, weight) VALUES (?, ?, ?, ?)"
+    select = 'SELECT id, breed, name, weight FROM animals WHERE weight IS NULL'
+      
+    if conn:
+      stmt = ibm_db.prepare(conn, insert)
+      
+      if ibm_db.execute(stmt, (None, 'ghost', None, None)):
+        stmt = ibm_db.exec_immediate(conn, select)
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ): 
+          #row.each { |child| puts child }
+          for child in row:
+            print child
+          row = ibm_db.fetch_tuple(stmt)
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__ZOS_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__SYSTEMI_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__IDS_EXPECTED__
+#None
+#ghost
+#None
+#None
diff -pruN 0.3.0-3/tests/test_050_AutocommitStatus.py 2.0.5-0ubuntu2/tests/test_050_AutocommitStatus.py
--- 0.3.0-3/tests/test_050_AutocommitStatus.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_050_AutocommitStatus.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,33 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_050_AutocommitStatus(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_050)
+
+  def run_test_050(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+     
+    ac = ibm_db.autocommit(conn)
+      
+    print ac
+
+#__END__
+#__LUW_EXPECTED__
+#1
+#__ZOS_EXPECTED__
+#1
+#__SYSTEMI_EXPECTED__
+#1
+#__IDS_EXPECTED__
+#1
diff -pruN 0.3.0-3/tests/test_051_SetAutocommit_01.py 2.0.5-0ubuntu2/tests/test_051_SetAutocommit_01.py
--- 0.3.0-3/tests/test_051_SetAutocommit_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_051_SetAutocommit_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,35 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_051_SetAutocommit_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_051)
+
+  def run_test_051(self):
+    options = { ibm_db.SQL_ATTR_AUTOCOMMIT:  ibm_db.SQL_AUTOCOMMIT_OFF }
+      
+    conn = ibm_db.connect(config.database, config.user, config.password, options)
+      
+    ac = ibm_db.autocommit(conn)
+      
+    print ac
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#__ZOS_EXPECTED__
+#0
+#__SYSTEMI_EXPECTED__
+#0
+#__IDS_EXPECTED__
+#0
diff -pruN 0.3.0-3/tests/test_052_SetAutocommit_02.py 2.0.5-0ubuntu2/tests/test_052_SetAutocommit_02.py
--- 0.3.0-3/tests/test_052_SetAutocommit_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_052_SetAutocommit_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,35 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_052_SetAutocommit_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_052)
+	  
+  def run_test_052(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    ibm_db.autocommit(conn, 0)
+      
+    ac = ibm_db.autocommit(conn)
+      
+    print ac
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#__ZOS_EXPECTED__
+#0
+#__SYSTEMI_EXPECTED__
+#0
+#__IDS_EXPECTED__
+#0
diff -pruN 0.3.0-3/tests/test_053_AttrThruConn.py 2.0.5-0ubuntu2/tests/test_053_AttrThruConn.py
--- 0.3.0-3/tests/test_053_AttrThruConn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_053_AttrThruConn.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,141 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_053_AttrThruConn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_053)
+
+  def run_test_053(self):
+    print "Client attributes passed through conection string:"
+
+    options1 = {ibm_db.SQL_ATTR_INFO_USERID: 'db2inst1'}
+    conn1 = ibm_db.connect(config.database, config.user, config.password, options1)
+    val = ibm_db.get_option(conn1, ibm_db.SQL_ATTR_INFO_USERID, 1)
+    print val
+
+    options2 = {ibm_db.SQL_ATTR_INFO_ACCTSTR: 'account'}
+    conn2 = ibm_db.connect(config.database, config.user, config.password, options2)
+    val = ibm_db.get_option(conn2, ibm_db.SQL_ATTR_INFO_ACCTSTR, 1)
+    print val
+
+    options3 = {ibm_db.SQL_ATTR_INFO_APPLNAME: 'myapp'}
+    conn3 = ibm_db.connect(config.database, config.user, config.password, options3)
+    val = ibm_db.get_option(conn3, ibm_db.SQL_ATTR_INFO_APPLNAME, 1)
+    print val
+
+    options4 = {ibm_db.SQL_ATTR_INFO_WRKSTNNAME: 'workstation'}
+    conn4 = ibm_db.connect(config.database, config.user, config.password, options4)
+    val = ibm_db.get_option(conn4, ibm_db.SQL_ATTR_INFO_WRKSTNNAME, 1)
+    print val
+
+    options5 = {ibm_db.SQL_ATTR_INFO_USERID: 'kfb',
+                ibm_db.SQL_ATTR_INFO_WRKSTNNAME: 'kfbwork',
+                ibm_db.SQL_ATTR_INFO_ACCTSTR: 'kfbacc',
+                ibm_db.SQL_ATTR_INFO_APPLNAME: 'kfbapp'}
+    conn5 = ibm_db.connect(config.database, config.user, config.password, options5)
+    val = ibm_db.get_option(conn5, ibm_db.SQL_ATTR_INFO_USERID, 1)
+    print val
+    val = ibm_db.get_option(conn5, ibm_db.SQL_ATTR_INFO_ACCTSTR, 1)
+    print val
+    val = ibm_db.get_option(conn5, ibm_db.SQL_ATTR_INFO_APPLNAME, 1)
+    print val
+    val = ibm_db.get_option(conn5, ibm_db.SQL_ATTR_INFO_WRKSTNNAME, 1)
+    print val
+
+    print "Client attributes passed post-conection:"
+
+    options5 = {ibm_db.SQL_ATTR_INFO_USERID: 'db2inst1'}
+    conn5 = ibm_db.connect(config.database, config.user, config.password)
+    rc = ibm_db.set_option(conn5, options5, 1)
+    val = ibm_db.get_option(conn5, ibm_db.SQL_ATTR_INFO_USERID, 1)
+    print val
+
+    options6 = {ibm_db.SQL_ATTR_INFO_ACCTSTR: 'account'}
+    conn6 = ibm_db.connect(config.database, config.user, config.password)
+    rc = ibm_db.set_option(conn6, options6, 1)
+    val = ibm_db.get_option(conn6, ibm_db.SQL_ATTR_INFO_ACCTSTR, 1)
+    print val
+
+    options7 = {ibm_db.SQL_ATTR_INFO_APPLNAME: 'myapp'}
+    conn7 = ibm_db.connect(config.database, config.user, config.password)
+    rc = ibm_db.set_option(conn7, options7, 1)
+    val = ibm_db.get_option(conn7, ibm_db.SQL_ATTR_INFO_APPLNAME, 1)
+    print val
+
+    options8 = {ibm_db.SQL_ATTR_INFO_WRKSTNNAME: 'workstation'}
+    conn8 = ibm_db.connect(config.database, config.user, config.password)
+    rc = ibm_db.set_option(conn8, options8, 1)
+    val = ibm_db.get_option(conn8, ibm_db.SQL_ATTR_INFO_WRKSTNNAME, 1)
+    print val
+
+#__END__
+#__LUW_EXPECTED__
+#Client attributes passed through conection string:
+#db2inst1
+#account
+#myapp
+#workstation
+#kfb
+#kfbacc
+#kfbapp
+#kfbwork
+#Client attributes passed post-conection:
+#db2inst1
+#account
+#myapp
+#workstation
+#__ZOS_EXPECTED__
+#Client attributes passed through conection string:
+#db2inst1
+#account
+#myapp
+#workstation
+#kfb
+#kfbacc
+#kfbapp
+#kfbwork
+#Client attributes passed post-conection:
+#db2inst1
+#account
+#myapp
+#workstation
+#__SYSTEMI_EXPECTED__
+#Client attributes passed through conection string:
+#db2inst1
+#account
+#myapp
+#workstation
+#kfb
+#kfbacc
+#kfbapp
+#kfbwork
+#Client attributes passed post-conection:
+#db2inst1
+#account
+#myapp
+#workstation
+#__IDS_EXPECTED__
+#Client attributes passed through conection string:
+#db2inst1
+#account
+#myapp
+#workstation
+#kfb
+#kfbacc
+#kfbapp
+#kfbwork
+#Client attributes passed post-conection:
+#db2inst1
+#account
+#myapp
+#workstation
diff -pruN 0.3.0-3/tests/test_054_CursorType.py 2.0.5-0ubuntu2/tests/test_054_CursorType.py
--- 0.3.0-3/tests/test_054_CursorType.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_054_CursorType.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,65 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_054_CursorType(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_054)
+
+  def run_test_054(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+
+    stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals")
+    val = ibm_db.get_option(stmt, ibm_db.SQL_ATTR_CURSOR_TYPE, 0)
+    print val
+
+    op = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_FORWARD_ONLY}
+    stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals", op)
+    val = ibm_db.get_option(stmt, ibm_db.SQL_ATTR_CURSOR_TYPE, 0)
+    print val
+
+    if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+      op = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN}
+    else:
+      op = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_STATIC}
+    stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals", op)
+    val = ibm_db.get_option(stmt, ibm_db.SQL_ATTR_CURSOR_TYPE, 0)
+    print val
+
+    op = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_STATIC}
+    stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals", op)
+    val = ibm_db.get_option(stmt, ibm_db.SQL_ATTR_CURSOR_TYPE, 0)
+    print val
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#0
+#1
+#3
+#__ZOS_EXPECTED__
+#0
+#0
+#1
+#3
+#__SYSTEMI_EXPECTED__
+#0
+#0
+#3
+#3
+#__IDS_EXPECTED__
+#0
+#0
+#3
+#3
+#
diff -pruN 0.3.0-3/tests/test_060_Tables_01.py 2.0.5-0ubuntu2/tests/test_060_Tables_01.py
--- 0.3.0-3/tests/test_060_Tables_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_060_Tables_01.py	2014-01-30 11:13:59.000000000 +0000
@@ -0,0 +1,98 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_060_Tables_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_060)
+
+  def run_test_060(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    create = 'CREATE SCHEMA AUTHORIZATION t'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t1( c1 INTEGER, c2 VARCHAR(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t2( c1 INTEGER, c2 VARCHAR(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t3( c1 INTEGER, c2 VARCHAR(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t4( c1 INTEGER, c2 VARCHAR(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    if conn:
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.tables(conn, None, 't')
+      else:
+        result = ibm_db.tables(conn, None, 'T')
+      i = 0
+      row = ibm_db.fetch_both(result)
+      while ( row ):
+        if (i < 4):
+          print "/%s/%s" % (row[1], row[2])
+        i = i + 1
+        row = ibm_db.fetch_both(result)
+
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t1')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t2')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t3')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
+
+      print "done!"
+    else:
+      print "no connection: #{ibm_db.conn_errormsg}";    
+
+#__END__
+#__LUW_EXPECTED__
+#/T/T1
+#/T/T2
+#/T/T3
+#/T/T4
+#done!
+#__ZOS_EXPECTED__
+#/T/T1
+#/T/T2
+#/T/T3
+#/T/T4
+#done!
+#__SYSTEMI_EXPECTED__
+#/T/T1
+#/T/T2
+#/T/T3
+#/T/T4
+#done!
+#__IDS_EXPECTED__
+#/t/t1
+#/t/t2
+#/t/t3
+#/t/t4
+#done!
diff -pruN 0.3.0-3/tests/test_061_Tables_02.py 2.0.5-0ubuntu2/tests/test_061_Tables_02.py
--- 0.3.0-3/tests/test_061_Tables_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_061_Tables_02.py	2014-01-30 11:17:11.000000000 +0000
@@ -0,0 +1,104 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_061_Tables_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_061)
+
+  def run_test_061(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    create = 'CREATE SCHEMA AUTHORIZATION t'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+      
+    create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    if conn:
+      server = ibm_db.server_info( conn )
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+        ibm_db.set_option(conn, op, 1)
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.tables(conn, None, 't');
+      else:
+        result = ibm_db.tables(conn, None, 'T');    
+      i = 0
+      row = ibm_db.fetch_both(result)
+      while ( row ):
+        str = row['TABLE_SCHEM'] + row['TABLE_NAME'] + row['TABLE_TYPE']
+        if (i < 4):
+          print str
+        i = i + 1
+        row = ibm_db.fetch_both(result)
+
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t1')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t2')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t3')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
+
+      print "done!"
+    else:
+      print "no connection: %s" % ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#TT1TABLE
+#TT2TABLE
+#TT3TABLE
+#TT4TABLE
+#done!
+#__ZOS_EXPECTED__
+#TT1TABLE
+#TT2TABLE
+#TT3TABLE
+#TT4TABLE
+#done!
+#__SYSTEMI_EXPECTED__
+#TT1TABLE
+#TT2TABLE
+#TT3TABLE
+#TT4TABLE
+#done!
+#__IDS_EXPECTED__
+#tt1TABLE%s
+#tt2TABLE%s
+#tt3TABLE%s
+#tt4TABLE%s
+#done!
diff -pruN 0.3.0-3/tests/test_062_Tables_03.py 2.0.5-0ubuntu2/tests/test_062_Tables_03.py
--- 0.3.0-3/tests/test_062_Tables_03.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_062_Tables_03.py	2014-01-30 11:34:55.000000000 +0000
@@ -0,0 +1,100 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_062_Tables_03(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_062)
+
+  def run_test_062(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    create = 'CREATE SCHEMA AUTHORIZATION t'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    if conn:
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        schema = 't'
+      else:
+        schema = 'T'
+      result = ibm_db.tables(conn,None,schema);    
+      i = 0
+      row = ibm_db.fetch_both(result)
+      while ( row ):
+        str = row[1] + "/" + row[2] + "/" + row[3]
+        if (i < 4):
+          print str
+        i = i + 1
+        row = ibm_db.fetch_both(result)
+
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t1')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t2')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t3')
+      ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
+
+      print "done!"
+    else:
+      print "no connection: #{ibm_db.conn_errormsg}";    
+
+#__END__
+#__LUW_EXPECTED__
+#T/T1/TABLE
+#T/T2/TABLE
+#T/T3/TABLE
+#T/T4/TABLE
+#done!
+#__ZOS_EXPECTED__
+#T/T1/TABLE
+#T/T2/TABLE
+#T/T3/TABLE
+#T/T4/TABLE
+#done!
+#__SYSTEMI_EXPECTED__
+#T/T1/TABLE
+#T/T2/TABLE
+#T/T3/TABLE
+#T/T4/TABLE
+#done!
+#__IDS_EXPECTED__
+#t/t1/TABLE%s
+#t/t2/TABLE%s
+#t/t3/TABLE%s
+#t/t4/TABLE%s
+#done!
diff -pruN 0.3.0-3/tests/test_063_Tables_04.py 2.0.5-0ubuntu2/tests/test_063_Tables_04.py
--- 0.3.0-3/tests/test_063_Tables_04.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_063_Tables_04.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,36 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_063_Tables_04(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_063)
+
+  def run_test_063(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+      
+    result = ibm_db.tables(conn, None, "SYSIBM", "", "VIEW")
+    
+    if (type(result) == ibm_db.IBM_DBStatement):
+      print "Resource is a DB2 Statement"
+      
+    ibm_db.free_result(result)
+
+#__END__
+#__LUW_EXPECTED__
+#Resource is a DB2 Statement
+#__ZOS_EXPECTED__
+#Resource is a DB2 Statement
+#__SYSTEMI_EXPECTED__
+#Resource is a DB2 Statement
+#__IDS_EXPECTED__
+#Resource is a DB2 Statement
diff -pruN 0.3.0-3/tests/test_064_Tables_05.py 2.0.5-0ubuntu2/tests/test_064_Tables_05.py
--- 0.3.0-3/tests/test_064_Tables_05.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_064_Tables_05.py	2014-01-30 11:20:27.000000000 +0000
@@ -0,0 +1,106 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_064_Tables_05(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_064)
+
+  def run_test_064(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    create = 'CREATE SCHEMA AUTHORIZATION t'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create)
+    except:
+      pass
+    
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.tables(conn, None, 't')
+    else:
+        result = ibm_db.tables(conn, None, 'T')
+    
+    for i in range(0, ibm_db.num_fields(result)):
+      print "%s, " % ibm_db.field_name(result, i)
+    print
+    print
+  
+    i = 0
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      ibm_db.num_fields(result)
+      if (i < 4):
+        print ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , \n"
+      i = i + 1
+      row = ibm_db.fetch_tuple(result)
+
+    ibm_db.free_result(result)
+
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t1')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t2')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t3')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
+
+#__END__
+#__LUW_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, T, T1, TABLE, , 
+#, T, T2, TABLE, , 
+#, T, T3, TABLE, , 
+#, T, T4, TABLE, , 
+#__ZOS_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, T, T1, TABLE, , 
+#, T, T2, TABLE, , 
+#, T, T3, TABLE, , 
+#, T, T4, TABLE, , 
+#__SYSTEMI_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, T, T1, TABLE, , 
+#, T, T2, TABLE, , 
+#, T, T3, TABLE, , 
+#, T, T4, TABLE, , 
+#__IDS_EXPECTED__
+#table_cat, table_schem, table_name, table_type, remarks, 
+#
+#, t, t1, TABLE%s, , 
+#, t, t2, TABLE%s, , 
+#, t, t3, TABLE%s, , 
+#, t, t4, TABLE%s, ,
diff -pruN 0.3.0-3/tests/test_065_FilterTableName.py 2.0.5-0ubuntu2/tests/test_065_FilterTableName.py
--- 0.3.0-3/tests/test_065_FilterTableName.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_065_FilterTableName.py	2014-01-30 11:30:20.000000000 +0000
@@ -0,0 +1,93 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_065_FilterTableName(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_065)
+
+  def run_test_065(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    create = 'CREATE SCHEMA AUTHORIZATION t'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t1( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t2( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+      
+    create = 'CREATE TABLE t.t3( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    create = 'CREATE TABLE t.t4( c1 integer, c2 varchar(40))'
+    try:
+      result = ibm_db.exec_immediate(conn, create) 
+    except:
+      pass
+    
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.tables(conn, None, '%', "t3")
+    else:
+      result = ibm_db.tables(conn, None, '%', "T3")
+    
+    columns = ibm_db.num_fields(result)
+    
+    for i in range(0, columns):
+      print "%s, " % ibm_db.field_name(result, i)
+    print "\n\n"
+   
+    row = ibm_db.fetch_tuple(result) 
+    while ( row ):
+      final = ", " + row[1] + ", " + row[2] + ", " + row[3] + ", , ";
+      row = ibm_db.fetch_tuple(result)
+
+    print final
+    
+    ibm_db.free_result(result)
+
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t1')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t2')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t3')
+    ibm_db.exec_immediate(conn, 'DROP TABLE t.t4')
+
+#__END__
+#__LUW_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, T, T3, TABLE, , 
+#__ZOS_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, %sT, T3, TABLE, , 
+#__SYSTEMI_EXPECTED__
+#TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS, 
+#
+#, T, T3, TABLE, , 
+#__IDS_EXPECTED__
+#table_cat, table_schem, table_name, table_type, remarks, 
+#
+#, t, t3, TABLE, ,
diff -pruN 0.3.0-3/tests/test_066_TableObjects.py 2.0.5-0ubuntu2/tests/test_066_TableObjects.py
--- 0.3.0-3/tests/test_066_TableObjects.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_066_TableObjects.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,187 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_066_TableObjects(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_066)
+
+  def run_test_066(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.tables(conn, None, config.user.lower(), 'animals')
+    else:
+      result = ibm_db.tables(conn, None, config.user.upper(), 'ANIMALS')
+      
+#    NOTE: This is a workaround
+#    function fetch_object() to be implemented...
+#    row = ibm_db.fetch_object(result)
+    
+    class Row:
+        pass
+
+    data = ibm_db.fetch_assoc(result)
+    while ( data ):
+      row = Row()
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        row.table_schem = data['table_schem']
+        row.table_name = data['table_name']
+        row.table_type = data['table_type']
+        row.remarks = data['remarks']
+          
+        print "Schema:  %s" % row.table_schem
+        print "Name:    %s" % row.table_name
+        print "Type:    %s" % row.table_type
+        print "Remarks: %s\n" % row.remarks
+      else:
+        row.TABLE_SCHEM = data['TABLE_SCHEM']
+        row.TABLE_NAME = data['TABLE_NAME']
+        row.TABLE_TYPE = data['TABLE_TYPE']
+        row.REMARKS = data['REMARKS']
+          
+        print "Schema:  %s" % row.TABLE_SCHEM
+        print "Name:    %s" % row.TABLE_NAME
+        print "Type:    %s" % row.TABLE_TYPE
+        print "Remarks: %s\n" % row.REMARKS
+#      row = ibm_db.fetch_object(result)
+      data = ibm_db.fetch_assoc(result)
+
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.tables(conn, None, config.user.lower(), 'animal_pics')
+    else:
+      result = ibm_db.tables(conn, None, config.user.upper(), 'ANIMAL_PICS')
+    
+#    row = ibm_db.fetch_object(result)
+    data = ibm_db.fetch_assoc(result)
+    while (data ):
+      row = Row()
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        row.table_schem = data['table_schem']
+        row.table_name = data['table_name']
+        row.table_type = data['table_type']
+        row.remarks = data['remarks']
+        
+        print "Schema:  %s" % row.table_schem
+        print "Name:    %s" % row.table_name
+        print "Type:    %s" % row.table_type
+        print "Remarks: %s\n" % row.remarks
+      else:
+        row.TABLE_SCHEM = data['TABLE_SCHEM']
+        row.TABLE_NAME = data['TABLE_NAME']
+        row.TABLE_TYPE = data['TABLE_TYPE']
+        row.REMARKS = data['REMARKS']
+        
+        print "Schema:  %s" % row.TABLE_SCHEM
+        print "Name:    %s" % row.TABLE_NAME
+        print "Type:    %s" % row.TABLE_TYPE
+        print "Remarks: %s\n" % row.REMARKS
+#      row = ibm_db.fetch_object(result)
+      data = ibm_db.fetch_assoc(result)
+      
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      result = ibm_db.tables(conn, None, config.user.lower(), 'anime_cat')
+    else:
+      result = ibm_db.tables(conn, None, config.user.upper(), 'ANIME_CAT')
+    
+#    row = ibm_db.fetch_object(result)
+    data = ibm_db.fetch_assoc(result)
+    while ( data ): 
+      row = Row()
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        row.table_schem = data['table_schem']
+        row.table_name = data['table_name']
+        row.table_type = data['table_type']
+        row.remarks = data['remarks']
+        
+        print "Schema:  %s" % row.table_schem
+        print "Name:    %s" % row.table_name
+        print "Type:    %s" % row.table_type
+        print "Remarks: %s\n" % row.remarks
+      else:
+        row.TABLE_SCHEM = data['TABLE_SCHEM']
+        row.TABLE_NAME = data['TABLE_NAME']
+        row.TABLE_TYPE = data['TABLE_TYPE']
+        row.REMARKS = data['REMARKS']
+        
+        print "Schema:  %s" % row.TABLE_SCHEM
+        print "Name:    %s" % row.TABLE_NAME
+        print "Type:    %s" % row.TABLE_TYPE
+        print "Remarks: %s\n" % row.REMARKS
+#      row = ibm_db.fetch_object(result)
+      data = ibm_db.fetch_assoc(result)
+    
+    ibm_db.free_result(result)
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#Schema:  %s
+#Name:    ANIMALS
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    ANIMAL_PICS
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    ANIME_CAT
+#Type:    VIEW
+#Remarks: None
+#__ZOS_EXPECTED__
+#Schema:  %s
+#Name:    ANIMALS
+#Type:    TABLE
+#Remarks: 
+#
+#Schema:  %s
+#Name:    ANIMAL_PICS
+#Type:    TABLE
+#Remarks: 
+#
+#Schema:  %s
+#Name:    ANIME_CAT
+#Type:    VIEW
+#Remarks: 
+#__SYSTEMI_EXPECTED__
+#Schema:  %s
+#Name:    ANIMALS
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    ANIMAL_PICS
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    ANIME_CAT
+#Type:    VIEW
+#Remarks: None
+#__IDS_EXPECTED__
+#Schema:  %s
+#Name:    animals
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    animal_pics
+#Type:    TABLE
+#Remarks: None
+#
+#Schema:  %s
+#Name:    anime_cat
+#Type:    VIEW
+#Remarks: None
diff -pruN 0.3.0-3/tests/test_070_Close.py 2.0.5-0ubuntu2/tests/test_070_Close.py
--- 0.3.0-3/tests/test_070_Close.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_070_Close.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,43 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_070_Close(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_070)
+
+  def run_test_070(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      if (type(conn) == ibm_db.IBM_DBConnection):
+        print "Resource is a DB2 Connection"
+      
+      rc = ibm_db.close(conn)
+      
+      print rc
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Resource is a DB2 Connection
+#True
+#__ZOS_EXPECTED__
+#Resource is a DB2 Connection
+#True
+#__SYSTEMI_EXPECTED__
+#Resource is a DB2 Connection
+#True
+#__IDS_EXPECTED__
+#Resource is a DB2 Connection
+#True
diff -pruN 0.3.0-3/tests/test_071_CloseSuccess.py 2.0.5-0ubuntu2/tests/test_071_CloseSuccess.py
--- 0.3.0-3/tests/test_071_CloseSuccess.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_071_CloseSuccess.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,43 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_071_CloseSuccess(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_071)
+
+  def run_test_071(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      rc = ibm_db.close(conn)
+      if (rc == True):
+        print "ibm_db.close succeeded"
+      else:
+        print "ibm_db.close FAILED\n"
+    else:
+      print "%s" % ibm_db.conn_errormsg()
+      print ",sqlstate=%s" % ibm_db.conn_error()
+      print "%s" % ibm_db.conn_errormsg()
+      print "%s" % ibm_db.conn_errormsg()
+      print "%s" % ibm_db.conn_errormsg()
+      print "%s" % ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#ibm_db.close succeeded
+#__ZOS_EXPECTED__
+#ibm_db.close succeeded
+#__SYSTEMI_EXPECTED__
+#ibm_db.close succeeded
+#__IDS_EXPECTED__
+#ibm_db.close succeeded
diff -pruN 0.3.0-3/tests/test_080_ConnWrongDbAlias.py 2.0.5-0ubuntu2/tests/test_080_ConnWrongDbAlias.py
--- 0.3.0-3/tests/test_080_ConnWrongDbAlias.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_080_ConnWrongDbAlias.py	2014-01-30 12:13:04.000000000 +0000
@@ -0,0 +1,38 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_080_ConnWrongDbAlias(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_080)
+
+  def run_test_080(self):
+    try:
+      conn = ibm_db.connect("x", config.user, config.password)
+      print "??? No way."
+    except:
+      print ibm_db.conn_error()
+ 
+    #if conn:
+    #  print "??? No way."
+    #else:
+    #  print ibm_db.conn_error()
+
+#__END__
+#__LUW_EXPECTED__
+#08001
+#__ZOS_EXPECTED__
+#08001
+#__SYSTEMI_EXPECTED__
+#08001
+#__IDS_EXPECTED__
+#08001
diff -pruN 0.3.0-3/tests/test_081_ConnWrongUser.py 2.0.5-0ubuntu2/tests/test_081_ConnWrongUser.py
--- 0.3.0-3/tests/test_081_ConnWrongUser.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_081_ConnWrongUser.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,39 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_081_ConnWrongUser(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_081)
+
+  def run_test_081(self):
+    try:
+      conn = ibm_db.connect(config.database, "y", config.password)
+      print "??? No way."
+    except:
+      print ibm_db.conn_error()
+
+    #if conn:
+    #  print "??? No way."
+    #else:
+    #  err = ibm_db.conn_error 
+    #  print err
+
+#__END__
+#__LUW_EXPECTED__
+#08001
+#__ZOS_EXPECTED__
+#08001
+#__SYSTEMI_EXPECTED__
+#08001
+#__IDS_EXPECTED__
+#08001
diff -pruN 0.3.0-3/tests/test_082_ConnWrongPwd.py 2.0.5-0ubuntu2/tests/test_082_ConnWrongPwd.py
--- 0.3.0-3/tests/test_082_ConnWrongPwd.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_082_ConnWrongPwd.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_082_ConnWrongPwd(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_082)
+
+  def run_test_082(self):
+    try:
+      conn = ibm_db.connect(config.database, config.user, "z")
+      print "??? No way."
+    except:
+      err = ibm_db.conn_error()
+      print err
+
+#__END__
+#__LUW_EXPECTED__
+#08001
+#__ZOS_EXPECTED__
+#08001
+#__SYSTEMI_EXPECTED__
+#08001
+#__IDS_EXPECTED__
+#08001
diff -pruN 0.3.0-3/tests/test_090_ConnmsgWrongDbAlias.py 2.0.5-0ubuntu2/tests/test_090_ConnmsgWrongDbAlias.py
--- 0.3.0-3/tests/test_090_ConnmsgWrongDbAlias.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_090_ConnmsgWrongDbAlias.py	2014-01-30 12:59:34.000000000 +0000
@@ -0,0 +1,34 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_090_ConnmsgWrongDbAlias(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_090)
+
+  def run_test_090(self):
+    try:
+      conn = ibm_db.connect("x", config.user, config.password)
+      print "??? No way."
+    except:
+      err = ibm_db.conn_errormsg()
+      print err
+
+#__END__
+#__LUW_EXPECTED__
+#[IBM][CLI Driver] SQL1013N  The database alias name or database name "X" could not be found.  SQLSTATE=42705 SQLCODE=-1013
+#__ZOS_EXPECTED__
+#[IBM][CLI Driver] SQL1013N  The database alias name or database name "X" could not be found.  SQLSTATE=42705 SQLCODE=-1013
+#__SYSTEMI_EXPECTED__
+#[IBM][CLI Driver] SQL1013N  The database alias name or database name "X" could not be found.  SQLSTATE=42705 SQLCODE=-1013
+#__IDS_EXPECTED__
+#[IBM][CLI Driver] SQL1013N  The database alias name or database name "X" could not be found.  SQLSTATE=42705 SQLCODE=-1013
diff -pruN 0.3.0-3/tests/test_091_ConnmsgWrongUser.py 2.0.5-0ubuntu2/tests/test_091_ConnmsgWrongUser.py
--- 0.3.0-3/tests/test_091_ConnmsgWrongUser.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_091_ConnmsgWrongUser.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_091_ConnmsgWrongUser(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_091)
+
+  def run_test_091(self):
+    try:
+      conn = ibm_db.connect(config.database, "y", config.password)
+      print "??? No way."
+    except:
+      err = ibm_db.conn_errormsg()
+      print err
+
+#__END__
+#__LUW_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#__ZOS_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "15" ("PROCESSING FAILURE").  SQLSTATE=08001 SQLCODE=-30082
+#__SYSTEMI_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#__IDS_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
diff -pruN 0.3.0-3/tests/test_092_ConnmsgWrongPwd.py 2.0.5-0ubuntu2/tests/test_092_ConnmsgWrongPwd.py
--- 0.3.0-3/tests/test_092_ConnmsgWrongPwd.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_092_ConnmsgWrongPwd.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,34 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_092_ConnmsgWrongPwd(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_092)
+
+  def run_test_092(self):
+    try:
+      conn = ibm_db.connect(config.database, config.user, "z")
+      print "??? No way."
+    except:
+      err = ibm_db.conn_errormsg()
+      print err
+
+#__END__
+#__LUW_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#__ZOS_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "15" ("PROCESSING FAILURE").  SQLSTATE=08001 SQLCODE=-30082
+#__SYSTEMI_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#__IDS_EXPECTED__
+#[IBM][CLI Driver] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
diff -pruN 0.3.0-3/tests/test_100_SelectDeleteInsertFieldCount.py 2.0.5-0ubuntu2/tests/test_100_SelectDeleteInsertFieldCount.py
--- 0.3.0-3/tests/test_100_SelectDeleteInsertFieldCount.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_100_SelectDeleteInsertFieldCount.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,78 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_100_SelectDeleteInsertFieldCount(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_100)
+
+  def run_test_100(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals ORDER BY breed")
+    
+      fields1 = ibm_db.num_fields(stmt)
+      
+      print "int(%d)" % fields1
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT name, breed FROM animals ORDER BY breed")
+      fields2 = ibm_db.num_fields(stmt)
+      
+      print "int(%d)" % fields2
+      
+      stmt = ibm_db.exec_immediate(conn, "DELETE FROM animals")
+      fields3 = ibm_db.num_fields(stmt)
+      
+      print "int(%d)" % fields3
+      
+      stmt = ibm_db.exec_immediate(conn, "INSERT INTO animals values (0, 'cat', 'Pook', 3.2)")
+      fields4 = ibm_db.num_fields(stmt)
+        
+      print "int(%d)" % fields4
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT name, breed, 'TEST' FROM animals")
+      fields5 = ibm_db.num_fields(stmt)
+        
+      print "int(%d)" % fields5
+
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#int(4)
+#int(2)
+#int(0)
+#int(0)
+#int(3)
+#__ZOS_EXPECTED__
+#int(4)
+#int(2)
+#int(0)
+#int(0)
+#int(3)
+#__SYSTEMI_EXPECTED__
+#int(4)
+#int(2)
+#int(0)
+#int(0)
+#int(3)
+#__IDS_EXPECTED__
+#int(4)
+#int(2)
+#int(0)
+#int(0)
+#int(3)
diff -pruN 0.3.0-3/tests/test_101_InsertDeleteFieldCount.py 2.0.5-0ubuntu2/tests/test_101_InsertDeleteFieldCount.py
--- 0.3.0-3/tests/test_101_InsertDeleteFieldCount.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_101_InsertDeleteFieldCount.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,56 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_101_InsertDeleteFieldCount(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_101)
+
+  def run_test_101(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if conn:
+      result = ibm_db.exec_immediate(conn,"insert into t_string values(123,1.222333,'one to one')")
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col: %d" % cols
+        rows = ibm_db.num_rows(result)
+        print "affected row: %d" % rows
+      result = ibm_db.exec_immediate(conn,"delete from t_string where a=123")
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col: %d" % cols
+        rows = ibm_db.num_rows(result)
+        print "affected row: %d" % rows
+    else:
+      print "no connection";    
+
+#__END__
+#__LUW_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__ZOS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__SYSTEMI_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
+#__IDS_EXPECTED__
+#col: 0
+#affected row: 1
+#col: 0
+#affected row: 1
diff -pruN 0.3.0-3/tests/test_102_NumFieldsSelect_01.py 2.0.5-0ubuntu2/tests/test_102_NumFieldsSelect_01.py
--- 0.3.0-3/tests/test_102_NumFieldsSelect_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_102_NumFieldsSelect_01.py	2014-01-30 13:03:30.000000000 +0000
@@ -0,0 +1,43 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_102_NumFieldsSelect_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_102)
+
+  def run_test_102(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if (not conn):
+      print ibm_db.conn_errormsg()
+    
+    server = ibm_db.server_info( conn )
+    if ((server.DBMS_NAME[0:2] != "AS") and (server.DBMS_NAME != "DB2") and (server.DBMS_NAME[0:3] != "IDS")):
+      result = ibm_db.exec_immediate(conn, "VALUES(1)")
+      #throw :unsupported unless result
+      if (not result):
+        raise Exception('Unsupported')
+      print ibm_db.num_fields(result)
+    else:
+      print '1'
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#1
+#__ZOS_EXPECTED__
+#1
+#__SYSTEMI_EXPECTED__
+#1
+#__IDS_EXPECTED__
+#1
diff -pruN 0.3.0-3/tests/test_103_NumFieldsSelect_02.py 2.0.5-0ubuntu2/tests/test_103_NumFieldsSelect_02.py
--- 0.3.0-3/tests/test_103_NumFieldsSelect_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_103_NumFieldsSelect_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,82 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_103_NumFieldsSelect_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_103)
+
+  def run_test_103(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+       result = ibm_db.exec_immediate(conn, "select * from org, project order by project.projname")
+       cols = ibm_db.num_fields(result)
+       j = 1
+       row = ibm_db.fetch_tuple(result)
+       while ( row ):
+          print "%d) " % j
+          for i in range(0, cols):
+             print "%s " % row[i]
+          j += 1
+          if (j > 10):
+             break
+          row = ibm_db.fetch_tuple(result)
+       ibm_db.close(conn)
+    else:
+      print ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#1) 10 Head Office 160 Corporate New York AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#2) 15 New England 50 Eastern Boston AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#3) 20 Mid Atlantic 10 Eastern Washington AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#4) 38 South Atlantic 30 Eastern Atlanta AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#5) 42 Great Lakes 100 Midwest Chicago AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#6) 51 Plains 140 Midwest Dallas AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#7) 66 Pacific 270 Western San Francisco AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#8) 84 Mountain 290 Western Denver AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#9) 10 Head Office 160 Corporate New York AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#10) 15 New England 50 Eastern Boston AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#__ZOS_EXPECTED__
+#1) 84 Mountain 290 Western Denver AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#2) 66 Pacific 270 Western San Francisco AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#3) 51 Plains 140 Midwest Dallas AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#4) 42 Great Lakes 100 Midwest Chicago AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#5) 38 South Atlantic 30 Eastern Atlanta AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#6) 20 Mid Atlantic 10 Eastern Washington AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#7) 15 New England 50 Eastern Boston AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#8) 10 Head Office 160 Corporate New York AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#9) 84 Mountain 290 Western Denver AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#10) 66 Pacific 270 Western San Francisco AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#__SYSTEMI_EXPECTED__
+#1) 10 Head Office 160 Corporate New York AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#2) 15 New England 50 Eastern Boston AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#3) 20 Mid Atlantic 10 Eastern Washington AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#4) 38 South Atlantic 30 Eastern Atlanta AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#5) 42 Great Lakes 100 Midwest Chicago AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#6) 51 Plains 140 Midwest Dallas AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#7) 66 Pacific 270 Western San Francisco AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#8) 84 Mountain 290 Western Denver AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#9) 10 Head Office 160 Corporate New York AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#10) 15 New England 50 Eastern Boston AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#__IDS_EXPECTED__
+#1) 38 South Atlantic 30 Eastern Atlanta AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#2) 42 Great Lakes 100 Midwest Chicago AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#3) 10 Head Office 160 Corporate New York AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#4) 51 Plains 140 Midwest Dallas AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#5) 15 New England 50 Eastern Boston AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#6) 66 Pacific 270 Western San Francisco AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#7) 84 Mountain 290 Western Denver AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#8) 20 Mid Atlantic 10 Eastern Washington AD3113 ACCOUNT PROGRAMMING D21 000270 2.00 1982-01-01 1983-02-01 AD3110 
+#9) 15 New England 50 Eastern Boston AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
+#10) 20 Mid Atlantic 10 Eastern Washington AD3100 ADMIN SERVICES D01 000010 6.50 1982-01-01 1983-02-01        
diff -pruN 0.3.0-3/tests/test_110_FieldNum.py 2.0.5-0ubuntu2/tests/test_110_FieldNum.py
--- 0.3.0-3/tests/test_110_FieldNum.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_110_FieldNum.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,92 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_110_FieldNum(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_110)
+
+  def run_test_110(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals ORDER BY breed")
+    
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        num1 = ibm_db.field_num(stmt, "id")
+        num2 = ibm_db.field_num(stmt, "breed")
+        num3 = ibm_db.field_num(stmt, "name")
+        num4 = ibm_db.field_num(stmt, "weight")
+        num5 = ibm_db.field_num(stmt, "test")
+        num6 = ibm_db.field_num(stmt, 8)
+        num7 = ibm_db.field_num(stmt, 1)
+        num8 = ibm_db.field_num(stmt, "WEIGHT")
+      else:
+        num1 = ibm_db.field_num(stmt, "ID")
+        num2 = ibm_db.field_num(stmt, "BREED")
+        num3 = ibm_db.field_num(stmt, "NAME")
+        num4 = ibm_db.field_num(stmt, "WEIGHT")
+        num5 = ibm_db.field_num(stmt, "TEST")
+        num6 = ibm_db.field_num(stmt, 8)
+        num7 = ibm_db.field_num(stmt, 1)
+        num8 = ibm_db.field_num(stmt, "weight")
+      
+      print "int(%d)" % num1
+      print "int(%d)" % num2
+      print "int(%d)" % num3
+      print "int(%d)" % num4
+      
+      print "%s" % num5
+      print "%s" % num6
+      print "int(%d)" % num7
+      print "%s" % num8
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#int(3)
+#False
+#False
+#int(1)
+#False
+#__ZOS_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#int(3)
+#False
+#False
+#int(1)
+#False
+#__SYSTEMI_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#int(3)
+#False
+#False
+#int(1)
+#False
+#__IDS_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#int(3)
+#False
+#False
+#int(1)
+#False
diff -pruN 0.3.0-3/tests/test_111_FieldNumAddCol.py 2.0.5-0ubuntu2/tests/test_111_FieldNumAddCol.py
--- 0.3.0-3/tests/test_111_FieldNumAddCol.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_111_FieldNumAddCol.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,99 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_111_FieldNumAddCol(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_111)
+
+  def run_test_111(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      insert = "INSERT INTO animals values (7, 'cat', 'Benji', 5.1)"
+      ibm_db.exec_immediate(conn, insert)
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed")
+    
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        num1 = ibm_db.field_num(stmt, "id")
+        num2 = ibm_db.field_num(stmt, "breed")
+        num3 = ibm_db.field_num(stmt, "number")
+        num4 = ibm_db.field_num(stmt, "NUMBER")
+        num5 = ibm_db.field_num(stmt, "bREED")
+        num6 = ibm_db.field_num(stmt, 8)
+        num7 = ibm_db.field_num(stmt, 1)
+        num8 = ibm_db.field_num(stmt, "WEIGHT")
+      else:
+        num1 = ibm_db.field_num(stmt, "ID")
+        num2 = ibm_db.field_num(stmt, "BREED")
+        num3 = ibm_db.field_num(stmt, "NUMBER")
+        num4 = ibm_db.field_num(stmt, "number")
+        num5 = ibm_db.field_num(stmt, "Breed")
+        num6 = ibm_db.field_num(stmt, 8)
+        num7 = ibm_db.field_num(stmt, 1)
+        num8 = ibm_db.field_num(stmt, "weight")
+  
+      print "%s" % num1
+      print "int(%d)" % num2
+      print "int(%d)" % num3
+      print "%s" % num4
+      
+      print "%s" % num5
+      print "%s" % num6
+      print "int(%d)" % num7
+      print "%s" % num8
+
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#False
+#int(0)
+#int(1)
+#False
+#False
+#False
+#int(1)
+#False
+#__ZOS_EXPECTED__
+#False
+#int(0)
+#int(1)
+#False
+#False
+#False
+#int(1)
+#False
+#__SYSTEMI_EXPECTED__
+#False
+#int(0)
+#int(1)
+#False
+#False
+#False
+#int(1)
+#False
+#__IDS_EXPECTED__
+#False
+#int(0)
+#int(1)
+#False
+#False
+#False
+#int(1)
+#False
diff -pruN 0.3.0-3/tests/test_112_FieldNumDiffCaseColNames.py 2.0.5-0ubuntu2/tests/test_112_FieldNumDiffCaseColNames.py
--- 0.3.0-3/tests/test_112_FieldNumDiffCaseColNames.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_112_FieldNumDiffCaseColNames.py	2014-01-30 19:07:16.000000000 +0000
@@ -0,0 +1,63 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_112_FieldNumDiffCaseColNames(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_112)
+
+  def run_test_112(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if conn:
+      drop = "DROP TABLE ftest"
+      try:
+        ibm_db.exec_immediate( conn, drop )
+      except:
+        pass
+      
+      create = "CREATE TABLE ftest ( \"TEST\" INTEGER, \"test\" INTEGER, \"Test\" INTEGER  )"
+      ibm_db.exec_immediate(conn, create)
+      
+      insert = "INSERT INTO ftest VALUES (1,2,3)"
+      ibm_db.exec_immediate(conn, insert)
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT * FROM ftest")
+    
+      num1 = ibm_db.field_num(stmt, "TEST")
+      num2 = ibm_db.field_num(stmt, 'test')
+      num3 = ibm_db.field_num(stmt, 'Test')
+      
+      print "int(%d)" % num1
+      print "int(%d)" % num2
+      print "int(%d)" % num3
+      
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#__ZOS_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#__SYSTEMI_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
+#__IDS_EXPECTED__
+#int(0)
+#int(1)
+#int(2)
diff -pruN 0.3.0-3/tests/test_113_DateTest.py 2.0.5-0ubuntu2/tests/test_113_DateTest.py
--- 0.3.0-3/tests/test_113_DateTest.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_113_DateTest.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,76 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_113_DateTest(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_113)
+
+  def run_test_113(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      drop = "DROP TABLE datetest"
+      try:
+        ibm_db.exec_immediate( conn, drop )
+      except:
+        pass
+      
+      create = "CREATE TABLE datetest ( id INTEGER, mydate DATE )"
+      ibm_db.exec_immediate(conn, create)
+
+      server = ibm_db.server_info( conn )
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        insert = "INSERT INTO datetest (id, mydate) VALUES (1,'1982-03-27')"
+        ibm_db.exec_immediate(conn, insert)
+        insert = "INSERT INTO datetest (id, mydate) VALUES (2,'1981-07-08')"
+        ibm_db.exec_immediate(conn, insert)
+      else:
+        insert = "INSERT INTO datetest (id, mydate) VALUES (1,'1982-03-27')"
+        ibm_db.exec_immediate(conn, insert)
+        insert = "INSERT INTO datetest (id, mydate) VALUES (2,'1981-07-08')"
+        ibm_db.exec_immediate(conn, insert)
+      
+      stmt = ibm_db.prepare(conn, "SELECT * FROM datetest")
+      ibm_db.execute(stmt)
+
+      result = ibm_db.fetch_row( stmt )
+      while ( result ):
+        row0 = ibm_db.result(stmt, 0)
+        row1 = ibm_db.result(stmt, 1)
+        print row0
+        print row1
+        result = ibm_db.fetch_row( stmt )
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#1
+#1982-03-27
+#2
+#1981-07-08
+#__ZOS_EXPECTED__
+#1
+#1982-03-27
+#2
+#1981-07-08
+#__SYSTEMI_EXPECTED__
+#1
+#1982-03-27
+#2
+#1981-07-08
+#__IDS_EXPECTED__
+#1
+#1982-03-27
+#2
+#1981-07-08
diff -pruN 0.3.0-3/tests/test_114_NumericTest_01.py 2.0.5-0ubuntu2/tests/test_114_NumericTest_01.py
--- 0.3.0-3/tests/test_114_NumericTest_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_114_NumericTest_01.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,63 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_114_NumericTest_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_114)
+
+  def run_test_114(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      drop = "drop table numericliteral"
+
+      try:
+        ibm_db.exec_immediate( conn, drop )
+      except:
+        pass
+      
+      create = "create table numericliteral ( id INTEGER, num INTEGER )"
+      ibm_db.exec_immediate(conn, create)
+      
+      insert = "INSERT INTO numericliteral (id, num) values (1,5)"
+      ibm_db.exec_immediate(conn, insert)
+
+      insert = "UPDATE numericliteral SET num = '10' WHERE num = '5'"
+      ibm_db.exec_immediate(conn, insert)
+      
+      stmt = ibm_db.prepare(conn, "SELECT * FROM numericliteral")
+      ibm_db.execute(stmt)
+
+      result = ibm_db.fetch_row( stmt )
+      while ( result ):
+        row0 = ibm_db.result(stmt, 0)
+        row1 = ibm_db.result(stmt, 1)
+        print row0
+        print row1
+        result = ibm_db.fetch_row( stmt )
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#1
+#10
+#__ZOS_EXPECTED__
+#1
+#10
+#__SYSTEMI_EXPECTED__
+#1
+#10
+#__IDS_EXPECTED__
+#1
+#10
diff -pruN 0.3.0-3/tests/test_115_NumericTest_02.py 2.0.5-0ubuntu2/tests/test_115_NumericTest_02.py
--- 0.3.0-3/tests/test_115_NumericTest_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_115_NumericTest_02.py	2014-01-30 19:10:41.000000000 +0000
@@ -0,0 +1,85 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_115_NumericTest_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_115)
+
+  def run_test_115(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+    
+    if conn:
+      drop = "drop table numericliteral"
+      try:
+        ibm_db.exec_immediate( conn, drop )
+      except:
+        pass
+
+      create = "create table numericliteral ( id INTEGER, data VARCHAR(50) )"
+      ibm_db.exec_immediate(conn, create)
+
+      insert = "INSERT INTO numericliteral (id, data) values (12, 'NUMERIC LITERAL TEST')"
+      ibm_db.exec_immediate(conn, insert)
+
+      stmt = ibm_db.prepare(conn, "SELECT data FROM numericliteral")
+      ibm_db.execute(stmt)
+      
+#      NOTE: This is a workaround
+#      function fetch_object() to be implemented...
+#      row = ibm_db.fetch_object(stmt, 0)
+      
+      class Row:
+          pass
+      
+      row = Row()
+      ibm_db.fetch_row(stmt, 0)
+      if (server.DBMS_NAME[0:3] != 'IDS'):
+        row.DATA = ibm_db.result(stmt, 'DATA')
+      else:
+        row.DATA = ibm_db.result(stmt, 'data')
+      print row.DATA
+
+      insert = "UPDATE numericliteral SET data = '@@@@@@@@@@' WHERE id = '12'"
+      ibm_db.exec_immediate(conn, insert)
+
+      stmt = ibm_db.prepare(conn, "SELECT data FROM numericliteral")
+      ibm_db.execute(stmt)
+      
+#      row = ibm_db.fetch_object(stmt, 0)
+      ibm_db.fetch_row(stmt, 0)
+      if (server.DBMS_NAME[0:3] != 'IDS'):
+        row.DATA = ibm_db.result(stmt, 'DATA')
+      else:
+        row.DATA = ibm_db.result(stmt, 'data')
+      print row.DATA
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#NUMERIC LITERAL TEST
+#@@@@@@@@@@
+#__ZOS_EXPECTED__
+#NUMERIC LITERAL TEST
+#@@@@@@@@@@
+#__SYSTEMI_EXPECTED__
+#NUMERIC LITERAL TEST
+#@@@@@@@@@@
+#__IDS_EXPECTED__
+#NUMERIC LITERAL TEST
+#@@@@@@@@@@
diff -pruN 0.3.0-3/tests/test_116_ConnActive.py 2.0.5-0ubuntu2/tests/test_116_ConnActive.py
--- 0.3.0-3/tests/test_116_ConnActive.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_116_ConnActive.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,89 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_116_ConnActive(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_116)
+
+  def run_test_116(self):
+    conn = None
+    is_alive = ibm_db.active(conn)
+    if is_alive:
+      print "Is active"
+    else:
+      print "Is not active"
+
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    is_alive = ibm_db.active(conn)
+    if is_alive:
+      print "Is active"
+    else:
+      print "Is not active"
+
+    ibm_db.close(conn)
+    is_alive = ibm_db.active(conn)
+    if is_alive:
+      print "Is active"
+    else:
+      print "Is not active"
+
+    # Executing active method multiple times to reproduce a customer reported defect
+    print ibm_db.active(conn)
+    print ibm_db.active(conn)
+    print ibm_db.active(conn)
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    print ibm_db.active(conn)
+    print ibm_db.active(conn)
+    print ibm_db.active(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#Is not active
+#Is active
+#Is not active
+#False
+#False
+#False
+#True
+#True
+#True
+#__ZOS_EXPECTED__
+#Is not active
+#Is active
+#Is not active
+#False
+#False
+#False
+#True
+#True
+#True
+#__SYSTEMI_EXPECTED__
+#Is not active
+#Is active
+#Is not active
+#False
+#False
+#False
+#True
+#True
+#True
+#__IDS_EXPECTED__
+#Is not active
+#Is active
+#Is not active
+#False
+#False
+#False
+#True
+#True
+#True
diff -pruN 0.3.0-3/tests/test_120_FieldName.py 2.0.5-0ubuntu2/tests/test_120_FieldName.py
--- 0.3.0-3/tests/test_120_FieldName.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_120_FieldName.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,87 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_120_FieldName(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_120)
+
+  def run_test_120(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      stmt = ibm_db.exec_immediate(conn, "SELECT * FROM animals")
+    
+      name1 = ibm_db.field_name(stmt, 1)
+      name2 = ibm_db.field_name(stmt, 2)
+      name3 = ibm_db.field_name(stmt, 3)
+      name4 = ibm_db.field_name(stmt, 4)
+      name6 = ibm_db.field_name(stmt, 8)
+      name7 = ibm_db.field_name(stmt, 0)
+      
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        name5 = ibm_db.field_name(stmt, "id")
+        name8 = ibm_db.field_name(stmt, "WEIGHT")
+      else:
+        name5 = ibm_db.field_name(stmt, "ID")
+        name8 = ibm_db.field_name(stmt, "weight")
+
+      print "string(%d) \"%s\"" % (len(name1), name1)
+      print "string(%d) \"%s\"" % (len(name2), name2)
+      print "string(%d) \"%s\"" % (len(name3), name3)
+      print "%s" % name4
+
+      print "string(%d) \"%s\"" % (len(name5), name5)
+      print "%s" % name6
+      print "string(%d) \"%s\"" % (len(name7), name7)
+      print "%s" % name8
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(5) "BREED"
+#string(4) "NAME"
+#string(6) "WEIGHT"
+#False
+#string(2) "ID"
+#False
+#string(2) "ID"
+#False
+#__ZOS_EXPECTED__
+#string(5) "BREED"
+#string(4) "NAME"
+#string(6) "WEIGHT"
+#False
+#string(2) "ID"
+#False
+#string(2) "ID"
+#False
+#__SYSTEMI_EXPECTED__
+#string(5) "BREED"
+#string(4) "NAME"
+#string(6) "WEIGHT"
+#False
+#string(2) "ID"
+#False
+#string(2) "ID"
+#False
+#__IDS_EXPECTED__
+#string(5) "breed"
+#string(4) "name"
+#string(6) "weight"
+#False
+#string(2) "id"
+#False
+#string(2) "id"
+#False
diff -pruN 0.3.0-3/tests/test_121_FieldNameAddCol.py 2.0.5-0ubuntu2/tests/test_121_FieldNameAddCol.py
--- 0.3.0-3/tests/test_121_FieldNameAddCol.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_121_FieldNameAddCol.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,96 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_121_FieldNameAddCol(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_121)
+
+  def run_test_121(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      insert = "INSERT INTO animals values (7, 'cat', 'Benji', 5.1)"
+      ibm_db.exec_immediate(conn, insert)
+        
+      stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed")
+    
+      name1 = ibm_db.field_name(stmt, 0)
+      name2 = ibm_db.field_name(stmt, 1)
+      name3 = ibm_db.field_name(stmt, 2)
+      name4 = ibm_db.field_name(stmt, 3)
+      
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        name5 = ibm_db.field_name(stmt, "breed")
+        name6 = ibm_db.field_name(stmt, 7)
+        name7 = ibm_db.field_name(stmt, '"nUMBER"')
+        name8 = ibm_db.field_name(stmt, "number")
+      else:
+        name5 = ibm_db.field_name(stmt, "BREED")
+        name6 = ibm_db.field_name(stmt, 7)
+        name7 = ibm_db.field_name(stmt, '"Number"')
+        name8 = ibm_db.field_name(stmt, "NUMBER")
+      
+      print "string(%d) \"%s\"" % (len(name1), name1)
+      print "string(%d) \"%s\"" % (len(name2), name2)
+      print "%s" % name3
+      print "%s" % name4
+
+      print "string(%d) \"%s\"" % (len(name5), name5)
+      print "%s" % name6
+      print "%s" % name7
+      print "string(%d) \"%s\"" % (len(name8), name8)
+
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(5) "BREED"
+#string(6) "NUMBER"
+#False
+#False
+#string(5) "BREED"
+#False
+#False
+#string(6) "NUMBER"
+#__ZOS_EXPECTED__
+#string(5) "BREED"
+#string(6) "NUMBER"
+#False
+#False
+#string(5) "BREED"
+#False
+#False
+#string(6) "NUMBER"
+#__SYSTEMI_EXPECTED__
+#string(5) "BREED"
+#string(6) "NUMBER"
+#False
+#False
+#string(5) "BREED"
+#False
+#False
+#string(6) "NUMBER"
+#__IDS_EXPECTED__
+#string(5) "breed"
+#string(6) "number"
+#False
+#False
+#string(5) "breed"
+#False
+#False
+#string(6) "number"
diff -pruN 0.3.0-3/tests/test_122_FieldNameDiffCaseColNames.py 2.0.5-0ubuntu2/tests/test_122_FieldNameDiffCaseColNames.py
--- 0.3.0-3/tests/test_122_FieldNameDiffCaseColNames.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_122_FieldNameDiffCaseColNames.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,83 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_122_FieldNameDiffCaseColNames(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_122)
+
+  def run_test_122(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if conn:
+      drop = "drop table ftest"
+      try:
+        ibm_db.exec_immediate( conn, drop )
+      except:
+        pass
+      
+      create = "create table ftest ( \"TEST\" integer, \"test\" integer, \"Test\" integer  )"
+      ibm_db.exec_immediate(conn, create)
+      
+      insert = "INSERT INTO ftest values (1,2,3)"
+      ibm_db.exec_immediate(conn, insert)
+      
+      stmt = ibm_db.exec_immediate(conn, "SELECT * FROM ftest")
+    
+      num1 = ibm_db.field_name(stmt, 0)
+      num2 = ibm_db.field_name(stmt, 1)
+      num3 = ibm_db.field_name(stmt, 2)
+      
+      num4 = ibm_db.field_name(stmt, "TEST")
+      num5 = ibm_db.field_name(stmt, 'test')
+      num6 = ibm_db.field_name(stmt, 'Test')
+
+      print "string(%d) \"%s\"" % (len(num1), num1)
+      print "string(%d) \"%s\"" % (len(num2), num2)
+      print "string(%d) \"%s\"" % (len(num3), num3)
+
+      print "string(%d) \"%s\"" % (len(num4), num4)
+      print "string(%d) \"%s\"" % (len(num5), num5)
+      print "string(%d) \"%s\"" % (len(num6), num6)
+      
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#__ZOS_EXPECTED__
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#__SYSTEMI_EXPECTED__
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#__IDS_EXPECTED__
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
+#string(4) "TEST"
+#string(4) "test"
+#string(4) "Test"
diff -pruN 0.3.0-3/tests/test_123_FieldNamePos_01.py 2.0.5-0ubuntu2/tests/test_123_FieldNamePos_01.py
--- 0.3.0-3/tests/test_123_FieldNamePos_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_123_FieldNamePos_01.py	2014-01-30 19:12:33.000000000 +0000
@@ -0,0 +1,13526 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_123_FieldNamePos_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_123)
+
+  def run_test_123(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+  
+    arr = [\
+    	    "HAAS",\
+	    "THOMPSON",\
+	    "KWAN",\
+	    "GEYER",\
+	    "STERN",\
+	    "PULASKI",\
+	    "HENDERSON",\
+	    "SPENSER",\
+	    "LUCCHESSI",\
+	    "O'CONNELL",\
+	    "QUINTANA",\
+	    "NICHOLLS",\
+	    "ADAMSON",\
+	    "PIANKA",\
+	    "YOSHIMURA",\
+	    "SCOUTTEN",\
+	    "WALKER",\
+	    "BROWN",\
+	    "JONES",\
+	    "LUTZ",\
+	    "JEFFERSON",\
+	    "MARINO",\
+	    "SMITH",\
+	    "JOHNSON",\
+	    "PEREZ",\
+	    "SCHNEIDER",\
+	    "PARKER",\
+	    "SMITH",\
+	    "SETRIGHT",\
+	    "MEHTA",\
+	    "LEE",\
+	    "GOUNOT"\
+	]
+
+    if conn:
+      result = ibm_db.exec_immediate(conn, "select staff.id, employee.lastname from staff, employee order by employee.lastname,staff.id")
+      cols = ibm_db.num_fields(result)
+      j = 0
+      row = ibm_db.fetch_both(result)
+      while ( row ):
+        for i in range(0, cols):
+          #print "#{ibm_db.field_name(result,i)}:" % (ibm_db.field_name(result, i), row[ibm_db.field_name(result, i)])
+          #puts row[ibm_db.field_name(result,i)]
+          print "%s:%s" % (ibm_db.field_name(result, i), row[ibm_db.field_name(result, i)])
+        print "---------";    
+        j += 1
+        row = ibm_db.fetch_both(result)
+      ibm_db.close(conn)
+    else:
+      print ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#
+#ID:10
+#LASTNAME:ADAMSON
+#---------
+#ID:20
+#LASTNAME:ADAMSON
+#---------
+#ID:30
+#LASTNAME:ADAMSON
+#---------
+#ID:40
+#LASTNAME:ADAMSON
+#---------
+#ID:50
+#LASTNAME:ADAMSON
+#---------
+#ID:60
+#LASTNAME:ADAMSON
+#---------
+#ID:70
+#LASTNAME:ADAMSON
+#---------
+#ID:80
+#LASTNAME:ADAMSON
+#---------
+#ID:90
+#LASTNAME:ADAMSON
+#---------
+#ID:100
+#LASTNAME:ADAMSON
+#---------
+#ID:110
+#LASTNAME:ADAMSON
+#---------
+#ID:120
+#LASTNAME:ADAMSON
+#---------
+#ID:130
+#LASTNAME:ADAMSON
+#---------
+#ID:140
+#LASTNAME:ADAMSON
+#---------
+#ID:150
+#LASTNAME:ADAMSON
+#---------
+#ID:160
+#LASTNAME:ADAMSON
+#---------
+#ID:170
+#LASTNAME:ADAMSON
+#---------
+#ID:180
+#LASTNAME:ADAMSON
+#---------
+#ID:190
+#LASTNAME:ADAMSON
+#---------
+#ID:200
+#LASTNAME:ADAMSON
+#---------
+#ID:210
+#LASTNAME:ADAMSON
+#---------
+#ID:220
+#LASTNAME:ADAMSON
+#---------
+#ID:230
+#LASTNAME:ADAMSON
+#---------
+#ID:240
+#LASTNAME:ADAMSON
+#---------
+#ID:250
+#LASTNAME:ADAMSON
+#---------
+#ID:260
+#LASTNAME:ADAMSON
+#---------
+#ID:270
+#LASTNAME:ADAMSON
+#---------
+#ID:280
+#LASTNAME:ADAMSON
+#---------
+#ID:290
+#LASTNAME:ADAMSON
+#---------
+#ID:300
+#LASTNAME:ADAMSON
+#---------
+#ID:310
+#LASTNAME:ADAMSON
+#---------
+#ID:320
+#LASTNAME:ADAMSON
+#---------
+#ID:330
+#LASTNAME:ADAMSON
+#---------
+#ID:340
+#LASTNAME:ADAMSON
+#---------
+#ID:350
+#LASTNAME:ADAMSON
+#---------
+#ID:10
+#LASTNAME:BROWN
+#---------
+#ID:20
+#LASTNAME:BROWN
+#---------
+#ID:30
+#LASTNAME:BROWN
+#---------
+#ID:40
+#LASTNAME:BROWN
+#---------
+#ID:50
+#LASTNAME:BROWN
+#---------
+#ID:60
+#LASTNAME:BROWN
+#---------
+#ID:70
+#LASTNAME:BROWN
+#---------
+#ID:80
+#LASTNAME:BROWN
+#---------
+#ID:90
+#LASTNAME:BROWN
+#---------
+#ID:100
+#LASTNAME:BROWN
+#---------
+#ID:110
+#LASTNAME:BROWN
+#---------
+#ID:120
+#LASTNAME:BROWN
+#---------
+#ID:130
+#LASTNAME:BROWN
+#---------
+#ID:140
+#LASTNAME:BROWN
+#---------
+#ID:150
+#LASTNAME:BROWN
+#---------
+#ID:160
+#LASTNAME:BROWN
+#---------
+#ID:170
+#LASTNAME:BROWN
+#---------
+#ID:180
+#LASTNAME:BROWN
+#---------
+#ID:190
+#LASTNAME:BROWN
+#---------
+#ID:200
+#LASTNAME:BROWN
+#---------
+#ID:210
+#LASTNAME:BROWN
+#---------
+#ID:220
+#LASTNAME:BROWN
+#---------
+#ID:230
+#LASTNAME:BROWN
+#---------
+#ID:240
+#LASTNAME:BROWN
+#---------
+#ID:250
+#LASTNAME:BROWN
+#---------
+#ID:260
+#LASTNAME:BROWN
+#---------
+#ID:270
+#LASTNAME:BROWN
+#---------
+#ID:280
+#LASTNAME:BROWN
+#---------
+#ID:290
+#LASTNAME:BROWN
+#---------
+#ID:300
+#LASTNAME:BROWN
+#---------
+#ID:310
+#LASTNAME:BROWN
+#---------
+#ID:320
+#LASTNAME:BROWN
+#---------
+#ID:330
+#LASTNAME:BROWN
+#---------
+#ID:340
+#LASTNAME:BROWN
+#---------
+#ID:350
+#LASTNAME:BROWN
+#---------
+#ID:10
+#LASTNAME:GEYER
+#---------
+#ID:20
+#LASTNAME:GEYER
+#---------
+#ID:30
+#LASTNAME:GEYER
+#---------
+#ID:40
+#LASTNAME:GEYER
+#---------
+#ID:50
+#LASTNAME:GEYER
+#---------
+#ID:60
+#LASTNAME:GEYER
+#---------
+#ID:70
+#LASTNAME:GEYER
+#---------
+#ID:80
+#LASTNAME:GEYER
+#---------
+#ID:90
+#LASTNAME:GEYER
+#---------
+#ID:100
+#LASTNAME:GEYER
+#---------
+#ID:110
+#LASTNAME:GEYER
+#---------
+#ID:120
+#LASTNAME:GEYER
+#---------
+#ID:130
+#LASTNAME:GEYER
+#---------
+#ID:140
+#LASTNAME:GEYER
+#---------
+#ID:150
+#LASTNAME:GEYER
+#---------
+#ID:160
+#LASTNAME:GEYER
+#---------
+#ID:170
+#LASTNAME:GEYER
+#---------
+#ID:180
+#LASTNAME:GEYER
+#---------
+#ID:190
+#LASTNAME:GEYER
+#---------
+#ID:200
+#LASTNAME:GEYER
+#---------
+#ID:210
+#LASTNAME:GEYER
+#---------
+#ID:220
+#LASTNAME:GEYER
+#---------
+#ID:230
+#LASTNAME:GEYER
+#---------
+#ID:240
+#LASTNAME:GEYER
+#---------
+#ID:250
+#LASTNAME:GEYER
+#---------
+#ID:260
+#LASTNAME:GEYER
+#---------
+#ID:270
+#LASTNAME:GEYER
+#---------
+#ID:280
+#LASTNAME:GEYER
+#---------
+#ID:290
+#LASTNAME:GEYER
+#---------
+#ID:300
+#LASTNAME:GEYER
+#---------
+#ID:310
+#LASTNAME:GEYER
+#---------
+#ID:320
+#LASTNAME:GEYER
+#---------
+#ID:330
+#LASTNAME:GEYER
+#---------
+#ID:340
+#LASTNAME:GEYER
+#---------
+#ID:350
+#LASTNAME:GEYER
+#---------
+#ID:10
+#LASTNAME:GOUNOT
+#---------
+#ID:20
+#LASTNAME:GOUNOT
+#---------
+#ID:30
+#LASTNAME:GOUNOT
+#---------
+#ID:40
+#LASTNAME:GOUNOT
+#---------
+#ID:50
+#LASTNAME:GOUNOT
+#---------
+#ID:60
+#LASTNAME:GOUNOT
+#---------
+#ID:70
+#LASTNAME:GOUNOT
+#---------
+#ID:80
+#LASTNAME:GOUNOT
+#---------
+#ID:90
+#LASTNAME:GOUNOT
+#---------
+#ID:100
+#LASTNAME:GOUNOT
+#---------
+#ID:110
+#LASTNAME:GOUNOT
+#---------
+#ID:120
+#LASTNAME:GOUNOT
+#---------
+#ID:130
+#LASTNAME:GOUNOT
+#---------
+#ID:140
+#LASTNAME:GOUNOT
+#---------
+#ID:150
+#LASTNAME:GOUNOT
+#---------
+#ID:160
+#LASTNAME:GOUNOT
+#---------
+#ID:170
+#LASTNAME:GOUNOT
+#---------
+#ID:180
+#LASTNAME:GOUNOT
+#---------
+#ID:190
+#LASTNAME:GOUNOT
+#---------
+#ID:200
+#LASTNAME:GOUNOT
+#---------
+#ID:210
+#LASTNAME:GOUNOT
+#---------
+#ID:220
+#LASTNAME:GOUNOT
+#---------
+#ID:230
+#LASTNAME:GOUNOT
+#---------
+#ID:240
+#LASTNAME:GOUNOT
+#---------
+#ID:250
+#LASTNAME:GOUNOT
+#---------
+#ID:260
+#LASTNAME:GOUNOT
+#---------
+#ID:270
+#LASTNAME:GOUNOT
+#---------
+#ID:280
+#LASTNAME:GOUNOT
+#---------
+#ID:290
+#LASTNAME:GOUNOT
+#---------
+#ID:300
+#LASTNAME:GOUNOT
+#---------
+#ID:310
+#LASTNAME:GOUNOT
+#---------
+#ID:320
+#LASTNAME:GOUNOT
+#---------
+#ID:330
+#LASTNAME:GOUNOT
+#---------
+#ID:340
+#LASTNAME:GOUNOT
+#---------
+#ID:350
+#LASTNAME:GOUNOT
+#---------
+#ID:10
+#LASTNAME:HAAS
+#---------
+#ID:20
+#LASTNAME:HAAS
+#---------
+#ID:30
+#LASTNAME:HAAS
+#---------
+#ID:40
+#LASTNAME:HAAS
+#---------
+#ID:50
+#LASTNAME:HAAS
+#---------
+#ID:60
+#LASTNAME:HAAS
+#---------
+#ID:70
+#LASTNAME:HAAS
+#---------
+#ID:80
+#LASTNAME:HAAS
+#---------
+#ID:90
+#LASTNAME:HAAS
+#---------
+#ID:100
+#LASTNAME:HAAS
+#---------
+#ID:110
+#LASTNAME:HAAS
+#---------
+#ID:120
+#LASTNAME:HAAS
+#---------
+#ID:130
+#LASTNAME:HAAS
+#---------
+#ID:140
+#LASTNAME:HAAS
+#---------
+#ID:150
+#LASTNAME:HAAS
+#---------
+#ID:160
+#LASTNAME:HAAS
+#---------
+#ID:170
+#LASTNAME:HAAS
+#---------
+#ID:180
+#LASTNAME:HAAS
+#---------
+#ID:190
+#LASTNAME:HAAS
+#---------
+#ID:200
+#LASTNAME:HAAS
+#---------
+#ID:210
+#LASTNAME:HAAS
+#---------
+#ID:220
+#LASTNAME:HAAS
+#---------
+#ID:230
+#LASTNAME:HAAS
+#---------
+#ID:240
+#LASTNAME:HAAS
+#---------
+#ID:250
+#LASTNAME:HAAS
+#---------
+#ID:260
+#LASTNAME:HAAS
+#---------
+#ID:270
+#LASTNAME:HAAS
+#---------
+#ID:280
+#LASTNAME:HAAS
+#---------
+#ID:290
+#LASTNAME:HAAS
+#---------
+#ID:300
+#LASTNAME:HAAS
+#---------
+#ID:310
+#LASTNAME:HAAS
+#---------
+#ID:320
+#LASTNAME:HAAS
+#---------
+#ID:330
+#LASTNAME:HAAS
+#---------
+#ID:340
+#LASTNAME:HAAS
+#---------
+#ID:350
+#LASTNAME:HAAS
+#---------
+#ID:10
+#LASTNAME:HENDERSON
+#---------
+#ID:20
+#LASTNAME:HENDERSON
+#---------
+#ID:30
+#LASTNAME:HENDERSON
+#---------
+#ID:40
+#LASTNAME:HENDERSON
+#---------
+#ID:50
+#LASTNAME:HENDERSON
+#---------
+#ID:60
+#LASTNAME:HENDERSON
+#---------
+#ID:70
+#LASTNAME:HENDERSON
+#---------
+#ID:80
+#LASTNAME:HENDERSON
+#---------
+#ID:90
+#LASTNAME:HENDERSON
+#---------
+#ID:100
+#LASTNAME:HENDERSON
+#---------
+#ID:110
+#LASTNAME:HENDERSON
+#---------
+#ID:120
+#LASTNAME:HENDERSON
+#---------
+#ID:130
+#LASTNAME:HENDERSON
+#---------
+#ID:140
+#LASTNAME:HENDERSON
+#---------
+#ID:150
+#LASTNAME:HENDERSON
+#---------
+#ID:160
+#LASTNAME:HENDERSON
+#---------
+#ID:170
+#LASTNAME:HENDERSON
+#---------
+#ID:180
+#LASTNAME:HENDERSON
+#---------
+#ID:190
+#LASTNAME:HENDERSON
+#---------
+#ID:200
+#LASTNAME:HENDERSON
+#---------
+#ID:210
+#LASTNAME:HENDERSON
+#---------
+#ID:220
+#LASTNAME:HENDERSON
+#---------
+#ID:230
+#LASTNAME:HENDERSON
+#---------
+#ID:240
+#LASTNAME:HENDERSON
+#---------
+#ID:250
+#LASTNAME:HENDERSON
+#---------
+#ID:260
+#LASTNAME:HENDERSON
+#---------
+#ID:270
+#LASTNAME:HENDERSON
+#---------
+#ID:280
+#LASTNAME:HENDERSON
+#---------
+#ID:290
+#LASTNAME:HENDERSON
+#---------
+#ID:300
+#LASTNAME:HENDERSON
+#---------
+#ID:310
+#LASTNAME:HENDERSON
+#---------
+#ID:320
+#LASTNAME:HENDERSON
+#---------
+#ID:330
+#LASTNAME:HENDERSON
+#---------
+#ID:340
+#LASTNAME:HENDERSON
+#---------
+#ID:350
+#LASTNAME:HENDERSON
+#---------
+#ID:10
+#LASTNAME:JEFFERSON
+#---------
+#ID:20
+#LASTNAME:JEFFERSON
+#---------
+#ID:30
+#LASTNAME:JEFFERSON
+#---------
+#ID:40
+#LASTNAME:JEFFERSON
+#---------
+#ID:50
+#LASTNAME:JEFFERSON
+#---------
+#ID:60
+#LASTNAME:JEFFERSON
+#---------
+#ID:70
+#LASTNAME:JEFFERSON
+#---------
+#ID:80
+#LASTNAME:JEFFERSON
+#---------
+#ID:90
+#LASTNAME:JEFFERSON
+#---------
+#ID:100
+#LASTNAME:JEFFERSON
+#---------
+#ID:110
+#LASTNAME:JEFFERSON
+#---------
+#ID:120
+#LASTNAME:JEFFERSON
+#---------
+#ID:130
+#LASTNAME:JEFFERSON
+#---------
+#ID:140
+#LASTNAME:JEFFERSON
+#---------
+#ID:150
+#LASTNAME:JEFFERSON
+#---------
+#ID:160
+#LASTNAME:JEFFERSON
+#---------
+#ID:170
+#LASTNAME:JEFFERSON
+#---------
+#ID:180
+#LASTNAME:JEFFERSON
+#---------
+#ID:190
+#LASTNAME:JEFFERSON
+#---------
+#ID:200
+#LASTNAME:JEFFERSON
+#---------
+#ID:210
+#LASTNAME:JEFFERSON
+#---------
+#ID:220
+#LASTNAME:JEFFERSON
+#---------
+#ID:230
+#LASTNAME:JEFFERSON
+#---------
+#ID:240
+#LASTNAME:JEFFERSON
+#---------
+#ID:250
+#LASTNAME:JEFFERSON
+#---------
+#ID:260
+#LASTNAME:JEFFERSON
+#---------
+#ID:270
+#LASTNAME:JEFFERSON
+#---------
+#ID:280
+#LASTNAME:JEFFERSON
+#---------
+#ID:290
+#LASTNAME:JEFFERSON
+#---------
+#ID:300
+#LASTNAME:JEFFERSON
+#---------
+#ID:310
+#LASTNAME:JEFFERSON
+#---------
+#ID:320
+#LASTNAME:JEFFERSON
+#---------
+#ID:330
+#LASTNAME:JEFFERSON
+#---------
+#ID:340
+#LASTNAME:JEFFERSON
+#---------
+#ID:350
+#LASTNAME:JEFFERSON
+#---------
+#ID:10
+#LASTNAME:JOHNSON
+#---------
+#ID:20
+#LASTNAME:JOHNSON
+#---------
+#ID:30
+#LASTNAME:JOHNSON
+#---------
+#ID:40
+#LASTNAME:JOHNSON
+#---------
+#ID:50
+#LASTNAME:JOHNSON
+#---------
+#ID:60
+#LASTNAME:JOHNSON
+#---------
+#ID:70
+#LASTNAME:JOHNSON
+#---------
+#ID:80
+#LASTNAME:JOHNSON
+#---------
+#ID:90
+#LASTNAME:JOHNSON
+#---------
+#ID:100
+#LASTNAME:JOHNSON
+#---------
+#ID:110
+#LASTNAME:JOHNSON
+#---------
+#ID:120
+#LASTNAME:JOHNSON
+#---------
+#ID:130
+#LASTNAME:JOHNSON
+#---------
+#ID:140
+#LASTNAME:JOHNSON
+#---------
+#ID:150
+#LASTNAME:JOHNSON
+#---------
+#ID:160
+#LASTNAME:JOHNSON
+#---------
+#ID:170
+#LASTNAME:JOHNSON
+#---------
+#ID:180
+#LASTNAME:JOHNSON
+#---------
+#ID:190
+#LASTNAME:JOHNSON
+#---------
+#ID:200
+#LASTNAME:JOHNSON
+#---------
+#ID:210
+#LASTNAME:JOHNSON
+#---------
+#ID:220
+#LASTNAME:JOHNSON
+#---------
+#ID:230
+#LASTNAME:JOHNSON
+#---------
+#ID:240
+#LASTNAME:JOHNSON
+#---------
+#ID:250
+#LASTNAME:JOHNSON
+#---------
+#ID:260
+#LASTNAME:JOHNSON
+#---------
+#ID:270
+#LASTNAME:JOHNSON
+#---------
+#ID:280
+#LASTNAME:JOHNSON
+#---------
+#ID:290
+#LASTNAME:JOHNSON
+#---------
+#ID:300
+#LASTNAME:JOHNSON
+#---------
+#ID:310
+#LASTNAME:JOHNSON
+#---------
+#ID:320
+#LASTNAME:JOHNSON
+#---------
+#ID:330
+#LASTNAME:JOHNSON
+#---------
+#ID:340
+#LASTNAME:JOHNSON
+#---------
+#ID:350
+#LASTNAME:JOHNSON
+#---------
+#ID:10
+#LASTNAME:JONES
+#---------
+#ID:20
+#LASTNAME:JONES
+#---------
+#ID:30
+#LASTNAME:JONES
+#---------
+#ID:40
+#LASTNAME:JONES
+#---------
+#ID:50
+#LASTNAME:JONES
+#---------
+#ID:60
+#LASTNAME:JONES
+#---------
+#ID:70
+#LASTNAME:JONES
+#---------
+#ID:80
+#LASTNAME:JONES
+#---------
+#ID:90
+#LASTNAME:JONES
+#---------
+#ID:100
+#LASTNAME:JONES
+#---------
+#ID:110
+#LASTNAME:JONES
+#---------
+#ID:120
+#LASTNAME:JONES
+#---------
+#ID:130
+#LASTNAME:JONES
+#---------
+#ID:140
+#LASTNAME:JONES
+#---------
+#ID:150
+#LASTNAME:JONES
+#---------
+#ID:160
+#LASTNAME:JONES
+#---------
+#ID:170
+#LASTNAME:JONES
+#---------
+#ID:180
+#LASTNAME:JONES
+#---------
+#ID:190
+#LASTNAME:JONES
+#---------
+#ID:200
+#LASTNAME:JONES
+#---------
+#ID:210
+#LASTNAME:JONES
+#---------
+#ID:220
+#LASTNAME:JONES
+#---------
+#ID:230
+#LASTNAME:JONES
+#---------
+#ID:240
+#LASTNAME:JONES
+#---------
+#ID:250
+#LASTNAME:JONES
+#---------
+#ID:260
+#LASTNAME:JONES
+#---------
+#ID:270
+#LASTNAME:JONES
+#---------
+#ID:280
+#LASTNAME:JONES
+#---------
+#ID:290
+#LASTNAME:JONES
+#---------
+#ID:300
+#LASTNAME:JONES
+#---------
+#ID:310
+#LASTNAME:JONES
+#---------
+#ID:320
+#LASTNAME:JONES
+#---------
+#ID:330
+#LASTNAME:JONES
+#---------
+#ID:340
+#LASTNAME:JONES
+#---------
+#ID:350
+#LASTNAME:JONES
+#---------
+#ID:10
+#LASTNAME:KWAN
+#---------
+#ID:20
+#LASTNAME:KWAN
+#---------
+#ID:30
+#LASTNAME:KWAN
+#---------
+#ID:40
+#LASTNAME:KWAN
+#---------
+#ID:50
+#LASTNAME:KWAN
+#---------
+#ID:60
+#LASTNAME:KWAN
+#---------
+#ID:70
+#LASTNAME:KWAN
+#---------
+#ID:80
+#LASTNAME:KWAN
+#---------
+#ID:90
+#LASTNAME:KWAN
+#---------
+#ID:100
+#LASTNAME:KWAN
+#---------
+#ID:110
+#LASTNAME:KWAN
+#---------
+#ID:120
+#LASTNAME:KWAN
+#---------
+#ID:130
+#LASTNAME:KWAN
+#---------
+#ID:140
+#LASTNAME:KWAN
+#---------
+#ID:150
+#LASTNAME:KWAN
+#---------
+#ID:160
+#LASTNAME:KWAN
+#---------
+#ID:170
+#LASTNAME:KWAN
+#---------
+#ID:180
+#LASTNAME:KWAN
+#---------
+#ID:190
+#LASTNAME:KWAN
+#---------
+#ID:200
+#LASTNAME:KWAN
+#---------
+#ID:210
+#LASTNAME:KWAN
+#---------
+#ID:220
+#LASTNAME:KWAN
+#---------
+#ID:230
+#LASTNAME:KWAN
+#---------
+#ID:240
+#LASTNAME:KWAN
+#---------
+#ID:250
+#LASTNAME:KWAN
+#---------
+#ID:260
+#LASTNAME:KWAN
+#---------
+#ID:270
+#LASTNAME:KWAN
+#---------
+#ID:280
+#LASTNAME:KWAN
+#---------
+#ID:290
+#LASTNAME:KWAN
+#---------
+#ID:300
+#LASTNAME:KWAN
+#---------
+#ID:310
+#LASTNAME:KWAN
+#---------
+#ID:320
+#LASTNAME:KWAN
+#---------
+#ID:330
+#LASTNAME:KWAN
+#---------
+#ID:340
+#LASTNAME:KWAN
+#---------
+#ID:350
+#LASTNAME:KWAN
+#---------
+#ID:10
+#LASTNAME:LEE
+#---------
+#ID:20
+#LASTNAME:LEE
+#---------
+#ID:30
+#LASTNAME:LEE
+#---------
+#ID:40
+#LASTNAME:LEE
+#---------
+#ID:50
+#LASTNAME:LEE
+#---------
+#ID:60
+#LASTNAME:LEE
+#---------
+#ID:70
+#LASTNAME:LEE
+#---------
+#ID:80
+#LASTNAME:LEE
+#---------
+#ID:90
+#LASTNAME:LEE
+#---------
+#ID:100
+#LASTNAME:LEE
+#---------
+#ID:110
+#LASTNAME:LEE
+#---------
+#ID:120
+#LASTNAME:LEE
+#---------
+#ID:130
+#LASTNAME:LEE
+#---------
+#ID:140
+#LASTNAME:LEE
+#---------
+#ID:150
+#LASTNAME:LEE
+#---------
+#ID:160
+#LASTNAME:LEE
+#---------
+#ID:170
+#LASTNAME:LEE
+#---------
+#ID:180
+#LASTNAME:LEE
+#---------
+#ID:190
+#LASTNAME:LEE
+#---------
+#ID:200
+#LASTNAME:LEE
+#---------
+#ID:210
+#LASTNAME:LEE
+#---------
+#ID:220
+#LASTNAME:LEE
+#---------
+#ID:230
+#LASTNAME:LEE
+#---------
+#ID:240
+#LASTNAME:LEE
+#---------
+#ID:250
+#LASTNAME:LEE
+#---------
+#ID:260
+#LASTNAME:LEE
+#---------
+#ID:270
+#LASTNAME:LEE
+#---------
+#ID:280
+#LASTNAME:LEE
+#---------
+#ID:290
+#LASTNAME:LEE
+#---------
+#ID:300
+#LASTNAME:LEE
+#---------
+#ID:310
+#LASTNAME:LEE
+#---------
+#ID:320
+#LASTNAME:LEE
+#---------
+#ID:330
+#LASTNAME:LEE
+#---------
+#ID:340
+#LASTNAME:LEE
+#---------
+#ID:350
+#LASTNAME:LEE
+#---------
+#ID:10
+#LASTNAME:LUCCHESSI
+#---------
+#ID:20
+#LASTNAME:LUCCHESSI
+#---------
+#ID:30
+#LASTNAME:LUCCHESSI
+#---------
+#ID:40
+#LASTNAME:LUCCHESSI
+#---------
+#ID:50
+#LASTNAME:LUCCHESSI
+#---------
+#ID:60
+#LASTNAME:LUCCHESSI
+#---------
+#ID:70
+#LASTNAME:LUCCHESSI
+#---------
+#ID:80
+#LASTNAME:LUCCHESSI
+#---------
+#ID:90
+#LASTNAME:LUCCHESSI
+#---------
+#ID:100
+#LASTNAME:LUCCHESSI
+#---------
+#ID:110
+#LASTNAME:LUCCHESSI
+#---------
+#ID:120
+#LASTNAME:LUCCHESSI
+#---------
+#ID:130
+#LASTNAME:LUCCHESSI
+#---------
+#ID:140
+#LASTNAME:LUCCHESSI
+#---------
+#ID:150
+#LASTNAME:LUCCHESSI
+#---------
+#ID:160
+#LASTNAME:LUCCHESSI
+#---------
+#ID:170
+#LASTNAME:LUCCHESSI
+#---------
+#ID:180
+#LASTNAME:LUCCHESSI
+#---------
+#ID:190
+#LASTNAME:LUCCHESSI
+#---------
+#ID:200
+#LASTNAME:LUCCHESSI
+#---------
+#ID:210
+#LASTNAME:LUCCHESSI
+#---------
+#ID:220
+#LASTNAME:LUCCHESSI
+#---------
+#ID:230
+#LASTNAME:LUCCHESSI
+#---------
+#ID:240
+#LASTNAME:LUCCHESSI
+#---------
+#ID:250
+#LASTNAME:LUCCHESSI
+#---------
+#ID:260
+#LASTNAME:LUCCHESSI
+#---------
+#ID:270
+#LASTNAME:LUCCHESSI
+#---------
+#ID:280
+#LASTNAME:LUCCHESSI
+#---------
+#ID:290
+#LASTNAME:LUCCHESSI
+#---------
+#ID:300
+#LASTNAME:LUCCHESSI
+#---------
+#ID:310
+#LASTNAME:LUCCHESSI
+#---------
+#ID:320
+#LASTNAME:LUCCHESSI
+#---------
+#ID:330
+#LASTNAME:LUCCHESSI
+#---------
+#ID:340
+#LASTNAME:LUCCHESSI
+#---------
+#ID:350
+#LASTNAME:LUCCHESSI
+#---------
+#ID:10
+#LASTNAME:LUTZ
+#---------
+#ID:20
+#LASTNAME:LUTZ
+#---------
+#ID:30
+#LASTNAME:LUTZ
+#---------
+#ID:40
+#LASTNAME:LUTZ
+#---------
+#ID:50
+#LASTNAME:LUTZ
+#---------
+#ID:60
+#LASTNAME:LUTZ
+#---------
+#ID:70
+#LASTNAME:LUTZ
+#---------
+#ID:80
+#LASTNAME:LUTZ
+#---------
+#ID:90
+#LASTNAME:LUTZ
+#---------
+#ID:100
+#LASTNAME:LUTZ
+#---------
+#ID:110
+#LASTNAME:LUTZ
+#---------
+#ID:120
+#LASTNAME:LUTZ
+#---------
+#ID:130
+#LASTNAME:LUTZ
+#---------
+#ID:140
+#LASTNAME:LUTZ
+#---------
+#ID:150
+#LASTNAME:LUTZ
+#---------
+#ID:160
+#LASTNAME:LUTZ
+#---------
+#ID:170
+#LASTNAME:LUTZ
+#---------
+#ID:180
+#LASTNAME:LUTZ
+#---------
+#ID:190
+#LASTNAME:LUTZ
+#---------
+#ID:200
+#LASTNAME:LUTZ
+#---------
+#ID:210
+#LASTNAME:LUTZ
+#---------
+#ID:220
+#LASTNAME:LUTZ
+#---------
+#ID:230
+#LASTNAME:LUTZ
+#---------
+#ID:240
+#LASTNAME:LUTZ
+#---------
+#ID:250
+#LASTNAME:LUTZ
+#---------
+#ID:260
+#LASTNAME:LUTZ
+#---------
+#ID:270
+#LASTNAME:LUTZ
+#---------
+#ID:280
+#LASTNAME:LUTZ
+#---------
+#ID:290
+#LASTNAME:LUTZ
+#---------
+#ID:300
+#LASTNAME:LUTZ
+#---------
+#ID:310
+#LASTNAME:LUTZ
+#---------
+#ID:320
+#LASTNAME:LUTZ
+#---------
+#ID:330
+#LASTNAME:LUTZ
+#---------
+#ID:340
+#LASTNAME:LUTZ
+#---------
+#ID:350
+#LASTNAME:LUTZ
+#---------
+#ID:10
+#LASTNAME:MARINO
+#---------
+#ID:20
+#LASTNAME:MARINO
+#---------
+#ID:30
+#LASTNAME:MARINO
+#---------
+#ID:40
+#LASTNAME:MARINO
+#---------
+#ID:50
+#LASTNAME:MARINO
+#---------
+#ID:60
+#LASTNAME:MARINO
+#---------
+#ID:70
+#LASTNAME:MARINO
+#---------
+#ID:80
+#LASTNAME:MARINO
+#---------
+#ID:90
+#LASTNAME:MARINO
+#---------
+#ID:100
+#LASTNAME:MARINO
+#---------
+#ID:110
+#LASTNAME:MARINO
+#---------
+#ID:120
+#LASTNAME:MARINO
+#---------
+#ID:130
+#LASTNAME:MARINO
+#---------
+#ID:140
+#LASTNAME:MARINO
+#---------
+#ID:150
+#LASTNAME:MARINO
+#---------
+#ID:160
+#LASTNAME:MARINO
+#---------
+#ID:170
+#LASTNAME:MARINO
+#---------
+#ID:180
+#LASTNAME:MARINO
+#---------
+#ID:190
+#LASTNAME:MARINO
+#---------
+#ID:200
+#LASTNAME:MARINO
+#---------
+#ID:210
+#LASTNAME:MARINO
+#---------
+#ID:220
+#LASTNAME:MARINO
+#---------
+#ID:230
+#LASTNAME:MARINO
+#---------
+#ID:240
+#LASTNAME:MARINO
+#---------
+#ID:250
+#LASTNAME:MARINO
+#---------
+#ID:260
+#LASTNAME:MARINO
+#---------
+#ID:270
+#LASTNAME:MARINO
+#---------
+#ID:280
+#LASTNAME:MARINO
+#---------
+#ID:290
+#LASTNAME:MARINO
+#---------
+#ID:300
+#LASTNAME:MARINO
+#---------
+#ID:310
+#LASTNAME:MARINO
+#---------
+#ID:320
+#LASTNAME:MARINO
+#---------
+#ID:330
+#LASTNAME:MARINO
+#---------
+#ID:340
+#LASTNAME:MARINO
+#---------
+#ID:350
+#LASTNAME:MARINO
+#---------
+#ID:10
+#LASTNAME:MEHTA
+#---------
+#ID:20
+#LASTNAME:MEHTA
+#---------
+#ID:30
+#LASTNAME:MEHTA
+#---------
+#ID:40
+#LASTNAME:MEHTA
+#---------
+#ID:50
+#LASTNAME:MEHTA
+#---------
+#ID:60
+#LASTNAME:MEHTA
+#---------
+#ID:70
+#LASTNAME:MEHTA
+#---------
+#ID:80
+#LASTNAME:MEHTA
+#---------
+#ID:90
+#LASTNAME:MEHTA
+#---------
+#ID:100
+#LASTNAME:MEHTA
+#---------
+#ID:110
+#LASTNAME:MEHTA
+#---------
+#ID:120
+#LASTNAME:MEHTA
+#---------
+#ID:130
+#LASTNAME:MEHTA
+#---------
+#ID:140
+#LASTNAME:MEHTA
+#---------
+#ID:150
+#LASTNAME:MEHTA
+#---------
+#ID:160
+#LASTNAME:MEHTA
+#---------
+#ID:170
+#LASTNAME:MEHTA
+#---------
+#ID:180
+#LASTNAME:MEHTA
+#---------
+#ID:190
+#LASTNAME:MEHTA
+#---------
+#ID:200
+#LASTNAME:MEHTA
+#---------
+#ID:210
+#LASTNAME:MEHTA
+#---------
+#ID:220
+#LASTNAME:MEHTA
+#---------
+#ID:230
+#LASTNAME:MEHTA
+#---------
+#ID:240
+#LASTNAME:MEHTA
+#---------
+#ID:250
+#LASTNAME:MEHTA
+#---------
+#ID:260
+#LASTNAME:MEHTA
+#---------
+#ID:270
+#LASTNAME:MEHTA
+#---------
+#ID:280
+#LASTNAME:MEHTA
+#---------
+#ID:290
+#LASTNAME:MEHTA
+#---------
+#ID:300
+#LASTNAME:MEHTA
+#---------
+#ID:310
+#LASTNAME:MEHTA
+#---------
+#ID:320
+#LASTNAME:MEHTA
+#---------
+#ID:330
+#LASTNAME:MEHTA
+#---------
+#ID:340
+#LASTNAME:MEHTA
+#---------
+#ID:350
+#LASTNAME:MEHTA
+#---------
+#ID:10
+#LASTNAME:NICHOLLS
+#---------
+#ID:20
+#LASTNAME:NICHOLLS
+#---------
+#ID:30
+#LASTNAME:NICHOLLS
+#---------
+#ID:40
+#LASTNAME:NICHOLLS
+#---------
+#ID:50
+#LASTNAME:NICHOLLS
+#---------
+#ID:60
+#LASTNAME:NICHOLLS
+#---------
+#ID:70
+#LASTNAME:NICHOLLS
+#---------
+#ID:80
+#LASTNAME:NICHOLLS
+#---------
+#ID:90
+#LASTNAME:NICHOLLS
+#---------
+#ID:100
+#LASTNAME:NICHOLLS
+#---------
+#ID:110
+#LASTNAME:NICHOLLS
+#---------
+#ID:120
+#LASTNAME:NICHOLLS
+#---------
+#ID:130
+#LASTNAME:NICHOLLS
+#---------
+#ID:140
+#LASTNAME:NICHOLLS
+#---------
+#ID:150
+#LASTNAME:NICHOLLS
+#---------
+#ID:160
+#LASTNAME:NICHOLLS
+#---------
+#ID:170
+#LASTNAME:NICHOLLS
+#---------
+#ID:180
+#LASTNAME:NICHOLLS
+#---------
+#ID:190
+#LASTNAME:NICHOLLS
+#---------
+#ID:200
+#LASTNAME:NICHOLLS
+#---------
+#ID:210
+#LASTNAME:NICHOLLS
+#---------
+#ID:220
+#LASTNAME:NICHOLLS
+#---------
+#ID:230
+#LASTNAME:NICHOLLS
+#---------
+#ID:240
+#LASTNAME:NICHOLLS
+#---------
+#ID:250
+#LASTNAME:NICHOLLS
+#---------
+#ID:260
+#LASTNAME:NICHOLLS
+#---------
+#ID:270
+#LASTNAME:NICHOLLS
+#---------
+#ID:280
+#LASTNAME:NICHOLLS
+#---------
+#ID:290
+#LASTNAME:NICHOLLS
+#---------
+#ID:300
+#LASTNAME:NICHOLLS
+#---------
+#ID:310
+#LASTNAME:NICHOLLS
+#---------
+#ID:320
+#LASTNAME:NICHOLLS
+#---------
+#ID:330
+#LASTNAME:NICHOLLS
+#---------
+#ID:340
+#LASTNAME:NICHOLLS
+#---------
+#ID:350
+#LASTNAME:NICHOLLS
+#---------
+#ID:10
+#LASTNAME:OCONNELL
+#---------
+#ID:20
+#LASTNAME:OCONNELL
+#---------
+#ID:30
+#LASTNAME:OCONNELL
+#---------
+#ID:40
+#LASTNAME:OCONNELL
+#---------
+#ID:50
+#LASTNAME:OCONNELL
+#---------
+#ID:60
+#LASTNAME:OCONNELL
+#---------
+#ID:70
+#LASTNAME:OCONNELL
+#---------
+#ID:80
+#LASTNAME:OCONNELL
+#---------
+#ID:90
+#LASTNAME:OCONNELL
+#---------
+#ID:100
+#LASTNAME:OCONNELL
+#---------
+#ID:110
+#LASTNAME:OCONNELL
+#---------
+#ID:120
+#LASTNAME:OCONNELL
+#---------
+#ID:130
+#LASTNAME:OCONNELL
+#---------
+#ID:140
+#LASTNAME:OCONNELL
+#---------
+#ID:150
+#LASTNAME:OCONNELL
+#---------
+#ID:160
+#LASTNAME:OCONNELL
+#---------
+#ID:170
+#LASTNAME:OCONNELL
+#---------
+#ID:180
+#LASTNAME:OCONNELL
+#---------
+#ID:190
+#LASTNAME:OCONNELL
+#---------
+#ID:200
+#LASTNAME:OCONNELL
+#---------
+#ID:210
+#LASTNAME:OCONNELL
+#---------
+#ID:220
+#LASTNAME:OCONNELL
+#---------
+#ID:230
+#LASTNAME:OCONNELL
+#---------
+#ID:240
+#LASTNAME:OCONNELL
+#---------
+#ID:250
+#LASTNAME:OCONNELL
+#---------
+#ID:260
+#LASTNAME:OCONNELL
+#---------
+#ID:270
+#LASTNAME:OCONNELL
+#---------
+#ID:280
+#LASTNAME:OCONNELL
+#---------
+#ID:290
+#LASTNAME:OCONNELL
+#---------
+#ID:300
+#LASTNAME:OCONNELL
+#---------
+#ID:310
+#LASTNAME:OCONNELL
+#---------
+#ID:320
+#LASTNAME:OCONNELL
+#---------
+#ID:330
+#LASTNAME:OCONNELL
+#---------
+#ID:340
+#LASTNAME:OCONNELL
+#---------
+#ID:350
+#LASTNAME:OCONNELL
+#---------
+#ID:10
+#LASTNAME:PARKER
+#---------
+#ID:20
+#LASTNAME:PARKER
+#---------
+#ID:30
+#LASTNAME:PARKER
+#---------
+#ID:40
+#LASTNAME:PARKER
+#---------
+#ID:50
+#LASTNAME:PARKER
+#---------
+#ID:60
+#LASTNAME:PARKER
+#---------
+#ID:70
+#LASTNAME:PARKER
+#---------
+#ID:80
+#LASTNAME:PARKER
+#---------
+#ID:90
+#LASTNAME:PARKER
+#---------
+#ID:100
+#LASTNAME:PARKER
+#---------
+#ID:110
+#LASTNAME:PARKER
+#---------
+#ID:120
+#LASTNAME:PARKER
+#---------
+#ID:130
+#LASTNAME:PARKER
+#---------
+#ID:140
+#LASTNAME:PARKER
+#---------
+#ID:150
+#LASTNAME:PARKER
+#---------
+#ID:160
+#LASTNAME:PARKER
+#---------
+#ID:170
+#LASTNAME:PARKER
+#---------
+#ID:180
+#LASTNAME:PARKER
+#---------
+#ID:190
+#LASTNAME:PARKER
+#---------
+#ID:200
+#LASTNAME:PARKER
+#---------
+#ID:210
+#LASTNAME:PARKER
+#---------
+#ID:220
+#LASTNAME:PARKER
+#---------
+#ID:230
+#LASTNAME:PARKER
+#---------
+#ID:240
+#LASTNAME:PARKER
+#---------
+#ID:250
+#LASTNAME:PARKER
+#---------
+#ID:260
+#LASTNAME:PARKER
+#---------
+#ID:270
+#LASTNAME:PARKER
+#---------
+#ID:280
+#LASTNAME:PARKER
+#---------
+#ID:290
+#LASTNAME:PARKER
+#---------
+#ID:300
+#LASTNAME:PARKER
+#---------
+#ID:310
+#LASTNAME:PARKER
+#---------
+#ID:320
+#LASTNAME:PARKER
+#---------
+#ID:330
+#LASTNAME:PARKER
+#---------
+#ID:340
+#LASTNAME:PARKER
+#---------
+#ID:350
+#LASTNAME:PARKER
+#---------
+#ID:10
+#LASTNAME:PEREZ
+#---------
+#ID:20
+#LASTNAME:PEREZ
+#---------
+#ID:30
+#LASTNAME:PEREZ
+#---------
+#ID:40
+#LASTNAME:PEREZ
+#---------
+#ID:50
+#LASTNAME:PEREZ
+#---------
+#ID:60
+#LASTNAME:PEREZ
+#---------
+#ID:70
+#LASTNAME:PEREZ
+#---------
+#ID:80
+#LASTNAME:PEREZ
+#---------
+#ID:90
+#LASTNAME:PEREZ
+#---------
+#ID:100
+#LASTNAME:PEREZ
+#---------
+#ID:110
+#LASTNAME:PEREZ
+#---------
+#ID:120
+#LASTNAME:PEREZ
+#---------
+#ID:130
+#LASTNAME:PEREZ
+#---------
+#ID:140
+#LASTNAME:PEREZ
+#---------
+#ID:150
+#LASTNAME:PEREZ
+#---------
+#ID:160
+#LASTNAME:PEREZ
+#---------
+#ID:170
+#LASTNAME:PEREZ
+#---------
+#ID:180
+#LASTNAME:PEREZ
+#---------
+#ID:190
+#LASTNAME:PEREZ
+#---------
+#ID:200
+#LASTNAME:PEREZ
+#---------
+#ID:210
+#LASTNAME:PEREZ
+#---------
+#ID:220
+#LASTNAME:PEREZ
+#---------
+#ID:230
+#LASTNAME:PEREZ
+#---------
+#ID:240
+#LASTNAME:PEREZ
+#---------
+#ID:250
+#LASTNAME:PEREZ
+#---------
+#ID:260
+#LASTNAME:PEREZ
+#---------
+#ID:270
+#LASTNAME:PEREZ
+#---------
+#ID:280
+#LASTNAME:PEREZ
+#---------
+#ID:290
+#LASTNAME:PEREZ
+#---------
+#ID:300
+#LASTNAME:PEREZ
+#---------
+#ID:310
+#LASTNAME:PEREZ
+#---------
+#ID:320
+#LASTNAME:PEREZ
+#---------
+#ID:330
+#LASTNAME:PEREZ
+#---------
+#ID:340
+#LASTNAME:PEREZ
+#---------
+#ID:350
+#LASTNAME:PEREZ
+#---------
+#ID:10
+#LASTNAME:PIANKA
+#---------
+#ID:20
+#LASTNAME:PIANKA
+#---------
+#ID:30
+#LASTNAME:PIANKA
+#---------
+#ID:40
+#LASTNAME:PIANKA
+#---------
+#ID:50
+#LASTNAME:PIANKA
+#---------
+#ID:60
+#LASTNAME:PIANKA
+#---------
+#ID:70
+#LASTNAME:PIANKA
+#---------
+#ID:80
+#LASTNAME:PIANKA
+#---------
+#ID:90
+#LASTNAME:PIANKA
+#---------
+#ID:100
+#LASTNAME:PIANKA
+#---------
+#ID:110
+#LASTNAME:PIANKA
+#---------
+#ID:120
+#LASTNAME:PIANKA
+#---------
+#ID:130
+#LASTNAME:PIANKA
+#---------
+#ID:140
+#LASTNAME:PIANKA
+#---------
+#ID:150
+#LASTNAME:PIANKA
+#---------
+#ID:160
+#LASTNAME:PIANKA
+#---------
+#ID:170
+#LASTNAME:PIANKA
+#---------
+#ID:180
+#LASTNAME:PIANKA
+#---------
+#ID:190
+#LASTNAME:PIANKA
+#---------
+#ID:200
+#LASTNAME:PIANKA
+#---------
+#ID:210
+#LASTNAME:PIANKA
+#---------
+#ID:220
+#LASTNAME:PIANKA
+#---------
+#ID:230
+#LASTNAME:PIANKA
+#---------
+#ID:240
+#LASTNAME:PIANKA
+#---------
+#ID:250
+#LASTNAME:PIANKA
+#---------
+#ID:260
+#LASTNAME:PIANKA
+#---------
+#ID:270
+#LASTNAME:PIANKA
+#---------
+#ID:280
+#LASTNAME:PIANKA
+#---------
+#ID:290
+#LASTNAME:PIANKA
+#---------
+#ID:300
+#LASTNAME:PIANKA
+#---------
+#ID:310
+#LASTNAME:PIANKA
+#---------
+#ID:320
+#LASTNAME:PIANKA
+#---------
+#ID:330
+#LASTNAME:PIANKA
+#---------
+#ID:340
+#LASTNAME:PIANKA
+#---------
+#ID:350
+#LASTNAME:PIANKA
+#---------
+#ID:10
+#LASTNAME:PULASKI
+#---------
+#ID:20
+#LASTNAME:PULASKI
+#---------
+#ID:30
+#LASTNAME:PULASKI
+#---------
+#ID:40
+#LASTNAME:PULASKI
+#---------
+#ID:50
+#LASTNAME:PULASKI
+#---------
+#ID:60
+#LASTNAME:PULASKI
+#---------
+#ID:70
+#LASTNAME:PULASKI
+#---------
+#ID:80
+#LASTNAME:PULASKI
+#---------
+#ID:90
+#LASTNAME:PULASKI
+#---------
+#ID:100
+#LASTNAME:PULASKI
+#---------
+#ID:110
+#LASTNAME:PULASKI
+#---------
+#ID:120
+#LASTNAME:PULASKI
+#---------
+#ID:130
+#LASTNAME:PULASKI
+#---------
+#ID:140
+#LASTNAME:PULASKI
+#---------
+#ID:150
+#LASTNAME:PULASKI
+#---------
+#ID:160
+#LASTNAME:PULASKI
+#---------
+#ID:170
+#LASTNAME:PULASKI
+#---------
+#ID:180
+#LASTNAME:PULASKI
+#---------
+#ID:190
+#LASTNAME:PULASKI
+#---------
+#ID:200
+#LASTNAME:PULASKI
+#---------
+#ID:210
+#LASTNAME:PULASKI
+#---------
+#ID:220
+#LASTNAME:PULASKI
+#---------
+#ID:230
+#LASTNAME:PULASKI
+#---------
+#ID:240
+#LASTNAME:PULASKI
+#---------
+#ID:250
+#LASTNAME:PULASKI
+#---------
+#ID:260
+#LASTNAME:PULASKI
+#---------
+#ID:270
+#LASTNAME:PULASKI
+#---------
+#ID:280
+#LASTNAME:PULASKI
+#---------
+#ID:290
+#LASTNAME:PULASKI
+#---------
+#ID:300
+#LASTNAME:PULASKI
+#---------
+#ID:310
+#LASTNAME:PULASKI
+#---------
+#ID:320
+#LASTNAME:PULASKI
+#---------
+#ID:330
+#LASTNAME:PULASKI
+#---------
+#ID:340
+#LASTNAME:PULASKI
+#---------
+#ID:350
+#LASTNAME:PULASKI
+#---------
+#ID:10
+#LASTNAME:QUINTANA
+#---------
+#ID:20
+#LASTNAME:QUINTANA
+#---------
+#ID:30
+#LASTNAME:QUINTANA
+#---------
+#ID:40
+#LASTNAME:QUINTANA
+#---------
+#ID:50
+#LASTNAME:QUINTANA
+#---------
+#ID:60
+#LASTNAME:QUINTANA
+#---------
+#ID:70
+#LASTNAME:QUINTANA
+#---------
+#ID:80
+#LASTNAME:QUINTANA
+#---------
+#ID:90
+#LASTNAME:QUINTANA
+#---------
+#ID:100
+#LASTNAME:QUINTANA
+#---------
+#ID:110
+#LASTNAME:QUINTANA
+#---------
+#ID:120
+#LASTNAME:QUINTANA
+#---------
+#ID:130
+#LASTNAME:QUINTANA
+#---------
+#ID:140
+#LASTNAME:QUINTANA
+#---------
+#ID:150
+#LASTNAME:QUINTANA
+#---------
+#ID:160
+#LASTNAME:QUINTANA
+#---------
+#ID:170
+#LASTNAME:QUINTANA
+#---------
+#ID:180
+#LASTNAME:QUINTANA
+#---------
+#ID:190
+#LASTNAME:QUINTANA
+#---------
+#ID:200
+#LASTNAME:QUINTANA
+#---------
+#ID:210
+#LASTNAME:QUINTANA
+#---------
+#ID:220
+#LASTNAME:QUINTANA
+#---------
+#ID:230
+#LASTNAME:QUINTANA
+#---------
+#ID:240
+#LASTNAME:QUINTANA
+#---------
+#ID:250
+#LASTNAME:QUINTANA
+#---------
+#ID:260
+#LASTNAME:QUINTANA
+#---------
+#ID:270
+#LASTNAME:QUINTANA
+#---------
+#ID:280
+#LASTNAME:QUINTANA
+#---------
+#ID:290
+#LASTNAME:QUINTANA
+#---------
+#ID:300
+#LASTNAME:QUINTANA
+#---------
+#ID:310
+#LASTNAME:QUINTANA
+#---------
+#ID:320
+#LASTNAME:QUINTANA
+#---------
+#ID:330
+#LASTNAME:QUINTANA
+#---------
+#ID:340
+#LASTNAME:QUINTANA
+#---------
+#ID:350
+#LASTNAME:QUINTANA
+#---------
+#ID:10
+#LASTNAME:SCHNEIDER
+#---------
+#ID:20
+#LASTNAME:SCHNEIDER
+#---------
+#ID:30
+#LASTNAME:SCHNEIDER
+#---------
+#ID:40
+#LASTNAME:SCHNEIDER
+#---------
+#ID:50
+#LASTNAME:SCHNEIDER
+#---------
+#ID:60
+#LASTNAME:SCHNEIDER
+#---------
+#ID:70
+#LASTNAME:SCHNEIDER
+#---------
+#ID:80
+#LASTNAME:SCHNEIDER
+#---------
+#ID:90
+#LASTNAME:SCHNEIDER
+#---------
+#ID:100
+#LASTNAME:SCHNEIDER
+#---------
+#ID:110
+#LASTNAME:SCHNEIDER
+#---------
+#ID:120
+#LASTNAME:SCHNEIDER
+#---------
+#ID:130
+#LASTNAME:SCHNEIDER
+#---------
+#ID:140
+#LASTNAME:SCHNEIDER
+#---------
+#ID:150
+#LASTNAME:SCHNEIDER
+#---------
+#ID:160
+#LASTNAME:SCHNEIDER
+#---------
+#ID:170
+#LASTNAME:SCHNEIDER
+#---------
+#ID:180
+#LASTNAME:SCHNEIDER
+#---------
+#ID:190
+#LASTNAME:SCHNEIDER
+#---------
+#ID:200
+#LASTNAME:SCHNEIDER
+#---------
+#ID:210
+#LASTNAME:SCHNEIDER
+#---------
+#ID:220
+#LASTNAME:SCHNEIDER
+#---------
+#ID:230
+#LASTNAME:SCHNEIDER
+#---------
+#ID:240
+#LASTNAME:SCHNEIDER
+#---------
+#ID:250
+#LASTNAME:SCHNEIDER
+#---------
+#ID:260
+#LASTNAME:SCHNEIDER
+#---------
+#ID:270
+#LASTNAME:SCHNEIDER
+#---------
+#ID:280
+#LASTNAME:SCHNEIDER
+#---------
+#ID:290
+#LASTNAME:SCHNEIDER
+#---------
+#ID:300
+#LASTNAME:SCHNEIDER
+#---------
+#ID:310
+#LASTNAME:SCHNEIDER
+#---------
+#ID:320
+#LASTNAME:SCHNEIDER
+#---------
+#ID:330
+#LASTNAME:SCHNEIDER
+#---------
+#ID:340
+#LASTNAME:SCHNEIDER
+#---------
+#ID:350
+#LASTNAME:SCHNEIDER
+#---------
+#ID:10
+#LASTNAME:SCOUTTEN
+#---------
+#ID:20
+#LASTNAME:SCOUTTEN
+#---------
+#ID:30
+#LASTNAME:SCOUTTEN
+#---------
+#ID:40
+#LASTNAME:SCOUTTEN
+#---------
+#ID:50
+#LASTNAME:SCOUTTEN
+#---------
+#ID:60
+#LASTNAME:SCOUTTEN
+#---------
+#ID:70
+#LASTNAME:SCOUTTEN
+#---------
+#ID:80
+#LASTNAME:SCOUTTEN
+#---------
+#ID:90
+#LASTNAME:SCOUTTEN
+#---------
+#ID:100
+#LASTNAME:SCOUTTEN
+#---------
+#ID:110
+#LASTNAME:SCOUTTEN
+#---------
+#ID:120
+#LASTNAME:SCOUTTEN
+#---------
+#ID:130
+#LASTNAME:SCOUTTEN
+#---------
+#ID:140
+#LASTNAME:SCOUTTEN
+#---------
+#ID:150
+#LASTNAME:SCOUTTEN
+#---------
+#ID:160
+#LASTNAME:SCOUTTEN
+#---------
+#ID:170
+#LASTNAME:SCOUTTEN
+#---------
+#ID:180
+#LASTNAME:SCOUTTEN
+#---------
+#ID:190
+#LASTNAME:SCOUTTEN
+#---------
+#ID:200
+#LASTNAME:SCOUTTEN
+#---------
+#ID:210
+#LASTNAME:SCOUTTEN
+#---------
+#ID:220
+#LASTNAME:SCOUTTEN
+#---------
+#ID:230
+#LASTNAME:SCOUTTEN
+#---------
+#ID:240
+#LASTNAME:SCOUTTEN
+#---------
+#ID:250
+#LASTNAME:SCOUTTEN
+#---------
+#ID:260
+#LASTNAME:SCOUTTEN
+#---------
+#ID:270
+#LASTNAME:SCOUTTEN
+#---------
+#ID:280
+#LASTNAME:SCOUTTEN
+#---------
+#ID:290
+#LASTNAME:SCOUTTEN
+#---------
+#ID:300
+#LASTNAME:SCOUTTEN
+#---------
+#ID:310
+#LASTNAME:SCOUTTEN
+#---------
+#ID:320
+#LASTNAME:SCOUTTEN
+#---------
+#ID:330
+#LASTNAME:SCOUTTEN
+#---------
+#ID:340
+#LASTNAME:SCOUTTEN
+#---------
+#ID:350
+#LASTNAME:SCOUTTEN
+#---------
+#ID:10
+#LASTNAME:SETRIGHT
+#---------
+#ID:20
+#LASTNAME:SETRIGHT
+#---------
+#ID:30
+#LASTNAME:SETRIGHT
+#---------
+#ID:40
+#LASTNAME:SETRIGHT
+#---------
+#ID:50
+#LASTNAME:SETRIGHT
+#---------
+#ID:60
+#LASTNAME:SETRIGHT
+#---------
+#ID:70
+#LASTNAME:SETRIGHT
+#---------
+#ID:80
+#LASTNAME:SETRIGHT
+#---------
+#ID:90
+#LASTNAME:SETRIGHT
+#---------
+#ID:100
+#LASTNAME:SETRIGHT
+#---------
+#ID:110
+#LASTNAME:SETRIGHT
+#---------
+#ID:120
+#LASTNAME:SETRIGHT
+#---------
+#ID:130
+#LASTNAME:SETRIGHT
+#---------
+#ID:140
+#LASTNAME:SETRIGHT
+#---------
+#ID:150
+#LASTNAME:SETRIGHT
+#---------
+#ID:160
+#LASTNAME:SETRIGHT
+#---------
+#ID:170
+#LASTNAME:SETRIGHT
+#---------
+#ID:180
+#LASTNAME:SETRIGHT
+#---------
+#ID:190
+#LASTNAME:SETRIGHT
+#---------
+#ID:200
+#LASTNAME:SETRIGHT
+#---------
+#ID:210
+#LASTNAME:SETRIGHT
+#---------
+#ID:220
+#LASTNAME:SETRIGHT
+#---------
+#ID:230
+#LASTNAME:SETRIGHT
+#---------
+#ID:240
+#LASTNAME:SETRIGHT
+#---------
+#ID:250
+#LASTNAME:SETRIGHT
+#---------
+#ID:260
+#LASTNAME:SETRIGHT
+#---------
+#ID:270
+#LASTNAME:SETRIGHT
+#---------
+#ID:280
+#LASTNAME:SETRIGHT
+#---------
+#ID:290
+#LASTNAME:SETRIGHT
+#---------
+#ID:300
+#LASTNAME:SETRIGHT
+#---------
+#ID:310
+#LASTNAME:SETRIGHT
+#---------
+#ID:320
+#LASTNAME:SETRIGHT
+#---------
+#ID:330
+#LASTNAME:SETRIGHT
+#---------
+#ID:340
+#LASTNAME:SETRIGHT
+#---------
+#ID:350
+#LASTNAME:SETRIGHT
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SPENSER
+#---------
+#ID:20
+#LASTNAME:SPENSER
+#---------
+#ID:30
+#LASTNAME:SPENSER
+#---------
+#ID:40
+#LASTNAME:SPENSER
+#---------
+#ID:50
+#LASTNAME:SPENSER
+#---------
+#ID:60
+#LASTNAME:SPENSER
+#---------
+#ID:70
+#LASTNAME:SPENSER
+#---------
+#ID:80
+#LASTNAME:SPENSER
+#---------
+#ID:90
+#LASTNAME:SPENSER
+#---------
+#ID:100
+#LASTNAME:SPENSER
+#---------
+#ID:110
+#LASTNAME:SPENSER
+#---------
+#ID:120
+#LASTNAME:SPENSER
+#---------
+#ID:130
+#LASTNAME:SPENSER
+#---------
+#ID:140
+#LASTNAME:SPENSER
+#---------
+#ID:150
+#LASTNAME:SPENSER
+#---------
+#ID:160
+#LASTNAME:SPENSER
+#---------
+#ID:170
+#LASTNAME:SPENSER
+#---------
+#ID:180
+#LASTNAME:SPENSER
+#---------
+#ID:190
+#LASTNAME:SPENSER
+#---------
+#ID:200
+#LASTNAME:SPENSER
+#---------
+#ID:210
+#LASTNAME:SPENSER
+#---------
+#ID:220
+#LASTNAME:SPENSER
+#---------
+#ID:230
+#LASTNAME:SPENSER
+#---------
+#ID:240
+#LASTNAME:SPENSER
+#---------
+#ID:250
+#LASTNAME:SPENSER
+#---------
+#ID:260
+#LASTNAME:SPENSER
+#---------
+#ID:270
+#LASTNAME:SPENSER
+#---------
+#ID:280
+#LASTNAME:SPENSER
+#---------
+#ID:290
+#LASTNAME:SPENSER
+#---------
+#ID:300
+#LASTNAME:SPENSER
+#---------
+#ID:310
+#LASTNAME:SPENSER
+#---------
+#ID:320
+#LASTNAME:SPENSER
+#---------
+#ID:330
+#LASTNAME:SPENSER
+#---------
+#ID:340
+#LASTNAME:SPENSER
+#---------
+#ID:350
+#LASTNAME:SPENSER
+#---------
+#ID:10
+#LASTNAME:STERN
+#---------
+#ID:20
+#LASTNAME:STERN
+#---------
+#ID:30
+#LASTNAME:STERN
+#---------
+#ID:40
+#LASTNAME:STERN
+#---------
+#ID:50
+#LASTNAME:STERN
+#---------
+#ID:60
+#LASTNAME:STERN
+#---------
+#ID:70
+#LASTNAME:STERN
+#---------
+#ID:80
+#LASTNAME:STERN
+#---------
+#ID:90
+#LASTNAME:STERN
+#---------
+#ID:100
+#LASTNAME:STERN
+#---------
+#ID:110
+#LASTNAME:STERN
+#---------
+#ID:120
+#LASTNAME:STERN
+#---------
+#ID:130
+#LASTNAME:STERN
+#---------
+#ID:140
+#LASTNAME:STERN
+#---------
+#ID:150
+#LASTNAME:STERN
+#---------
+#ID:160
+#LASTNAME:STERN
+#---------
+#ID:170
+#LASTNAME:STERN
+#---------
+#ID:180
+#LASTNAME:STERN
+#---------
+#ID:190
+#LASTNAME:STERN
+#---------
+#ID:200
+#LASTNAME:STERN
+#---------
+#ID:210
+#LASTNAME:STERN
+#---------
+#ID:220
+#LASTNAME:STERN
+#---------
+#ID:230
+#LASTNAME:STERN
+#---------
+#ID:240
+#LASTNAME:STERN
+#---------
+#ID:250
+#LASTNAME:STERN
+#---------
+#ID:260
+#LASTNAME:STERN
+#---------
+#ID:270
+#LASTNAME:STERN
+#---------
+#ID:280
+#LASTNAME:STERN
+#---------
+#ID:290
+#LASTNAME:STERN
+#---------
+#ID:300
+#LASTNAME:STERN
+#---------
+#ID:310
+#LASTNAME:STERN
+#---------
+#ID:320
+#LASTNAME:STERN
+#---------
+#ID:330
+#LASTNAME:STERN
+#---------
+#ID:340
+#LASTNAME:STERN
+#---------
+#ID:350
+#LASTNAME:STERN
+#---------
+#ID:10
+#LASTNAME:THOMPSON
+#---------
+#ID:20
+#LASTNAME:THOMPSON
+#---------
+#ID:30
+#LASTNAME:THOMPSON
+#---------
+#ID:40
+#LASTNAME:THOMPSON
+#---------
+#ID:50
+#LASTNAME:THOMPSON
+#---------
+#ID:60
+#LASTNAME:THOMPSON
+#---------
+#ID:70
+#LASTNAME:THOMPSON
+#---------
+#ID:80
+#LASTNAME:THOMPSON
+#---------
+#ID:90
+#LASTNAME:THOMPSON
+#---------
+#ID:100
+#LASTNAME:THOMPSON
+#---------
+#ID:110
+#LASTNAME:THOMPSON
+#---------
+#ID:120
+#LASTNAME:THOMPSON
+#---------
+#ID:130
+#LASTNAME:THOMPSON
+#---------
+#ID:140
+#LASTNAME:THOMPSON
+#---------
+#ID:150
+#LASTNAME:THOMPSON
+#---------
+#ID:160
+#LASTNAME:THOMPSON
+#---------
+#ID:170
+#LASTNAME:THOMPSON
+#---------
+#ID:180
+#LASTNAME:THOMPSON
+#---------
+#ID:190
+#LASTNAME:THOMPSON
+#---------
+#ID:200
+#LASTNAME:THOMPSON
+#---------
+#ID:210
+#LASTNAME:THOMPSON
+#---------
+#ID:220
+#LASTNAME:THOMPSON
+#---------
+#ID:230
+#LASTNAME:THOMPSON
+#---------
+#ID:240
+#LASTNAME:THOMPSON
+#---------
+#ID:250
+#LASTNAME:THOMPSON
+#---------
+#ID:260
+#LASTNAME:THOMPSON
+#---------
+#ID:270
+#LASTNAME:THOMPSON
+#---------
+#ID:280
+#LASTNAME:THOMPSON
+#---------
+#ID:290
+#LASTNAME:THOMPSON
+#---------
+#ID:300
+#LASTNAME:THOMPSON
+#---------
+#ID:310
+#LASTNAME:THOMPSON
+#---------
+#ID:320
+#LASTNAME:THOMPSON
+#---------
+#ID:330
+#LASTNAME:THOMPSON
+#---------
+#ID:340
+#LASTNAME:THOMPSON
+#---------
+#ID:350
+#LASTNAME:THOMPSON
+#---------
+#ID:10
+#LASTNAME:WALKER
+#---------
+#ID:20
+#LASTNAME:WALKER
+#---------
+#ID:30
+#LASTNAME:WALKER
+#---------
+#ID:40
+#LASTNAME:WALKER
+#---------
+#ID:50
+#LASTNAME:WALKER
+#---------
+#ID:60
+#LASTNAME:WALKER
+#---------
+#ID:70
+#LASTNAME:WALKER
+#---------
+#ID:80
+#LASTNAME:WALKER
+#---------
+#ID:90
+#LASTNAME:WALKER
+#---------
+#ID:100
+#LASTNAME:WALKER
+#---------
+#ID:110
+#LASTNAME:WALKER
+#---------
+#ID:120
+#LASTNAME:WALKER
+#---------
+#ID:130
+#LASTNAME:WALKER
+#---------
+#ID:140
+#LASTNAME:WALKER
+#---------
+#ID:150
+#LASTNAME:WALKER
+#---------
+#ID:160
+#LASTNAME:WALKER
+#---------
+#ID:170
+#LASTNAME:WALKER
+#---------
+#ID:180
+#LASTNAME:WALKER
+#---------
+#ID:190
+#LASTNAME:WALKER
+#---------
+#ID:200
+#LASTNAME:WALKER
+#---------
+#ID:210
+#LASTNAME:WALKER
+#---------
+#ID:220
+#LASTNAME:WALKER
+#---------
+#ID:230
+#LASTNAME:WALKER
+#---------
+#ID:240
+#LASTNAME:WALKER
+#---------
+#ID:250
+#LASTNAME:WALKER
+#---------
+#ID:260
+#LASTNAME:WALKER
+#---------
+#ID:270
+#LASTNAME:WALKER
+#---------
+#ID:280
+#LASTNAME:WALKER
+#---------
+#ID:290
+#LASTNAME:WALKER
+#---------
+#ID:300
+#LASTNAME:WALKER
+#---------
+#ID:310
+#LASTNAME:WALKER
+#---------
+#ID:320
+#LASTNAME:WALKER
+#---------
+#ID:330
+#LASTNAME:WALKER
+#---------
+#ID:340
+#LASTNAME:WALKER
+#---------
+#ID:350
+#LASTNAME:WALKER
+#---------
+#ID:10
+#LASTNAME:YOSHIMURA
+#---------
+#ID:20
+#LASTNAME:YOSHIMURA
+#---------
+#ID:30
+#LASTNAME:YOSHIMURA
+#---------
+#ID:40
+#LASTNAME:YOSHIMURA
+#---------
+#ID:50
+#LASTNAME:YOSHIMURA
+#---------
+#ID:60
+#LASTNAME:YOSHIMURA
+#---------
+#ID:70
+#LASTNAME:YOSHIMURA
+#---------
+#ID:80
+#LASTNAME:YOSHIMURA
+#---------
+#ID:90
+#LASTNAME:YOSHIMURA
+#---------
+#ID:100
+#LASTNAME:YOSHIMURA
+#---------
+#ID:110
+#LASTNAME:YOSHIMURA
+#---------
+#ID:120
+#LASTNAME:YOSHIMURA
+#---------
+#ID:130
+#LASTNAME:YOSHIMURA
+#---------
+#ID:140
+#LASTNAME:YOSHIMURA
+#---------
+#ID:150
+#LASTNAME:YOSHIMURA
+#---------
+#ID:160
+#LASTNAME:YOSHIMURA
+#---------
+#ID:170
+#LASTNAME:YOSHIMURA
+#---------
+#ID:180
+#LASTNAME:YOSHIMURA
+#---------
+#ID:190
+#LASTNAME:YOSHIMURA
+#---------
+#ID:200
+#LASTNAME:YOSHIMURA
+#---------
+#ID:210
+#LASTNAME:YOSHIMURA
+#---------
+#ID:220
+#LASTNAME:YOSHIMURA
+#---------
+#ID:230
+#LASTNAME:YOSHIMURA
+#---------
+#ID:240
+#LASTNAME:YOSHIMURA
+#---------
+#ID:250
+#LASTNAME:YOSHIMURA
+#---------
+#ID:260
+#LASTNAME:YOSHIMURA
+#---------
+#ID:270
+#LASTNAME:YOSHIMURA
+#---------
+#ID:280
+#LASTNAME:YOSHIMURA
+#---------
+#ID:290
+#LASTNAME:YOSHIMURA
+#---------
+#ID:300
+#LASTNAME:YOSHIMURA
+#---------
+#ID:310
+#LASTNAME:YOSHIMURA
+#---------
+#ID:320
+#LASTNAME:YOSHIMURA
+#---------
+#ID:330
+#LASTNAME:YOSHIMURA
+#---------
+#ID:340
+#LASTNAME:YOSHIMURA
+#---------
+#ID:350
+#LASTNAME:YOSHIMURA
+#---------
+#__ZOS_EXPECTED__
+#
+#ID:10
+#LASTNAME:ADAMSON
+#---------
+#ID:20
+#LASTNAME:ADAMSON
+#---------
+#ID:30
+#LASTNAME:ADAMSON
+#---------
+#ID:40
+#LASTNAME:ADAMSON
+#---------
+#ID:50
+#LASTNAME:ADAMSON
+#---------
+#ID:60
+#LASTNAME:ADAMSON
+#---------
+#ID:70
+#LASTNAME:ADAMSON
+#---------
+#ID:80
+#LASTNAME:ADAMSON
+#---------
+#ID:90
+#LASTNAME:ADAMSON
+#---------
+#ID:100
+#LASTNAME:ADAMSON
+#---------
+#ID:110
+#LASTNAME:ADAMSON
+#---------
+#ID:120
+#LASTNAME:ADAMSON
+#---------
+#ID:130
+#LASTNAME:ADAMSON
+#---------
+#ID:140
+#LASTNAME:ADAMSON
+#---------
+#ID:150
+#LASTNAME:ADAMSON
+#---------
+#ID:160
+#LASTNAME:ADAMSON
+#---------
+#ID:170
+#LASTNAME:ADAMSON
+#---------
+#ID:180
+#LASTNAME:ADAMSON
+#---------
+#ID:190
+#LASTNAME:ADAMSON
+#---------
+#ID:200
+#LASTNAME:ADAMSON
+#---------
+#ID:210
+#LASTNAME:ADAMSON
+#---------
+#ID:220
+#LASTNAME:ADAMSON
+#---------
+#ID:230
+#LASTNAME:ADAMSON
+#---------
+#ID:240
+#LASTNAME:ADAMSON
+#---------
+#ID:250
+#LASTNAME:ADAMSON
+#---------
+#ID:260
+#LASTNAME:ADAMSON
+#---------
+#ID:270
+#LASTNAME:ADAMSON
+#---------
+#ID:280
+#LASTNAME:ADAMSON
+#---------
+#ID:290
+#LASTNAME:ADAMSON
+#---------
+#ID:300
+#LASTNAME:ADAMSON
+#---------
+#ID:310
+#LASTNAME:ADAMSON
+#---------
+#ID:320
+#LASTNAME:ADAMSON
+#---------
+#ID:330
+#LASTNAME:ADAMSON
+#---------
+#ID:340
+#LASTNAME:ADAMSON
+#---------
+#ID:350
+#LASTNAME:ADAMSON
+#---------
+#ID:10
+#LASTNAME:BROWN
+#---------
+#ID:20
+#LASTNAME:BROWN
+#---------
+#ID:30
+#LASTNAME:BROWN
+#---------
+#ID:40
+#LASTNAME:BROWN
+#---------
+#ID:50
+#LASTNAME:BROWN
+#---------
+#ID:60
+#LASTNAME:BROWN
+#---------
+#ID:70
+#LASTNAME:BROWN
+#---------
+#ID:80
+#LASTNAME:BROWN
+#---------
+#ID:90
+#LASTNAME:BROWN
+#---------
+#ID:100
+#LASTNAME:BROWN
+#---------
+#ID:110
+#LASTNAME:BROWN
+#---------
+#ID:120
+#LASTNAME:BROWN
+#---------
+#ID:130
+#LASTNAME:BROWN
+#---------
+#ID:140
+#LASTNAME:BROWN
+#---------
+#ID:150
+#LASTNAME:BROWN
+#---------
+#ID:160
+#LASTNAME:BROWN
+#---------
+#ID:170
+#LASTNAME:BROWN
+#---------
+#ID:180
+#LASTNAME:BROWN
+#---------
+#ID:190
+#LASTNAME:BROWN
+#---------
+#ID:200
+#LASTNAME:BROWN
+#---------
+#ID:210
+#LASTNAME:BROWN
+#---------
+#ID:220
+#LASTNAME:BROWN
+#---------
+#ID:230
+#LASTNAME:BROWN
+#---------
+#ID:240
+#LASTNAME:BROWN
+#---------
+#ID:250
+#LASTNAME:BROWN
+#---------
+#ID:260
+#LASTNAME:BROWN
+#---------
+#ID:270
+#LASTNAME:BROWN
+#---------
+#ID:280
+#LASTNAME:BROWN
+#---------
+#ID:290
+#LASTNAME:BROWN
+#---------
+#ID:300
+#LASTNAME:BROWN
+#---------
+#ID:310
+#LASTNAME:BROWN
+#---------
+#ID:320
+#LASTNAME:BROWN
+#---------
+#ID:330
+#LASTNAME:BROWN
+#---------
+#ID:340
+#LASTNAME:BROWN
+#---------
+#ID:350
+#LASTNAME:BROWN
+#---------
+#ID:10
+#LASTNAME:GEYER
+#---------
+#ID:20
+#LASTNAME:GEYER
+#---------
+#ID:30
+#LASTNAME:GEYER
+#---------
+#ID:40
+#LASTNAME:GEYER
+#---------
+#ID:50
+#LASTNAME:GEYER
+#---------
+#ID:60
+#LASTNAME:GEYER
+#---------
+#ID:70
+#LASTNAME:GEYER
+#---------
+#ID:80
+#LASTNAME:GEYER
+#---------
+#ID:90
+#LASTNAME:GEYER
+#---------
+#ID:100
+#LASTNAME:GEYER
+#---------
+#ID:110
+#LASTNAME:GEYER
+#---------
+#ID:120
+#LASTNAME:GEYER
+#---------
+#ID:130
+#LASTNAME:GEYER
+#---------
+#ID:140
+#LASTNAME:GEYER
+#---------
+#ID:150
+#LASTNAME:GEYER
+#---------
+#ID:160
+#LASTNAME:GEYER
+#---------
+#ID:170
+#LASTNAME:GEYER
+#---------
+#ID:180
+#LASTNAME:GEYER
+#---------
+#ID:190
+#LASTNAME:GEYER
+#---------
+#ID:200
+#LASTNAME:GEYER
+#---------
+#ID:210
+#LASTNAME:GEYER
+#---------
+#ID:220
+#LASTNAME:GEYER
+#---------
+#ID:230
+#LASTNAME:GEYER
+#---------
+#ID:240
+#LASTNAME:GEYER
+#---------
+#ID:250
+#LASTNAME:GEYER
+#---------
+#ID:260
+#LASTNAME:GEYER
+#---------
+#ID:270
+#LASTNAME:GEYER
+#---------
+#ID:280
+#LASTNAME:GEYER
+#---------
+#ID:290
+#LASTNAME:GEYER
+#---------
+#ID:300
+#LASTNAME:GEYER
+#---------
+#ID:310
+#LASTNAME:GEYER
+#---------
+#ID:320
+#LASTNAME:GEYER
+#---------
+#ID:330
+#LASTNAME:GEYER
+#---------
+#ID:340
+#LASTNAME:GEYER
+#---------
+#ID:350
+#LASTNAME:GEYER
+#---------
+#ID:10
+#LASTNAME:GOUNOT
+#---------
+#ID:20
+#LASTNAME:GOUNOT
+#---------
+#ID:30
+#LASTNAME:GOUNOT
+#---------
+#ID:40
+#LASTNAME:GOUNOT
+#---------
+#ID:50
+#LASTNAME:GOUNOT
+#---------
+#ID:60
+#LASTNAME:GOUNOT
+#---------
+#ID:70
+#LASTNAME:GOUNOT
+#---------
+#ID:80
+#LASTNAME:GOUNOT
+#---------
+#ID:90
+#LASTNAME:GOUNOT
+#---------
+#ID:100
+#LASTNAME:GOUNOT
+#---------
+#ID:110
+#LASTNAME:GOUNOT
+#---------
+#ID:120
+#LASTNAME:GOUNOT
+#---------
+#ID:130
+#LASTNAME:GOUNOT
+#---------
+#ID:140
+#LASTNAME:GOUNOT
+#---------
+#ID:150
+#LASTNAME:GOUNOT
+#---------
+#ID:160
+#LASTNAME:GOUNOT
+#---------
+#ID:170
+#LASTNAME:GOUNOT
+#---------
+#ID:180
+#LASTNAME:GOUNOT
+#---------
+#ID:190
+#LASTNAME:GOUNOT
+#---------
+#ID:200
+#LASTNAME:GOUNOT
+#---------
+#ID:210
+#LASTNAME:GOUNOT
+#---------
+#ID:220
+#LASTNAME:GOUNOT
+#---------
+#ID:230
+#LASTNAME:GOUNOT
+#---------
+#ID:240
+#LASTNAME:GOUNOT
+#---------
+#ID:250
+#LASTNAME:GOUNOT
+#---------
+#ID:260
+#LASTNAME:GOUNOT
+#---------
+#ID:270
+#LASTNAME:GOUNOT
+#---------
+#ID:280
+#LASTNAME:GOUNOT
+#---------
+#ID:290
+#LASTNAME:GOUNOT
+#---------
+#ID:300
+#LASTNAME:GOUNOT
+#---------
+#ID:310
+#LASTNAME:GOUNOT
+#---------
+#ID:320
+#LASTNAME:GOUNOT
+#---------
+#ID:330
+#LASTNAME:GOUNOT
+#---------
+#ID:340
+#LASTNAME:GOUNOT
+#---------
+#ID:350
+#LASTNAME:GOUNOT
+#---------
+#ID:10
+#LASTNAME:HAAS
+#---------
+#ID:20
+#LASTNAME:HAAS
+#---------
+#ID:30
+#LASTNAME:HAAS
+#---------
+#ID:40
+#LASTNAME:HAAS
+#---------
+#ID:50
+#LASTNAME:HAAS
+#---------
+#ID:60
+#LASTNAME:HAAS
+#---------
+#ID:70
+#LASTNAME:HAAS
+#---------
+#ID:80
+#LASTNAME:HAAS
+#---------
+#ID:90
+#LASTNAME:HAAS
+#---------
+#ID:100
+#LASTNAME:HAAS
+#---------
+#ID:110
+#LASTNAME:HAAS
+#---------
+#ID:120
+#LASTNAME:HAAS
+#---------
+#ID:130
+#LASTNAME:HAAS
+#---------
+#ID:140
+#LASTNAME:HAAS
+#---------
+#ID:150
+#LASTNAME:HAAS
+#---------
+#ID:160
+#LASTNAME:HAAS
+#---------
+#ID:170
+#LASTNAME:HAAS
+#---------
+#ID:180
+#LASTNAME:HAAS
+#---------
+#ID:190
+#LASTNAME:HAAS
+#---------
+#ID:200
+#LASTNAME:HAAS
+#---------
+#ID:210
+#LASTNAME:HAAS
+#---------
+#ID:220
+#LASTNAME:HAAS
+#---------
+#ID:230
+#LASTNAME:HAAS
+#---------
+#ID:240
+#LASTNAME:HAAS
+#---------
+#ID:250
+#LASTNAME:HAAS
+#---------
+#ID:260
+#LASTNAME:HAAS
+#---------
+#ID:270
+#LASTNAME:HAAS
+#---------
+#ID:280
+#LASTNAME:HAAS
+#---------
+#ID:290
+#LASTNAME:HAAS
+#---------
+#ID:300
+#LASTNAME:HAAS
+#---------
+#ID:310
+#LASTNAME:HAAS
+#---------
+#ID:320
+#LASTNAME:HAAS
+#---------
+#ID:330
+#LASTNAME:HAAS
+#---------
+#ID:340
+#LASTNAME:HAAS
+#---------
+#ID:350
+#LASTNAME:HAAS
+#---------
+#ID:10
+#LASTNAME:HENDERSON
+#---------
+#ID:20
+#LASTNAME:HENDERSON
+#---------
+#ID:30
+#LASTNAME:HENDERSON
+#---------
+#ID:40
+#LASTNAME:HENDERSON
+#---------
+#ID:50
+#LASTNAME:HENDERSON
+#---------
+#ID:60
+#LASTNAME:HENDERSON
+#---------
+#ID:70
+#LASTNAME:HENDERSON
+#---------
+#ID:80
+#LASTNAME:HENDERSON
+#---------
+#ID:90
+#LASTNAME:HENDERSON
+#---------
+#ID:100
+#LASTNAME:HENDERSON
+#---------
+#ID:110
+#LASTNAME:HENDERSON
+#---------
+#ID:120
+#LASTNAME:HENDERSON
+#---------
+#ID:130
+#LASTNAME:HENDERSON
+#---------
+#ID:140
+#LASTNAME:HENDERSON
+#---------
+#ID:150
+#LASTNAME:HENDERSON
+#---------
+#ID:160
+#LASTNAME:HENDERSON
+#---------
+#ID:170
+#LASTNAME:HENDERSON
+#---------
+#ID:180
+#LASTNAME:HENDERSON
+#---------
+#ID:190
+#LASTNAME:HENDERSON
+#---------
+#ID:200
+#LASTNAME:HENDERSON
+#---------
+#ID:210
+#LASTNAME:HENDERSON
+#---------
+#ID:220
+#LASTNAME:HENDERSON
+#---------
+#ID:230
+#LASTNAME:HENDERSON
+#---------
+#ID:240
+#LASTNAME:HENDERSON
+#---------
+#ID:250
+#LASTNAME:HENDERSON
+#---------
+#ID:260
+#LASTNAME:HENDERSON
+#---------
+#ID:270
+#LASTNAME:HENDERSON
+#---------
+#ID:280
+#LASTNAME:HENDERSON
+#---------
+#ID:290
+#LASTNAME:HENDERSON
+#---------
+#ID:300
+#LASTNAME:HENDERSON
+#---------
+#ID:310
+#LASTNAME:HENDERSON
+#---------
+#ID:320
+#LASTNAME:HENDERSON
+#---------
+#ID:330
+#LASTNAME:HENDERSON
+#---------
+#ID:340
+#LASTNAME:HENDERSON
+#---------
+#ID:350
+#LASTNAME:HENDERSON
+#---------
+#ID:10
+#LASTNAME:JEFFERSON
+#---------
+#ID:20
+#LASTNAME:JEFFERSON
+#---------
+#ID:30
+#LASTNAME:JEFFERSON
+#---------
+#ID:40
+#LASTNAME:JEFFERSON
+#---------
+#ID:50
+#LASTNAME:JEFFERSON
+#---------
+#ID:60
+#LASTNAME:JEFFERSON
+#---------
+#ID:70
+#LASTNAME:JEFFERSON
+#---------
+#ID:80
+#LASTNAME:JEFFERSON
+#---------
+#ID:90
+#LASTNAME:JEFFERSON
+#---------
+#ID:100
+#LASTNAME:JEFFERSON
+#---------
+#ID:110
+#LASTNAME:JEFFERSON
+#---------
+#ID:120
+#LASTNAME:JEFFERSON
+#---------
+#ID:130
+#LASTNAME:JEFFERSON
+#---------
+#ID:140
+#LASTNAME:JEFFERSON
+#---------
+#ID:150
+#LASTNAME:JEFFERSON
+#---------
+#ID:160
+#LASTNAME:JEFFERSON
+#---------
+#ID:170
+#LASTNAME:JEFFERSON
+#---------
+#ID:180
+#LASTNAME:JEFFERSON
+#---------
+#ID:190
+#LASTNAME:JEFFERSON
+#---------
+#ID:200
+#LASTNAME:JEFFERSON
+#---------
+#ID:210
+#LASTNAME:JEFFERSON
+#---------
+#ID:220
+#LASTNAME:JEFFERSON
+#---------
+#ID:230
+#LASTNAME:JEFFERSON
+#---------
+#ID:240
+#LASTNAME:JEFFERSON
+#---------
+#ID:250
+#LASTNAME:JEFFERSON
+#---------
+#ID:260
+#LASTNAME:JEFFERSON
+#---------
+#ID:270
+#LASTNAME:JEFFERSON
+#---------
+#ID:280
+#LASTNAME:JEFFERSON
+#---------
+#ID:290
+#LASTNAME:JEFFERSON
+#---------
+#ID:300
+#LASTNAME:JEFFERSON
+#---------
+#ID:310
+#LASTNAME:JEFFERSON
+#---------
+#ID:320
+#LASTNAME:JEFFERSON
+#---------
+#ID:330
+#LASTNAME:JEFFERSON
+#---------
+#ID:340
+#LASTNAME:JEFFERSON
+#---------
+#ID:350
+#LASTNAME:JEFFERSON
+#---------
+#ID:10
+#LASTNAME:JOHNSON
+#---------
+#ID:20
+#LASTNAME:JOHNSON
+#---------
+#ID:30
+#LASTNAME:JOHNSON
+#---------
+#ID:40
+#LASTNAME:JOHNSON
+#---------
+#ID:50
+#LASTNAME:JOHNSON
+#---------
+#ID:60
+#LASTNAME:JOHNSON
+#---------
+#ID:70
+#LASTNAME:JOHNSON
+#---------
+#ID:80
+#LASTNAME:JOHNSON
+#---------
+#ID:90
+#LASTNAME:JOHNSON
+#---------
+#ID:100
+#LASTNAME:JOHNSON
+#---------
+#ID:110
+#LASTNAME:JOHNSON
+#---------
+#ID:120
+#LASTNAME:JOHNSON
+#---------
+#ID:130
+#LASTNAME:JOHNSON
+#---------
+#ID:140
+#LASTNAME:JOHNSON
+#---------
+#ID:150
+#LASTNAME:JOHNSON
+#---------
+#ID:160
+#LASTNAME:JOHNSON
+#---------
+#ID:170
+#LASTNAME:JOHNSON
+#---------
+#ID:180
+#LASTNAME:JOHNSON
+#---------
+#ID:190
+#LASTNAME:JOHNSON
+#---------
+#ID:200
+#LASTNAME:JOHNSON
+#---------
+#ID:210
+#LASTNAME:JOHNSON
+#---------
+#ID:220
+#LASTNAME:JOHNSON
+#---------
+#ID:230
+#LASTNAME:JOHNSON
+#---------
+#ID:240
+#LASTNAME:JOHNSON
+#---------
+#ID:250
+#LASTNAME:JOHNSON
+#---------
+#ID:260
+#LASTNAME:JOHNSON
+#---------
+#ID:270
+#LASTNAME:JOHNSON
+#---------
+#ID:280
+#LASTNAME:JOHNSON
+#---------
+#ID:290
+#LASTNAME:JOHNSON
+#---------
+#ID:300
+#LASTNAME:JOHNSON
+#---------
+#ID:310
+#LASTNAME:JOHNSON
+#---------
+#ID:320
+#LASTNAME:JOHNSON
+#---------
+#ID:330
+#LASTNAME:JOHNSON
+#---------
+#ID:340
+#LASTNAME:JOHNSON
+#---------
+#ID:350
+#LASTNAME:JOHNSON
+#---------
+#ID:10
+#LASTNAME:JONES
+#---------
+#ID:20
+#LASTNAME:JONES
+#---------
+#ID:30
+#LASTNAME:JONES
+#---------
+#ID:40
+#LASTNAME:JONES
+#---------
+#ID:50
+#LASTNAME:JONES
+#---------
+#ID:60
+#LASTNAME:JONES
+#---------
+#ID:70
+#LASTNAME:JONES
+#---------
+#ID:80
+#LASTNAME:JONES
+#---------
+#ID:90
+#LASTNAME:JONES
+#---------
+#ID:100
+#LASTNAME:JONES
+#---------
+#ID:110
+#LASTNAME:JONES
+#---------
+#ID:120
+#LASTNAME:JONES
+#---------
+#ID:130
+#LASTNAME:JONES
+#---------
+#ID:140
+#LASTNAME:JONES
+#---------
+#ID:150
+#LASTNAME:JONES
+#---------
+#ID:160
+#LASTNAME:JONES
+#---------
+#ID:170
+#LASTNAME:JONES
+#---------
+#ID:180
+#LASTNAME:JONES
+#---------
+#ID:190
+#LASTNAME:JONES
+#---------
+#ID:200
+#LASTNAME:JONES
+#---------
+#ID:210
+#LASTNAME:JONES
+#---------
+#ID:220
+#LASTNAME:JONES
+#---------
+#ID:230
+#LASTNAME:JONES
+#---------
+#ID:240
+#LASTNAME:JONES
+#---------
+#ID:250
+#LASTNAME:JONES
+#---------
+#ID:260
+#LASTNAME:JONES
+#---------
+#ID:270
+#LASTNAME:JONES
+#---------
+#ID:280
+#LASTNAME:JONES
+#---------
+#ID:290
+#LASTNAME:JONES
+#---------
+#ID:300
+#LASTNAME:JONES
+#---------
+#ID:310
+#LASTNAME:JONES
+#---------
+#ID:320
+#LASTNAME:JONES
+#---------
+#ID:330
+#LASTNAME:JONES
+#---------
+#ID:340
+#LASTNAME:JONES
+#---------
+#ID:350
+#LASTNAME:JONES
+#---------
+#ID:10
+#LASTNAME:KWAN
+#---------
+#ID:20
+#LASTNAME:KWAN
+#---------
+#ID:30
+#LASTNAME:KWAN
+#---------
+#ID:40
+#LASTNAME:KWAN
+#---------
+#ID:50
+#LASTNAME:KWAN
+#---------
+#ID:60
+#LASTNAME:KWAN
+#---------
+#ID:70
+#LASTNAME:KWAN
+#---------
+#ID:80
+#LASTNAME:KWAN
+#---------
+#ID:90
+#LASTNAME:KWAN
+#---------
+#ID:100
+#LASTNAME:KWAN
+#---------
+#ID:110
+#LASTNAME:KWAN
+#---------
+#ID:120
+#LASTNAME:KWAN
+#---------
+#ID:130
+#LASTNAME:KWAN
+#---------
+#ID:140
+#LASTNAME:KWAN
+#---------
+#ID:150
+#LASTNAME:KWAN
+#---------
+#ID:160
+#LASTNAME:KWAN
+#---------
+#ID:170
+#LASTNAME:KWAN
+#---------
+#ID:180
+#LASTNAME:KWAN
+#---------
+#ID:190
+#LASTNAME:KWAN
+#---------
+#ID:200
+#LASTNAME:KWAN
+#---------
+#ID:210
+#LASTNAME:KWAN
+#---------
+#ID:220
+#LASTNAME:KWAN
+#---------
+#ID:230
+#LASTNAME:KWAN
+#---------
+#ID:240
+#LASTNAME:KWAN
+#---------
+#ID:250
+#LASTNAME:KWAN
+#---------
+#ID:260
+#LASTNAME:KWAN
+#---------
+#ID:270
+#LASTNAME:KWAN
+#---------
+#ID:280
+#LASTNAME:KWAN
+#---------
+#ID:290
+#LASTNAME:KWAN
+#---------
+#ID:300
+#LASTNAME:KWAN
+#---------
+#ID:310
+#LASTNAME:KWAN
+#---------
+#ID:320
+#LASTNAME:KWAN
+#---------
+#ID:330
+#LASTNAME:KWAN
+#---------
+#ID:340
+#LASTNAME:KWAN
+#---------
+#ID:350
+#LASTNAME:KWAN
+#---------
+#ID:10
+#LASTNAME:LEE
+#---------
+#ID:20
+#LASTNAME:LEE
+#---------
+#ID:30
+#LASTNAME:LEE
+#---------
+#ID:40
+#LASTNAME:LEE
+#---------
+#ID:50
+#LASTNAME:LEE
+#---------
+#ID:60
+#LASTNAME:LEE
+#---------
+#ID:70
+#LASTNAME:LEE
+#---------
+#ID:80
+#LASTNAME:LEE
+#---------
+#ID:90
+#LASTNAME:LEE
+#---------
+#ID:100
+#LASTNAME:LEE
+#---------
+#ID:110
+#LASTNAME:LEE
+#---------
+#ID:120
+#LASTNAME:LEE
+#---------
+#ID:130
+#LASTNAME:LEE
+#---------
+#ID:140
+#LASTNAME:LEE
+#---------
+#ID:150
+#LASTNAME:LEE
+#---------
+#ID:160
+#LASTNAME:LEE
+#---------
+#ID:170
+#LASTNAME:LEE
+#---------
+#ID:180
+#LASTNAME:LEE
+#---------
+#ID:190
+#LASTNAME:LEE
+#---------
+#ID:200
+#LASTNAME:LEE
+#---------
+#ID:210
+#LASTNAME:LEE
+#---------
+#ID:220
+#LASTNAME:LEE
+#---------
+#ID:230
+#LASTNAME:LEE
+#---------
+#ID:240
+#LASTNAME:LEE
+#---------
+#ID:250
+#LASTNAME:LEE
+#---------
+#ID:260
+#LASTNAME:LEE
+#---------
+#ID:270
+#LASTNAME:LEE
+#---------
+#ID:280
+#LASTNAME:LEE
+#---------
+#ID:290
+#LASTNAME:LEE
+#---------
+#ID:300
+#LASTNAME:LEE
+#---------
+#ID:310
+#LASTNAME:LEE
+#---------
+#ID:320
+#LASTNAME:LEE
+#---------
+#ID:330
+#LASTNAME:LEE
+#---------
+#ID:340
+#LASTNAME:LEE
+#---------
+#ID:350
+#LASTNAME:LEE
+#---------
+#ID:10
+#LASTNAME:LUCCHESSI
+#---------
+#ID:20
+#LASTNAME:LUCCHESSI
+#---------
+#ID:30
+#LASTNAME:LUCCHESSI
+#---------
+#ID:40
+#LASTNAME:LUCCHESSI
+#---------
+#ID:50
+#LASTNAME:LUCCHESSI
+#---------
+#ID:60
+#LASTNAME:LUCCHESSI
+#---------
+#ID:70
+#LASTNAME:LUCCHESSI
+#---------
+#ID:80
+#LASTNAME:LUCCHESSI
+#---------
+#ID:90
+#LASTNAME:LUCCHESSI
+#---------
+#ID:100
+#LASTNAME:LUCCHESSI
+#---------
+#ID:110
+#LASTNAME:LUCCHESSI
+#---------
+#ID:120
+#LASTNAME:LUCCHESSI
+#---------
+#ID:130
+#LASTNAME:LUCCHESSI
+#---------
+#ID:140
+#LASTNAME:LUCCHESSI
+#---------
+#ID:150
+#LASTNAME:LUCCHESSI
+#---------
+#ID:160
+#LASTNAME:LUCCHESSI
+#---------
+#ID:170
+#LASTNAME:LUCCHESSI
+#---------
+#ID:180
+#LASTNAME:LUCCHESSI
+#---------
+#ID:190
+#LASTNAME:LUCCHESSI
+#---------
+#ID:200
+#LASTNAME:LUCCHESSI
+#---------
+#ID:210
+#LASTNAME:LUCCHESSI
+#---------
+#ID:220
+#LASTNAME:LUCCHESSI
+#---------
+#ID:230
+#LASTNAME:LUCCHESSI
+#---------
+#ID:240
+#LASTNAME:LUCCHESSI
+#---------
+#ID:250
+#LASTNAME:LUCCHESSI
+#---------
+#ID:260
+#LASTNAME:LUCCHESSI
+#---------
+#ID:270
+#LASTNAME:LUCCHESSI
+#---------
+#ID:280
+#LASTNAME:LUCCHESSI
+#---------
+#ID:290
+#LASTNAME:LUCCHESSI
+#---------
+#ID:300
+#LASTNAME:LUCCHESSI
+#---------
+#ID:310
+#LASTNAME:LUCCHESSI
+#---------
+#ID:320
+#LASTNAME:LUCCHESSI
+#---------
+#ID:330
+#LASTNAME:LUCCHESSI
+#---------
+#ID:340
+#LASTNAME:LUCCHESSI
+#---------
+#ID:350
+#LASTNAME:LUCCHESSI
+#---------
+#ID:10
+#LASTNAME:LUTZ
+#---------
+#ID:20
+#LASTNAME:LUTZ
+#---------
+#ID:30
+#LASTNAME:LUTZ
+#---------
+#ID:40
+#LASTNAME:LUTZ
+#---------
+#ID:50
+#LASTNAME:LUTZ
+#---------
+#ID:60
+#LASTNAME:LUTZ
+#---------
+#ID:70
+#LASTNAME:LUTZ
+#---------
+#ID:80
+#LASTNAME:LUTZ
+#---------
+#ID:90
+#LASTNAME:LUTZ
+#---------
+#ID:100
+#LASTNAME:LUTZ
+#---------
+#ID:110
+#LASTNAME:LUTZ
+#---------
+#ID:120
+#LASTNAME:LUTZ
+#---------
+#ID:130
+#LASTNAME:LUTZ
+#---------
+#ID:140
+#LASTNAME:LUTZ
+#---------
+#ID:150
+#LASTNAME:LUTZ
+#---------
+#ID:160
+#LASTNAME:LUTZ
+#---------
+#ID:170
+#LASTNAME:LUTZ
+#---------
+#ID:180
+#LASTNAME:LUTZ
+#---------
+#ID:190
+#LASTNAME:LUTZ
+#---------
+#ID:200
+#LASTNAME:LUTZ
+#---------
+#ID:210
+#LASTNAME:LUTZ
+#---------
+#ID:220
+#LASTNAME:LUTZ
+#---------
+#ID:230
+#LASTNAME:LUTZ
+#---------
+#ID:240
+#LASTNAME:LUTZ
+#---------
+#ID:250
+#LASTNAME:LUTZ
+#---------
+#ID:260
+#LASTNAME:LUTZ
+#---------
+#ID:270
+#LASTNAME:LUTZ
+#---------
+#ID:280
+#LASTNAME:LUTZ
+#---------
+#ID:290
+#LASTNAME:LUTZ
+#---------
+#ID:300
+#LASTNAME:LUTZ
+#---------
+#ID:310
+#LASTNAME:LUTZ
+#---------
+#ID:320
+#LASTNAME:LUTZ
+#---------
+#ID:330
+#LASTNAME:LUTZ
+#---------
+#ID:340
+#LASTNAME:LUTZ
+#---------
+#ID:350
+#LASTNAME:LUTZ
+#---------
+#ID:10
+#LASTNAME:MARINO
+#---------
+#ID:20
+#LASTNAME:MARINO
+#---------
+#ID:30
+#LASTNAME:MARINO
+#---------
+#ID:40
+#LASTNAME:MARINO
+#---------
+#ID:50
+#LASTNAME:MARINO
+#---------
+#ID:60
+#LASTNAME:MARINO
+#---------
+#ID:70
+#LASTNAME:MARINO
+#---------
+#ID:80
+#LASTNAME:MARINO
+#---------
+#ID:90
+#LASTNAME:MARINO
+#---------
+#ID:100
+#LASTNAME:MARINO
+#---------
+#ID:110
+#LASTNAME:MARINO
+#---------
+#ID:120
+#LASTNAME:MARINO
+#---------
+#ID:130
+#LASTNAME:MARINO
+#---------
+#ID:140
+#LASTNAME:MARINO
+#---------
+#ID:150
+#LASTNAME:MARINO
+#---------
+#ID:160
+#LASTNAME:MARINO
+#---------
+#ID:170
+#LASTNAME:MARINO
+#---------
+#ID:180
+#LASTNAME:MARINO
+#---------
+#ID:190
+#LASTNAME:MARINO
+#---------
+#ID:200
+#LASTNAME:MARINO
+#---------
+#ID:210
+#LASTNAME:MARINO
+#---------
+#ID:220
+#LASTNAME:MARINO
+#---------
+#ID:230
+#LASTNAME:MARINO
+#---------
+#ID:240
+#LASTNAME:MARINO
+#---------
+#ID:250
+#LASTNAME:MARINO
+#---------
+#ID:260
+#LASTNAME:MARINO
+#---------
+#ID:270
+#LASTNAME:MARINO
+#---------
+#ID:280
+#LASTNAME:MARINO
+#---------
+#ID:290
+#LASTNAME:MARINO
+#---------
+#ID:300
+#LASTNAME:MARINO
+#---------
+#ID:310
+#LASTNAME:MARINO
+#---------
+#ID:320
+#LASTNAME:MARINO
+#---------
+#ID:330
+#LASTNAME:MARINO
+#---------
+#ID:340
+#LASTNAME:MARINO
+#---------
+#ID:350
+#LASTNAME:MARINO
+#---------
+#ID:10
+#LASTNAME:MEHTA
+#---------
+#ID:20
+#LASTNAME:MEHTA
+#---------
+#ID:30
+#LASTNAME:MEHTA
+#---------
+#ID:40
+#LASTNAME:MEHTA
+#---------
+#ID:50
+#LASTNAME:MEHTA
+#---------
+#ID:60
+#LASTNAME:MEHTA
+#---------
+#ID:70
+#LASTNAME:MEHTA
+#---------
+#ID:80
+#LASTNAME:MEHTA
+#---------
+#ID:90
+#LASTNAME:MEHTA
+#---------
+#ID:100
+#LASTNAME:MEHTA
+#---------
+#ID:110
+#LASTNAME:MEHTA
+#---------
+#ID:120
+#LASTNAME:MEHTA
+#---------
+#ID:130
+#LASTNAME:MEHTA
+#---------
+#ID:140
+#LASTNAME:MEHTA
+#---------
+#ID:150
+#LASTNAME:MEHTA
+#---------
+#ID:160
+#LASTNAME:MEHTA
+#---------
+#ID:170
+#LASTNAME:MEHTA
+#---------
+#ID:180
+#LASTNAME:MEHTA
+#---------
+#ID:190
+#LASTNAME:MEHTA
+#---------
+#ID:200
+#LASTNAME:MEHTA
+#---------
+#ID:210
+#LASTNAME:MEHTA
+#---------
+#ID:220
+#LASTNAME:MEHTA
+#---------
+#ID:230
+#LASTNAME:MEHTA
+#---------
+#ID:240
+#LASTNAME:MEHTA
+#---------
+#ID:250
+#LASTNAME:MEHTA
+#---------
+#ID:260
+#LASTNAME:MEHTA
+#---------
+#ID:270
+#LASTNAME:MEHTA
+#---------
+#ID:280
+#LASTNAME:MEHTA
+#---------
+#ID:290
+#LASTNAME:MEHTA
+#---------
+#ID:300
+#LASTNAME:MEHTA
+#---------
+#ID:310
+#LASTNAME:MEHTA
+#---------
+#ID:320
+#LASTNAME:MEHTA
+#---------
+#ID:330
+#LASTNAME:MEHTA
+#---------
+#ID:340
+#LASTNAME:MEHTA
+#---------
+#ID:350
+#LASTNAME:MEHTA
+#---------
+#ID:10
+#LASTNAME:NICHOLLS
+#---------
+#ID:20
+#LASTNAME:NICHOLLS
+#---------
+#ID:30
+#LASTNAME:NICHOLLS
+#---------
+#ID:40
+#LASTNAME:NICHOLLS
+#---------
+#ID:50
+#LASTNAME:NICHOLLS
+#---------
+#ID:60
+#LASTNAME:NICHOLLS
+#---------
+#ID:70
+#LASTNAME:NICHOLLS
+#---------
+#ID:80
+#LASTNAME:NICHOLLS
+#---------
+#ID:90
+#LASTNAME:NICHOLLS
+#---------
+#ID:100
+#LASTNAME:NICHOLLS
+#---------
+#ID:110
+#LASTNAME:NICHOLLS
+#---------
+#ID:120
+#LASTNAME:NICHOLLS
+#---------
+#ID:130
+#LASTNAME:NICHOLLS
+#---------
+#ID:140
+#LASTNAME:NICHOLLS
+#---------
+#ID:150
+#LASTNAME:NICHOLLS
+#---------
+#ID:160
+#LASTNAME:NICHOLLS
+#---------
+#ID:170
+#LASTNAME:NICHOLLS
+#---------
+#ID:180
+#LASTNAME:NICHOLLS
+#---------
+#ID:190
+#LASTNAME:NICHOLLS
+#---------
+#ID:200
+#LASTNAME:NICHOLLS
+#---------
+#ID:210
+#LASTNAME:NICHOLLS
+#---------
+#ID:220
+#LASTNAME:NICHOLLS
+#---------
+#ID:230
+#LASTNAME:NICHOLLS
+#---------
+#ID:240
+#LASTNAME:NICHOLLS
+#---------
+#ID:250
+#LASTNAME:NICHOLLS
+#---------
+#ID:260
+#LASTNAME:NICHOLLS
+#---------
+#ID:270
+#LASTNAME:NICHOLLS
+#---------
+#ID:280
+#LASTNAME:NICHOLLS
+#---------
+#ID:290
+#LASTNAME:NICHOLLS
+#---------
+#ID:300
+#LASTNAME:NICHOLLS
+#---------
+#ID:310
+#LASTNAME:NICHOLLS
+#---------
+#ID:320
+#LASTNAME:NICHOLLS
+#---------
+#ID:330
+#LASTNAME:NICHOLLS
+#---------
+#ID:340
+#LASTNAME:NICHOLLS
+#---------
+#ID:350
+#LASTNAME:NICHOLLS
+#---------
+#ID:10
+#LASTNAME:OCONNELL
+#---------
+#ID:20
+#LASTNAME:OCONNELL
+#---------
+#ID:30
+#LASTNAME:OCONNELL
+#---------
+#ID:40
+#LASTNAME:OCONNELL
+#---------
+#ID:50
+#LASTNAME:OCONNELL
+#---------
+#ID:60
+#LASTNAME:OCONNELL
+#---------
+#ID:70
+#LASTNAME:OCONNELL
+#---------
+#ID:80
+#LASTNAME:OCONNELL
+#---------
+#ID:90
+#LASTNAME:OCONNELL
+#---------
+#ID:100
+#LASTNAME:OCONNELL
+#---------
+#ID:110
+#LASTNAME:OCONNELL
+#---------
+#ID:120
+#LASTNAME:OCONNELL
+#---------
+#ID:130
+#LASTNAME:OCONNELL
+#---------
+#ID:140
+#LASTNAME:OCONNELL
+#---------
+#ID:150
+#LASTNAME:OCONNELL
+#---------
+#ID:160
+#LASTNAME:OCONNELL
+#---------
+#ID:170
+#LASTNAME:OCONNELL
+#---------
+#ID:180
+#LASTNAME:OCONNELL
+#---------
+#ID:190
+#LASTNAME:OCONNELL
+#---------
+#ID:200
+#LASTNAME:OCONNELL
+#---------
+#ID:210
+#LASTNAME:OCONNELL
+#---------
+#ID:220
+#LASTNAME:OCONNELL
+#---------
+#ID:230
+#LASTNAME:OCONNELL
+#---------
+#ID:240
+#LASTNAME:OCONNELL
+#---------
+#ID:250
+#LASTNAME:OCONNELL
+#---------
+#ID:260
+#LASTNAME:OCONNELL
+#---------
+#ID:270
+#LASTNAME:OCONNELL
+#---------
+#ID:280
+#LASTNAME:OCONNELL
+#---------
+#ID:290
+#LASTNAME:OCONNELL
+#---------
+#ID:300
+#LASTNAME:OCONNELL
+#---------
+#ID:310
+#LASTNAME:OCONNELL
+#---------
+#ID:320
+#LASTNAME:OCONNELL
+#---------
+#ID:330
+#LASTNAME:OCONNELL
+#---------
+#ID:340
+#LASTNAME:OCONNELL
+#---------
+#ID:350
+#LASTNAME:OCONNELL
+#---------
+#ID:10
+#LASTNAME:PARKER
+#---------
+#ID:20
+#LASTNAME:PARKER
+#---------
+#ID:30
+#LASTNAME:PARKER
+#---------
+#ID:40
+#LASTNAME:PARKER
+#---------
+#ID:50
+#LASTNAME:PARKER
+#---------
+#ID:60
+#LASTNAME:PARKER
+#---------
+#ID:70
+#LASTNAME:PARKER
+#---------
+#ID:80
+#LASTNAME:PARKER
+#---------
+#ID:90
+#LASTNAME:PARKER
+#---------
+#ID:100
+#LASTNAME:PARKER
+#---------
+#ID:110
+#LASTNAME:PARKER
+#---------
+#ID:120
+#LASTNAME:PARKER
+#---------
+#ID:130
+#LASTNAME:PARKER
+#---------
+#ID:140
+#LASTNAME:PARKER
+#---------
+#ID:150
+#LASTNAME:PARKER
+#---------
+#ID:160
+#LASTNAME:PARKER
+#---------
+#ID:170
+#LASTNAME:PARKER
+#---------
+#ID:180
+#LASTNAME:PARKER
+#---------
+#ID:190
+#LASTNAME:PARKER
+#---------
+#ID:200
+#LASTNAME:PARKER
+#---------
+#ID:210
+#LASTNAME:PARKER
+#---------
+#ID:220
+#LASTNAME:PARKER
+#---------
+#ID:230
+#LASTNAME:PARKER
+#---------
+#ID:240
+#LASTNAME:PARKER
+#---------
+#ID:250
+#LASTNAME:PARKER
+#---------
+#ID:260
+#LASTNAME:PARKER
+#---------
+#ID:270
+#LASTNAME:PARKER
+#---------
+#ID:280
+#LASTNAME:PARKER
+#---------
+#ID:290
+#LASTNAME:PARKER
+#---------
+#ID:300
+#LASTNAME:PARKER
+#---------
+#ID:310
+#LASTNAME:PARKER
+#---------
+#ID:320
+#LASTNAME:PARKER
+#---------
+#ID:330
+#LASTNAME:PARKER
+#---------
+#ID:340
+#LASTNAME:PARKER
+#---------
+#ID:350
+#LASTNAME:PARKER
+#---------
+#ID:10
+#LASTNAME:PEREZ
+#---------
+#ID:20
+#LASTNAME:PEREZ
+#---------
+#ID:30
+#LASTNAME:PEREZ
+#---------
+#ID:40
+#LASTNAME:PEREZ
+#---------
+#ID:50
+#LASTNAME:PEREZ
+#---------
+#ID:60
+#LASTNAME:PEREZ
+#---------
+#ID:70
+#LASTNAME:PEREZ
+#---------
+#ID:80
+#LASTNAME:PEREZ
+#---------
+#ID:90
+#LASTNAME:PEREZ
+#---------
+#ID:100
+#LASTNAME:PEREZ
+#---------
+#ID:110
+#LASTNAME:PEREZ
+#---------
+#ID:120
+#LASTNAME:PEREZ
+#---------
+#ID:130
+#LASTNAME:PEREZ
+#---------
+#ID:140
+#LASTNAME:PEREZ
+#---------
+#ID:150
+#LASTNAME:PEREZ
+#---------
+#ID:160
+#LASTNAME:PEREZ
+#---------
+#ID:170
+#LASTNAME:PEREZ
+#---------
+#ID:180
+#LASTNAME:PEREZ
+#---------
+#ID:190
+#LASTNAME:PEREZ
+#---------
+#ID:200
+#LASTNAME:PEREZ
+#---------
+#ID:210
+#LASTNAME:PEREZ
+#---------
+#ID:220
+#LASTNAME:PEREZ
+#---------
+#ID:230
+#LASTNAME:PEREZ
+#---------
+#ID:240
+#LASTNAME:PEREZ
+#---------
+#ID:250
+#LASTNAME:PEREZ
+#---------
+#ID:260
+#LASTNAME:PEREZ
+#---------
+#ID:270
+#LASTNAME:PEREZ
+#---------
+#ID:280
+#LASTNAME:PEREZ
+#---------
+#ID:290
+#LASTNAME:PEREZ
+#---------
+#ID:300
+#LASTNAME:PEREZ
+#---------
+#ID:310
+#LASTNAME:PEREZ
+#---------
+#ID:320
+#LASTNAME:PEREZ
+#---------
+#ID:330
+#LASTNAME:PEREZ
+#---------
+#ID:340
+#LASTNAME:PEREZ
+#---------
+#ID:350
+#LASTNAME:PEREZ
+#---------
+#ID:10
+#LASTNAME:PIANKA
+#---------
+#ID:20
+#LASTNAME:PIANKA
+#---------
+#ID:30
+#LASTNAME:PIANKA
+#---------
+#ID:40
+#LASTNAME:PIANKA
+#---------
+#ID:50
+#LASTNAME:PIANKA
+#---------
+#ID:60
+#LASTNAME:PIANKA
+#---------
+#ID:70
+#LASTNAME:PIANKA
+#---------
+#ID:80
+#LASTNAME:PIANKA
+#---------
+#ID:90
+#LASTNAME:PIANKA
+#---------
+#ID:100
+#LASTNAME:PIANKA
+#---------
+#ID:110
+#LASTNAME:PIANKA
+#---------
+#ID:120
+#LASTNAME:PIANKA
+#---------
+#ID:130
+#LASTNAME:PIANKA
+#---------
+#ID:140
+#LASTNAME:PIANKA
+#---------
+#ID:150
+#LASTNAME:PIANKA
+#---------
+#ID:160
+#LASTNAME:PIANKA
+#---------
+#ID:170
+#LASTNAME:PIANKA
+#---------
+#ID:180
+#LASTNAME:PIANKA
+#---------
+#ID:190
+#LASTNAME:PIANKA
+#---------
+#ID:200
+#LASTNAME:PIANKA
+#---------
+#ID:210
+#LASTNAME:PIANKA
+#---------
+#ID:220
+#LASTNAME:PIANKA
+#---------
+#ID:230
+#LASTNAME:PIANKA
+#---------
+#ID:240
+#LASTNAME:PIANKA
+#---------
+#ID:250
+#LASTNAME:PIANKA
+#---------
+#ID:260
+#LASTNAME:PIANKA
+#---------
+#ID:270
+#LASTNAME:PIANKA
+#---------
+#ID:280
+#LASTNAME:PIANKA
+#---------
+#ID:290
+#LASTNAME:PIANKA
+#---------
+#ID:300
+#LASTNAME:PIANKA
+#---------
+#ID:310
+#LASTNAME:PIANKA
+#---------
+#ID:320
+#LASTNAME:PIANKA
+#---------
+#ID:330
+#LASTNAME:PIANKA
+#---------
+#ID:340
+#LASTNAME:PIANKA
+#---------
+#ID:350
+#LASTNAME:PIANKA
+#---------
+#ID:10
+#LASTNAME:PULASKI
+#---------
+#ID:20
+#LASTNAME:PULASKI
+#---------
+#ID:30
+#LASTNAME:PULASKI
+#---------
+#ID:40
+#LASTNAME:PULASKI
+#---------
+#ID:50
+#LASTNAME:PULASKI
+#---------
+#ID:60
+#LASTNAME:PULASKI
+#---------
+#ID:70
+#LASTNAME:PULASKI
+#---------
+#ID:80
+#LASTNAME:PULASKI
+#---------
+#ID:90
+#LASTNAME:PULASKI
+#---------
+#ID:100
+#LASTNAME:PULASKI
+#---------
+#ID:110
+#LASTNAME:PULASKI
+#---------
+#ID:120
+#LASTNAME:PULASKI
+#---------
+#ID:130
+#LASTNAME:PULASKI
+#---------
+#ID:140
+#LASTNAME:PULASKI
+#---------
+#ID:150
+#LASTNAME:PULASKI
+#---------
+#ID:160
+#LASTNAME:PULASKI
+#---------
+#ID:170
+#LASTNAME:PULASKI
+#---------
+#ID:180
+#LASTNAME:PULASKI
+#---------
+#ID:190
+#LASTNAME:PULASKI
+#---------
+#ID:200
+#LASTNAME:PULASKI
+#---------
+#ID:210
+#LASTNAME:PULASKI
+#---------
+#ID:220
+#LASTNAME:PULASKI
+#---------
+#ID:230
+#LASTNAME:PULASKI
+#---------
+#ID:240
+#LASTNAME:PULASKI
+#---------
+#ID:250
+#LASTNAME:PULASKI
+#---------
+#ID:260
+#LASTNAME:PULASKI
+#---------
+#ID:270
+#LASTNAME:PULASKI
+#---------
+#ID:280
+#LASTNAME:PULASKI
+#---------
+#ID:290
+#LASTNAME:PULASKI
+#---------
+#ID:300
+#LASTNAME:PULASKI
+#---------
+#ID:310
+#LASTNAME:PULASKI
+#---------
+#ID:320
+#LASTNAME:PULASKI
+#---------
+#ID:330
+#LASTNAME:PULASKI
+#---------
+#ID:340
+#LASTNAME:PULASKI
+#---------
+#ID:350
+#LASTNAME:PULASKI
+#---------
+#ID:10
+#LASTNAME:QUINTANA
+#---------
+#ID:20
+#LASTNAME:QUINTANA
+#---------
+#ID:30
+#LASTNAME:QUINTANA
+#---------
+#ID:40
+#LASTNAME:QUINTANA
+#---------
+#ID:50
+#LASTNAME:QUINTANA
+#---------
+#ID:60
+#LASTNAME:QUINTANA
+#---------
+#ID:70
+#LASTNAME:QUINTANA
+#---------
+#ID:80
+#LASTNAME:QUINTANA
+#---------
+#ID:90
+#LASTNAME:QUINTANA
+#---------
+#ID:100
+#LASTNAME:QUINTANA
+#---------
+#ID:110
+#LASTNAME:QUINTANA
+#---------
+#ID:120
+#LASTNAME:QUINTANA
+#---------
+#ID:130
+#LASTNAME:QUINTANA
+#---------
+#ID:140
+#LASTNAME:QUINTANA
+#---------
+#ID:150
+#LASTNAME:QUINTANA
+#---------
+#ID:160
+#LASTNAME:QUINTANA
+#---------
+#ID:170
+#LASTNAME:QUINTANA
+#---------
+#ID:180
+#LASTNAME:QUINTANA
+#---------
+#ID:190
+#LASTNAME:QUINTANA
+#---------
+#ID:200
+#LASTNAME:QUINTANA
+#---------
+#ID:210
+#LASTNAME:QUINTANA
+#---------
+#ID:220
+#LASTNAME:QUINTANA
+#---------
+#ID:230
+#LASTNAME:QUINTANA
+#---------
+#ID:240
+#LASTNAME:QUINTANA
+#---------
+#ID:250
+#LASTNAME:QUINTANA
+#---------
+#ID:260
+#LASTNAME:QUINTANA
+#---------
+#ID:270
+#LASTNAME:QUINTANA
+#---------
+#ID:280
+#LASTNAME:QUINTANA
+#---------
+#ID:290
+#LASTNAME:QUINTANA
+#---------
+#ID:300
+#LASTNAME:QUINTANA
+#---------
+#ID:310
+#LASTNAME:QUINTANA
+#---------
+#ID:320
+#LASTNAME:QUINTANA
+#---------
+#ID:330
+#LASTNAME:QUINTANA
+#---------
+#ID:340
+#LASTNAME:QUINTANA
+#---------
+#ID:350
+#LASTNAME:QUINTANA
+#---------
+#ID:10
+#LASTNAME:SCHNEIDER
+#---------
+#ID:20
+#LASTNAME:SCHNEIDER
+#---------
+#ID:30
+#LASTNAME:SCHNEIDER
+#---------
+#ID:40
+#LASTNAME:SCHNEIDER
+#---------
+#ID:50
+#LASTNAME:SCHNEIDER
+#---------
+#ID:60
+#LASTNAME:SCHNEIDER
+#---------
+#ID:70
+#LASTNAME:SCHNEIDER
+#---------
+#ID:80
+#LASTNAME:SCHNEIDER
+#---------
+#ID:90
+#LASTNAME:SCHNEIDER
+#---------
+#ID:100
+#LASTNAME:SCHNEIDER
+#---------
+#ID:110
+#LASTNAME:SCHNEIDER
+#---------
+#ID:120
+#LASTNAME:SCHNEIDER
+#---------
+#ID:130
+#LASTNAME:SCHNEIDER
+#---------
+#ID:140
+#LASTNAME:SCHNEIDER
+#---------
+#ID:150
+#LASTNAME:SCHNEIDER
+#---------
+#ID:160
+#LASTNAME:SCHNEIDER
+#---------
+#ID:170
+#LASTNAME:SCHNEIDER
+#---------
+#ID:180
+#LASTNAME:SCHNEIDER
+#---------
+#ID:190
+#LASTNAME:SCHNEIDER
+#---------
+#ID:200
+#LASTNAME:SCHNEIDER
+#---------
+#ID:210
+#LASTNAME:SCHNEIDER
+#---------
+#ID:220
+#LASTNAME:SCHNEIDER
+#---------
+#ID:230
+#LASTNAME:SCHNEIDER
+#---------
+#ID:240
+#LASTNAME:SCHNEIDER
+#---------
+#ID:250
+#LASTNAME:SCHNEIDER
+#---------
+#ID:260
+#LASTNAME:SCHNEIDER
+#---------
+#ID:270
+#LASTNAME:SCHNEIDER
+#---------
+#ID:280
+#LASTNAME:SCHNEIDER
+#---------
+#ID:290
+#LASTNAME:SCHNEIDER
+#---------
+#ID:300
+#LASTNAME:SCHNEIDER
+#---------
+#ID:310
+#LASTNAME:SCHNEIDER
+#---------
+#ID:320
+#LASTNAME:SCHNEIDER
+#---------
+#ID:330
+#LASTNAME:SCHNEIDER
+#---------
+#ID:340
+#LASTNAME:SCHNEIDER
+#---------
+#ID:350
+#LASTNAME:SCHNEIDER
+#---------
+#ID:10
+#LASTNAME:SCOUTTEN
+#---------
+#ID:20
+#LASTNAME:SCOUTTEN
+#---------
+#ID:30
+#LASTNAME:SCOUTTEN
+#---------
+#ID:40
+#LASTNAME:SCOUTTEN
+#---------
+#ID:50
+#LASTNAME:SCOUTTEN
+#---------
+#ID:60
+#LASTNAME:SCOUTTEN
+#---------
+#ID:70
+#LASTNAME:SCOUTTEN
+#---------
+#ID:80
+#LASTNAME:SCOUTTEN
+#---------
+#ID:90
+#LASTNAME:SCOUTTEN
+#---------
+#ID:100
+#LASTNAME:SCOUTTEN
+#---------
+#ID:110
+#LASTNAME:SCOUTTEN
+#---------
+#ID:120
+#LASTNAME:SCOUTTEN
+#---------
+#ID:130
+#LASTNAME:SCOUTTEN
+#---------
+#ID:140
+#LASTNAME:SCOUTTEN
+#---------
+#ID:150
+#LASTNAME:SCOUTTEN
+#---------
+#ID:160
+#LASTNAME:SCOUTTEN
+#---------
+#ID:170
+#LASTNAME:SCOUTTEN
+#---------
+#ID:180
+#LASTNAME:SCOUTTEN
+#---------
+#ID:190
+#LASTNAME:SCOUTTEN
+#---------
+#ID:200
+#LASTNAME:SCOUTTEN
+#---------
+#ID:210
+#LASTNAME:SCOUTTEN
+#---------
+#ID:220
+#LASTNAME:SCOUTTEN
+#---------
+#ID:230
+#LASTNAME:SCOUTTEN
+#---------
+#ID:240
+#LASTNAME:SCOUTTEN
+#---------
+#ID:250
+#LASTNAME:SCOUTTEN
+#---------
+#ID:260
+#LASTNAME:SCOUTTEN
+#---------
+#ID:270
+#LASTNAME:SCOUTTEN
+#---------
+#ID:280
+#LASTNAME:SCOUTTEN
+#---------
+#ID:290
+#LASTNAME:SCOUTTEN
+#---------
+#ID:300
+#LASTNAME:SCOUTTEN
+#---------
+#ID:310
+#LASTNAME:SCOUTTEN
+#---------
+#ID:320
+#LASTNAME:SCOUTTEN
+#---------
+#ID:330
+#LASTNAME:SCOUTTEN
+#---------
+#ID:340
+#LASTNAME:SCOUTTEN
+#---------
+#ID:350
+#LASTNAME:SCOUTTEN
+#---------
+#ID:10
+#LASTNAME:SETRIGHT
+#---------
+#ID:20
+#LASTNAME:SETRIGHT
+#---------
+#ID:30
+#LASTNAME:SETRIGHT
+#---------
+#ID:40
+#LASTNAME:SETRIGHT
+#---------
+#ID:50
+#LASTNAME:SETRIGHT
+#---------
+#ID:60
+#LASTNAME:SETRIGHT
+#---------
+#ID:70
+#LASTNAME:SETRIGHT
+#---------
+#ID:80
+#LASTNAME:SETRIGHT
+#---------
+#ID:90
+#LASTNAME:SETRIGHT
+#---------
+#ID:100
+#LASTNAME:SETRIGHT
+#---------
+#ID:110
+#LASTNAME:SETRIGHT
+#---------
+#ID:120
+#LASTNAME:SETRIGHT
+#---------
+#ID:130
+#LASTNAME:SETRIGHT
+#---------
+#ID:140
+#LASTNAME:SETRIGHT
+#---------
+#ID:150
+#LASTNAME:SETRIGHT
+#---------
+#ID:160
+#LASTNAME:SETRIGHT
+#---------
+#ID:170
+#LASTNAME:SETRIGHT
+#---------
+#ID:180
+#LASTNAME:SETRIGHT
+#---------
+#ID:190
+#LASTNAME:SETRIGHT
+#---------
+#ID:200
+#LASTNAME:SETRIGHT
+#---------
+#ID:210
+#LASTNAME:SETRIGHT
+#---------
+#ID:220
+#LASTNAME:SETRIGHT
+#---------
+#ID:230
+#LASTNAME:SETRIGHT
+#---------
+#ID:240
+#LASTNAME:SETRIGHT
+#---------
+#ID:250
+#LASTNAME:SETRIGHT
+#---------
+#ID:260
+#LASTNAME:SETRIGHT
+#---------
+#ID:270
+#LASTNAME:SETRIGHT
+#---------
+#ID:280
+#LASTNAME:SETRIGHT
+#---------
+#ID:290
+#LASTNAME:SETRIGHT
+#---------
+#ID:300
+#LASTNAME:SETRIGHT
+#---------
+#ID:310
+#LASTNAME:SETRIGHT
+#---------
+#ID:320
+#LASTNAME:SETRIGHT
+#---------
+#ID:330
+#LASTNAME:SETRIGHT
+#---------
+#ID:340
+#LASTNAME:SETRIGHT
+#---------
+#ID:350
+#LASTNAME:SETRIGHT
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SPENSER
+#---------
+#ID:20
+#LASTNAME:SPENSER
+#---------
+#ID:30
+#LASTNAME:SPENSER
+#---------
+#ID:40
+#LASTNAME:SPENSER
+#---------
+#ID:50
+#LASTNAME:SPENSER
+#---------
+#ID:60
+#LASTNAME:SPENSER
+#---------
+#ID:70
+#LASTNAME:SPENSER
+#---------
+#ID:80
+#LASTNAME:SPENSER
+#---------
+#ID:90
+#LASTNAME:SPENSER
+#---------
+#ID:100
+#LASTNAME:SPENSER
+#---------
+#ID:110
+#LASTNAME:SPENSER
+#---------
+#ID:120
+#LASTNAME:SPENSER
+#---------
+#ID:130
+#LASTNAME:SPENSER
+#---------
+#ID:140
+#LASTNAME:SPENSER
+#---------
+#ID:150
+#LASTNAME:SPENSER
+#---------
+#ID:160
+#LASTNAME:SPENSER
+#---------
+#ID:170
+#LASTNAME:SPENSER
+#---------
+#ID:180
+#LASTNAME:SPENSER
+#---------
+#ID:190
+#LASTNAME:SPENSER
+#---------
+#ID:200
+#LASTNAME:SPENSER
+#---------
+#ID:210
+#LASTNAME:SPENSER
+#---------
+#ID:220
+#LASTNAME:SPENSER
+#---------
+#ID:230
+#LASTNAME:SPENSER
+#---------
+#ID:240
+#LASTNAME:SPENSER
+#---------
+#ID:250
+#LASTNAME:SPENSER
+#---------
+#ID:260
+#LASTNAME:SPENSER
+#---------
+#ID:270
+#LASTNAME:SPENSER
+#---------
+#ID:280
+#LASTNAME:SPENSER
+#---------
+#ID:290
+#LASTNAME:SPENSER
+#---------
+#ID:300
+#LASTNAME:SPENSER
+#---------
+#ID:310
+#LASTNAME:SPENSER
+#---------
+#ID:320
+#LASTNAME:SPENSER
+#---------
+#ID:330
+#LASTNAME:SPENSER
+#---------
+#ID:340
+#LASTNAME:SPENSER
+#---------
+#ID:350
+#LASTNAME:SPENSER
+#---------
+#ID:10
+#LASTNAME:STERN
+#---------
+#ID:20
+#LASTNAME:STERN
+#---------
+#ID:30
+#LASTNAME:STERN
+#---------
+#ID:40
+#LASTNAME:STERN
+#---------
+#ID:50
+#LASTNAME:STERN
+#---------
+#ID:60
+#LASTNAME:STERN
+#---------
+#ID:70
+#LASTNAME:STERN
+#---------
+#ID:80
+#LASTNAME:STERN
+#---------
+#ID:90
+#LASTNAME:STERN
+#---------
+#ID:100
+#LASTNAME:STERN
+#---------
+#ID:110
+#LASTNAME:STERN
+#---------
+#ID:120
+#LASTNAME:STERN
+#---------
+#ID:130
+#LASTNAME:STERN
+#---------
+#ID:140
+#LASTNAME:STERN
+#---------
+#ID:150
+#LASTNAME:STERN
+#---------
+#ID:160
+#LASTNAME:STERN
+#---------
+#ID:170
+#LASTNAME:STERN
+#---------
+#ID:180
+#LASTNAME:STERN
+#---------
+#ID:190
+#LASTNAME:STERN
+#---------
+#ID:200
+#LASTNAME:STERN
+#---------
+#ID:210
+#LASTNAME:STERN
+#---------
+#ID:220
+#LASTNAME:STERN
+#---------
+#ID:230
+#LASTNAME:STERN
+#---------
+#ID:240
+#LASTNAME:STERN
+#---------
+#ID:250
+#LASTNAME:STERN
+#---------
+#ID:260
+#LASTNAME:STERN
+#---------
+#ID:270
+#LASTNAME:STERN
+#---------
+#ID:280
+#LASTNAME:STERN
+#---------
+#ID:290
+#LASTNAME:STERN
+#---------
+#ID:300
+#LASTNAME:STERN
+#---------
+#ID:310
+#LASTNAME:STERN
+#---------
+#ID:320
+#LASTNAME:STERN
+#---------
+#ID:330
+#LASTNAME:STERN
+#---------
+#ID:340
+#LASTNAME:STERN
+#---------
+#ID:350
+#LASTNAME:STERN
+#---------
+#ID:10
+#LASTNAME:THOMPSON
+#---------
+#ID:20
+#LASTNAME:THOMPSON
+#---------
+#ID:30
+#LASTNAME:THOMPSON
+#---------
+#ID:40
+#LASTNAME:THOMPSON
+#---------
+#ID:50
+#LASTNAME:THOMPSON
+#---------
+#ID:60
+#LASTNAME:THOMPSON
+#---------
+#ID:70
+#LASTNAME:THOMPSON
+#---------
+#ID:80
+#LASTNAME:THOMPSON
+#---------
+#ID:90
+#LASTNAME:THOMPSON
+#---------
+#ID:100
+#LASTNAME:THOMPSON
+#---------
+#ID:110
+#LASTNAME:THOMPSON
+#---------
+#ID:120
+#LASTNAME:THOMPSON
+#---------
+#ID:130
+#LASTNAME:THOMPSON
+#---------
+#ID:140
+#LASTNAME:THOMPSON
+#---------
+#ID:150
+#LASTNAME:THOMPSON
+#---------
+#ID:160
+#LASTNAME:THOMPSON
+#---------
+#ID:170
+#LASTNAME:THOMPSON
+#---------
+#ID:180
+#LASTNAME:THOMPSON
+#---------
+#ID:190
+#LASTNAME:THOMPSON
+#---------
+#ID:200
+#LASTNAME:THOMPSON
+#---------
+#ID:210
+#LASTNAME:THOMPSON
+#---------
+#ID:220
+#LASTNAME:THOMPSON
+#---------
+#ID:230
+#LASTNAME:THOMPSON
+#---------
+#ID:240
+#LASTNAME:THOMPSON
+#---------
+#ID:250
+#LASTNAME:THOMPSON
+#---------
+#ID:260
+#LASTNAME:THOMPSON
+#---------
+#ID:270
+#LASTNAME:THOMPSON
+#---------
+#ID:280
+#LASTNAME:THOMPSON
+#---------
+#ID:290
+#LASTNAME:THOMPSON
+#---------
+#ID:300
+#LASTNAME:THOMPSON
+#---------
+#ID:310
+#LASTNAME:THOMPSON
+#---------
+#ID:320
+#LASTNAME:THOMPSON
+#---------
+#ID:330
+#LASTNAME:THOMPSON
+#---------
+#ID:340
+#LASTNAME:THOMPSON
+#---------
+#ID:350
+#LASTNAME:THOMPSON
+#---------
+#ID:10
+#LASTNAME:WALKER
+#---------
+#ID:20
+#LASTNAME:WALKER
+#---------
+#ID:30
+#LASTNAME:WALKER
+#---------
+#ID:40
+#LASTNAME:WALKER
+#---------
+#ID:50
+#LASTNAME:WALKER
+#---------
+#ID:60
+#LASTNAME:WALKER
+#---------
+#ID:70
+#LASTNAME:WALKER
+#---------
+#ID:80
+#LASTNAME:WALKER
+#---------
+#ID:90
+#LASTNAME:WALKER
+#---------
+#ID:100
+#LASTNAME:WALKER
+#---------
+#ID:110
+#LASTNAME:WALKER
+#---------
+#ID:120
+#LASTNAME:WALKER
+#---------
+#ID:130
+#LASTNAME:WALKER
+#---------
+#ID:140
+#LASTNAME:WALKER
+#---------
+#ID:150
+#LASTNAME:WALKER
+#---------
+#ID:160
+#LASTNAME:WALKER
+#---------
+#ID:170
+#LASTNAME:WALKER
+#---------
+#ID:180
+#LASTNAME:WALKER
+#---------
+#ID:190
+#LASTNAME:WALKER
+#---------
+#ID:200
+#LASTNAME:WALKER
+#---------
+#ID:210
+#LASTNAME:WALKER
+#---------
+#ID:220
+#LASTNAME:WALKER
+#---------
+#ID:230
+#LASTNAME:WALKER
+#---------
+#ID:240
+#LASTNAME:WALKER
+#---------
+#ID:250
+#LASTNAME:WALKER
+#---------
+#ID:260
+#LASTNAME:WALKER
+#---------
+#ID:270
+#LASTNAME:WALKER
+#---------
+#ID:280
+#LASTNAME:WALKER
+#---------
+#ID:290
+#LASTNAME:WALKER
+#---------
+#ID:300
+#LASTNAME:WALKER
+#---------
+#ID:310
+#LASTNAME:WALKER
+#---------
+#ID:320
+#LASTNAME:WALKER
+#---------
+#ID:330
+#LASTNAME:WALKER
+#---------
+#ID:340
+#LASTNAME:WALKER
+#---------
+#ID:350
+#LASTNAME:WALKER
+#---------
+#ID:10
+#LASTNAME:YOSHIMURA
+#---------
+#ID:20
+#LASTNAME:YOSHIMURA
+#---------
+#ID:30
+#LASTNAME:YOSHIMURA
+#---------
+#ID:40
+#LASTNAME:YOSHIMURA
+#---------
+#ID:50
+#LASTNAME:YOSHIMURA
+#---------
+#ID:60
+#LASTNAME:YOSHIMURA
+#---------
+#ID:70
+#LASTNAME:YOSHIMURA
+#---------
+#ID:80
+#LASTNAME:YOSHIMURA
+#---------
+#ID:90
+#LASTNAME:YOSHIMURA
+#---------
+#ID:100
+#LASTNAME:YOSHIMURA
+#---------
+#ID:110
+#LASTNAME:YOSHIMURA
+#---------
+#ID:120
+#LASTNAME:YOSHIMURA
+#---------
+#ID:130
+#LASTNAME:YOSHIMURA
+#---------
+#ID:140
+#LASTNAME:YOSHIMURA
+#---------
+#ID:150
+#LASTNAME:YOSHIMURA
+#---------
+#ID:160
+#LASTNAME:YOSHIMURA
+#---------
+#ID:170
+#LASTNAME:YOSHIMURA
+#---------
+#ID:180
+#LASTNAME:YOSHIMURA
+#---------
+#ID:190
+#LASTNAME:YOSHIMURA
+#---------
+#ID:200
+#LASTNAME:YOSHIMURA
+#---------
+#ID:210
+#LASTNAME:YOSHIMURA
+#---------
+#ID:220
+#LASTNAME:YOSHIMURA
+#---------
+#ID:230
+#LASTNAME:YOSHIMURA
+#---------
+#ID:240
+#LASTNAME:YOSHIMURA
+#---------
+#ID:250
+#LASTNAME:YOSHIMURA
+#---------
+#ID:260
+#LASTNAME:YOSHIMURA
+#---------
+#ID:270
+#LASTNAME:YOSHIMURA
+#---------
+#ID:280
+#LASTNAME:YOSHIMURA
+#---------
+#ID:290
+#LASTNAME:YOSHIMURA
+#---------
+#ID:300
+#LASTNAME:YOSHIMURA
+#---------
+#ID:310
+#LASTNAME:YOSHIMURA
+#---------
+#ID:320
+#LASTNAME:YOSHIMURA
+#---------
+#ID:330
+#LASTNAME:YOSHIMURA
+#---------
+#ID:340
+#LASTNAME:YOSHIMURA
+#---------
+#ID:350
+#LASTNAME:YOSHIMURA
+#---------
+#__SYSTEMI_EXPECTED__
+#
+#ID:10
+#LASTNAME:ADAMSON
+#---------
+#ID:20
+#LASTNAME:ADAMSON
+#---------
+#ID:30
+#LASTNAME:ADAMSON
+#---------
+#ID:40
+#LASTNAME:ADAMSON
+#---------
+#ID:50
+#LASTNAME:ADAMSON
+#---------
+#ID:60
+#LASTNAME:ADAMSON
+#---------
+#ID:70
+#LASTNAME:ADAMSON
+#---------
+#ID:80
+#LASTNAME:ADAMSON
+#---------
+#ID:90
+#LASTNAME:ADAMSON
+#---------
+#ID:100
+#LASTNAME:ADAMSON
+#---------
+#ID:110
+#LASTNAME:ADAMSON
+#---------
+#ID:120
+#LASTNAME:ADAMSON
+#---------
+#ID:130
+#LASTNAME:ADAMSON
+#---------
+#ID:140
+#LASTNAME:ADAMSON
+#---------
+#ID:150
+#LASTNAME:ADAMSON
+#---------
+#ID:160
+#LASTNAME:ADAMSON
+#---------
+#ID:170
+#LASTNAME:ADAMSON
+#---------
+#ID:180
+#LASTNAME:ADAMSON
+#---------
+#ID:190
+#LASTNAME:ADAMSON
+#---------
+#ID:200
+#LASTNAME:ADAMSON
+#---------
+#ID:210
+#LASTNAME:ADAMSON
+#---------
+#ID:220
+#LASTNAME:ADAMSON
+#---------
+#ID:230
+#LASTNAME:ADAMSON
+#---------
+#ID:240
+#LASTNAME:ADAMSON
+#---------
+#ID:250
+#LASTNAME:ADAMSON
+#---------
+#ID:260
+#LASTNAME:ADAMSON
+#---------
+#ID:270
+#LASTNAME:ADAMSON
+#---------
+#ID:280
+#LASTNAME:ADAMSON
+#---------
+#ID:290
+#LASTNAME:ADAMSON
+#---------
+#ID:300
+#LASTNAME:ADAMSON
+#---------
+#ID:310
+#LASTNAME:ADAMSON
+#---------
+#ID:320
+#LASTNAME:ADAMSON
+#---------
+#ID:330
+#LASTNAME:ADAMSON
+#---------
+#ID:340
+#LASTNAME:ADAMSON
+#---------
+#ID:350
+#LASTNAME:ADAMSON
+#---------
+#ID:10
+#LASTNAME:BROWN
+#---------
+#ID:20
+#LASTNAME:BROWN
+#---------
+#ID:30
+#LASTNAME:BROWN
+#---------
+#ID:40
+#LASTNAME:BROWN
+#---------
+#ID:50
+#LASTNAME:BROWN
+#---------
+#ID:60
+#LASTNAME:BROWN
+#---------
+#ID:70
+#LASTNAME:BROWN
+#---------
+#ID:80
+#LASTNAME:BROWN
+#---------
+#ID:90
+#LASTNAME:BROWN
+#---------
+#ID:100
+#LASTNAME:BROWN
+#---------
+#ID:110
+#LASTNAME:BROWN
+#---------
+#ID:120
+#LASTNAME:BROWN
+#---------
+#ID:130
+#LASTNAME:BROWN
+#---------
+#ID:140
+#LASTNAME:BROWN
+#---------
+#ID:150
+#LASTNAME:BROWN
+#---------
+#ID:160
+#LASTNAME:BROWN
+#---------
+#ID:170
+#LASTNAME:BROWN
+#---------
+#ID:180
+#LASTNAME:BROWN
+#---------
+#ID:190
+#LASTNAME:BROWN
+#---------
+#ID:200
+#LASTNAME:BROWN
+#---------
+#ID:210
+#LASTNAME:BROWN
+#---------
+#ID:220
+#LASTNAME:BROWN
+#---------
+#ID:230
+#LASTNAME:BROWN
+#---------
+#ID:240
+#LASTNAME:BROWN
+#---------
+#ID:250
+#LASTNAME:BROWN
+#---------
+#ID:260
+#LASTNAME:BROWN
+#---------
+#ID:270
+#LASTNAME:BROWN
+#---------
+#ID:280
+#LASTNAME:BROWN
+#---------
+#ID:290
+#LASTNAME:BROWN
+#---------
+#ID:300
+#LASTNAME:BROWN
+#---------
+#ID:310
+#LASTNAME:BROWN
+#---------
+#ID:320
+#LASTNAME:BROWN
+#---------
+#ID:330
+#LASTNAME:BROWN
+#---------
+#ID:340
+#LASTNAME:BROWN
+#---------
+#ID:350
+#LASTNAME:BROWN
+#---------
+#ID:10
+#LASTNAME:GEYER
+#---------
+#ID:20
+#LASTNAME:GEYER
+#---------
+#ID:30
+#LASTNAME:GEYER
+#---------
+#ID:40
+#LASTNAME:GEYER
+#---------
+#ID:50
+#LASTNAME:GEYER
+#---------
+#ID:60
+#LASTNAME:GEYER
+#---------
+#ID:70
+#LASTNAME:GEYER
+#---------
+#ID:80
+#LASTNAME:GEYER
+#---------
+#ID:90
+#LASTNAME:GEYER
+#---------
+#ID:100
+#LASTNAME:GEYER
+#---------
+#ID:110
+#LASTNAME:GEYER
+#---------
+#ID:120
+#LASTNAME:GEYER
+#---------
+#ID:130
+#LASTNAME:GEYER
+#---------
+#ID:140
+#LASTNAME:GEYER
+#---------
+#ID:150
+#LASTNAME:GEYER
+#---------
+#ID:160
+#LASTNAME:GEYER
+#---------
+#ID:170
+#LASTNAME:GEYER
+#---------
+#ID:180
+#LASTNAME:GEYER
+#---------
+#ID:190
+#LASTNAME:GEYER
+#---------
+#ID:200
+#LASTNAME:GEYER
+#---------
+#ID:210
+#LASTNAME:GEYER
+#---------
+#ID:220
+#LASTNAME:GEYER
+#---------
+#ID:230
+#LASTNAME:GEYER
+#---------
+#ID:240
+#LASTNAME:GEYER
+#---------
+#ID:250
+#LASTNAME:GEYER
+#---------
+#ID:260
+#LASTNAME:GEYER
+#---------
+#ID:270
+#LASTNAME:GEYER
+#---------
+#ID:280
+#LASTNAME:GEYER
+#---------
+#ID:290
+#LASTNAME:GEYER
+#---------
+#ID:300
+#LASTNAME:GEYER
+#---------
+#ID:310
+#LASTNAME:GEYER
+#---------
+#ID:320
+#LASTNAME:GEYER
+#---------
+#ID:330
+#LASTNAME:GEYER
+#---------
+#ID:340
+#LASTNAME:GEYER
+#---------
+#ID:350
+#LASTNAME:GEYER
+#---------
+#ID:10
+#LASTNAME:GOUNOT
+#---------
+#ID:20
+#LASTNAME:GOUNOT
+#---------
+#ID:30
+#LASTNAME:GOUNOT
+#---------
+#ID:40
+#LASTNAME:GOUNOT
+#---------
+#ID:50
+#LASTNAME:GOUNOT
+#---------
+#ID:60
+#LASTNAME:GOUNOT
+#---------
+#ID:70
+#LASTNAME:GOUNOT
+#---------
+#ID:80
+#LASTNAME:GOUNOT
+#---------
+#ID:90
+#LASTNAME:GOUNOT
+#---------
+#ID:100
+#LASTNAME:GOUNOT
+#---------
+#ID:110
+#LASTNAME:GOUNOT
+#---------
+#ID:120
+#LASTNAME:GOUNOT
+#---------
+#ID:130
+#LASTNAME:GOUNOT
+#---------
+#ID:140
+#LASTNAME:GOUNOT
+#---------
+#ID:150
+#LASTNAME:GOUNOT
+#---------
+#ID:160
+#LASTNAME:GOUNOT
+#---------
+#ID:170
+#LASTNAME:GOUNOT
+#---------
+#ID:180
+#LASTNAME:GOUNOT
+#---------
+#ID:190
+#LASTNAME:GOUNOT
+#---------
+#ID:200
+#LASTNAME:GOUNOT
+#---------
+#ID:210
+#LASTNAME:GOUNOT
+#---------
+#ID:220
+#LASTNAME:GOUNOT
+#---------
+#ID:230
+#LASTNAME:GOUNOT
+#---------
+#ID:240
+#LASTNAME:GOUNOT
+#---------
+#ID:250
+#LASTNAME:GOUNOT
+#---------
+#ID:260
+#LASTNAME:GOUNOT
+#---------
+#ID:270
+#LASTNAME:GOUNOT
+#---------
+#ID:280
+#LASTNAME:GOUNOT
+#---------
+#ID:290
+#LASTNAME:GOUNOT
+#---------
+#ID:300
+#LASTNAME:GOUNOT
+#---------
+#ID:310
+#LASTNAME:GOUNOT
+#---------
+#ID:320
+#LASTNAME:GOUNOT
+#---------
+#ID:330
+#LASTNAME:GOUNOT
+#---------
+#ID:340
+#LASTNAME:GOUNOT
+#---------
+#ID:350
+#LASTNAME:GOUNOT
+#---------
+#ID:10
+#LASTNAME:HAAS
+#---------
+#ID:20
+#LASTNAME:HAAS
+#---------
+#ID:30
+#LASTNAME:HAAS
+#---------
+#ID:40
+#LASTNAME:HAAS
+#---------
+#ID:50
+#LASTNAME:HAAS
+#---------
+#ID:60
+#LASTNAME:HAAS
+#---------
+#ID:70
+#LASTNAME:HAAS
+#---------
+#ID:80
+#LASTNAME:HAAS
+#---------
+#ID:90
+#LASTNAME:HAAS
+#---------
+#ID:100
+#LASTNAME:HAAS
+#---------
+#ID:110
+#LASTNAME:HAAS
+#---------
+#ID:120
+#LASTNAME:HAAS
+#---------
+#ID:130
+#LASTNAME:HAAS
+#---------
+#ID:140
+#LASTNAME:HAAS
+#---------
+#ID:150
+#LASTNAME:HAAS
+#---------
+#ID:160
+#LASTNAME:HAAS
+#---------
+#ID:170
+#LASTNAME:HAAS
+#---------
+#ID:180
+#LASTNAME:HAAS
+#---------
+#ID:190
+#LASTNAME:HAAS
+#---------
+#ID:200
+#LASTNAME:HAAS
+#---------
+#ID:210
+#LASTNAME:HAAS
+#---------
+#ID:220
+#LASTNAME:HAAS
+#---------
+#ID:230
+#LASTNAME:HAAS
+#---------
+#ID:240
+#LASTNAME:HAAS
+#---------
+#ID:250
+#LASTNAME:HAAS
+#---------
+#ID:260
+#LASTNAME:HAAS
+#---------
+#ID:270
+#LASTNAME:HAAS
+#---------
+#ID:280
+#LASTNAME:HAAS
+#---------
+#ID:290
+#LASTNAME:HAAS
+#---------
+#ID:300
+#LASTNAME:HAAS
+#---------
+#ID:310
+#LASTNAME:HAAS
+#---------
+#ID:320
+#LASTNAME:HAAS
+#---------
+#ID:330
+#LASTNAME:HAAS
+#---------
+#ID:340
+#LASTNAME:HAAS
+#---------
+#ID:350
+#LASTNAME:HAAS
+#---------
+#ID:10
+#LASTNAME:HENDERSON
+#---------
+#ID:20
+#LASTNAME:HENDERSON
+#---------
+#ID:30
+#LASTNAME:HENDERSON
+#---------
+#ID:40
+#LASTNAME:HENDERSON
+#---------
+#ID:50
+#LASTNAME:HENDERSON
+#---------
+#ID:60
+#LASTNAME:HENDERSON
+#---------
+#ID:70
+#LASTNAME:HENDERSON
+#---------
+#ID:80
+#LASTNAME:HENDERSON
+#---------
+#ID:90
+#LASTNAME:HENDERSON
+#---------
+#ID:100
+#LASTNAME:HENDERSON
+#---------
+#ID:110
+#LASTNAME:HENDERSON
+#---------
+#ID:120
+#LASTNAME:HENDERSON
+#---------
+#ID:130
+#LASTNAME:HENDERSON
+#---------
+#ID:140
+#LASTNAME:HENDERSON
+#---------
+#ID:150
+#LASTNAME:HENDERSON
+#---------
+#ID:160
+#LASTNAME:HENDERSON
+#---------
+#ID:170
+#LASTNAME:HENDERSON
+#---------
+#ID:180
+#LASTNAME:HENDERSON
+#---------
+#ID:190
+#LASTNAME:HENDERSON
+#---------
+#ID:200
+#LASTNAME:HENDERSON
+#---------
+#ID:210
+#LASTNAME:HENDERSON
+#---------
+#ID:220
+#LASTNAME:HENDERSON
+#---------
+#ID:230
+#LASTNAME:HENDERSON
+#---------
+#ID:240
+#LASTNAME:HENDERSON
+#---------
+#ID:250
+#LASTNAME:HENDERSON
+#---------
+#ID:260
+#LASTNAME:HENDERSON
+#---------
+#ID:270
+#LASTNAME:HENDERSON
+#---------
+#ID:280
+#LASTNAME:HENDERSON
+#---------
+#ID:290
+#LASTNAME:HENDERSON
+#---------
+#ID:300
+#LASTNAME:HENDERSON
+#---------
+#ID:310
+#LASTNAME:HENDERSON
+#---------
+#ID:320
+#LASTNAME:HENDERSON
+#---------
+#ID:330
+#LASTNAME:HENDERSON
+#---------
+#ID:340
+#LASTNAME:HENDERSON
+#---------
+#ID:350
+#LASTNAME:HENDERSON
+#---------
+#ID:10
+#LASTNAME:JEFFERSON
+#---------
+#ID:20
+#LASTNAME:JEFFERSON
+#---------
+#ID:30
+#LASTNAME:JEFFERSON
+#---------
+#ID:40
+#LASTNAME:JEFFERSON
+#---------
+#ID:50
+#LASTNAME:JEFFERSON
+#---------
+#ID:60
+#LASTNAME:JEFFERSON
+#---------
+#ID:70
+#LASTNAME:JEFFERSON
+#---------
+#ID:80
+#LASTNAME:JEFFERSON
+#---------
+#ID:90
+#LASTNAME:JEFFERSON
+#---------
+#ID:100
+#LASTNAME:JEFFERSON
+#---------
+#ID:110
+#LASTNAME:JEFFERSON
+#---------
+#ID:120
+#LASTNAME:JEFFERSON
+#---------
+#ID:130
+#LASTNAME:JEFFERSON
+#---------
+#ID:140
+#LASTNAME:JEFFERSON
+#---------
+#ID:150
+#LASTNAME:JEFFERSON
+#---------
+#ID:160
+#LASTNAME:JEFFERSON
+#---------
+#ID:170
+#LASTNAME:JEFFERSON
+#---------
+#ID:180
+#LASTNAME:JEFFERSON
+#---------
+#ID:190
+#LASTNAME:JEFFERSON
+#---------
+#ID:200
+#LASTNAME:JEFFERSON
+#---------
+#ID:210
+#LASTNAME:JEFFERSON
+#---------
+#ID:220
+#LASTNAME:JEFFERSON
+#---------
+#ID:230
+#LASTNAME:JEFFERSON
+#---------
+#ID:240
+#LASTNAME:JEFFERSON
+#---------
+#ID:250
+#LASTNAME:JEFFERSON
+#---------
+#ID:260
+#LASTNAME:JEFFERSON
+#---------
+#ID:270
+#LASTNAME:JEFFERSON
+#---------
+#ID:280
+#LASTNAME:JEFFERSON
+#---------
+#ID:290
+#LASTNAME:JEFFERSON
+#---------
+#ID:300
+#LASTNAME:JEFFERSON
+#---------
+#ID:310
+#LASTNAME:JEFFERSON
+#---------
+#ID:320
+#LASTNAME:JEFFERSON
+#---------
+#ID:330
+#LASTNAME:JEFFERSON
+#---------
+#ID:340
+#LASTNAME:JEFFERSON
+#---------
+#ID:350
+#LASTNAME:JEFFERSON
+#---------
+#ID:10
+#LASTNAME:JOHNSON
+#---------
+#ID:20
+#LASTNAME:JOHNSON
+#---------
+#ID:30
+#LASTNAME:JOHNSON
+#---------
+#ID:40
+#LASTNAME:JOHNSON
+#---------
+#ID:50
+#LASTNAME:JOHNSON
+#---------
+#ID:60
+#LASTNAME:JOHNSON
+#---------
+#ID:70
+#LASTNAME:JOHNSON
+#---------
+#ID:80
+#LASTNAME:JOHNSON
+#---------
+#ID:90
+#LASTNAME:JOHNSON
+#---------
+#ID:100
+#LASTNAME:JOHNSON
+#---------
+#ID:110
+#LASTNAME:JOHNSON
+#---------
+#ID:120
+#LASTNAME:JOHNSON
+#---------
+#ID:130
+#LASTNAME:JOHNSON
+#---------
+#ID:140
+#LASTNAME:JOHNSON
+#---------
+#ID:150
+#LASTNAME:JOHNSON
+#---------
+#ID:160
+#LASTNAME:JOHNSON
+#---------
+#ID:170
+#LASTNAME:JOHNSON
+#---------
+#ID:180
+#LASTNAME:JOHNSON
+#---------
+#ID:190
+#LASTNAME:JOHNSON
+#---------
+#ID:200
+#LASTNAME:JOHNSON
+#---------
+#ID:210
+#LASTNAME:JOHNSON
+#---------
+#ID:220
+#LASTNAME:JOHNSON
+#---------
+#ID:230
+#LASTNAME:JOHNSON
+#---------
+#ID:240
+#LASTNAME:JOHNSON
+#---------
+#ID:250
+#LASTNAME:JOHNSON
+#---------
+#ID:260
+#LASTNAME:JOHNSON
+#---------
+#ID:270
+#LASTNAME:JOHNSON
+#---------
+#ID:280
+#LASTNAME:JOHNSON
+#---------
+#ID:290
+#LASTNAME:JOHNSON
+#---------
+#ID:300
+#LASTNAME:JOHNSON
+#---------
+#ID:310
+#LASTNAME:JOHNSON
+#---------
+#ID:320
+#LASTNAME:JOHNSON
+#---------
+#ID:330
+#LASTNAME:JOHNSON
+#---------
+#ID:340
+#LASTNAME:JOHNSON
+#---------
+#ID:350
+#LASTNAME:JOHNSON
+#---------
+#ID:10
+#LASTNAME:JONES
+#---------
+#ID:20
+#LASTNAME:JONES
+#---------
+#ID:30
+#LASTNAME:JONES
+#---------
+#ID:40
+#LASTNAME:JONES
+#---------
+#ID:50
+#LASTNAME:JONES
+#---------
+#ID:60
+#LASTNAME:JONES
+#---------
+#ID:70
+#LASTNAME:JONES
+#---------
+#ID:80
+#LASTNAME:JONES
+#---------
+#ID:90
+#LASTNAME:JONES
+#---------
+#ID:100
+#LASTNAME:JONES
+#---------
+#ID:110
+#LASTNAME:JONES
+#---------
+#ID:120
+#LASTNAME:JONES
+#---------
+#ID:130
+#LASTNAME:JONES
+#---------
+#ID:140
+#LASTNAME:JONES
+#---------
+#ID:150
+#LASTNAME:JONES
+#---------
+#ID:160
+#LASTNAME:JONES
+#---------
+#ID:170
+#LASTNAME:JONES
+#---------
+#ID:180
+#LASTNAME:JONES
+#---------
+#ID:190
+#LASTNAME:JONES
+#---------
+#ID:200
+#LASTNAME:JONES
+#---------
+#ID:210
+#LASTNAME:JONES
+#---------
+#ID:220
+#LASTNAME:JONES
+#---------
+#ID:230
+#LASTNAME:JONES
+#---------
+#ID:240
+#LASTNAME:JONES
+#---------
+#ID:250
+#LASTNAME:JONES
+#---------
+#ID:260
+#LASTNAME:JONES
+#---------
+#ID:270
+#LASTNAME:JONES
+#---------
+#ID:280
+#LASTNAME:JONES
+#---------
+#ID:290
+#LASTNAME:JONES
+#---------
+#ID:300
+#LASTNAME:JONES
+#---------
+#ID:310
+#LASTNAME:JONES
+#---------
+#ID:320
+#LASTNAME:JONES
+#---------
+#ID:330
+#LASTNAME:JONES
+#---------
+#ID:340
+#LASTNAME:JONES
+#---------
+#ID:350
+#LASTNAME:JONES
+#---------
+#ID:10
+#LASTNAME:KWAN
+#---------
+#ID:20
+#LASTNAME:KWAN
+#---------
+#ID:30
+#LASTNAME:KWAN
+#---------
+#ID:40
+#LASTNAME:KWAN
+#---------
+#ID:50
+#LASTNAME:KWAN
+#---------
+#ID:60
+#LASTNAME:KWAN
+#---------
+#ID:70
+#LASTNAME:KWAN
+#---------
+#ID:80
+#LASTNAME:KWAN
+#---------
+#ID:90
+#LASTNAME:KWAN
+#---------
+#ID:100
+#LASTNAME:KWAN
+#---------
+#ID:110
+#LASTNAME:KWAN
+#---------
+#ID:120
+#LASTNAME:KWAN
+#---------
+#ID:130
+#LASTNAME:KWAN
+#---------
+#ID:140
+#LASTNAME:KWAN
+#---------
+#ID:150
+#LASTNAME:KWAN
+#---------
+#ID:160
+#LASTNAME:KWAN
+#---------
+#ID:170
+#LASTNAME:KWAN
+#---------
+#ID:180
+#LASTNAME:KWAN
+#---------
+#ID:190
+#LASTNAME:KWAN
+#---------
+#ID:200
+#LASTNAME:KWAN
+#---------
+#ID:210
+#LASTNAME:KWAN
+#---------
+#ID:220
+#LASTNAME:KWAN
+#---------
+#ID:230
+#LASTNAME:KWAN
+#---------
+#ID:240
+#LASTNAME:KWAN
+#---------
+#ID:250
+#LASTNAME:KWAN
+#---------
+#ID:260
+#LASTNAME:KWAN
+#---------
+#ID:270
+#LASTNAME:KWAN
+#---------
+#ID:280
+#LASTNAME:KWAN
+#---------
+#ID:290
+#LASTNAME:KWAN
+#---------
+#ID:300
+#LASTNAME:KWAN
+#---------
+#ID:310
+#LASTNAME:KWAN
+#---------
+#ID:320
+#LASTNAME:KWAN
+#---------
+#ID:330
+#LASTNAME:KWAN
+#---------
+#ID:340
+#LASTNAME:KWAN
+#---------
+#ID:350
+#LASTNAME:KWAN
+#---------
+#ID:10
+#LASTNAME:LEE
+#---------
+#ID:20
+#LASTNAME:LEE
+#---------
+#ID:30
+#LASTNAME:LEE
+#---------
+#ID:40
+#LASTNAME:LEE
+#---------
+#ID:50
+#LASTNAME:LEE
+#---------
+#ID:60
+#LASTNAME:LEE
+#---------
+#ID:70
+#LASTNAME:LEE
+#---------
+#ID:80
+#LASTNAME:LEE
+#---------
+#ID:90
+#LASTNAME:LEE
+#---------
+#ID:100
+#LASTNAME:LEE
+#---------
+#ID:110
+#LASTNAME:LEE
+#---------
+#ID:120
+#LASTNAME:LEE
+#---------
+#ID:130
+#LASTNAME:LEE
+#---------
+#ID:140
+#LASTNAME:LEE
+#---------
+#ID:150
+#LASTNAME:LEE
+#---------
+#ID:160
+#LASTNAME:LEE
+#---------
+#ID:170
+#LASTNAME:LEE
+#---------
+#ID:180
+#LASTNAME:LEE
+#---------
+#ID:190
+#LASTNAME:LEE
+#---------
+#ID:200
+#LASTNAME:LEE
+#---------
+#ID:210
+#LASTNAME:LEE
+#---------
+#ID:220
+#LASTNAME:LEE
+#---------
+#ID:230
+#LASTNAME:LEE
+#---------
+#ID:240
+#LASTNAME:LEE
+#---------
+#ID:250
+#LASTNAME:LEE
+#---------
+#ID:260
+#LASTNAME:LEE
+#---------
+#ID:270
+#LASTNAME:LEE
+#---------
+#ID:280
+#LASTNAME:LEE
+#---------
+#ID:290
+#LASTNAME:LEE
+#---------
+#ID:300
+#LASTNAME:LEE
+#---------
+#ID:310
+#LASTNAME:LEE
+#---------
+#ID:320
+#LASTNAME:LEE
+#---------
+#ID:330
+#LASTNAME:LEE
+#---------
+#ID:340
+#LASTNAME:LEE
+#---------
+#ID:350
+#LASTNAME:LEE
+#---------
+#ID:10
+#LASTNAME:LUCCHESSI
+#---------
+#ID:20
+#LASTNAME:LUCCHESSI
+#---------
+#ID:30
+#LASTNAME:LUCCHESSI
+#---------
+#ID:40
+#LASTNAME:LUCCHESSI
+#---------
+#ID:50
+#LASTNAME:LUCCHESSI
+#---------
+#ID:60
+#LASTNAME:LUCCHESSI
+#---------
+#ID:70
+#LASTNAME:LUCCHESSI
+#---------
+#ID:80
+#LASTNAME:LUCCHESSI
+#---------
+#ID:90
+#LASTNAME:LUCCHESSI
+#---------
+#ID:100
+#LASTNAME:LUCCHESSI
+#---------
+#ID:110
+#LASTNAME:LUCCHESSI
+#---------
+#ID:120
+#LASTNAME:LUCCHESSI
+#---------
+#ID:130
+#LASTNAME:LUCCHESSI
+#---------
+#ID:140
+#LASTNAME:LUCCHESSI
+#---------
+#ID:150
+#LASTNAME:LUCCHESSI
+#---------
+#ID:160
+#LASTNAME:LUCCHESSI
+#---------
+#ID:170
+#LASTNAME:LUCCHESSI
+#---------
+#ID:180
+#LASTNAME:LUCCHESSI
+#---------
+#ID:190
+#LASTNAME:LUCCHESSI
+#---------
+#ID:200
+#LASTNAME:LUCCHESSI
+#---------
+#ID:210
+#LASTNAME:LUCCHESSI
+#---------
+#ID:220
+#LASTNAME:LUCCHESSI
+#---------
+#ID:230
+#LASTNAME:LUCCHESSI
+#---------
+#ID:240
+#LASTNAME:LUCCHESSI
+#---------
+#ID:250
+#LASTNAME:LUCCHESSI
+#---------
+#ID:260
+#LASTNAME:LUCCHESSI
+#---------
+#ID:270
+#LASTNAME:LUCCHESSI
+#---------
+#ID:280
+#LASTNAME:LUCCHESSI
+#---------
+#ID:290
+#LASTNAME:LUCCHESSI
+#---------
+#ID:300
+#LASTNAME:LUCCHESSI
+#---------
+#ID:310
+#LASTNAME:LUCCHESSI
+#---------
+#ID:320
+#LASTNAME:LUCCHESSI
+#---------
+#ID:330
+#LASTNAME:LUCCHESSI
+#---------
+#ID:340
+#LASTNAME:LUCCHESSI
+#---------
+#ID:350
+#LASTNAME:LUCCHESSI
+#---------
+#ID:10
+#LASTNAME:LUTZ
+#---------
+#ID:20
+#LASTNAME:LUTZ
+#---------
+#ID:30
+#LASTNAME:LUTZ
+#---------
+#ID:40
+#LASTNAME:LUTZ
+#---------
+#ID:50
+#LASTNAME:LUTZ
+#---------
+#ID:60
+#LASTNAME:LUTZ
+#---------
+#ID:70
+#LASTNAME:LUTZ
+#---------
+#ID:80
+#LASTNAME:LUTZ
+#---------
+#ID:90
+#LASTNAME:LUTZ
+#---------
+#ID:100
+#LASTNAME:LUTZ
+#---------
+#ID:110
+#LASTNAME:LUTZ
+#---------
+#ID:120
+#LASTNAME:LUTZ
+#---------
+#ID:130
+#LASTNAME:LUTZ
+#---------
+#ID:140
+#LASTNAME:LUTZ
+#---------
+#ID:150
+#LASTNAME:LUTZ
+#---------
+#ID:160
+#LASTNAME:LUTZ
+#---------
+#ID:170
+#LASTNAME:LUTZ
+#---------
+#ID:180
+#LASTNAME:LUTZ
+#---------
+#ID:190
+#LASTNAME:LUTZ
+#---------
+#ID:200
+#LASTNAME:LUTZ
+#---------
+#ID:210
+#LASTNAME:LUTZ
+#---------
+#ID:220
+#LASTNAME:LUTZ
+#---------
+#ID:230
+#LASTNAME:LUTZ
+#---------
+#ID:240
+#LASTNAME:LUTZ
+#---------
+#ID:250
+#LASTNAME:LUTZ
+#---------
+#ID:260
+#LASTNAME:LUTZ
+#---------
+#ID:270
+#LASTNAME:LUTZ
+#---------
+#ID:280
+#LASTNAME:LUTZ
+#---------
+#ID:290
+#LASTNAME:LUTZ
+#---------
+#ID:300
+#LASTNAME:LUTZ
+#---------
+#ID:310
+#LASTNAME:LUTZ
+#---------
+#ID:320
+#LASTNAME:LUTZ
+#---------
+#ID:330
+#LASTNAME:LUTZ
+#---------
+#ID:340
+#LASTNAME:LUTZ
+#---------
+#ID:350
+#LASTNAME:LUTZ
+#---------
+#ID:10
+#LASTNAME:MARINO
+#---------
+#ID:20
+#LASTNAME:MARINO
+#---------
+#ID:30
+#LASTNAME:MARINO
+#---------
+#ID:40
+#LASTNAME:MARINO
+#---------
+#ID:50
+#LASTNAME:MARINO
+#---------
+#ID:60
+#LASTNAME:MARINO
+#---------
+#ID:70
+#LASTNAME:MARINO
+#---------
+#ID:80
+#LASTNAME:MARINO
+#---------
+#ID:90
+#LASTNAME:MARINO
+#---------
+#ID:100
+#LASTNAME:MARINO
+#---------
+#ID:110
+#LASTNAME:MARINO
+#---------
+#ID:120
+#LASTNAME:MARINO
+#---------
+#ID:130
+#LASTNAME:MARINO
+#---------
+#ID:140
+#LASTNAME:MARINO
+#---------
+#ID:150
+#LASTNAME:MARINO
+#---------
+#ID:160
+#LASTNAME:MARINO
+#---------
+#ID:170
+#LASTNAME:MARINO
+#---------
+#ID:180
+#LASTNAME:MARINO
+#---------
+#ID:190
+#LASTNAME:MARINO
+#---------
+#ID:200
+#LASTNAME:MARINO
+#---------
+#ID:210
+#LASTNAME:MARINO
+#---------
+#ID:220
+#LASTNAME:MARINO
+#---------
+#ID:230
+#LASTNAME:MARINO
+#---------
+#ID:240
+#LASTNAME:MARINO
+#---------
+#ID:250
+#LASTNAME:MARINO
+#---------
+#ID:260
+#LASTNAME:MARINO
+#---------
+#ID:270
+#LASTNAME:MARINO
+#---------
+#ID:280
+#LASTNAME:MARINO
+#---------
+#ID:290
+#LASTNAME:MARINO
+#---------
+#ID:300
+#LASTNAME:MARINO
+#---------
+#ID:310
+#LASTNAME:MARINO
+#---------
+#ID:320
+#LASTNAME:MARINO
+#---------
+#ID:330
+#LASTNAME:MARINO
+#---------
+#ID:340
+#LASTNAME:MARINO
+#---------
+#ID:350
+#LASTNAME:MARINO
+#---------
+#ID:10
+#LASTNAME:MEHTA
+#---------
+#ID:20
+#LASTNAME:MEHTA
+#---------
+#ID:30
+#LASTNAME:MEHTA
+#---------
+#ID:40
+#LASTNAME:MEHTA
+#---------
+#ID:50
+#LASTNAME:MEHTA
+#---------
+#ID:60
+#LASTNAME:MEHTA
+#---------
+#ID:70
+#LASTNAME:MEHTA
+#---------
+#ID:80
+#LASTNAME:MEHTA
+#---------
+#ID:90
+#LASTNAME:MEHTA
+#---------
+#ID:100
+#LASTNAME:MEHTA
+#---------
+#ID:110
+#LASTNAME:MEHTA
+#---------
+#ID:120
+#LASTNAME:MEHTA
+#---------
+#ID:130
+#LASTNAME:MEHTA
+#---------
+#ID:140
+#LASTNAME:MEHTA
+#---------
+#ID:150
+#LASTNAME:MEHTA
+#---------
+#ID:160
+#LASTNAME:MEHTA
+#---------
+#ID:170
+#LASTNAME:MEHTA
+#---------
+#ID:180
+#LASTNAME:MEHTA
+#---------
+#ID:190
+#LASTNAME:MEHTA
+#---------
+#ID:200
+#LASTNAME:MEHTA
+#---------
+#ID:210
+#LASTNAME:MEHTA
+#---------
+#ID:220
+#LASTNAME:MEHTA
+#---------
+#ID:230
+#LASTNAME:MEHTA
+#---------
+#ID:240
+#LASTNAME:MEHTA
+#---------
+#ID:250
+#LASTNAME:MEHTA
+#---------
+#ID:260
+#LASTNAME:MEHTA
+#---------
+#ID:270
+#LASTNAME:MEHTA
+#---------
+#ID:280
+#LASTNAME:MEHTA
+#---------
+#ID:290
+#LASTNAME:MEHTA
+#---------
+#ID:300
+#LASTNAME:MEHTA
+#---------
+#ID:310
+#LASTNAME:MEHTA
+#---------
+#ID:320
+#LASTNAME:MEHTA
+#---------
+#ID:330
+#LASTNAME:MEHTA
+#---------
+#ID:340
+#LASTNAME:MEHTA
+#---------
+#ID:350
+#LASTNAME:MEHTA
+#---------
+#ID:10
+#LASTNAME:NICHOLLS
+#---------
+#ID:20
+#LASTNAME:NICHOLLS
+#---------
+#ID:30
+#LASTNAME:NICHOLLS
+#---------
+#ID:40
+#LASTNAME:NICHOLLS
+#---------
+#ID:50
+#LASTNAME:NICHOLLS
+#---------
+#ID:60
+#LASTNAME:NICHOLLS
+#---------
+#ID:70
+#LASTNAME:NICHOLLS
+#---------
+#ID:80
+#LASTNAME:NICHOLLS
+#---------
+#ID:90
+#LASTNAME:NICHOLLS
+#---------
+#ID:100
+#LASTNAME:NICHOLLS
+#---------
+#ID:110
+#LASTNAME:NICHOLLS
+#---------
+#ID:120
+#LASTNAME:NICHOLLS
+#---------
+#ID:130
+#LASTNAME:NICHOLLS
+#---------
+#ID:140
+#LASTNAME:NICHOLLS
+#---------
+#ID:150
+#LASTNAME:NICHOLLS
+#---------
+#ID:160
+#LASTNAME:NICHOLLS
+#---------
+#ID:170
+#LASTNAME:NICHOLLS
+#---------
+#ID:180
+#LASTNAME:NICHOLLS
+#---------
+#ID:190
+#LASTNAME:NICHOLLS
+#---------
+#ID:200
+#LASTNAME:NICHOLLS
+#---------
+#ID:210
+#LASTNAME:NICHOLLS
+#---------
+#ID:220
+#LASTNAME:NICHOLLS
+#---------
+#ID:230
+#LASTNAME:NICHOLLS
+#---------
+#ID:240
+#LASTNAME:NICHOLLS
+#---------
+#ID:250
+#LASTNAME:NICHOLLS
+#---------
+#ID:260
+#LASTNAME:NICHOLLS
+#---------
+#ID:270
+#LASTNAME:NICHOLLS
+#---------
+#ID:280
+#LASTNAME:NICHOLLS
+#---------
+#ID:290
+#LASTNAME:NICHOLLS
+#---------
+#ID:300
+#LASTNAME:NICHOLLS
+#---------
+#ID:310
+#LASTNAME:NICHOLLS
+#---------
+#ID:320
+#LASTNAME:NICHOLLS
+#---------
+#ID:330
+#LASTNAME:NICHOLLS
+#---------
+#ID:340
+#LASTNAME:NICHOLLS
+#---------
+#ID:350
+#LASTNAME:NICHOLLS
+#---------
+#ID:10
+#LASTNAME:OCONNELL
+#---------
+#ID:20
+#LASTNAME:OCONNELL
+#---------
+#ID:30
+#LASTNAME:OCONNELL
+#---------
+#ID:40
+#LASTNAME:OCONNELL
+#---------
+#ID:50
+#LASTNAME:OCONNELL
+#---------
+#ID:60
+#LASTNAME:OCONNELL
+#---------
+#ID:70
+#LASTNAME:OCONNELL
+#---------
+#ID:80
+#LASTNAME:OCONNELL
+#---------
+#ID:90
+#LASTNAME:OCONNELL
+#---------
+#ID:100
+#LASTNAME:OCONNELL
+#---------
+#ID:110
+#LASTNAME:OCONNELL
+#---------
+#ID:120
+#LASTNAME:OCONNELL
+#---------
+#ID:130
+#LASTNAME:OCONNELL
+#---------
+#ID:140
+#LASTNAME:OCONNELL
+#---------
+#ID:150
+#LASTNAME:OCONNELL
+#---------
+#ID:160
+#LASTNAME:OCONNELL
+#---------
+#ID:170
+#LASTNAME:OCONNELL
+#---------
+#ID:180
+#LASTNAME:OCONNELL
+#---------
+#ID:190
+#LASTNAME:OCONNELL
+#---------
+#ID:200
+#LASTNAME:OCONNELL
+#---------
+#ID:210
+#LASTNAME:OCONNELL
+#---------
+#ID:220
+#LASTNAME:OCONNELL
+#---------
+#ID:230
+#LASTNAME:OCONNELL
+#---------
+#ID:240
+#LASTNAME:OCONNELL
+#---------
+#ID:250
+#LASTNAME:OCONNELL
+#---------
+#ID:260
+#LASTNAME:OCONNELL
+#---------
+#ID:270
+#LASTNAME:OCONNELL
+#---------
+#ID:280
+#LASTNAME:OCONNELL
+#---------
+#ID:290
+#LASTNAME:OCONNELL
+#---------
+#ID:300
+#LASTNAME:OCONNELL
+#---------
+#ID:310
+#LASTNAME:OCONNELL
+#---------
+#ID:320
+#LASTNAME:OCONNELL
+#---------
+#ID:330
+#LASTNAME:OCONNELL
+#---------
+#ID:340
+#LASTNAME:OCONNELL
+#---------
+#ID:350
+#LASTNAME:OCONNELL
+#---------
+#ID:10
+#LASTNAME:PARKER
+#---------
+#ID:20
+#LASTNAME:PARKER
+#---------
+#ID:30
+#LASTNAME:PARKER
+#---------
+#ID:40
+#LASTNAME:PARKER
+#---------
+#ID:50
+#LASTNAME:PARKER
+#---------
+#ID:60
+#LASTNAME:PARKER
+#---------
+#ID:70
+#LASTNAME:PARKER
+#---------
+#ID:80
+#LASTNAME:PARKER
+#---------
+#ID:90
+#LASTNAME:PARKER
+#---------
+#ID:100
+#LASTNAME:PARKER
+#---------
+#ID:110
+#LASTNAME:PARKER
+#---------
+#ID:120
+#LASTNAME:PARKER
+#---------
+#ID:130
+#LASTNAME:PARKER
+#---------
+#ID:140
+#LASTNAME:PARKER
+#---------
+#ID:150
+#LASTNAME:PARKER
+#---------
+#ID:160
+#LASTNAME:PARKER
+#---------
+#ID:170
+#LASTNAME:PARKER
+#---------
+#ID:180
+#LASTNAME:PARKER
+#---------
+#ID:190
+#LASTNAME:PARKER
+#---------
+#ID:200
+#LASTNAME:PARKER
+#---------
+#ID:210
+#LASTNAME:PARKER
+#---------
+#ID:220
+#LASTNAME:PARKER
+#---------
+#ID:230
+#LASTNAME:PARKER
+#---------
+#ID:240
+#LASTNAME:PARKER
+#---------
+#ID:250
+#LASTNAME:PARKER
+#---------
+#ID:260
+#LASTNAME:PARKER
+#---------
+#ID:270
+#LASTNAME:PARKER
+#---------
+#ID:280
+#LASTNAME:PARKER
+#---------
+#ID:290
+#LASTNAME:PARKER
+#---------
+#ID:300
+#LASTNAME:PARKER
+#---------
+#ID:310
+#LASTNAME:PARKER
+#---------
+#ID:320
+#LASTNAME:PARKER
+#---------
+#ID:330
+#LASTNAME:PARKER
+#---------
+#ID:340
+#LASTNAME:PARKER
+#---------
+#ID:350
+#LASTNAME:PARKER
+#---------
+#ID:10
+#LASTNAME:PEREZ
+#---------
+#ID:20
+#LASTNAME:PEREZ
+#---------
+#ID:30
+#LASTNAME:PEREZ
+#---------
+#ID:40
+#LASTNAME:PEREZ
+#---------
+#ID:50
+#LASTNAME:PEREZ
+#---------
+#ID:60
+#LASTNAME:PEREZ
+#---------
+#ID:70
+#LASTNAME:PEREZ
+#---------
+#ID:80
+#LASTNAME:PEREZ
+#---------
+#ID:90
+#LASTNAME:PEREZ
+#---------
+#ID:100
+#LASTNAME:PEREZ
+#---------
+#ID:110
+#LASTNAME:PEREZ
+#---------
+#ID:120
+#LASTNAME:PEREZ
+#---------
+#ID:130
+#LASTNAME:PEREZ
+#---------
+#ID:140
+#LASTNAME:PEREZ
+#---------
+#ID:150
+#LASTNAME:PEREZ
+#---------
+#ID:160
+#LASTNAME:PEREZ
+#---------
+#ID:170
+#LASTNAME:PEREZ
+#---------
+#ID:180
+#LASTNAME:PEREZ
+#---------
+#ID:190
+#LASTNAME:PEREZ
+#---------
+#ID:200
+#LASTNAME:PEREZ
+#---------
+#ID:210
+#LASTNAME:PEREZ
+#---------
+#ID:220
+#LASTNAME:PEREZ
+#---------
+#ID:230
+#LASTNAME:PEREZ
+#---------
+#ID:240
+#LASTNAME:PEREZ
+#---------
+#ID:250
+#LASTNAME:PEREZ
+#---------
+#ID:260
+#LASTNAME:PEREZ
+#---------
+#ID:270
+#LASTNAME:PEREZ
+#---------
+#ID:280
+#LASTNAME:PEREZ
+#---------
+#ID:290
+#LASTNAME:PEREZ
+#---------
+#ID:300
+#LASTNAME:PEREZ
+#---------
+#ID:310
+#LASTNAME:PEREZ
+#---------
+#ID:320
+#LASTNAME:PEREZ
+#---------
+#ID:330
+#LASTNAME:PEREZ
+#---------
+#ID:340
+#LASTNAME:PEREZ
+#---------
+#ID:350
+#LASTNAME:PEREZ
+#---------
+#ID:10
+#LASTNAME:PIANKA
+#---------
+#ID:20
+#LASTNAME:PIANKA
+#---------
+#ID:30
+#LASTNAME:PIANKA
+#---------
+#ID:40
+#LASTNAME:PIANKA
+#---------
+#ID:50
+#LASTNAME:PIANKA
+#---------
+#ID:60
+#LASTNAME:PIANKA
+#---------
+#ID:70
+#LASTNAME:PIANKA
+#---------
+#ID:80
+#LASTNAME:PIANKA
+#---------
+#ID:90
+#LASTNAME:PIANKA
+#---------
+#ID:100
+#LASTNAME:PIANKA
+#---------
+#ID:110
+#LASTNAME:PIANKA
+#---------
+#ID:120
+#LASTNAME:PIANKA
+#---------
+#ID:130
+#LASTNAME:PIANKA
+#---------
+#ID:140
+#LASTNAME:PIANKA
+#---------
+#ID:150
+#LASTNAME:PIANKA
+#---------
+#ID:160
+#LASTNAME:PIANKA
+#---------
+#ID:170
+#LASTNAME:PIANKA
+#---------
+#ID:180
+#LASTNAME:PIANKA
+#---------
+#ID:190
+#LASTNAME:PIANKA
+#---------
+#ID:200
+#LASTNAME:PIANKA
+#---------
+#ID:210
+#LASTNAME:PIANKA
+#---------
+#ID:220
+#LASTNAME:PIANKA
+#---------
+#ID:230
+#LASTNAME:PIANKA
+#---------
+#ID:240
+#LASTNAME:PIANKA
+#---------
+#ID:250
+#LASTNAME:PIANKA
+#---------
+#ID:260
+#LASTNAME:PIANKA
+#---------
+#ID:270
+#LASTNAME:PIANKA
+#---------
+#ID:280
+#LASTNAME:PIANKA
+#---------
+#ID:290
+#LASTNAME:PIANKA
+#---------
+#ID:300
+#LASTNAME:PIANKA
+#---------
+#ID:310
+#LASTNAME:PIANKA
+#---------
+#ID:320
+#LASTNAME:PIANKA
+#---------
+#ID:330
+#LASTNAME:PIANKA
+#---------
+#ID:340
+#LASTNAME:PIANKA
+#---------
+#ID:350
+#LASTNAME:PIANKA
+#---------
+#ID:10
+#LASTNAME:PULASKI
+#---------
+#ID:20
+#LASTNAME:PULASKI
+#---------
+#ID:30
+#LASTNAME:PULASKI
+#---------
+#ID:40
+#LASTNAME:PULASKI
+#---------
+#ID:50
+#LASTNAME:PULASKI
+#---------
+#ID:60
+#LASTNAME:PULASKI
+#---------
+#ID:70
+#LASTNAME:PULASKI
+#---------
+#ID:80
+#LASTNAME:PULASKI
+#---------
+#ID:90
+#LASTNAME:PULASKI
+#---------
+#ID:100
+#LASTNAME:PULASKI
+#---------
+#ID:110
+#LASTNAME:PULASKI
+#---------
+#ID:120
+#LASTNAME:PULASKI
+#---------
+#ID:130
+#LASTNAME:PULASKI
+#---------
+#ID:140
+#LASTNAME:PULASKI
+#---------
+#ID:150
+#LASTNAME:PULASKI
+#---------
+#ID:160
+#LASTNAME:PULASKI
+#---------
+#ID:170
+#LASTNAME:PULASKI
+#---------
+#ID:180
+#LASTNAME:PULASKI
+#---------
+#ID:190
+#LASTNAME:PULASKI
+#---------
+#ID:200
+#LASTNAME:PULASKI
+#---------
+#ID:210
+#LASTNAME:PULASKI
+#---------
+#ID:220
+#LASTNAME:PULASKI
+#---------
+#ID:230
+#LASTNAME:PULASKI
+#---------
+#ID:240
+#LASTNAME:PULASKI
+#---------
+#ID:250
+#LASTNAME:PULASKI
+#---------
+#ID:260
+#LASTNAME:PULASKI
+#---------
+#ID:270
+#LASTNAME:PULASKI
+#---------
+#ID:280
+#LASTNAME:PULASKI
+#---------
+#ID:290
+#LASTNAME:PULASKI
+#---------
+#ID:300
+#LASTNAME:PULASKI
+#---------
+#ID:310
+#LASTNAME:PULASKI
+#---------
+#ID:320
+#LASTNAME:PULASKI
+#---------
+#ID:330
+#LASTNAME:PULASKI
+#---------
+#ID:340
+#LASTNAME:PULASKI
+#---------
+#ID:350
+#LASTNAME:PULASKI
+#---------
+#ID:10
+#LASTNAME:QUINTANA
+#---------
+#ID:20
+#LASTNAME:QUINTANA
+#---------
+#ID:30
+#LASTNAME:QUINTANA
+#---------
+#ID:40
+#LASTNAME:QUINTANA
+#---------
+#ID:50
+#LASTNAME:QUINTANA
+#---------
+#ID:60
+#LASTNAME:QUINTANA
+#---------
+#ID:70
+#LASTNAME:QUINTANA
+#---------
+#ID:80
+#LASTNAME:QUINTANA
+#---------
+#ID:90
+#LASTNAME:QUINTANA
+#---------
+#ID:100
+#LASTNAME:QUINTANA
+#---------
+#ID:110
+#LASTNAME:QUINTANA
+#---------
+#ID:120
+#LASTNAME:QUINTANA
+#---------
+#ID:130
+#LASTNAME:QUINTANA
+#---------
+#ID:140
+#LASTNAME:QUINTANA
+#---------
+#ID:150
+#LASTNAME:QUINTANA
+#---------
+#ID:160
+#LASTNAME:QUINTANA
+#---------
+#ID:170
+#LASTNAME:QUINTANA
+#---------
+#ID:180
+#LASTNAME:QUINTANA
+#---------
+#ID:190
+#LASTNAME:QUINTANA
+#---------
+#ID:200
+#LASTNAME:QUINTANA
+#---------
+#ID:210
+#LASTNAME:QUINTANA
+#---------
+#ID:220
+#LASTNAME:QUINTANA
+#---------
+#ID:230
+#LASTNAME:QUINTANA
+#---------
+#ID:240
+#LASTNAME:QUINTANA
+#---------
+#ID:250
+#LASTNAME:QUINTANA
+#---------
+#ID:260
+#LASTNAME:QUINTANA
+#---------
+#ID:270
+#LASTNAME:QUINTANA
+#---------
+#ID:280
+#LASTNAME:QUINTANA
+#---------
+#ID:290
+#LASTNAME:QUINTANA
+#---------
+#ID:300
+#LASTNAME:QUINTANA
+#---------
+#ID:310
+#LASTNAME:QUINTANA
+#---------
+#ID:320
+#LASTNAME:QUINTANA
+#---------
+#ID:330
+#LASTNAME:QUINTANA
+#---------
+#ID:340
+#LASTNAME:QUINTANA
+#---------
+#ID:350
+#LASTNAME:QUINTANA
+#---------
+#ID:10
+#LASTNAME:SCHNEIDER
+#---------
+#ID:20
+#LASTNAME:SCHNEIDER
+#---------
+#ID:30
+#LASTNAME:SCHNEIDER
+#---------
+#ID:40
+#LASTNAME:SCHNEIDER
+#---------
+#ID:50
+#LASTNAME:SCHNEIDER
+#---------
+#ID:60
+#LASTNAME:SCHNEIDER
+#---------
+#ID:70
+#LASTNAME:SCHNEIDER
+#---------
+#ID:80
+#LASTNAME:SCHNEIDER
+#---------
+#ID:90
+#LASTNAME:SCHNEIDER
+#---------
+#ID:100
+#LASTNAME:SCHNEIDER
+#---------
+#ID:110
+#LASTNAME:SCHNEIDER
+#---------
+#ID:120
+#LASTNAME:SCHNEIDER
+#---------
+#ID:130
+#LASTNAME:SCHNEIDER
+#---------
+#ID:140
+#LASTNAME:SCHNEIDER
+#---------
+#ID:150
+#LASTNAME:SCHNEIDER
+#---------
+#ID:160
+#LASTNAME:SCHNEIDER
+#---------
+#ID:170
+#LASTNAME:SCHNEIDER
+#---------
+#ID:180
+#LASTNAME:SCHNEIDER
+#---------
+#ID:190
+#LASTNAME:SCHNEIDER
+#---------
+#ID:200
+#LASTNAME:SCHNEIDER
+#---------
+#ID:210
+#LASTNAME:SCHNEIDER
+#---------
+#ID:220
+#LASTNAME:SCHNEIDER
+#---------
+#ID:230
+#LASTNAME:SCHNEIDER
+#---------
+#ID:240
+#LASTNAME:SCHNEIDER
+#---------
+#ID:250
+#LASTNAME:SCHNEIDER
+#---------
+#ID:260
+#LASTNAME:SCHNEIDER
+#---------
+#ID:270
+#LASTNAME:SCHNEIDER
+#---------
+#ID:280
+#LASTNAME:SCHNEIDER
+#---------
+#ID:290
+#LASTNAME:SCHNEIDER
+#---------
+#ID:300
+#LASTNAME:SCHNEIDER
+#---------
+#ID:310
+#LASTNAME:SCHNEIDER
+#---------
+#ID:320
+#LASTNAME:SCHNEIDER
+#---------
+#ID:330
+#LASTNAME:SCHNEIDER
+#---------
+#ID:340
+#LASTNAME:SCHNEIDER
+#---------
+#ID:350
+#LASTNAME:SCHNEIDER
+#---------
+#ID:10
+#LASTNAME:SCOUTTEN
+#---------
+#ID:20
+#LASTNAME:SCOUTTEN
+#---------
+#ID:30
+#LASTNAME:SCOUTTEN
+#---------
+#ID:40
+#LASTNAME:SCOUTTEN
+#---------
+#ID:50
+#LASTNAME:SCOUTTEN
+#---------
+#ID:60
+#LASTNAME:SCOUTTEN
+#---------
+#ID:70
+#LASTNAME:SCOUTTEN
+#---------
+#ID:80
+#LASTNAME:SCOUTTEN
+#---------
+#ID:90
+#LASTNAME:SCOUTTEN
+#---------
+#ID:100
+#LASTNAME:SCOUTTEN
+#---------
+#ID:110
+#LASTNAME:SCOUTTEN
+#---------
+#ID:120
+#LASTNAME:SCOUTTEN
+#---------
+#ID:130
+#LASTNAME:SCOUTTEN
+#---------
+#ID:140
+#LASTNAME:SCOUTTEN
+#---------
+#ID:150
+#LASTNAME:SCOUTTEN
+#---------
+#ID:160
+#LASTNAME:SCOUTTEN
+#---------
+#ID:170
+#LASTNAME:SCOUTTEN
+#---------
+#ID:180
+#LASTNAME:SCOUTTEN
+#---------
+#ID:190
+#LASTNAME:SCOUTTEN
+#---------
+#ID:200
+#LASTNAME:SCOUTTEN
+#---------
+#ID:210
+#LASTNAME:SCOUTTEN
+#---------
+#ID:220
+#LASTNAME:SCOUTTEN
+#---------
+#ID:230
+#LASTNAME:SCOUTTEN
+#---------
+#ID:240
+#LASTNAME:SCOUTTEN
+#---------
+#ID:250
+#LASTNAME:SCOUTTEN
+#---------
+#ID:260
+#LASTNAME:SCOUTTEN
+#---------
+#ID:270
+#LASTNAME:SCOUTTEN
+#---------
+#ID:280
+#LASTNAME:SCOUTTEN
+#---------
+#ID:290
+#LASTNAME:SCOUTTEN
+#---------
+#ID:300
+#LASTNAME:SCOUTTEN
+#---------
+#ID:310
+#LASTNAME:SCOUTTEN
+#---------
+#ID:320
+#LASTNAME:SCOUTTEN
+#---------
+#ID:330
+#LASTNAME:SCOUTTEN
+#---------
+#ID:340
+#LASTNAME:SCOUTTEN
+#---------
+#ID:350
+#LASTNAME:SCOUTTEN
+#---------
+#ID:10
+#LASTNAME:SETRIGHT
+#---------
+#ID:20
+#LASTNAME:SETRIGHT
+#---------
+#ID:30
+#LASTNAME:SETRIGHT
+#---------
+#ID:40
+#LASTNAME:SETRIGHT
+#---------
+#ID:50
+#LASTNAME:SETRIGHT
+#---------
+#ID:60
+#LASTNAME:SETRIGHT
+#---------
+#ID:70
+#LASTNAME:SETRIGHT
+#---------
+#ID:80
+#LASTNAME:SETRIGHT
+#---------
+#ID:90
+#LASTNAME:SETRIGHT
+#---------
+#ID:100
+#LASTNAME:SETRIGHT
+#---------
+#ID:110
+#LASTNAME:SETRIGHT
+#---------
+#ID:120
+#LASTNAME:SETRIGHT
+#---------
+#ID:130
+#LASTNAME:SETRIGHT
+#---------
+#ID:140
+#LASTNAME:SETRIGHT
+#---------
+#ID:150
+#LASTNAME:SETRIGHT
+#---------
+#ID:160
+#LASTNAME:SETRIGHT
+#---------
+#ID:170
+#LASTNAME:SETRIGHT
+#---------
+#ID:180
+#LASTNAME:SETRIGHT
+#---------
+#ID:190
+#LASTNAME:SETRIGHT
+#---------
+#ID:200
+#LASTNAME:SETRIGHT
+#---------
+#ID:210
+#LASTNAME:SETRIGHT
+#---------
+#ID:220
+#LASTNAME:SETRIGHT
+#---------
+#ID:230
+#LASTNAME:SETRIGHT
+#---------
+#ID:240
+#LASTNAME:SETRIGHT
+#---------
+#ID:250
+#LASTNAME:SETRIGHT
+#---------
+#ID:260
+#LASTNAME:SETRIGHT
+#---------
+#ID:270
+#LASTNAME:SETRIGHT
+#---------
+#ID:280
+#LASTNAME:SETRIGHT
+#---------
+#ID:290
+#LASTNAME:SETRIGHT
+#---------
+#ID:300
+#LASTNAME:SETRIGHT
+#---------
+#ID:310
+#LASTNAME:SETRIGHT
+#---------
+#ID:320
+#LASTNAME:SETRIGHT
+#---------
+#ID:330
+#LASTNAME:SETRIGHT
+#---------
+#ID:340
+#LASTNAME:SETRIGHT
+#---------
+#ID:350
+#LASTNAME:SETRIGHT
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SPENSER
+#---------
+#ID:20
+#LASTNAME:SPENSER
+#---------
+#ID:30
+#LASTNAME:SPENSER
+#---------
+#ID:40
+#LASTNAME:SPENSER
+#---------
+#ID:50
+#LASTNAME:SPENSER
+#---------
+#ID:60
+#LASTNAME:SPENSER
+#---------
+#ID:70
+#LASTNAME:SPENSER
+#---------
+#ID:80
+#LASTNAME:SPENSER
+#---------
+#ID:90
+#LASTNAME:SPENSER
+#---------
+#ID:100
+#LASTNAME:SPENSER
+#---------
+#ID:110
+#LASTNAME:SPENSER
+#---------
+#ID:120
+#LASTNAME:SPENSER
+#---------
+#ID:130
+#LASTNAME:SPENSER
+#---------
+#ID:140
+#LASTNAME:SPENSER
+#---------
+#ID:150
+#LASTNAME:SPENSER
+#---------
+#ID:160
+#LASTNAME:SPENSER
+#---------
+#ID:170
+#LASTNAME:SPENSER
+#---------
+#ID:180
+#LASTNAME:SPENSER
+#---------
+#ID:190
+#LASTNAME:SPENSER
+#---------
+#ID:200
+#LASTNAME:SPENSER
+#---------
+#ID:210
+#LASTNAME:SPENSER
+#---------
+#ID:220
+#LASTNAME:SPENSER
+#---------
+#ID:230
+#LASTNAME:SPENSER
+#---------
+#ID:240
+#LASTNAME:SPENSER
+#---------
+#ID:250
+#LASTNAME:SPENSER
+#---------
+#ID:260
+#LASTNAME:SPENSER
+#---------
+#ID:270
+#LASTNAME:SPENSER
+#---------
+#ID:280
+#LASTNAME:SPENSER
+#---------
+#ID:290
+#LASTNAME:SPENSER
+#---------
+#ID:300
+#LASTNAME:SPENSER
+#---------
+#ID:310
+#LASTNAME:SPENSER
+#---------
+#ID:320
+#LASTNAME:SPENSER
+#---------
+#ID:330
+#LASTNAME:SPENSER
+#---------
+#ID:340
+#LASTNAME:SPENSER
+#---------
+#ID:350
+#LASTNAME:SPENSER
+#---------
+#ID:10
+#LASTNAME:STERN
+#---------
+#ID:20
+#LASTNAME:STERN
+#---------
+#ID:30
+#LASTNAME:STERN
+#---------
+#ID:40
+#LASTNAME:STERN
+#---------
+#ID:50
+#LASTNAME:STERN
+#---------
+#ID:60
+#LASTNAME:STERN
+#---------
+#ID:70
+#LASTNAME:STERN
+#---------
+#ID:80
+#LASTNAME:STERN
+#---------
+#ID:90
+#LASTNAME:STERN
+#---------
+#ID:100
+#LASTNAME:STERN
+#---------
+#ID:110
+#LASTNAME:STERN
+#---------
+#ID:120
+#LASTNAME:STERN
+#---------
+#ID:130
+#LASTNAME:STERN
+#---------
+#ID:140
+#LASTNAME:STERN
+#---------
+#ID:150
+#LASTNAME:STERN
+#---------
+#ID:160
+#LASTNAME:STERN
+#---------
+#ID:170
+#LASTNAME:STERN
+#---------
+#ID:180
+#LASTNAME:STERN
+#---------
+#ID:190
+#LASTNAME:STERN
+#---------
+#ID:200
+#LASTNAME:STERN
+#---------
+#ID:210
+#LASTNAME:STERN
+#---------
+#ID:220
+#LASTNAME:STERN
+#---------
+#ID:230
+#LASTNAME:STERN
+#---------
+#ID:240
+#LASTNAME:STERN
+#---------
+#ID:250
+#LASTNAME:STERN
+#---------
+#ID:260
+#LASTNAME:STERN
+#---------
+#ID:270
+#LASTNAME:STERN
+#---------
+#ID:280
+#LASTNAME:STERN
+#---------
+#ID:290
+#LASTNAME:STERN
+#---------
+#ID:300
+#LASTNAME:STERN
+#---------
+#ID:310
+#LASTNAME:STERN
+#---------
+#ID:320
+#LASTNAME:STERN
+#---------
+#ID:330
+#LASTNAME:STERN
+#---------
+#ID:340
+#LASTNAME:STERN
+#---------
+#ID:350
+#LASTNAME:STERN
+#---------
+#ID:10
+#LASTNAME:THOMPSON
+#---------
+#ID:20
+#LASTNAME:THOMPSON
+#---------
+#ID:30
+#LASTNAME:THOMPSON
+#---------
+#ID:40
+#LASTNAME:THOMPSON
+#---------
+#ID:50
+#LASTNAME:THOMPSON
+#---------
+#ID:60
+#LASTNAME:THOMPSON
+#---------
+#ID:70
+#LASTNAME:THOMPSON
+#---------
+#ID:80
+#LASTNAME:THOMPSON
+#---------
+#ID:90
+#LASTNAME:THOMPSON
+#---------
+#ID:100
+#LASTNAME:THOMPSON
+#---------
+#ID:110
+#LASTNAME:THOMPSON
+#---------
+#ID:120
+#LASTNAME:THOMPSON
+#---------
+#ID:130
+#LASTNAME:THOMPSON
+#---------
+#ID:140
+#LASTNAME:THOMPSON
+#---------
+#ID:150
+#LASTNAME:THOMPSON
+#---------
+#ID:160
+#LASTNAME:THOMPSON
+#---------
+#ID:170
+#LASTNAME:THOMPSON
+#---------
+#ID:180
+#LASTNAME:THOMPSON
+#---------
+#ID:190
+#LASTNAME:THOMPSON
+#---------
+#ID:200
+#LASTNAME:THOMPSON
+#---------
+#ID:210
+#LASTNAME:THOMPSON
+#---------
+#ID:220
+#LASTNAME:THOMPSON
+#---------
+#ID:230
+#LASTNAME:THOMPSON
+#---------
+#ID:240
+#LASTNAME:THOMPSON
+#---------
+#ID:250
+#LASTNAME:THOMPSON
+#---------
+#ID:260
+#LASTNAME:THOMPSON
+#---------
+#ID:270
+#LASTNAME:THOMPSON
+#---------
+#ID:280
+#LASTNAME:THOMPSON
+#---------
+#ID:290
+#LASTNAME:THOMPSON
+#---------
+#ID:300
+#LASTNAME:THOMPSON
+#---------
+#ID:310
+#LASTNAME:THOMPSON
+#---------
+#ID:320
+#LASTNAME:THOMPSON
+#---------
+#ID:330
+#LASTNAME:THOMPSON
+#---------
+#ID:340
+#LASTNAME:THOMPSON
+#---------
+#ID:350
+#LASTNAME:THOMPSON
+#---------
+#ID:10
+#LASTNAME:WALKER
+#---------
+#ID:20
+#LASTNAME:WALKER
+#---------
+#ID:30
+#LASTNAME:WALKER
+#---------
+#ID:40
+#LASTNAME:WALKER
+#---------
+#ID:50
+#LASTNAME:WALKER
+#---------
+#ID:60
+#LASTNAME:WALKER
+#---------
+#ID:70
+#LASTNAME:WALKER
+#---------
+#ID:80
+#LASTNAME:WALKER
+#---------
+#ID:90
+#LASTNAME:WALKER
+#---------
+#ID:100
+#LASTNAME:WALKER
+#---------
+#ID:110
+#LASTNAME:WALKER
+#---------
+#ID:120
+#LASTNAME:WALKER
+#---------
+#ID:130
+#LASTNAME:WALKER
+#---------
+#ID:140
+#LASTNAME:WALKER
+#---------
+#ID:150
+#LASTNAME:WALKER
+#---------
+#ID:160
+#LASTNAME:WALKER
+#---------
+#ID:170
+#LASTNAME:WALKER
+#---------
+#ID:180
+#LASTNAME:WALKER
+#---------
+#ID:190
+#LASTNAME:WALKER
+#---------
+#ID:200
+#LASTNAME:WALKER
+#---------
+#ID:210
+#LASTNAME:WALKER
+#---------
+#ID:220
+#LASTNAME:WALKER
+#---------
+#ID:230
+#LASTNAME:WALKER
+#---------
+#ID:240
+#LASTNAME:WALKER
+#---------
+#ID:250
+#LASTNAME:WALKER
+#---------
+#ID:260
+#LASTNAME:WALKER
+#---------
+#ID:270
+#LASTNAME:WALKER
+#---------
+#ID:280
+#LASTNAME:WALKER
+#---------
+#ID:290
+#LASTNAME:WALKER
+#---------
+#ID:300
+#LASTNAME:WALKER
+#---------
+#ID:310
+#LASTNAME:WALKER
+#---------
+#ID:320
+#LASTNAME:WALKER
+#---------
+#ID:330
+#LASTNAME:WALKER
+#---------
+#ID:340
+#LASTNAME:WALKER
+#---------
+#ID:350
+#LASTNAME:WALKER
+#---------
+#ID:10
+#LASTNAME:YOSHIMURA
+#---------
+#ID:20
+#LASTNAME:YOSHIMURA
+#---------
+#ID:30
+#LASTNAME:YOSHIMURA
+#---------
+#ID:40
+#LASTNAME:YOSHIMURA
+#---------
+#ID:50
+#LASTNAME:YOSHIMURA
+#---------
+#ID:60
+#LASTNAME:YOSHIMURA
+#---------
+#ID:70
+#LASTNAME:YOSHIMURA
+#---------
+#ID:80
+#LASTNAME:YOSHIMURA
+#---------
+#ID:90
+#LASTNAME:YOSHIMURA
+#---------
+#ID:100
+#LASTNAME:YOSHIMURA
+#---------
+#ID:110
+#LASTNAME:YOSHIMURA
+#---------
+#ID:120
+#LASTNAME:YOSHIMURA
+#---------
+#ID:130
+#LASTNAME:YOSHIMURA
+#---------
+#ID:140
+#LASTNAME:YOSHIMURA
+#---------
+#ID:150
+#LASTNAME:YOSHIMURA
+#---------
+#ID:160
+#LASTNAME:YOSHIMURA
+#---------
+#ID:170
+#LASTNAME:YOSHIMURA
+#---------
+#ID:180
+#LASTNAME:YOSHIMURA
+#---------
+#ID:190
+#LASTNAME:YOSHIMURA
+#---------
+#ID:200
+#LASTNAME:YOSHIMURA
+#---------
+#ID:210
+#LASTNAME:YOSHIMURA
+#---------
+#ID:220
+#LASTNAME:YOSHIMURA
+#---------
+#ID:230
+#LASTNAME:YOSHIMURA
+#---------
+#ID:240
+#LASTNAME:YOSHIMURA
+#---------
+#ID:250
+#LASTNAME:YOSHIMURA
+#---------
+#ID:260
+#LASTNAME:YOSHIMURA
+#---------
+#ID:270
+#LASTNAME:YOSHIMURA
+#---------
+#ID:280
+#LASTNAME:YOSHIMURA
+#---------
+#ID:290
+#LASTNAME:YOSHIMURA
+#---------
+#ID:300
+#LASTNAME:YOSHIMURA
+#---------
+#ID:310
+#LASTNAME:YOSHIMURA
+#---------
+#ID:320
+#LASTNAME:YOSHIMURA
+#---------
+#ID:330
+#LASTNAME:YOSHIMURA
+#---------
+#ID:340
+#LASTNAME:YOSHIMURA
+#---------
+#ID:350
+#LASTNAME:YOSHIMURA
+#---------
+#__IDS_EXPECTED__
+#
+#ID:10
+#LASTNAME:ADAMSON
+#---------
+#ID:20
+#LASTNAME:ADAMSON
+#---------
+#ID:30
+#LASTNAME:ADAMSON
+#---------
+#ID:40
+#LASTNAME:ADAMSON
+#---------
+#ID:50
+#LASTNAME:ADAMSON
+#---------
+#ID:60
+#LASTNAME:ADAMSON
+#---------
+#ID:70
+#LASTNAME:ADAMSON
+#---------
+#ID:80
+#LASTNAME:ADAMSON
+#---------
+#ID:90
+#LASTNAME:ADAMSON
+#---------
+#ID:100
+#LASTNAME:ADAMSON
+#---------
+#ID:110
+#LASTNAME:ADAMSON
+#---------
+#ID:120
+#LASTNAME:ADAMSON
+#---------
+#ID:130
+#LASTNAME:ADAMSON
+#---------
+#ID:140
+#LASTNAME:ADAMSON
+#---------
+#ID:150
+#LASTNAME:ADAMSON
+#---------
+#ID:160
+#LASTNAME:ADAMSON
+#---------
+#ID:170
+#LASTNAME:ADAMSON
+#---------
+#ID:180
+#LASTNAME:ADAMSON
+#---------
+#ID:190
+#LASTNAME:ADAMSON
+#---------
+#ID:200
+#LASTNAME:ADAMSON
+#---------
+#ID:210
+#LASTNAME:ADAMSON
+#---------
+#ID:220
+#LASTNAME:ADAMSON
+#---------
+#ID:230
+#LASTNAME:ADAMSON
+#---------
+#ID:240
+#LASTNAME:ADAMSON
+#---------
+#ID:250
+#LASTNAME:ADAMSON
+#---------
+#ID:260
+#LASTNAME:ADAMSON
+#---------
+#ID:270
+#LASTNAME:ADAMSON
+#---------
+#ID:280
+#LASTNAME:ADAMSON
+#---------
+#ID:290
+#LASTNAME:ADAMSON
+#---------
+#ID:300
+#LASTNAME:ADAMSON
+#---------
+#ID:310
+#LASTNAME:ADAMSON
+#---------
+#ID:320
+#LASTNAME:ADAMSON
+#---------
+#ID:330
+#LASTNAME:ADAMSON
+#---------
+#ID:340
+#LASTNAME:ADAMSON
+#---------
+#ID:350
+#LASTNAME:ADAMSON
+#---------
+#ID:10
+#LASTNAME:BROWN
+#---------
+#ID:20
+#LASTNAME:BROWN
+#---------
+#ID:30
+#LASTNAME:BROWN
+#---------
+#ID:40
+#LASTNAME:BROWN
+#---------
+#ID:50
+#LASTNAME:BROWN
+#---------
+#ID:60
+#LASTNAME:BROWN
+#---------
+#ID:70
+#LASTNAME:BROWN
+#---------
+#ID:80
+#LASTNAME:BROWN
+#---------
+#ID:90
+#LASTNAME:BROWN
+#---------
+#ID:100
+#LASTNAME:BROWN
+#---------
+#ID:110
+#LASTNAME:BROWN
+#---------
+#ID:120
+#LASTNAME:BROWN
+#---------
+#ID:130
+#LASTNAME:BROWN
+#---------
+#ID:140
+#LASTNAME:BROWN
+#---------
+#ID:150
+#LASTNAME:BROWN
+#---------
+#ID:160
+#LASTNAME:BROWN
+#---------
+#ID:170
+#LASTNAME:BROWN
+#---------
+#ID:180
+#LASTNAME:BROWN
+#---------
+#ID:190
+#LASTNAME:BROWN
+#---------
+#ID:200
+#LASTNAME:BROWN
+#---------
+#ID:210
+#LASTNAME:BROWN
+#---------
+#ID:220
+#LASTNAME:BROWN
+#---------
+#ID:230
+#LASTNAME:BROWN
+#---------
+#ID:240
+#LASTNAME:BROWN
+#---------
+#ID:250
+#LASTNAME:BROWN
+#---------
+#ID:260
+#LASTNAME:BROWN
+#---------
+#ID:270
+#LASTNAME:BROWN
+#---------
+#ID:280
+#LASTNAME:BROWN
+#---------
+#ID:290
+#LASTNAME:BROWN
+#---------
+#ID:300
+#LASTNAME:BROWN
+#---------
+#ID:310
+#LASTNAME:BROWN
+#---------
+#ID:320
+#LASTNAME:BROWN
+#---------
+#ID:330
+#LASTNAME:BROWN
+#---------
+#ID:340
+#LASTNAME:BROWN
+#---------
+#ID:350
+#LASTNAME:BROWN
+#---------
+#ID:10
+#LASTNAME:GEYER
+#---------
+#ID:20
+#LASTNAME:GEYER
+#---------
+#ID:30
+#LASTNAME:GEYER
+#---------
+#ID:40
+#LASTNAME:GEYER
+#---------
+#ID:50
+#LASTNAME:GEYER
+#---------
+#ID:60
+#LASTNAME:GEYER
+#---------
+#ID:70
+#LASTNAME:GEYER
+#---------
+#ID:80
+#LASTNAME:GEYER
+#---------
+#ID:90
+#LASTNAME:GEYER
+#---------
+#ID:100
+#LASTNAME:GEYER
+#---------
+#ID:110
+#LASTNAME:GEYER
+#---------
+#ID:120
+#LASTNAME:GEYER
+#---------
+#ID:130
+#LASTNAME:GEYER
+#---------
+#ID:140
+#LASTNAME:GEYER
+#---------
+#ID:150
+#LASTNAME:GEYER
+#---------
+#ID:160
+#LASTNAME:GEYER
+#---------
+#ID:170
+#LASTNAME:GEYER
+#---------
+#ID:180
+#LASTNAME:GEYER
+#---------
+#ID:190
+#LASTNAME:GEYER
+#---------
+#ID:200
+#LASTNAME:GEYER
+#---------
+#ID:210
+#LASTNAME:GEYER
+#---------
+#ID:220
+#LASTNAME:GEYER
+#---------
+#ID:230
+#LASTNAME:GEYER
+#---------
+#ID:240
+#LASTNAME:GEYER
+#---------
+#ID:250
+#LASTNAME:GEYER
+#---------
+#ID:260
+#LASTNAME:GEYER
+#---------
+#ID:270
+#LASTNAME:GEYER
+#---------
+#ID:280
+#LASTNAME:GEYER
+#---------
+#ID:290
+#LASTNAME:GEYER
+#---------
+#ID:300
+#LASTNAME:GEYER
+#---------
+#ID:310
+#LASTNAME:GEYER
+#---------
+#ID:320
+#LASTNAME:GEYER
+#---------
+#ID:330
+#LASTNAME:GEYER
+#---------
+#ID:340
+#LASTNAME:GEYER
+#---------
+#ID:350
+#LASTNAME:GEYER
+#---------
+#ID:10
+#LASTNAME:GOUNOT
+#---------
+#ID:20
+#LASTNAME:GOUNOT
+#---------
+#ID:30
+#LASTNAME:GOUNOT
+#---------
+#ID:40
+#LASTNAME:GOUNOT
+#---------
+#ID:50
+#LASTNAME:GOUNOT
+#---------
+#ID:60
+#LASTNAME:GOUNOT
+#---------
+#ID:70
+#LASTNAME:GOUNOT
+#---------
+#ID:80
+#LASTNAME:GOUNOT
+#---------
+#ID:90
+#LASTNAME:GOUNOT
+#---------
+#ID:100
+#LASTNAME:GOUNOT
+#---------
+#ID:110
+#LASTNAME:GOUNOT
+#---------
+#ID:120
+#LASTNAME:GOUNOT
+#---------
+#ID:130
+#LASTNAME:GOUNOT
+#---------
+#ID:140
+#LASTNAME:GOUNOT
+#---------
+#ID:150
+#LASTNAME:GOUNOT
+#---------
+#ID:160
+#LASTNAME:GOUNOT
+#---------
+#ID:170
+#LASTNAME:GOUNOT
+#---------
+#ID:180
+#LASTNAME:GOUNOT
+#---------
+#ID:190
+#LASTNAME:GOUNOT
+#---------
+#ID:200
+#LASTNAME:GOUNOT
+#---------
+#ID:210
+#LASTNAME:GOUNOT
+#---------
+#ID:220
+#LASTNAME:GOUNOT
+#---------
+#ID:230
+#LASTNAME:GOUNOT
+#---------
+#ID:240
+#LASTNAME:GOUNOT
+#---------
+#ID:250
+#LASTNAME:GOUNOT
+#---------
+#ID:260
+#LASTNAME:GOUNOT
+#---------
+#ID:270
+#LASTNAME:GOUNOT
+#---------
+#ID:280
+#LASTNAME:GOUNOT
+#---------
+#ID:290
+#LASTNAME:GOUNOT
+#---------
+#ID:300
+#LASTNAME:GOUNOT
+#---------
+#ID:310
+#LASTNAME:GOUNOT
+#---------
+#ID:320
+#LASTNAME:GOUNOT
+#---------
+#ID:330
+#LASTNAME:GOUNOT
+#---------
+#ID:340
+#LASTNAME:GOUNOT
+#---------
+#ID:350
+#LASTNAME:GOUNOT
+#---------
+#ID:10
+#LASTNAME:HAAS
+#---------
+#ID:20
+#LASTNAME:HAAS
+#---------
+#ID:30
+#LASTNAME:HAAS
+#---------
+#ID:40
+#LASTNAME:HAAS
+#---------
+#ID:50
+#LASTNAME:HAAS
+#---------
+#ID:60
+#LASTNAME:HAAS
+#---------
+#ID:70
+#LASTNAME:HAAS
+#---------
+#ID:80
+#LASTNAME:HAAS
+#---------
+#ID:90
+#LASTNAME:HAAS
+#---------
+#ID:100
+#LASTNAME:HAAS
+#---------
+#ID:110
+#LASTNAME:HAAS
+#---------
+#ID:120
+#LASTNAME:HAAS
+#---------
+#ID:130
+#LASTNAME:HAAS
+#---------
+#ID:140
+#LASTNAME:HAAS
+#---------
+#ID:150
+#LASTNAME:HAAS
+#---------
+#ID:160
+#LASTNAME:HAAS
+#---------
+#ID:170
+#LASTNAME:HAAS
+#---------
+#ID:180
+#LASTNAME:HAAS
+#---------
+#ID:190
+#LASTNAME:HAAS
+#---------
+#ID:200
+#LASTNAME:HAAS
+#---------
+#ID:210
+#LASTNAME:HAAS
+#---------
+#ID:220
+#LASTNAME:HAAS
+#---------
+#ID:230
+#LASTNAME:HAAS
+#---------
+#ID:240
+#LASTNAME:HAAS
+#---------
+#ID:250
+#LASTNAME:HAAS
+#---------
+#ID:260
+#LASTNAME:HAAS
+#---------
+#ID:270
+#LASTNAME:HAAS
+#---------
+#ID:280
+#LASTNAME:HAAS
+#---------
+#ID:290
+#LASTNAME:HAAS
+#---------
+#ID:300
+#LASTNAME:HAAS
+#---------
+#ID:310
+#LASTNAME:HAAS
+#---------
+#ID:320
+#LASTNAME:HAAS
+#---------
+#ID:330
+#LASTNAME:HAAS
+#---------
+#ID:340
+#LASTNAME:HAAS
+#---------
+#ID:350
+#LASTNAME:HAAS
+#---------
+#ID:10
+#LASTNAME:HENDERSON
+#---------
+#ID:20
+#LASTNAME:HENDERSON
+#---------
+#ID:30
+#LASTNAME:HENDERSON
+#---------
+#ID:40
+#LASTNAME:HENDERSON
+#---------
+#ID:50
+#LASTNAME:HENDERSON
+#---------
+#ID:60
+#LASTNAME:HENDERSON
+#---------
+#ID:70
+#LASTNAME:HENDERSON
+#---------
+#ID:80
+#LASTNAME:HENDERSON
+#---------
+#ID:90
+#LASTNAME:HENDERSON
+#---------
+#ID:100
+#LASTNAME:HENDERSON
+#---------
+#ID:110
+#LASTNAME:HENDERSON
+#---------
+#ID:120
+#LASTNAME:HENDERSON
+#---------
+#ID:130
+#LASTNAME:HENDERSON
+#---------
+#ID:140
+#LASTNAME:HENDERSON
+#---------
+#ID:150
+#LASTNAME:HENDERSON
+#---------
+#ID:160
+#LASTNAME:HENDERSON
+#---------
+#ID:170
+#LASTNAME:HENDERSON
+#---------
+#ID:180
+#LASTNAME:HENDERSON
+#---------
+#ID:190
+#LASTNAME:HENDERSON
+#---------
+#ID:200
+#LASTNAME:HENDERSON
+#---------
+#ID:210
+#LASTNAME:HENDERSON
+#---------
+#ID:220
+#LASTNAME:HENDERSON
+#---------
+#ID:230
+#LASTNAME:HENDERSON
+#---------
+#ID:240
+#LASTNAME:HENDERSON
+#---------
+#ID:250
+#LASTNAME:HENDERSON
+#---------
+#ID:260
+#LASTNAME:HENDERSON
+#---------
+#ID:270
+#LASTNAME:HENDERSON
+#---------
+#ID:280
+#LASTNAME:HENDERSON
+#---------
+#ID:290
+#LASTNAME:HENDERSON
+#---------
+#ID:300
+#LASTNAME:HENDERSON
+#---------
+#ID:310
+#LASTNAME:HENDERSON
+#---------
+#ID:320
+#LASTNAME:HENDERSON
+#---------
+#ID:330
+#LASTNAME:HENDERSON
+#---------
+#ID:340
+#LASTNAME:HENDERSON
+#---------
+#ID:350
+#LASTNAME:HENDERSON
+#---------
+#ID:10
+#LASTNAME:JEFFERSON
+#---------
+#ID:20
+#LASTNAME:JEFFERSON
+#---------
+#ID:30
+#LASTNAME:JEFFERSON
+#---------
+#ID:40
+#LASTNAME:JEFFERSON
+#---------
+#ID:50
+#LASTNAME:JEFFERSON
+#---------
+#ID:60
+#LASTNAME:JEFFERSON
+#---------
+#ID:70
+#LASTNAME:JEFFERSON
+#---------
+#ID:80
+#LASTNAME:JEFFERSON
+#---------
+#ID:90
+#LASTNAME:JEFFERSON
+#---------
+#ID:100
+#LASTNAME:JEFFERSON
+#---------
+#ID:110
+#LASTNAME:JEFFERSON
+#---------
+#ID:120
+#LASTNAME:JEFFERSON
+#---------
+#ID:130
+#LASTNAME:JEFFERSON
+#---------
+#ID:140
+#LASTNAME:JEFFERSON
+#---------
+#ID:150
+#LASTNAME:JEFFERSON
+#---------
+#ID:160
+#LASTNAME:JEFFERSON
+#---------
+#ID:170
+#LASTNAME:JEFFERSON
+#---------
+#ID:180
+#LASTNAME:JEFFERSON
+#---------
+#ID:190
+#LASTNAME:JEFFERSON
+#---------
+#ID:200
+#LASTNAME:JEFFERSON
+#---------
+#ID:210
+#LASTNAME:JEFFERSON
+#---------
+#ID:220
+#LASTNAME:JEFFERSON
+#---------
+#ID:230
+#LASTNAME:JEFFERSON
+#---------
+#ID:240
+#LASTNAME:JEFFERSON
+#---------
+#ID:250
+#LASTNAME:JEFFERSON
+#---------
+#ID:260
+#LASTNAME:JEFFERSON
+#---------
+#ID:270
+#LASTNAME:JEFFERSON
+#---------
+#ID:280
+#LASTNAME:JEFFERSON
+#---------
+#ID:290
+#LASTNAME:JEFFERSON
+#---------
+#ID:300
+#LASTNAME:JEFFERSON
+#---------
+#ID:310
+#LASTNAME:JEFFERSON
+#---------
+#ID:320
+#LASTNAME:JEFFERSON
+#---------
+#ID:330
+#LASTNAME:JEFFERSON
+#---------
+#ID:340
+#LASTNAME:JEFFERSON
+#---------
+#ID:350
+#LASTNAME:JEFFERSON
+#---------
+#ID:10
+#LASTNAME:JOHNSON
+#---------
+#ID:20
+#LASTNAME:JOHNSON
+#---------
+#ID:30
+#LASTNAME:JOHNSON
+#---------
+#ID:40
+#LASTNAME:JOHNSON
+#---------
+#ID:50
+#LASTNAME:JOHNSON
+#---------
+#ID:60
+#LASTNAME:JOHNSON
+#---------
+#ID:70
+#LASTNAME:JOHNSON
+#---------
+#ID:80
+#LASTNAME:JOHNSON
+#---------
+#ID:90
+#LASTNAME:JOHNSON
+#---------
+#ID:100
+#LASTNAME:JOHNSON
+#---------
+#ID:110
+#LASTNAME:JOHNSON
+#---------
+#ID:120
+#LASTNAME:JOHNSON
+#---------
+#ID:130
+#LASTNAME:JOHNSON
+#---------
+#ID:140
+#LASTNAME:JOHNSON
+#---------
+#ID:150
+#LASTNAME:JOHNSON
+#---------
+#ID:160
+#LASTNAME:JOHNSON
+#---------
+#ID:170
+#LASTNAME:JOHNSON
+#---------
+#ID:180
+#LASTNAME:JOHNSON
+#---------
+#ID:190
+#LASTNAME:JOHNSON
+#---------
+#ID:200
+#LASTNAME:JOHNSON
+#---------
+#ID:210
+#LASTNAME:JOHNSON
+#---------
+#ID:220
+#LASTNAME:JOHNSON
+#---------
+#ID:230
+#LASTNAME:JOHNSON
+#---------
+#ID:240
+#LASTNAME:JOHNSON
+#---------
+#ID:250
+#LASTNAME:JOHNSON
+#---------
+#ID:260
+#LASTNAME:JOHNSON
+#---------
+#ID:270
+#LASTNAME:JOHNSON
+#---------
+#ID:280
+#LASTNAME:JOHNSON
+#---------
+#ID:290
+#LASTNAME:JOHNSON
+#---------
+#ID:300
+#LASTNAME:JOHNSON
+#---------
+#ID:310
+#LASTNAME:JOHNSON
+#---------
+#ID:320
+#LASTNAME:JOHNSON
+#---------
+#ID:330
+#LASTNAME:JOHNSON
+#---------
+#ID:340
+#LASTNAME:JOHNSON
+#---------
+#ID:350
+#LASTNAME:JOHNSON
+#---------
+#ID:10
+#LASTNAME:JONES
+#---------
+#ID:20
+#LASTNAME:JONES
+#---------
+#ID:30
+#LASTNAME:JONES
+#---------
+#ID:40
+#LASTNAME:JONES
+#---------
+#ID:50
+#LASTNAME:JONES
+#---------
+#ID:60
+#LASTNAME:JONES
+#---------
+#ID:70
+#LASTNAME:JONES
+#---------
+#ID:80
+#LASTNAME:JONES
+#---------
+#ID:90
+#LASTNAME:JONES
+#---------
+#ID:100
+#LASTNAME:JONES
+#---------
+#ID:110
+#LASTNAME:JONES
+#---------
+#ID:120
+#LASTNAME:JONES
+#---------
+#ID:130
+#LASTNAME:JONES
+#---------
+#ID:140
+#LASTNAME:JONES
+#---------
+#ID:150
+#LASTNAME:JONES
+#---------
+#ID:160
+#LASTNAME:JONES
+#---------
+#ID:170
+#LASTNAME:JONES
+#---------
+#ID:180
+#LASTNAME:JONES
+#---------
+#ID:190
+#LASTNAME:JONES
+#---------
+#ID:200
+#LASTNAME:JONES
+#---------
+#ID:210
+#LASTNAME:JONES
+#---------
+#ID:220
+#LASTNAME:JONES
+#---------
+#ID:230
+#LASTNAME:JONES
+#---------
+#ID:240
+#LASTNAME:JONES
+#---------
+#ID:250
+#LASTNAME:JONES
+#---------
+#ID:260
+#LASTNAME:JONES
+#---------
+#ID:270
+#LASTNAME:JONES
+#---------
+#ID:280
+#LASTNAME:JONES
+#---------
+#ID:290
+#LASTNAME:JONES
+#---------
+#ID:300
+#LASTNAME:JONES
+#---------
+#ID:310
+#LASTNAME:JONES
+#---------
+#ID:320
+#LASTNAME:JONES
+#---------
+#ID:330
+#LASTNAME:JONES
+#---------
+#ID:340
+#LASTNAME:JONES
+#---------
+#ID:350
+#LASTNAME:JONES
+#---------
+#ID:10
+#LASTNAME:KWAN
+#---------
+#ID:20
+#LASTNAME:KWAN
+#---------
+#ID:30
+#LASTNAME:KWAN
+#---------
+#ID:40
+#LASTNAME:KWAN
+#---------
+#ID:50
+#LASTNAME:KWAN
+#---------
+#ID:60
+#LASTNAME:KWAN
+#---------
+#ID:70
+#LASTNAME:KWAN
+#---------
+#ID:80
+#LASTNAME:KWAN
+#---------
+#ID:90
+#LASTNAME:KWAN
+#---------
+#ID:100
+#LASTNAME:KWAN
+#---------
+#ID:110
+#LASTNAME:KWAN
+#---------
+#ID:120
+#LASTNAME:KWAN
+#---------
+#ID:130
+#LASTNAME:KWAN
+#---------
+#ID:140
+#LASTNAME:KWAN
+#---------
+#ID:150
+#LASTNAME:KWAN
+#---------
+#ID:160
+#LASTNAME:KWAN
+#---------
+#ID:170
+#LASTNAME:KWAN
+#---------
+#ID:180
+#LASTNAME:KWAN
+#---------
+#ID:190
+#LASTNAME:KWAN
+#---------
+#ID:200
+#LASTNAME:KWAN
+#---------
+#ID:210
+#LASTNAME:KWAN
+#---------
+#ID:220
+#LASTNAME:KWAN
+#---------
+#ID:230
+#LASTNAME:KWAN
+#---------
+#ID:240
+#LASTNAME:KWAN
+#---------
+#ID:250
+#LASTNAME:KWAN
+#---------
+#ID:260
+#LASTNAME:KWAN
+#---------
+#ID:270
+#LASTNAME:KWAN
+#---------
+#ID:280
+#LASTNAME:KWAN
+#---------
+#ID:290
+#LASTNAME:KWAN
+#---------
+#ID:300
+#LASTNAME:KWAN
+#---------
+#ID:310
+#LASTNAME:KWAN
+#---------
+#ID:320
+#LASTNAME:KWAN
+#---------
+#ID:330
+#LASTNAME:KWAN
+#---------
+#ID:340
+#LASTNAME:KWAN
+#---------
+#ID:350
+#LASTNAME:KWAN
+#---------
+#ID:10
+#LASTNAME:LEE
+#---------
+#ID:20
+#LASTNAME:LEE
+#---------
+#ID:30
+#LASTNAME:LEE
+#---------
+#ID:40
+#LASTNAME:LEE
+#---------
+#ID:50
+#LASTNAME:LEE
+#---------
+#ID:60
+#LASTNAME:LEE
+#---------
+#ID:70
+#LASTNAME:LEE
+#---------
+#ID:80
+#LASTNAME:LEE
+#---------
+#ID:90
+#LASTNAME:LEE
+#---------
+#ID:100
+#LASTNAME:LEE
+#---------
+#ID:110
+#LASTNAME:LEE
+#---------
+#ID:120
+#LASTNAME:LEE
+#---------
+#ID:130
+#LASTNAME:LEE
+#---------
+#ID:140
+#LASTNAME:LEE
+#---------
+#ID:150
+#LASTNAME:LEE
+#---------
+#ID:160
+#LASTNAME:LEE
+#---------
+#ID:170
+#LASTNAME:LEE
+#---------
+#ID:180
+#LASTNAME:LEE
+#---------
+#ID:190
+#LASTNAME:LEE
+#---------
+#ID:200
+#LASTNAME:LEE
+#---------
+#ID:210
+#LASTNAME:LEE
+#---------
+#ID:220
+#LASTNAME:LEE
+#---------
+#ID:230
+#LASTNAME:LEE
+#---------
+#ID:240
+#LASTNAME:LEE
+#---------
+#ID:250
+#LASTNAME:LEE
+#---------
+#ID:260
+#LASTNAME:LEE
+#---------
+#ID:270
+#LASTNAME:LEE
+#---------
+#ID:280
+#LASTNAME:LEE
+#---------
+#ID:290
+#LASTNAME:LEE
+#---------
+#ID:300
+#LASTNAME:LEE
+#---------
+#ID:310
+#LASTNAME:LEE
+#---------
+#ID:320
+#LASTNAME:LEE
+#---------
+#ID:330
+#LASTNAME:LEE
+#---------
+#ID:340
+#LASTNAME:LEE
+#---------
+#ID:350
+#LASTNAME:LEE
+#---------
+#ID:10
+#LASTNAME:LUCCHESSI
+#---------
+#ID:20
+#LASTNAME:LUCCHESSI
+#---------
+#ID:30
+#LASTNAME:LUCCHESSI
+#---------
+#ID:40
+#LASTNAME:LUCCHESSI
+#---------
+#ID:50
+#LASTNAME:LUCCHESSI
+#---------
+#ID:60
+#LASTNAME:LUCCHESSI
+#---------
+#ID:70
+#LASTNAME:LUCCHESSI
+#---------
+#ID:80
+#LASTNAME:LUCCHESSI
+#---------
+#ID:90
+#LASTNAME:LUCCHESSI
+#---------
+#ID:100
+#LASTNAME:LUCCHESSI
+#---------
+#ID:110
+#LASTNAME:LUCCHESSI
+#---------
+#ID:120
+#LASTNAME:LUCCHESSI
+#---------
+#ID:130
+#LASTNAME:LUCCHESSI
+#---------
+#ID:140
+#LASTNAME:LUCCHESSI
+#---------
+#ID:150
+#LASTNAME:LUCCHESSI
+#---------
+#ID:160
+#LASTNAME:LUCCHESSI
+#---------
+#ID:170
+#LASTNAME:LUCCHESSI
+#---------
+#ID:180
+#LASTNAME:LUCCHESSI
+#---------
+#ID:190
+#LASTNAME:LUCCHESSI
+#---------
+#ID:200
+#LASTNAME:LUCCHESSI
+#---------
+#ID:210
+#LASTNAME:LUCCHESSI
+#---------
+#ID:220
+#LASTNAME:LUCCHESSI
+#---------
+#ID:230
+#LASTNAME:LUCCHESSI
+#---------
+#ID:240
+#LASTNAME:LUCCHESSI
+#---------
+#ID:250
+#LASTNAME:LUCCHESSI
+#---------
+#ID:260
+#LASTNAME:LUCCHESSI
+#---------
+#ID:270
+#LASTNAME:LUCCHESSI
+#---------
+#ID:280
+#LASTNAME:LUCCHESSI
+#---------
+#ID:290
+#LASTNAME:LUCCHESSI
+#---------
+#ID:300
+#LASTNAME:LUCCHESSI
+#---------
+#ID:310
+#LASTNAME:LUCCHESSI
+#---------
+#ID:320
+#LASTNAME:LUCCHESSI
+#---------
+#ID:330
+#LASTNAME:LUCCHESSI
+#---------
+#ID:340
+#LASTNAME:LUCCHESSI
+#---------
+#ID:350
+#LASTNAME:LUCCHESSI
+#---------
+#ID:10
+#LASTNAME:LUTZ
+#---------
+#ID:20
+#LASTNAME:LUTZ
+#---------
+#ID:30
+#LASTNAME:LUTZ
+#---------
+#ID:40
+#LASTNAME:LUTZ
+#---------
+#ID:50
+#LASTNAME:LUTZ
+#---------
+#ID:60
+#LASTNAME:LUTZ
+#---------
+#ID:70
+#LASTNAME:LUTZ
+#---------
+#ID:80
+#LASTNAME:LUTZ
+#---------
+#ID:90
+#LASTNAME:LUTZ
+#---------
+#ID:100
+#LASTNAME:LUTZ
+#---------
+#ID:110
+#LASTNAME:LUTZ
+#---------
+#ID:120
+#LASTNAME:LUTZ
+#---------
+#ID:130
+#LASTNAME:LUTZ
+#---------
+#ID:140
+#LASTNAME:LUTZ
+#---------
+#ID:150
+#LASTNAME:LUTZ
+#---------
+#ID:160
+#LASTNAME:LUTZ
+#---------
+#ID:170
+#LASTNAME:LUTZ
+#---------
+#ID:180
+#LASTNAME:LUTZ
+#---------
+#ID:190
+#LASTNAME:LUTZ
+#---------
+#ID:200
+#LASTNAME:LUTZ
+#---------
+#ID:210
+#LASTNAME:LUTZ
+#---------
+#ID:220
+#LASTNAME:LUTZ
+#---------
+#ID:230
+#LASTNAME:LUTZ
+#---------
+#ID:240
+#LASTNAME:LUTZ
+#---------
+#ID:250
+#LASTNAME:LUTZ
+#---------
+#ID:260
+#LASTNAME:LUTZ
+#---------
+#ID:270
+#LASTNAME:LUTZ
+#---------
+#ID:280
+#LASTNAME:LUTZ
+#---------
+#ID:290
+#LASTNAME:LUTZ
+#---------
+#ID:300
+#LASTNAME:LUTZ
+#---------
+#ID:310
+#LASTNAME:LUTZ
+#---------
+#ID:320
+#LASTNAME:LUTZ
+#---------
+#ID:330
+#LASTNAME:LUTZ
+#---------
+#ID:340
+#LASTNAME:LUTZ
+#---------
+#ID:350
+#LASTNAME:LUTZ
+#---------
+#ID:10
+#LASTNAME:MARINO
+#---------
+#ID:20
+#LASTNAME:MARINO
+#---------
+#ID:30
+#LASTNAME:MARINO
+#---------
+#ID:40
+#LASTNAME:MARINO
+#---------
+#ID:50
+#LASTNAME:MARINO
+#---------
+#ID:60
+#LASTNAME:MARINO
+#---------
+#ID:70
+#LASTNAME:MARINO
+#---------
+#ID:80
+#LASTNAME:MARINO
+#---------
+#ID:90
+#LASTNAME:MARINO
+#---------
+#ID:100
+#LASTNAME:MARINO
+#---------
+#ID:110
+#LASTNAME:MARINO
+#---------
+#ID:120
+#LASTNAME:MARINO
+#---------
+#ID:130
+#LASTNAME:MARINO
+#---------
+#ID:140
+#LASTNAME:MARINO
+#---------
+#ID:150
+#LASTNAME:MARINO
+#---------
+#ID:160
+#LASTNAME:MARINO
+#---------
+#ID:170
+#LASTNAME:MARINO
+#---------
+#ID:180
+#LASTNAME:MARINO
+#---------
+#ID:190
+#LASTNAME:MARINO
+#---------
+#ID:200
+#LASTNAME:MARINO
+#---------
+#ID:210
+#LASTNAME:MARINO
+#---------
+#ID:220
+#LASTNAME:MARINO
+#---------
+#ID:230
+#LASTNAME:MARINO
+#---------
+#ID:240
+#LASTNAME:MARINO
+#---------
+#ID:250
+#LASTNAME:MARINO
+#---------
+#ID:260
+#LASTNAME:MARINO
+#---------
+#ID:270
+#LASTNAME:MARINO
+#---------
+#ID:280
+#LASTNAME:MARINO
+#---------
+#ID:290
+#LASTNAME:MARINO
+#---------
+#ID:300
+#LASTNAME:MARINO
+#---------
+#ID:310
+#LASTNAME:MARINO
+#---------
+#ID:320
+#LASTNAME:MARINO
+#---------
+#ID:330
+#LASTNAME:MARINO
+#---------
+#ID:340
+#LASTNAME:MARINO
+#---------
+#ID:350
+#LASTNAME:MARINO
+#---------
+#ID:10
+#LASTNAME:MEHTA
+#---------
+#ID:20
+#LASTNAME:MEHTA
+#---------
+#ID:30
+#LASTNAME:MEHTA
+#---------
+#ID:40
+#LASTNAME:MEHTA
+#---------
+#ID:50
+#LASTNAME:MEHTA
+#---------
+#ID:60
+#LASTNAME:MEHTA
+#---------
+#ID:70
+#LASTNAME:MEHTA
+#---------
+#ID:80
+#LASTNAME:MEHTA
+#---------
+#ID:90
+#LASTNAME:MEHTA
+#---------
+#ID:100
+#LASTNAME:MEHTA
+#---------
+#ID:110
+#LASTNAME:MEHTA
+#---------
+#ID:120
+#LASTNAME:MEHTA
+#---------
+#ID:130
+#LASTNAME:MEHTA
+#---------
+#ID:140
+#LASTNAME:MEHTA
+#---------
+#ID:150
+#LASTNAME:MEHTA
+#---------
+#ID:160
+#LASTNAME:MEHTA
+#---------
+#ID:170
+#LASTNAME:MEHTA
+#---------
+#ID:180
+#LASTNAME:MEHTA
+#---------
+#ID:190
+#LASTNAME:MEHTA
+#---------
+#ID:200
+#LASTNAME:MEHTA
+#---------
+#ID:210
+#LASTNAME:MEHTA
+#---------
+#ID:220
+#LASTNAME:MEHTA
+#---------
+#ID:230
+#LASTNAME:MEHTA
+#---------
+#ID:240
+#LASTNAME:MEHTA
+#---------
+#ID:250
+#LASTNAME:MEHTA
+#---------
+#ID:260
+#LASTNAME:MEHTA
+#---------
+#ID:270
+#LASTNAME:MEHTA
+#---------
+#ID:280
+#LASTNAME:MEHTA
+#---------
+#ID:290
+#LASTNAME:MEHTA
+#---------
+#ID:300
+#LASTNAME:MEHTA
+#---------
+#ID:310
+#LASTNAME:MEHTA
+#---------
+#ID:320
+#LASTNAME:MEHTA
+#---------
+#ID:330
+#LASTNAME:MEHTA
+#---------
+#ID:340
+#LASTNAME:MEHTA
+#---------
+#ID:350
+#LASTNAME:MEHTA
+#---------
+#ID:10
+#LASTNAME:NICHOLLS
+#---------
+#ID:20
+#LASTNAME:NICHOLLS
+#---------
+#ID:30
+#LASTNAME:NICHOLLS
+#---------
+#ID:40
+#LASTNAME:NICHOLLS
+#---------
+#ID:50
+#LASTNAME:NICHOLLS
+#---------
+#ID:60
+#LASTNAME:NICHOLLS
+#---------
+#ID:70
+#LASTNAME:NICHOLLS
+#---------
+#ID:80
+#LASTNAME:NICHOLLS
+#---------
+#ID:90
+#LASTNAME:NICHOLLS
+#---------
+#ID:100
+#LASTNAME:NICHOLLS
+#---------
+#ID:110
+#LASTNAME:NICHOLLS
+#---------
+#ID:120
+#LASTNAME:NICHOLLS
+#---------
+#ID:130
+#LASTNAME:NICHOLLS
+#---------
+#ID:140
+#LASTNAME:NICHOLLS
+#---------
+#ID:150
+#LASTNAME:NICHOLLS
+#---------
+#ID:160
+#LASTNAME:NICHOLLS
+#---------
+#ID:170
+#LASTNAME:NICHOLLS
+#---------
+#ID:180
+#LASTNAME:NICHOLLS
+#---------
+#ID:190
+#LASTNAME:NICHOLLS
+#---------
+#ID:200
+#LASTNAME:NICHOLLS
+#---------
+#ID:210
+#LASTNAME:NICHOLLS
+#---------
+#ID:220
+#LASTNAME:NICHOLLS
+#---------
+#ID:230
+#LASTNAME:NICHOLLS
+#---------
+#ID:240
+#LASTNAME:NICHOLLS
+#---------
+#ID:250
+#LASTNAME:NICHOLLS
+#---------
+#ID:260
+#LASTNAME:NICHOLLS
+#---------
+#ID:270
+#LASTNAME:NICHOLLS
+#---------
+#ID:280
+#LASTNAME:NICHOLLS
+#---------
+#ID:290
+#LASTNAME:NICHOLLS
+#---------
+#ID:300
+#LASTNAME:NICHOLLS
+#---------
+#ID:310
+#LASTNAME:NICHOLLS
+#---------
+#ID:320
+#LASTNAME:NICHOLLS
+#---------
+#ID:330
+#LASTNAME:NICHOLLS
+#---------
+#ID:340
+#LASTNAME:NICHOLLS
+#---------
+#ID:350
+#LASTNAME:NICHOLLS
+#---------
+#ID:10
+#LASTNAME:OCONNELL
+#---------
+#ID:20
+#LASTNAME:OCONNELL
+#---------
+#ID:30
+#LASTNAME:OCONNELL
+#---------
+#ID:40
+#LASTNAME:OCONNELL
+#---------
+#ID:50
+#LASTNAME:OCONNELL
+#---------
+#ID:60
+#LASTNAME:OCONNELL
+#---------
+#ID:70
+#LASTNAME:OCONNELL
+#---------
+#ID:80
+#LASTNAME:OCONNELL
+#---------
+#ID:90
+#LASTNAME:OCONNELL
+#---------
+#ID:100
+#LASTNAME:OCONNELL
+#---------
+#ID:110
+#LASTNAME:OCONNELL
+#---------
+#ID:120
+#LASTNAME:OCONNELL
+#---------
+#ID:130
+#LASTNAME:OCONNELL
+#---------
+#ID:140
+#LASTNAME:OCONNELL
+#---------
+#ID:150
+#LASTNAME:OCONNELL
+#---------
+#ID:160
+#LASTNAME:OCONNELL
+#---------
+#ID:170
+#LASTNAME:OCONNELL
+#---------
+#ID:180
+#LASTNAME:OCONNELL
+#---------
+#ID:190
+#LASTNAME:OCONNELL
+#---------
+#ID:200
+#LASTNAME:OCONNELL
+#---------
+#ID:210
+#LASTNAME:OCONNELL
+#---------
+#ID:220
+#LASTNAME:OCONNELL
+#---------
+#ID:230
+#LASTNAME:OCONNELL
+#---------
+#ID:240
+#LASTNAME:OCONNELL
+#---------
+#ID:250
+#LASTNAME:OCONNELL
+#---------
+#ID:260
+#LASTNAME:OCONNELL
+#---------
+#ID:270
+#LASTNAME:OCONNELL
+#---------
+#ID:280
+#LASTNAME:OCONNELL
+#---------
+#ID:290
+#LASTNAME:OCONNELL
+#---------
+#ID:300
+#LASTNAME:OCONNELL
+#---------
+#ID:310
+#LASTNAME:OCONNELL
+#---------
+#ID:320
+#LASTNAME:OCONNELL
+#---------
+#ID:330
+#LASTNAME:OCONNELL
+#---------
+#ID:340
+#LASTNAME:OCONNELL
+#---------
+#ID:350
+#LASTNAME:OCONNELL
+#---------
+#ID:10
+#LASTNAME:PARKER
+#---------
+#ID:20
+#LASTNAME:PARKER
+#---------
+#ID:30
+#LASTNAME:PARKER
+#---------
+#ID:40
+#LASTNAME:PARKER
+#---------
+#ID:50
+#LASTNAME:PARKER
+#---------
+#ID:60
+#LASTNAME:PARKER
+#---------
+#ID:70
+#LASTNAME:PARKER
+#---------
+#ID:80
+#LASTNAME:PARKER
+#---------
+#ID:90
+#LASTNAME:PARKER
+#---------
+#ID:100
+#LASTNAME:PARKER
+#---------
+#ID:110
+#LASTNAME:PARKER
+#---------
+#ID:120
+#LASTNAME:PARKER
+#---------
+#ID:130
+#LASTNAME:PARKER
+#---------
+#ID:140
+#LASTNAME:PARKER
+#---------
+#ID:150
+#LASTNAME:PARKER
+#---------
+#ID:160
+#LASTNAME:PARKER
+#---------
+#ID:170
+#LASTNAME:PARKER
+#---------
+#ID:180
+#LASTNAME:PARKER
+#---------
+#ID:190
+#LASTNAME:PARKER
+#---------
+#ID:200
+#LASTNAME:PARKER
+#---------
+#ID:210
+#LASTNAME:PARKER
+#---------
+#ID:220
+#LASTNAME:PARKER
+#---------
+#ID:230
+#LASTNAME:PARKER
+#---------
+#ID:240
+#LASTNAME:PARKER
+#---------
+#ID:250
+#LASTNAME:PARKER
+#---------
+#ID:260
+#LASTNAME:PARKER
+#---------
+#ID:270
+#LASTNAME:PARKER
+#---------
+#ID:280
+#LASTNAME:PARKER
+#---------
+#ID:290
+#LASTNAME:PARKER
+#---------
+#ID:300
+#LASTNAME:PARKER
+#---------
+#ID:310
+#LASTNAME:PARKER
+#---------
+#ID:320
+#LASTNAME:PARKER
+#---------
+#ID:330
+#LASTNAME:PARKER
+#---------
+#ID:340
+#LASTNAME:PARKER
+#---------
+#ID:350
+#LASTNAME:PARKER
+#---------
+#ID:10
+#LASTNAME:PEREZ
+#---------
+#ID:20
+#LASTNAME:PEREZ
+#---------
+#ID:30
+#LASTNAME:PEREZ
+#---------
+#ID:40
+#LASTNAME:PEREZ
+#---------
+#ID:50
+#LASTNAME:PEREZ
+#---------
+#ID:60
+#LASTNAME:PEREZ
+#---------
+#ID:70
+#LASTNAME:PEREZ
+#---------
+#ID:80
+#LASTNAME:PEREZ
+#---------
+#ID:90
+#LASTNAME:PEREZ
+#---------
+#ID:100
+#LASTNAME:PEREZ
+#---------
+#ID:110
+#LASTNAME:PEREZ
+#---------
+#ID:120
+#LASTNAME:PEREZ
+#---------
+#ID:130
+#LASTNAME:PEREZ
+#---------
+#ID:140
+#LASTNAME:PEREZ
+#---------
+#ID:150
+#LASTNAME:PEREZ
+#---------
+#ID:160
+#LASTNAME:PEREZ
+#---------
+#ID:170
+#LASTNAME:PEREZ
+#---------
+#ID:180
+#LASTNAME:PEREZ
+#---------
+#ID:190
+#LASTNAME:PEREZ
+#---------
+#ID:200
+#LASTNAME:PEREZ
+#---------
+#ID:210
+#LASTNAME:PEREZ
+#---------
+#ID:220
+#LASTNAME:PEREZ
+#---------
+#ID:230
+#LASTNAME:PEREZ
+#---------
+#ID:240
+#LASTNAME:PEREZ
+#---------
+#ID:250
+#LASTNAME:PEREZ
+#---------
+#ID:260
+#LASTNAME:PEREZ
+#---------
+#ID:270
+#LASTNAME:PEREZ
+#---------
+#ID:280
+#LASTNAME:PEREZ
+#---------
+#ID:290
+#LASTNAME:PEREZ
+#---------
+#ID:300
+#LASTNAME:PEREZ
+#---------
+#ID:310
+#LASTNAME:PEREZ
+#---------
+#ID:320
+#LASTNAME:PEREZ
+#---------
+#ID:330
+#LASTNAME:PEREZ
+#---------
+#ID:340
+#LASTNAME:PEREZ
+#---------
+#ID:350
+#LASTNAME:PEREZ
+#---------
+#ID:10
+#LASTNAME:PIANKA
+#---------
+#ID:20
+#LASTNAME:PIANKA
+#---------
+#ID:30
+#LASTNAME:PIANKA
+#---------
+#ID:40
+#LASTNAME:PIANKA
+#---------
+#ID:50
+#LASTNAME:PIANKA
+#---------
+#ID:60
+#LASTNAME:PIANKA
+#---------
+#ID:70
+#LASTNAME:PIANKA
+#---------
+#ID:80
+#LASTNAME:PIANKA
+#---------
+#ID:90
+#LASTNAME:PIANKA
+#---------
+#ID:100
+#LASTNAME:PIANKA
+#---------
+#ID:110
+#LASTNAME:PIANKA
+#---------
+#ID:120
+#LASTNAME:PIANKA
+#---------
+#ID:130
+#LASTNAME:PIANKA
+#---------
+#ID:140
+#LASTNAME:PIANKA
+#---------
+#ID:150
+#LASTNAME:PIANKA
+#---------
+#ID:160
+#LASTNAME:PIANKA
+#---------
+#ID:170
+#LASTNAME:PIANKA
+#---------
+#ID:180
+#LASTNAME:PIANKA
+#---------
+#ID:190
+#LASTNAME:PIANKA
+#---------
+#ID:200
+#LASTNAME:PIANKA
+#---------
+#ID:210
+#LASTNAME:PIANKA
+#---------
+#ID:220
+#LASTNAME:PIANKA
+#---------
+#ID:230
+#LASTNAME:PIANKA
+#---------
+#ID:240
+#LASTNAME:PIANKA
+#---------
+#ID:250
+#LASTNAME:PIANKA
+#---------
+#ID:260
+#LASTNAME:PIANKA
+#---------
+#ID:270
+#LASTNAME:PIANKA
+#---------
+#ID:280
+#LASTNAME:PIANKA
+#---------
+#ID:290
+#LASTNAME:PIANKA
+#---------
+#ID:300
+#LASTNAME:PIANKA
+#---------
+#ID:310
+#LASTNAME:PIANKA
+#---------
+#ID:320
+#LASTNAME:PIANKA
+#---------
+#ID:330
+#LASTNAME:PIANKA
+#---------
+#ID:340
+#LASTNAME:PIANKA
+#---------
+#ID:350
+#LASTNAME:PIANKA
+#---------
+#ID:10
+#LASTNAME:PULASKI
+#---------
+#ID:20
+#LASTNAME:PULASKI
+#---------
+#ID:30
+#LASTNAME:PULASKI
+#---------
+#ID:40
+#LASTNAME:PULASKI
+#---------
+#ID:50
+#LASTNAME:PULASKI
+#---------
+#ID:60
+#LASTNAME:PULASKI
+#---------
+#ID:70
+#LASTNAME:PULASKI
+#---------
+#ID:80
+#LASTNAME:PULASKI
+#---------
+#ID:90
+#LASTNAME:PULASKI
+#---------
+#ID:100
+#LASTNAME:PULASKI
+#---------
+#ID:110
+#LASTNAME:PULASKI
+#---------
+#ID:120
+#LASTNAME:PULASKI
+#---------
+#ID:130
+#LASTNAME:PULASKI
+#---------
+#ID:140
+#LASTNAME:PULASKI
+#---------
+#ID:150
+#LASTNAME:PULASKI
+#---------
+#ID:160
+#LASTNAME:PULASKI
+#---------
+#ID:170
+#LASTNAME:PULASKI
+#---------
+#ID:180
+#LASTNAME:PULASKI
+#---------
+#ID:190
+#LASTNAME:PULASKI
+#---------
+#ID:200
+#LASTNAME:PULASKI
+#---------
+#ID:210
+#LASTNAME:PULASKI
+#---------
+#ID:220
+#LASTNAME:PULASKI
+#---------
+#ID:230
+#LASTNAME:PULASKI
+#---------
+#ID:240
+#LASTNAME:PULASKI
+#---------
+#ID:250
+#LASTNAME:PULASKI
+#---------
+#ID:260
+#LASTNAME:PULASKI
+#---------
+#ID:270
+#LASTNAME:PULASKI
+#---------
+#ID:280
+#LASTNAME:PULASKI
+#---------
+#ID:290
+#LASTNAME:PULASKI
+#---------
+#ID:300
+#LASTNAME:PULASKI
+#---------
+#ID:310
+#LASTNAME:PULASKI
+#---------
+#ID:320
+#LASTNAME:PULASKI
+#---------
+#ID:330
+#LASTNAME:PULASKI
+#---------
+#ID:340
+#LASTNAME:PULASKI
+#---------
+#ID:350
+#LASTNAME:PULASKI
+#---------
+#ID:10
+#LASTNAME:QUINTANA
+#---------
+#ID:20
+#LASTNAME:QUINTANA
+#---------
+#ID:30
+#LASTNAME:QUINTANA
+#---------
+#ID:40
+#LASTNAME:QUINTANA
+#---------
+#ID:50
+#LASTNAME:QUINTANA
+#---------
+#ID:60
+#LASTNAME:QUINTANA
+#---------
+#ID:70
+#LASTNAME:QUINTANA
+#---------
+#ID:80
+#LASTNAME:QUINTANA
+#---------
+#ID:90
+#LASTNAME:QUINTANA
+#---------
+#ID:100
+#LASTNAME:QUINTANA
+#---------
+#ID:110
+#LASTNAME:QUINTANA
+#---------
+#ID:120
+#LASTNAME:QUINTANA
+#---------
+#ID:130
+#LASTNAME:QUINTANA
+#---------
+#ID:140
+#LASTNAME:QUINTANA
+#---------
+#ID:150
+#LASTNAME:QUINTANA
+#---------
+#ID:160
+#LASTNAME:QUINTANA
+#---------
+#ID:170
+#LASTNAME:QUINTANA
+#---------
+#ID:180
+#LASTNAME:QUINTANA
+#---------
+#ID:190
+#LASTNAME:QUINTANA
+#---------
+#ID:200
+#LASTNAME:QUINTANA
+#---------
+#ID:210
+#LASTNAME:QUINTANA
+#---------
+#ID:220
+#LASTNAME:QUINTANA
+#---------
+#ID:230
+#LASTNAME:QUINTANA
+#---------
+#ID:240
+#LASTNAME:QUINTANA
+#---------
+#ID:250
+#LASTNAME:QUINTANA
+#---------
+#ID:260
+#LASTNAME:QUINTANA
+#---------
+#ID:270
+#LASTNAME:QUINTANA
+#---------
+#ID:280
+#LASTNAME:QUINTANA
+#---------
+#ID:290
+#LASTNAME:QUINTANA
+#---------
+#ID:300
+#LASTNAME:QUINTANA
+#---------
+#ID:310
+#LASTNAME:QUINTANA
+#---------
+#ID:320
+#LASTNAME:QUINTANA
+#---------
+#ID:330
+#LASTNAME:QUINTANA
+#---------
+#ID:340
+#LASTNAME:QUINTANA
+#---------
+#ID:350
+#LASTNAME:QUINTANA
+#---------
+#ID:10
+#LASTNAME:SCHNEIDER
+#---------
+#ID:20
+#LASTNAME:SCHNEIDER
+#---------
+#ID:30
+#LASTNAME:SCHNEIDER
+#---------
+#ID:40
+#LASTNAME:SCHNEIDER
+#---------
+#ID:50
+#LASTNAME:SCHNEIDER
+#---------
+#ID:60
+#LASTNAME:SCHNEIDER
+#---------
+#ID:70
+#LASTNAME:SCHNEIDER
+#---------
+#ID:80
+#LASTNAME:SCHNEIDER
+#---------
+#ID:90
+#LASTNAME:SCHNEIDER
+#---------
+#ID:100
+#LASTNAME:SCHNEIDER
+#---------
+#ID:110
+#LASTNAME:SCHNEIDER
+#---------
+#ID:120
+#LASTNAME:SCHNEIDER
+#---------
+#ID:130
+#LASTNAME:SCHNEIDER
+#---------
+#ID:140
+#LASTNAME:SCHNEIDER
+#---------
+#ID:150
+#LASTNAME:SCHNEIDER
+#---------
+#ID:160
+#LASTNAME:SCHNEIDER
+#---------
+#ID:170
+#LASTNAME:SCHNEIDER
+#---------
+#ID:180
+#LASTNAME:SCHNEIDER
+#---------
+#ID:190
+#LASTNAME:SCHNEIDER
+#---------
+#ID:200
+#LASTNAME:SCHNEIDER
+#---------
+#ID:210
+#LASTNAME:SCHNEIDER
+#---------
+#ID:220
+#LASTNAME:SCHNEIDER
+#---------
+#ID:230
+#LASTNAME:SCHNEIDER
+#---------
+#ID:240
+#LASTNAME:SCHNEIDER
+#---------
+#ID:250
+#LASTNAME:SCHNEIDER
+#---------
+#ID:260
+#LASTNAME:SCHNEIDER
+#---------
+#ID:270
+#LASTNAME:SCHNEIDER
+#---------
+#ID:280
+#LASTNAME:SCHNEIDER
+#---------
+#ID:290
+#LASTNAME:SCHNEIDER
+#---------
+#ID:300
+#LASTNAME:SCHNEIDER
+#---------
+#ID:310
+#LASTNAME:SCHNEIDER
+#---------
+#ID:320
+#LASTNAME:SCHNEIDER
+#---------
+#ID:330
+#LASTNAME:SCHNEIDER
+#---------
+#ID:340
+#LASTNAME:SCHNEIDER
+#---------
+#ID:350
+#LASTNAME:SCHNEIDER
+#---------
+#ID:10
+#LASTNAME:SCOUTTEN
+#---------
+#ID:20
+#LASTNAME:SCOUTTEN
+#---------
+#ID:30
+#LASTNAME:SCOUTTEN
+#---------
+#ID:40
+#LASTNAME:SCOUTTEN
+#---------
+#ID:50
+#LASTNAME:SCOUTTEN
+#---------
+#ID:60
+#LASTNAME:SCOUTTEN
+#---------
+#ID:70
+#LASTNAME:SCOUTTEN
+#---------
+#ID:80
+#LASTNAME:SCOUTTEN
+#---------
+#ID:90
+#LASTNAME:SCOUTTEN
+#---------
+#ID:100
+#LASTNAME:SCOUTTEN
+#---------
+#ID:110
+#LASTNAME:SCOUTTEN
+#---------
+#ID:120
+#LASTNAME:SCOUTTEN
+#---------
+#ID:130
+#LASTNAME:SCOUTTEN
+#---------
+#ID:140
+#LASTNAME:SCOUTTEN
+#---------
+#ID:150
+#LASTNAME:SCOUTTEN
+#---------
+#ID:160
+#LASTNAME:SCOUTTEN
+#---------
+#ID:170
+#LASTNAME:SCOUTTEN
+#---------
+#ID:180
+#LASTNAME:SCOUTTEN
+#---------
+#ID:190
+#LASTNAME:SCOUTTEN
+#---------
+#ID:200
+#LASTNAME:SCOUTTEN
+#---------
+#ID:210
+#LASTNAME:SCOUTTEN
+#---------
+#ID:220
+#LASTNAME:SCOUTTEN
+#---------
+#ID:230
+#LASTNAME:SCOUTTEN
+#---------
+#ID:240
+#LASTNAME:SCOUTTEN
+#---------
+#ID:250
+#LASTNAME:SCOUTTEN
+#---------
+#ID:260
+#LASTNAME:SCOUTTEN
+#---------
+#ID:270
+#LASTNAME:SCOUTTEN
+#---------
+#ID:280
+#LASTNAME:SCOUTTEN
+#---------
+#ID:290
+#LASTNAME:SCOUTTEN
+#---------
+#ID:300
+#LASTNAME:SCOUTTEN
+#---------
+#ID:310
+#LASTNAME:SCOUTTEN
+#---------
+#ID:320
+#LASTNAME:SCOUTTEN
+#---------
+#ID:330
+#LASTNAME:SCOUTTEN
+#---------
+#ID:340
+#LASTNAME:SCOUTTEN
+#---------
+#ID:350
+#LASTNAME:SCOUTTEN
+#---------
+#ID:10
+#LASTNAME:SETRIGHT
+#---------
+#ID:20
+#LASTNAME:SETRIGHT
+#---------
+#ID:30
+#LASTNAME:SETRIGHT
+#---------
+#ID:40
+#LASTNAME:SETRIGHT
+#---------
+#ID:50
+#LASTNAME:SETRIGHT
+#---------
+#ID:60
+#LASTNAME:SETRIGHT
+#---------
+#ID:70
+#LASTNAME:SETRIGHT
+#---------
+#ID:80
+#LASTNAME:SETRIGHT
+#---------
+#ID:90
+#LASTNAME:SETRIGHT
+#---------
+#ID:100
+#LASTNAME:SETRIGHT
+#---------
+#ID:110
+#LASTNAME:SETRIGHT
+#---------
+#ID:120
+#LASTNAME:SETRIGHT
+#---------
+#ID:130
+#LASTNAME:SETRIGHT
+#---------
+#ID:140
+#LASTNAME:SETRIGHT
+#---------
+#ID:150
+#LASTNAME:SETRIGHT
+#---------
+#ID:160
+#LASTNAME:SETRIGHT
+#---------
+#ID:170
+#LASTNAME:SETRIGHT
+#---------
+#ID:180
+#LASTNAME:SETRIGHT
+#---------
+#ID:190
+#LASTNAME:SETRIGHT
+#---------
+#ID:200
+#LASTNAME:SETRIGHT
+#---------
+#ID:210
+#LASTNAME:SETRIGHT
+#---------
+#ID:220
+#LASTNAME:SETRIGHT
+#---------
+#ID:230
+#LASTNAME:SETRIGHT
+#---------
+#ID:240
+#LASTNAME:SETRIGHT
+#---------
+#ID:250
+#LASTNAME:SETRIGHT
+#---------
+#ID:260
+#LASTNAME:SETRIGHT
+#---------
+#ID:270
+#LASTNAME:SETRIGHT
+#---------
+#ID:280
+#LASTNAME:SETRIGHT
+#---------
+#ID:290
+#LASTNAME:SETRIGHT
+#---------
+#ID:300
+#LASTNAME:SETRIGHT
+#---------
+#ID:310
+#LASTNAME:SETRIGHT
+#---------
+#ID:320
+#LASTNAME:SETRIGHT
+#---------
+#ID:330
+#LASTNAME:SETRIGHT
+#---------
+#ID:340
+#LASTNAME:SETRIGHT
+#---------
+#ID:350
+#LASTNAME:SETRIGHT
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:20
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:30
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:40
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:50
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:60
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:70
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:80
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:90
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:100
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:110
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:120
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:130
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:140
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:150
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:160
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:170
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:180
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:190
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:200
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:210
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:220
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:230
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:240
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:250
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:260
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:270
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:280
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:290
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:300
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:310
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:320
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:330
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:340
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:350
+#LASTNAME:SMITH
+#---------
+#ID:10
+#LASTNAME:SPENSER
+#---------
+#ID:20
+#LASTNAME:SPENSER
+#---------
+#ID:30
+#LASTNAME:SPENSER
+#---------
+#ID:40
+#LASTNAME:SPENSER
+#---------
+#ID:50
+#LASTNAME:SPENSER
+#---------
+#ID:60
+#LASTNAME:SPENSER
+#---------
+#ID:70
+#LASTNAME:SPENSER
+#---------
+#ID:80
+#LASTNAME:SPENSER
+#---------
+#ID:90
+#LASTNAME:SPENSER
+#---------
+#ID:100
+#LASTNAME:SPENSER
+#---------
+#ID:110
+#LASTNAME:SPENSER
+#---------
+#ID:120
+#LASTNAME:SPENSER
+#---------
+#ID:130
+#LASTNAME:SPENSER
+#---------
+#ID:140
+#LASTNAME:SPENSER
+#---------
+#ID:150
+#LASTNAME:SPENSER
+#---------
+#ID:160
+#LASTNAME:SPENSER
+#---------
+#ID:170
+#LASTNAME:SPENSER
+#---------
+#ID:180
+#LASTNAME:SPENSER
+#---------
+#ID:190
+#LASTNAME:SPENSER
+#---------
+#ID:200
+#LASTNAME:SPENSER
+#---------
+#ID:210
+#LASTNAME:SPENSER
+#---------
+#ID:220
+#LASTNAME:SPENSER
+#---------
+#ID:230
+#LASTNAME:SPENSER
+#---------
+#ID:240
+#LASTNAME:SPENSER
+#---------
+#ID:250
+#LASTNAME:SPENSER
+#---------
+#ID:260
+#LASTNAME:SPENSER
+#---------
+#ID:270
+#LASTNAME:SPENSER
+#---------
+#ID:280
+#LASTNAME:SPENSER
+#---------
+#ID:290
+#LASTNAME:SPENSER
+#---------
+#ID:300
+#LASTNAME:SPENSER
+#---------
+#ID:310
+#LASTNAME:SPENSER
+#---------
+#ID:320
+#LASTNAME:SPENSER
+#---------
+#ID:330
+#LASTNAME:SPENSER
+#---------
+#ID:340
+#LASTNAME:SPENSER
+#---------
+#ID:350
+#LASTNAME:SPENSER
+#---------
+#ID:10
+#LASTNAME:STERN
+#---------
+#ID:20
+#LASTNAME:STERN
+#---------
+#ID:30
+#LASTNAME:STERN
+#---------
+#ID:40
+#LASTNAME:STERN
+#---------
+#ID:50
+#LASTNAME:STERN
+#---------
+#ID:60
+#LASTNAME:STERN
+#---------
+#ID:70
+#LASTNAME:STERN
+#---------
+#ID:80
+#LASTNAME:STERN
+#---------
+#ID:90
+#LASTNAME:STERN
+#---------
+#ID:100
+#LASTNAME:STERN
+#---------
+#ID:110
+#LASTNAME:STERN
+#---------
+#ID:120
+#LASTNAME:STERN
+#---------
+#ID:130
+#LASTNAME:STERN
+#---------
+#ID:140
+#LASTNAME:STERN
+#---------
+#ID:150
+#LASTNAME:STERN
+#---------
+#ID:160
+#LASTNAME:STERN
+#---------
+#ID:170
+#LASTNAME:STERN
+#---------
+#ID:180
+#LASTNAME:STERN
+#---------
+#ID:190
+#LASTNAME:STERN
+#---------
+#ID:200
+#LASTNAME:STERN
+#---------
+#ID:210
+#LASTNAME:STERN
+#---------
+#ID:220
+#LASTNAME:STERN
+#---------
+#ID:230
+#LASTNAME:STERN
+#---------
+#ID:240
+#LASTNAME:STERN
+#---------
+#ID:250
+#LASTNAME:STERN
+#---------
+#ID:260
+#LASTNAME:STERN
+#---------
+#ID:270
+#LASTNAME:STERN
+#---------
+#ID:280
+#LASTNAME:STERN
+#---------
+#ID:290
+#LASTNAME:STERN
+#---------
+#ID:300
+#LASTNAME:STERN
+#---------
+#ID:310
+#LASTNAME:STERN
+#---------
+#ID:320
+#LASTNAME:STERN
+#---------
+#ID:330
+#LASTNAME:STERN
+#---------
+#ID:340
+#LASTNAME:STERN
+#---------
+#ID:350
+#LASTNAME:STERN
+#---------
+#ID:10
+#LASTNAME:THOMPSON
+#---------
+#ID:20
+#LASTNAME:THOMPSON
+#---------
+#ID:30
+#LASTNAME:THOMPSON
+#---------
+#ID:40
+#LASTNAME:THOMPSON
+#---------
+#ID:50
+#LASTNAME:THOMPSON
+#---------
+#ID:60
+#LASTNAME:THOMPSON
+#---------
+#ID:70
+#LASTNAME:THOMPSON
+#---------
+#ID:80
+#LASTNAME:THOMPSON
+#---------
+#ID:90
+#LASTNAME:THOMPSON
+#---------
+#ID:100
+#LASTNAME:THOMPSON
+#---------
+#ID:110
+#LASTNAME:THOMPSON
+#---------
+#ID:120
+#LASTNAME:THOMPSON
+#---------
+#ID:130
+#LASTNAME:THOMPSON
+#---------
+#ID:140
+#LASTNAME:THOMPSON
+#---------
+#ID:150
+#LASTNAME:THOMPSON
+#---------
+#ID:160
+#LASTNAME:THOMPSON
+#---------
+#ID:170
+#LASTNAME:THOMPSON
+#---------
+#ID:180
+#LASTNAME:THOMPSON
+#---------
+#ID:190
+#LASTNAME:THOMPSON
+#---------
+#ID:200
+#LASTNAME:THOMPSON
+#---------
+#ID:210
+#LASTNAME:THOMPSON
+#---------
+#ID:220
+#LASTNAME:THOMPSON
+#---------
+#ID:230
+#LASTNAME:THOMPSON
+#---------
+#ID:240
+#LASTNAME:THOMPSON
+#---------
+#ID:250
+#LASTNAME:THOMPSON
+#---------
+#ID:260
+#LASTNAME:THOMPSON
+#---------
+#ID:270
+#LASTNAME:THOMPSON
+#---------
+#ID:280
+#LASTNAME:THOMPSON
+#---------
+#ID:290
+#LASTNAME:THOMPSON
+#---------
+#ID:300
+#LASTNAME:THOMPSON
+#---------
+#ID:310
+#LASTNAME:THOMPSON
+#---------
+#ID:320
+#LASTNAME:THOMPSON
+#---------
+#ID:330
+#LASTNAME:THOMPSON
+#---------
+#ID:340
+#LASTNAME:THOMPSON
+#---------
+#ID:350
+#LASTNAME:THOMPSON
+#---------
+#ID:10
+#LASTNAME:WALKER
+#---------
+#ID:20
+#LASTNAME:WALKER
+#---------
+#ID:30
+#LASTNAME:WALKER
+#---------
+#ID:40
+#LASTNAME:WALKER
+#---------
+#ID:50
+#LASTNAME:WALKER
+#---------
+#ID:60
+#LASTNAME:WALKER
+#---------
+#ID:70
+#LASTNAME:WALKER
+#---------
+#ID:80
+#LASTNAME:WALKER
+#---------
+#ID:90
+#LASTNAME:WALKER
+#---------
+#ID:100
+#LASTNAME:WALKER
+#---------
+#ID:110
+#LASTNAME:WALKER
+#---------
+#ID:120
+#LASTNAME:WALKER
+#---------
+#ID:130
+#LASTNAME:WALKER
+#---------
+#ID:140
+#LASTNAME:WALKER
+#---------
+#ID:150
+#LASTNAME:WALKER
+#---------
+#ID:160
+#LASTNAME:WALKER
+#---------
+#ID:170
+#LASTNAME:WALKER
+#---------
+#ID:180
+#LASTNAME:WALKER
+#---------
+#ID:190
+#LASTNAME:WALKER
+#---------
+#ID:200
+#LASTNAME:WALKER
+#---------
+#ID:210
+#LASTNAME:WALKER
+#---------
+#ID:220
+#LASTNAME:WALKER
+#---------
+#ID:230
+#LASTNAME:WALKER
+#---------
+#ID:240
+#LASTNAME:WALKER
+#---------
+#ID:250
+#LASTNAME:WALKER
+#---------
+#ID:260
+#LASTNAME:WALKER
+#---------
+#ID:270
+#LASTNAME:WALKER
+#---------
+#ID:280
+#LASTNAME:WALKER
+#---------
+#ID:290
+#LASTNAME:WALKER
+#---------
+#ID:300
+#LASTNAME:WALKER
+#---------
+#ID:310
+#LASTNAME:WALKER
+#---------
+#ID:320
+#LASTNAME:WALKER
+#---------
+#ID:330
+#LASTNAME:WALKER
+#---------
+#ID:340
+#LASTNAME:WALKER
+#---------
+#ID:350
+#LASTNAME:WALKER
+#---------
+#ID:10
+#LASTNAME:YOSHIMURA
+#---------
+#ID:20
+#LASTNAME:YOSHIMURA
+#---------
+#ID:30
+#LASTNAME:YOSHIMURA
+#---------
+#ID:40
+#LASTNAME:YOSHIMURA
+#---------
+#ID:50
+#LASTNAME:YOSHIMURA
+#---------
+#ID:60
+#LASTNAME:YOSHIMURA
+#---------
+#ID:70
+#LASTNAME:YOSHIMURA
+#---------
+#ID:80
+#LASTNAME:YOSHIMURA
+#---------
+#ID:90
+#LASTNAME:YOSHIMURA
+#---------
+#ID:100
+#LASTNAME:YOSHIMURA
+#---------
+#ID:110
+#LASTNAME:YOSHIMURA
+#---------
+#ID:120
+#LASTNAME:YOSHIMURA
+#---------
+#ID:130
+#LASTNAME:YOSHIMURA
+#---------
+#ID:140
+#LASTNAME:YOSHIMURA
+#---------
+#ID:150
+#LASTNAME:YOSHIMURA
+#---------
+#ID:160
+#LASTNAME:YOSHIMURA
+#---------
+#ID:170
+#LASTNAME:YOSHIMURA
+#---------
+#ID:180
+#LASTNAME:YOSHIMURA
+#---------
+#ID:190
+#LASTNAME:YOSHIMURA
+#---------
+#ID:200
+#LASTNAME:YOSHIMURA
+#---------
+#ID:210
+#LASTNAME:YOSHIMURA
+#---------
+#ID:220
+#LASTNAME:YOSHIMURA
+#---------
+#ID:230
+#LASTNAME:YOSHIMURA
+#---------
+#ID:240
+#LASTNAME:YOSHIMURA
+#---------
+#ID:250
+#LASTNAME:YOSHIMURA
+#---------
+#ID:260
+#LASTNAME:YOSHIMURA
+#---------
+#ID:270
+#LASTNAME:YOSHIMURA
+#---------
+#ID:280
+#LASTNAME:YOSHIMURA
+#---------
+#ID:290
+#LASTNAME:YOSHIMURA
+#---------
+#ID:300
+#LASTNAME:YOSHIMURA
+#---------
+#ID:310
+#LASTNAME:YOSHIMURA
+#---------
+#ID:320
+#LASTNAME:YOSHIMURA
+#---------
+#ID:330
+#LASTNAME:YOSHIMURA
+#---------
+#ID:340
+#LASTNAME:YOSHIMURA
+#---------
+#ID:350
+#LASTNAME:YOSHIMURA
+#---------
diff -pruN 0.3.0-3/tests/test_124_FieldNamePos_02.py 2.0.5-0ubuntu2/tests/test_124_FieldNamePos_02.py
--- 0.3.0-3/tests/test_124_FieldNamePos_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_124_FieldNamePos_02.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,1135 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_124_FieldNamePos_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_124)
+
+  def run_test_124(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+  
+    if conn:
+       result = ibm_db.exec_immediate(conn, "select * from staff, employee, org where employee.lastname in ('HAAS','THOMPSON', 'KWAN', 'GEYER', 'STERN', 'PULASKI', 'HENDERSON', 'SPENSER', 'LUCCHESSI', 'OCONNELL', 'QUINTANA', 'NICHOLLS', 'ADAMSON', 'PIANKA', 'YOSHIMURA', 'SCOUTTEN', 'WALKER', 'BROWN', 'JONES', 'LUTZ', 'JEFFERSON', 'MARINO', 'SMITH', 'JOHNSON', 'PEREZ', 'SCHNEIDER', 'PARKER', 'SMITH', 'SETRIGHT', 'MEHTA', 'LEE', 'GOUNOT') order by org.location,employee.lastname,staff.id")
+       cols = ibm_db.num_fields(result)
+       j = 0
+       row = ibm_db.fetch_both(result)
+       while ( row ):
+          for i in range(0, cols):
+             field = ibm_db.field_name(result, i)
+             value = row[ibm_db.field_name(result, i)]
+             if (value == None): 
+                value = ''
+             print "%s:%s" % (field, value)
+          print "---------"
+          j += 1
+          if (j == 10):
+            break
+       
+          row = ibm_db.fetch_both(result)
+       
+       ibm_db.close(conn)
+       print "done"
+    else:
+       print ibm_db.conn_errormsg()
+#__END__
+#__LUW_EXPECTED__
+#ID:10
+#NAME:Sanders
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:20
+#NAME:Pernal
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:8
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:30
+#NAME:Marenghi
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:5
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:40
+#NAME:OBrien
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:50
+#NAME:Hanes
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:10
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:60
+#NAME:Quigley
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:70
+#NAME:Rothman
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:80
+#NAME:James
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:90
+#NAME:Koonitz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:100
+#NAME:Plotz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#done
+#__ZOS_EXPECTED__
+#ID:10
+#NAME:Sanders
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:20
+#NAME:Pernal
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:8
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:30
+#NAME:Marenghi
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:5
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:40
+#NAME:OBrien
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:50
+#NAME:Hanes
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:10
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:60
+#NAME:Quigley
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:70
+#NAME:Rothman
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:80
+#NAME:James
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:90
+#NAME:Koonitz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:100
+#NAME:Plotz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#done
+#__SYSTEMI_EXPECTED__
+#ID:10
+#NAME:Sanders
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:20
+#NAME:Pernal
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:8
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:30
+#NAME:Marenghi
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:5
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:40
+#NAME:OBrien
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:50
+#NAME:Hanes
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:10
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:60
+#NAME:Quigley
+#DEPT:38
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:70
+#NAME:Rothman
+#DEPT:15
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:80
+#NAME:James
+#DEPT:20
+#JOB:DESIGNER
+#YEARS:
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:90
+#NAME:Koonitz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:6
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#ID:100
+#NAME:Plotz
+#DEPT:42
+#JOB:DESIGNER
+#YEARS:7
+#SALARY:25280.00
+#COMM:2022.00
+#EMPNO:000150
+#FIRSTNME:BRUCE
+#MIDINIT: 
+#LASTNAME:ADAMSON
+#WORKDEPT:D11
+#PHONENO:4510
+#HIREDATE:1972-02-12
+#JOB:DESIGNER
+#EDLEVEL:16
+#SEX:M
+#BIRTHDATE:1947-05-17
+#SALARY:25280.00
+#BONUS:500.00
+#COMM:2022.00
+#DEPTNUMB:38
+#DEPTNAME:South Atlantic
+#MANAGER:30
+#DIVISION:Eastern
+#LOCATION:Atlanta
+#---------
+#done
+#__IDS_EXPECTED__
+#id:10
+#name:Sanders
+#dept:20
+#job:DESIGNER
+#years:7
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:20
+#name:Pernal
+#dept:20
+#job:DESIGNER
+#years:8
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:30
+#name:Marenghi
+#dept:38
+#job:DESIGNER
+#years:5
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:40
+#name:OBrien
+#dept:38
+#job:DESIGNER
+#years:6
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:50
+#name:Hanes
+#dept:15
+#job:DESIGNER
+#years:10
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:60
+#name:Quigley
+#dept:38
+#job:DESIGNER
+#years:
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:70
+#name:Rothman
+#dept:15
+#job:DESIGNER
+#years:7
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:80
+#name:James
+#dept:20
+#job:DESIGNER
+#years:
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:90
+#name:Koonitz
+#dept:42
+#job:DESIGNER
+#years:6
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#id:100
+#name:Plotz
+#dept:42
+#job:DESIGNER
+#years:7
+#salary:25280.00
+#comm:2022.00
+#empno:000150
+#firstnme:BRUCE
+#midinit: 
+#lastname:ADAMSON
+#workdept:D11
+#phoneno:4510
+#hiredate:1972-02-12
+#job:DESIGNER
+#edlevel:16
+#sex:M
+#birthdate:1947-05-17
+#salary:25280.00
+#bonus:500.00
+#comm:2022.00
+#deptnumb:38
+#deptname:South Atlantic
+#manager:30
+#division:Eastern
+#location:Atlanta
+#---------
+#done
+
+
+
diff -pruN 0.3.0-3/tests/test_125_FieldNamePos_03.py 2.0.5-0ubuntu2/tests/test_125_FieldNamePos_03.py
--- 0.3.0-3/tests/test_125_FieldNamePos_03.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_125_FieldNamePos_03.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,113 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_125_FieldNamePos_03(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_125)
+
+  def run_test_125(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    result = ibm_db.exec_immediate(conn, "SELECT * FROM sales")
+    result2 = ibm_db.exec_immediate(conn, "SELECT * FROM staff")
+    
+    for i in range(0, ibm_db.num_fields(result)):
+      print "%d:%s" % (i, ibm_db.field_name(result,i))
+    
+    print "-----"
+    
+    for i in range(0, ibm_db.num_fields(result2)):
+      print "%d:%s" % (i, ibm_db.field_name(result2,i))
+    
+    print "-----"
+    
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      print "Region:%s" % ibm_db.field_name(result, 'region')
+    else:
+      print "Region:%s" % ibm_db.field_name(result, 'REGION')
+    print "5:%s" % ibm_db.field_name(result2, 5)
+
+#__END__
+#__LUW_EXPECTED__
+#0:SALES_DATE
+#1:SALES_PERSON
+#2:REGION
+#3:SALES
+#
+#-----
+#0:ID
+#1:NAME
+#2:DEPT
+#3:JOB
+#4:YEARS
+#5:SALARY
+#6:COMM
+#
+#-----
+#Region:REGION
+#5:SALARY
+#__ZOS_EXPECTED__
+#0:SALES_DATE
+#1:SALES_PERSON
+#2:REGION
+#3:SALES
+#
+#-----
+#0:ID
+#1:NAME
+#2:DEPT
+#3:JOB
+#4:YEARS
+#5:SALARY
+#6:COMM
+#
+#-----
+#Region:REGION
+#5:SALARY
+#__SYSTEMI_EXPECTED__
+#0:SALES_DATE
+#1:SALES_PERSON
+#2:REGION
+#3:SALES
+#
+#-----
+#0:ID
+#1:NAME
+#2:DEPT
+#3:JOB
+#4:YEARS
+#5:SALARY
+#6:COMM
+#
+#-----
+#Region:REGION
+#5:SALARY
+#__IDS_EXPECTED__
+#0:sales_date
+#1:sales_person
+#2:region
+#3:sales
+#
+#-----
+#0:id
+#1:name
+#2:dept
+#3:job
+#4:years
+#5:salary
+#6:comm
+#
+#-----
+#Region:region
+#5:salary
diff -pruN 0.3.0-3/tests/test_130_PrepExecuteSelectStmt.py 2.0.5-0ubuntu2/tests/test_130_PrepExecuteSelectStmt.py
--- 0.3.0-3/tests/test_130_PrepExecuteSelectStmt.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_130_PrepExecuteSelectStmt.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,53 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_130_PrepExecuteSelectStmt(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_130)
+
+  def run_test_130(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+    
+      if ibm_db.execute(stmt):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+          for i in row:
+            print i
+            row = ibm_db.fetch_tuple(stmt)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_131_PrepareExecuteSelectStatementParams.py 2.0.5-0ubuntu2/tests/test_131_PrepareExecuteSelectStatementParams.py
--- 0.3.0-3/tests/test_131_PrepareExecuteSelectStatementParams.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_131_PrepareExecuteSelectStatementParams.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,54 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_131_PrepareExecuteSelectStatementParams(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_131)
+
+  def run_test_131(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare( conn, "SELECT id, breed, name, weight FROM animals WHERE id = ?" )
+    
+      if ibm_db.execute(stmt, (0,)):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+          #row.each { |child| print child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_132_ExecuteStatementArrayMultipleParams.py 2.0.5-0ubuntu2/tests/test_132_ExecuteStatementArrayMultipleParams.py
--- 0.3.0-3/tests/test_132_ExecuteStatementArrayMultipleParams.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_132_ExecuteStatementArrayMultipleParams.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,56 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_132_ExecuteStatementArrayMultipleParams(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_132)
+
+  def run_test_132(self):
+    sql =  "SELECT id, breed, name, weight FROM animals WHERE id = ? AND name = ?"
+    
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, sql)
+    
+      if (ibm_db.execute(stmt, (0, 'Pook'))):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+          #row.each { |child| print child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_133_ExecuteLongInputParams.py 2.0.5-0ubuntu2/tests/test_133_ExecuteLongInputParams.py
--- 0.3.0-3/tests/test_133_ExecuteLongInputParams.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_133_ExecuteLongInputParams.py	2014-01-30 20:59:58.000000000 +0000
@@ -0,0 +1,101 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_133_ExecuteLongInputParams(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_133)
+
+  def run_test_133(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if (not conn):
+      print "Connection failed."
+      return 0
+
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+    print "Starting test ..."
+    res = ''
+    sql =  "INSERT INTO animals (id, breed, name, weight) VALUES (?, ?, ?, ?)"
+    try:
+      stmt = ibm_db.prepare(conn, sql)
+      res = ibm_db.execute(stmt,(128, 'hacker of human and technological nature', 'Wez the ruler of all things PECL', 88.3))
+      
+      stmt = ibm_db.prepare(conn, "SELECT breed, name FROM animals WHERE id = ?")
+      res = ibm_db.execute(stmt, (128,))
+      row = ibm_db.fetch_assoc(stmt)
+      
+      for i in row:
+	         print i
+
+      ibm_db.rollback(conn)
+      print "Done"
+    except:
+      print "SQLSTATE: %s" % ibm_db.stmt_error(stmt)
+      print "Message: %s" % ibm_db.stmt_errormsg(stmt)
+
+    try:
+        stmt = ibm_db.prepare(conn, "SELECT breed, name FROM animals WHERE id = ?")
+        res = ibm_db.execute(stmt, (128,))
+        row = ibm_db.fetch_assoc(stmt)
+        if (row):
+            for i in row:
+                print i
+        print res
+        print "SQLSTATE: %s" % ibm_db.stmt_error(stmt)
+        print "Message: %s" % ibm_db.stmt_errormsg(stmt)
+    except:
+        print "An Exception is not expected"
+        print "SQLSTATE: %s" % ibm_db.stmt_error(stmt)
+        print "Message: %s" % ibm_db.stmt_errormsg(stmt)
+
+    ibm_db.rollback(conn)
+    print "Done"
+
+#__END__
+#__LUW_EXPECTED__
+#Starting test ...
+#
+#SQLSTATE: 22001
+#Message: [IBM][CLI Driver] CLI0109E  String data right truncation. SQLSTATE=22001 SQLCODE=-99999
+#True
+#SQLSTATE: 02000
+#Message: [IBM][CLI Driver][DB2/%s] SQL0100W  No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table.  SQLSTATE=02000 SQLCODE=100
+#Done
+#__ZOS_EXPECTED__
+#Starting test ...
+#
+#SQLSTATE: 22001
+#Message: [IBM][CLI Driver] CLI0109E  String data right truncation. SQLSTATE=22001 SQLCODE=-99999
+#True
+#SQLSTATE: 02000
+#Message: [IBM][CLI Driver][DB2] SQL0100W  No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table.  SQLSTATE=02000 SQLCODE=100
+#Done
+#__SYSTEMI_EXPECTED__
+#Starting test ...
+#
+#SQLSTATE: 22001
+#Message: [IBM][CLI Driver] CLI0109E  String data right truncation. SQLSTATE=22001 SQLCODE=-99999
+#True
+#SQLSTATE: 02000
+#Message: [IBM][CLI Driver][AS] SQL0100W  No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table.  SQLSTATE=02000 SQLCODE=100
+#Done
+#__IDS_EXPECTED__
+#Starting test ...
+#
+#SQLSTATE: 22001
+#Message: [IBM][CLI Driver][IDS%s] Value exceeds string column length. SQLCODE=-1279
+#True
+#SQLSTATE: 02000
+#Message: [IBM][CLI Driver][IDS%s] SQL0100W  No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table.  SQLSTATE=02000 SQLCODE=100
+#Done
\ No newline at end of file
diff -pruN 0.3.0-3/tests/test_140_BindParamSelectStmt.py 2.0.5-0ubuntu2/tests/test_140_BindParamSelectStmt.py
--- 0.3.0-3/tests/test_140_BindParamSelectStmt.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_140_BindParamSelectStmt.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,57 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_140_BindParamSelectStmt(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_140)
+
+  def run_test_140(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, "SELECT id, breed, name, weight FROM animals WHERE id = ?")
+    
+      animal = 0
+      ibm_db.bind_param(stmt, 1, animal)
+    
+      if ibm_db.execute(stmt):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ): 
+          #roiw.each { |child| puts child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_141_BindParamSelectStmtMultipleParams_01.py 2.0.5-0ubuntu2/tests/test_141_BindParamSelectStmtMultipleParams_01.py
--- 0.3.0-3/tests/test_141_BindParamSelectStmtMultipleParams_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_141_BindParamSelectStmtMultipleParams_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,94 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_141_BindParamSelectStmtMultipleParams_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_141)
+
+  def run_test_141(self):
+    sql = "SELECT id, breed, name, weight FROM animals WHERE id < ? AND weight > ?"
+    
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, sql)
+    
+      animal = 5
+      mass = 2.0
+      ibm_db.bind_param(stmt, 1, animal)
+      ibm_db.bind_param(stmt, 2, mass)
+    
+      if ibm_db.execute(stmt):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ): 
+          #row.each { |child| print child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#2
+#horse
+#Smarty          
+#350.00
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#2
+#horse
+#Smarty          
+#350.00
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#2
+#horse
+#Smarty          
+#350.00
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#2
+#horse
+#Smarty          
+#350.00
diff -pruN 0.3.0-3/tests/test_142_BindParamSelectStmtMultipleParams_02.py 2.0.5-0ubuntu2/tests/test_142_BindParamSelectStmtMultipleParams_02.py
--- 0.3.0-3/tests/test_142_BindParamSelectStmtMultipleParams_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_142_BindParamSelectStmtMultipleParams_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,112 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_142_BindParamSelectStmtMultipleParams_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_142)
+
+  def run_test_142(self):
+    sql = "SELECT id, breed, name, weight FROM animals WHERE weight < ? AND weight > ?"
+    
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, sql)
+    
+      weight = 200.05
+      mass = 2.0
+      
+      ibm_db.bind_param(stmt, 1, weight, ibm_db.SQL_PARAM_INPUT)
+      ibm_db.bind_param(stmt, 2, mass, ibm_db.SQL_PARAM_INPUT)
+    
+      result = ibm_db.execute(stmt) 
+      if ( result ):
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+          #row.each { |child| print child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#5
+#goat
+#Rickety Ride    
+#9.70
+#6
+#llama
+#Sweater         
+#150.00
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#5
+#goat
+#Rickety Ride    
+#9.70
+#6
+#llama
+#Sweater         
+#150.00
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#5
+#goat
+#Rickety Ride    
+#9.70
+#6
+#llama
+#Sweater         
+#150.00
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#1
+#dog
+#Peaches         
+#12.30
+#5
+#goat
+#Rickety Ride    
+#9.70
+#6
+#llama
+#Sweater         
+#150.00
diff -pruN 0.3.0-3/tests/test_143_BindParamInsertStmtNoneParam.py 2.0.5-0ubuntu2/tests/test_143_BindParamInsertStmtNoneParam.py
--- 0.3.0-3/tests/test_143_BindParamInsertStmtNoneParam.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_143_BindParamInsertStmtNoneParam.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,65 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_143_BindParamInsertStmtNoneParam(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_143)
+
+  def run_test_143(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+    insert1 = "INSERT INTO animals (id, breed, name, weight) VALUES (NULL, 'ghost', NULL, ?)"
+    select = 'SELECT id, breed, name, weight FROM animals WHERE weight IS NULL'
+    
+    if conn:
+      stmt = ibm_db.prepare(conn, insert1)
+    
+      animal = None
+      ibm_db.bind_param(stmt, 1, animal)
+    
+      if ibm_db.execute(stmt):
+        stmt = ibm_db.exec_immediate(conn, select)
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+          #row.each { |child| print child }
+          for i in row:
+            print i
+          row = ibm_db.fetch_tuple(stmt)
+
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__ZOS_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__SYSTEMI_EXPECTED__
+#None
+#ghost
+#None
+#None
+#__IDS_EXPECTED__
+#None
+#ghost
+#None
+#None
diff -pruN 0.3.0-3/tests/test_144_BindParamInsertStmtPARAM_FILE.py 2.0.5-0ubuntu2/tests/test_144_BindParamInsertStmtPARAM_FILE.py
--- 0.3.0-3/tests/test_144_BindParamInsertStmtPARAM_FILE.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_144_BindParamInsertStmtPARAM_FILE.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,54 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys, os
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_144_BindParamInsertStmtPARAM_FILE(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_144)
+
+  def run_test_144(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      # Drop the test table, in case it exists
+      drop = 'DROP TABLE pictures'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      
+      # Create the test table
+      create = 'CREATE TABLE pictures (id INTEGER, picture BLOB)'
+      result = ibm_db.exec_immediate(conn, create)
+      
+      stmt = ibm_db.prepare(conn, "INSERT INTO pictures VALUES (0, ?)")
+      
+      picture = os.path.dirname(os.path.abspath(__file__)) + "/pic1.jpg"
+      rc = ibm_db.bind_param(stmt, 1, picture, ibm_db.SQL_PARAM_INPUT, ibm_db.SQL_BINARY)
+    
+      rc = ibm_db.execute(stmt)
+      
+      num = ibm_db.num_rows(stmt)
+      
+      print num
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#1
+#__ZOS_EXPECTED__
+#1
+#__SYSTEMI_EXPECTED__
+#1
+#__IDS_EXPECTED__
+#1
diff -pruN 0.3.0-3/tests/test_145_BindRetrieveNoneEmptyString.py 2.0.5-0ubuntu2/tests/test_145_BindRetrieveNoneEmptyString.py
--- 0.3.0-3/tests/test_145_BindRetrieveNoneEmptyString.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_145_BindRetrieveNoneEmptyString.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,85 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_145_BindRetrieveNoneEmptyString(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_145)
+
+  def run_test_145(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      stmt = ibm_db.prepare(conn, "INSERT INTO animals (id, breed, name) VALUES (?, ?, ?)")
+
+      id = 999
+      breed = None
+      name = 'PythonDS'
+      ibm_db.bind_param(stmt, 1, id)
+      ibm_db.bind_param(stmt, 2, breed)
+      ibm_db.bind_param(stmt, 3, name)
+
+      # After this statement, we expect that the BREED column will contain
+      # an SQL NULL value, while the NAME column contains an empty string
+
+      ibm_db.execute(stmt)
+
+      # After this statement, we expect that the BREED column will contain
+      # an SQL NULL value, while the NAME column contains an empty string.
+      # Use the dynamically bound parameters to ensure that the code paths
+      # for both ibm_db.bind_param and ibm_db.execute treat Python Nones and empty
+      # strings the right way.
+
+      ibm_db.execute(stmt, (1000, None, 'PythonDS'))
+
+      result = ibm_db.exec_immediate(conn, "SELECT id, breed, name FROM animals WHERE breed IS NULL")
+      row = ibm_db.fetch_tuple(result)
+      while ( row ): 
+        for i in row:
+          print i
+        row = ibm_db.fetch_tuple(result)
+
+      ibm_db.rollback(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#999
+#None
+#PythonDS        
+#1000
+#None
+#PythonDS        
+#__ZOS_EXPECTED__
+#999
+#None
+#PythonDS        
+#1000
+#None
+#PythonDS        
+#__SYSTEMI_EXPECTED__
+#999
+#None
+#PythonDS        
+#1000
+#None
+#PythonDS        
+#__IDS_EXPECTED__
+#999
+#None
+#PythonDS        
+#1000
+#None
+#PythonDS        
diff -pruN 0.3.0-3/tests/test_146_CallSPINAndOUTParams.py 2.0.5-0ubuntu2/tests/test_146_CallSPINAndOUTParams.py
--- 0.3.0-3/tests/test_146_CallSPINAndOUTParams.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_146_CallSPINAndOUTParams.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,89 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_146_CallSPINAndOUTParams(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_146)
+
+  def run_test_146(self):      
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    if conn:
+      name = "Peaches"
+      second_name = "Rickety Ride"
+      weight = 0
+    
+      print "Values of bound parameters _before_ CALL:"
+      print "  1: %s 2: %s 3: %d\n" % (name, second_name, weight)
+      
+      stmt, name, second_name, weight = ibm_db.callproc(conn, 'match_animal', (name, second_name, weight))
+    
+      if stmt is not None:
+        print "Values of bound parameters _after_ CALL:"
+        print "  1: %s 2: %s 3: %d\n" % (name, second_name, weight)
+
+        if (server.DBMS_NAME[0:3] != 'IDS'):
+          print "Results:"
+          row = ibm_db.fetch_tuple(stmt)
+          while ( row ): 
+            print "  %s, %s, %s" % (row[0].strip(), row[1].strip(), row[2])
+            row = ibm_db.fetch_tuple(stmt)
+
+#__END__
+#__LUW_EXPECTED__
+#Values of bound parameters _before_ CALL:
+#  1: Peaches 2: Rickety Ride 3: 0
+#
+#Values of bound parameters _after_ CALL:
+#  1: Peaches 2: TRUE 3: 12
+#
+#Results:
+#  Peaches, dog, 12.30
+#  Pook, cat, 3.20
+#  Rickety Ride, goat, 9.70
+#  Smarty, horse, 350.00
+#  Sweater, llama, 150.00
+#__ZOS_EXPECTED__
+#Values of bound parameters _before_ CALL:
+#  1: Peaches 2: Rickety Ride 3: 0
+#
+#Values of bound parameters _after_ CALL:
+#  1: Peaches 2: TRUE 3: 12
+#
+#Results:
+#  Peaches, dog, 12.30
+#  Pook, cat, 3.20
+#  Rickety Ride, goat, 9.70
+#  Smarty, horse, 350.00
+#  Sweater, llama, 150.00
+#__SYSTEMI_EXPECTED__
+#Values of bound parameters _before_ CALL:
+#  1: Peaches 2: Rickety Ride 3: 0
+#
+#Values of bound parameters _after_ CALL:
+#  1: Peaches 2: TRUE 3: 12
+#
+#Results:
+#  Peaches, dog, 12.30
+#  Pook, cat, 3.20
+#  Rickety Ride, goat, 9.70
+#  Smarty, horse, 350.00
+#  Sweater, llama, 150.00
+#__IDS_EXPECTED__
+#Values of bound parameters _before_ CALL:
+#  1: Peaches 2: Rickety Ride 3: 0
+#
+#Values of bound parameters _after_ CALL:
+#  1: Peaches 2: TRUE 3: 12
+#
diff -pruN 0.3.0-3/tests/test_147_PrepareWithWrongType.py 2.0.5-0ubuntu2/tests/test_147_PrepareWithWrongType.py
--- 0.3.0-3/tests/test_147_PrepareWithWrongType.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_147_PrepareWithWrongType.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,51 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_147_PrepareWithWrongType(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_147)
+
+  def run_test_147(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      stmt = ibm_db.prepare(conn, "INSERT INTO animals (id, breed, name) VALUES (?, ?, ?)")
+    
+      id = "\"999\""
+      breed = None
+      name = 'PythonDS'
+      try:
+          ibm_db.bind_param(stmt, 1, id)
+          ibm_db.bind_param(stmt, 2, breed)
+          ibm_db.bind_param(stmt, 3, name)
+       
+          error = ibm_db.execute(stmt)
+          print "Should not make it this far"
+      except:
+          excp = sys.exc_info()
+          # slot 1 contains error message
+          print excp[1]
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Statement Execute Failed: [IBM][CLI Driver] CLI0112E  Error in assignment. SQLSTATE=22005 SQLCODE=-99999
+#__ZOS_EXPECTED__
+#Statement Execute Failed: [IBM][CLI Driver] CLI0112E  Error in assignment. SQLSTATE=22005 SQLCODE=-99999
+#__SYSTEMI_EXPECTED__
+#Statement Execute Failed: [IBM][CLI Driver] CLI0112E  Error in assignment. SQLSTATE=22005 SQLCODE=-99999
+#__IDS_EXPECTED__
+#Statement Execute Failed: [IBM][CLI Driver] CLI0112E  Error in assignment. SQLSTATE=22005 SQLCODE=-99999
diff -pruN 0.3.0-3/tests/test_148_CallSPDiffBindPattern_01.py 2.0.5-0ubuntu2/tests/test_148_CallSPDiffBindPattern_01.py
--- 0.3.0-3/tests/test_148_CallSPDiffBindPattern_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_148_CallSPDiffBindPattern_01.py	2014-02-03 05:44:28.000000000 +0000
@@ -0,0 +1,125 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_148_CallSPDiffBindPattern_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_148)
+
+  def run_test_148(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      ##### Set up #####
+      serverinfo = ibm_db.server_info( conn )
+      server = serverinfo.DBMS_NAME[0:3]
+      try:
+          sql = "DROP TABLE sptb"
+          ibm_db.exec_immediate(conn, sql)
+      except:
+          pass
+      
+      try:
+          sql = "DROP PROCEDURE sp"
+          ibm_db.exec_immediate(conn, sql)
+      except:
+          pass
+      
+      if (server == 'IDS'):
+        sql = "CREATE TABLE sptb (c1 INTEGER, c2 FLOAT, c3 VARCHAR(10), c4 INT8, c5 CLOB)"
+      else:
+        sql = "CREATE TABLE sptb (c1 INTEGER, c2 FLOAT, c3 VARCHAR(10), c4 BIGINT, c5 CLOB)"
+      
+      ibm_db.exec_immediate(conn, sql)
+      
+      sql = "INSERT INTO sptb (c1, c2, c3, c4, c5) VALUES (1, 5.01, 'varchar', 3271982, 'clob data clob data')"
+      ibm_db.exec_immediate(conn, sql)
+      
+      if (server == 'IDS'):
+        sql = """CREATE PROCEDURE sp(OUT out1 INTEGER, OUT out2 FLOAT, OUT out3 VARCHAR(10), OUT out4 INT8, OUT out5 CLOB);
+                 SELECT c1, c2, c3, c4, c5 INTO out1, out2, out3, out4, out5 FROM sptb; END PROCEDURE;"""
+      else:
+        sql = """CREATE PROCEDURE sp(OUT out1 INTEGER, OUT out2 FLOAT, OUT out3 VARCHAR(10), OUT out4 BIGINT, OUT out5 CLOB)
+                 DYNAMIC RESULT SETS 1 LANGUAGE SQL BEGIN
+                 SELECT c1, c2, c3, c4, c5 INTO out1, out2, out3, out4, out5 FROM sptb; END"""
+      ibm_db.exec_immediate(conn, sql)
+      #############################
+
+      ##### Run the test #####
+
+      out1 = 0
+      out2 = 0.00
+      out3 = ""
+      out4 = 0
+      out5 = ""
+
+      stmt, out1, out2, out3, out4, out5 = ibm_db.callproc(conn, 'sp', (out1, out2, out3, out4, out5))
+
+      print "out 1:"
+      print out1
+      print "out 2:"
+      print out2
+      print "out 3:"
+      print out3
+      print "out 4:"
+      print out4
+      print "out 5:"
+      print out5
+      #############################
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#out 1:
+#1
+#out 2:
+#5.01
+#out 3:
+#varchar
+#out 4:
+#3271982
+#out 5:
+#clob data clob data
+#__ZOS_EXPECTED__
+#out 1:
+#1
+#out 2:
+#5.01
+#out 3:
+#varchar
+#out 4:
+#3271982
+#out 5:
+#clob data clob data
+#__SYSTEMI_EXPECTED__
+#out 1:
+#1
+#out 2:
+#5.01
+#out 3:
+#varchar
+#out 4:
+#3271982
+#out 5:
+#clob data clob data
+#__IDS_EXPECTED__
+#out 1:
+#1
+#out 2:
+#5.01
+#out 3:
+#varchar
+#out 4:
+#3271982
+#out 5:
+#clob data clob data
diff -pruN 0.3.0-3/tests/test_150_FetchAssocSelect_01.py 2.0.5-0ubuntu2/tests/test_150_FetchAssocSelect_01.py
--- 0.3.0-3/tests/test_150_FetchAssocSelect_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_150_FetchAssocSelect_01.py	2014-01-31 11:09:02.000000000 +0000
@@ -0,0 +1,188 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_150_FetchAssocSelect_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_150)
+
+  def run_test_150(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+    
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+
+    row = ibm_db.fetch_assoc(result)    
+    while ( row ):
+      #print "%5d  " % row['ID']
+      #print "%-10s " % row['NAME']
+      #print "%5d " % row['DEPT']
+      #print "%-7s " % row['JOB']
+      #print "%5d " % row['YEARS']
+      #print "%15s " % row['SALARY']
+      #print "%10s " % row['COMM']
+      if (row['YEARS'] == None):
+        row['YEARS'] = 0
+      if (row['COMM'] == None):
+        row['COMM'] = ''
+      print "%5d  %-10s %5d %-7s %5s %15s %10s " % (row['ID'], row['NAME'], row['DEPT'], row['JOB'], row['YEARS'], row['SALARY'], row['COMM'])
+      row = ibm_db.fetch_assoc(result)
+
+#__END__
+#__LUW_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__ZOS_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__SYSTEMI_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
+#__IDS_EXPECTED__
+#   10  Sanders       20 Mgr         7        18357.50            
+#   20  Pernal        20 Sales       8        18171.25     612.45 
+#   30  Marenghi      38 Mgr         5        17506.75            
+#   40  OBrien        38 Sales       6        18006.00     846.55 
+#   50  Hanes         15 Mgr        10        20659.80            
+#   60  Quigley       38 Sales       0        16808.30     650.25 
+#   70  Rothman       15 Sales       7        16502.83    1152.00 
+#   80  James         20 Clerk       0        13504.60     128.20 
+#   90  Koonitz       42 Sales       6        18001.75    1386.70 
+#  100  Plotz         42 Mgr         7        18352.80            
+#  110  Ngan          15 Clerk       5        12508.20     206.60 
+#  120  Naughton      38 Clerk       0        12954.75     180.00 
+#  130  Yamaguchi     42 Clerk       6        10505.90      75.60 
+#  140  Fraye         51 Mgr         6        21150.00            
+#  150  Williams      51 Sales       6        19456.50     637.65 
+#  160  Molinare      10 Mgr         7        22959.20            
+#  170  Kermisch      15 Clerk       4        12258.50     110.10 
+#  180  Abrahams      38 Clerk       3        12009.75     236.50 
+#  190  Sneider       20 Clerk       8        14252.75     126.50 
+#  200  Scoutten      42 Clerk       0        11508.60      84.20 
+#  210  Lu            10 Mgr        10        20010.00            
+#  220  Smith         51 Sales       7        17654.50     992.80 
+#  230  Lundquist     51 Clerk       3        13369.80     189.65 
+#  240  Daniels       10 Mgr         5        19260.25            
+#  250  Wheeler       51 Clerk       6        14460.00     513.30 
+#  260  Jones         10 Mgr        12        21234.00            
+#  270  Lea           66 Mgr         9        18555.50            
+#  280  Wilson        66 Sales       9        18674.50     811.50 
+#  290  Quill         84 Mgr        10        19818.00            
+#  300  Davis         84 Sales       5        15454.50     806.10 
+#  310  Graham        66 Sales      13        21000.00     200.30 
+#  320  Gonzales      66 Sales       4        16858.20     844.00 
+#  330  Burke         66 Clerk       1        10988.00      55.50 
+#  340  Edwards       84 Sales       7        17844.00    1285.00 
+#  350  Gafney        84 Clerk       5        13030.50     188.00 
diff -pruN 0.3.0-3/tests/test_151_FetchAssocSelect_02.py 2.0.5-0ubuntu2/tests/test_151_FetchAssocSelect_02.py
--- 0.3.0-3/tests/test_151_FetchAssocSelect_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_151_FetchAssocSelect_02.py	2014-01-31 11:08:51.000000000 +0000
@@ -0,0 +1,212 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_151_FetchAssocSelect_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_151)
+
+  def run_test_151(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("%-10s ",row['SALES_DATE'])
+      #printf("%-15s ",row['SALES_PERSON'])
+      #printf("%-15s ",row['REGION'])
+      #printf("%4s",row['SALES'])
+      #puts ""
+      if (row['SALES'] == None):
+        row['SALES'] = ''
+      print "%-10s %-15s %-15s %4s" % (row['SALES_DATE'], row['SALES_PERSON'], row['REGION'], row['SALES'])
+      row = ibm_db.fetch_assoc(result)
+
+#__END__
+#__LUW_EXPECTED__
+#
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__ZOS_EXPECTED__
+#
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__SYSTEMI_EXPECTED__
+#
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
+#__IDS_EXPECTED__
+#
+#1995-12-31 LUCCHESSI       Ontario-South      1
+#1995-12-31 LEE             Ontario-South      3
+#1995-12-31 LEE             Quebec             1
+#1995-12-31 LEE             Manitoba           2
+#1995-12-31 GOUNOT          Quebec             1
+#1996-03-29 LUCCHESSI       Ontario-South      3
+#1996-03-29 LUCCHESSI       Quebec             1
+#1996-03-29 LEE             Ontario-South      2
+#1996-03-29 LEE             Ontario-North      2
+#1996-03-29 LEE             Quebec             3
+#1996-03-29 LEE             Manitoba           5
+#1996-03-29 GOUNOT          Ontario-South      3
+#1996-03-29 GOUNOT          Quebec             1
+#1996-03-29 GOUNOT          Manitoba           7
+#1996-03-30 LUCCHESSI       Ontario-South      1
+#1996-03-30 LUCCHESSI       Quebec             2
+#1996-03-30 LUCCHESSI       Manitoba           1
+#1996-03-30 LEE             Ontario-South      7
+#1996-03-30 LEE             Ontario-North      3
+#1996-03-30 LEE             Quebec             7
+#1996-03-30 LEE             Manitoba           4
+#1996-03-30 GOUNOT          Ontario-South      2
+#1996-03-30 GOUNOT          Quebec            18
+#1996-03-30 GOUNOT          Manitoba           1
+#1996-03-31 LUCCHESSI       Manitoba           1
+#1996-03-31 LEE             Ontario-South     14
+#1996-03-31 LEE             Ontario-North      3
+#1996-03-31 LEE             Quebec             7
+#1996-03-31 LEE             Manitoba           3
+#1996-03-31 GOUNOT          Ontario-South      2
+#1996-03-31 GOUNOT          Quebec             1
+#1996-04-01 LUCCHESSI       Ontario-South      3
+#1996-04-01 LUCCHESSI       Manitoba           1
+#1996-04-01 LEE             Ontario-South      8
+#1996-04-01 LEE             Ontario-North       
+#1996-04-01 LEE             Quebec             8
+#1996-04-01 LEE             Manitoba           9
+#1996-04-01 GOUNOT          Ontario-South      3
+#1996-04-01 GOUNOT          Ontario-North      1
+#1996-04-01 GOUNOT          Quebec             3
+#1996-04-01 GOUNOT          Manitoba           7
diff -pruN 0.3.0-3/tests/test_152_FetchAssocSelect_03.py 2.0.5-0ubuntu2/tests/test_152_FetchAssocSelect_03.py
--- 0.3.0-3/tests/test_152_FetchAssocSelect_03.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_152_FetchAssocSelect_03.py	2014-01-31 11:08:15.000000000 +0000
@@ -0,0 +1,132 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_152_FetchAssocSelect_03(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_152)
+
+  def run_test_152(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+    
+    result = ibm_db.exec_immediate(conn, "select * from project")
+    
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("%6s ",row['PROJNO'])
+      #printf("%-24s ",row['PROJNAME'])
+      #printf("%3s ",row['DEPTNO'])
+      #printf("%6s",row['RESPEMP'])
+      #printf("%7s ",row['PRSTAFF'])
+      #printf("%10s ",row['PRSTDATE'])
+      #printf("%10s ",row['PRENDATE'])
+      #printf("%6s",row['MAJPROJ'])
+      #puts ""
+      if (row['MAJPROJ'] == None):
+        row['MAJPROJ'] = ''
+      print "%6s %-24s %3s %6s%7s %10s %10s %6s" % (row['PROJNO'], row['PROJNAME'], row['DEPTNO'], row['RESPEMP'], row['PRSTAFF'], row['PRSTDATE'], row['PRENDATE'], row['MAJPROJ'])
+      row = ibm_db.fetch_assoc(result) 
+
+#__END__
+#__LUW_EXPECTED__
+#
+#AD3100 ADMIN SERVICES           D01 000010   6.50 1982-01-01 1983-02-01       
+#AD3110 GENERAL ADMIN SYSTEMS    D21 000070   6.00 1982-01-01 1983-02-01 AD3100
+#AD3111 PAYROLL PROGRAMMING      D21 000230   2.00 1982-01-01 1983-02-01 AD3110
+#AD3112 PERSONNEL PROGRAMMING    D21 000250   1.00 1982-01-01 1983-02-01 AD3110
+#AD3113 ACCOUNT PROGRAMMING      D21 000270   2.00 1982-01-01 1983-02-01 AD3110
+#IF1000 QUERY SERVICES           C01 000030   2.00 1982-01-01 1983-02-01       
+#IF2000 USER EDUCATION           C01 000030   1.00 1982-01-01 1983-02-01       
+#MA2100 WELD LINE AUTOMATION     D01 000010  12.00 1982-01-01 1983-02-01       
+#MA2110 W L PROGRAMMING          D11 000060   9.00 1982-01-01 1983-02-01 MA2100
+#MA2111 W L PROGRAM DESIGN       D11 000220   2.00 1982-01-01 1982-12-01 MA2110
+#MA2112 W L ROBOT DESIGN         D11 000150   3.00 1982-01-01 1982-12-01 MA2110
+#MA2113 W L PROD CONT PROGS      D11 000160   3.00 1982-02-15 1982-12-01 MA2110
+#OP1000 OPERATION SUPPORT        E01 000050   6.00 1982-01-01 1983-02-01       
+#OP1010 OPERATION                E11 000090   5.00 1982-01-01 1983-02-01 OP1000
+#OP2000 GEN SYSTEMS SERVICES     E01 000050   5.00 1982-01-01 1983-02-01       
+#OP2010 SYSTEMS SUPPORT          E21 000100   4.00 1982-01-01 1983-02-01 OP2000
+#OP2011 SCP SYSTEMS SUPPORT      E21 000320   1.00 1982-01-01 1983-02-01 OP2010
+#OP2012 APPLICATIONS SUPPORT     E21 000330   1.00 1982-01-01 1983-02-01 OP2010
+#OP2013 DB/DC SUPPORT            E21 000340   1.00 1982-01-01 1983-02-01 OP2010
+#PL2100 WELD LINE PLANNING       B01 000020   1.00 1982-01-01 1982-09-15 MA2100
+#__ZOS_EXPECTED__
+#
+#AD3100 ADMIN SERVICES           D01 000010   6.50 1982-01-01 1983-02-01       
+#AD3110 GENERAL ADMIN SYSTEMS    D21 000070   6.00 1982-01-01 1983-02-01 AD3100
+#AD3111 PAYROLL PROGRAMMING      D21 000230   2.00 1982-01-01 1983-02-01 AD3110
+#AD3112 PERSONNEL PROGRAMMING    D21 000250   1.00 1982-01-01 1983-02-01 AD3110
+#AD3113 ACCOUNT PROGRAMMING      D21 000270   2.00 1982-01-01 1983-02-01 AD3110
+#IF1000 QUERY SERVICES           C01 000030   2.00 1982-01-01 1983-02-01       
+#IF2000 USER EDUCATION           C01 000030   1.00 1982-01-01 1983-02-01       
+#MA2100 WELD LINE AUTOMATION     D01 000010  12.00 1982-01-01 1983-02-01       
+#MA2110 W L PROGRAMMING          D11 000060   9.00 1982-01-01 1983-02-01 MA2100
+#MA2111 W L PROGRAM DESIGN       D11 000220   2.00 1982-01-01 1982-12-01 MA2110
+#MA2112 W L ROBOT DESIGN         D11 000150   3.00 1982-01-01 1982-12-01 MA2110
+#MA2113 W L PROD CONT PROGS      D11 000160   3.00 1982-02-15 1982-12-01 MA2110
+#OP1000 OPERATION SUPPORT        E01 000050   6.00 1982-01-01 1983-02-01       
+#OP1010 OPERATION                E11 000090   5.00 1982-01-01 1983-02-01 OP1000
+#OP2000 GEN SYSTEMS SERVICES     E01 000050   5.00 1982-01-01 1983-02-01       
+#OP2010 SYSTEMS SUPPORT          E21 000100   4.00 1982-01-01 1983-02-01 OP2000
+#OP2011 SCP SYSTEMS SUPPORT      E21 000320   1.00 1982-01-01 1983-02-01 OP2010
+#OP2012 APPLICATIONS SUPPORT     E21 000330   1.00 1982-01-01 1983-02-01 OP2010
+#OP2013 DB/DC SUPPORT            E21 000340   1.00 1982-01-01 1983-02-01 OP2010
+#PL2100 WELD LINE PLANNING       B01 000020   1.00 1982-01-01 1982-09-15 MA2100
+#__SYSTEMI_EXPECTED__
+#
+#AD3100 ADMIN SERVICES           D01 000010   6.50 1982-01-01 1983-02-01       
+#AD3110 GENERAL ADMIN SYSTEMS    D21 000070   6.00 1982-01-01 1983-02-01 AD3100
+#AD3111 PAYROLL PROGRAMMING      D21 000230   2.00 1982-01-01 1983-02-01 AD3110
+#AD3112 PERSONNEL PROGRAMMING    D21 000250   1.00 1982-01-01 1983-02-01 AD3110
+#AD3113 ACCOUNT PROGRAMMING      D21 000270   2.00 1982-01-01 1983-02-01 AD3110
+#IF1000 QUERY SERVICES           C01 000030   2.00 1982-01-01 1983-02-01       
+#IF2000 USER EDUCATION           C01 000030   1.00 1982-01-01 1983-02-01       
+#MA2100 WELD LINE AUTOMATION     D01 000010  12.00 1982-01-01 1983-02-01       
+#MA2110 W L PROGRAMMING          D11 000060   9.00 1982-01-01 1983-02-01 MA2100
+#MA2111 W L PROGRAM DESIGN       D11 000220   2.00 1982-01-01 1982-12-01 MA2110
+#MA2112 W L ROBOT DESIGN         D11 000150   3.00 1982-01-01 1982-12-01 MA2110
+#MA2113 W L PROD CONT PROGS      D11 000160   3.00 1982-02-15 1982-12-01 MA2110
+#OP1000 OPERATION SUPPORT        E01 000050   6.00 1982-01-01 1983-02-01       
+#OP1010 OPERATION                E11 000090   5.00 1982-01-01 1983-02-01 OP1000
+#OP2000 GEN SYSTEMS SERVICES     E01 000050   5.00 1982-01-01 1983-02-01       
+#OP2010 SYSTEMS SUPPORT          E21 000100   4.00 1982-01-01 1983-02-01 OP2000
+#OP2011 SCP SYSTEMS SUPPORT      E21 000320   1.00 1982-01-01 1983-02-01 OP2010
+#OP2012 APPLICATIONS SUPPORT     E21 000330   1.00 1982-01-01 1983-02-01 OP2010
+#OP2013 DB/DC SUPPORT            E21 000340   1.00 1982-01-01 1983-02-01 OP2010
+#PL2100 WELD LINE PLANNING       B01 000020   1.00 1982-01-01 1982-09-15 MA2100
+#__IDS_EXPECTED__
+#
+#AD3100 ADMIN SERVICES           D01 000010   6.50 1982-01-01 1983-02-01       
+#AD3110 GENERAL ADMIN SYSTEMS    D21 000070   6.00 1982-01-01 1983-02-01 AD3100
+#AD3111 PAYROLL PROGRAMMING      D21 000230   2.00 1982-01-01 1983-02-01 AD3110
+#AD3112 PERSONNEL PROGRAMMING    D21 000250   1.00 1982-01-01 1983-02-01 AD3110
+#AD3113 ACCOUNT PROGRAMMING      D21 000270   2.00 1982-01-01 1983-02-01 AD3110
+#IF1000 QUERY SERVICES           C01 000030   2.00 1982-01-01 1983-02-01       
+#IF2000 USER EDUCATION           C01 000030   1.00 1982-01-01 1983-02-01       
+#MA2100 WELD LINE AUTOMATION     D01 000010  12.00 1982-01-01 1983-02-01       
+#MA2110 W L PROGRAMMING          D11 000060   9.00 1982-01-01 1983-02-01 MA2100
+#MA2111 W L PROGRAM DESIGN       D11 000220   2.00 1982-01-01 1982-12-01 MA2110
+#MA2112 W L ROBOT DESIGN         D11 000150   3.00 1982-01-01 1982-12-01 MA2110
+#MA2113 W L PROD CONT PROGS      D11 000160   3.00 1982-02-15 1982-12-01 MA2110
+#OP1000 OPERATION SUPPORT        E01 000050   6.00 1982-01-01 1983-02-01       
+#OP1010 OPERATION                E11 000090   5.00 1982-01-01 1983-02-01 OP1000
+#OP2000 GEN SYSTEMS SERVICES     E01 000050   5.00 1982-01-01 1983-02-01       
+#OP2010 SYSTEMS SUPPORT          E21 000100   4.00 1982-01-01 1983-02-01 OP2000
+#OP2011 SCP SYSTEMS SUPPORT      E21 000320   1.00 1982-01-01 1983-02-01 OP2010
+#OP2012 APPLICATIONS SUPPORT     E21 000330   1.00 1982-01-01 1983-02-01 OP2010
+#OP2013 DB/DC SUPPORT            E21 000340   1.00 1982-01-01 1983-02-01 OP2010
+#PL2100 WELD LINE PLANNING       B01 000020   1.00 1982-01-01 1982-09-15 MA2100
diff -pruN 0.3.0-3/tests/test_153_FetchAssocSelect_04.py 2.0.5-0ubuntu2/tests/test_153_FetchAssocSelect_04.py
--- 0.3.0-3/tests/test_153_FetchAssocSelect_04.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_153_FetchAssocSelect_04.py	2014-01-31 11:07:55.000000000 +0000
@@ -0,0 +1,80 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_153_FetchAssocSelect_04(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_153)
+
+  def run_test_153(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+    
+    result = ibm_db.exec_immediate(conn, "select * from org")
+    
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("%4d ",row['DEPTNUMB'])
+      #printf("%-14s ",row['DEPTNAME'])
+      #printf("%4d ",row['MANAGER'])
+      #printf("%-10s",row['DIVISION'])
+      #printf("%-13s ",row['LOCATION'])
+      #puts ""
+      print "%4d %-14s %4d %-10s%-13s " % (row['DEPTNUMB'], row['DEPTNAME'], row['MANAGER'], row['DIVISION'], row['LOCATION'])
+      row = ibm_db.fetch_assoc(result)
+
+#__END__
+#__LUW_EXPECTED__
+#
+#  10 Head Office     160 Corporate New York      
+#  15 New England      50 Eastern   Boston        
+#  20 Mid Atlantic     10 Eastern   Washington    
+#  38 South Atlantic   30 Eastern   Atlanta       
+#  42 Great Lakes     100 Midwest   Chicago       
+#  51 Plains          140 Midwest   Dallas        
+#  66 Pacific         270 Western   San Francisco 
+#  84 Mountain        290 Western   Denver        
+#__ZOS_EXPECTED__
+#
+#  10 Head Office     160 Corporate New York      
+#  15 New England      50 Eastern   Boston        
+#  20 Mid Atlantic     10 Eastern   Washington    
+#  38 South Atlantic   30 Eastern   Atlanta       
+#  42 Great Lakes     100 Midwest   Chicago       
+#  51 Plains          140 Midwest   Dallas        
+#  66 Pacific         270 Western   San Francisco 
+#  84 Mountain        290 Western   Denver        
+#__SYSTEMI_EXPECTED__
+#
+#  10 Head Office     160 Corporate New York      
+#  15 New England      50 Eastern   Boston        
+#  20 Mid Atlantic     10 Eastern   Washington    
+#  38 South Atlantic   30 Eastern   Atlanta       
+#  42 Great Lakes     100 Midwest   Chicago       
+#  51 Plains          140 Midwest   Dallas        
+#  66 Pacific         270 Western   San Francisco 
+#  84 Mountain        290 Western   Denver        
+#__IDS_EXPECTED__
+#
+#  10 Head Office     160 Corporate New York      
+#  15 New England      50 Eastern   Boston        
+#  20 Mid Atlantic     10 Eastern   Washington    
+#  38 South Atlantic   30 Eastern   Atlanta       
+#  42 Great Lakes     100 Midwest   Chicago       
+#  51 Plains          140 Midwest   Dallas        
+#  66 Pacific         270 Western   San Francisco 
+#  84 Mountain        290 Western   Denver        
+#
diff -pruN 0.3.0-3/tests/test_154_AllFetches.py 2.0.5-0ubuntu2/tests/test_154_AllFetches.py
--- 0.3.0-3/tests/test_154_AllFetches.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_154_AllFetches.py	2014-01-31 11:07:40.000000000 +0000
@@ -0,0 +1,140 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_154_AllFetches(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_154)
+
+  def run_test_154(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    try:
+        statement = 'DROP TABLE fetch_test'
+        result = ibm_db.exec_immediate(conn, statement)
+    except:
+        pass
+    
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      statement = 'CREATE TABLE fetch_test (col1 VARCHAR(20), col2 CLOB, col3 INTEGER)'
+      st0 = "INSERT INTO fetch_test VALUES ('column 0', 'Data in the clob 0', 0)"
+      st1 = "INSERT INTO fetch_test VALUES ('column 1', 'Data in the clob 1', 1)"
+      st2 = "INSERT INTO fetch_test VALUES ('column 2', 'Data in the clob 2', 2)"
+      st3 = "INSERT INTO fetch_test VALUES ('column 3', 'Data in the clob 3', 3)"
+    else:
+      statement = 'CREATE TABLE fetch_test (col1 VARCHAR(20), col2 CLOB(20), col3 INTEGER)'
+      st0 = "INSERT INTO fetch_test VALUES ('column 0', 'Data in the clob 0', 0)"
+      st1 = "INSERT INTO fetch_test VALUES ('column 1', 'Data in the clob 1', 1)"
+      st2 = "INSERT INTO fetch_test VALUES ('column 2', 'Data in the clob 2', 2)"
+      st3 = "INSERT INTO fetch_test VALUES ('column 3', 'Data in the clob 3', 3)"
+    result = ibm_db.exec_immediate(conn, statement)
+
+    result = ibm_db.exec_immediate(conn, st0)
+    result = ibm_db.exec_immediate(conn, st1)
+    result = ibm_db.exec_immediate(conn, st2)
+    result = ibm_db.exec_immediate(conn, st3)
+
+    statement = "SELECT col1, col2 FROM fetch_test"
+    result = ibm_db.prepare(conn, statement)
+    ibm_db.execute(result)
+
+    row = ibm_db.fetch_tuple(result)
+    while ( row ):
+      #printf("\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long.\n",
+      #        row[0],row[0].length, row[1],row[1].length)
+      print "\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long." %\
+        (row[0], len(row[0]), row[1], len(row[1]))
+      row = ibm_db.fetch_tuple(result)
+
+    result = ibm_db.prepare(conn, statement)
+    ibm_db.execute(result)
+
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long.\n",
+      #        row['COL1'], row['COL1'].length, row['COL2'], row['COL2'].length)
+      print "\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long." %\
+        (row['COL1'], len(row['COL1']), row['COL2'], len(row['COL2']))
+      row = ibm_db.fetch_assoc(result)
+      
+    result = ibm_db.prepare(conn, statement)
+    ibm_db.execute(result)
+
+    row = ibm_db.fetch_both(result)
+    while ( row ):
+      #printf("\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long.\n",
+      #        row['COL1'], row['COL1'].length, row[1], row[1].length)
+      print "\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long.\n" % \
+        (row['COL1'],len(row['COL1']), row[1], len(row[1]))
+      row = ibm_db.fetch_both(result)
+
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#__ZOS_EXPECTED__
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#__SYSTEMI_EXPECTED__
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#__IDS_EXPECTED__
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
+#"column 0" from VARCHAR is 8 bytes long, "Data in the clob 0" from CLOB is 18 bytes long.
+#"column 1" from VARCHAR is 8 bytes long, "Data in the clob 1" from CLOB is 18 bytes long.
+#"column 2" from VARCHAR is 8 bytes long, "Data in the clob 2" from CLOB is 18 bytes long.
+#"column 3" from VARCHAR is 8 bytes long, "Data in the clob 3" from CLOB is 18 bytes long.
diff -pruN 0.3.0-3/tests/test_155_FetchAssocSelect_05.py 2.0.5-0ubuntu2/tests/test_155_FetchAssocSelect_05.py
--- 0.3.0-3/tests/test_155_FetchAssocSelect_05.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_155_FetchAssocSelect_05.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,185 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_155_FetchAssocSelect_05(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_155)
+
+  def run_test_155(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+    
+    result = ibm_db.exec_immediate(conn, "select * from employee where lastname in ('HAAS','THOMPSON', 'KWAN', 'GEYER', 'STERN', 'PULASKI', 'HENDERSON', 'SPENSER', 'LUCCHESSI', 'OCONNELL', 'QUINTANA', 'NICHOLLS', 'ADAMSON', 'PIANKA', 'YOSHIMURA', 'SCOUTTEN', 'WALKER', 'BROWN', 'JONES', 'LUTZ', 'JEFFERSON', 'MARINO', 'SMITH', 'JOHNSON', 'PEREZ', 'SCHNEIDER', 'PARKER', 'SMITH', 'SETRIGHT', 'MEHTA', 'LEE', 'GOUNOT')")
+    i=0
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      i += 1
+      if (serverinfo.DBMS_NAME[0:3] == 'IDS'):
+        if (row['midinit'] == None):
+          row['midinit'] = ''
+        print "%6s %12s %s %-15s%3s %4s %10s %-8s%4d %s%10s %12s %12s %12s" % \
+          (row['empno'], row['firstnme'], row['midinit'], row['lastname'], row['workdept'], \
+          row['phoneno'], row['hiredate'], row['job'], row['edlevel'], row['sex'], \
+          row['birthdate'], row['salary'], row['bonus'], row['comm'])
+        row = ibm_db.fetch_assoc(result)
+      else:
+        if (row['MIDINIT'] == None):
+          row['MIDINIT'] = ''
+        print "%6s %12s %s %-15s%3s %4s %10s %-8s%4d %s%10s %12s %12s %12s" % \
+          (row['EMPNO'], row['FIRSTNME'], row['MIDINIT'], row['LASTNAME'], row['WORKDEPT'], \
+          row['PHONENO'], row['HIREDATE'], row['JOB'], row['EDLEVEL'], row['SEX'], \
+          row['BIRTHDATE'], row['SALARY'], row['BONUS'], row['COMM'])
+        row = ibm_db.fetch_assoc(result)
+    print "%d record(s) selected." % i
+
+#__END__
+#__LUW_EXPECTED__
+#000010    CHRISTINE I HAAS           A00 3978 1965-01-01 PRES      18 F1933-08-24     52750.00      1000.00      4220.00
+#000020      MICHAEL L THOMPSON       B01 3476 1973-10-10 MANAGER   18 M1948-02-02     41250.00       800.00      3300.00
+#000030        SALLY A KWAN           C01 4738 1975-04-05 MANAGER   20 F1941-05-11     38250.00       800.00      3060.00
+#000050         JOHN B GEYER          E01 6789 1949-08-17 MANAGER   16 M1925-09-15     40175.00       800.00      3214.00
+#000060       IRVING F STERN          D11 6423 1973-09-14 MANAGER   16 M1945-07-07     32250.00       500.00      2580.00
+#000070          EVA D PULASKI        D21 7831 1980-09-30 MANAGER   16 F1953-05-26     36170.00       700.00      2893.00
+#000090       EILEEN W HENDERSON      E11 5498 1970-08-15 MANAGER   16 F1941-05-15     29750.00       600.00      2380.00
+#000100     THEODORE Q SPENSER        E21 0972 1980-06-19 MANAGER   14 M1956-12-18     26150.00       500.00      2092.00
+#000110     VINCENZO G LUCCHESSI      A00 3490 1958-05-16 SALESREP  19 M1929-11-05     46500.00       900.00      3720.00
+#000120         SEAN   OCONNELL       A00 2167 1963-12-05 CLERK     14 M1942-10-18     29250.00       600.00      2340.00
+#000130      DOLORES M QUINTANA       C01 4578 1971-07-28 ANALYST   16 F1925-09-15     23800.00       500.00      1904.00
+#000140      HEATHER A NICHOLLS       C01 1793 1976-12-15 ANALYST   18 F1946-01-19     28420.00       600.00      2274.00
+#000150        BRUCE   ADAMSON        D11 4510 1972-02-12 DESIGNER  16 M1947-05-17     25280.00       500.00      2022.00
+#000160    ELIZABETH R PIANKA         D11 3782 1977-10-11 DESIGNER  17 F1955-04-12     22250.00       400.00      1780.00
+#000170    MASATOSHI J YOSHIMURA      D11 2890 1978-09-15 DESIGNER  16 M1951-01-05     24680.00       500.00      1974.00
+#000180      MARILYN S SCOUTTEN       D11 1682 1973-07-07 DESIGNER  17 F1949-02-21     21340.00       500.00      1707.00
+#000190        JAMES H WALKER         D11 2986 1974-07-26 DESIGNER  16 M1952-06-25     20450.00       400.00      1636.00
+#000200        DAVID   BROWN          D11 4501 1966-03-03 DESIGNER  16 M1941-05-29     27740.00       600.00      2217.00
+#000210      WILLIAM T JONES          D11 0942 1979-04-11 DESIGNER  17 M1953-02-23     18270.00       400.00      1462.00
+#000220     JENNIFER K LUTZ           D11 0672 1968-08-29 DESIGNER  18 F1948-03-19     29840.00       600.00      2387.00
+#000230        JAMES J JEFFERSON      D21 2094 1966-11-21 CLERK     14 M1935-05-30     22180.00       400.00      1774.00
+#000240    SALVATORE M MARINO         D21 3780 1979-12-05 CLERK     17 M1954-03-31     28760.00       600.00      2301.00
+#000250       DANIEL S SMITH          D21 0961 1969-10-30 CLERK     15 M1939-11-12     19180.00       400.00      1534.00
+#000260        SYBIL P JOHNSON        D21 8953 1975-09-11 CLERK     16 F1936-10-05     17250.00       300.00      1380.00
+#000270        MARIA L PEREZ          D21 9001 1980-09-30 CLERK     15 F1953-05-26     27380.00       500.00      2190.00
+#000280        ETHEL R SCHNEIDER      E11 8997 1967-03-24 OPERATOR  17 F1936-03-28     26250.00       500.00      2100.00
+#000290         JOHN R PARKER         E11 4502 1980-05-30 OPERATOR  12 M1946-07-09     15340.00       300.00      1227.00
+#000300       PHILIP X SMITH          E11 2095 1972-06-19 OPERATOR  14 M1936-10-27     17750.00       400.00      1420.00
+#000310        MAUDE F SETRIGHT       E11 3332 1964-09-12 OPERATOR  12 F1931-04-21     15900.00       300.00      1272.00
+#000320       RAMLAL V MEHTA          E21 9990 1965-07-07 FIELDREP  16 M1932-08-11     19950.00       400.00      1596.00
+#000330         WING   LEE            E21 2103 1976-02-23 FIELDREP  14 M1941-07-18     25370.00       500.00      2030.00
+#000340        JASON R GOUNOT         E21 5698 1947-05-05 FIELDREP  16 M1926-05-17     23840.00       500.00      1907.00
+#
+#32 record(s) selected.
+#__ZOS_EXPECTED__
+#000010    CHRISTINE I HAAS           A00 3978 1965-01-01 PRES      18 F1933-08-24     52750.00      1000.00      4220.00
+#000020      MICHAEL L THOMPSON       B01 3476 1973-10-10 MANAGER   18 M1948-02-02     41250.00       800.00      3300.00
+#000030        SALLY A KWAN           C01 4738 1975-04-05 MANAGER   20 F1941-05-11     38250.00       800.00      3060.00
+#000050         JOHN B GEYER          E01 6789 1949-08-17 MANAGER   16 M1925-09-15     40175.00       800.00      3214.00
+#000060       IRVING F STERN          D11 6423 1973-09-14 MANAGER   16 M1945-07-07     32250.00       500.00      2580.00
+#000070          EVA D PULASKI        D21 7831 1980-09-30 MANAGER   16 F1953-05-26     36170.00       700.00      2893.00
+#000090       EILEEN W HENDERSON      E11 5498 1970-08-15 MANAGER   16 F1941-05-15     29750.00       600.00      2380.00
+#000100     THEODORE Q SPENSER        E21 0972 1980-06-19 MANAGER   14 M1956-12-18     26150.00       500.00      2092.00
+#000110     VINCENZO G LUCCHESSI      A00 3490 1958-05-16 SALESREP  19 M1929-11-05     46500.00       900.00      3720.00
+#000120         SEAN   OCONNELL       A00 2167 1963-12-05 CLERK     14 M1942-10-18     29250.00       600.00      2340.00
+#000130      DOLORES M QUINTANA       C01 4578 1971-07-28 ANALYST   16 F1925-09-15     23800.00       500.00      1904.00
+#000140      HEATHER A NICHOLLS       C01 1793 1976-12-15 ANALYST   18 F1946-01-19     28420.00       600.00      2274.00
+#000150        BRUCE   ADAMSON        D11 4510 1972-02-12 DESIGNER  16 M1947-05-17     25280.00       500.00      2022.00
+#000160    ELIZABETH R PIANKA         D11 3782 1977-10-11 DESIGNER  17 F1955-04-12     22250.00       400.00      1780.00
+#000170    MASATOSHI J YOSHIMURA      D11 2890 1978-09-15 DESIGNER  16 M1951-01-05     24680.00       500.00      1974.00
+#000180      MARILYN S SCOUTTEN       D11 1682 1973-07-07 DESIGNER  17 F1949-02-21     21340.00       500.00      1707.00
+#000190        JAMES H WALKER         D11 2986 1974-07-26 DESIGNER  16 M1952-06-25     20450.00       400.00      1636.00
+#000200        DAVID   BROWN          D11 4501 1966-03-03 DESIGNER  16 M1941-05-29     27740.00       600.00      2217.00
+#000210      WILLIAM T JONES          D11 0942 1979-04-11 DESIGNER  17 M1953-02-23     18270.00       400.00      1462.00
+#000220     JENNIFER K LUTZ           D11 0672 1968-08-29 DESIGNER  18 F1948-03-19     29840.00       600.00      2387.00
+#000230        JAMES J JEFFERSON      D21 2094 1966-11-21 CLERK     14 M1935-05-30     22180.00       400.00      1774.00
+#000240    SALVATORE M MARINO         D21 3780 1979-12-05 CLERK     17 M1954-03-31     28760.00       600.00      2301.00
+#000250       DANIEL S SMITH          D21 0961 1969-10-30 CLERK     15 M1939-11-12     19180.00       400.00      1534.00
+#000260        SYBIL P JOHNSON        D21 8953 1975-09-11 CLERK     16 F1936-10-05     17250.00       300.00      1380.00
+#000270        MARIA L PEREZ          D21 9001 1980-09-30 CLERK     15 F1953-05-26     27380.00       500.00      2190.00
+#000280        ETHEL R SCHNEIDER      E11 8997 1967-03-24 OPERATOR  17 F1936-03-28     26250.00       500.00      2100.00
+#000290         JOHN R PARKER         E11 4502 1980-05-30 OPERATOR  12 M1946-07-09     15340.00       300.00      1227.00
+#000300       PHILIP X SMITH          E11 2095 1972-06-19 OPERATOR  14 M1936-10-27     17750.00       400.00      1420.00
+#000310        MAUDE F SETRIGHT       E11 3332 1964-09-12 OPERATOR  12 F1931-04-21     15900.00       300.00      1272.00
+#000320       RAMLAL V MEHTA          E21 9990 1965-07-07 FIELDREP  16 M1932-08-11     19950.00       400.00      1596.00
+#000330         WING   LEE            E21 2103 1976-02-23 FIELDREP  14 M1941-07-18     25370.00       500.00      2030.00
+#000340        JASON R GOUNOT         E21 5698 1947-05-05 FIELDREP  16 M1926-05-17     23840.00       500.00      1907.00
+#
+#32 record(s) selected.
+#__SYSTEMI_EXPECTED__
+#000010    CHRISTINE I HAAS           A00 3978 1965-01-01 PRES      18 F1933-08-24     52750.00      1000.00      4220.00
+#000020      MICHAEL L THOMPSON       B01 3476 1973-10-10 MANAGER   18 M1948-02-02     41250.00       800.00      3300.00
+#000030        SALLY A KWAN           C01 4738 1975-04-05 MANAGER   20 F1941-05-11     38250.00       800.00      3060.00
+#000050         JOHN B GEYER          E01 6789 1949-08-17 MANAGER   16 M1925-09-15     40175.00       800.00      3214.00
+#000060       IRVING F STERN          D11 6423 1973-09-14 MANAGER   16 M1945-07-07     32250.00       500.00      2580.00
+#000070          EVA D PULASKI        D21 7831 1980-09-30 MANAGER   16 F1953-05-26     36170.00       700.00      2893.00
+#000090       EILEEN W HENDERSON      E11 5498 1970-08-15 MANAGER   16 F1941-05-15     29750.00       600.00      2380.00
+#000100     THEODORE Q SPENSER        E21 0972 1980-06-19 MANAGER   14 M1956-12-18     26150.00       500.00      2092.00
+#000110     VINCENZO G LUCCHESSI      A00 3490 1958-05-16 SALESREP  19 M1929-11-05     46500.00       900.00      3720.00
+#000120         SEAN   OCONNELL       A00 2167 1963-12-05 CLERK     14 M1942-10-18     29250.00       600.00      2340.00
+#000130      DOLORES M QUINTANA       C01 4578 1971-07-28 ANALYST   16 F1925-09-15     23800.00       500.00      1904.00
+#000140      HEATHER A NICHOLLS       C01 1793 1976-12-15 ANALYST   18 F1946-01-19     28420.00       600.00      2274.00
+#000150        BRUCE   ADAMSON        D11 4510 1972-02-12 DESIGNER  16 M1947-05-17     25280.00       500.00      2022.00
+#000160    ELIZABETH R PIANKA         D11 3782 1977-10-11 DESIGNER  17 F1955-04-12     22250.00       400.00      1780.00
+#000170    MASATOSHI J YOSHIMURA      D11 2890 1978-09-15 DESIGNER  16 M1951-01-05     24680.00       500.00      1974.00
+#000180      MARILYN S SCOUTTEN       D11 1682 1973-07-07 DESIGNER  17 F1949-02-21     21340.00       500.00      1707.00
+#000190        JAMES H WALKER         D11 2986 1974-07-26 DESIGNER  16 M1952-06-25     20450.00       400.00      1636.00
+#000200        DAVID   BROWN          D11 4501 1966-03-03 DESIGNER  16 M1941-05-29     27740.00       600.00      2217.00
+#000210      WILLIAM T JONES          D11 0942 1979-04-11 DESIGNER  17 M1953-02-23     18270.00       400.00      1462.00
+#000220     JENNIFER K LUTZ           D11 0672 1968-08-29 DESIGNER  18 F1948-03-19     29840.00       600.00      2387.00
+#000230        JAMES J JEFFERSON      D21 2094 1966-11-21 CLERK     14 M1935-05-30     22180.00       400.00      1774.00
+#000240    SALVATORE M MARINO         D21 3780 1979-12-05 CLERK     17 M1954-03-31     28760.00       600.00      2301.00
+#000250       DANIEL S SMITH          D21 0961 1969-10-30 CLERK     15 M1939-11-12     19180.00       400.00      1534.00
+#000260        SYBIL P JOHNSON        D21 8953 1975-09-11 CLERK     16 F1936-10-05     17250.00       300.00      1380.00
+#000270        MARIA L PEREZ          D21 9001 1980-09-30 CLERK     15 F1953-05-26     27380.00       500.00      2190.00
+#000280        ETHEL R SCHNEIDER      E11 8997 1967-03-24 OPERATOR  17 F1936-03-28     26250.00       500.00      2100.00
+#000290         JOHN R PARKER         E11 4502 1980-05-30 OPERATOR  12 M1946-07-09     15340.00       300.00      1227.00
+#000300       PHILIP X SMITH          E11 2095 1972-06-19 OPERATOR  14 M1936-10-27     17750.00       400.00      1420.00
+#000310        MAUDE F SETRIGHT       E11 3332 1964-09-12 OPERATOR  12 F1931-04-21     15900.00       300.00      1272.00
+#000320       RAMLAL V MEHTA          E21 9990 1965-07-07 FIELDREP  16 M1932-08-11     19950.00       400.00      1596.00
+#000330         WING   LEE            E21 2103 1976-02-23 FIELDREP  14 M1941-07-18     25370.00       500.00      2030.00
+#000340        JASON R GOUNOT         E21 5698 1947-05-05 FIELDREP  16 M1926-05-17     23840.00       500.00      1907.00
+#
+#32 record(s) selected.
+#__IDS_EXPECTED__
+#000010    CHRISTINE I HAAS           A00 3978 1965-01-01 PRES      18 F1933-08-24     52750.00      1000.00      4220.00
+#000020      MICHAEL L THOMPSON       B01 3476 1973-10-10 MANAGER   18 M1948-02-02     41250.00       800.00      3300.00
+#000030        SALLY A KWAN           C01 4738 1975-04-05 MANAGER   20 F1941-05-11     38250.00       800.00      3060.00
+#000050         JOHN B GEYER          E01 6789 1949-08-17 MANAGER   16 M1925-09-15     40175.00       800.00      3214.00
+#000060       IRVING F STERN          D11 6423 1973-09-14 MANAGER   16 M1945-07-07     32250.00       500.00      2580.00
+#000070          EVA D PULASKI        D21 7831 1980-09-30 MANAGER   16 F1953-05-26     36170.00       700.00      2893.00
+#000090       EILEEN W HENDERSON      E11 5498 1970-08-15 MANAGER   16 F1941-05-15     29750.00       600.00      2380.00
+#000100     THEODORE Q SPENSER        E21 0972 1980-06-19 MANAGER   14 M1956-12-18     26150.00       500.00      2092.00
+#000110     VINCENZO G LUCCHESSI      A00 3490 1958-05-16 SALESREP  19 M1929-11-05     46500.00       900.00      3720.00
+#000120         SEAN   OCONNELL       A00 2167 1963-12-05 CLERK     14 M1942-10-18     29250.00       600.00      2340.00
+#000130      DOLORES M QUINTANA       C01 4578 1971-07-28 ANALYST   16 F1925-09-15     23800.00       500.00      1904.00
+#000140      HEATHER A NICHOLLS       C01 1793 1976-12-15 ANALYST   18 F1946-01-19     28420.00       600.00      2274.00
+#000150        BRUCE   ADAMSON        D11 4510 1972-02-12 DESIGNER  16 M1947-05-17     25280.00       500.00      2022.00
+#000160    ELIZABETH R PIANKA         D11 3782 1977-10-11 DESIGNER  17 F1955-04-12     22250.00       400.00      1780.00
+#000170    MASATOSHI J YOSHIMURA      D11 2890 1978-09-15 DESIGNER  16 M1951-01-05     24680.00       500.00      1974.00
+#000180      MARILYN S SCOUTTEN       D11 1682 1973-07-07 DESIGNER  17 F1949-02-21     21340.00       500.00      1707.00
+#000190        JAMES H WALKER         D11 2986 1974-07-26 DESIGNER  16 M1952-06-25     20450.00       400.00      1636.00
+#000200        DAVID   BROWN          D11 4501 1966-03-03 DESIGNER  16 M1941-05-29     27740.00       600.00      2217.00
+#000210      WILLIAM T JONES          D11 0942 1979-04-11 DESIGNER  17 M1953-02-23     18270.00       400.00      1462.00
+#000220     JENNIFER K LUTZ           D11 0672 1968-08-29 DESIGNER  18 F1948-03-19     29840.00       600.00      2387.00
+#000230        JAMES J JEFFERSON      D21 2094 1966-11-21 CLERK     14 M1935-05-30     22180.00       400.00      1774.00
+#000240    SALVATORE M MARINO         D21 3780 1979-12-05 CLERK     17 M1954-03-31     28760.00       600.00      2301.00
+#000250       DANIEL S SMITH          D21 0961 1969-10-30 CLERK     15 M1939-11-12     19180.00       400.00      1534.00
+#000260        SYBIL P JOHNSON        D21 8953 1975-09-11 CLERK     16 F1936-10-05     17250.00       300.00      1380.00
+#000270        MARIA L PEREZ          D21 9001 1980-09-30 CLERK     15 F1953-05-26     27380.00       500.00      2190.00
+#000280        ETHEL R SCHNEIDER      E11 8997 1967-03-24 OPERATOR  17 F1936-03-28     26250.00       500.00      2100.00
+#000290         JOHN R PARKER         E11 4502 1980-05-30 OPERATOR  12 M1946-07-09     15340.00       300.00      1227.00
+#000300       PHILIP X SMITH          E11 2095 1972-06-19 OPERATOR  14 M1936-10-27     17750.00       400.00      1420.00
+#000310        MAUDE F SETRIGHT       E11 3332 1964-09-12 OPERATOR  12 F1931-04-21     15900.00       300.00      1272.00
+#000320       RAMLAL V MEHTA          E21 9990 1965-07-07 FIELDREP  16 M1932-08-11     19950.00       400.00      1596.00
+#000330         WING   LEE            E21 2103 1976-02-23 FIELDREP  14 M1941-07-18     25370.00       500.00      2030.00
+#000340        JASON R GOUNOT         E21 5698 1947-05-05 FIELDREP  16 M1926-05-17     23840.00       500.00      1907.00
+#
+#32 record(s) selected.
diff -pruN 0.3.0-3/tests/test_156_FetchAssocNestedSelects_01.py 2.0.5-0ubuntu2/tests/test_156_FetchAssocNestedSelects_01.py
--- 0.3.0-3/tests/test_156_FetchAssocNestedSelects_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_156_FetchAssocNestedSelects_01.py	2014-01-31 11:06:50.000000000 +0000
@@ -0,0 +1,230 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+# This test will use a lot of the heap size allocated
+# for DB2.  If it is failing on your system, please 
+# increase the application heap size.
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_156_FetchAssocNestedSelects_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_156)
+
+  def run_test_156(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+
+    row = ibm_db.fetch_assoc(result)      
+    count = 1
+    while ( row ): 
+        if (row['YEARS'] == None):
+            row['YEARS'] = ''
+        if (row['COMM'] == None):
+            row['COMM'] = ''
+        print row['ID'],row['NAME'],row['JOB'],row['YEARS'], row['SALARY'], row['COMM']
+        row = ibm_db.fetch_assoc(result)
+  
+    result2 = ibm_db.exec_immediate(conn,"select * from department where substr(deptno,1,1) in ('A','B','C','D','E')")
+    row2 = ibm_db.fetch_assoc(result2)
+    while ( row2 ):    
+        if (row2['MGRNO'] == None):
+            row2['MGRNO'] = ''
+        print row2['DEPTNO'], row2['DEPTNAME'], row2['MGRNO'], row2['ADMRDEPT'], row2['LOCATION']
+        row2 = ibm_db.fetch_assoc(result2)
+
+#__END__
+#__LUW_EXPECTED__
+#10 Sanders Mgr   7 18357.50 
+#20 Pernal Sales 8 18171.25 612.45
+#30 Marenghi Mgr   5 17506.75 
+#40 OBrien Sales 6 18006.00 846.55
+#50 Hanes Mgr   10 20659.80 
+#60 Quigley Sales  16808.30 650.25
+#70 Rothman Sales 7 16502.83 1152.00
+#80 James Clerk  13504.60 128.20
+#90 Koonitz Sales 6 18001.75 1386.70
+#100 Plotz Mgr   7 18352.80 
+#110 Ngan Clerk 5 12508.20 206.60
+#120 Naughton Clerk  12954.75 180.00
+#130 Yamaguchi Clerk 6 10505.90 75.60
+#140 Fraye Mgr   6 21150.00 
+#150 Williams Sales 6 19456.50 637.65
+#160 Molinare Mgr   7 22959.20 
+#170 Kermisch Clerk 4 12258.50 110.10
+#180 Abrahams Clerk 3 12009.75 236.50
+#190 Sneider Clerk 8 14252.75 126.50
+#200 Scoutten Clerk  11508.60 84.20
+#210 Lu Mgr   10 20010.00 
+#220 Smith Sales 7 17654.50 992.80
+#230 Lundquist Clerk 3 13369.80 189.65
+#240 Daniels Mgr   5 19260.25 
+#250 Wheeler Clerk 6 14460.00 513.30
+#260 Jones Mgr   12 21234.00 
+#270 Lea Mgr   9 18555.50 
+#280 Wilson Sales 9 18674.50 811.50
+#290 Quill Mgr   10 19818.00 
+#300 Davis Sales 5 15454.50 806.10
+#310 Graham Sales 13 21000.00 200.30
+#320 Gonzales Sales 4 16858.20 844.00
+#330 Burke Clerk 1 10988.00 55.50
+#340 Edwards Sales 7 17844.00 1285.00
+#350 Gafney Clerk 5 13030.50 188.00
+#A00 SPIFFY COMPUTER SERVICE DIV. 000010 A00 None
+#B01 PLANNING 000020 A00 None
+#C01 INFORMATION CENTER 000030 A00 None
+#D01 DEVELOPMENT CENTER  A00 None
+#D11 MANUFACTURING SYSTEMS 000060 D01 None
+#D21 ADMINISTRATION SYSTEMS 000070 D01 None
+#E01 SUPPORT SERVICES 000050 A00 None
+#E11 OPERATIONS 000090 E01 None
+#E21 SOFTWARE SUPPORT 000100 E01 None
+#__ZOS_EXPECTED__
+#10 Sanders Mgr   7 18357.50 
+#20 Pernal Sales 8 18171.25 612.45
+#30 Marenghi Mgr   5 17506.75 
+#40 OBrien Sales 6 18006.00 846.55
+#50 Hanes Mgr   10 20659.80 
+#60 Quigley Sales  16808.30 650.25
+#70 Rothman Sales 7 16502.83 1152.00
+#80 James Clerk  13504.60 128.20
+#90 Koonitz Sales 6 18001.75 1386.70
+#100 Plotz Mgr   7 18352.80 
+#110 Ngan Clerk 5 12508.20 206.60
+#120 Naughton Clerk  12954.75 180.00
+#130 Yamaguchi Clerk 6 10505.90 75.60
+#140 Fraye Mgr   6 21150.00 
+#150 Williams Sales 6 19456.50 637.65
+#160 Molinare Mgr   7 22959.20 
+#170 Kermisch Clerk 4 12258.50 110.10
+#180 Abrahams Clerk 3 12009.75 236.50
+#190 Sneider Clerk 8 14252.75 126.50
+#200 Scoutten Clerk  11508.60 84.20
+#210 Lu Mgr   10 20010.00 
+#220 Smith Sales 7 17654.50 992.80
+#230 Lundquist Clerk 3 13369.80 189.65
+#240 Daniels Mgr   5 19260.25 
+#250 Wheeler Clerk 6 14460.00 513.30
+#260 Jones Mgr   12 21234.00 
+#270 Lea Mgr   9 18555.50 
+#280 Wilson Sales 9 18674.50 811.50
+#290 Quill Mgr   10 19818.00 
+#300 Davis Sales 5 15454.50 806.10
+#310 Graham Sales 13 21000.00 200.30
+#320 Gonzales Sales 4 16858.20 844.00
+#330 Burke Clerk 1 10988.00 55.50
+#340 Edwards Sales 7 17844.00 1285.00
+#350 Gafney Clerk 5 13030.50 188.00
+#A00 SPIFFY COMPUTER SERVICE DIV. 000010 A00 None
+#B01 PLANNING 000020 A00 None
+#C01 INFORMATION CENTER 000030 A00 None
+#D01 DEVELOPMENT CENTER  A00 None
+#D11 MANUFACTURING SYSTEMS 000060 D01 None
+#D21 ADMINISTRATION SYSTEMS 000070 D01 None
+#E01 SUPPORT SERVICES 000050 A00 None
+#E11 OPERATIONS 000090 E01 None
+#E21 SOFTWARE SUPPORT 000100 E01 None
+#__SYSTEMI_EXPECTED__
+#10 Sanders Mgr   7 18357.50 
+#20 Pernal Sales 8 18171.25 612.45
+#30 Marenghi Mgr   5 17506.75 
+#40 OBrien Sales 6 18006.00 846.55
+#50 Hanes Mgr   10 20659.80 
+#60 Quigley Sales  16808.30 650.25
+#70 Rothman Sales 7 16502.83 1152.00
+#80 James Clerk  13504.60 128.20
+#90 Koonitz Sales 6 18001.75 1386.70
+#100 Plotz Mgr   7 18352.80 
+#110 Ngan Clerk 5 12508.20 206.60
+#120 Naughton Clerk  12954.75 180.00
+#130 Yamaguchi Clerk 6 10505.90 75.60
+#140 Fraye Mgr   6 21150.00 
+#150 Williams Sales 6 19456.50 637.65
+#160 Molinare Mgr   7 22959.20 
+#170 Kermisch Clerk 4 12258.50 110.10
+#180 Abrahams Clerk 3 12009.75 236.50
+#190 Sneider Clerk 8 14252.75 126.50
+#200 Scoutten Clerk  11508.60 84.20
+#210 Lu Mgr   10 20010.00 
+#220 Smith Sales 7 17654.50 992.80
+#230 Lundquist Clerk 3 13369.80 189.65
+#240 Daniels Mgr   5 19260.25 
+#250 Wheeler Clerk 6 14460.00 513.30
+#260 Jones Mgr   12 21234.00 
+#270 Lea Mgr   9 18555.50 
+#280 Wilson Sales 9 18674.50 811.50
+#290 Quill Mgr   10 19818.00 
+#300 Davis Sales 5 15454.50 806.10
+#310 Graham Sales 13 21000.00 200.30
+#320 Gonzales Sales 4 16858.20 844.00
+#330 Burke Clerk 1 10988.00 55.50
+#340 Edwards Sales 7 17844.00 1285.00
+#350 Gafney Clerk 5 13030.50 188.00
+#A00 SPIFFY COMPUTER SERVICE DIV. 000010 A00 None
+#B01 PLANNING 000020 A00 None
+#C01 INFORMATION CENTER 000030 A00 None
+#D01 DEVELOPMENT CENTER  A00 None
+#D11 MANUFACTURING SYSTEMS 000060 D01 None
+#D21 ADMINISTRATION SYSTEMS 000070 D01 None
+#E01 SUPPORT SERVICES 000050 A00 None
+#E11 OPERATIONS 000090 E01 None
+#E21 SOFTWARE SUPPORT 000100 E01 None
+#__IDS_EXPECTED__
+#10 Sanders Mgr   7 18357.50 
+#20 Pernal Sales 8 18171.25 612.45
+#30 Marenghi Mgr   5 17506.75 
+#40 OBrien Sales 6 18006.00 846.55
+#50 Hanes Mgr   10 20659.80 
+#60 Quigley Sales  16808.30 650.25
+#70 Rothman Sales 7 16502.83 1152.00
+#80 James Clerk  13504.60 128.20
+#90 Koonitz Sales 6 18001.75 1386.70
+#100 Plotz Mgr   7 18352.80 
+#110 Ngan Clerk 5 12508.20 206.60
+#120 Naughton Clerk  12954.75 180.00
+#130 Yamaguchi Clerk 6 10505.90 75.60
+#140 Fraye Mgr   6 21150.00 
+#150 Williams Sales 6 19456.50 637.65
+#160 Molinare Mgr   7 22959.20 
+#170 Kermisch Clerk 4 12258.50 110.10
+#180 Abrahams Clerk 3 12009.75 236.50
+#190 Sneider Clerk 8 14252.75 126.50
+#200 Scoutten Clerk  11508.60 84.20
+#210 Lu Mgr   10 20010.00 
+#220 Smith Sales 7 17654.50 992.80
+#230 Lundquist Clerk 3 13369.80 189.65
+#240 Daniels Mgr   5 19260.25 
+#250 Wheeler Clerk 6 14460.00 513.30
+#260 Jones Mgr   12 21234.00 
+#270 Lea Mgr   9 18555.50 
+#280 Wilson Sales 9 18674.50 811.50
+#290 Quill Mgr   10 19818.00 
+#300 Davis Sales 5 15454.50 806.10
+#310 Graham Sales 13 21000.00 200.30
+#320 Gonzales Sales 4 16858.20 844.00
+#330 Burke Clerk 1 10988.00 55.50
+#340 Edwards Sales 7 17844.00 1285.00
+#350 Gafney Clerk 5 13030.50 188.00
+#A00 SPIFFY COMPUTER SERVICE DIV. 000010 A00 None
+#B01 PLANNING 000020 A00 None
+#C01 INFORMATION CENTER 000030 A00 None
+#D01 DEVELOPMENT CENTER  A00 None
+#D11 MANUFACTURING SYSTEMS 000060 D01 None
+#D21 ADMINISTRATION SYSTEMS 000070 D01 None
+#E01 SUPPORT SERVICES 000050 A00 None
+#E11 OPERATIONS 000090 E01 None
+#E21 SOFTWARE SUPPORT 000100 E01 None
\ No newline at end of file
diff -pruN 0.3.0-3/tests/test_157a_FetchAssocWithoutScrollableCursorErr.py 2.0.5-0ubuntu2/tests/test_157a_FetchAssocWithoutScrollableCursorErr.py
--- 0.3.0-3/tests/test_157a_FetchAssocWithoutScrollableCursorErr.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_157a_FetchAssocWithoutScrollableCursorErr.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,63 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_157a_FetchAssocWithoutScrollableCursorErr(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_157a)
+
+  def run_test_157a(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    print "Starting..."
+    if conn:
+      sql = "SELECT id, name, breed, weight FROM animals ORDER BY breed"
+      result = ibm_db.exec_immediate(conn, sql)
+
+      try:
+          i = 2
+          row = ibm_db.fetch_assoc(result, i)
+          while ( row ):
+              if (server.DBMS_NAME[0:3] == 'IDS'):
+                print "%-5d %-16s %-32s %10s" % (row['id'], row['name'], row['breed'], row['weight'])
+              else:
+                print "%-5d %-16s %-32s %10s" % (row['ID'], row['NAME'], row['BREED'], row['WEIGHT'])
+              i = i + 2
+          row = ibm_db.fetch_assoc(result, i)
+      except:
+          print "SQLSTATE: %s" % ibm_db.stmt_error(result)
+          print "Message: %s" % ibm_db.stmt_errormsg(result)
+	
+      print "DONE"
+
+#__END__
+#__LUW_EXPECTED__
+#Starting...
+#SQLSTATE: HY106
+#Message: [IBM][CLI Driver] CLI0145E  Fetch type out of range. SQLSTATE=HY106 SQLCODE=-99999
+#DONE
+#__ZOS_EXPECTED__
+#Starting...
+#SQLSTATE: HY106
+#Message: [IBM][CLI Driver] CLI0145E  Fetch type out of range. SQLSTATE=HY106 SQLCODE=-99999
+#DONE
+#__SYSTEMI_EXPECTED__
+#Starting...
+#SQLSTATE: HY106
+#Message: [IBM][CLI Driver] CLI0145E  Fetch type out of range. SQLSTATE=HY106 SQLCODE=-99999
+#DONE
+#__IDS_EXPECTED__
+#Starting...
+#SQLSTATE: HY106
+#Message: [IBM][CLI Driver] CLI0145E  Fetch type out of range. SQLSTATE=HY106 SQLCODE=-99999
+#DONE
diff -pruN 0.3.0-3/tests/test_157b_FetchAssocScrollableCursor_02.py 2.0.5-0ubuntu2/tests/test_157b_FetchAssocScrollableCursor_02.py
--- 0.3.0-3/tests/test_157b_FetchAssocScrollableCursor_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_157b_FetchAssocScrollableCursor_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,57 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_157b_FetchAssocScrollableCursor_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_157b)
+
+  def run_test_157b(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      sql = "SELECT id, name, breed, weight FROM animals ORDER BY breed"
+      if (server.DBMS_NAME[0:3] != 'IDS'):
+        stmt = ibm_db.prepare(conn, sql, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        stmt = ibm_db.prepare(conn, sql, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_STATIC})
+      result = ibm_db.execute(stmt)
+      i = 2
+      row = ibm_db.fetch_assoc(stmt, i)
+      while ( row ):
+        if (server.DBMS_NAME[0:3] == 'IDS'):
+          #printf("%-5d %-16s %-32s %10s\n", row['id'], row['name'], row['breed'], row['weight'])
+          print "%-5d %-16s %-32s %10s" % (row['id'], row['name'], row['breed'], row['weight'])
+        else:
+          #printf("%-5d %-16s %-32s %10s\n", row['ID'], row['NAME'], row['BREED'], row['WEIGHT'])
+          print "%-5d %-16s %-32s %10s" % (row['ID'], row['NAME'], row['BREED'], row['WEIGHT'])
+        i = i + 2
+        row = ibm_db.fetch_assoc(stmt, i)
+
+#__END__
+#__LUW_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__ZOS_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__SYSTEMI_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__IDS_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
diff -pruN 0.3.0-3/tests/test_157_FetchAssocScrollableCursor_01.py 2.0.5-0ubuntu2/tests/test_157_FetchAssocScrollableCursor_01.py
--- 0.3.0-3/tests/test_157_FetchAssocScrollableCursor_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_157_FetchAssocScrollableCursor_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,55 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_157_FetchAssocScrollableCursor_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_157)
+
+  def run_test_157(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      sql = "SELECT id, name, breed, weight FROM animals ORDER BY breed"
+      if (server.DBMS_NAME[0:3] != 'IDS'):
+        result = ibm_db.exec_immediate(conn, sql, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+      else:
+        result = ibm_db.exec_immediate(conn, sql, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_STATIC})
+
+      i = 2
+      row = ibm_db.fetch_assoc(result, i)
+      while ( row ):
+        if (server.DBMS_NAME[0:3] == 'IDS'):
+	           print "%-5d %-16s %-32s %10s\n" % (row['id'], row['name'], row['breed'], row['weight'])
+        else:
+	           print "%-5d %-16s %-32s %10s\n" % (row['ID'], row['NAME'], row['BREED'], row['WEIGHT'])
+        i = i + 2
+        row = ibm_db.fetch_assoc(result, i)
+#
+#__END__
+#__LUW_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__ZOS_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__SYSTEMI_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
+#__IDS_EXPECTED__
+#0     Pook             cat                                    3.20
+#5     Rickety Ride     goat                                   9.70
+#2     Smarty           horse                                350.00
diff -pruN 0.3.0-3/tests/test_158_FetchAssocNestedSelects_02.py 2.0.5-0ubuntu2/tests/test_158_FetchAssocNestedSelects_02.py
--- 0.3.0-3/tests/test_158_FetchAssocNestedSelects_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_158_FetchAssocNestedSelects_02.py	2014-01-31 11:06:28.000000000 +0000
@@ -0,0 +1,59 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_158_FetchAssocNestedSelects_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_158)
+
+  def run_test_158(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "SELECT * FROM staff WHERE id < 50")
+    
+    output = ''
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      output += str(row['ID']) + ', ' + row['NAME'] + ', ' + str(row['DEPT']) + ', ' + row['JOB'] + ', ' + str(row['YEARS']) + ', ' + str(row['SALARY']) + ', ' + str(row['COMM'])
+      row = ibm_db.fetch_assoc(result)
+      
+    result2 = ibm_db.exec_immediate(conn,"SELECT * FROM department WHERE substr(deptno,1,1) in ('A','B','C','D','E')")
+    row2 = ibm_db.fetch_assoc(result2)
+    while ( row2 ):
+        if (row2['MGRNO'] == None): 
+            row2['MGRNO'] = ''
+        if (row2['LOCATION'] == None): 
+            row2['LOCATION'] = ''
+        output += str(row2['DEPTNO']) + ', ' + row2['DEPTNAME'] + ', ' + str(row2['MGRNO']) + ', ' + row2['ADMRDEPT'] + ', ' + row2['LOCATION']
+        row2 = ibm_db.fetch_assoc(result2)
+    
+    result3 = ibm_db.exec_immediate(conn,"SELECT * FROM employee WHERE lastname IN ('HAAS','THOMPSON', 'KWAN', 'GEYER', 'STERN', 'PULASKI', 'HENDERSON', 'SPENSER', 'LUCCHESSI', 'OCONNELL', 'QUINTANA', 'NICHOLLS', 'ADAMSON', 'PIANKA', 'YOSHIMURA', 'SCOUTTEN', 'WALKER', 'BROWN', 'JONES', 'LUTZ', 'JEFFERSON', 'MARINO', 'SMITH', 'JOHNSON', 'PEREZ', 'SCHNEIDER', 'PARKER', 'SMITH', 'SETRIGHT', 'MEHTA', 'LEE', 'GOUNOT')")
+    row3 = ibm_db.fetch_tuple(result3)
+    while ( row3 ):
+        output += row3[0] + ', ' + row3[3] + ', ' + row3[5]
+        row3=ibm_db.fetch_tuple(result3)
+    print output
+
+#__END__
+#__LUW_EXPECTED__
+#10, Sanders, 20, Mgr  , 7, 18357.50, None20, Pernal, 20, Sales, 8, 18171.25, 612.4530, Marenghi, 38, Mgr  , 5, 17506.75, None40, OBrien, 38, Sales, 6, 18006.00, 846.55A00, SPIFFY COMPUTER SERVICE DIV., 000010, A00, B01, PLANNING, 000020, A00, C01, INFORMATION CENTER, 000030, A00, D01, DEVELOPMENT CENTER, , A00, D11, MANUFACTURING SYSTEMS, 000060, D01, D21, ADMINISTRATION SYSTEMS, 000070, D01, E01, SUPPORT SERVICES, 000050, A00, E11, OPERATIONS, 000090, E01, E21, SOFTWARE SUPPORT, 000100, E01, 000010, HAAS, 3978000020, THOMPSON, 3476000030, KWAN, 4738000050, GEYER, 6789000060, STERN, 6423000070, PULASKI, 7831000090, HENDERSON, 5498000100, SPENSER, 0972000110, LUCCHESSI, 3490000120, OCONNELL, 2167000130, QUINTANA, 4578000140, NICHOLLS, 1793000150, ADAMSON, 4510000160, PIANKA, 3782000170, YOSHIMURA, 2890000180, SCOUTTEN, 1682000190, WALKER, 2986000200, BROWN, 4501000210, JONES, 0942000220, LUTZ, 0672000230, JEFFERSON, 2094000240, MARINO, 3780000250, SMITH, 0961000260, JOHNSON, 8953000270, PEREZ, 9001000280, SCHNEIDER, 8997000290, PARKER, 4502000300, SMITH, 2095000310, SETRIGHT, 3332000320, MEHTA, 9990000330, LEE, 2103000340, GOUNOT, 5698
+#__ZOS_EXPECTED__
+#10, Sanders, 20, Mgr  , 7, 18357.50, None20, Pernal, 20, Sales, 8, 18171.25, 612.4530, Marenghi, 38, Mgr  , 5, 17506.75, None40, OBrien, 38, Sales, 6, 18006.00, 846.55A00, SPIFFY COMPUTER SERVICE DIV., 000010, A00, B01, PLANNING, 000020, A00, C01, INFORMATION CENTER, 000030, A00, D01, DEVELOPMENT CENTER, , A00, D11, MANUFACTURING SYSTEMS, 000060, D01, D21, ADMINISTRATION SYSTEMS, 000070, D01, E01, SUPPORT SERVICES, 000050, A00, E11, OPERATIONS, 000090, E01, E21, SOFTWARE SUPPORT, 000100, E01, 000010, HAAS, 3978000020, THOMPSON, 3476000030, KWAN, 4738000050, GEYER, 6789000060, STERN, 6423000070, PULASKI, 7831000090, HENDERSON, 5498000100, SPENSER, 0972000110, LUCCHESSI, 3490000120, OCONNELL, 2167000130, QUINTANA, 4578000140, NICHOLLS, 1793000150, ADAMSON, 4510000160, PIANKA, 3782000170, YOSHIMURA, 2890000180, SCOUTTEN, 1682000190, WALKER, 2986000200, BROWN, 4501000210, JONES, 0942000220, LUTZ, 0672000230, JEFFERSON, 2094000240, MARINO, 3780000250, SMITH, 0961000260, JOHNSON, 8953000270, PEREZ, 9001000280, SCHNEIDER, 8997000290, PARKER, 4502000300, SMITH, 2095000310, SETRIGHT, 3332000320, MEHTA, 9990000330, LEE, 2103000340, GOUNOT, 5698
+#__SYSTEMI_EXPECTED__
+#10, Sanders, 20, Mgr  , 7, 18357.50, None20, Pernal, 20, Sales, 8, 18171.25, 612.4530, Marenghi, 38, Mgr  , 5, 17506.75, None40, OBrien, 38, Sales, 6, 18006.00, 846.55A00, SPIFFY COMPUTER SERVICE DIV., 000010, A00, B01, PLANNING, 000020, A00, C01, INFORMATION CENTER, 000030, A00, D01, DEVELOPMENT CENTER, , A00, D11, MANUFACTURING SYSTEMS, 000060, D01, D21, ADMINISTRATION SYSTEMS, 000070, D01, E01, SUPPORT SERVICES, 000050, A00, E11, OPERATIONS, 000090, E01, E21, SOFTWARE SUPPORT, 000100, E01, 000010, HAAS, 3978000020, THOMPSON, 3476000030, KWAN, 4738000050, GEYER, 6789000060, STERN, 6423000070, PULASKI, 7831000090, HENDERSON, 5498000100, SPENSER, 0972000110, LUCCHESSI, 3490000120, OCONNELL, 2167000130, QUINTANA, 4578000140, NICHOLLS, 1793000150, ADAMSON, 4510000160, PIANKA, 3782000170, YOSHIMURA, 2890000180, SCOUTTEN, 1682000190, WALKER, 2986000200, BROWN, 4501000210, JONES, 0942000220, LUTZ, 0672000230, JEFFERSON, 2094000240, MARINO, 3780000250, SMITH, 0961000260, JOHNSON, 8953000270, PEREZ, 9001000280, SCHNEIDER, 8997000290, PARKER, 4502000300, SMITH, 2095000310, SETRIGHT, 3332000320, MEHTA, 9990000330, LEE, 2103000340, GOUNOT, 5698
+#__IDS_EXPECTED__
+#10, Sanders, 20, Mgr  , 7, 18357.50, None20, Pernal, 20, Sales, 8, 18171.25, 612.4530, Marenghi, 38, Mgr  , 5, 17506.75, None40, OBrien, 38, Sales, 6, 18006.00, 846.55A00, SPIFFY COMPUTER SERVICE DIV., 000010, A00, B01, PLANNING, 000020, A00, C01, INFORMATION CENTER, 000030, A00, D01, DEVELOPMENT CENTER, , A00, D11, MANUFACTURING SYSTEMS, 000060, D01, D21, ADMINISTRATION SYSTEMS, 000070, D01, E01, SUPPORT SERVICES, 000050, A00, E11, OPERATIONS, 000090, E01, E21, SOFTWARE SUPPORT, 000100, E01, 000010, HAAS, 3978000020, THOMPSON, 3476000030, KWAN, 4738000050, GEYER, 6789000060, STERN, 6423000070, PULASKI, 7831000090, HENDERSON, 5498000100, SPENSER, 0972000110, LUCCHESSI, 3490000120, OCONNELL, 2167000130, QUINTANA, 4578000140, NICHOLLS, 1793000150, ADAMSON, 4510000160, PIANKA, 3782000170, YOSHIMURA, 2890000180, SCOUTTEN, 1682000190, WALKER, 2986000200, BROWN, 4501000210, JONES, 0942000220, LUTZ, 0672000230, JEFFERSON, 2094000240, MARINO, 3780000250, SMITH, 0961000260, JOHNSON, 8953000270, PEREZ, 9001000280, SCHNEIDER, 8997000290, PARKER, 4502000300, SMITH, 2095000310, SETRIGHT, 3332000320, MEHTA, 9990000330, LEE, 2103000340, GOUNOT, 5698
diff -pruN 0.3.0-3/tests/test_159a_FetchAssocSeveralRows_02.py 2.0.5-0ubuntu2/tests/test_159a_FetchAssocSeveralRows_02.py
--- 0.3.0-3/tests/test_159a_FetchAssocSeveralRows_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_159a_FetchAssocSeveralRows_02.py	2014-01-31 11:05:23.000000000 +0000
@@ -0,0 +1,120 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_159a_FetchAssocSeveralRows_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_159a)
+
+  def run_test_159a(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select prstdate,prendate from project")
+    i = 1
+    
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("%3d %10s %10s\n",i, row['PRSTDATE'], row['PRENDATE'])
+      print "%3d %10s %10s" % (i, row['PRSTDATE'], row['PRENDATE'])
+      i += 1
+      row = ibm_db.fetch_assoc(result)
+
+#__END__
+#__LUW_EXPECTED__
+#  1 1982-01-01 1983-02-01
+#  2 1982-01-01 1983-02-01
+#  3 1982-01-01 1983-02-01
+#  4 1982-01-01 1983-02-01
+#  5 1982-01-01 1983-02-01
+#  6 1982-01-01 1983-02-01
+#  7 1982-01-01 1983-02-01
+#  8 1982-01-01 1983-02-01
+#  9 1982-01-01 1983-02-01
+# 10 1982-01-01 1982-12-01
+# 11 1982-01-01 1982-12-01
+# 12 1982-02-15 1982-12-01
+# 13 1982-01-01 1983-02-01
+# 14 1982-01-01 1983-02-01
+# 15 1982-01-01 1983-02-01
+# 16 1982-01-01 1983-02-01
+# 17 1982-01-01 1983-02-01
+# 18 1982-01-01 1983-02-01
+# 19 1982-01-01 1983-02-01
+# 20 1982-01-01 1982-09-15
+#__ZOS_EXPECTED__
+#  1 1982-01-01 1983-02-01
+#  2 1982-01-01 1983-02-01
+#  3 1982-01-01 1983-02-01
+#  4 1982-01-01 1983-02-01
+#  5 1982-01-01 1983-02-01
+#  6 1982-01-01 1983-02-01
+#  7 1982-01-01 1983-02-01
+#  8 1982-01-01 1983-02-01
+#  9 1982-01-01 1983-02-01
+# 10 1982-01-01 1982-12-01
+# 11 1982-01-01 1982-12-01
+# 12 1982-02-15 1982-12-01
+# 13 1982-01-01 1983-02-01
+# 14 1982-01-01 1983-02-01
+# 15 1982-01-01 1983-02-01
+# 16 1982-01-01 1983-02-01
+# 17 1982-01-01 1983-02-01
+# 18 1982-01-01 1983-02-01
+# 19 1982-01-01 1983-02-01
+# 20 1982-01-01 1982-09-15
+#__SYSTEMI_EXPECTED__
+#  1 1982-01-01 1983-02-01
+#  2 1982-01-01 1983-02-01
+#  3 1982-01-01 1983-02-01
+#  4 1982-01-01 1983-02-01
+#  5 1982-01-01 1983-02-01
+#  6 1982-01-01 1983-02-01
+#  7 1982-01-01 1983-02-01
+#  8 1982-01-01 1983-02-01
+#  9 1982-01-01 1983-02-01
+# 10 1982-01-01 1982-12-01
+# 11 1982-01-01 1982-12-01
+# 12 1982-02-15 1982-12-01
+# 13 1982-01-01 1983-02-01
+# 14 1982-01-01 1983-02-01
+# 15 1982-01-01 1983-02-01
+# 16 1982-01-01 1983-02-01
+# 17 1982-01-01 1983-02-01
+# 18 1982-01-01 1983-02-01
+# 19 1982-01-01 1983-02-01
+# 20 1982-01-01 1982-09-15
+#__IDS_EXPECTED__
+#  1 1982-01-01 1983-02-01
+#  2 1982-01-01 1983-02-01
+#  3 1982-01-01 1983-02-01
+#  4 1982-01-01 1983-02-01
+#  5 1982-01-01 1983-02-01
+#  6 1982-01-01 1983-02-01
+#  7 1982-01-01 1983-02-01
+#  8 1982-01-01 1983-02-01
+#  9 1982-01-01 1983-02-01
+# 10 1982-01-01 1982-12-01
+# 11 1982-01-01 1982-12-01
+# 12 1982-02-15 1982-12-01
+# 13 1982-01-01 1983-02-01
+# 14 1982-01-01 1983-02-01
+# 15 1982-01-01 1983-02-01
+# 16 1982-01-01 1983-02-01
+# 17 1982-01-01 1983-02-01
+# 18 1982-01-01 1983-02-01
+# 19 1982-01-01 1983-02-01
+# 20 1982-01-01 1982-09-15
diff -pruN 0.3.0-3/tests/test_159_FetchAssocSeveralRows_01.py 2.0.5-0ubuntu2/tests/test_159_FetchAssocSeveralRows_01.py
--- 0.3.0-3/tests/test_159_FetchAssocSeveralRows_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_159_FetchAssocSeveralRows_01.py	2014-01-31 11:06:08.000000000 +0000
@@ -0,0 +1,179 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_159_FetchAssocSeveralRows_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_159)
+
+  def run_test_159(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select name,job from staff")
+    i = 1
+    row = ibm_db.fetch_assoc(result)
+    while ( row ):
+      #printf("%3d %10s %10s\n",i, row['NAME'], row['JOB'])
+      print "%3d %10s %10s" % (i, row['NAME'], row['JOB'])
+      i += 1
+      row = ibm_db.fetch_assoc(result)
+
+#__END__
+#__LUW_EXPECTED__
+#  1    Sanders      Mgr  
+#  2     Pernal      Sales
+#  3   Marenghi      Mgr  
+#  4     OBrien      Sales
+#  5      Hanes      Mgr  
+#  6    Quigley      Sales
+#  7    Rothman      Sales
+#  8      James      Clerk
+#  9    Koonitz      Sales
+# 10      Plotz      Mgr  
+# 11       Ngan      Clerk
+# 12   Naughton      Clerk
+# 13  Yamaguchi      Clerk
+# 14      Fraye      Mgr  
+# 15   Williams      Sales
+# 16   Molinare      Mgr  
+# 17   Kermisch      Clerk
+# 18   Abrahams      Clerk
+# 19    Sneider      Clerk
+# 20   Scoutten      Clerk
+# 21         Lu      Mgr  
+# 22      Smith      Sales
+# 23  Lundquist      Clerk
+# 24    Daniels      Mgr  
+# 25    Wheeler      Clerk
+# 26      Jones      Mgr  
+# 27        Lea      Mgr  
+# 28     Wilson      Sales
+# 29      Quill      Mgr  
+# 30      Davis      Sales
+# 31     Graham      Sales
+# 32   Gonzales      Sales
+# 33      Burke      Clerk
+# 34    Edwards      Sales
+# 35     Gafney      Clerk
+#__ZOS_EXPECTED__
+#  1    Sanders      Mgr  
+#  2     Pernal      Sales
+#  3   Marenghi      Mgr  
+#  4     OBrien      Sales
+#  5      Hanes      Mgr  
+#  6    Quigley      Sales
+#  7    Rothman      Sales
+#  8      James      Clerk
+#  9    Koonitz      Sales
+# 10      Plotz      Mgr  
+# 11       Ngan      Clerk
+# 12   Naughton      Clerk
+# 13  Yamaguchi      Clerk
+# 14      Fraye      Mgr  
+# 15   Williams      Sales
+# 16   Molinare      Mgr  
+# 17   Kermisch      Clerk
+# 18   Abrahams      Clerk
+# 19    Sneider      Clerk
+# 20   Scoutten      Clerk
+# 21         Lu      Mgr  
+# 22      Smith      Sales
+# 23  Lundquist      Clerk
+# 24    Daniels      Mgr  
+# 25    Wheeler      Clerk
+# 26      Jones      Mgr  
+# 27        Lea      Mgr  
+# 28     Wilson      Sales
+# 29      Quill      Mgr  
+# 30      Davis      Sales
+# 31     Graham      Sales
+# 32   Gonzales      Sales
+# 33      Burke      Clerk
+# 34    Edwards      Sales
+# 35     Gafney      Clerk
+#__SYSTEMI_EXPECTED__
+#  1    Sanders      Mgr  
+#  2     Pernal      Sales
+#  3   Marenghi      Mgr  
+#  4     OBrien      Sales
+#  5      Hanes      Mgr  
+#  6    Quigley      Sales
+#  7    Rothman      Sales
+#  8      James      Clerk
+#  9    Koonitz      Sales
+# 10      Plotz      Mgr  
+# 11       Ngan      Clerk
+# 12   Naughton      Clerk
+# 13  Yamaguchi      Clerk
+# 14      Fraye      Mgr  
+# 15   Williams      Sales
+# 16   Molinare      Mgr  
+# 17   Kermisch      Clerk
+# 18   Abrahams      Clerk
+# 19    Sneider      Clerk
+# 20   Scoutten      Clerk
+# 21         Lu      Mgr  
+# 22      Smith      Sales
+# 23  Lundquist      Clerk
+# 24    Daniels      Mgr  
+# 25    Wheeler      Clerk
+# 26      Jones      Mgr  
+# 27        Lea      Mgr  
+# 28     Wilson      Sales
+# 29      Quill      Mgr  
+# 30      Davis      Sales
+# 31     Graham      Sales
+# 32   Gonzales      Sales
+# 33      Burke      Clerk
+# 34    Edwards      Sales
+# 35     Gafney      Clerk
+#__IDS_EXPECTED__
+#  1    Sanders      Mgr  
+#  2     Pernal      Sales
+#  3   Marenghi      Mgr  
+#  4     OBrien      Sales
+#  5      Hanes      Mgr  
+#  6    Quigley      Sales
+#  7    Rothman      Sales
+#  8      James      Clerk
+#  9    Koonitz      Sales
+# 10      Plotz      Mgr  
+# 11       Ngan      Clerk
+# 12   Naughton      Clerk
+# 13  Yamaguchi      Clerk
+# 14      Fraye      Mgr  
+# 15   Williams      Sales
+# 16   Molinare      Mgr  
+# 17   Kermisch      Clerk
+# 18   Abrahams      Clerk
+# 19    Sneider      Clerk
+# 20   Scoutten      Clerk
+# 21         Lu      Mgr  
+# 22      Smith      Sales
+# 23  Lundquist      Clerk
+# 24    Daniels      Mgr  
+# 25    Wheeler      Clerk
+# 26      Jones      Mgr  
+# 27        Lea      Mgr  
+# 28     Wilson      Sales
+# 29      Quill      Mgr  
+# 30      Davis      Sales
+# 31     Graham      Sales
+# 32   Gonzales      Sales
+# 33      Burke      Clerk
+# 34    Edwards      Sales
+# 35     Gafney      Clerk
diff -pruN 0.3.0-3/tests/test_160_FetchBoth.py 2.0.5-0ubuntu2/tests/test_160_FetchBoth.py
--- 0.3.0-3/tests/test_160_FetchBoth.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_160_FetchBoth.py	2014-01-31 11:05:02.000000000 +0000
@@ -0,0 +1,345 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_160_FetchBoth(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_160)
+
+  def run_test_160(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select * from emp_act")
+    
+    row = ibm_db.fetch_both(result)
+    while ( row ):
+      #printf("%6s  ",row[0])
+      #printf("%-6s ",row[1])
+      #printf("%3d ",row[2])
+      #printf("%9s ",row['EMPTIME'])
+      #printf("%10s ", row['EMSTDATE'])
+      #printf("%10s ", row['EMENDATE'])
+      #printf("%6s ", row[0])
+      #puts ""
+      print "%6s  %-6s %3d %9s %10s %10s %6s " % (row[0], row[1], row[2], row['EMPTIME'], row['EMSTDATE'], row['EMENDATE'], row[0])
+      row = ibm_db.fetch_both(result)
+
+#__END__
+#__LUW_EXPECTED__
+#000010  MA2100  10      0.50 1982-01-01 1982-11-01 000010 
+#000010  MA2110  10      1.00 1982-01-01 1983-02-01 000010 
+#000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#000030  IF1000  10      0.50 1982-06-01 1983-01-01 000030 
+#000030  IF2000  10      0.50 1982-01-01 1983-01-01 000030 
+#000050  OP1000  10      0.25 1982-01-01 1983-02-01 000050 
+#000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#000090  OP1010  10      1.00 1982-01-01 1983-02-01 000090 
+#000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#000110  MA2100  20      1.00 1982-01-01 1982-03-01 000110 
+#000130  IF1000  90      1.00 1982-01-01 1982-10-01 000130 
+#000130  IF1000 100      0.50 1982-10-01 1983-01-01 000130 
+#000140  IF1000  90      0.50 1982-10-01 1983-01-01 000140 
+#000140  IF2000 100      1.00 1982-01-01 1982-03-01 000140 
+#000140  IF2000 100      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-10-01 1983-01-01 000140 
+#000150  MA2112  60      1.00 1982-01-01 1982-07-15 000150 
+#000150  MA2112 180      1.00 1982-07-15 1983-02-01 000150 
+#000160  MA2113  60      1.00 1982-07-15 1983-02-01 000160 
+#000170  MA2112  60      1.00 1982-01-01 1983-06-01 000170 
+#000170  MA2112  70      1.00 1982-06-01 1983-02-01 000170 
+#000170  MA2113  80      1.00 1982-01-01 1983-02-01 000170 
+#000180  MA2113  70      1.00 1982-04-01 1982-06-15 000180 
+#000190  MA2112  70      1.00 1982-02-01 1982-10-01 000190 
+#000190  MA2112  80      1.00 1982-10-01 1983-10-01 000190 
+#000200  MA2111  50      1.00 1982-01-01 1982-06-15 000200 
+#000200  MA2111  60      1.00 1982-06-15 1983-02-01 000200 
+#000210  MA2113  80      0.50 1982-10-01 1983-02-01 000210 
+#000210  MA2113 180      0.50 1982-10-01 1983-02-01 000210 
+#000220  MA2111  40      1.00 1982-01-01 1983-02-01 000220 
+#000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#000250  AD3112  60      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  60      0.50 1982-12-01 1983-01-01 000250 
+#000250  AD3112  60      1.00 1983-01-01 1983-02-01 000250 
+#000250  AD3112  70      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  70      1.00 1982-03-15 1982-08-15 000250 
+#000250  AD3112  70      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.50 1982-10-15 1982-12-01 000250 
+#000250  AD3112 180      0.50 1982-08-15 1983-01-01 000250 
+#000260  AD3113  70      0.50 1982-06-15 1982-07-01 000260 
+#000260  AD3113  70      1.00 1982-07-01 1983-02-01 000260 
+#000260  AD3113  80      1.00 1982-01-01 1982-03-01 000260 
+#000260  AD3113  80      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      1.00 1982-04-15 1982-06-01 000260 
+#000260  AD3113 180      0.50 1982-06-01 1982-07-01 000260 
+#000270  AD3113  60      0.50 1982-03-01 1982-04-01 000270 
+#000270  AD3113  60      1.00 1982-04-01 1982-09-01 000270 
+#000270  AD3113  60      0.25 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      0.75 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      1.00 1982-10-15 1983-02-01 000270 
+#000270  AD3113  80      1.00 1982-01-01 1982-03-01 000270 
+#000270  AD3113  80      0.50 1982-03-01 1982-04-01 000270 
+#000280  OP1010 130      1.00 1982-01-01 1983-02-01 000280 
+#000290  OP1010 130      1.00 1982-01-01 1983-02-01 000290 
+#000300  OP1010 130      1.00 1982-01-01 1983-02-01 000300 
+#000310  OP1010 130      1.00 1982-01-01 1983-02-01 000310 
+#000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#__ZOS_EXPECTED__
+#000010  MA2100  10      0.50 1982-01-01 1982-11-01 000010 
+#000010  MA2110  10      1.00 1982-01-01 1983-02-01 000010 
+#000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#000030  IF1000  10      0.50 1982-06-01 1983-01-01 000030 
+#000030  IF2000  10      0.50 1982-01-01 1983-01-01 000030 
+#000050  OP1000  10      0.25 1982-01-01 1983-02-01 000050 
+#000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#000090  OP1010  10      1.00 1982-01-01 1983-02-01 000090 
+#000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#000110  MA2100  20      1.00 1982-01-01 1982-03-01 000110 
+#000130  IF1000  90      1.00 1982-01-01 1982-10-01 000130 
+#000130  IF1000 100      0.50 1982-10-01 1983-01-01 000130 
+#000140  IF1000  90      0.50 1982-10-01 1983-01-01 000140 
+#000140  IF2000 100      1.00 1982-01-01 1982-03-01 000140 
+#000140  IF2000 100      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-10-01 1983-01-01 000140 
+#000150  MA2112  60      1.00 1982-01-01 1982-07-15 000150 
+#000150  MA2112 180      1.00 1982-07-15 1983-02-01 000150 
+#000160  MA2113  60      1.00 1982-07-15 1983-02-01 000160 
+#000170  MA2112  60      1.00 1982-01-01 1983-06-01 000170 
+#000170  MA2112  70      1.00 1982-06-01 1983-02-01 000170 
+#000170  MA2113  80      1.00 1982-01-01 1983-02-01 000170 
+#000180  MA2113  70      1.00 1982-04-01 1982-06-15 000180 
+#000190  MA2112  70      1.00 1982-02-01 1982-10-01 000190 
+#000190  MA2112  80      1.00 1982-10-01 1983-10-01 000190 
+#000200  MA2111  50      1.00 1982-01-01 1982-06-15 000200 
+#000200  MA2111  60      1.00 1982-06-15 1983-02-01 000200 
+#000210  MA2113  80      0.50 1982-10-01 1983-02-01 000210 
+#000210  MA2113 180      0.50 1982-10-01 1983-02-01 000210 
+#000220  MA2111  40      1.00 1982-01-01 1983-02-01 000220 
+#000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#000250  AD3112  60      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  60      0.50 1982-12-01 1983-01-01 000250 
+#000250  AD3112  60      1.00 1983-01-01 1983-02-01 000250 
+#000250  AD3112  70      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  70      1.00 1982-03-15 1982-08-15 000250 
+#000250  AD3112  70      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.50 1982-10-15 1982-12-01 000250 
+#000250  AD3112 180      0.50 1982-08-15 1983-01-01 000250 
+#000260  AD3113  70      0.50 1982-06-15 1982-07-01 000260 
+#000260  AD3113  70      1.00 1982-07-01 1983-02-01 000260 
+#000260  AD3113  80      1.00 1982-01-01 1982-03-01 000260 
+#000260  AD3113  80      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      1.00 1982-04-15 1982-06-01 000260 
+#000260  AD3113 180      0.50 1982-06-01 1982-07-01 000260 
+#000270  AD3113  60      0.50 1982-03-01 1982-04-01 000270 
+#000270  AD3113  60      1.00 1982-04-01 1982-09-01 000270 
+#000270  AD3113  60      0.25 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      0.75 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      1.00 1982-10-15 1983-02-01 000270 
+#000270  AD3113  80      1.00 1982-01-01 1982-03-01 000270 
+#000270  AD3113  80      0.50 1982-03-01 1982-04-01 000270 
+#000280  OP1010 130      1.00 1982-01-01 1983-02-01 000280 
+#000290  OP1010 130      1.00 1982-01-01 1983-02-01 000290 
+#000300  OP1010 130      1.00 1982-01-01 1983-02-01 000300 
+#000310  OP1010 130      1.00 1982-01-01 1983-02-01 000310 
+#000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#__SYSTEMI_EXPECTED__
+#000010  MA2100  10      0.50 1982-01-01 1982-11-01 000010 
+#000010  MA2110  10      1.00 1982-01-01 1983-02-01 000010 
+#000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#000030  IF1000  10      0.50 1982-06-01 1983-01-01 000030 
+#000030  IF2000  10      0.50 1982-01-01 1983-01-01 000030 
+#000050  OP1000  10      0.25 1982-01-01 1983-02-01 000050 
+#000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#000090  OP1010  10      1.00 1982-01-01 1983-02-01 000090 
+#000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#000110  MA2100  20      1.00 1982-01-01 1982-03-01 000110 
+#000130  IF1000  90      1.00 1982-01-01 1982-10-01 000130 
+#000130  IF1000 100      0.50 1982-10-01 1983-01-01 000130 
+#000140  IF1000  90      0.50 1982-10-01 1983-01-01 000140 
+#000140  IF2000 100      1.00 1982-01-01 1982-03-01 000140 
+#000140  IF2000 100      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-10-01 1983-01-01 000140 
+#000150  MA2112  60      1.00 1982-01-01 1982-07-15 000150 
+#000150  MA2112 180      1.00 1982-07-15 1983-02-01 000150 
+#000160  MA2113  60      1.00 1982-07-15 1983-02-01 000160 
+#000170  MA2112  60      1.00 1982-01-01 1983-06-01 000170 
+#000170  MA2112  70      1.00 1982-06-01 1983-02-01 000170 
+#000170  MA2113  80      1.00 1982-01-01 1983-02-01 000170 
+#000180  MA2113  70      1.00 1982-04-01 1982-06-15 000180 
+#000190  MA2112  70      1.00 1982-02-01 1982-10-01 000190 
+#000190  MA2112  80      1.00 1982-10-01 1983-10-01 000190 
+#000200  MA2111  50      1.00 1982-01-01 1982-06-15 000200 
+#000200  MA2111  60      1.00 1982-06-15 1983-02-01 000200 
+#000210  MA2113  80      0.50 1982-10-01 1983-02-01 000210 
+#000210  MA2113 180      0.50 1982-10-01 1983-02-01 000210 
+#000220  MA2111  40      1.00 1982-01-01 1983-02-01 000220 
+#000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#000250  AD3112  60      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  60      0.50 1982-12-01 1983-01-01 000250 
+#000250  AD3112  60      1.00 1983-01-01 1983-02-01 000250 
+#000250  AD3112  70      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  70      1.00 1982-03-15 1982-08-15 000250 
+#000250  AD3112  70      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.50 1982-10-15 1982-12-01 000250 
+#000250  AD3112 180      0.50 1982-08-15 1983-01-01 000250 
+#000260  AD3113  70      0.50 1982-06-15 1982-07-01 000260 
+#000260  AD3113  70      1.00 1982-07-01 1983-02-01 000260 
+#000260  AD3113  80      1.00 1982-01-01 1982-03-01 000260 
+#000260  AD3113  80      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      1.00 1982-04-15 1982-06-01 000260 
+#000260  AD3113 180      0.50 1982-06-01 1982-07-01 000260 
+#000270  AD3113  60      0.50 1982-03-01 1982-04-01 000270 
+#000270  AD3113  60      1.00 1982-04-01 1982-09-01 000270 
+#000270  AD3113  60      0.25 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      0.75 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      1.00 1982-10-15 1983-02-01 000270 
+#000270  AD3113  80      1.00 1982-01-01 1982-03-01 000270 
+#000270  AD3113  80      0.50 1982-03-01 1982-04-01 000270 
+#000280  OP1010 130      1.00 1982-01-01 1983-02-01 000280 
+#000290  OP1010 130      1.00 1982-01-01 1983-02-01 000290 
+#000300  OP1010 130      1.00 1982-01-01 1983-02-01 000300 
+#000310  OP1010 130      1.00 1982-01-01 1983-02-01 000310 
+#000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#__IDS_EXPECTED__
+#000010  MA2100  10      0.50 1982-01-01 1982-11-01 000010 
+#000010  MA2110  10      1.00 1982-01-01 1983-02-01 000010 
+#000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#000030  IF1000  10      0.50 1982-06-01 1983-01-01 000030 
+#000030  IF2000  10      0.50 1982-01-01 1983-01-01 000030 
+#000050  OP1000  10      0.25 1982-01-01 1983-02-01 000050 
+#000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#000090  OP1010  10      1.00 1982-01-01 1983-02-01 000090 
+#000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#000110  MA2100  20      1.00 1982-01-01 1982-03-01 000110 
+#000130  IF1000  90      1.00 1982-01-01 1982-10-01 000130 
+#000130  IF1000 100      0.50 1982-10-01 1983-01-01 000130 
+#000140  IF1000  90      0.50 1982-10-01 1983-01-01 000140 
+#000140  IF2000 100      1.00 1982-01-01 1982-03-01 000140 
+#000140  IF2000 100      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-03-01 1982-07-01 000140 
+#000140  IF2000 110      0.50 1982-10-01 1983-01-01 000140 
+#000150  MA2112  60      1.00 1982-01-01 1982-07-15 000150 
+#000150  MA2112 180      1.00 1982-07-15 1983-02-01 000150 
+#000160  MA2113  60      1.00 1982-07-15 1983-02-01 000160 
+#000170  MA2112  60      1.00 1982-01-01 1983-06-01 000170 
+#000170  MA2112  70      1.00 1982-06-01 1983-02-01 000170 
+#000170  MA2113  80      1.00 1982-01-01 1983-02-01 000170 
+#000180  MA2113  70      1.00 1982-04-01 1982-06-15 000180 
+#000190  MA2112  70      1.00 1982-02-01 1982-10-01 000190 
+#000190  MA2112  80      1.00 1982-10-01 1983-10-01 000190 
+#000200  MA2111  50      1.00 1982-01-01 1982-06-15 000200 
+#000200  MA2111  60      1.00 1982-06-15 1983-02-01 000200 
+#000210  MA2113  80      0.50 1982-10-01 1983-02-01 000210 
+#000210  MA2113 180      0.50 1982-10-01 1983-02-01 000210 
+#000220  MA2111  40      1.00 1982-01-01 1983-02-01 000220 
+#000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#000250  AD3112  60      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  60      0.50 1982-12-01 1983-01-01 000250 
+#000250  AD3112  60      1.00 1983-01-01 1983-02-01 000250 
+#000250  AD3112  70      0.50 1982-02-01 1982-03-15 000250 
+#000250  AD3112  70      1.00 1982-03-15 1982-08-15 000250 
+#000250  AD3112  70      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.25 1982-08-15 1982-10-15 000250 
+#000250  AD3112  80      0.50 1982-10-15 1982-12-01 000250 
+#000250  AD3112 180      0.50 1982-08-15 1983-01-01 000250 
+#000260  AD3113  70      0.50 1982-06-15 1982-07-01 000260 
+#000260  AD3113  70      1.00 1982-07-01 1983-02-01 000260 
+#000260  AD3113  80      1.00 1982-01-01 1982-03-01 000260 
+#000260  AD3113  80      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      0.50 1982-03-01 1982-04-15 000260 
+#000260  AD3113 180      1.00 1982-04-15 1982-06-01 000260 
+#000260  AD3113 180      0.50 1982-06-01 1982-07-01 000260 
+#000270  AD3113  60      0.50 1982-03-01 1982-04-01 000270 
+#000270  AD3113  60      1.00 1982-04-01 1982-09-01 000270 
+#000270  AD3113  60      0.25 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      0.75 1982-09-01 1982-10-15 000270 
+#000270  AD3113  70      1.00 1982-10-15 1983-02-01 000270 
+#000270  AD3113  80      1.00 1982-01-01 1982-03-01 000270 
+#000270  AD3113  80      0.50 1982-03-01 1982-04-01 000270 
+#000280  OP1010 130      1.00 1982-01-01 1983-02-01 000280 
+#000290  OP1010 130      1.00 1982-01-01 1983-02-01 000290 
+#000300  OP1010 130      1.00 1982-01-01 1983-02-01 000300 
+#000310  OP1010 130      1.00 1982-01-01 1983-02-01 000310 
+#000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
diff -pruN 0.3.0-3/tests/test_161_FetchBothNestedSelects_01.py 2.0.5-0ubuntu2/tests/test_161_FetchBothNestedSelects_01.py
--- 0.3.0-3/tests/test_161_FetchBothNestedSelects_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_161_FetchBothNestedSelects_01.py	2014-01-31 11:04:41.000000000 +0000
@@ -0,0 +1,124 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_161_FetchBothNestedSelects_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_161)
+
+  def run_test_161(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select * from emp_act order by projno desc")
+    row = ibm_db.fetch_both(result)
+    count = 1
+    while ( row ):
+      print "Record",count,": %6s  %-6s %3d %9s %10s %10s %6s " % (row[0], row[1], row[2], row['EMPTIME'], row['EMSTDATE'], row['EMENDATE'], row[0])
+      
+      result2 = ibm_db.exec_immediate(conn,"select * from employee where employee.empno='" + row['EMPNO'] + "'")
+      row2 = ibm_db.fetch_both(result2)
+      if row2:        
+         print ">>%s,%s,%s,%s,%s,%s,%s" % (row2['EMPNO'], row2['FIRSTNME'],row2['MIDINIT'], row2[3], row2[3], row2[5], row2[6])
+      count = count + 1
+      if (count > 10):
+          break
+      row = ibm_db.fetch_both(result)
+#__END__
+#__LUW_EXPECTED__
+#Record 1 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 2 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 3 : 000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 4 : 000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 5 : 000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 6 : 000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 7 : 000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 8 : 000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 9 : 000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
+#Record 10 : 000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
+#__ZOS_EXPECTED__
+#Record 1 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 2 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 3 : 000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 4 : 000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 5 : 000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 6 : 000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 7 : 000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 8 : 000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 9 : 000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
+#Record 10 : 000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
+#__SYSTEMI_EXPECTED__
+#Record 1 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 2 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 3 : 000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 4 : 000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 5 : 000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 6 : 000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 7 : 000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 8 : 000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 9 : 000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
+#Record 10 : 000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
+#__IDS_EXPECTED__
+#Record 1 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 2 : 000020  PL2100  30      1.00 1982-01-01 1982-09-15 000020 
+#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
+#Record 3 : 000340  OP2013 140      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 4 : 000340  OP2013 170      0.50 1982-01-01 1983-02-01 000340 
+#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
+#Record 5 : 000330  OP2012 140      0.25 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 6 : 000330  OP2012 160      0.75 1982-01-01 1983-02-01 000330 
+#>>000330,WING, ,LEE,LEE,2103,1976-02-23
+#Record 7 : 000320  OP2011 140      0.75 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 8 : 000320  OP2011 150      0.25 1982-01-01 1983-02-01 000320 
+#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
+#Record 9 : 000100  OP2010  10      1.00 1982-01-01 1983-02-01 000100 
+#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
+#Record 10 : 000050  OP2010  10      0.75 1982-01-01 1983-02-01 000050 
+#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
diff -pruN 0.3.0-3/tests/test_162_FetchBothNestedSelects_02.py 2.0.5-0ubuntu2/tests/test_162_FetchBothNestedSelects_02.py
--- 0.3.0-3/tests/test_162_FetchBothNestedSelects_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_162_FetchBothNestedSelects_02.py	2014-01-31 10:59:58.000000000 +0000
@@ -0,0 +1,125 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_162_FetchBothNestedSelects_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_162)
+
+  def run_test_162(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    result = ibm_db.exec_immediate(conn, "select * from emp_act order by projno")
+    row = ibm_db.fetch_both(result)
+    # will only retrieve 10 records
+    count = 1
+    while ( row ):
+      print "Record ",count,": %6s  %-6s %3d %9s %10s %10s %6s " % (row[0], row[1], row[2], row['EMPTIME'], row['EMSTDATE'], row['EMENDATE'], row[0])
+      
+      result2 = ibm_db.exec_immediate(conn,"select * from employee where employee.empno='" + row['EMPNO'] + "'")
+      row2 = ibm_db.fetch_both(result2)
+      if row2:        
+        print ">>%s,%s,%s,%s,%s,%s,%s" % (row2['EMPNO'], row2['FIRSTNME'],row2['MIDINIT'], row2[3], row2[3], row2[5], row2[6])      
+      count = count + 1
+      if (count > 10):
+          break
+      row = ibm_db.fetch_both(result)
+#__END__
+#__LUW_EXPECTED__
+#Record  1 : 000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#>>000010,CHRISTINE,I,HAAS,HAAS,3978,1965-01-01
+#Record  2 : 000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#>>000070,EVA,D,PULASKI,PULASKI,7831,1980-09-30
+#Record  3 : 000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  4 : 000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  5 : 000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  6 : 000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  7 : 000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  8 : 000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  9 : 000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  10 : 000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#>>000250,DANIEL,S,SMITH,SMITH,0961,1969-10-30
+#__ZOS_EXPECTED__
+#Record  1 : 000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#>>000010,CHRISTINE,I,HAAS,HAAS,3978,1965-01-01
+#Record  2 : 000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#>>000070,EVA,D,PULASKI,PULASKI,7831,1980-09-30
+#Record  3 : 000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  4 : 000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  5 : 000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  6 : 000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  7 : 000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  8 : 000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  9 : 000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  10 : 000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#>>000250,DANIEL,S,SMITH,SMITH,0961,1969-10-30
+#__SYSTEMI_EXPECTED__
+#Record  1 : 000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#>>000010,CHRISTINE,I,HAAS,HAAS,3978,1965-01-01
+#Record  2 : 000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#>>000070,EVA,D,PULASKI,PULASKI,7831,1980-09-30
+#Record  3 : 000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  4 : 000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  5 : 000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  6 : 000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  7 : 000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  8 : 000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  9 : 000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  10 : 000250  AD3112  60      1.00 1982-01-01 1982-02-01 000250 
+#>>000250,DANIEL,S,SMITH,SMITH,0961,1969-10-30
+#__IDS_EXPECTED__
+#Record  1 : 000010  AD3100  10      0.50 1982-01-01 1982-07-01 000010 
+#>>000010,CHRISTINE,I,HAAS,HAAS,3978,1965-01-01
+#Record  2 : 000070  AD3110  10      1.00 1982-01-01 1983-02-01 000070 
+#>>000070,EVA,D,PULASKI,PULASKI,7831,1980-09-30
+#Record  3 : 000240  AD3111  80      1.00 1982-09-15 1983-01-01 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  4 : 000230  AD3111 180      1.00 1982-10-15 1983-01-01 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  5 : 000230  AD3111  80      0.50 1982-04-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  6 : 000240  AD3111  70      1.00 1982-02-15 1982-09-15 000240 
+#>>000240,SALVATORE,M,MARINO,MARINO,3780,1979-12-05
+#Record  7 : 000230  AD3111  70      0.50 1982-03-15 1982-10-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  8 : 000230  AD3111  60      0.50 1982-03-15 1982-04-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  9 : 000230  AD3111  60      1.00 1982-01-01 1982-03-15 000230 
+#>>000230,JAMES,J,JEFFERSON,JEFFERSON,2094,1966-11-21
+#Record  10 : 000250  AD3112  80      0.25 1982-08-15 1982-10-15 000250 
+#>>000250,DANIEL,S,SMITH,SMITH,0961,1969-10-30
diff -pruN 0.3.0-3/tests/test_180_StmtErrMsg.py 2.0.5-0ubuntu2/tests/test_180_StmtErrMsg.py
--- 0.3.0-3/tests/test_180_StmtErrMsg.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_180_StmtErrMsg.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,57 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_180_StmtErrMsg(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_180)
+
+  def run_test_180(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    if conn:
+      result = ''
+      result2 = ''
+      try:
+        result = ibm_db.exec_immediate(conn,"insert int0 t_string values(123,1.222333,'one to one')")
+      except:
+        pass
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col:", cols,", " 
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+      try:
+        result = ibm_db.exec_immediate(conn,"delete from t_string where a=123")
+      except:
+        pass
+      if result:
+        cols = ibm_db.num_fields(result)
+        print "col:", cols,", "
+        rows = ibm_db.num_rows(result)
+        print "affected row:", rows
+      else:
+        print ibm_db.stmt_errormsg()
+    
+    else:
+      print "no connection"
+
+#__END__
+#__LUW_EXPECTED__
+#[IBM][CLI Driver][DB2/%s] SQL0104N  An unexpected token "insert int0 t_string" was found following "BEGIN-OF-STATEMENT".  Expected tokens may include:  "<space>".  SQLSTATE=42601 SQLCODE=-104col: 0 , affected row: 0
+#__ZOS_EXPECTED__
+#[IBM][CLI Driver][DB2%s] SQL0104N  An unexpected token "INT0" was found following "".  Expected tokens may include:  "INTO".  SQLSTATE=42601 SQLCODE=-104col: 0 , affected row: 0
+#__SYSTEMI_EXPECTED__
+#[IBM][CLI Driver][AS] SQL0104N  An unexpected token "INT0" was found following "".  Expected tokens may include:  "INTO".  SQLSTATE=42601 SQLCODE=-104col: 0 , affected row: 0
+#__IDS_EXPECTED__
+#[IBM][CLI Driver][IDS/%s] A syntax error has occurred. SQLCODE=-201col: 0 , affected row: 0
diff -pruN 0.3.0-3/tests/test_190_ColumnsTable_01.py 2.0.5-0ubuntu2/tests/test_190_ColumnsTable_01.py
--- 0.3.0-3/tests/test_190_ColumnsTable_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_190_ColumnsTable_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,102 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_190_ColumnsTable_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_190)
+
+  def run_test_190(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.columns(conn,None,config.user,"employee")
+      else:
+        result = ibm_db.columns(conn,None,None,"EMPLOYEE")
+
+      row = ibm_db.fetch_tuple(result)
+      while ( row ):
+        str = row[1] + "/" + row[3]
+        print str
+        row = ibm_db.fetch_tuple(result)
+      print "done!"
+    else:
+      print "no connection:", ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#%s/EMPNO
+#%s/FIRSTNME
+#%s/MIDINIT
+#%s/LASTNAME
+#%s/WORKDEPT
+#%s/PHONENO
+#%s/HIREDATE
+#%s/JOB
+#%s/EDLEVEL
+#%s/SEX
+#%s/BIRTHDATE
+#%s/SALARY
+#%s/BONUS
+#%s/COMM
+#done!
+#__ZOS_EXPECTED__
+#%s/EMPNO
+#%s/FIRSTNME
+#%s/MIDINIT
+#%s/LASTNAME
+#%s/WORKDEPT
+#%s/PHONENO
+#%s/HIREDATE
+#%s/JOB
+#%s/EDLEVEL
+#%s/SEX
+#%s/BIRTHDATE
+#%s/SALARY
+#%s/BONUS
+#%s/COMM
+#done!
+#__SYSTEMI_EXPECTED__
+#%s/EMPNO
+#%s/FIRSTNME
+#%s/MIDINIT
+#%s/LASTNAME
+#%s/WORKDEPT
+#%s/PHONENO
+#%s/HIREDATE
+#%s/JOB
+#%s/EDLEVEL
+#%s/SEX
+#%s/BIRTHDATE
+#%s/SALARY
+#%s/BONUS
+#%s/COMM
+#done!
+#__IDS_EXPECTED__
+#%s/empno
+#%s/firstnme
+#%s/midinit
+#%s/lastname
+#%s/workdept
+#%s/phoneno
+#%s/hiredate
+#%s/job
+#%s/edlevel
+#%s/sex
+#%s/birthdate
+#%s/salary
+#%s/bonus
+#%s/comm
+#done!
diff -pruN 0.3.0-3/tests/test_191_ColumnsTable_02.py 2.0.5-0ubuntu2/tests/test_191_ColumnsTable_02.py
--- 0.3.0-3/tests/test_191_ColumnsTable_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_191_ColumnsTable_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,66 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_191_ColumnsTable_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_191)
+
+  def run_test_191(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.columns(conn,None,config.user,"emp_photo");    
+      else:
+        result = ibm_db.columns(conn,None,None,"EMP_PHOTO");    
+
+      i = 0
+      row = ibm_db.fetch_both(result)
+      while ( row ):
+        if (server.DBMS_NAME[0:3] == 'IDS'):
+          if ( (row['column_name'] != 'emp_rowid') and (i < 3) ):
+            print "%s,%s,%s,%s\n" % (row['table_schem'], 
+            row['table_name'], row['column_name'], row['is_nullable'])
+        else :
+          if ( (row['COLUMN_NAME'] != 'EMP_ROWID') and (i < 3) ):
+            print"%s,%s,%s,%s\n" % (row['TABLE_SCHEM'], 
+            row['TABLE_NAME'], row['COLUMN_NAME'], row['IS_NULLABLE'])
+        i = i + 1
+        row = ibm_db.fetch_both(result)
+      print "done!"
+    else:
+      print "no connection: ", ibm_db.conn_errormsg()    
+
+#__END__
+#__LUW_EXPECTED__
+#%s,EMP_PHOTO,EMPNO,NO
+#%s,EMP_PHOTO,PHOTO_FORMAT,NO
+#%s,EMP_PHOTO,PICTURE,YES
+#done!
+#__ZOS_EXPECTED__
+#%s,EMP_PHOTO,EMPNO,NO
+#%s,EMP_PHOTO,PHOTO_FORMAT,NO
+#%s,EMP_PHOTO,PICTURE,YES
+#done!
+#__SYSTEMI_EXPECTED__
+#%s,EMP_PHOTO,EMPNO,NO
+#%s,EMP_PHOTO,PHOTO_FORMAT,NO
+#%s,EMP_PHOTO,PICTURE,YES
+#done!
+#__IDS_EXPECTED__
+#%s,emp_photo,empno,NO
+#%s,emp_photo,photo_format,NO
+#%s,emp_photo,picture,YES
+#done!
diff -pruN 0.3.0-3/tests/test_195_InsertRetrieveXMLData_01.py 2.0.5-0ubuntu2/tests/test_195_InsertRetrieveXMLData_01.py
--- 0.3.0-3/tests/test_195_InsertRetrieveXMLData_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_195_InsertRetrieveXMLData_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,55 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS does not support XML as a native datatype (test is invalid for IDS)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_195_InsertRetrieveXMLData_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_195)
+
+  def run_test_195(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if ((server.DBMS_NAME[0:3] != 'IDS') and (server.DBMS_NAME[0:2] != "AS")):
+      drop = 'DROP TABLE test_195'
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      create = 'CREATE TABLE test_195 (id INTEGER, data XML)'
+      result = ibm_db.exec_immediate(conn, create)
+    
+      insert = "INSERT INTO test_195 values (0, '<TEST><def><xml/></def></TEST>')"
+    
+      ibm_db.exec_immediate(conn, insert)
+    
+      sql =  "SELECT data FROM test_195"
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.execute(stmt)
+      result = ibm_db.fetch_assoc(stmt)
+      while( result ):
+        print "Output:", result
+        result = ibm_db.fetch_assoc(stmt)
+      ibm_db.close(conn)
+    else:
+      print "Native XML datatype is not supported."
+
+#__END__
+#__LUW_EXPECTED__
+#Output:%s<TEST><def><xml/></def></TEST>
+#__ZOS_EXPECTED__
+#Output:%s<TEST><def><xml/></def></TEST>
+#__SYSTEMI_EXPECTED__
+#Native XML datatype is not supported.
+#__IDS_EXPECTED__
+#Native XML datatype is not supported.
diff -pruN 0.3.0-3/tests/test_196_InsertRetrieveXMLData_02.py 2.0.5-0ubuntu2/tests/test_196_InsertRetrieveXMLData_02.py
--- 0.3.0-3/tests/test_196_InsertRetrieveXMLData_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_196_InsertRetrieveXMLData_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,75 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS does not support XML as a native datatype (test is invalid for IDS)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_196_InsertRetrieveXMLData_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_196)
+
+  def run_test_196(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if ((server.DBMS_NAME[0:3] != 'IDS') and (server.DBMS_NAME[0:2] != "AS")):
+      try:
+          rc = ibm_db.exec_immediate(conn, "DROP TABLE xml_test")
+      except:
+          pass
+      rc = ibm_db.exec_immediate(conn, "CREATE TABLE xml_test (id INTEGER, data VARCHAR(50), xmlcol XML)")
+      rc = ibm_db.exec_immediate(conn, "INSERT INTO xml_test (id, data, xmlcol) values (1, 'xml test 1', '<address><street>12485 S Pine St.</street><city>Olathe</city><state>KS</state><zip>66061</zip></address>')")
+
+      sql =  "SELECT * FROM xml_test"
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.execute(stmt)
+      result = ibm_db.fetch_both(stmt)
+      while( result ):
+        print "Result ID:", result[0]
+        print "Result DATA:", result[1]
+        print "Result XMLCOL:", result[2]
+        result = ibm_db.fetch_both(stmt)
+
+      sql = "SELECT XMLSERIALIZE(XMLQUERY('for $i in $t/address where $i/city = \"Olathe\" return <zip>{$i/zip/text()}</zip>' passing c.xmlcol as \"t\") AS CLOB(32k)) FROM xml_test c WHERE id = 1"
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.execute(stmt)
+      result = ibm_db.fetch_both(stmt)
+      while( result ):
+        print "Result from XMLSerialize and XMLQuery:", result[0]
+        result = ibm_db.fetch_both(stmt)
+
+      sql = "select xmlquery('for $i in $t/address where $i/city = \"Olathe\" return <zip>{$i/zip/text()}</zip>' passing c.xmlcol as \"t\") from xml_test c where id = 1"
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.execute(stmt)
+      result = ibm_db.fetch_both(stmt)
+      while( result ):
+        print "Result from only XMLQuery:", result[0]
+        result = ibm_db.fetch_both(stmt)
+    else:
+      print 'Native XML datatype is not supported.'
+
+#__END__
+#__LUW_EXPECTED__
+#Result ID: 1
+#Result DATA: xml test 1
+#Result XMLCOL:%s<address><street>12485 S Pine St.</street><city>Olathe</city><state>KS</state><zip>66061</zip></address>
+#Result from XMLSerialize and XMLQuery: <zip>66061</zip>
+#Result from only XMLQuery:%s<zip>66061</zip>
+#__ZOS_EXPECTED__
+#Result ID: 1
+#Result DATA: xml test 1
+#Result XMLCOL:%s<address><street>12485 S Pine St.</street><city>Olathe</city><state>KS</state><zip>66061</zip></address>
+#Result from XMLSerialize and XMLQuery: <zip>66061</zip>
+#Result from only XMLQuery:%s<zip>66061</zip>
+#__SYSTEMI_EXPECTED__
+#Native XML datatype is not supported.
+#__IDS_EXPECTED__
+#Native XML datatype is not supported.
diff -pruN 0.3.0-3/tests/test_197_StatisticsIndexes.py 2.0.5-0ubuntu2/tests/test_197_StatisticsIndexes.py
--- 0.3.0-3/tests/test_197_StatisticsIndexes.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_197_StatisticsIndexes.py	2014-01-31 06:29:56.000000000 +0000
@@ -0,0 +1,131 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+# NOTE: IDS requires that you pass the schema name (cannot pass None)
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_197_StatisticsIndexes(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_197)
+
+  def run_test_197(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    if conn:
+      try:
+          rc = ibm_db.exec_immediate(conn, "DROP TABLE index_test")
+      except:
+          pass
+      rc = ibm_db.exec_immediate(conn, "CREATE TABLE index_test (id INTEGER, data VARCHAR(50))")
+      rc = ibm_db.exec_immediate(conn, "CREATE UNIQUE INDEX index1 ON index_test (id)")
+
+      print "Test first index table:"
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.statistics(conn,None,config.user,"index_test",True)
+      else:
+        result = ibm_db.statistics(conn,None,None,"INDEX_TEST",True)
+      row = ibm_db.fetch_tuple(result)
+      ### skipping table info row. statistics returns information about table itself for informix ###
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        row = ibm_db.fetch_tuple(result)
+      print row[2]  # TABLE_NAME
+      print row[3]  # NON_UNIQUE
+      print row[5]  # INDEX_NAME
+      print row[8]  # COLUMN_NAME
+
+      try:
+          rc = ibm_db.exec_immediate(conn, "DROP TABLE index_test2")
+      except:
+          pass
+      rc = ibm_db.exec_immediate(conn, "CREATE TABLE index_test2 (id INTEGER, data VARCHAR(50))")
+      rc = ibm_db.exec_immediate(conn, "CREATE INDEX index2 ON index_test2 (data)")
+
+      print "Test second index table:"
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.statistics(conn,None,config.user,"index_test2",True)
+      else:
+        result = ibm_db.statistics(conn,None,None,"INDEX_TEST2",True)
+      row = ibm_db.fetch_tuple(result)
+      ### skipping table info row. statistics returns information about table itself for informix ###
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        row = ibm_db.fetch_tuple(result)
+      print row[2]  # TABLE_NAME
+      print row[3]  # NON_UNIQUE
+      print row[5]  # INDEX_NAME
+      print row[8]  # COLUMN_NAME
+
+      print "Test non-existent table:"
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        result = ibm_db.statistics(conn,None,config.user,"non_existent_table",True)
+      else:
+        result = ibm_db.statistics(conn,None,None,"NON_EXISTENT_TABLE",True)
+      row = ibm_db.fetch_tuple(result)
+      if row:
+        print "Non-Empty"
+      else:
+        print "Empty"
+    else:
+      print 'no connection: ' + ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#Test first index table:
+#INDEX_TEST
+#0
+#INDEX1
+#ID
+#Test second index table:
+#INDEX_TEST2
+#1
+#INDEX2
+#DATA
+#Test non-existent table:
+#Empty
+#__ZOS_EXPECTED__
+#Test first index table:
+#INDEX_TEST
+#0
+#INDEX1
+#ID
+#Test second index table:
+#INDEX_TEST2
+#1
+#INDEX2
+#DATA
+#Test non-existent table:
+#Empty
+#__SYSTEMI_EXPECTED__
+#Test first index table:
+#INDEX_TEST
+#0
+#INDEX1
+#ID
+#Test second index table:
+#INDEX_TEST2
+#1
+#INDEX2
+#DATA
+#Test non-existent table:
+#Empty
+#__IDS_EXPECTED__
+#Test first index table:
+#index_test
+#0
+#index1
+#id
+#Test second index table:
+#index_test2
+#1
+#index2
+#data
+#Test non-existent table:
+#Empty
diff -pruN 0.3.0-3/tests/test_200_MultipleRsltsetsUniformColDefs.py 2.0.5-0ubuntu2/tests/test_200_MultipleRsltsetsUniformColDefs.py
--- 0.3.0-3/tests/test_200_MultipleRsltsetsUniformColDefs.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_200_MultipleRsltsetsUniformColDefs.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,239 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_200_MultipleRsltsetsUniformColDefs(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_200)
+
+  def run_test_200(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    serverinfo = ibm_db.server_info( conn )
+    server = serverinfo.DBMS_NAME[0:3]
+    if (server == 'IDS'):
+       procedure = """
+        CREATE FUNCTION multiResults()
+         RETURNING CHAR(16), INT;
+                
+         DEFINE p_name CHAR(16);
+         DEFINE p_id INT;
+               
+         FOREACH c1 FOR
+             SELECT name, id
+              INTO p_name, p_id
+               FROM animals
+               ORDER BY name
+              RETURN p_name, p_id WITH RESUME;
+         END FOREACH;
+                
+       END FUNCTION;
+       """
+    else:
+       procedure = """
+        CREATE PROCEDURE multiResults ()
+        RESULT SETS 3
+        LANGUAGE SQL
+        BEGIN
+         DECLARE c1 CURSOR WITH RETURN FOR
+          SELECT name, id
+          FROM animals
+          ORDER BY name;
+    
+         DECLARE c2 CURSOR WITH RETURN FOR
+          SELECT name, id
+          FROM animals
+          WHERE id < 4
+          ORDER BY name DESC;
+    
+         DECLARE c3 CURSOR WITH RETURN FOR
+          SELECT name, id
+          FROM animals
+          WHERE weight < 5.0
+          ORDER BY name;
+    
+         OPEN c1;
+         OPEN c2;
+         OPEN c3;
+        END
+       """
+    
+    if conn:
+     try:
+       ibm_db.exec_immediate(conn, 'DROP PROCEDURE multiResults')
+     except:
+       pass
+     ibm_db.exec_immediate(conn, procedure)
+     stmt = ibm_db.exec_immediate(conn, 'CALL multiResults()')
+    
+     print "Fetching first result set"
+     row = ibm_db.fetch_tuple(stmt)
+     while ( row ):
+       for i in row:
+         print i
+       row = ibm_db.fetch_tuple(stmt)
+    
+     if (server == 'IDS'):
+       print "Fetching second result set (should fail -- IDS does not support multiple result sets)"
+     else:
+       print "Fetching second result set"
+     res = ibm_db.next_result (stmt)
+     if res:
+       row = ibm_db.fetch_tuple(res)
+       while ( row ):
+         for i in row:
+           print i
+         row = ibm_db.fetch_tuple(res)
+    
+     if (server == 'IDS'):
+       print "Fetching third result set (should fail -- IDS does not support multiple result sets)"
+     else:
+       print "Fetching third result set"
+     res2 = ibm_db.next_result(stmt)
+     if res2:
+       row = ibm_db.fetch_tuple(res2)
+       while ( row ):
+         for i in row:
+           print i
+         row = ibm_db.fetch_tuple(res2)
+    
+     print "Fetching fourth result set (should fail)"
+     res3 = ibm_db.next_result(stmt)
+     if res3:
+       row = ibm_db.fetch_tuple(res3)
+       while ( row ):
+         for i in row:
+           print i
+         row = ibm_db.fetch_tuple(res3)
+     
+     ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Fetching first result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Peaches         
+#1
+#Pook            
+#0
+#Rickety Ride    
+#5
+#Smarty          
+#2
+#Sweater         
+#6
+#Fetching second result set
+#Smarty          
+#2
+#Pook            
+#0
+#Peaches         
+#1
+#Bubbles         
+#3
+#Fetching third result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Pook            
+#0
+#Fetching fourth result set (should fail)
+#__ZOS_EXPECTED__
+#Fetching first result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Peaches         
+#1
+#Pook            
+#0
+#Rickety Ride    
+#5
+#Smarty          
+#2
+#Sweater         
+#6
+#Fetching second result set
+#Smarty          
+#2
+#Pook            
+#0
+#Peaches         
+#1
+#Bubbles         
+#3
+#Fetching third result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Pook            
+#0
+#Fetching fourth result set (should fail)
+#__SYSTEMI_EXPECTED__
+#Fetching first result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Peaches         
+#1
+#Pook            
+#0
+#Rickety Ride    
+#5
+#Smarty          
+#2
+#Sweater         
+#6
+#Fetching second result set
+#Smarty          
+#2
+#Pook            
+#0
+#Peaches         
+#1
+#Bubbles         
+#3
+#Fetching third result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Pook            
+#0
+#Fetching fourth result set (should fail)
+#__IDS_EXPECTED__
+#Fetching first result set
+#Bubbles         
+#3
+#Gizmo           
+#4
+#Peaches         
+#1
+#Pook            
+#0
+#Rickety Ride    
+#5
+#Smarty          
+#2
+#Sweater         
+#6
+#Fetching second result set (should fail -- IDS does not support multiple result sets)
+#Fetching third result set (should fail -- IDS does not support multiple result sets)
+#Fetching fourth result set (should fail)
diff -pruN 0.3.0-3/tests/test_201_MultipleRsltsetsDiffColDefs.py 2.0.5-0ubuntu2/tests/test_201_MultipleRsltsetsDiffColDefs.py
--- 0.3.0-3/tests/test_201_MultipleRsltsetsDiffColDefs.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_201_MultipleRsltsetsDiffColDefs.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,301 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_201_MultipleRsltsetsDiffColDefs(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_201)
+
+  def run_test_201(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    serverinfo = ibm_db.server_info( conn )
+    server = serverinfo.DBMS_NAME[0:3]
+    if (server == 'IDS'):
+        procedure = """CREATE FUNCTION multiResults ()
+           RETURNING CHAR(16), INT, VARCHAR(32), NUMERIC(7,2);
+           
+           DEFINE p_name CHAR(16);
+           DEFINE p_id INT;
+           DEFINE p_breed VARCHAR(32);
+           DEFINE p_weight NUMERIC(7,2);
+           
+           FOREACH c1 FOR
+              SELECT name, id, breed, weight
+              INTO p_name, p_id, p_breed, p_weight
+              FROM animals
+              ORDER BY name DESC
+              RETURN p_name, p_id, p_breed, p_weight WITH RESUME;
+           END FOREACH;
+    
+       END FUNCTION;"""
+    else:
+        procedure = """CREATE PROCEDURE multiResults ()
+        RESULT SETS 3
+        LANGUAGE SQL
+        BEGIN
+         DECLARE c1 CURSOR WITH RETURN FOR
+          SELECT name, id
+          FROM animals
+          ORDER BY name;
+    
+         DECLARE c2 CURSOR WITH RETURN FOR
+          SELECT name, id, breed, weight
+          FROM animals
+          ORDER BY name DESC;
+    
+         DECLARE c3 CURSOR WITH RETURN FOR
+          SELECT name
+          FROM animals
+          ORDER BY name;
+    
+         OPEN c1;
+         OPEN c2;
+         OPEN c3;
+        END"""
+    
+    if conn:
+        try:
+            ibm_db.exec_immediate(conn, 'DROP PROCEDURE multiResults')
+        except:
+            pass
+        ibm_db.exec_immediate(conn, procedure)
+        stmt = ibm_db.exec_immediate(conn, 'CALL multiResults()')
+    
+        print "Fetching first result set"
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+           for i in row:
+                print str(i).strip()
+           row = ibm_db.fetch_tuple(stmt)
+    
+        if (server == 'IDS') :
+           print "Fetching second result set (should fail -- IDS does not support multiple result sets)"
+        else:
+           print "Fetching second result set"
+        res = ibm_db.next_result(stmt)
+      
+        if res:
+           row = ibm_db.fetch_tuple(res)
+           while ( row ):
+                for i in row:
+                   print str(i).strip()
+                row = ibm_db.fetch_tuple(res)
+     
+        if (server == 'IDS'):
+           print "Fetching third result set (should fail -- IDS does not support multiple result sets)"
+        else:
+           print "Fetching third result set"
+        res2 = ibm_db.next_result(stmt)
+        if res2:
+           row = ibm_db.fetch_tuple(res2)
+           while ( row ):
+               for i in row: 
+                   print str(i).strip()
+               row = ibm_db.fetch_tuple(res2)
+        
+        ibm_db.close(conn)
+    else:
+       print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Fetching first result set
+#Bubbles
+#3
+#Gizmo
+#4
+#Peaches
+#1
+#Pook
+#0
+#Rickety Ride
+#5
+#Smarty
+#2
+#Sweater
+#6
+#Fetching second result set
+#Sweater
+#6
+#llama
+#150.00
+#Smarty
+#2
+#horse
+#350.00
+#Rickety Ride
+#5
+#goat
+#9.70
+#Pook
+#0
+#cat
+#3.20
+#Peaches
+#1
+#dog
+#12.30
+#Gizmo
+#4
+#budgerigar
+#0.20
+#Bubbles
+#3
+#gold fish
+#0.10
+#Fetching third result set
+#Bubbles
+#Gizmo
+#Peaches
+#Pook
+#Rickety Ride
+#Smarty
+#Sweater
+#__ZOS_EXPECTED__
+#Fetching first result set
+#Bubbles
+#3
+#Gizmo
+#4
+#Peaches
+#1
+#Pook
+#0
+#Rickety Ride
+#5
+#Smarty
+#2
+#Sweater
+#6
+#Fetching second result set
+#Sweater
+#6
+#llama
+#150.00
+#Smarty
+#2
+#horse
+#350.00
+#Rickety Ride
+#5
+#goat
+#9.70
+#Pook
+#0
+#cat
+#3.20
+#Peaches
+#1
+#dog
+#12.30
+#Gizmo
+#4
+#budgerigar
+#0.20
+#Bubbles
+#3
+#gold fish
+#0.10
+#Fetching third result set
+#Bubbles
+#Gizmo
+#Peaches
+#Pook
+#Rickety Ride
+#Smarty
+#Sweater
+#__SYSTEMI_EXPECTED__
+#Fetching first result set
+#Bubbles
+#3
+#Gizmo
+#4
+#Peaches
+#1
+#Pook
+#0
+#Rickety Ride
+#5
+#Smarty
+#2
+#Sweater
+#6
+#Fetching second result set
+#Sweater
+#6
+#llama
+#150.00
+#Smarty
+#2
+#horse
+#350.00
+#Rickety Ride
+#5
+#goat
+#9.70
+#Pook
+#0
+#cat
+#3.20
+#Peaches
+#1
+#dog
+#12.30
+#Gizmo
+#4
+#budgerigar
+#0.20
+#Bubbles
+#3
+#gold fish
+#0.10
+#Fetching third result set
+#Bubbles
+#Gizmo
+#Peaches
+#Pook
+#Rickety Ride
+#Smarty
+#Sweater
+#__IDS_EXPECTED__
+#Fetching first result set
+#Sweater
+#6
+#llama
+#150.00
+#Smarty
+#2
+#horse
+#350.00
+#Rickety Ride
+#5
+#goat
+#9.70
+#Pook
+#0
+#cat
+#3.20
+#Peaches
+#1
+#dog
+#12.30
+#Gizmo
+#4
+#budgerigar
+#0.20
+#Bubbles
+#3
+#gold fish
+#0.10
+#Fetching second result set (should fail -- IDS does not support multiple result sets)
+#Fetching third result set (should fail -- IDS does not support multiple result sets)
diff -pruN 0.3.0-3/tests/test_210_FieldDisplaySize_01.py 2.0.5-0ubuntu2/tests/test_210_FieldDisplaySize_01.py
--- 0.3.0-3/tests/test_210_FieldDisplaySize_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_210_FieldDisplaySize_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,62 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_210_FieldDisplaySize_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_210)
+
+  def run_test_210(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from staff")
+    cols = ibm_db.num_fields(result)
+    
+    for i in range(0, cols):
+      size = ibm_db.field_display_size(result,i)
+      print "col:%d and size: %d" % (i, size)
+    
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#col:0 and size: 6
+#col:1 and size: 9
+#col:2 and size: 6
+#col:3 and size: 5
+#col:4 and size: 6
+#col:5 and size: 9
+#col:6 and size: 9
+#__ZOS_EXPECTED__
+#col:0 and size: 6
+#col:1 and size: 9
+#col:2 and size: 6
+#col:3 and size: 5
+#col:4 and size: 6
+#col:5 and size: 9
+#col:6 and size: 9
+#__SYSTEMI_EXPECTED__
+#col:0 and size: 6
+#col:1 and size: 9
+#col:2 and size: 6
+#col:3 and size: 5
+#col:4 and size: 6
+#col:5 and size: 9
+#col:6 and size: 9
+#__IDS_EXPECTED__
+#col:0 and size: 6
+#col:1 and size: 9
+#col:2 and size: 6
+#col:3 and size: 5
+#col:4 and size: 6
+#col:5 and size: 9
+#col:6 and size: 9
diff -pruN 0.3.0-3/tests/test_211_FieldDisplaySize_02.py 2.0.5-0ubuntu2/tests/test_211_FieldDisplaySize_02.py
--- 0.3.0-3/tests/test_211_FieldDisplaySize_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_211_FieldDisplaySize_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,52 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_211_FieldDisplaySize_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_211)
+
+  def run_test_211(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    
+    i = 1
+    
+    while (i <= ibm_db.num_fields(result)):
+      #printf("%d size %d\n",i, ibm_db.field_display_size(result,i) || 0)
+      print "%d size %d" % (i, ibm_db.field_display_size(result,i) or 0)
+      i += 1
+    
+    ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#1 size 15
+#2 size 15
+#3 size 11
+#4 size 0
+#__ZOS_EXPECTED__
+#1 size 15
+#2 size 15
+#3 size 11
+#4 size 0
+#__SYSTEMI_EXPECTED__
+#1 size 15
+#2 size 15
+#3 size 11
+#4 size 0
+#__IDS_EXPECTED__
+#1 size 15
+#2 size 15
+#3 size 11
+#4 size 0
diff -pruN 0.3.0-3/tests/test_212_FieldDisplaySize_03.py 2.0.5-0ubuntu2/tests/test_212_FieldDisplaySize_03.py
--- 0.3.0-3/tests/test_212_FieldDisplaySize_03.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_212_FieldDisplaySize_03.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,46 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_212_FieldDisplaySize_03(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_212)
+
+  def run_test_212(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      i = "sales_person"
+    else:
+      i = "SALES_PERSON"
+    
+    print "%s size %d" % (i, ibm_db.field_display_size(result,i))
+    
+    i = 2
+    print "%d size %d" % (i, ibm_db.field_display_size(result,i))
+
+#__END__
+#__LUW_EXPECTED__
+#SALES_PERSON size 15
+#2 size 15
+#__ZOS_EXPECTED__
+#SALES_PERSON size 15
+#2 size 15
+#__SYSTEMI_EXPECTED__
+#SALES_PERSON size 15
+#2 size 15
+#__IDS_EXPECTED__
+#sales_person size 15
+#2 size 15
diff -pruN 0.3.0-3/tests/test_213_FieldDisplaySize_04.py 2.0.5-0ubuntu2/tests/test_213_FieldDisplaySize_04.py
--- 0.3.0-3/tests/test_213_FieldDisplaySize_04.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_213_FieldDisplaySize_04.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,91 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_213_FieldDisplaySize_04(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_213)
+
+  def run_test_213(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    
+    i = "sales_person"
+    
+    print "%s size %d\n" % (i, (ibm_db.field_display_size(result,i) or 0))
+    
+    i = "REGION"
+    
+    print "%s size %d\n" % (i, (ibm_db.field_display_size(result,i) or 0))
+    
+    i = "REgion"
+    
+    print "%s size %d\n" % (i, (ibm_db.field_display_size(result,i) or 0))
+    
+    i = "HELMUT"
+    
+    print "%s size %d\n" % (i, (ibm_db.field_display_size(result,i) or 0))
+    
+    t = ibm_db.field_display_size(result,"")
+    
+    print t
+    
+    t = ibm_db.field_display_size(result,"HELMUT")
+    
+    print t
+    
+    t = ibm_db.field_display_size(result,"Region")
+    
+    print t
+    
+    t = ibm_db.field_display_size(result,"SALES_DATE")
+    
+    print t
+
+#__END__
+#__LUW_EXPECTED__
+#sales_person size 0
+#REGION size 15
+#REgion size 0
+#HELMUT size 0
+#False
+#False
+#False
+#10
+#__ZOS_EXPECTED__
+#sales_person size 0
+#REGION size 15
+#REgion size 0
+#HELMUT size 0
+#False
+#False
+#False
+#10
+#__SYSTEMI_EXPECTED__
+#sales_person size 0
+#REGION size 15
+#REgion size 0
+#HELMUT size 0
+#False
+#False
+#False
+#10
+#__IDS_EXPECTED__
+#sales_person size 15
+#REGION size 0
+#REgion size 0
+#HELMUT size 0
+#False
+#False
+#False
+#False
diff -pruN 0.3.0-3/tests/test_220_PersistentConn.py 2.0.5-0ubuntu2/tests/test_220_PersistentConn.py
--- 0.3.0-3/tests/test_220_PersistentConn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_220_PersistentConn.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,38 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_220_PersistentConn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_220)
+
+  def run_test_220(self):
+    conn = ibm_db.pconnect(config.database, config.user, config.password)
+    
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      stmt = ibm_db.exec_immediate(conn, "UPDATE animals SET name = 'flyweight' WHERE weight < 10.0")
+      print "Number of affected rows:", ibm_db.num_rows( stmt )
+      ibm_db.rollback(conn)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: 4
+#__ZOS_EXPECTED__
+#Number of affected rows: 4
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: 4
+#__IDS_EXPECTED__
+#Number of affected rows: 4
diff -pruN 0.3.0-3/tests/test_221_100PersistentConns.py 2.0.5-0ubuntu2/tests/test_221_100PersistentConns.py
--- 0.3.0-3/tests/test_221_100PersistentConns.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_221_100PersistentConns.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,42 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_221_100PersistentConns(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_221)
+
+  def run_test_221(self):
+    pconn = range(100)
+    
+    for i in range(100):
+      pconn[i] = ibm_db.pconnect(config.database, config.user, config.password)
+    
+    if pconn[33]:
+      conn = pconn[22]
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+      stmt = ibm_db.exec_immediate(pconn[33], "UPDATE animals SET name = 'flyweight' WHERE weight < 10.0")
+      print "Number of affected rows:", ibm_db.num_rows( stmt )
+      ibm_db.rollback(conn)
+      ibm_db.close(pconn[33])
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: 4
+#__ZOS_EXPECTED__
+#Number of affected rows: 4
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: 4
+#__IDS_EXPECTED__
+#Number of affected rows: 4
diff -pruN 0.3.0-3/tests/test_230_FieldTypePos.py 2.0.5-0ubuntu2/tests/test_230_FieldTypePos.py
--- 0.3.0-3/tests/test_230_FieldTypePos.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_230_FieldTypePos.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,139 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_230_FieldTypePos(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_230)
+
+  def run_test_230(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    result2 = ibm_db.exec_immediate(conn, "select * from staff")
+    result3 = ibm_db.exec_immediate(conn, "select * from emp_photo")
+    
+    for i in range(0, ibm_db.num_fields(result) + 1):
+      print str(i) + ":" + str(ibm_db.field_type(result,i))
+    
+    print "\n-----"
+    
+    for i in range(0, ibm_db.num_fields(result2)):
+      print str(i) + ":" + str(ibm_db.field_type(result2,i))
+      
+    print "\n-----"
+
+    for i in range(0, 3):
+      print str(i) + ":" + str(ibm_db.field_type(result3,i))
+
+    print "\n-----"
+    
+    print "region:%s" % str(ibm_db.field_type(result,'region'))
+    print "5:%s" % str(ibm_db.field_type(result2,5))
+
+#__END__
+#__LUW_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__ZOS_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__SYSTEMI_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__IDS_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:string
+#5:decimal
diff -pruN 0.3.0-3/tests/test_231_FieldTypeName.py 2.0.5-0ubuntu2/tests/test_231_FieldTypeName.py
--- 0.3.0-3/tests/test_231_FieldTypeName.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_231_FieldTypeName.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,139 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_231_FieldTypeName(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_231)
+
+  def run_test_231(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    result2 = ibm_db.exec_immediate(conn, "select * from staff")
+    result3 = ibm_db.exec_immediate(conn, "select * from emp_photo")
+    
+    for i in range(0, ibm_db.num_fields(result) + 1):
+      print str(i) + ":" + str(ibm_db.field_type(result,ibm_db.field_name(result,i)))
+    
+    print "\n-----"
+    
+    for i in range(0, ibm_db.num_fields(result2)):
+      print str(i) + ":" + ibm_db.field_type(result2,ibm_db.field_name(result2,i))
+    
+    print "\n-----"
+    
+    for i in range(0, 3):
+      print str(i) + ":" + ibm_db.field_type(result3,ibm_db.field_name(result3,i))
+    
+    print "\n-----"
+    
+    print "region:%s" % ibm_db.field_type(result,'region')
+    print "5:%s" % ibm_db.field_type(result2,5)
+
+#__END__
+#__LUW_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__ZOS_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__SYSTEMI_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:False
+#5:decimal
+#__IDS_EXPECTED__
+#0:date
+#1:string
+#2:string
+#3:int
+#4:False
+#
+#-----
+#0:int
+#1:string
+#2:int
+#3:string
+#4:int
+#5:decimal
+#6:decimal
+#
+#-----
+#0:string
+#1:string
+#2:blob
+#
+#-----
+#region:string
+#5:decimal
diff -pruN 0.3.0-3/tests/test_232_FieldTypePosName.py 2.0.5-0ubuntu2/tests/test_232_FieldTypePosName.py
--- 0.3.0-3/tests/test_232_FieldTypePosName.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_232_FieldTypePosName.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,72 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_232_FieldTypePosName(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_232)
+
+  def run_test_232(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+     
+    for i in range(0, ibm_db.num_fields(result) + 1):
+      field_name = ibm_db.field_name(result,i)
+      field_type = ibm_db.field_type(result, ibm_db.field_name(result,i))
+      print str(ibm_db.field_name(result, i)) + ":" + str(ibm_db.field_type(result, ibm_db.field_name(result, i)))
+          
+    print "-----"
+    
+    t = ibm_db.field_type(result,99)
+    print t
+    
+    t1 = ibm_db.field_type(result, "HELMUT")
+    print t1
+
+#__END__
+#__LUW_EXPECTED__
+#SALES_DATE:date
+#SALES_PERSON:string
+#REGION:string
+#SALES:int
+#False:False
+#-----
+#False
+#False
+#__ZOS_EXPECTED__
+#SALES_DATE:date
+#SALES_PERSON:string
+#REGION:string
+#SALES:int
+#False:False
+#-----
+#False
+#False
+#__SYSTEMI_EXPECTED__
+#SALES_DATE:date
+#SALES_PERSON:string
+#REGION:string
+#SALES:int
+#False:False
+#-----
+#False
+#False
+#__IDS_EXPECTED__
+#sales_date:date
+#sales_person:string
+#region:string
+#sales:int
+#False:False
+#-----
+#False
+#False
diff -pruN 0.3.0-3/tests/test_240_FieldWidthPosName_01.py 2.0.5-0ubuntu2/tests/test_240_FieldWidthPosName_01.py
--- 0.3.0-3/tests/test_240_FieldWidthPosName_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_240_FieldWidthPosName_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,135 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_240_FieldWidthPosName_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_240)
+
+  def run_test_240(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    result2 = ibm_db.exec_immediate(conn, "select * from staff")
+    result3 = ibm_db.exec_immediate(conn, "select * from emp_photo")
+    
+    for i in range(0, ibm_db.num_fields(result)):
+      print str(i) + ":" + str(ibm_db.field_width(result,i))
+    
+    print "\n-----"
+    
+    for i in range(0, ibm_db.num_fields(result2)):
+      print str(i) + ":" + str(ibm_db.field_width(result2,ibm_db.field_name(result2,i)))
+          
+    print "\n-----"
+    
+    for i in range(0, 3):
+      print str(i) + ":" + str(ibm_db.field_width(result3,i)) + "," + str(ibm_db.field_display_size(result3,i))
+    
+    print "\n-----"
+    print "region:%s" % ibm_db.field_type(result,'region')
+    
+    print "5:%s" % ibm_db.field_type(result2,5)
+
+#__END__
+#__LUW_EXPECTED__
+#0:10
+#1:15
+#2:15
+#3:11
+#
+#-----
+#0:6
+#1:9
+#2:6
+#3:5
+#4:6
+#5:9
+#6:9
+#
+#-----
+#0:6,6
+#1:10,10
+#2:1048576,2097152
+#
+#-----
+#region:False
+#5:decimal
+#__ZOS_EXPECTED__
+#0:10
+#1:15
+#2:15
+#3:11
+#
+#-----
+#0:6
+#1:9
+#2:6
+#3:5
+#4:6
+#5:9
+#6:9
+#
+#-----
+#0:6,6
+#1:10,10
+#2:1048576,2097152
+#
+#-----
+#region:False
+#5:decimal
+#__SYSTEMI_EXPECTED__
+#0:10
+#1:15
+#2:15
+#3:11
+#
+#-----
+#0:6
+#1:9
+#2:6
+#3:5
+#4:6
+#5:9
+#6:9
+#
+#-----
+#0:6,6
+#1:10,10
+#2:1048576,2097152
+#
+#-----
+#region:False
+#5:decimal
+#__IDS_EXPECTED__
+#0:10
+#1:15
+#2:15
+#3:11
+#
+#-----
+#0:6
+#1:9
+#2:6
+#3:5
+#4:6
+#5:9
+#6:9
+#
+#-----
+#0:6,6
+#1:10,10
+#2:2147483647,-2
+#
+#-----
+#region:string
+#5:decimal
diff -pruN 0.3.0-3/tests/test_241_FieldWidthPosName_02.py 2.0.5-0ubuntu2/tests/test_241_FieldWidthPosName_02.py
--- 0.3.0-3/tests/test_241_FieldWidthPosName_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_241_FieldWidthPosName_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,89 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_241_FieldWidthPosName_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_241)
+
+  def run_test_241(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    result2 = ibm_db.exec_immediate(conn, "select * from staff")
+    result3 = ibm_db.exec_immediate(conn, "select * from emp_photo")
+    
+    for i in range(0, ibm_db.num_fields(result)):
+      print str(ibm_db.field_width(result,i))
+    
+    print "\n-----"
+    
+    for i in range(0, ibm_db.num_fields(result2)):
+      print str(ibm_db.field_width(result2,ibm_db.field_name(result2,i)))
+
+#__END__
+#__LUW_EXPECTED__
+#10
+#15
+#15
+#11
+#
+#-----
+#6
+#9
+#6
+#5
+#6
+#9
+#9
+#__ZOS_EXPECTED__
+#10
+#15
+#15
+#11
+#
+#-----
+#6
+#9
+#6
+#5
+#6
+#9
+#9
+#__SYSTEMI_EXPECTED__
+#10
+#15
+#15
+#11
+#
+#-----
+#6
+#9
+#6
+#5
+#6
+#9
+#9
+#__IDS_EXPECTED__
+#10
+#15
+#15
+#11
+#
+#-----
+#6
+#9
+#6
+#5
+#6
+#9
+#9
diff -pruN 0.3.0-3/tests/test_250_FreeResult_01.py 2.0.5-0ubuntu2/tests/test_250_FreeResult_01.py
--- 0.3.0-3/tests/test_250_FreeResult_01.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_250_FreeResult_01.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,49 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_250_FreeResult_01(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_250)
+
+  def run_test_250(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    result2 = ibm_db.exec_immediate(conn, "select * from staff")
+    result3 = ibm_db.exec_immediate(conn, "select * from emp_photo")
+    
+    r1 = ibm_db.free_result(result)
+    r2 = ibm_db.free_result(result2)
+    r3 = ibm_db.free_result(result3)
+    
+    print r1
+    print r2
+    print r3
+
+#__END__
+#__LUW_EXPECTED__
+#True
+#True
+#True
+#__ZOS_EXPECTED__
+#True
+#True
+#True
+#__SYSTEMI_EXPECTED__
+#True
+#True
+#True
+#__IDS_EXPECTED__
+#True
+#True
+#True
diff -pruN 0.3.0-3/tests/test_251_FreeResult_02.py 2.0.5-0ubuntu2/tests/test_251_FreeResult_02.py
--- 0.3.0-3/tests/test_251_FreeResult_02.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_251_FreeResult_02.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,51 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_251_FreeResult_02(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_251)
+
+  def run_test_251(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    result = ibm_db.exec_immediate(conn, "select * from sales")
+    
+    r1 = ibm_db.free_result(result)
+    r2 = ibm_db.free_result(result)
+    r3 = ''
+    try:
+      r3 = ibm_db.free_result(result99)
+    except:
+      r3 = None
+    
+    print r1
+    print r2
+    print r3
+
+#__END__
+#__LUW_EXPECTED__
+#True
+#True
+#None
+#__ZOS_EXPECTED__
+#True
+#True
+#None
+#__SYSTEMI_EXPECTED__
+#True
+#True
+#None
+#__IDS_EXPECTED__
+#True
+#True
+#None
diff -pruN 0.3.0-3/tests/test_260_FetchTupleMany_07.py 2.0.5-0ubuntu2/tests/test_260_FetchTupleMany_07.py
--- 0.3.0-3/tests/test_260_FetchTupleMany_07.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_260_FetchTupleMany_07.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,55 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_260_FetchTupleMany_07(self):
+    obj = IbmDbTestFunctions() 
+    obj.assert_expect(self.run_test_260)
+
+  def run_test_260(self):
+      conn = ibm_db.connect(config.database, config.user, config.password)
+      
+      if conn:
+        stmt = ibm_db.exec_immediate(conn, "SELECT id, breed, name, weight FROM animals WHERE id = 0")
+        
+        row = ibm_db.fetch_tuple(stmt)
+        while ( row ):
+            for i in row:
+                print i
+            row = ibm_db.fetch_tuple(stmt)
+        
+        ibm_db.close(conn)
+        
+      else:
+        print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__ZOS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__SYSTEMI_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
+#__IDS_EXPECTED__
+#0
+#cat
+#Pook            
+#3.20
diff -pruN 0.3.0-3/tests/test_261_FetchObjectAccess.py 2.0.5-0ubuntu2/tests/test_261_FetchObjectAccess.py
--- 0.3.0-3/tests/test_261_FetchObjectAccess.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_261_FetchObjectAccess.py	2014-01-31 11:00:24.000000000 +0000
@@ -0,0 +1,66 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_261_FetchObjectAccess(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_261)
+
+  def run_test_261(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    server = ibm_db.server_info( conn )
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+      ibm_db.set_option(conn, op, 1)
+
+    if (server.DBMS_NAME[0:3] == 'IDS'):
+      sql = "SELECT breed, TRIM(TRAILING FROM name) AS name FROM animals WHERE id = ?"
+    else:
+      sql = "SELECT breed, RTRIM(name) AS name FROM animals WHERE id = ?"
+
+    if conn:
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.execute(stmt, (0,))
+
+#      NOTE: This is a workaround
+#      function fetch_object() to be implemented...
+#      pet = ibm_db.fetch_object(stmt)
+#      while (pet):
+#          print "Come here, %s, my little %s!" % (pet.NAME, pet.BREED)
+#          pet = ibm_db.fetch_object(stmt)
+      
+      class Pet:
+          pass
+      
+      data = ibm_db.fetch_assoc(stmt)
+      while ( data ):
+         pet = Pet()
+         pet.NAME = data['NAME']
+         pet.BREED = data['BREED']
+         print "Come here, %s, my little %s!" % (pet.NAME, pet.BREED)
+         data = ibm_db.fetch_assoc(stmt)
+         
+      ibm_db.close(conn)
+      
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Come here, Pook, my little cat!
+#__ZOS_EXPECTED__
+#Come here, Pook, my little cat!
+#__SYSTEMI_EXPECTED__
+#Come here, Pook, my little cat!
+#__IDS_EXPECTED__
+#Come here, Pook, my little cat!
diff -pruN 0.3.0-3/tests/test_264_InsertRetrieveBIGINTTypeColumn.py 2.0.5-0ubuntu2/tests/test_264_InsertRetrieveBIGINTTypeColumn.py
--- 0.3.0-3/tests/test_264_InsertRetrieveBIGINTTypeColumn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_264_InsertRetrieveBIGINTTypeColumn.py	2014-01-31 11:00:55.000000000 +0000
@@ -0,0 +1,97 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_264_InsertRetrieveBIGINTTypeColumn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_264)
+
+  def run_test_264(self):
+    # Make a connection
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if conn:
+       server = ibm_db.server_info( conn )
+       if (server.DBMS_NAME[0:3] == 'IDS'):
+          op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+          ibm_db.set_option(conn, op, 1)
+
+       # Drop the tab_bigint table, in case it exists
+       drop = 'DROP TABLE tab_bigint'
+       result = ''
+       try:
+         result = ibm_db.exec_immediate(conn, drop)
+       except:
+         pass
+       # Create the tab_bigint table
+       if (server.DBMS_NAME[0:3] == 'IDS'):
+          create = "CREATE TABLE tab_bigint (col1 INT8, col2 INT8, col3 INT8, col4 INT8)"
+       else:
+          create = "CREATE TABLE tab_bigint (col1 BIGINT, col2 BIGINT, col3 BIGINT, col4 BIGINT)"
+       result = ibm_db.exec_immediate(conn, create)
+
+       insert = "INSERT INTO tab_bigint values (-9223372036854775807, 9223372036854775807, 0, NULL)"
+       res = ibm_db.exec_immediate(conn, insert)
+       print "Number of inserted rows:", ibm_db.num_rows(res)
+
+       stmt = ibm_db.prepare(conn, "SELECT * FROM tab_bigint")
+       ibm_db.execute(stmt)
+       data = ibm_db.fetch_both(stmt)
+       while ( data ):
+         print data[0]
+         print data[1]
+         print data[2]
+         print data[3]
+         print type(data[0]) is long
+         print type(data[1]) is long 
+         print type(data[2]) is long
+         data = ibm_db.fetch_both(stmt)
+
+       ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#Number of inserted rows: 1
+#-9223372036854775807
+#9223372036854775807
+#0
+#None
+#True
+#True
+#True
+#__ZOS_EXPECTED__
+#Number of inserted rows: 1
+#-9223372036854775807
+#9223372036854775807
+#0
+#None
+#True
+#True
+#True
+#__SYSTEMI_EXPECTED__
+#Number of inserted rows: 1
+#-9223372036854775807
+#9223372036854775807
+#0
+#None
+#True
+#True
+#True
+#__IDS_EXPECTED__
+#Number of inserted rows: 1
+#-9223372036854775807
+#9223372036854775807
+#0
+#None
+#True
+#True
+#True
diff -pruN 0.3.0-3/tests/test_265_NoAffectedRows.py 2.0.5-0ubuntu2/tests/test_265_NoAffectedRows.py
--- 0.3.0-3/tests/test_265_NoAffectedRows.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_265_NoAffectedRows.py	2014-02-04 08:13:23.000000000 +0000
@@ -0,0 +1,189 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2013
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_265_NoAffectedRows(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_265)
+
+  def run_test_265(self):
+    # Make a connection
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    cursor_option = {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_STATIC}
+
+    if conn:
+      server = ibm_db.server_info( conn )
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+         op = {ibm_db.ATTR_CASE: ibm_db.CASE_UPPER}
+         ibm_db.set_option(conn, op, 1)
+
+      try:
+        sql = 'drop table test'
+
+        stmt = ibm_db.prepare(conn, sql)
+        ibm_db.set_option(stmt, cursor_option, 0)
+        ibm_db.execute(stmt)
+        print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+      except:
+        pass
+
+      if ((server.DBMS_NAME[0:3] == 'IDS') or (server.DBMS_NAME[0:2] == "AS")):
+        sql = "create table test(id integer, name VARCHAR(10), clob_col CLOB, some_var VARCHAR(100) )"
+      else:
+        sql = "create table test(id integer, name VARCHAR(10), clob_col CLOB, some_var XML )"
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+
+      sql = 'select id from test'
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+
+      sql = "insert into test values( 1, 'some', 'here is a clob value', '<?xml version=\"1.0\" encoding=\"UTF-8\" ?><test attribute=\"value\"/>')"
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+
+      sql = "insert into test values(2, 'value', 'clob data', NULL)"
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+
+      sql = "insert into test values(2, 'in varchar', 'data2', NULL)"
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+
+      sql = 'select * from test'
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+      row = ibm_db.fetch_tuple(stmt)
+      while ( row ):
+        print "%s, %s, %s, %s\n" %(row[0], row[1], row[2], ((row[3] is not None) and row[3].startswith(u'\ufeff')) and  row[3][1:] or  row[3])
+        row = ibm_db.fetch_tuple(stmt)
+
+      sql = 'select id, name from test where id = ?'
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      ibm_db.execute(stmt, (2,))
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+      row = ibm_db.fetch_tuple(stmt)
+      while ( row ):
+        print "%s, %s\n" %(row[0], row[1])
+        row = ibm_db.fetch_tuple(stmt)
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        sql = "select * from test"
+      else:
+        sql = 'select * from test fetch first 12 rows only optimize for 12 rows'
+
+      stmt = ibm_db.prepare(conn, sql)
+      ibm_db.set_option(stmt, cursor_option, 0)
+      #ibm_db.num_fields(stmt)
+      ibm_db.execute(stmt)
+      print "Number of affected rows: %d" % ibm_db.get_num_result(stmt)
+      row = ibm_db.fetch_tuple(stmt)
+      while ( row ):
+        print "%s, %s, %s, %s\n" %(row[0], row[1], row[2], ((row[3] is not None) and row[3].startswith(u'\ufeff')) and  row[3][1:] or  row[3])
+        row = ibm_db.fetch_tuple(stmt)
+
+      ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: 0
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#Number of affected rows: 2
+#2, value
+#2, in varchar
+#Number of affected rows: -1
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#__ZOS_EXPECTED__
+#Number of affected rows: -2
+#Number of affected rows: -2
+#Number of affected rows: 0
+#Number of affected rows: -2
+#Number of affected rows: -1
+#Number of affected rows: -2
+#Number of affected rows: 0
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#Number of affected rows: 2
+#2, value
+#2, in varchar
+#Number of affected rows: 0
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#__SYSTEMI_EXPECTED__
+#Number of affected rows: -2
+#Number of affected rows: -2
+#Number of affected rows: 0
+#Number of affected rows: -2
+#Number of affected rows: -1
+#Number of affected rows: -2
+#Number of affected rows: 0
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#Number of affected rows: 2
+#2, value
+#2, in varchar
+#Number of affected rows: 0
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#__IDS_EXPECTED__
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: -1
+#Number of affected rows: 3
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
+#Number of affected rows: 2
+#2, value
+#2, in varchar
+#Number of affected rows: 3
+#1, some, here is a clob value, <?xml version="1.0" encoding="UTF-16" ?><test attribute="value"/>
+#2, value, clob data, None
+#2, in varchar, data2, None
diff -pruN 0.3.0-3/tests/test_300_ServerInfo.py 2.0.5-0ubuntu2/tests/test_300_ServerInfo.py
--- 0.3.0-3/tests/test_300_ServerInfo.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_300_ServerInfo.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,146 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_300_ServerInfo(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_300)
+
+  def run_test_300(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    server = ibm_db.server_info(conn)
+    
+    if server:
+      print "DBMS_NAME: string(%d) \"%s\"" % (len(server.DBMS_NAME), server.DBMS_NAME)
+      print "DBMS_VER: string(%d) \"%s\"" % (len(server.DBMS_VER), server.DBMS_VER)
+      print "DB_CODEPAGE: int(%d)" % server.DB_CODEPAGE
+      print "DB_NAME: string(%d) \"%s\"" % (len(server.DB_NAME), server.DB_NAME)
+      print "INST_NAME: string(%d) \"%s\"" % (len(server.INST_NAME), server.INST_NAME)
+      print "SPECIAL_CHARS: string(%d) \"%s\"" % (len(server.SPECIAL_CHARS), server.SPECIAL_CHARS)
+      print "KEYWORDS: int(%d)" % len(server.KEYWORDS)
+      print "DFT_ISOLATION: string(%d) \"%s\"" % (len(server.DFT_ISOLATION), server.DFT_ISOLATION)
+      il = ''
+      for opt in server.ISOLATION_OPTION:
+        il += opt + " "
+      print "ISOLATION_OPTION: string(%d) \"%s\"" % (len(il), il)
+      print "SQL_CONFORMANCE: string(%d) \"%s\"" % (len(server.SQL_CONFORMANCE), server.SQL_CONFORMANCE)
+      print "PROCEDURES:", server.PROCEDURES
+      print "IDENTIFIER_QUOTE_CHAR: string(%d) \"%s\"" % (len(server.IDENTIFIER_QUOTE_CHAR), server.IDENTIFIER_QUOTE_CHAR)
+      print "LIKE_ESCAPE_CLAUSE:", server.LIKE_ESCAPE_CLAUSE
+      print "MAX_COL_NAME_LEN: int(%d)" % server.MAX_COL_NAME_LEN
+      print "MAX_ROW_SIZE: int(%d)" % server.MAX_ROW_SIZE
+      print "MAX_IDENTIFIER_LEN: int(%d)" % server.MAX_IDENTIFIER_LEN
+      print "MAX_INDEX_SIZE: int(%d)" % server.MAX_INDEX_SIZE
+      print "MAX_PROC_NAME_LEN: int(%d)" % server.MAX_PROC_NAME_LEN
+      print "MAX_SCHEMA_NAME_LEN: int(%d)" % server.MAX_SCHEMA_NAME_LEN
+      print "MAX_STATEMENT_LEN: int(%d)" % server.MAX_STATEMENT_LEN
+      print "MAX_TABLE_NAME_LEN: int(%d)" % server.MAX_TABLE_NAME_LEN
+      print "NON_NULLABLE_COLUMNS:", server.NON_NULLABLE_COLUMNS
+    
+      ibm_db.close(conn)
+    else:
+      print "Error."
+
+#__END__
+#__LUW_EXPECTED__
+#DBMS_NAME: string(%d) %s
+#DBMS_VER: string(%d) %s
+#DB_CODEPAGE: int(%d)
+#DB_NAME: string(%d) %s
+#INST_NAME: string(%d) %s
+#SPECIAL_CHARS: string(%d) %s
+#KEYWORDS: int(%d)
+#DFT_ISOLATION: string(%d) %s
+#ISOLATION_OPTION: string(%d) %s
+#SQL_CONFORMANCE: string(%d) %s
+#PROCEDURES: %s
+#IDENTIFIER_QUOTE_CHAR: string(%d) %s
+#LIKE_ESCAPE_CLAUSE: %s
+#MAX_COL_NAME_LEN: int(%d)
+#MAX_ROW_SIZE: int(%d)
+#MAX_IDENTIFIER_LEN: int(%d)
+#MAX_INDEX_SIZE: int(%d)
+#MAX_PROC_NAME_LEN: int(%d)
+#MAX_SCHEMA_NAME_LEN: int(%d)
+#MAX_STATEMENT_LEN: int(%d)
+#MAX_TABLE_NAME_LEN: int(%d)
+#NON_NULLABLE_COLUMNS: %s
+#__ZOS_EXPECTED__
+#DBMS_NAME: string(%d) %s
+#DBMS_VER: string(%d) %s
+#DB_CODEPAGE: int(%d)
+#DB_NAME: string(%d) %s
+#INST_NAME: string(%d) %s
+#SPECIAL_CHARS: string(%d) %s
+#KEYWORDS: int(%d)
+#DFT_ISOLATION: string(%d) %s
+#ISOLATION_OPTION: string(%d) %s
+#SQL_CONFORMANCE: string(%d) %s
+#PROCEDURES: %s
+#IDENTIFIER_QUOTE_CHAR: string(%d) %s
+#LIKE_ESCAPE_CLAUSE: %s
+#MAX_COL_NAME_LEN: int(%d)
+#MAX_ROW_SIZE: int(%d)
+#MAX_IDENTIFIER_LEN: int(%d)
+#MAX_INDEX_SIZE: int(%d)
+#MAX_PROC_NAME_LEN: int(%d)
+#MAX_SCHEMA_NAME_LEN: int(%d)
+#MAX_STATEMENT_LEN: int(%d)
+#MAX_TABLE_NAME_LEN: int(%d)
+#NON_NULLABLE_COLUMNS: %s
+#__SYSTEMI_EXPECTED__
+#DBMS_NAME: string(%d) %s
+#DBMS_VER: string(%d) %s
+#DB_CODEPAGE: int(%d)
+#DB_NAME: string(%d) %s
+#INST_NAME: string(%d) %s
+#SPECIAL_CHARS: string(%d) %s
+#KEYWORDS: int(%d)
+#DFT_ISOLATION: string(%d) %s
+#ISOLATION_OPTION: string(%d) %s
+#SQL_CONFORMANCE: string(%d) %s
+#PROCEDURES: %s
+#IDENTIFIER_QUOTE_CHAR: string(%d) %s
+#LIKE_ESCAPE_CLAUSE: %s
+#MAX_COL_NAME_LEN: int(%d)
+#MAX_ROW_SIZE: int(%d)
+#MAX_IDENTIFIER_LEN: int(%d)
+#MAX_INDEX_SIZE: int(%d)
+#MAX_PROC_NAME_LEN: int(%d)
+#MAX_SCHEMA_NAME_LEN: int(%d)
+#MAX_STATEMENT_LEN: int(%d)
+#MAX_TABLE_NAME_LEN: int(%d)
+#NON_NULLABLE_COLUMNS: %s
+#__IDS_EXPECTED__
+#DBMS_NAME: string(%d) %s
+#DBMS_VER: string(%d) %s
+#DB_CODEPAGE: int(%d)
+#DB_NAME: string(%d) %s
+#INST_NAME: string(%d) %s
+#SPECIAL_CHARS: string(%d) %s
+#KEYWORDS: int(%d)
+#DFT_ISOLATION: string(%d) %s
+#ISOLATION_OPTION: string(%d) %s
+#SQL_CONFORMANCE: string(%d) %s
+#PROCEDURES: %s
+#IDENTIFIER_QUOTE_CHAR: string(%d) %s
+#LIKE_ESCAPE_CLAUSE: %s
+#MAX_COL_NAME_LEN: int(%d)
+#MAX_ROW_SIZE: int(%d)
+#MAX_IDENTIFIER_LEN: int(%d)
+#MAX_INDEX_SIZE: int(%d)
+#MAX_PROC_NAME_LEN: int(%d)
+#MAX_SCHEMA_NAME_LEN: int(%d)
+#MAX_STATEMENT_LEN: int(%d)
+#MAX_TABLE_NAME_LEN: int(%d)
+#NON_NULLABLE_COLUMNS: %s
diff -pruN 0.3.0-3/tests/test_310_ClientInfo.py 2.0.5-0ubuntu2/tests/test_310_ClientInfo.py
--- 0.3.0-3/tests/test_310_ClientInfo.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_310_ClientInfo.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,73 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_310_ClientInfo(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_310)
+
+  def run_test_310(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    client = ibm_db.client_info(conn)
+    
+    if client:
+      print "DRIVER_NAME: string(%d) \"%s\"" % (len(client.DRIVER_NAME), client.DRIVER_NAME)
+      print "DRIVER_VER: string(%d) \"%s\"" % (len(client.DRIVER_VER), client.DRIVER_VER)
+      print "DATA_SOURCE_NAME: string(%d) \"%s\"" % (len(client.DATA_SOURCE_NAME), client.DATA_SOURCE_NAME)
+      print "DRIVER_ODBC_VER: string(%d) \"%s\"" % (len(client.DRIVER_ODBC_VER), client.DRIVER_ODBC_VER)
+      print "ODBC_VER: string(%d) \"%s\"" % (len(client.ODBC_VER), client.ODBC_VER)
+      print "ODBC_SQL_CONFORMANCE: string(%d) \"%s\"" % (len(client.ODBC_SQL_CONFORMANCE), client.ODBC_SQL_CONFORMANCE)
+      print "APPL_CODEPAGE: int(%s)" % client.APPL_CODEPAGE
+      print "CONN_CODEPAGE: int(%s)" % client.CONN_CODEPAGE
+    
+      ibm_db.close(conn)
+    else:
+      print "Error."
+
+#__END__
+#__LUW_EXPECTED__
+#DRIVER_NAME: string(%d) %s
+#DRIVER_VER: string(%d) %s
+#DATA_SOURCE_NAME: string(%d) %s
+#DRIVER_ODBC_VER: string(%d) %s
+#ODBC_VER: string(%d) %s
+#ODBC_SQL_CONFORMANCE: string(%d) %s
+#APPL_CODEPAGE: int(%d)
+#CONN_CODEPAGE: int(%d)
+#__ZOS_EXPECTED__
+#DRIVER_NAME: string(%d) %s
+#DRIVER_VER: string(%d) %s
+#DATA_SOURCE_NAME: string(%d) %s
+#DRIVER_ODBC_VER: string(%d) %s
+#ODBC_VER: string(%d) %s
+#ODBC_SQL_CONFORMANCE: string(%d) %s
+#APPL_CODEPAGE: int(%d)
+#CONN_CODEPAGE: int(%d)
+#__SYSTEMI_EXPECTED__
+#DRIVER_NAME: string(%d) %s
+#DRIVER_VER: string(%d) %s
+#DATA_SOURCE_NAME: string(%d) %s
+#DRIVER_ODBC_VER: string(%d) %s
+#ODBC_VER: string(%d) %s
+#ODBC_SQL_CONFORMANCE: string(%d) %s
+#APPL_CODEPAGE: int(%d)
+#CONN_CODEPAGE: int(%d)
+#__IDS_EXPECTED__
+#DRIVER_NAME: string(%d) %s
+#DRIVER_VER: string(%d) %s
+#DATA_SOURCE_NAME: string(%d) %s
+#DRIVER_ODBC_VER: string(%d) %s
+#ODBC_VER: string(%d) %s
+#ODBC_SQL_CONFORMANCE: string(%d) %s
+#APPL_CODEPAGE: int(%d)
+#CONN_CODEPAGE: int(%d)
diff -pruN 0.3.0-3/tests/test_311_InsertSelectDeleteNumLiterals.py 2.0.5-0ubuntu2/tests/test_311_InsertSelectDeleteNumLiterals.py
--- 0.3.0-3/tests/test_311_InsertSelectDeleteNumLiterals.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_311_InsertSelectDeleteNumLiterals.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,118 @@
+#
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_311_InsertSelectDeleteNumLiterals(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_311)
+
+  def run_test_311(self):
+    # Make a connection
+    conn = ibm_db.connect(config.database, config.user, config.password)
+
+    if conn:
+       ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_ON )
+
+       # Drop the tab_num_literals table, in case it exists
+       drop = 'DROP TABLE tab_num_literals'
+       result = ''
+       try:
+         result = ibm_db.exec_immediate(conn, drop)
+       except:
+         pass
+       # Create the animal table
+       create = "CREATE TABLE tab_num_literals (col1 INTEGER, col2 FLOAT, col3 DECIMAL(7,2))"
+       result = ibm_db.exec_immediate(conn, create)
+   
+       insert = "INSERT INTO tab_num_literals values ('11.22', '33.44', '55.66')"
+       res = ibm_db.exec_immediate(conn, insert)
+       print "Number of inserted rows:", ibm_db.num_rows(res)
+
+       stmt = ibm_db.prepare(conn, "SELECT col1, col2, col3 FROM tab_num_literals WHERE col1 = '11'")
+       ibm_db.execute(stmt)
+       data = ibm_db.fetch_both(stmt)
+       while ( data ):
+         print data[0]
+         print data[1]
+         print data[2]
+         data = ibm_db.fetch_both(stmt)
+
+       sql = "UPDATE tab_num_literals SET col1 = 77 WHERE col2 = 33.44"
+       res = ibm_db.exec_immediate(conn, sql)
+       print "Number of updated rows:", ibm_db.num_rows(res)
+
+       stmt = ibm_db.prepare(conn, "SELECT col1, col2, col3 FROM tab_num_literals WHERE col2 > '33'")
+       ibm_db.execute(stmt)
+       data = ibm_db.fetch_both(stmt)
+       while ( data ):
+         print data[0]
+         print data[1]
+         print data[2]
+         data = ibm_db.fetch_both(stmt)
+	 
+       sql = "DELETE FROM tab_num_literals WHERE col1 > '10.0'"
+       res = ibm_db.exec_immediate(conn, sql)
+       print "Number of deleted rows:", ibm_db.num_rows(res)
+
+       stmt = ibm_db.prepare(conn, "SELECT col1, col2, col3 FROM tab_num_literals WHERE col3 < '56'")
+       ibm_db.execute(stmt)
+       data = ibm_db.fetch_both(stmt)
+       while ( data ):
+         print data[0]
+         print data[1]
+         print data[2]
+         data = ibm_db.fetch_both(stmt)
+
+       ibm_db.rollback(conn)
+       ibm_db.close(conn)
+
+#__END__
+#__LUW_EXPECTED__
+#Number of inserted rows: 1
+#11
+#33.44
+#55.66
+#Number of updated rows: 1
+#77
+#33.44
+#55.66
+#Number of deleted rows: 1
+#__ZOS_EXPECTED__
+#Number of inserted rows: 1
+#11
+#33.44
+#55.66
+#Number of updated rows: 1
+#77
+#33.44
+#55.66
+#Number of deleted rows: 1
+#__SYSTEMI_EXPECTED__
+#Number of inserted rows: 1
+#11
+#33.44
+#55.66
+#Number of updated rows: 1
+#77
+#33.44
+#55.66
+#Number of deleted rows: 1
+#__IDS_EXPECTED__
+#Number of inserted rows: 1
+#11
+#33.44
+#55.66
+#Number of updated rows: 1
+#77
+#33.44
+#55.66
+#Number of deleted rows: 1
diff -pruN 0.3.0-3/tests/test_52949_TestSPIntVarcharXml.py 2.0.5-0ubuntu2/tests/test_52949_TestSPIntVarcharXml.py
--- 0.3.0-3/tests/test_52949_TestSPIntVarcharXml.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_52949_TestSPIntVarcharXml.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,117 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_52949_TestSPIntVarcharXml(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_52949)
+
+  def test_int(self, conn):
+    return_value = 0
+    stmt, return_value = ibm_db.callproc(conn, 'PROCESSINT', (return_value,))
+    print "ProcessINT:", return_value
+
+  def test_varchar(self, conn):
+    return_value = ""
+    stmt, return_value = ibm_db.callproc(conn, 'PROCESSVAR', (return_value,))
+    print "ProcessVAR:", return_value
+
+  def test_xml(self, conn):
+    return_value = "This is just a test for XML Column. The data gets truncated since we do not "
+    stmt, return_value = ibm_db.callproc(conn, 'PROCESSXML', (return_value,))
+    print "ProcessXML:", return_value.__str__()
+    
+  def drop_tables(self, conn):
+    if conn:
+     dr = "DROP PROCEDURE processxml"
+     try:
+       ibm_db.exec_immediate(conn, dr)
+     except:
+       pass
+     try:
+       dr = "DROP PROCEDURE processint"
+       ibm_db.exec_immediate(conn, dr)
+     except:
+       pass
+     try:
+       dr = "DROP PROCEDURE processvar"
+       ibm_db.exec_immediate(conn, dr)
+     except:
+       pass
+     try:
+       dr = "DROP TABLE test_stored"
+       ibm_db.exec_immediate(conn, dr)
+     except:
+       pass
+
+  def run_test_52949(self):
+   conn = ibm_db.connect(config.database, config.user, config.password)
+
+   if conn:
+     serverinfo = ibm_db.server_info(conn )
+     server = serverinfo.DBMS_NAME[0:3]
+     result = ''
+     self.drop_tables(conn)
+
+     try:
+       cr1 = "CREATE TABLE test_stored (id INT, name VARCHAR(50), age int, cv XML)"
+       result = ibm_db.exec_immediate(conn, cr1)
+       in1 = "INSERT INTO test_stored values (1, 'Kellen', 24, '<example>This is an example</example>')"
+       result = ibm_db.exec_immediate(conn, in1)
+       st1 = "CREATE PROCEDURE processxml(OUT risorsa xml) LANGUAGE SQL BEGIN SELECT cv INTO risorsa FROM test_stored WHERE ID = 1; END"
+       result = ibm_db.exec_immediate(conn, st1)
+
+       #self.test_xml(conn)
+     except:
+       pass
+   
+     try:
+       self.drop_tables(conn)
+       cr1 = "CREATE TABLE test_stored (id INT, name VARCHAR(50), age int, cv VARCHAR(200))"
+       result = ibm_db.exec_immediate(conn, cr1)
+       in1 = "INSERT INTO test_stored values (1, 'Kellen', 24, '<example>This is an example</example>')"
+       result = ibm_db.exec_immediate(conn, in1)
+     except:
+       pass
+
+     if (server == 'IDS'):
+        st2 = "CREATE PROCEDURE processint(OUT risorsa int); SELECT age INTO risorsa FROM test_stored WHERE ID = 1; END PROCEDURE;"
+     else:
+        st2 = "CREATE PROCEDURE processint(OUT risorsa int) LANGUAGE SQL BEGIN SELECT age INTO risorsa FROM test_stored WHERE ID = 1; END"
+     result = ibm_db.exec_immediate(conn, st2)
+     
+     if (server == 'IDS'):
+        st3 = "CREATE PROCEDURE processvar(OUT risorsa varchar(50)); SELECT name INTO risorsa FROM test_stored WHERE ID = 1; END PROCEDURE;"
+     else:
+        st3 = "CREATE PROCEDURE processvar(OUT risorsa varchar(50)) LANGUAGE SQL BEGIN SELECT name INTO risorsa FROM test_stored WHERE ID = 1; END"
+     result = ibm_db.exec_immediate(conn, st3)
+
+     self.test_int(conn)
+     self.test_varchar(conn)
+
+     ibm_db.close(conn)
+   else:
+     print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#ProcessINT: 24
+#ProcessVAR: Kellen
+#__ZOS_EXPECTED__
+#ProcessINT: 24
+#ProcessVAR: Kellen
+#__SYSTEMI_EXPECTED__
+#ProcessINT: 24
+#ProcessVAR: Kellen
+#__IDS_EXPECTED__
+#ProcessINT: 24
+#ProcessVAR: Kellen
diff -pruN 0.3.0-3/tests/test_6528_ScopingProblemBindParam.py 2.0.5-0ubuntu2/tests/test_6528_ScopingProblemBindParam.py
--- 0.3.0-3/tests/test_6528_ScopingProblemBindParam.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_6528_ScopingProblemBindParam.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,49 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_6528_ScopingProblemBindParam(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_6528)
+
+  def checked_db2_execute(self, stmt):
+    ibm_db.execute(stmt)
+    row = ibm_db.fetch_tuple(stmt)
+    for i in row:
+      print i
+     
+  def run_test_6528(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    if conn:
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        sql = "SELECT TRIM(TRAILING FROM name) FROM animals WHERE breed = ?"
+      else:
+        sql = "SELECT RTRIM(name) FROM animals WHERE breed = ?"
+      stmt = ibm_db.prepare(conn, sql)
+      var = "cat"
+      ibm_db.bind_param(stmt, 1, var, ibm_db.SQL_PARAM_INPUT)
+      self.checked_db2_execute(stmt)
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#Pook
+#__ZOS_EXPECTED__
+#Pook
+#__SYSTEMI_EXPECTED__
+#Pook
+#__IDS_EXPECTED__
+#Pook
diff -pruN 0.3.0-3/tests/test_6561_InsertNULLValues.py 2.0.5-0ubuntu2/tests/test_6561_InsertNULLValues.py
--- 0.3.0-3/tests/test_6561_InsertNULLValues.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_6561_InsertNULLValues.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,50 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_6561_InsertNULLValues(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_6561)
+
+  def run_test_6561(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
+
+      stmt = ibm_db.exec_immediate(conn, "INSERT INTO animals (id, breed, name, weight) VALUES (null, null, null, null)")
+      statement = "SELECT count(id) FROM animals"
+      result = ibm_db.exec_immediate(conn, statement)
+      if ( (not result) and ibm_db.stmt_error() ):
+        print "ERROR: %s" % (ibm_db.stmt_errormsg(), )
+
+      row = ibm_db.fetch_tuple(result)
+      while ( row ):
+        for i in row:
+            print i
+        row = ibm_db.fetch_tuple(result)
+    
+      ibm_db.rollback(conn)
+      ibm_db.close(conn)
+      
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#7
+#__ZOS_EXPECTED__
+#7
+#__SYSTEMI_EXPECTED__
+#7
+#__IDS_EXPECTED__
+#7
diff -pruN 0.3.0-3/tests/test_6755_ExtraNULLChar_ResultCLOBCol.py 2.0.5-0ubuntu2/tests/test_6755_ExtraNULLChar_ResultCLOBCol.py
--- 0.3.0-3/tests/test_6755_ExtraNULLChar_ResultCLOBCol.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_6755_ExtraNULLChar_ResultCLOBCol.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,63 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_6755_ExtraNULLChar_ResultCLOBCol(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_6755)
+
+  def run_test_6755(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    server = ibm_db.server_info( conn )
+    
+    if conn:
+      drop = 'DROP TABLE table_6755'
+      result = ''
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        create = 'CREATE TABLE table_6755 (col1 VARCHAR(20), col2 CLOB)'
+        insert = "INSERT INTO table_6755 VALUES ('database', 'database')"
+      else:
+        create = 'CREATE TABLE table_6755 (col1 VARCHAR(20), col2 CLOB(20))'
+        insert = "INSERT INTO table_6755 VALUES ('database', 'database')"
+      result = ibm_db.exec_immediate(conn, create)
+      result = ibm_db.exec_immediate(conn, insert)
+      statement = "SELECT col1, col2 FROM table_6755"
+    
+      result = ibm_db.prepare(conn, statement)
+      ibm_db.execute(result)
+    
+      row = ibm_db.fetch_tuple(result)
+      while ( row ):
+        #printf("\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long.\n",
+        #    row[0], row[0].length,
+        #    row[1], row[1].length)
+        print "\"%s\" from VARCHAR is %d bytes long, \"%s\" from CLOB is %d bytes long." % (row[0], len(row[0]), row[1], len(row[1]))
+        row = ibm_db.fetch_tuple(result)
+      
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#"database" from VARCHAR is 8 bytes long, "database" from CLOB is 8 bytes long.
+#__ZOS_EXPECTED__
+#"database" from VARCHAR is 8 bytes long, "database" from CLOB is 8 bytes long.
+#__SYSTEMI_EXPECTED__
+#"database" from VARCHAR is 8 bytes long, "database" from CLOB is 8 bytes long.
+#__IDS_EXPECTED__
+#"database" from VARCHAR is 8 bytes long, "database" from CLOB is 8 bytes long.
diff -pruN 0.3.0-3/tests/test_6792_FieldTypeRetStrDatetimeTimestamp.py 2.0.5-0ubuntu2/tests/test_6792_FieldTypeRetStrDatetimeTimestamp.py
--- 0.3.0-3/tests/test_6792_FieldTypeRetStrDatetimeTimestamp.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_6792_FieldTypeRetStrDatetimeTimestamp.py	2014-01-31 09:52:10.000000000 +0000
@@ -0,0 +1,109 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2013
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_6792_FieldTypeRetStrDatetimeTimestamp(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expect(self.run_test_6792)
+
+  def run_test_6792(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      drop = 'DROP TABLE table_6792'
+      result = ''
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+
+      t_val = '10:42:34'
+      d_val = '1981-07-08'
+      ts_val = '1981-07-08 10:42:34'
+      ts_withT_val = '2013-06-06T15:30:39'
+      
+      server = ibm_db.server_info( conn )
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        statement = "CREATE TABLE table_6792 (col1 DATETIME HOUR TO SECOND, col2 DATE, col3 DATETIME YEAR TO SECOND, col4 DATETIME YEAR TO SECOND)"
+        result = ibm_db.exec_immediate(conn, statement)
+        statement = "INSERT INTO table_6792 (col1, col2, col3) values (?, ?, ?)"
+        stmt = ibm_db.prepare(conn, statement)
+        result = ibm_db.execute(stmt, (t_val, d_val, ts_val))
+      else:
+        statement = "CREATE TABLE table_6792 (col1 TIME, col2 DATE, col3 TIMESTAMP, col4 TIMESTAMP)"
+        result = ibm_db.exec_immediate(conn, statement)
+        statement = "INSERT INTO table_6792 (col1, col2, col3, col4) values (?, ?, ?, ?)"
+        stmt = ibm_db.prepare(conn, statement)
+        result = ibm_db.execute(stmt, (t_val, d_val, ts_val, ts_withT_val))
+
+      statement = "SELECT * FROM table_6792"
+      result = ibm_db.exec_immediate(conn, statement)
+      
+      for i in range(0, ibm_db.num_fields(result)):
+        print str(i) + ":" + ibm_db.field_type(result,i)
+
+      statement = "SELECT * FROM table_6792"
+      stmt = ibm_db.prepare(conn, statement)
+      rc = ibm_db.execute(stmt)
+      result = ibm_db.fetch_row(stmt)
+      while ( result ):
+        row0 = ibm_db.result(stmt, 0)
+        row1 = ibm_db.result(stmt, 1)
+        row2 = ibm_db.result(stmt, 2)
+        row3 = ibm_db.result(stmt, 3)
+        print row0
+        print row1
+        print row2
+        print row3
+        result = ibm_db.fetch_row(stmt)
+      
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#3:timestamp
+#10:42:34
+#1981-07-08
+#1981-07-08 10:42:34
+#2013-06-06 15:30:39
+#__ZOS_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#3:timestamp
+#10:42:34
+#1981-07-08
+#1981-07-08 10:42:34
+#2013-06-06 15:30:39
+#__SYSTEMI_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#3:timestamp
+#10:42:34
+#1981-07-08
+#1981-07-08 10:42:34
+#2013-06-06 15:30:39
+#__IDS_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#3:timestamp
+#10:42:34
+#1981-07-08
+#1981-07-08 10:42:34
+#2013-06-06 15:30:39
\ No newline at end of file
diff -pruN 0.3.0-3/tests/test_createdbNX.py 2.0.5-0ubuntu2/tests/test_createdbNX.py
--- 0.3.0-3/tests/test_createdbNX.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_createdbNX.py	2014-01-31 10:57:34.000000000 +0000
@@ -0,0 +1,95 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+    def test_createdbNX(self):
+        obj = IbmDbTestFunctions()
+        if ((obj.server.DBMS_NAME == "DB2") or (obj.server.DBMS_NAME[0:3] != "DB2")):
+            raise unittest.SkipTest("createdbNX not Supported")
+        obj.assert_expect(self.run_test_createdbNX)
+
+    def run_test_createdbNX(self):
+        database = 'test001'
+        conn_str = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (database, config.hostname, config.port, config.user, config.password)
+        conn_str_attach = "attach=true;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.hostname, config.port, config.user, config.password) #for the createdb/dropdb API it is necessary that the connection only attaches to the DB server, not to any existing database on the DB server
+        conn_attach = ibm_db.connect(conn_str_attach, '', '')
+
+        if conn_attach:
+            conn = False
+            try:
+                conn = ibm_db.connect(conn_str, '', '')
+            except:
+                pass
+
+            if conn:
+                ibm_db.close(conn)
+                conn = False
+                try:
+                    ibm_db.dropdb(conn_attach, database)
+                except:
+                    print 'Errors occurred during drop database'
+            try:        
+                # call createdbNX without the codeset argument when the specified database does not exist
+                rc = ibm_db.createdbNX(conn_attach, database)
+                if rc:
+                    conn = ibm_db.connect(conn_str, '', '')
+                    if conn:
+                        print 'database created sucessfully'
+                        ibm_db.close(conn)
+                        conn = False
+                    else:
+                        print 'database is not created'
+                else:
+                    print 'Error occurred during create db if not exist'
+
+                conn = ibm_db.connect(conn_str, '', '')
+                if conn:
+                    ibm_db.close(conn)
+                    conn = False
+                    # call createdbNX again, with the codeset argument, when the specified database already exists
+                    rc = ibm_db.createdbNX(conn_attach, database, 'iso88591')
+                    if rc:
+                        conn = ibm_db.connect(conn_str, '', '')
+                        server_info = ibm_db.server_info( conn )
+                        if conn and (server_info.DB_CODEPAGE != 819):
+                            print 'database with codeset created sucessfully'
+                            ibm_db.close(conn)
+                            conn = False
+                        else:
+                            print 'Database not created'
+                    else:
+                        print 'Error occurred during create db if not exist with codeset'
+                        
+                #drop database
+                rc = ibm_db.dropdb(conn_attach, database)
+                if rc:
+                    try:
+                        conn = ibm_db.connect(conn_str, '', '')
+                    except:
+                        print 'datbase droped sucessfully'
+                    if conn:
+                        print 'Errors occurred during drop database'
+                        ibm_db.close(conn)
+                        conn = False
+                else:
+                    print 'Errors occurred during drop database'
+            except:
+                print ibm_db.conn_errormsg()
+                pass
+            ibm_db.close(conn_attach)
+        else:
+            print ibm_db.conn_errormsg()
+            
+#__END__
+#__LUW_EXPECTED__
+#database created sucessfully
+#database with codeset created sucessfully
+#datbase droped sucessfully
diff -pruN 0.3.0-3/tests/test_createDropDB.py 2.0.5-0ubuntu2/tests/test_createDropDB.py
--- 0.3.0-3/tests/test_createDropDB.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_createDropDB.py	2014-01-31 10:56:04.000000000 +0000
@@ -0,0 +1,106 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+    def test_createDropDB(self):
+        obj = IbmDbTestFunctions()
+        if ((obj.server.DBMS_NAME == "DB2") or (obj.server.DBMS_NAME[0:3] != "DB2")):
+            raise unittest.SkipTest("createdb, dropdb not Supported")
+        obj.assert_expect(self.run_test_createDropDB)
+
+    def run_test_createDropDB(self):
+        database = 'test001'
+        conn_str = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (database, config.hostname, config.port, config.user, config.password)
+        conn_str_attach = "attach=true;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.hostname, config.port, config.user, config.password) #for the createdb/dropdb API it is necessary that the connection only attaches to the DB server, not to any existing database on the DB server
+        conn_attach = ibm_db.connect(conn_str_attach, '', '')
+
+        if conn_attach:
+            conn = False
+            try:
+                conn = ibm_db.connect(conn_str, '', '')
+            except:
+                pass
+
+            if conn:
+                ibm_db.close(conn)
+                conn = False
+                try:
+                    ibm_db.dropdb(conn_attach, database)
+                except:
+                    print 'Errors occurred during drop database'
+            try:        
+                #create database
+                rc = ibm_db.createdb(conn_attach, database)
+                if rc:
+                    conn = ibm_db.connect(conn_str, '', '')
+                    if conn:
+                        print 'database created sucessfully'
+                        ibm_db.close(conn)
+                        conn = False
+                    else:
+                        print 'database is not created'
+                else:
+                    print 'Errors occurred during create database'
+
+                #drop database
+                rc = ibm_db.dropdb(conn_attach, database)
+                if rc:
+                    try:
+                        conn = ibm_db.connect(conn_str, '', '')
+                    except:
+                        print 'datbase droped sucessfully'
+                    if conn:
+                        print 'Errors occurred during drop database'
+                        ibm_db.close(conn)
+                        conn = False
+                else:
+                    print 'Errors occurred during delete database'
+                    
+                #create database with codeset option
+                rc = ibm_db.createdb(conn_attach, database, 'iso88591')
+                if rc:
+                    conn = ibm_db.connect(conn_str, '', '')
+                    server_info = ibm_db.server_info( conn )
+                    if conn and (server_info.DB_CODEPAGE == 819):
+                        print 'database with codeset created sucessfully'
+                        ibm_db.close(conn)
+                        conn = False
+                    else:
+                        print 'database is not created'
+                else:
+                    print 'Errors occurred during create database'
+
+                #drop database
+                rc = ibm_db.dropdb(conn_attach, database)
+                if rc:
+                    try:
+                        conn = ibm_db.connect(conn_str, '', '')
+                    except:
+                        print 'datbase droped sucessfully'
+                    if conn:
+                        print 'Errors occurred during drop database'
+                        ibm_db.close(conn)
+                        conn = False
+                else:
+                    print 'Errors occurred during drop database'
+            except:
+                print ibm_db.conn_errormsg()
+                pass
+            ibm_db.close(conn_attach)
+        else:
+            print ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#database created sucessfully
+#datbase droped sucessfully
+#database with codeset created sucessfully
+#datbase droped sucessfully
diff -pruN 0.3.0-3/tests/test_decfloat.py 2.0.5-0ubuntu2/tests/test_decfloat.py
--- 0.3.0-3/tests/test_decfloat.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_decfloat.py	2014-01-31 11:40:51.000000000 +0000
@@ -0,0 +1,118 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2014
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+    def test_decfloat(self):
+        obj = IbmDbTestFunctions()
+        obj.assert_expect(self.run_test_decfloat)
+	
+    def run_test_decfloat(self):
+        conn = ibm_db.connect(config.database, config.user, config.password)
+		
+        if conn:
+            serverinfo = ibm_db.server_info( conn )
+			
+            drop = "DROP TABLE STOCKPRICE"
+            try:
+                result = ibm_db.exec_immediate(conn,drop)
+            except:
+                pass
+			
+            # Create the table stockprice
+            if (serverinfo.DBMS_NAME[0:3] == 'IDS'):
+                create = "CREATE TABLE STOCKPRICE (id SMALLINT NOT NULL, company VARCHAR(30), stockshare DECIMAL(7,2), stockprice DECIMAL(16))"
+            else:
+                create = "CREATE TABLE STOCKPRICE (id SMALLINT NOT NULL, company VARCHAR(30), stockshare DECIMAL(7,2), stockprice DECFLOAT(16))"
+            result = ibm_db.exec_immediate(conn, create)
+			
+            # Insert Directly
+            insert = "INSERT INTO STOCKPRICE (id, company, stockshare, stockprice) VALUES (10,'Megadeth', 100.002, 990.356736488388374888532323)"
+            result = ibm_db.exec_immediate(conn, insert)
+			
+            # Prepare and Insert in the stockprice table
+            stockprice = (\
+                    (20, "Zaral", 102.205, "100.234"),\
+                    (30, "Megabyte", 98.65, "1002.112"),\
+                    (40, "Visarsoft", 123.34, "1652.345"),\
+                    (50, "Mailersoft", 134.22, "1643.126"),\
+                    (60, "Kaerci", 100.97, "9876.765")\
+                )
+            insert = 'INSERT INTO STOCKPRICE (id, company, stockshare,stockprice) VALUES (?,?,?,?)'
+            stmt = ibm_db.prepare(conn,insert)
+            if stmt:
+                for company in stockprice:
+                    result = ibm_db.execute(stmt,company)
+			
+            id = 70
+            company = 'Nirvana'
+            stockshare = 100.1234
+            stockprice = "100.567"
+            try:
+                ibm_db.bind_param(stmt, 1, id)
+                ibm_db.bind_param(stmt, 2, company)
+                ibm_db.bind_param(stmt, 3, stockshare)
+                ibm_db.bind_param(stmt, 4, stockprice)
+                error = ibm_db.execute(stmt);
+            except:
+                excp = sys.exc_info()
+                # slot 1 contains error message
+                print excp[1]
+			
+            # Select the result from the table and
+            query = 'SELECT * FROM STOCKPRICE ORDER BY id'
+            if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+                stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+            else:
+                stmt = ibm_db.prepare(conn, query)
+            ibm_db.execute(stmt)
+            data = ibm_db.fetch_both( stmt )
+            while ( data ):
+                print "%s : %s : %s : %s\n" % (data[0], data[1], data[2], data[3])
+                data = ibm_db.fetch_both( stmt )
+            try:
+                stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE:  ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+                ibm_db.execute(stmt)
+                rc = ibm_db.fetch_row(stmt, -1)
+                print "Fetch Row -1:%s " %str(rc)
+            except:
+                print "Requested row number must be a positive value"
+            ibm_db.close(conn)
+        else:
+            print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#10 : Megadeth : 100.00 : 990.3567364883884
+#20 : Zaral : 102.20 : 100.234
+#30 : Megabyte : 98.65 : 1002.112
+#40 : Visarsoft : 123.34 : 1652.345
+#50 : Mailersoft : 134.22 : 1643.126
+#60 : Kaerci : 100.97 : 9876.765
+#70 : Nirvana : 100.12 : 100.567
+#Requested row number must be a positive value
+#__ZOS_EXPECTED__
+#10 : Megadeth : 100.00 : 990.3567364883884
+#20 : Zaral : 102.20 : 100.234
+#30 : Megabyte : 98.65 : 1002.112
+#40 : Visarsoft : 123.34 : 1652.345
+#50 : Mailersoft : 134.22 : 1643.126
+#60 : Kaerci : 100.97 : 9876.765
+#70 : Nirvana : 100.12 : 100.567
+#Requested row number must be a positive value
+#__IDS_EXPECTED__
+#10 : Megadeth : 100.00 : 990.356736488
+#20 : Zaral : 102.20 : 100.234
+#30 : Megabyte : 98.65 : 1002.112
+#40 : Visarsoft : 123.34 : 1652.345
+#50 : Mailersoft : 134.22 : 1643.126
+#60 : Kaerci : 100.97 : 9876.765
+#70 : Nirvana : 100.12 : 100.567
+#Requested row number must be a positive value
diff -pruN 0.3.0-3/tests/test_decimal.py 2.0.5-0ubuntu2/tests/test_decimal.py
--- 0.3.0-3/tests/test_decimal.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_decimal.py	2014-01-31 11:18:13.000000000 +0000
@@ -0,0 +1,114 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+from decimal import Decimal
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+	def test_decimal(self):
+		obj = IbmDbTestFunctions()
+		obj.assert_expect(self.run_test_decimal)
+	
+	def run_test_decimal(self):
+		conn = ibm_db.connect(config.database, config.user, config.password)
+		
+		if conn:
+			serverinfo = ibm_db.server_info( conn )
+			
+			drop = "DROP TABLE STOCKSHARE"
+			try:
+				result = ibm_db.exec_immediate(conn,drop)
+			except:
+				pass
+			
+			# Create the table stockprice
+			create = "CREATE TABLE STOCKSHARE (id SMALLINT NOT NULL, company VARCHAR(30), stockshare DECIMAL(7, 2))"
+			result = ibm_db.exec_immediate(conn, create)
+			
+			# Insert Directly
+			insert = "INSERT INTO STOCKSHARE (id, company, stockshare) VALUES (10, 'Megadeth', 100.002)"
+			result = ibm_db.exec_immediate(conn, insert)
+			
+			# Prepare and Insert in the stockprice table
+			stockprice = (\
+					(20, "Zaral", 102.205),\
+					(30, "Megabyte", "98.65"),\
+					(40, "Visarsoft", Decimal("123.34")),\
+					(50, "Mailersoft", Decimal("134.222")),\
+					(60, "Kaerci", Decimal("100.976"))\
+					)
+			insert = 'INSERT INTO STOCKSHARE (id, company, stockshare) VALUES (?,?,?)'
+			stmt = ibm_db.prepare(conn,insert)
+			if stmt:
+				for company in stockprice:
+					result = ibm_db.execute(stmt,company)
+			
+			id = 70
+			company = 'Nirvana'
+			stockshare = Decimal("100.1234")
+			try:
+				ibm_db.bind_param(stmt, 1, id)
+				ibm_db.bind_param(stmt, 2, company)
+				ibm_db.bind_param(stmt, 3, stockshare)
+				error = ibm_db.execute(stmt);
+			except:
+				excp = sys.exc_info()
+				# slot 1 contains error message
+				print excp[1]
+			
+			# Select the result from the table and
+			query = 'SELECT * FROM STOCKSHARE ORDER BY id'
+			if (serverinfo.DBMS_NAME[0:3] != 'IDS'):
+				stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE: ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+			else:
+				stmt = ibm_db.prepare(conn, query)
+			ibm_db.execute(stmt)
+			data = ibm_db.fetch_both( stmt )
+			while ( data ):
+				print "%s : %s : %s\n" % (data[0], data[1], data[2])
+				data = ibm_db.fetch_both( stmt )
+			try:
+				stmt = ibm_db.prepare(conn, query, {ibm_db.SQL_ATTR_CURSOR_TYPE:  ibm_db.SQL_CURSOR_KEYSET_DRIVEN})
+				ibm_db.execute(stmt)
+				rc = ibm_db.fetch_row(stmt, -1)
+				print "Fetch Row -1:%s " %str(rc)
+			except:
+				print "Requested row number must be a positive value"
+			ibm_db.close(conn)
+		else:
+			print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#10 : Megadeth : 100.00
+#20 : Zaral : 102.20
+#30 : Megabyte : 98.65
+#40 : Visarsoft : 123.34
+#50 : Mailersoft : 134.22
+#60 : Kaerci : 100.97
+#70 : Nirvana : 100.12
+#Requested row number must be a positive value
+#__ZOS_EXPECTED__
+#10 : Megadeth : 100.00
+#20 : Zaral : 102.20
+#30 : Megabyte : 98.65
+#40 : Visarsoft : 123.34
+#50 : Mailersoft : 134.22
+#60 : Kaerci : 100.97
+#70 : Nirvana : 100.12
+#Requested row number must be a positive value
+#__IDS_EXPECTED__
+#10 : Megadeth : 100.00
+#20 : Zaral : 102.20
+#30 : Megabyte : 98.65
+#40 : Visarsoft : 123.34
+#50 : Mailersoft : 134.22
+#60 : Kaerci : 100.97
+#70 : Nirvana : 100.12
+#Requested row number must be a positive value
\ No newline at end of file
diff -pruN 0.3.0-3/tests/test_execute_many.py 2.0.5-0ubuntu2/tests/test_execute_many.py
--- 0.3.0-3/tests/test_execute_many.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_execute_many.py	2014-01-31 11:42:00.000000000 +0000
@@ -0,0 +1,91 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+    def test_execute_many(self):
+        obj = IbmDbTestFunctions()
+        obj.assert_expect(self.run_test_execute_many)
+
+    def run_test_execute_many(self):
+        conn = ibm_db.connect(config.database, config.user, config.password)
+
+        if conn:
+            # Drop the tabmany table, in case it exists
+            drop = "DROP TABLE TABMANY"
+            try:
+                result = ibm_db.exec_immediate(conn, drop)
+            except:
+                pass
+
+            #create table tabmany
+            create = "CREATE TABLE TABMANY(id SMALLINT NOT NULL, name VARCHAR(32))"
+            ibm_db.exec_immediate(conn, create)
+            
+            #Populate the tabmany table with execute_many
+            insert = "INSERT INTO TABMANY (id, name) VALUES(?, ?)"
+            params = ((10, 'Sanders'), (20, 'Pernal'), (30, 'Marenghi'), (40, 'OBrien'))
+            stmt_insert = ibm_db.prepare(conn, insert)
+            ibm_db.execute_many(stmt_insert, params)
+            #check the number of rows inserted
+            row_count = ibm_db.num_rows(stmt_insert)
+            print row_count
+            
+            # check the inserted columns
+            select = "SELECT * FROM TABMANY"
+            stmt_select = ibm_db.exec_immediate(conn, select)
+            cols = ibm_db.fetch_tuple( stmt_select )
+            while( cols ):
+                print "%s, %s" % (cols[0], cols[1])
+                cols = ibm_db.fetch_tuple( stmt_select )
+            
+            #populate the tabmany table 
+            params = ((50, 'Hanes'), (55, ), (55.5, 'invalid row'), (60, 'Quigley'), (70, None) )
+            try:
+                ibm_db.execute_many(stmt_insert, params)
+            except Exception, inst:
+                #check the no. of inserted rows
+                row_count = ibm_db.num_rows(stmt_insert)
+                #check the exception raised by execute_many API
+                print inst
+                print row_count
+            ibm_db.close(conn)
+
+        else:
+            print ibm_db.conn_errormsg()
+
+#__END__
+#__LUW_EXPECTED__
+#4
+#10, Sanders
+#20, Pernal
+#30, Marenghi
+#40, OBrien
+#Error 1: Value parameter tuple: 2 has less no of param 
+#Error 2: Value parameters array 3 is not homogeneous with privious parameters array 
+#3
+#__ZOS_EXPECTED__
+#4
+#10, Sanders
+#20, Pernal
+#30, Marenghi
+#40, OBrien
+#Error 1: Value parameter tuple: 2 has less no of param 
+#Error 2: Value parameters array 3 is not homogeneous with privious parameters array 
+#3
+#__IDS_EXPECTED__
+#4
+#10, Sanders
+#20, Pernal
+#30, Marenghi
+#40, OBrien
+#Error 1: Value parameter tuple: 2 has less no of param 
+#Error 2: Value parameters array 3 is not homogeneous with privious parameters array 
+#3
\ No newline at end of file
diff -pruN 0.3.0-3/tests/test_InsertRetrieveDateTimeTypeColumn.py 2.0.5-0ubuntu2/tests/test_InsertRetrieveDateTimeTypeColumn.py
--- 0.3.0-3/tests/test_InsertRetrieveDateTimeTypeColumn.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_InsertRetrieveDateTimeTypeColumn.py	2014-01-31 09:17:22.000000000 +0000
@@ -0,0 +1,97 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2013
+#
+
+import unittest, sys, datetime
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+  def test_InsertRetrieveDateTimeTypeColumn(self):
+    obj = IbmDbTestFunctions()
+    obj.assert_expectf(self.run_test_InsertRetrieveDateTimeTypeColumn)
+
+  def run_test_InsertRetrieveDateTimeTypeColumn(self):
+    conn = ibm_db.connect(config.database, config.user, config.password)
+    
+    if conn:
+      drop = 'DROP TABLE tab_datetime'
+      result = ''
+      try:
+        result = ibm_db.exec_immediate(conn, drop)
+      except:
+        pass
+      t_val = datetime.time(10, 42, 34)
+      d_val = datetime.date(1981, 7, 8)
+      #ts_val = datetime.datetime.today()
+      ts_val = datetime.datetime(1981, 7, 8, 10, 42, 34, 10)
+      server = ibm_db.server_info( conn )
+      if (server.DBMS_NAME[0:3] == 'IDS'):
+        statement = "CREATE TABLE tab_datetime (col1 DATETIME HOUR TO SECOND, col2 DATE, col3 DATETIME YEAR TO FRACTION(5))"
+        result = ibm_db.exec_immediate(conn, statement)
+        statement = "INSERT INTO tab_datetime (col1, col2, col3) values (?, ?, ?)"
+        stmt = ibm_db.prepare(conn, statement)
+        result = ibm_db.execute(stmt, (t_val, d_val, ts_val))
+      else:
+        statement = "CREATE TABLE tab_datetime (col1 TIME, col2 DATE, col3 TIMESTAMP)"
+        result = ibm_db.exec_immediate(conn, statement)
+        statement = "INSERT INTO tab_datetime (col1, col2, col3) values (?, ?, ?)"
+        stmt = ibm_db.prepare(conn, statement)
+        result = ibm_db.execute(stmt, (t_val, d_val, ts_val))
+
+      statement = "SELECT * FROM tab_datetime"
+      result = ibm_db.exec_immediate(conn, statement)
+      
+      for i in range(0, ibm_db.num_fields(result)):
+        print str(i) + ":" + ibm_db.field_type(result,i)
+
+      statement = "SELECT * FROM tab_datetime"
+      stmt = ibm_db.prepare(conn, statement)
+      rc = ibm_db.execute(stmt)
+      result = ibm_db.fetch_row(stmt)
+      while ( result ):
+        row0 = ibm_db.result(stmt, 0)
+        row1 = ibm_db.result(stmt, 1)
+        row2 = ibm_db.result(stmt, 2)
+        print type(row0), row0
+        print type(row1), row1
+        print type(row2), row2
+        result = ibm_db.fetch_row(stmt)
+      
+      ibm_db.close(conn)
+    else:
+      print "Connection failed."
+
+#__END__
+#__LUW_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#<%s 'datetime.time'> 10:42:34
+#<%s 'datetime.date'> 1981-07-08
+#<%s 'datetime.datetime'> 1981-07-08 10:42:34.000010
+#__ZOS_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#<%s 'datetime.time'> 10:42:34
+#<%s 'datetime.date'> 1981-07-08
+#<%s 'datetime.datetime'> 1981-07-08 10:42:34.000010
+#__SYSTEMI_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#<%s 'datetime.time'> 10:42:34
+#<%s 'datetime.date'> 1981-07-08
+#<%s 'datetime.datetime'> 1981-07-08 10:42:34.000010
+#__IDS_EXPECTED__
+#0:time
+#1:date
+#2:timestamp
+#<%s 'datetime.time'> 10:42:34
+#<%s 'datetime.date'> 1981-07-08
+#<%s 'datetime.datetime'> 1981-07-08 10:42:34.000010
diff -pruN 0.3.0-3/tests/test_recreateDB.py 2.0.5-0ubuntu2/tests/test_recreateDB.py
--- 0.3.0-3/tests/test_recreateDB.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_recreateDB.py	2014-01-31 10:48:58.000000000 +0000
@@ -0,0 +1,96 @@
+# 
+#  Licensed Materials - Property of IBM
+#
+#  (c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+    def test_recreateDB(self):
+        obj = IbmDbTestFunctions()
+        if ((obj.server.DBMS_NAME == "DB2") or (obj.server.DBMS_NAME[0:3] != "DB2")):
+            raise unittest.SkipTest("recreateDB not Supported")
+        obj.assert_expect(self.run_test_recreateDB)
+
+    def run_test_recreateDB(self):
+        database = 'test001'
+        conn_str = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (database, config.hostname, config.port, config.user, config.password)
+        conn_str_attach = "attach=true;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.hostname, config.port, config.user, config.password) # for the createdb/dropdb APIs it is necessary that the connection only attaches to the DB server, not to any existing database on that server
+        
+        conn_attach = ibm_db.connect(conn_str_attach, '', '')
+
+        if conn_attach:
+            conn = False
+            try:
+                conn = ibm_db.connect(conn_str, '', '')
+            except:
+                pass
+
+            if conn:
+                ibm_db.close(conn)
+                conn = False
+                try:
+                    ibm_db.dropdb(conn_attach, database)
+                except:
+                    print 'Errors occurred during drop database'
+            try:        
+                # call recreatedb with no codeset argument when the specified database does not exist
+                rc = ibm_db.recreatedb(conn_attach, database)
+                if rc:
+                    conn = ibm_db.connect(conn_str, '', '')
+                    if conn:
+                        print 'database created sucessfully'
+                        ibm_db.close(conn)
+                        conn = False
+                    else:
+                        print 'database is not created'
+                else:
+                    print 'Errors occure during create database'
+
+                conn = ibm_db.connect(conn_str, '', '')
+                if conn:
+                    ibm_db.close(conn)
+                    conn = False
+                    # call recreatedb with a codeset argument when the specified database exists
+                    rc = ibm_db.recreatedb(conn_attach, database, 'iso88591')
+                    if rc:
+                        conn = ibm_db.connect(conn_str, '', '')
+                        server_info = ibm_db.server_info( conn )
+                        if conn and (server_info.DB_CODEPAGE == 819):
+                            print 'database with codeset created sucessfully'
+                            ibm_db.close(conn)
+                            conn = False
+                        else:
+                            print 'database is not created'
+                    else:
+                        print 'Error occure during recreate db with codeset'
+                            
+                #drop database
+                rc = ibm_db.dropdb(conn_attach, database)
+                if rc:
+                    try:
+                        conn = ibm_db.connect(conn_str, '', '')
+                    except:
+                        print 'datbase droped sucessfully'
+                    if conn:
+                        print 'Errors occurred during drop database'
+                        ibm_db.close(conn)
+                        conn = False
+                else:
+                    print 'Errors occurred during drop database'
+            except:
+                print ibm_db.conn_errormsg()
+                pass
+            ibm_db.close(conn_attach)
+        else:
+            print ibm_db.conn_errormsg()
+            
+#__END__
+#__LUW_EXPECTED__
+#database created sucessfully
+#database with codeset created sucessfully
+#datbase droped sucessfully
diff -pruN 0.3.0-3/tests/test_trusted_context_connect.py 2.0.5-0ubuntu2/tests/test_trusted_context_connect.py
--- 0.3.0-3/tests/test_trusted_context_connect.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_trusted_context_connect.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,391 @@
+# 
+#	Licensed Materials - Property of IBM
+#
+#	(c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+	def test_trusted_context_connect(self):
+		obj = IbmDbTestFunctions()
+		obj.assert_expectf(self.run_test_trusted_context_connect)
+		
+	def run_test_trusted_context_connect(self):
+		sql_drop_role = "DROP ROLE role_01"
+		sql_create_role = "CREATE ROLE role_01"
+
+		sql_drop_trusted_context = "DROP TRUSTED CONTEXT ctx"
+
+		sql_create_trusted_context = "CREATE TRUSTED CONTEXT ctx BASED UPON CONNECTION USING SYSTEM AUTHID "
+		sql_create_trusted_context += config.auth_user
+		sql_create_trusted_context += " ATTRIBUTES (ADDRESS '"
+		sql_create_trusted_context += config.hostname
+		sql_create_trusted_context += "') DEFAULT ROLE role_01 ENABLE WITH USE FOR "
+		sql_create_trusted_context += config.tc_user
+
+		sql_drop_table = "DROP TABLE trusted_table"
+		sql_create_table = "CREATE TABLE trusted_table (i1 int, i2 int)"
+
+		sql_select = "SELECT * FROM trusted_table"
+
+		# Setting up database.
+		conn = ibm_db.connect(config.database, config.user, config.password)
+		if conn:
+			sql_grant_permission = "GRANT INSERT ON TABLE trusted_table TO ROLE role_01"
+			sql_create_trusted_context_01 = sql_create_trusted_context + " WITH AUTHENTICATION"
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_trusted_context)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_role)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_role)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_grant_permission)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_trusted_context_01)
+			except:
+				pass
+				
+			# Populate the trusted_table
+			values = (\
+				(10, 20),\
+				(20, 40),\
+			)
+			sql_insert = 'INSERT INTO trusted_table (i1, i2) VALUES (?, ?)'
+			stmt = ibm_db.prepare(conn, sql_insert)		
+			if stmt:
+				for value in values:
+					result = ibm_db.execute(stmt, value)
+			ibm_db.close(conn)
+		else:
+			print "Connection failed."
+
+		options = {ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT: ibm_db.SQL_TRUE}
+		tc_options = {
+			ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: config.tc_user, 
+			ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: config.tc_pass
+		}
+		tc_all_options = {
+			ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT: ibm_db.SQL_TRUE, 
+			ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: config.tc_user, 
+			ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: config.tc_pass
+		}
+		dsn = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.database, config.hostname, config.port, config.auth_user, config.auth_pass)
+
+		# Making a normal connection and playing with it.
+		tc_conn = ibm_db.connect(dsn, "", "")
+		if tc_conn:
+			print "Normal connection established."
+			result = ibm_db.set_option(tc_conn, tc_options, 1)
+			print ibm_db.conn_errormsg(tc_conn)
+			ibm_db.close(tc_conn)
+
+		tc_conn = ibm_db.connect(dsn, "", "")
+		if tc_conn:
+			print "Normal connection established."
+			result = ibm_db.set_option(tc_conn, tc_all_options, 1)
+			print ibm_db.conn_errormsg(tc_conn)
+			ibm_db.close(tc_conn)
+
+		tc_conn = ibm_db.connect(dsn, "", "", tc_all_options)
+		if tc_conn:
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				print "Trusted connection succeeded."
+				get_tc_user = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				if config.tc_user != get_tc_user:
+					print "But trusted user is not switched."
+		ibm_db.close(tc_conn)
+
+		# Making trusted connection and performing normal operations.
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+		if tc_conn:
+			print "Trusted connection succeeded."
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				userBefore = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				ibm_db.set_option(tc_conn, tc_options, 1)
+				userAfter = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				if userBefore != userAfter:
+					print "User has been switched."
+					
+					# Inserting into table using trusted_user.
+					sql_insert = "INSERT INTO " + config.user + ".trusted_table (i1, i2) VALUES (?, ?)"
+					stmt = ibm_db.prepare(tc_conn, sql_insert)
+					result = ibm_db.execute(stmt, (300, 500))
+					
+					# Updating table using trusted_user.
+					sql_update = "UPDATE " + config.user + ".trusted_table set i1 = 400 WHERE i2 = 500"
+					try:
+						stmt = ibm_db.exec_immediate(tc_conn, sql_update)
+					except:
+						print ibm_db.stmt_errormsg()
+			
+			ibm_db.close(tc_conn)
+		else:
+			print "Trusted connection failed."
+
+		# Making trusted connection and switching to fake user.
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+
+		if tc_conn:
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				print "Trusted connection succeeded."
+				ibm_db.set_option(tc_conn, {ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: "fakeuser", ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: "fakepassword"}, 1)
+
+				sql_update = "UPDATE " + config.user + ".trusted_table set i1 = 400 WHERE i2 = 500"
+				try:
+					stmt = ibm_db.exec_immediate(tc_conn, sql_update)
+				except:
+					print ibm_db.stmt_errormsg()
+			ibm_db.close(tc_conn)
+		else:
+			print "Connection failed."
+
+		# Making trusted connection and passing password first then user while switching.
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+		tc_options_reversed = {ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: config.tc_pass, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: config.tc_user}
+
+		if tc_conn:
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				print "Trusted connection succeeded."
+				userBefore = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				ibm_db.set_option(tc_conn, tc_options_reversed, 1)
+				userAfter = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				if userBefore != userAfter:
+					print "User has been switched."
+			ibm_db.close(tc_conn)
+		else:
+			print "Connection failed."	
+
+		# Making trusted connection and passing password first then user while switching.
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+		tc_user_options = {ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: config.tc_user}
+		tc_pass_options = {ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: config.tc_pass}
+
+		if tc_conn:
+			print "Trusted connection succeeded."
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				userBefore = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				try:
+					ibm_db.set_option(tc_conn, tc_pass_options, 1)
+				except:
+					print ibm_db.conn_errormsg(tc_conn)			
+			ibm_db.close(tc_conn)
+		else:
+			print "Connection failed."
+		
+
+		# Making trusted connection and passing only user while switching when both user and password are required.
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+
+		if tc_conn:
+			print "Trusted connection succeeded."
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				ibm_db.set_option(tc_conn, tc_user_options, 1)
+
+				sql_update = "UPDATE " + config.user + ".trusted_table set i1 = 400 WHERE i2 = 500"
+				try:
+					stmt = ibm_db.exec_immediate(tc_conn, sql_update)
+				except:
+					print ibm_db.stmt_errormsg()			
+			ibm_db.close(tc_conn)
+		else:
+			print "Connection failed."
+		
+
+		# Make a connection
+		conn = ibm_db.connect(config.database, config.user, config.password)
+
+		if conn:
+			# Dropping the trusted context, in case it exists
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_trusted_context)
+			except:
+				pass
+
+			# Dropping Role.
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_role)
+			except:
+				pass
+
+			# Creating Role.
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_role)
+			except:
+				pass
+		
+			# Granting permissions to role.
+			try:
+				sql_grant_permission = "GRANT UPDATE ON TABLE trusted_table TO ROLE role_01"
+				result = ibm_db.exec_immediate(conn, sql_grant_permission)
+			except:
+				pass
+		
+			# Creating trusted context
+			try:
+				sql_create_trusted_context_01 = sql_create_trusted_context + " WITHOUT AUTHENTICATION"
+				result = ibm_db.exec_immediate(conn, sql_create_trusted_context_01)
+			except:
+				pass
+			
+			# Closing connection
+			ibm_db.close(conn)
+		else: 
+			print "Connection failed."			
+
+		# Making trusted connection
+		tc_conn = ibm_db.connect(dsn, "", "", options)
+		if tc_conn:
+			print "Trusted connection succeeded."
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				userBefore = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				ibm_db.set_option(tc_conn, tc_user_options, 1)
+				userAfter = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				if userBefore != userAfter:
+					print "User has been switched."
+					
+					# Inserting into table using trusted_user.
+					sql_insert = "INSERT INTO " + config.user + ".trusted_table (i1, i2) VALUES (300, 500)"
+					try:
+						stmt = ibm_db.exec_immediate(tc_conn, sql_insert)
+					except:
+						print ibm_db.stmt_errormsg()
+
+					# Updating table using trusted_user.
+					sql_update = "UPDATE " + config.user + ".trusted_table set i1 = 400 WHERE i2 = 20"
+					stmt = ibm_db.exec_immediate(tc_conn, sql_update)
+			ibm_db.close(tc_conn)
+		else:
+			print "Connection failed."	
+
+		# Cleaning up database.
+		conn = ibm_db.connect(config.database, config.user, config.password)
+
+		if conn:
+			print "Connection succeeded."
+
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_trusted_context)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_role)
+			except:
+				pass
+			ibm_db.close(conn)
+		else:
+			print "Connection failed."
+			
+#__END__
+#__LUW_EXPECTED__
+#Normal connection established.
+#[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Normal connection established.[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Trusted connection succeeded.
+#But trusted user is not switched.
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#Trusted connection succeeded.
+#User has been switched.
+#Trusted connection succeeded.
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL20361N  The switch user request using authorization ID "%s" within trusted context "CTX" failed with reason code "2".  SQLSTATE=42517 SQLCODE=-20361
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "INSERT" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Connection succeeded.
+#__ZOS_EXPECTED__
+#Normal connection established.
+#[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Normal connection established.[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Trusted connection succeeded.
+#But trusted user is not switched.
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#Trusted connection succeeded.
+#User has been switched.
+#Trusted connection succeeded.
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL20361N  The switch user request using authorization ID "%s" within trusted context "CTX" failed with reason code "2".  SQLSTATE=42517 SQLCODE=-20361
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "INSERT" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Connection succeeded.
+#__SYSTEMI_EXPECTED__
+#Normal connection established.
+#[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Normal connection established.[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Trusted connection succeeded.
+#But trusted user is not switched.
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#Trusted connection succeeded.
+#User has been switched.
+#Trusted connection succeeded.
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL20361N  The switch user request using authorization ID "%s" within trusted context "CTX" failed with reason code "2".  SQLSTATE=42517 SQLCODE=-20361
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "INSERT" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Connection succeeded.
+#__IDS_EXPECTED__
+#Normal connection established.
+#[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Normal connection established.[%s][%s] CLI0197E  A trusted context is not enabled on this connection. Invalid attribute value. SQLSTATE=HY010 SQLCODE=-99999
+#Trusted connection succeeded.
+#But trusted user is not switched.
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL30082N  Security processing failed with reason "24" ("USERNAME AND/OR PASSWORD INVALID").  SQLSTATE=08001 SQLCODE=-30082
+#Trusted connection succeeded.
+#User has been switched.
+#Trusted connection succeeded.
+#Trusted connection succeeded.
+#[%s][%s][%s] SQL20361N  The switch user request using authorization ID "%s" within trusted context "CTX" failed with reason code "2".  SQLSTATE=42517 SQLCODE=-20361
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "INSERT" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Connection succeeded.
diff -pruN 0.3.0-3/tests/test_trusted_context_pconnect.py 2.0.5-0ubuntu2/tests/test_trusted_context_pconnect.py
--- 0.3.0-3/tests/test_trusted_context_pconnect.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests/test_trusted_context_pconnect.py	2013-09-25 06:27:06.000000000 +0000
@@ -0,0 +1,206 @@
+# 
+#	Licensed Materials - Property of IBM
+#
+#	(c) Copyright IBM Corp. 2007-2008
+#
+
+import unittest, sys
+import ibm_db
+import config
+from testfunctions import IbmDbTestFunctions
+
+class IbmDbTestCase(unittest.TestCase):
+
+	def test_trusted_context_pconnect(self):
+		obj = IbmDbTestFunctions()
+		obj.assert_expectf(self.run_test_trusted_context_pconnect)
+
+	def run_test_trusted_context_pconnect(self):
+		sql_drop_role = "DROP ROLE role_01"
+		sql_create_role = "CREATE ROLE role_01"
+
+		sql_drop_trusted_context = "DROP TRUSTED CONTEXT ctx"
+
+		sql_create_trusted_context = "CREATE TRUSTED CONTEXT ctx BASED UPON CONNECTION USING SYSTEM AUTHID "
+		sql_create_trusted_context += config.auth_user
+		sql_create_trusted_context += " ATTRIBUTES (ADDRESS '"
+		sql_create_trusted_context += config.hostname
+		sql_create_trusted_context += "') DEFAULT ROLE role_01 ENABLE WITH USE FOR "
+		sql_create_trusted_context += config.tc_user
+
+		sql_drop_table = "DROP TABLE trusted_table"
+		sql_create_table = "CREATE TABLE trusted_table (i1 int, i2 int)"
+
+		sql_select = "SELECT * FROM trusted_table"
+
+		# Setting up database.
+		conn = ibm_db.connect(config.database, config.user, config.password)
+		if conn:
+			sql_grant_permission = "GRANT INSERT ON TABLE trusted_table TO ROLE role_01"
+			sql_create_trusted_context_01 = sql_create_trusted_context + " WITH AUTHENTICATION"
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_trusted_context)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_role)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_role)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_grant_permission)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_create_trusted_context_01)
+			except:
+				pass
+				
+			# Populate the trusted_table
+			values = (\
+				(10, 20),\
+				(20, 40),\
+			)
+			sql_insert = 'INSERT INTO trusted_table (i1, i2) VALUES (?, ?)'
+			stmt = ibm_db.prepare(conn, sql_insert)
+			if stmt:
+				for value in values:
+					result = ibm_db.execute(stmt, value)
+			ibm_db.close(conn)
+		else:
+			print "Connection failed."
+
+		options = {ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT:	ibm_db.SQL_TRUE}
+		tc_options = {ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID: config.tc_user, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: config.tc_pass}
+		dsn = "DATABASE=%s;HOSTNAME=%s;PORT=%d;PROTOCOL=TCPIP;UID=%s;PWD=%s;" % (config.database, config.hostname, config.port, config.auth_user, config.auth_pass)
+
+		# Making trusted connection and performing normal operations.
+		tc_conn = ibm_db.pconnect(dsn, "", "", options)
+		if tc_conn:
+			print "Trusted connection succeeded."
+			val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+			if val:
+				userBefore = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				ibm_db.set_option(tc_conn, tc_options, 1)
+				userAfter = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+				if userBefore != userAfter:
+					print "User has been switched."
+					
+					# Inserting into table using trusted_user.
+					sql_insert = "INSERT INTO " + config.user + ".trusted_table (i1, i2) VALUES (?, ?)"
+					stmt = ibm_db.prepare(tc_conn, sql_insert)
+					result = ibm_db.execute(stmt, (300, 500))
+					
+					# Updating table using trusted_user.
+					sql_update = "UPDATE " + config.user + ".trusted_table set i1 = 400 WHERE i2 = 500"
+					try:
+						stmt = ibm_db.exec_immediate(tc_conn, sql_update)
+					except:
+						print ibm_db.stmt_errormsg()
+			ibm_db.close(tc_conn)
+		else:
+			print "Trusted connection failed."
+
+		# Creating 10 persistent connections and checking if trusted context is enabled (cataloged connections)
+		for i in xrange(10):
+			tc_conn = ibm_db.pconnect(dsn, "", "")
+			if tc_conn:
+				val = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_USE_TRUSTED_CONTEXT, 1)
+				if val:
+					userAfter = ibm_db.get_option(tc_conn, ibm_db.SQL_ATTR_TRUSTED_CONTEXT_USERID, 1)
+					if userBefore != userAfter:  # NOTE(review): userBefore is only assigned in the earlier trusted-connection branch; unbound here if that branch was skipped -- confirm
+						print "Explicit Trusted Connection succeeded."
+
+		# Cleaning up database.
+		conn = ibm_db.connect(config.database, config.user, config.password)
+
+		if conn:
+			print "Connection succeeded."
+
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_trusted_context)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_table)
+			except:
+				pass
+			try:
+				result = ibm_db.exec_immediate(conn, sql_drop_role)
+			except:
+				pass
+			ibm_db.close(conn)
+		else:
+			print "Connection failed."
+#__END__
+#__LUW_EXPECTED__
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Connection succeeded.
+#__ZOS_EXPECTED__
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Connection succeeded.
+#__SYSTEMI_EXPECTED__
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Connection succeeded.
+#__IDS_EXPECTED__
+#Trusted connection succeeded.
+#User has been switched.
+#[%s][%s][%s] SQL0551N  "%s" does not have the %s privilege to perform operation "UPDATE" on object "%s.TRUSTED_TABLE".  SQLSTATE=42501 SQLCODE=-551
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Explicit Trusted Connection succeeded.
+#Connection succeeded.
diff -pruN 0.3.0-3/tests.py 2.0.5-0ubuntu2/tests.py
--- 0.3.0-3/tests.py	1970-01-01 00:00:00.000000000 +0000
+++ 2.0.5-0ubuntu2/tests.py	2013-09-25 06:27:05.000000000 +0000
@@ -0,0 +1,54 @@
+import os
+import sys
+import unittest
+import StringIO
+import re
+import glob
+import config
+
+class IbmDbTest(unittest.TestCase):
+  
+  slash = '/'
+  
+  # Currently, this function serves no purpose.
+  # However, this function has to be defined if
+  #   unittest.TestCase is inherited in the given class.
+  # For future reference, this function is called
+  #   every time a test is run in this test suite.
+  def setUp(self):
+    pass
+
+  # This function gets a list of all the test files located
+  #   in the current_dir/config.test_dir directory.
+  def getFileList(self):
+    if (sys.platform[0:3] == 'win'):
+      self.slash = '\\'
+    dir = config.test_dir + self.slash
+    if (os.environ.get("SINGLE_PYTHON_TEST", None)):
+      testfile = dir + os.environ.get("SINGLE_PYTHON_TEST", None)
+      filelist = glob.glob(testfile)
+    else:
+      filelist = glob.glob(dir + "test_*.py")
+      
+    for i in range(0, len(filelist)):
+      filelist[i] = filelist[i].replace('.py', '')
+      filelist[i] = filelist[i].replace(config.test_dir + self.slash, '')
+    filelist.sort()
+    return filelist
+
+  # This function is called to run all the tests.
+  def runTest(self):
+    filelist = self.getFileList();
+    suite = unittest.TestSuite()
+    
+    sys.path = [os.path.dirname(os.path.abspath(__file__)) + self.slash + config.test_dir] + sys.path[0:]
+    
+    for i in range(0, len(filelist)):
+      exec("import %s" % filelist[i])
+      testFuncName = filelist[i].replace(config.test_dir + self.slash, '')
+      exec("suite.addTest(%s.IbmDbTestCase(testFuncName))" % filelist[i])
+      
+    unittest.TextTestRunner(verbosity=2).run(suite) 
+
+obj = IbmDbTest()
+suite = obj.runTest()
