Imported Upstream version 1.0.1 (branch: upstream, tag: upstream/1.0.1)
author Joel Rosdahl <joel@debian.org>
Mon, 11 Jan 2010 20:36:35 +0000 (21:36 +0100)
committer Joel Rosdahl <joel@debian.org>
Mon, 11 Jan 2010 20:36:35 +0000 (21:36 +0100)
29 files changed:
INSTALL.win32 [new file with mode: 0644]
LICENSE [new file with mode: 0644]
MANIFEST.in [new file with mode: 0644]
README [new file with mode: 0644]
_sqlite.c [new file with mode: 0644]
debian/changelog [new file with mode: 0644]
debian/control [new file with mode: 0644]
debian/copyright [new file with mode: 0644]
debian/rules [new file with mode: 0644]
doc/rest/manual.txt [new file with mode: 0644]
encode.c [new file with mode: 0644]
examples/converters.py [new file with mode: 0644]
examples/dbapi_transactions.py [new file with mode: 0644]
examples/manual_transactions.py [new file with mode: 0644]
misc/multithreading_crash.py [new file with mode: 0644]
port/strsep.c [new file with mode: 0644]
port/strsep.h [new file with mode: 0644]
setup.py [new file with mode: 0644]
sqlite/__init__.py [new file with mode: 0644]
sqlite/main.py [new file with mode: 0644]
test/all_tests.py [new file with mode: 0644]
test/api_tests.py [new file with mode: 0644]
test/logging_tests.py [new file with mode: 0644]
test/lowlevel_tests.py [new file with mode: 0644]
test/pgresultset_tests.py [new file with mode: 0644]
test/testsupport.py [new file with mode: 0644]
test/transaction_tests.py [new file with mode: 0644]
test/type_tests.py [new file with mode: 0644]
test/userfunction_tests.py [new file with mode: 0644]

diff --git a/INSTALL.win32 b/INSTALL.win32
new file mode 100644 (file)
index 0000000..01f6e70
--- /dev/null
+++ b/INSTALL.win32
@@ -0,0 +1,107 @@
+============================
+Building PySQLite on Windows
+============================
+
+On Windows, Python's distutils defaults to the Visual C++ compiler from
+Microsoft. If you want to use other compilers for compiling Python extensions
+on Windows, see chapter 3.1, "Using non-Microsoft compilers on Windows", in
+the "Installing Python Modules" document of your Python documentation. It is
+available online at http://www.python.org/doc/current/inst/non-ms-compilers.html
+
+The following are build instructions for the GNU C compiler, Borland C++, and
+Microsoft Visual C++.
+
+
+========================
+Using the GNU C compiler
+========================
+
+Since you have read the Python documentation for non-Microsoft compilers by
+now, you have MinGW or Cygwin installed and have created the required import
+library for the Python DLL. Fine, let's continue.
+
+From http://www.hwaci.com/sw/sqlite/download.html, get the sqlite_source.zip
+and sqlitedll.zip files and unpack both archives into the same directory.
+
+Create an import library for the GNU linker:
+
+$ dlltool --def sqlite.def --dllname sqlite.dll --output-lib libsqlite.a
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variables to point to the
+place where you unpacked the SQLite files and where you created the import
+library.
+
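+As a rough sketch (assuming you unpacked SQLite into C:\sqlite; adjust the
+path to your actual layout), the relevant lines in setup.py might end up
+looking something like this:
+
+    # hypothetical paths; point these at the directory that holds sqlite.h,
+    # sqlite.dll and the import library you just created
+    include_dirs = ["C:\\sqlite"]
+    library_dirs = ["C:\\sqlite"]
+
+The same edit applies to the Borland and Visual C++ builds described below.
+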
+Build PySQLite:
+
+$ python setup.py build --compiler=mingw32
+
+
+==========================
+Using the Borland compiler
+==========================
+
+Since you have read the Python documentation for non-Microsoft compilers by
+now, you have the Borland C++ compiler installed and have created the required
+import library for the Python DLL. Fine, let's continue.
+
+From http://www.hwaci.com/sw/sqlite/download.html, get the sqlite_source.zip
+and sqlitedll.zip files and unpack both archives into the same directory.
+
+Create an import library for the Borland linker:
+
+$ implib -a sqlite.lib sqlite.dll 
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variables to point to the
+place where you unpacked the SQLite files and where you created the import
+library.
+
+Build PySQLite:
+
+$ python setup.py build --compiler=bcpp
+
+
+==========================
+Using Microsoft Visual C++
+==========================
+
+From http://www.hwaci.com/sw/sqlite/download.html, get the sqlite_source.zip
+and sqlitedll.zip files and unpack both archives into the same directory.
+
+Create an import library for the Microsoft linker:
+
+$ lib /def:sqlite.def
+
+Unpack the PySQLite sources and open setup.py in your editor. Search for
+"win32". Change the include_dirs and library_dirs variables to point to the
+place where you unpacked the SQLite files and where you created the import
+library.
+
+Build PySQLite:
+
+$ python setup.py build
+
+
+==================================
+What to do after building PySQLite
+==================================
+
+- Make sure sqlite.dll can be found. Copy it into your system directory, into
+  some other directory in your PATH, or into the same directory as the .pyd
+  file from PySQLite.
+
+- Run the included test suite with these or similar commands:
+  $ copy ..\sqlite\sqlite.dll build\lib.win32-2.2 
+  $ copy test\*.py build\lib.win32-2.2
+  $ cd build\lib.win32-2.2
+  $ python all_tests.py
+
+  All the tests should pass. If something goes wrong, report it to the PySQLite
+  developers using the SourceForge bug tracker.
+
+- Install PySQLite:
+  $ python setup.py install
+
+  Again, make sure sqlite.dll can be found.
+
diff --git a/LICENSE b/LICENSE
new file mode 100644 (file)
index 0000000..a081ed9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,8 @@
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted, provided that
+the above copyright notice appear in all copies and that both that copyright
+notice and this permission notice appear in supporting documentation.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644 (file)
index 0000000..8f49b92
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1 @@
+include port/strsep.h
diff --git a/README b/README
new file mode 100644 (file)
index 0000000..1dc23fb
--- /dev/null
+++ b/README
@@ -0,0 +1,36 @@
+                                            _ _ _
+                      _ __  _   _ ___  __ _| (_) |_ ___
+                     | '_ \| | | / __|/ _` | | | __/ _ \
+                     | |_) | |_| \__ \ (_| | | | ||  __/
+                     | .__/ \__, |___/\__, |_|_|\__\___|
+                     |_|    |___/        |_|
+
+           A DB API v2.0 compatible interface to the SQLite 2.x
+                       Embedded Relational Database.
+                          Copyright (c) 2001-2003
+                   Michael Owens <mike@mikesclutter.com>
+                      Gerhard Häring <gh@ghaering.de>
+
+
+Overview:
+
+This is an extension module for the SQLite embedded relational database. It
+tries to conform to the Python DB-API Spec v2 as far as possible. One problem
+is that SQLite returns everything as text. This is a result of SQLite's
+internal representation of data; however, it may still be possible to return
+data in the type specified by the table definitions. I am still working on
+that, which will require some study of the SQLite source.
+
+Installation:
+
+Installation should be relatively simple. Following the standard distutils
+procedure, it can be done in two steps:
+
+    python setup.py build
+    python setup.py install
+
+For building PySQLite on Windows, check out the file INSTALL.win32.
+
+Other:
+
+Check out the doc/rest and examples folders.
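+
+As a minimal sketch of typical use (the database file and table names below
+are made up; see examples/dbapi_transactions.py for a complete example):
+
+    import sqlite
+
+    con = sqlite.connect("test.db")   # creates test.db if it does not exist
+    cur = con.cursor()
+    cur.execute("create table people (name varchar(20), age integer)")
+    cur.execute("insert into people values ('Jane', 37)")
+    con.commit()                      # commit the changes
+    cur.execute("select name, age from people")
+    print cur.fetchall()              # prints the selected rows
+    con.close()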
diff --git a/_sqlite.c b/_sqlite.c
new file mode 100644 (file)
index 0000000..7a7e2cc
--- /dev/null
+++ b/_sqlite.c
@@ -0,0 +1,1810 @@
+/*                                            _ _ _
+**                      _ __  _   _ ___  __ _| (_) |_ ___
+**                     | '_ \| | | / __|/ _` | | | __/ _ \
+**                     | |_) | |_| \__ \ (_| | | | ||  __/
+**                     | .__/ \__, |___/\__, |_|_|\__\___|
+**                     |_|    |___/        |_|
+**
+**               A DB API v2.0 compatible interface to SQLite
+**                       Embedded Relational Database.
+**                          Copyright (c) 2001-2003
+**                  Michael Owens <mike@mikesclutter.com>
+**                     Gerhard Häring <gh@ghaering.de>
+**
+** All Rights Reserved
+**
+** Permission to use, copy, modify, and distribute this software and its
+** documentation for any purpose and without fee is hereby granted, provided
+** that the above copyright notice appear in all copies and that both that
+** copyright notice and this permission notice appear in supporting
+** documentation.
+**
+** This program is distributed in the hope that it will be useful, but WITHOUT
+** ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+** FOR A PARTICULAR PURPOSE.
+*/
+
+#include "Python.h"
+#include "structmember.h"
+
+#include "sqlite.h"
+
+#include "port/strsep.h"
+
+/* Compatibility macros
+ *
+ * From Python 2.2 to 2.3, the way to export the module init function
+ * has changed. These macros keep the code compatible to both ways.
+ */
+#if PY_VERSION_HEX >= 0x02030000
+#  define PySQLite_DECLARE_MODINIT_FUNC(name) PyMODINIT_FUNC name(void)
+#  define PySQLite_MODINIT_FUNC(name)         PyMODINIT_FUNC name(void)
+#else
+#  define PySQLite_DECLARE_MODINIT_FUNC(name) void name(void)
+#  define PySQLite_MODINIT_FUNC(name)         DL_EXPORT(void) name(void)
+#endif
+
+/*
+ * These are needed because there is no "official" way to specify
+ * WHERE to save the thread state. (At least not until Python 2.3)
+ */
+#ifdef WITH_THREAD
+#  define MY_BEGIN_ALLOW_THREADS(st)    \
+    { st = PyEval_SaveThread(); }
+#  define MY_END_ALLOW_THREADS(st)      \
+    { PyEval_RestoreThread(st); st = NULL; }
+#else
+#  define MY_BEGIN_ALLOW_THREADS(st)
+#  define MY_END_ALLOW_THREADS(st)      { st = NULL; }
+#endif
+
+/*
+** Some compilers do not support the "long long" datatype. So we have to do a
+** typedef for 64-bit integers that depends on which compiler is being used.
+*/
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+  typedef __int64 sqlite_int64;
+  typedef unsigned __int64 sqlite_uint64;
+#else
+  typedef long long int sqlite_int64;
+  typedef unsigned long long int sqlite_uint64;
+#endif
+
+/*------------------------------------------------------------------------------
+** Object Declarations
+**------------------------------------------------------------------------------
+*/
+
+/** A connection object */
+typedef struct
+{
+    PyObject_HEAD
+    const char* database_name;
+    const char* sql;
+    sqlite* p_db;
+    PyObject* converters;
+    PyObject* expected_types;
+    PyObject* command_logfile;
+    PyThreadState *tstate;
+} pysqlc;
+
+/** A result set object. */
+typedef struct
+{
+    PyObject_HEAD
+    pysqlc* con;
+    PyObject* p_row_list;
+    PyObject* p_col_def_list;
+    int row_count;
+} pysqlrs;
+
+/** Exception objects */
+
+static PyObject* _sqlite_Warning;
+static PyObject* _sqlite_Error;
+static PyObject* _sqlite_DatabaseError;
+static PyObject* _sqlite_InterfaceError;
+static PyObject* _sqlite_DataError;
+static PyObject* _sqlite_OperationalError;
+static PyObject* _sqlite_IntegrityError;
+static PyObject* _sqlite_InternalError;
+static PyObject* _sqlite_ProgrammingError;
+static PyObject* _sqlite_NotSupportedError;
+
+static int debug_callbacks = 0;
+
+#define PRINT_OR_CLEAR_ERROR if (debug_callbacks) PyErr_Print(); else PyErr_Clear();
+
+/* A tuple describing the minimum required SQLite version */
+static PyObject* required_sqlite_version;
+
+/*** Type codes */
+
+static PyObject* tc_INTEGER;
+static PyObject* tc_FLOAT;
+static PyObject* tc_TIMESTAMP;
+static PyObject* tc_DATE;
+static PyObject* tc_TIME;
+static PyObject* tc_INTERVAL;
+static PyObject* tc_STRING;
+static PyObject* tc_UNICODESTRING;
+static PyObject* tc_BINARY;
+
+/*------------------------------------------------------------------------------
+** Function Prototypes
+**------------------------------------------------------------------------------
+*/
+
+static int process_record(void* p_data, int num_fields, char** p_fields, char** p_col_names);
+
+PySQLite_DECLARE_MODINIT_FUNC(init_sqlite);
+static int _seterror(int returncode, char* errmsg);
+static void _con_dealloc(pysqlc *self);
+static PyObject* sqlite_version_info(PyObject* self, PyObject* args);
+static PyObject* pysqlite_connect(PyObject *self, PyObject *args, PyObject *kwargs);
+static PyObject* sqlite_library_version(PyObject *self, PyObject *args);
+static PyObject* sqlite_enable_callback_debugging(PyObject *self, PyObject *args);
+static PyObject* pysqlite_encode(PyObject *self, PyObject *args);
+static PyObject* pysqlite_decode(PyObject *self, PyObject *args);
+
+/* Defined in encode.c */
+int sqlite_encode_binary(const unsigned char *in, int n, unsigned char *out);
+int sqlite_decode_binary(const unsigned char *in, unsigned char *out);
+
+/** Connection Object Methods */
+static PyObject* _con_get_attr(pysqlc *self, char *attr);
+static PyObject* _con_close(pysqlc *self, PyObject *args);
+static PyObject* _con_execute(pysqlc *self, PyObject *args);
+static PyObject* _con_register_converter(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_set_expected_types(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_create_function(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_create_aggregate(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_sqlite_exec(pysqlc *self, PyObject *args, PyObject *kwargs);
+static PyObject* _con_sqlite_last_insert_rowid(pysqlc *self, PyObject *args);
+static PyObject* _con_sqlite_changes(pysqlc *self, PyObject *args);
+static PyObject* _con_sqlite_busy_handler(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_sqlite_busy_timeout(pysqlc* self, PyObject *args, PyObject* kwargs);
+static PyObject* _con_set_command_logfile(pysqlc* self, PyObject *args, PyObject* kwargs);
+
+/** Result set Object Methods */
+static void _rs_dealloc(pysqlrs* self);
+static PyObject* _rs_get_attr(pysqlrs* self, char *attr);
+
+#ifdef _MSC_VER
+#define staticforward extern
+#endif
+
+staticforward PyMethodDef _con_methods[];
+staticforward struct memberlist _con_memberlist[];
+
+PyTypeObject pysqlc_Type =
+{
+    PyObject_HEAD_INIT(NULL)
+    0,
+    "Connection",
+    sizeof(pysqlc),
+    0,
+    (destructor) _con_dealloc,
+    0,
+    (getattrfunc) _con_get_attr,
+    (setattrfunc) NULL,
+};
+
+PyTypeObject pysqlrs_Type =
+{
+    PyObject_HEAD_INIT(NULL)
+    0,
+    "ResultSet",
+    sizeof(pysqlrs),
+    0,
+    (destructor) _rs_dealloc,
+    0,
+    (getattrfunc) _rs_get_attr,
+    (setattrfunc) NULL,
+};
+
+static void
+_con_dealloc(pysqlc* self)
+{
+    if(self)
+    {
+        if(self->p_db != 0)
+        {
+            /* Close the database */
+            sqlite_close(self->p_db);
+            self->p_db = 0;
+        }
+
+        if(self->sql != NULL)
+        {
+            /* Free last SQL statement string */
+            free((void*)self->sql);
+            self->sql = NULL;
+        }
+
+        if(self->database_name != NULL)
+        {
+            /* Free database name string */
+            free((void*)self->database_name);
+            self->database_name = NULL;
+        }
+
+        Py_DECREF(self->converters);
+        Py_DECREF(self->expected_types);
+        Py_DECREF(self->command_logfile);
+
+        PyObject_Del(self);
+    }
+}
+
+static char pysqlite_connect_doc[] =
+"connect(db, mode=0777) -> Connection.\n\
+Opens a new database connection.";
+
+/* return a new instance of sqlite_connection */
+PyObject* pysqlite_connect(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+    const char* db_name = 0;
+    int mode = 0777;
+    char *errmsg;
+
+    pysqlc* obj;
+
+    static char *kwlist[] = { "filename", "mode", NULL };
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i:pysqlite_connect",
+                                      kwlist, &db_name, &mode))
+    {
+        return NULL;
+    }
+
+    if ((obj = PyObject_New(pysqlc, &pysqlc_Type)) == NULL)
+    {
+        return NULL;
+    }
+
+    /* Open the database */
+    obj->p_db = sqlite_open(db_name, mode, &errmsg);
+
+    if(obj->p_db == 0 || errmsg != NULL)
+    {
+        PyObject_Del(obj);
+        if (errmsg != NULL)
+        {
+            PyErr_SetString(_sqlite_DatabaseError, errmsg);
+            sqlite_freemem(errmsg);
+        }
+        else
+        {
+            PyErr_SetString(_sqlite_DatabaseError, "Could not open database.");
+        }
+        return NULL;
+    }
+
+    /* Assign the database name */
+    if ((obj->database_name = strdup(db_name)) == NULL)
+    {
+        PyErr_SetString(PyExc_MemoryError, "Cannot allocate memory for database name.");
+        return NULL;
+    }
+
+    /* Init sql string to NULL */
+    obj->sql = NULL;
+
+    /* Set the thread state to NULL */
+    obj->tstate = NULL;
+
+    if ((obj->converters = PyDict_New()) == NULL)
+    {
+        PyErr_SetString(PyExc_MemoryError, "Cannot allocate memory for converters.");
+        return NULL;
+    }
+
+    Py_INCREF(Py_None);
+    obj->expected_types = Py_None;
+
+    Py_INCREF(Py_None);
+    obj->command_logfile = Py_None;
+
+    /* Get column type information */
+    (void)sqlite_exec(obj->p_db, "pragma show_datatypes=ON", (sqlite_callback)0, (void*)0, &errmsg);
+
+    return (PyObject *) obj;
+}
+
+static PyObject* _con_get_attr(pysqlc *self, char *attr)
+{
+    PyObject *res;
+
+    res = Py_FindMethod(_con_methods, (PyObject *) self,attr);
+
+    if(NULL != res)
+    {
+        return res;
+    }
+    else
+    {
+        PyErr_Clear();
+        return PyMember_Get((char *) self, _con_memberlist, attr);
+    }
+}
+
+static char _con_close_doc [] =
+"close()\n\
+Close the database connection.";
+
+static PyObject* _con_close(pysqlc *self, PyObject *args)
+{
+    if (!PyArg_ParseTuple(args,""))
+    {
+        return NULL;
+    }
+
+    if(self->p_db != 0)
+    {
+        /* Close the database */
+        sqlite_close(self->p_db);
+        self->p_db = 0;
+    }
+    else
+    {
+        PyErr_SetString(_sqlite_ProgrammingError, "Database is not open.");
+        return NULL;
+    }
+
+    Py_INCREF(Py_None);
+
+    return Py_None;
+}
+
+static void function_callback(sqlite_func *context, int argc, const char **argv)
+{
+    int i;
+    PyObject* function_result;
+    PyObject* args;
+    PyObject* userdata;
+    PyObject* func;
+    PyObject* s;
+    pysqlc* con;
+
+    userdata = (PyObject*)sqlite_user_data(context);
+    func = PyTuple_GetItem(userdata, 0);
+    con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+    MY_END_ALLOW_THREADS(con->tstate)
+
+    args = PyTuple_New(argc);
+    for (i = 0; i < argc; i++)
+    {
+        if (argv[i] == NULL)
+        {
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(args, i, Py_None);
+        }
+        else
+        {
+            PyTuple_SetItem(args, i, PyString_FromString(argv[i]));
+        }
+    }
+
+    function_result = PyObject_CallObject(func, args);
+    Py_DECREF(args);
+
+    if (PyErr_Occurred())
+    {
+        PRINT_OR_CLEAR_ERROR
+        sqlite_set_result_error(context, NULL, -1);
+        MY_BEGIN_ALLOW_THREADS(con->tstate)
+        return;
+    }
+
+    if (function_result == Py_None)
+    {
+        sqlite_set_result_string(context, NULL, -1);
+    }
+    else
+    {
+        s = PyObject_Str(function_result);
+        sqlite_set_result_string(context, PyString_AsString(s), -1);
+        Py_DECREF(s);
+    }
+
+    Py_DECREF(function_result);
+    MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static void aggregate_step(sqlite_func *context, int argc, const char **argv)
+{
+    int i;
+    PyObject* args;
+    PyObject* function_result;
+    PyObject* userdata;
+    PyObject* aggregate_class;
+    pysqlc* con;
+    PyObject** aggregate_instance;
+    PyObject* stepmethod;
+
+    userdata = (PyObject*)sqlite_user_data(context);
+    aggregate_class = PyTuple_GetItem(userdata, 0);
+
+    con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+    MY_END_ALLOW_THREADS(con->tstate)
+
+    aggregate_instance = (PyObject**)sqlite_aggregate_context(context, sizeof(PyObject*));
+
+    if (*aggregate_instance == 0) {
+        args = PyTuple_New(0);
+        *aggregate_instance = PyObject_CallObject(aggregate_class, args);
+        Py_DECREF(args);
+
+        if (PyErr_Occurred())
+        {
+            PRINT_OR_CLEAR_ERROR
+            MY_BEGIN_ALLOW_THREADS(con->tstate)
+            return;
+        }
+    }
+
+    stepmethod = PyObject_GetAttrString(*aggregate_instance, "step");
+    if (!stepmethod)
+    {
+        /* PRINT_OR_CLEAR_ERROR */
+        MY_BEGIN_ALLOW_THREADS(con->tstate)
+        return;
+    }
+
+    args = PyTuple_New(argc);
+    for (i = 0; i < argc; i++) {
+        if (argv[i] == NULL) {
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(args, i, Py_None);
+        } else {
+            PyTuple_SetItem(args, i, PyString_FromString(argv[i]));
+        }
+    }
+
+    if (PyErr_Occurred())
+    {
+        PRINT_OR_CLEAR_ERROR
+    }
+
+    function_result = PyObject_CallObject(stepmethod, args);
+    Py_DECREF(args);
+    Py_DECREF(stepmethod);
+
+    if (function_result == NULL)
+    {
+        PRINT_OR_CLEAR_ERROR
+        /* Don't use sqlite_set_result_error here, or an assertion in the
+         * SQLite code will trigger and cause a core dump.
+         */
+    }
+    else
+    {
+        Py_DECREF(function_result);
+    }
+
+    MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static void aggregate_finalize(sqlite_func *context)
+{
+    PyObject* args;
+    PyObject* function_result;
+    PyObject* s;
+    PyObject** aggregate_instance;
+    PyObject* userdata;
+    pysqlc* con;
+    PyObject* aggregate_class;
+    PyObject* finalizemethod;
+
+    userdata = (PyObject*)sqlite_user_data(context);
+    aggregate_class = PyTuple_GetItem(userdata, 0);
+    con = (pysqlc*)PyTuple_GetItem(userdata, 1);
+    MY_END_ALLOW_THREADS(con->tstate)
+
+    aggregate_instance = (PyObject**)sqlite_aggregate_context(context, sizeof(PyObject*));
+
+    finalizemethod = PyObject_GetAttrString(*aggregate_instance, "finalize");
+
+    if (!finalizemethod)
+    {
+        PyErr_SetString(PyExc_ValueError, "finalize method missing");
+        goto error;
+    }
+
+    args = PyTuple_New(0);
+    function_result = PyObject_CallObject(finalizemethod, args);
+    Py_DECREF(args);
+    Py_DECREF(finalizemethod);
+
+    if (PyErr_Occurred())
+    {
+        PRINT_OR_CLEAR_ERROR
+        sqlite_set_result_error(context, NULL, -1);
+    }
+    else if (function_result == Py_None)
+    {
+        Py_DECREF(function_result);
+        sqlite_set_result_string(context, NULL, -1);
+    }
+    else
+    {
+        s = PyObject_Str(function_result);
+        Py_DECREF(function_result);
+        sqlite_set_result_string(context, PyString_AsString(s), -1);
+        Py_DECREF(s);
+    }
+
+error:
+    Py_XDECREF(*aggregate_instance);
+
+    MY_BEGIN_ALLOW_THREADS(con->tstate)
+}
+
+static int sqlite_busy_handler_callback(void* void_data, const char* tablename, int num_busy)
+{
+    PyObject* data;
+    PyObject* func;
+    PyObject* userdata;
+    PyObject* args;
+    PyObject* function_result;
+    pysqlc* con;
+    int result_int;
+
+    data = (PyObject*)void_data;
+
+    func = PyTuple_GetItem(data, 0);
+    userdata = PyTuple_GetItem(data, 1);
+    con = (pysqlc*)PyTuple_GetItem(data, 2);
+
+    MY_END_ALLOW_THREADS(con->tstate)
+
+    args = PyTuple_New(3);
+    Py_INCREF(userdata);
+    PyTuple_SetItem(args, 0, userdata);
+    PyTuple_SetItem(args, 1, PyString_FromString(tablename));
+    PyTuple_SetItem(args, 2, PyInt_FromLong((long)num_busy));
+
+    function_result = PyObject_CallObject(func, args);
+    Py_DECREF(args);
+
+    if (PyErr_Occurred())
+    {
+        PRINT_OR_CLEAR_ERROR
+        MY_BEGIN_ALLOW_THREADS(con->tstate)
+        return 0;
+    }
+
+    result_int = PyObject_IsTrue(function_result);
+
+    Py_DECREF(function_result);
+
+    MY_BEGIN_ALLOW_THREADS(con->tstate)
+
+    return result_int;
+}
+
+static char _con_sqlite_busy_handler_doc[] =
+"sqlite_busy_handler(func, data)\n\
+Register a busy handler.\n\
+\n\
+    The sqlite_busy_handler() procedure can be used to register a busy\n\
+    callback with an open SQLite database. The busy callback will be invoked\n\
+    whenever SQLite tries to access a database that is locked. The callback\n\
+    will typically do some other useful work, or perhaps sleep, in order to\n\
+    give the lock a chance to clear. If the callback returns non-zero, then\n\
+    SQLite tries again to access the database and the cycle repeats. If the\n\
+    callback returns zero, then SQLite aborts the current operation and returns\n\
+    SQLITE_BUSY, which PySQLite turns into an OperationalError.\n\
+    \n\
+    The arguments to sqlite_busy_handler() are the callback function (func) and\n\
+    an additional argument (data) that will be passed to the busy callback\n\
+    function.\n\
+    \n\
+    When the busy callback is invoked, it is sent three arguments. The first\n\
+    argument will be the 'data' that was passed to sqlite_busy_handler. The\n\
+    second will be the name of the database table or\n\
+    index that SQLite was trying to access and the third one will be the number\n\
+    of times that the library has attempted to access the database table or\n\
+    index.";
+
+static PyObject* _con_sqlite_busy_handler(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"func", "data", NULL};
+    PyObject* func;
+    PyObject* data = Py_None;
+    PyObject* userdata;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O:sqlite_busy_handler",
+                                      kwlist, &func, &data))
+    {
+        return NULL;
+    }
+
+    if ((userdata = PyTuple_New(3)) == NULL)
+    {
+        return NULL;
+    }
+    Py_INCREF(func); PyTuple_SetItem(userdata, 0, func);
+    Py_INCREF(data); PyTuple_SetItem(userdata, 1, data);
+    Py_INCREF(self); PyTuple_SetItem(userdata, 2, (PyObject*)self);
+
+    sqlite_busy_handler(self->p_db, &sqlite_busy_handler_callback, userdata);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static char _con_sqlite_busy_timeout_doc[] =
+"sqlite_busy_timeout(milliseconds)\n\
+Register a busy handler that will wait for a specific time before giving up.\n\
+\n\
+    This is a convenience routine that will install a busy handler (see\n\
+    sqlite_busy_handler) that sleeps for the given number of milliseconds\n\
+    before giving up (i.e. returning SQLITE_BUSY/throwing OperationalError).";
+
+static PyObject* _con_sqlite_busy_timeout(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    int timeout;
+    static char *kwlist[] = {"timeout", NULL};
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i:sqlite_busy_timeout",
+                                      kwlist, &timeout))
+    {
+        return NULL;
+    }
+
+    sqlite_busy_timeout(self->p_db, timeout);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static char _con_create_function_doc[] =
+"create_function(name, n_args, func)\n\
+Create a new SQL function.\n\
+\n\
+    A new SQL function named 'name', taking 'n_args' arguments, is created.\n\
+    The callable 'func' will be called for this function.";
+
+static PyObject* _con_create_function(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    int n_args;
+    char* name;
+    PyObject* func;
+    PyObject* userdata;
+    static char *kwlist[] = {"name", "n_args", "func", NULL};
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO:create_function",
+                                     kwlist, &name, &n_args,
+                                     &func))
+    {
+        return NULL;
+    }
+
+    if (!(userdata = PyTuple_New(2))) return NULL;
+    Py_INCREF(func);
+    PyTuple_SetItem(userdata, 0, func);
+    Py_INCREF(self);
+    PyTuple_SetItem(userdata, 1, (PyObject*)self);
+
+    if (!PyCallable_Check(func))
+    {
+        PyErr_SetString(PyExc_ValueError, "func must be a callable!");
+        return NULL;
+    }
+
+    Py_INCREF(func);
+    if (0 != sqlite_create_function(self->p_db, name, n_args, &function_callback, (void*)userdata))
+    {
+        PyErr_SetString(_sqlite_ProgrammingError, "Cannot create function.");
+        return NULL;
+    }
+    else
+    {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+static char _con_create_aggregate_doc[] =
+"create_aggregate(name, n_args, aggregate_class)\n\
+Create a new SQL aggregate function.\n\
+\n\
+    A new aggregate function under the name 'name', with 'n_args' arguments,\n\
+    is created. 'aggregate_class' must be a class with a 'step' method, which\n\
+    is called once per row, and a 'finalize' method, which is called without\n\
+    arguments to produce the final result of the aggregate.";
+
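+/*
+** Illustrative sketch (not part of the module): given a low-level connection
+** 'con' returned by _sqlite.connect(), a Python aggregate class passed to
+** create_aggregate() might look like this (the names are made up):
+**
+**     class MySum:
+**         def __init__(self):
+**             self.total = 0.0
+**         def step(self, value):            # called once per row
+**             if value is not None:
+**                 self.total = self.total + float(value)
+**         def finalize(self):               # called once at the end
+**             return self.total
+**
+**     con.create_aggregate("mysum", 1, MySum)
+*/
+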
+static PyObject* _con_create_aggregate(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    PyObject* aggregate_class;
+
+    int n_args;
+    char* name;
+    static char *kwlist[] = { "name", "n_args", "aggregate_class", NULL };
+    PyObject* userdata;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO:create_aggregate",
+                                      kwlist, &name, &n_args, &aggregate_class))
+    {
+        return NULL;
+    }
+
+    if (!(userdata = PyTuple_New(2))) return NULL;
+    Py_INCREF(aggregate_class);
+    PyTuple_SetItem(userdata, 0, aggregate_class);
+    Py_INCREF(self);
+    PyTuple_SetItem(userdata, 1, (PyObject*)self);
+
+    if (0 != sqlite_create_aggregate(self->p_db, name, n_args, &aggregate_step, &aggregate_finalize, (void*)userdata))
+    {
+        PyErr_SetString(_sqlite_ProgrammingError, "Cannot create aggregate.");
+        return NULL;
+    }
+    else
+    {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+static char _con_set_command_logfile_doc[] =
+"set_command_logfile(logfile)\n\
+Registers a writable file-like object as the logfile to which all executed\n\
+SQL commands are written.";
+
+static PyObject* _con_set_command_logfile(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    PyObject* logfile;
+    PyObject* o;
+
+    static char *kwlist[] = { "logfile", NULL };
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:set_command_logfile",
+                                      kwlist, &logfile))
+    {
+        return NULL;
+    }
+
+    if (logfile == Py_None)
+    {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+
+    o = PyObject_GetAttrString(logfile, "write");
+    if (!o)
+    {
+        PyErr_SetString(PyExc_ValueError, "logfile must have a 'write' attribute!");
+        return NULL;
+    }
+
+    if (!PyCallable_Check(o))
+    {
+        PyErr_SetString(PyExc_ValueError, "logfile must have a callable 'write' attribute!");
+        Py_DECREF(o);
+        return NULL;
+    }
+
+    Py_DECREF(o);
+    Py_INCREF(logfile);
+    self->command_logfile = logfile;
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+int sqlite_exec_callback(void* pArg, int argc, char **argv, char **columnNames)
+{
+    PyObject* parg;
+    PyObject* callback;
+    PyObject* arg1;
+    pysqlc* con;
+    PyObject* values;
+    PyObject* colnames;
+    PyObject* calling_args;
+    PyObject* function_result;
+    int i;
+
+    parg = (PyObject*)pArg;
+
+    callback = PyTuple_GetItem(parg, 0);
+    arg1 = PyTuple_GetItem(parg, 1);
+    con = (pysqlc*)PyTuple_GetItem(parg, 2);
+
+    MY_END_ALLOW_THREADS(con->tstate)
+
+    colnames = PyTuple_New(argc);
+    for (i = 0; i < argc; i++)
+    {
+        PyTuple_SetItem(colnames, i, PyString_FromString(columnNames[i]));
+    }
+
+    values = PyTuple_New(argc);
+    for (i = 0; i < argc; i++)
+    {
+        if (argv[i] == NULL)
+        {
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(values, i, Py_None);
+        }
+        else
+        {
+            PyTuple_SetItem(values, i, PyString_FromString(argv[i]));
+        }
+    }
+
+    calling_args = PyTuple_New(3);
+    Py_INCREF(arg1);
+    PyTuple_SetItem(calling_args, 0, arg1);
+    PyTuple_SetItem(calling_args, 1, values);
+    PyTuple_SetItem(calling_args, 2, colnames);
+
+    function_result = PyObject_CallObject(callback, calling_args);
+    if (PyErr_Occurred())
+    {
+        PRINT_OR_CLEAR_ERROR
+        MY_BEGIN_ALLOW_THREADS(con->tstate)
+        return 1;
+    }
+
+    Py_DECREF(function_result);
+    Py_DECREF(calling_args);
+
+    MY_BEGIN_ALLOW_THREADS(con->tstate)
+    return 0;
+}
+
+static char _con_sqlite_exec_doc[] =
+"sqlite_exec(sql, func, arg, use_types=0)\n\
+Execute SQL.\n\
+\n\
+    Executes the SQL string 'sql' and uses the callback function 'func' for\n\
+    each returned row. The argument 'arg' will be passed to the callback\n\
+    function.\n\
+    \n\
+    The signature of the callback function is (arg, values, colnames{, types}).\n\
+    types is omitted unless use_types is true. If you use 'use_types', you\n\
+    MUST have issued 'pragma show_datatypes=ON' before.";
+
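+/*
+** Illustrative sketch (not part of the module): given a low-level connection
+** 'con' returned by _sqlite.connect(), a callback for sqlite_exec() might
+** look like this (the table name is made up):
+**
+**     def callback(arg, values, colnames):
+**         print colnames, values
+**
+**     con.sqlite_exec("select * from people", callback, None)
+*/
+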
+static PyObject* _con_sqlite_exec(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"sql", "func", "arg", "use_types", NULL};
+    char* sql;
+    PyObject* callback;
+    PyObject* arg1;
+    int use_types = 0;
+
+    PyObject* cb_args;
+
+    /* TODO add errmsg handling */
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "sOO|i:sqlite_exec",
+                                      kwlist, &sql, &callback, &arg1, &use_types))
+    {
+        return NULL;
+    }
+
+    cb_args = PyTuple_New(3);
+    Py_INCREF(callback);
+    Py_INCREF(arg1);
+    Py_INCREF(self);
+    PyTuple_SetItem(cb_args, 0, callback);
+    PyTuple_SetItem(cb_args, 1, arg1);
+    PyTuple_SetItem(cb_args, 2, (PyObject*)self);
+
+    MY_BEGIN_ALLOW_THREADS(self->tstate)
+    sqlite_exec(self->p_db, sql, &sqlite_exec_callback, cb_args, NULL);
+    MY_END_ALLOW_THREADS(self->tstate)
+
+    Py_DECREF(cb_args);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static PyObject* _con_sqlite_last_insert_rowid(pysqlc *self, PyObject *args)
+{
+    PyObject* value;
+
+    if (!PyArg_ParseTuple(args,""))
+    {
+        return NULL;
+    }
+
+    value = PyInt_FromLong((long)sqlite_last_insert_rowid(self->p_db));
+
+    return value;
+}
+
+static PyObject* _con_sqlite_changes(pysqlc *self, PyObject *args)
+{
+    PyObject* value;
+
+    if (!PyArg_ParseTuple(args,""))
+    {
+        return NULL;
+    }
+
+    value = PyInt_FromLong((long)sqlite_changes(self->p_db));
+
+    return value;
+}
+
+static PyObject * sqlite_library_version(PyObject *self, PyObject *args)
+{
+    if (!PyArg_ParseTuple(args, ""))
+    {
+        return NULL;
+    }
+
+    return Py_BuildValue("s", sqlite_libversion());
+}
+
+static PyObject* sqlite_enable_callback_debugging(PyObject *self, PyObject *args)
+{
+    if (!PyArg_ParseTuple(args, "i", &debug_callbacks))
+    {
+        return NULL;
+    }
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static char pysqlite_encode_doc[] =
+"encode(s) -> encoded binary string.\n\
+Encode binary string 's' for storage in SQLite.";
+
+static PyObject* pysqlite_encode(PyObject *self, PyObject *args)
+{
+    char *in, *out;
+    int n;
+    PyObject *res;
+
+    if (!PyArg_ParseTuple(args, "s#", &in, &n))
+    {
+        return NULL;
+    }
+
+    /* See comments in encode.c for details on maximum size of encoded data. */
+    out = malloc(2 + (257 * (sqlite_uint64)n) / 254);
+    if (out == NULL)
+    {
+        return PyErr_NoMemory();
+    }
+    sqlite_encode_binary(in, n, out);
+    res = Py_BuildValue("s", out);
+    free(out);
+    return res;
+}
+
+static char pysqlite_decode_doc[] =
+"decode(s) -> decoded binary string.\n\
+Decode encoded binary string retrieved from SQLite.";
+
+static PyObject* pysqlite_decode(PyObject *self, PyObject *args)
+{
+    char *in, *out;
+    int n;
+    PyObject *res;
+
+    if (!PyArg_ParseTuple(args, "s", &in))
+    {
+        return NULL;
+    }
+
+    /* Decoded string is always shorter than encoded string. */
+    out = malloc(strlen(in));
+    if (out == NULL)
+    {
+        return PyErr_NoMemory();
+    }
+    n = sqlite_decode_binary(in, out);
+    res = Py_BuildValue("s#", out, n);
+    free(out);
+    return res;
+}
+
+static int _seterror(int returncode,  char *errmsg)
+{
+    switch (returncode)
+    {
+        case SQLITE_OK:
+            PyErr_Clear();
+            break;
+        case SQLITE_ERROR:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_INTERNAL:
+            PyErr_SetString(_sqlite_InternalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_PERM:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_ABORT:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_BUSY:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_LOCKED:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_NOMEM:
+            (void)PyErr_NoMemory();
+            break;
+        case SQLITE_READONLY:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_INTERRUPT:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_IOERR:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_CORRUPT:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_NOTFOUND:
+            PyErr_SetString(_sqlite_InternalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_FULL:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_CANTOPEN:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_PROTOCOL:
+            PyErr_SetString(_sqlite_OperationalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_EMPTY:
+            PyErr_SetString(_sqlite_InternalError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_SCHEMA:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_TOOBIG:
+            PyErr_SetString(_sqlite_DataError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_CONSTRAINT:
+            PyErr_SetString(_sqlite_IntegrityError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_MISMATCH:
+            PyErr_SetString(_sqlite_IntegrityError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        case SQLITE_MISUSE:
+            PyErr_SetString(_sqlite_ProgrammingError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+            break;
+        default:
+            PyErr_SetString(_sqlite_DatabaseError, (errmsg!=NULL)?errmsg:sqlite_error_string(returncode));
+    }
+    sqlite_freemem(errmsg);
+    return returncode;
+}
+
+static PyObject* _con_execute(pysqlc* self, PyObject *args)
+{
+    int ret;
+    int record_number;
+    char* sql;
+    pysqlrs* p_rset;
+    char *errmsg;
+    char* buf;
+    char* iterator;
+    char* token;
+    PyObject* logfile_writemethod;
+    PyObject* logfile_writeargs;
+
+    record_number = 0;
+
+    if(!PyArg_ParseTuple(args,"s:execute", &sql))
+    {
+        return NULL;
+    }
+
+    if(self->p_db == 0)
+    {
+        /* There is no open database. */
+        PyErr_SetString(_sqlite_ProgrammingError, "There is no open database.");
+        return NULL;
+    }
+
+    if(self->sql != NULL)
+    {
+            /* Free last SQL statement string */
+        free((void*)self->sql);
+        self->sql = NULL;
+    }
+
+    /* Save SQL statement */
+    self->sql = strdup(sql);
+
+    /* Log SQL statement */
+    if (self->command_logfile != Py_None)
+    {
+        logfile_writemethod = PyObject_GetAttrString(self->command_logfile,
+                                                    "write");
+        logfile_writeargs = PyTuple_New(1);
+        PyTuple_SetItem(logfile_writeargs, 0, PyString_FromString(sql));
+
+        PyObject_CallObject(logfile_writemethod, logfile_writeargs);
+
+        Py_DECREF(logfile_writeargs);
+
+        logfile_writeargs = PyTuple_New(1);
+        PyTuple_SetItem(logfile_writeargs, 0, PyString_FromString("\n"));
+        PyObject_CallObject(logfile_writemethod, logfile_writeargs);
+
+        Py_DECREF(logfile_writeargs);
+        Py_DECREF(logfile_writemethod);
+
+        if (PyErr_Occurred())
+        {
+            free((void*)(self->sql));
+            self->sql = NULL;
+            return NULL;
+        }
+    }
+
+    p_rset = PyObject_New(pysqlrs, &pysqlrs_Type);
+    if (p_rset == NULL)
+    {
+        return NULL;
+    }
+
+    Py_INCREF(self);
+    p_rset->con = self;
+    p_rset->p_row_list = PyList_New(0);
+    p_rset->p_col_def_list = NULL;
+    p_rset->row_count = 0;
+
+    if (strstr(sql, "-- types "))
+    {
+        Py_DECREF(self->expected_types);
+        self->expected_types = PyList_New(0);
+        if (PyErr_Occurred())
+        {
+            Py_INCREF(Py_None);
+            self->expected_types = Py_None;
+            return NULL;
+        }
+
+        if ((buf = strdup(sql)) == NULL)
+        {
+            PyErr_SetString(PyExc_MemoryError, "Cannot allocate buffer for copying SQL statement!");
+            return NULL;
+        }
+
+        iterator = buf + strlen("-- types ");
+
+        if (*iterator == 0)
+        {
+            free(buf);
+            PyErr_SetString(PyExc_ValueError, "Illegal pragma!");
+            return NULL;
+        }
+
+        while (iterator != NULL)
+        {
+            token = pysqlite_strsep(&iterator, ",");
+            while (*token == ' ')
+            {
+                token++;
+            }
+
+            PyList_Append(self->expected_types, Py_BuildValue("s", token));
+        }
+
+        free(buf);
+        p_rset->p_col_def_list = PyTuple_New(0);
+        return (PyObject*)p_rset;
+    }
+
+    /* Run a query: process_record is called back for each record returned. */
+    MY_BEGIN_ALLOW_THREADS(self->tstate)
+    ret = sqlite_exec( self->p_db,
+                       sql,
+                       process_record,
+                       p_rset,
+                       &errmsg);
+    MY_END_ALLOW_THREADS(self->tstate)
+
+    Py_DECREF(self->expected_types);
+    Py_INCREF(Py_None);
+    self->expected_types = Py_None;
+
+    /* An error may have occurred in a user-defined function */
+    if (PyErr_Occurred())
+    {
+        free((void*)(self->sql));
+        self->sql = NULL;
+        Py_DECREF(p_rset);
+        return NULL;
+    }
+
+    if (p_rset->p_col_def_list == NULL)
+    {
+        p_rset->p_col_def_list = PyTuple_New(0);
+    }
+
+    if(_seterror(ret, errmsg) != SQLITE_OK)
+    {
+        free((void*)(self->sql));
+        self->sql = NULL;
+        Py_DECREF(p_rset);
+        return NULL;
+    }
+
+    return (PyObject*)p_rset;
+}
+
+int process_record(void* p_data, int num_fields, char** p_fields, char** p_col_names)
+{
+    int i;
+    pysqlrs* p_rset;
+    PyObject* p_row;
+    PyObject* p_col_def;
+
+    int l, j;
+    char type_name[255];
+    PyObject* type_code;
+
+    PyObject* expected_types;
+    PyObject* expected_type_name = NULL;
+    PyObject* converters;
+    PyObject* converted;
+    PyObject* callable;
+    PyObject* callable_args;
+
+    p_rset = (pysqlrs*)p_data;
+    MY_END_ALLOW_THREADS(p_rset->con->tstate)
+
+    expected_types = p_rset->con->expected_types;
+    converters = p_rset->con->converters;
+
+    if(p_rset->row_count == 0)
+    {
+        if ((p_rset->p_col_def_list = PyTuple_New(num_fields)) == NULL)
+        {
+            PRINT_OR_CLEAR_ERROR
+            MY_BEGIN_ALLOW_THREADS(p_rset->con->tstate)
+            return 1;
+        }
+
+        for (i=0; i < num_fields; i++)
+        {
+            p_col_def = PyTuple_New(7);
+
+            /* 1. Column Name */
+            PyTuple_SetItem(p_col_def, 0, Py_BuildValue("s", p_col_names[i]));
+
+            /* 2. Type code */
+            /* Make a copy of column type. */
+            if (p_col_names[num_fields + i] == NULL)
+            {
+                strcpy(type_name, "TEXT");
+            }
+            else
+            {
+                strncpy(type_name, p_col_names[num_fields + i], sizeof(type_name) - 1);
+                /* strncpy does not guarantee termination; terminate explicitly */
+                type_name[sizeof(type_name) - 1] = '\0';
+            }
+
+            /* Get its length. */
+            l = strlen(type_name);
+
+            /* Convert to uppercase. */
+            for(j=0; j < l; j++)
+            {
+                type_name[j] = toupper(type_name[j]);
+            }
+
+            /* Init/unset value */
+            type_code = NULL;
+
+            /* Try to determine column type. */
+            if (strstr(type_name, "INTERVAL"))
+            {
+                type_code = tc_INTERVAL;
+            }
+            else if (strstr(type_name, "INT"))
+            {
+                type_code = tc_INTEGER;
+            }
+            else if (strstr(type_name, "CHAR")
+                  || strstr(type_name, "TEXT"))
+            {
+                type_code = tc_STRING;
+            }
+            else if (strstr(type_name, "UNICODE"))
+            {
+                type_code = tc_UNICODESTRING;
+            }
+            else if (strstr(type_name, "BINARY")
+                  || strstr(type_name, "BLOB"))
+            {
+                type_code = tc_BINARY;
+            }
+            else if (strstr(type_name, "FLOAT")
+                  || strstr(type_name, "NUMERIC")
+                  || strstr(type_name, "NUMBER")
+                  || strstr(type_name, "DECIMAL")
+                  || strstr(type_name, "REAL")
+                  || strstr(type_name, "DOUBLE"))
+            {
+                type_code = tc_FLOAT;
+            }
+            else if (strstr(type_name, "TIMESTAMP"))
+            {
+                type_code = tc_TIMESTAMP;
+            }
+            else if (strstr(type_name, "DATE"))
+            {
+                type_code = tc_DATE;
+            }
+            else if (strstr(type_name, "TIME"))
+            {
+                type_code = tc_TIME;
+            }
+            else if (type_code == NULL)
+            {
+                type_code = Py_None;
+            }
+
+            /* Assign type. */
+            Py_INCREF(type_code);
+            PyTuple_SetItem(p_col_def, 1, type_code);
+
+            /* 3. Display Size */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 2, Py_None);
+
+            /* 4. Internal Size */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 3, Py_None);
+
+            /* 5. Precision */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 4, Py_None);
+
+            /* 6. Scale */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 5, Py_None);
+
+            /* 7. NULL Okay */
+            Py_INCREF(Py_None);
+            PyTuple_SetItem(p_col_def, 6, Py_None);
+
+            PyTuple_SetItem(p_rset->p_col_def_list, i, p_col_def);
+        }
+    }
+
+    if (p_fields != NULL)
+    {
+        /* Create a row */
+        p_row = PyTuple_New(num_fields);
+
+        p_rset->row_count++;
+
+        for (i=0; i < num_fields; i++)
+        {
+            /* Store the field value */
+            if(p_fields[i] != 0)
+            {
+                p_col_def = PyTuple_GetItem(p_rset->p_col_def_list, i);
+
+                type_code = PyTuple_GetItem(p_col_def, 1);
+
+                if (expected_types != Py_None)
+                {
+                    if (i < PySequence_Length(expected_types))
+                    {
+                        expected_type_name = PySequence_GetItem(expected_types, i);
+                        callable = PyDict_GetItem(converters, expected_type_name);
+                        if (callable == NULL)
+                        {
+                            Py_INCREF(Py_None);
+                            PyTuple_SetItem(p_row, i, Py_None);
+                        }
+                        else
+                        {
+                            callable_args = PyTuple_New(1);
+                            PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                            converted = PyObject_CallObject(callable, callable_args);
+                            if (PyErr_Occurred())
+                            {
+                                PRINT_OR_CLEAR_ERROR
+                                Py_INCREF(Py_None);
+                                converted = Py_None;
+                            }
+
+                            PyTuple_SetItem(p_row, i, converted);
+
+                            Py_DECREF(callable_args);
+                        }
+                    }
+                    else
+                    {
+                        Py_INCREF(Py_None);
+                        PyTuple_SetItem(p_row, i, Py_None);
+                    }
+                }
+                else if (type_code == tc_INTEGER)
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("i", atol(p_fields[i])));
+                }
+                else if (type_code == tc_FLOAT)
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("f", atof(p_fields[i])));
+                }
+                else if (type_code == tc_DATE || type_code == tc_TIME
+                        || type_code == tc_TIMESTAMP || type_code == tc_INTERVAL)
+                {
+                    if (type_code == tc_DATE)
+                        expected_type_name = PyString_FromString("date");
+                    else if (type_code == tc_TIME)
+                        expected_type_name = PyString_FromString("time");
+                    else if (type_code == tc_TIMESTAMP)
+                        expected_type_name = PyString_FromString("timestamp");
+                    else if (type_code == tc_INTERVAL)
+                        expected_type_name = PyString_FromString("interval");
+
+                    callable = PyDict_GetItem(converters, expected_type_name);
+                    if (callable == NULL)
+                    {
+                        PyTuple_SetItem(p_row, i, PyString_FromString(p_fields[i]));
+                    }
+                    else
+                    {
+                        callable_args = PyTuple_New(1);
+                        PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                        converted = PyObject_CallObject(callable, callable_args);
+                        if (PyErr_Occurred())
+                        {
+                            PRINT_OR_CLEAR_ERROR
+                            converted = PyString_FromString(p_fields[i]);
+                        }
+
+                        PyTuple_SetItem(p_row, i, converted);
+
+                        Py_DECREF(callable_args);
+                    }
+
+                    Py_DECREF(expected_type_name);
+                }
+                else if ((type_code == tc_UNICODESTRING) || (type_code == tc_BINARY))
+                {
+                    if (type_code == tc_UNICODESTRING)
+                        expected_type_name = PyString_FromString("unicode");
+                    else
+                        expected_type_name = PyString_FromString("binary");
+
+                    callable = PyDict_GetItem(converters, expected_type_name);
+
+                    if (callable == NULL)
+                    {
+                        PyTuple_SetItem(p_row, i, PyString_FromString(p_fields[i]));
+                    }
+                    else
+                    {
+                        callable_args = PyTuple_New(1);
+                        PyTuple_SetItem(callable_args, 0, Py_BuildValue("s", p_fields[i]));
+
+                        converted = PyObject_CallObject(callable, callable_args);
+                        if (PyErr_Occurred())
+                        {
+                            PRINT_OR_CLEAR_ERROR
+                            converted = PyString_FromString(p_fields[i]);
+                        }
+
+                        PyTuple_SetItem(p_row, i, converted);
+
+                        Py_DECREF(callable_args);
+                    }
+
+                    Py_DECREF(expected_type_name);
+                }
+                else
+                {
+                    PyTuple_SetItem(p_row, i, Py_BuildValue("s", p_fields[i]));
+                }
+            }
+            else
+            {
+                /* A NULL field */
+                Py_INCREF(Py_None);
+                PyTuple_SetItem(p_row, i, Py_None);
+            }
+        }
+
+        PyList_Append(p_rset->p_row_list, p_row);
+        Py_DECREF(p_row);
+    }
+
+    MY_BEGIN_ALLOW_THREADS(p_rset->con->tstate)
+    return 0;
+}
+
+static PyObject* _con_register_converter(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    static char *kwlist[] = { "name", "converter", NULL };
+
+    PyObject* name;
+    PyObject* converter;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO:register_converter",
+                                      kwlist, &name, &converter))
+    {
+        return NULL;
+    }
+
+    if (!PyString_Check(name)) {
+        PyErr_SetString(PyExc_ValueError, "name must be a string");
+        return NULL;
+    }
+
+    PyDict_SetItem(self->converters, name, converter);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static PyObject* _con_set_expected_types(pysqlc* self, PyObject *args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"types", NULL};
+
+    PyObject* types;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:set_expected_types", kwlist, &types))
+    {
+        return NULL;
+    }
+
+    if ((types != Py_None) && (!PySequence_Check(types)))
+    {
+        PyErr_SetString(PyExc_ValueError, "types must be a sequence");
+        return NULL;
+    }
+
+    Py_DECREF(self->expected_types);
+
+    Py_INCREF(types);
+    self->expected_types = types;
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+/*------------------------------------------------------------------------------
+** Result Set Object Implementation
+**------------------------------------------------------------------------------
+*/
+
+static struct memberlist _rs_memberlist[] =
+{
+    {"row_list", T_OBJECT, offsetof(pysqlrs, p_row_list),     RO},
+    {"col_defs", T_OBJECT, offsetof(pysqlrs, p_col_def_list), RO},
+    {"rowcount", T_INT, offsetof(pysqlrs, row_count),         RO},
+    {NULL}
+};
+
+static PyMethodDef _rs_methods[] =
+{
+    { NULL, NULL}
+};
+
+static void
+_rs_dealloc(pysqlrs* self)
+{
+    if(self)
+    {
+        Py_DECREF(self->con);
+
+        if(self->p_row_list != 0)
+        {
+            Py_DECREF(self->p_row_list);
+
+            self->p_row_list = 0;
+        }
+
+        if(self->p_col_def_list != 0)
+        {
+            Py_DECREF(self->p_col_def_list);
+
+            self->p_col_def_list = 0;
+        }
+
+        PyObject_Del(self);
+    }
+}
+
+static PyObject* _rs_get_attr(pysqlrs *self, char *attr)
+{
+    PyObject *res;
+
+    res = Py_FindMethod(_rs_methods, (PyObject *) self,attr);
+
+    if(NULL != res)
+    {
+        return res;
+    }
+    else
+    {
+        PyErr_Clear();
+        return PyMember_Get((char *) self, _rs_memberlist, attr);
+    }
+}
+
+static PyObject* sqlite_version_info(PyObject* self, PyObject* args)
+{
+    PyObject* vi_list;
+    PyObject* vi_tuple;
+    char* buf;
+    char* iterator;
+    char* token;
+
+    if (!PyArg_ParseTuple(args, ""))
+    {
+        return NULL;
+    }
+
+    buf = strdup(sqlite_libversion());
+    iterator = buf;
+
+    vi_list = PyList_New(0);
+
+    while ((token = pysqlite_strsep(&iterator, ".")) != NULL)
+    {
+        /* PyInt_FromLong returns a new reference and PyList_Append adds its
+           own, so drop ours to avoid leaking one int per version component. */
+        PyObject* vi_item = PyInt_FromLong((long)atoi(token));
+        PyList_Append(vi_list, vi_item);
+        Py_DECREF(vi_item);
+    }
+
+    /* The strdup()ed version string is no longer needed. */
+    free(buf);
+
+    vi_tuple = PyList_AsTuple(vi_list);
+    Py_DECREF(vi_list);
+
+    return vi_tuple;
+}
+
+
+/*------------------------------------------------------------------------------
+** Module Definitions / Initialization
+**------------------------------------------------------------------------------
+*/
+static PyMethodDef pysqlite_functions[] =
+{
+    { "connect", (PyCFunction)pysqlite_connect, METH_VARARGS | METH_KEYWORDS, pysqlite_connect_doc},
+    { "sqlite_version", (PyCFunction)sqlite_library_version, METH_VARARGS},
+    { "sqlite_version_info", (PyCFunction)sqlite_version_info, METH_VARARGS},
+    { "enable_callback_debugging", (PyCFunction)sqlite_enable_callback_debugging, METH_VARARGS},
+    { "encode", (PyCFunction)pysqlite_encode, METH_VARARGS, pysqlite_encode_doc},
+    { "decode", (PyCFunction)pysqlite_decode, METH_VARARGS, pysqlite_decode_doc},
+    { NULL, NULL }
+};
+
+/*------------------------------------------------------------------------------
+** Connection Object Implementation
+**------------------------------------------------------------------------------
+*/
+
+static struct memberlist _con_memberlist[] =
+{
+    {"sql",             T_STRING, offsetof(pysqlc, sql), RO},
+    {"filename",        T_STRING, offsetof(pysqlc, database_name), RO},
+    {NULL}
+};
+
+static PyMethodDef _con_methods[] =
+{
+    {"close", (PyCFunction) _con_close, METH_VARARGS, _con_close_doc},
+    {"execute",  (PyCFunction)_con_execute, METH_VARARGS},
+    {"register_converter", (PyCFunction)_con_register_converter, METH_VARARGS | METH_KEYWORDS},
+    {"set_expected_types", (PyCFunction)_con_set_expected_types, METH_VARARGS | METH_KEYWORDS},
+    {"set_command_logfile", (PyCFunction)_con_set_command_logfile, METH_VARARGS | METH_KEYWORDS, _con_set_command_logfile_doc},
+    {"create_function", (PyCFunction)_con_create_function, METH_VARARGS | METH_KEYWORDS, _con_create_function_doc},
+    {"create_aggregate", (PyCFunction)_con_create_aggregate, METH_VARARGS | METH_KEYWORDS, _con_create_aggregate_doc},
+    {"sqlite_exec", (PyCFunction)_con_sqlite_exec, METH_VARARGS | METH_KEYWORDS, _con_sqlite_exec_doc},
+    {"sqlite_last_insert_rowid", (PyCFunction)_con_sqlite_last_insert_rowid, METH_VARARGS},
+    {"sqlite_changes", (PyCFunction)_con_sqlite_changes, METH_VARARGS},
+    {"sqlite_busy_handler", (PyCFunction)_con_sqlite_busy_handler, METH_VARARGS | METH_KEYWORDS, _con_sqlite_busy_handler_doc},
+    {"sqlite_busy_timeout", (PyCFunction)_con_sqlite_busy_timeout, METH_VARARGS | METH_KEYWORDS, _con_sqlite_busy_timeout_doc},
+    { NULL, NULL}
+};
+
+PySQLite_MODINIT_FUNC(init_sqlite)
+{
+    PyObject *module, *dict;
+    PyObject* sqlite_version;
+    PyObject* args;
+    long tc = 0L;
+
+    pysqlc_Type.ob_type = &PyType_Type;
+    pysqlrs_Type.ob_type = &PyType_Type;
+
+    module = Py_InitModule("_sqlite", pysqlite_functions);
+
+    if (!(dict = PyModule_GetDict(module)))
+    {
+        goto error;
+    }
+
+    required_sqlite_version = PyTuple_New(3);
+    PyTuple_SetItem(required_sqlite_version, 0, PyInt_FromLong((long)2));
+    PyTuple_SetItem(required_sqlite_version, 1, PyInt_FromLong((long)5));
+    PyTuple_SetItem(required_sqlite_version, 2, PyInt_FromLong((long)6));
+
+    args = PyTuple_New(0);
+    sqlite_version = sqlite_version_info(NULL, args);
+    Py_DECREF(args);
+    if (PyObject_Compare(sqlite_version, required_sqlite_version) < 0)
+    {
+        Py_DECREF(sqlite_version);
+        PyErr_SetString(PyExc_ImportError, "Need to be linked against SQLite 2.5.6 or higher.");
+        return;
+    }
+    Py_DECREF(sqlite_version);
+
+    /*** Initialize type codes */
+    tc_INTEGER = PyInt_FromLong(tc++);
+    tc_FLOAT = PyInt_FromLong(tc++);
+    tc_TIMESTAMP = PyInt_FromLong(tc++);
+    tc_DATE = PyInt_FromLong(tc++);
+    tc_TIME = PyInt_FromLong(tc++);
+    tc_INTERVAL = PyInt_FromLong(tc++);
+    tc_STRING = PyInt_FromLong(tc++);
+    tc_UNICODESTRING = PyInt_FromLong(tc++);
+    tc_BINARY = PyInt_FromLong(tc++);
+
+    PyDict_SetItemString(dict, "INTEGER", tc_INTEGER);
+    PyDict_SetItemString(dict, "FLOAT", tc_FLOAT);
+    PyDict_SetItemString(dict, "TIMESTAMP", tc_TIMESTAMP);
+    PyDict_SetItemString(dict, "DATE", tc_DATE);
+    PyDict_SetItemString(dict, "TIME", tc_TIME);
+    PyDict_SetItemString(dict, "INTERVAL", tc_INTERVAL);
+    PyDict_SetItemString(dict, "STRING", tc_STRING);
+    PyDict_SetItemString(dict, "UNICODESTRING", tc_UNICODESTRING);
+    PyDict_SetItemString(dict, "BINARY", tc_BINARY);
+
+    /*** Create DB-API Exception hierarchy */
+
+    _sqlite_Error = PyErr_NewException("_sqlite.Error", PyExc_StandardError, NULL);
+    PyDict_SetItemString(dict, "Error", _sqlite_Error);
+
+    _sqlite_Warning = PyErr_NewException("_sqlite.Warning", PyExc_StandardError, NULL);
+    PyDict_SetItemString(dict, "Warning", _sqlite_Warning);
+
+    /* Error subclasses */
+
+    _sqlite_InterfaceError = PyErr_NewException("_sqlite.InterfaceError", _sqlite_Error, NULL);
+    PyDict_SetItemString(dict, "InterfaceError", _sqlite_InterfaceError);
+
+    _sqlite_DatabaseError = PyErr_NewException("_sqlite.DatabaseError", _sqlite_Error, NULL);
+    PyDict_SetItemString(dict, "DatabaseError", _sqlite_DatabaseError);
+
+    /* DatabaseError subclasses */
+
+    _sqlite_InternalError = PyErr_NewException("_sqlite.InternalError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "InternalError", _sqlite_InternalError);
+
+    _sqlite_OperationalError = PyErr_NewException("_sqlite.OperationalError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "OperationalError", _sqlite_OperationalError);
+
+    _sqlite_ProgrammingError = PyErr_NewException("_sqlite.ProgrammingError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "ProgrammingError", _sqlite_ProgrammingError);
+
+    _sqlite_IntegrityError = PyErr_NewException("_sqlite.IntegrityError", _sqlite_DatabaseError,NULL);
+    PyDict_SetItemString(dict, "IntegrityError", _sqlite_IntegrityError);
+
+    _sqlite_DataError = PyErr_NewException("_sqlite.DataError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "DataError", _sqlite_DataError);
+
+    _sqlite_NotSupportedError = PyErr_NewException("_sqlite.NotSupportedError", _sqlite_DatabaseError, NULL);
+    PyDict_SetItemString(dict, "NotSupportedError", _sqlite_NotSupportedError);
+
+  error:
+
+    if (PyErr_Occurred())
+    {
+        PyErr_SetString(PyExc_ImportError, "sqlite: init failed");
+    }
+}
diff --git a/debian/changelog b/debian/changelog
new file mode 100644 (file)
index 0000000..4893a9f
--- /dev/null
@@ -0,0 +1,80 @@
+python-sqlite (0.5.1-1) unstable; urgency=low
+
+  * New upstream release (closes: bug#232620).
+  * Updated Standards-Version to 3.6.1.
+  * Don't include *.pyc files in package.
+
+ -- Joel Rosdahl <joel@debian.org>  Sun,  4 Jul 2004 10:47:46 +0200
+
+python-sqlite (0.4.3-2) unstable; urgency=low
+
+  * Use Python 2.3 as default Debian version.
+  * Updated Standards-Version to 3.6.0.
+
+ -- Joel Rosdahl <joel@debian.org>  Sat,  9 Aug 2003 16:48:53 +0200
+
+python-sqlite (0.4.3-1) unstable; urgency=low
+
+  * New upstream version.
+  * Put packages in the python section.
+
+ -- Joel Rosdahl <joel@debian.org>  Wed,  4 Jun 2003 08:01:18 +0200
+
+python-sqlite (0.4.2-1) unstable; urgency=low
+
+  * Official Debian version.
+  * Updated Standards-Version to 3.5.10.
+
+ -- Joel Rosdahl <joel@debian.org>  Tue,  3 Jun 2003 08:25:51 +0200
+
+python-sqlite (0.4.2-0.1) unstable; urgency=low
+
+  * Version number is 0.4.2 now.
+
+ -- Gerhard Häring <gh@ghaering.de>  Sat, 31 May 2003 16:11:03 +2000
+
+python-sqlite (0.4.1-2) unstable; urgency=low
+
+  * Changed architecture of python-sqlite binary package to all.
+  * Added Debian package creator and upstream maintainers to copyright
+    file.
+
+ -- Joel Rosdahl <joel@debian.org>  Sun, 23 Mar 2003 21:19:17 +0100
+
+python-sqlite (0.4.1-1) unstable; urgency=low
+
+  * First official Debian version. Closes: bug#163027.
+
+ -- Joel Rosdahl <joel@debian.org>  Sun, 23 Mar 2003 17:02:37 +0100
+
+python-sqlite (0.4.1-0.1) unstable; urgency=low
+
+  * Version number is 0.4.1 now.
+
+ -- Gerhard Häring <gerhard.haering@gmx.de>  Tue, 14 Feb 2003 22:24:00 +2000
+
+python-sqlite (0.4.0-0.1) unstable; urgency=low
+
+  * Version number is 0.4.0 now.
+
+ -- Gerhard Häring <gerhard.haering@gmx.de>  Tue, 11 Feb 2003 10:47:00 +2000
+
+python-sqlite (0.3.2-0.1) unstable; urgency=low
+
+  * Depend on libsqlite0 instead of libsqlite.
+  * Update to version 0.3.2.
+
+ -- Gerhard Häring <gerhard.haering@gmx.de>  Tue, 21 Jan 2003 01:12:35 +2000
+
+python-sqlite (0.3.1-0.1) unstable; urgency=low
+
+  * Update to version 0.3.1.
+
+ -- Gerhard Häring <gerhard.haering@gmx.de>  Mon, 26 Aug 2002 19:59:37 +2000
+
+python-sqlite (0.3.0-0.1) unstable; urgency=low
+
+  * First try at a Debian package.
+
+ -- Gerhard Häring <gerhard.haering@gmx.de>  Mon, 26 Aug 2002 19:59:37 +2000
+
diff --git a/debian/control b/debian/control
new file mode 100644 (file)
index 0000000..b29d25a
--- /dev/null
@@ -0,0 +1,55 @@
+Source: python-sqlite
+Section: python
+Priority: optional
+Maintainer: Joel Rosdahl <joel@debian.org>
+Build-Depends: debhelper (>> 3.0.0), libsqlite0-dev (>= 2.5.6), python2.1-dev, python2.2-dev, python2.3-dev
+Standards-Version: 3.6.1
+
+Package: python2.1-sqlite
+Architecture: any
+Depends: ${shlibs:Depends}, python2.1
+Recommends: python-egenix-mxdatetime
+Description: Python interface to SQLite
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ This Debian package is built for Python 2.1.
+
+Package: python2.2-sqlite
+Architecture: any
+Depends: ${shlibs:Depends}, python2.2
+Recommends: python-egenix-mxdatetime
+Description: Python interface to SQLite
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ This Debian package is built for Python 2.2.
+
+Package: python2.3-sqlite
+Architecture: any
+Depends: ${shlibs:Depends}, python2.3
+Recommends: python-egenix-mxdatetime
+Description: Python interface to SQLite
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ This Debian package is built for Python 2.3.
+
+Package: python-sqlite
+Architecture: all
+Depends: python (>= 2.3), python (<< 2.4), python2.3-sqlite
+Conflicts:
+Description: Python interface to SQLite
+ pysqlite is an interface to the SQLite database server for
+ Python. It aims to be fully compliant with Python database
+ API version 2.0 while also exploiting the unique features of
+ SQLite.
+ .
+ This Debian package is an empty dummy package that always depends on
+ a package built for Debian's default Python version.
diff --git a/debian/copyright b/debian/copyright
new file mode 100644 (file)
index 0000000..aba4d57
--- /dev/null
@@ -0,0 +1,21 @@
+This Debian package was created by Joel Rosdahl <joel@debian.org>.
+
+The upstream source was found on the following address:
+
+    http://pysqlite.sourceforge.net/downloads/
+
+Upstream authors:
+
+    Michael Owens <mike@mikesclutter.com>
+    Gerhard Häring <gh@ghaering.de>
+
+License:
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted, provided that
+the above copyright notice appear in all copies and that both that copyright
+notice and this permission notice appear in supporting documentation.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.
diff --git a/debian/rules b/debian/rules
new file mode 100644 (file)
index 0000000..56f865a
--- /dev/null
@@ -0,0 +1,77 @@
+#!/usr/bin/make -f
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+# This is the debhelper compatibility version to use.
+export DH_COMPAT=3
+
+PYTHON2.1 = /usr/bin/python2.1
+PYTHON2.2 = /usr/bin/python2.2
+PYTHON2.3 = /usr/bin/python2.3
+
+configure: configure-stamp
+configure-stamp:
+       dh_testdir
+       touch configure-stamp
+
+build: configure-stamp build-stamp
+build-stamp:
+       dh_testdir
+       $(PYTHON2.1) setup.py build
+       $(PYTHON2.2) setup.py build
+       $(PYTHON2.3) setup.py build
+       touch build-stamp
+
+clean:
+       dh_testdir
+       dh_testroot
+       rm -f build-stamp configure-stamp
+       rm -rf build
+       dh_clean
+
+install: build
+       dh_testdir
+       dh_testroot
+       dh_clean -k
+       dh_installdirs
+
+       $(PYTHON2.1) setup.py install --root=debian/python2.1-sqlite
+       $(PYTHON2.2) setup.py install --root=debian/python2.2-sqlite
+       $(PYTHON2.3) setup.py install --root=debian/python2.3-sqlite
+       find debian/python*-sqlite -name '*.pyc' | xargs rm -f
+
+# Build architecture-independent files here.
+binary-indep: build install
+       dh_testdir -i
+       dh_testroot -i
+
+       dh_installdocs -i
+       dh_installchangelogs -i
+       dh_link -i
+       dh_compress -i
+       dh_fixperms -i
+       dh_installdeb -i
+       dh_gencontrol -i
+       dh_md5sums -i
+       dh_builddeb -i
+
+# Build architecture-dependent files here.
+binary-arch: build install
+       dh_testdir -a
+       dh_testroot -a
+       dh_installdocs -a
+       dh_installexamples -a
+
+       dh_installchangelogs -a
+       dh_strip -a
+       dh_compress -a
+       dh_fixperms -a
+       dh_installdeb -a
+       dh_shlibdeps -a
+       dh_gencontrol -a
+       dh_md5sums -a
+       dh_builddeb -a
+
+binary: binary-indep binary-arch
+.PHONY: build clean binary-indep binary-arch binary install configure
diff --git a/doc/rest/manual.txt b/doc/rest/manual.txt
new file mode 100644 (file)
index 0000000..af246dc
--- /dev/null
@@ -0,0 +1,387 @@
+-----------------------------------------------------------------\r
+PySQLite: Python DB-API 2.0 Compliant Interface Module for SQLite\r
+-----------------------------------------------------------------\r
+\r
+These are the beginnings of a new manual.\r
+\r
+This document was last updated for PySQLite version 1.0.\r
+\r
+===============\r
+0. Front Matter\r
+===============\r
+\r
+0.1 Copyright notice and License\r
+--------------------------------\r
+\r
+(c) 2002 Michael Owens\r
+(c) 2002-2004 Gerhard Häring\r
+\r
+Permission to use, copy, modify, and distribute this software and its\r
+documentation for any purpose and without fee is hereby granted, provided that\r
+the above copyright notice appear in all copies and that both that copyright\r
+notice and this permission notice appear in supporting documentation.\r
+\r
+This program is distributed in the hope that it will be useful, but WITHOUT\r
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\r
+FOR A PARTICULAR PURPOSE.\r
+\r
+0.2 Abstract\r
+------------\r
+\r
+SQLite is a powerful embedded relational database management system in a\r
+compact C library, developed by D. Richard Hipp. It offers support for a large\r
+subset of SQL92, multiple tables and indices, transactions, not to mention ODBC\r
+drivers. The library is self-contained and implemented in under 20,000 lines of\r
+code, which itself is uncopyrighted, and free to use for any purpose. It runs\r
+on a wide variety of platforms including Linux, FreeBSD, Windows, Windows CE\r
+and many others.\r
+\r
+PySQLite makes this powerful yet small database engine available to Python\r
+developers via the Python Database API Specification 2.0. The ambition is to\r
+expose all of SQLite's functionality in a pythonic way, and to offer additional\r
+ease-of-use features.\r
+\r
+===================================\r
+1. Building and installing PySQLite\r
+===================================\r
+\r
+1.1 Installing binary packages\r
+------------------------------\r
+\r
+The PySQLite developers currently offer binary packages for Microsoft Windows.\r
+Just be sure to pick the right one for your Python version (the correct\r
+download for Python 2.3.x will end in py2.3.exe, for example).\r
+\r
+Other binary packages are offered by third parties. Debian GNU/Linux for\r
+example has binaries for PySQLite in its repository.\r
+\r
+1.2 Installing PySQLite from source\r
+-----------------------------------\r
+\r
+First, make sure you have the SQLite library and header files installed. Unless\r
+it's packaged for your OS, this means building and installing SQLite from\r
+source. How to do this is beyond the scope of this manual. We'll refer you to\r
+the SQLite documentation instead. Just one important note: Be sure to compile\r
+SQLite with thread support, i.e. make sure that -DTHREADSAFE=1 is defined\r
+while compiling it.\r
+\r
+Next, be sure that you have a complete Python development environment for C\r
+extensions. This means:\r
+\r
+- the Python interpreter\r
+- the Python development libraries and headers: on some Linux distributions,\r
+  these need to be installed separately; the package will be called python-dev,\r
+  python-devel or some such\r
+- a C compiler, like the GNU C Compiler (gcc)\r
+\r
+Now you can try to build PySQLite with::\r
+\r
+    $ python setup.py build\r
+\r
+The paths to the SQLite headers and libraries should be found automatically,\r
+but if they're not, you'll have to edit setup.py manually, as sketched below.\r
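+\r
+For example, assuming SQLite were installed under the hypothetical prefix\r
+/opt/sqlite, the relevant variables in setup.py might be changed like this\r
+(only a sketch, adapt the paths to your system)::\r
+\r
+    include_dirs = ['/opt/sqlite/include']\r
+    library_dirs = ['/opt/sqlite/lib']\r
+    libraries = ["sqlite"]\r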
+\r
+Once you have built PySQLite, it's time to install it. Normally you'll have to\r
+do this step as a system administrator (on Unix-like systems this means you'll\r
+have to become root)::\r
+\r
+    $ python setup.py install\r
+\r
+Now's a good time to run the included test suite::\r
+\r
+    $ cd test\r
+    $ python all_tests.py\r
+\r
+Be sure that all tests passed correctly.\r
+\r
+1.3 Make date functionality work\r
+--------------------------------\r
+\r
+If you plan to use the SQL types date, timestamp or interval you'll have to\r
+have the mxDateTime package from the eGenix mxExtensions installed.\r
+\r
+Get it here if you don't have it installed already:\r
+http://www.egenix.com/files/python/eGenix-mx-Extensions.html#Download-mxBASE\r
+\r
+If you're on a free Unix, your distribution most probably packages it for you,\r
+too.\r
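+\r
+A quick way to check whether mxDateTime is already installed (just a\r
+suggestion, PySQLite itself does not need this command)::\r
+\r
+    $ python -c "import mx.DateTime; print mx.DateTime.now()"\r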
+\r
+===============================\r
+2. Connecting to the database\r
+===============================\r
+\r
+2.1 Basic usage\r
+---------------\r
+\r
+Connecting to a database file "db"::\r
+\r
+    import sqlite\r
+    cx = sqlite.connect("db")\r
+\r
+\r
+2.2 Parameters explained\r
+------------------------\r
+\r
+In the most basic usage, we only used the database parameter, which is the\r
+database file we want SQLite to use, or ":memory:" if we want to use an in-RAM\r
+database.\r
+\r
+Of course there are more parameters::\r
+\r
+    def connect(database, mode=0755, converters={}, autocommit=0,\r
+                encoding=None, timeout=None, command_logfile=None)\r
+\r
+:mode:  This parameter is passed through to SQLite and means the mode in which\r
+        to open the file. The idea is to be able to open the database file in\r
+        read-only mode. But it's currently ignored by SQLite so just forget\r
+        about it.\r
+\r
+:converters:\r
+        The converters are a mapping from SQL type names to Python conversion\r
+        callables. You'll only need to define any of these if you want to make\r
+        PySQLite handle a user-defined type of yours transparently.\r
+\r
+        => examples/converters.py\r
+\r
+:autocommit:\r
+        see => Transaction management\r
+\r
+:encoding:\r
+    This is an important parameter if you use unicode strings. It can either be\r
+    a simple encoding name, like "utf-8", or it can be a tuple of encoding name\r
+    and error policy, like ("utf-8", "ignore"), or ("utf-16", "replace"). Cf.\r
+    the documentation about the unicode builtin for possible error policies.\r
+    It determines the encoding in which Unicode strings are stored in the\r
+    SQLite database, and the encoding from which Unicode strings are\r
+    constructed when they are read back.\r
+\r
+:timeout:\r
+    A timeout value in seconds, e.g. timeout=1.5. An SQLite database can be\r
+    locked by a different connection that is in a transaction. The timeout\r
+    value specifies how long to wait for the lock to be released. If the lock\r
+    is still not released after /timeout/ seconds, a DatabaseError is raised.\r
+\r
+:command_logfile:\r
+    A file-like object (anything that has a write method) where all statements\r
+    sent to the SQLite library will be logged into.\r
+\r
+\r
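+A minimal sketch that puts a few of these parameters together (the file name\r
+"db" and all values are only examples)::\r
+\r
+    import sqlite\r
+\r
+    cx = sqlite.connect("db",\r
+                        encoding=("utf-8", "replace"),\r
+                        timeout=1.5,\r
+                        autocommit=0)\r
+    cu = cx.cursor()\r
+    cu.execute("select 4 * 5")\r
+    print cu.fetchone()[0]     # should print 20.0 (NUMERIC => float, see below)\r
+    cx.close()\r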
+\r
+=========================\r
+3. Transaction management\r
+=========================\r
+\r
+3.1 The standard, DB-API way\r
+----------------------------\r
+\r
+Transactions are opened "when necessary". PySQLite goes to some lengths to open\r
+transactions as late as possible. That is, when you have a sequence like this::\r
+\r
+    cu = cx.cursor()                                    # (1)\r
+    cu.execute("select foo from bar")                   # (2)\r
+    cu.execute("update foo set bar=5 where blarg=3")    # (3)\r
+    cx.commit()                                         # (4)\r
+\r
+only line number 3 triggers the sending of a BEGIN statement to the SQLite\r
+library. That's because under SQLite, it is safe to use even multiple SELECT\r
+statements outside transactions. The reason is that a BEGIN will lock the whole\r
+database, so outside transactions, you will always get consistent data using\r
+SELECT statements.\r
+\r
+Ok, so the .execute() in line #3 sees that it has got a DML (data manipulation\r
+language) statement as its SQL string and will transparently send a BEGIN\r
+before it to the SQLite engine. .commit() will send the corresponding COMMIT\r
+statement, of course. To roll back transactions instead, you'd use .rollback()\r
+on the connection object.\r
+\r
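+A short sketch of the rollback case, reusing the table from the sequence\r
+above (only an illustration)::\r
+\r
+    cu = cx.cursor()\r
+    try:\r
+        cu.execute("update foo set bar=5 where blarg=3")  # sends BEGIN first\r
+        raise RuntimeError("something went wrong")        # simulate an error\r
+    except RuntimeError:\r
+        cx.rollback()                                     # sends ROLLBACK\r
+\r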
+see => examples/dbapi_transactions.py\r
+\r
+3.2 The manual way\r
+------------------\r
+\r
+If you used the parameter autocommit=1 in the sqlite.connect() call, PySQLite\r
+will not get in your way with respect to transactions. You can send\r
+BEGIN/COMMIT/ROLLBACK statements with the .execute() method of the cursor\r
+object as you like.\r
+\r
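+For instance, with autocommit=1 you issue BEGIN and COMMIT yourself (a sketch\r
+only, see the example file below for a complete program)::\r
+\r
+    cx = sqlite.connect("db", autocommit=1)\r
+    cu = cx.cursor()\r
+    cu.execute("begin")\r
+    cu.execute("update foo set bar=5 where blarg=3")\r
+    cu.execute("commit")\r
+\r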
+see => examples/manual_transactions.py\r
+\r
+I don't recommend you actually use this option, unless you're implementing a\r
+transaction management system different from the DB-API one.\r
+\r
+=====================================\r
+4. Type Conversions Python <=> SQLite\r
+=====================================\r
+\r
+SQLite is a typeless database engine. Basically this means that apart from\r
+arithmetic operations, it only knows about strings. PySQLite goes a long way to\r
+work around this limitation of SQLite. Storing Python data in an SQLite\r
+database is not the problem: PySQLite will do this correctly for you for its\r
+supported Python types.\r
+\r
+The other way around is normally no problem either. In most cases, PySQLite can\r
+infer to which Python type it needs to convert the string data that the SQLite\r
+engine delivers. In other cases, however, where SQLite delivers too little\r
+type information or the wrong type information, you will have to help PySQLite\r
+guess right. This is what the next section is all about.\r
+\r
+4.1 Conversions SQLite => Python\r
+--------------------------------\r
+\r
+SQLite itself is typeless; it only knows about strings, and to some degree\r
+about numbers. So PySQLite has to work around this limitation. The conversion\r
+from string to the Python type we want works with a hidden dictionary called\r
+converters, which consists of the converters you registered in the .connect()\r
+call yourself, plus a few standard ones from PySQLite, listed below.\r
+\r
++-------------------------+-------------------+--------------------------------+\r
+| column types            | converter name    | converter callable             |\r
++=========================+===================+================================+\r
+| \*CHAR\*, \*TEXT\*      | str               | str()                          |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*INT\*                 | int               | int()                          |\r
++-------------------------+-------------------+--------------------------------+\r
+|                         | long              | long()                         |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*FLOAT\*, \*NUMERIC\*, | float             | float()                        |\r
+| \*NUMBER\*, \*DECIMAL\*,|                   |                                |\r
+| \*REAL\*, \*DOUBLE\*    |                   |                                |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*UNICODE\*             | unicode           | UnicodeConverter(self.encoding)|\r
++-------------------------+-------------------+--------------------------------+\r
+| \*BINARY\*, \*BLOB\*    | binary            | sqlite.decode()                |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*DATE\*                | date              | DateTime.DateFrom()            |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*TIME\*                | time              | DateTime.TimeFrom()            |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*TIMESTAMP\*           | timestamp         | DateTime.DateTimeFrom()        |\r
++-------------------------+-------------------+--------------------------------+\r
+| \*INTERVAL\*            | interval          | DateTime.DateTimeDeltaFrom()   |\r
++-------------------------+-------------------+--------------------------------+\r
+\r
+Now there are two ways to determine which converter to use for a given column\r
+in the resultset. If the column came directly from a table, and wasn't created\r
+by an expression, or by a function or aggregate, then SQLite delivers the\r
+column type to PySQLite, and PySQLite then picks a converter based on that\r
+column type.\r
+\r
+Let's use an example to make this more clear::\r
+\r
+    CREATE TABLE TEST (V VARCHAR, I INTEGER);\r
+    INSERT INTO TEST(V, I) VALUES ('foo', 25);\r
+\r
+>>> cu = cx.cursor()\r
+>>> cu.execute("select v, i from test")\r
+>>> row = cu.fetchone()\r
+>>> row, map(type, row)\r
+(('foo', 25), [<type 'str'>, <type 'int'>])\r
+\r
+Now, with the statement "select v, i from test" you directly accessed the\r
+columns 'v' and 'i' in the table 'test'. SQLite is thus able to deliver the\r
+types of the columns to PySQLite. PySQLite thus knows that the first column is\r
+of type VARCHAR, and the second column is of type INTEGER. Now VARCHAR matches\r
+*CHAR* and INTEGER matches *INT*, so PySQLite finds the converter name 'str'\r
+for the first column in the resultset, and the converter name 'int' for the\r
+second column. Now 'str' maps to str() and 'int' maps to int(), so these two\r
+callables are called for the raw string data PySQLite gets from the SQLite\r
+engine. For you, this means you transparently got back an integer for the\r
+second column, even though SQLite basically only knows about strings.\r
+\r
+Now let's try something else:\r
+\r
+>>> cu.execute("select i*2.3 from test")\r
+>>> row = cu.fetchone()\r
+>>> row, map(type, row)\r
+((57.5,), [<type 'float'>])\r
+\r
+There's a little magic going on here. SQLite infers that the result is numeric,\r
+so it sets "NUMERIC" as the type of the result column, which in turn by\r
+PySQLite is mapped to the converter name 'float', and then to the callable\r
+float.\r
+\r
+Now of course there are areas where there is no magic left and you have to tell\r
+PySQLite yourself which type to convert back to. This is basically always the\r
+case when result columns don't come directly from tables, but from expressions.\r
+\r
+One example would be where\r
+you'd want to concatenate two columns of a UNICODE type:\r
+\r
+>>> cx = sqlite.connect("db", encoding="utf-8")\r
+>>> cu = cx.cursor()\r
+>>> cu.execute("create table test(u1 unicode, u2 unicode)")\r
+>>> cu.execute("insert into test(u1, u2) values (%s, %s)", (u"\x99sterreich", u"Ungarn"))\r
+>>> cu.execute("select u1 || '-' || u2 from test")\r
+>>> print cu.fetchone()\r
+('\xc2\x99sterreich-Ungarn',)\r
+\r
+We didn't specify what type to convert to, so we just got a normal Python\r
+string back, with the result in UTF-8 encoding. So let's specify the converter\r
+name with the magical "-- types type1[, type2 ...]" SQL command that PySQLite\r
+intercepts and interprets itself and try again:\r
+\r
+>>> cu.execute("-- types unicode")\r
+>>> cu.execute("select u1 || '-' || u2 from test")\r
+>>> row = cu.fetchone()\r
+>>> row, map(type, row)\r
+((u'\x99sterreich-Ungarn',), [<type 'unicode'>])\r
+\r
+Another problematic area is SQLite functions and aggregates. SQLite will\r
+always consider their results NUMERIC. Consider this:\r
+\r
+>>> import sqlite\r
+>>> cx = sqlite.connect(":memory:")\r
+>>> cx.create_function("concat", 2, lambda x, y: "%s--%s" % (x,y))\r
+>>> cu = cx.cursor()\r
+>>> cu.execute("select concat('ab', 'cd')")\r
+>>> res = cu.fetchone()[0]\r
+>>> res, type(res)\r
+(0.0, <type 'float'>)\r
+\r
+Pretty stupid, right? SQLite tells PySQLite that the result is NUMERIC, so\r
+PySQLite faithfully tries to convert it to a float, which fails, so the result\r
+is 0.0. We'll have to tell it explicitly which types we want:\r
+\r
+>>> cu.execute("-- types str")\r
+>>> cu.execute("select concat('ab', 'cd')")\r
+>>> res = cu.fetchone()[0]\r
+>>> res, type(res)\r
+('ab--cd', <type 'str'>)\r
+>>>\r
+\r
+The same problem exists for aggregates, by the way:\r
+\r
+>>> import sqlite\r
+>>> cx = sqlite.connect(":memory:")\r
+>>> from mx.DateTime import *\r
+>>> today = now()\r
+>>> yesterday = now() - DateTimeDelta(1)\r
+>>> today, yesterday\r
+(<DateTime object for '2004-07-06 18:50:12.12' at 401f12f8>, <DateTime object for '2004-07-05 18:50:12.12' at 401ca2f8>)\r
+>>> cu = cx.cursor()\r
+>>> cu.execute("create table test (d timestamp)")\r
+>>> cu.executemany("insert into test(d) values (%s)", [(today,), (yesterday,)])\r
+>>> cu.execute("select max(d) from test")\r
+>>> res = cu.fetchone()[0]\r
+>>> res, type(res)\r
+(2004.0, <type 'float'>)\r
+\r
+Bah! Ok let's be explicit then:\r
+\r
+>>> cu.execute("-- types timestamp")\r
+>>> cu.execute("select max(d) from test")\r
+>>> res = cu.fetchone()[0]\r
+>>> res, type(res)\r
+(<DateTime object for '2004-07-06 18:50:12.11' at 40279bf0>, <type 'DateTime'>)\r
+>>>\r
+\r
+4.2 Conversions Python => SQLite\r
+--------------------------------\r
+\r
+This section only matters if you want to create your own types and use them\r
+transparently with SQLite. Just provide them with a _quote() method that will\r
+return a string ready to be inserted directly into a SQL statement.\r
+\r
+You'll then also want to register a suitable converter callable with the\r
+converters parameter of the connect() function.\r
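+\r
+A condensed sketch of the idea (see examples/converters.py for a complete,\r
+runnable version)::\r
+\r
+    class Point:\r
+        def __init__(self, x, y):\r
+            self.x, self.y = x, y\r
+        def _quote(self):\r
+            # the string representation written into SQL statements\r
+            return "'%f,%f'" % (self.x, self.y)\r
+\r
+    def pointConverter(s):\r
+        x, y = s.split(",")\r
+        return Point(float(x), float(y))\r
+\r
+    cx = sqlite.connect("db", converters={"point": pointConverter})\r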
+\r
+\r
diff --git a/encode.c b/encode.c
new file mode 100644 (file)
index 0000000..1410813
--- /dev/null
+++ b/encode.c
@@ -0,0 +1,245 @@
+/*
+** 2002 April 25
+**
+** The author disclaims copyright to this source code.  In place of
+** a legal notice, here is a blessing:
+**
+**    May you do good and not evil.
+**    May you find forgiveness for yourself and forgive others.
+**    May you share freely, never taking more than you give.
+**
+*************************************************************************
+** This file contains helper routines used to translate binary data into
+** a null-terminated string (suitable for use in SQLite) and back again.
+** These are convenience routines for use by people who want to store binary
+** data in an SQLite database.  The code in this file is not used by any other
+** part of the SQLite library.
+**
+** $Id: encode.c,v 1.2 2004/07/03 22:51:18 ghaering Exp $
+*/
+#include <string.h>
+
+/*
+** How This Encoder Works
+**
+** The output is allowed to contain any character except 0x27 (') and
+** 0x00.  This is accomplished by using an escape character to encode
+** 0x27 and 0x00 as a two-byte sequence.  The escape character is always
+** 0x01.  An 0x00 is encoded as the two byte sequence 0x01 0x01.  The
+** 0x27 character is encoded as the two byte sequence 0x01 0x03.  Finally,
+** the escape character itself is encoded as the two-character sequence
+** 0x01 0x02.
+**
+** To summarize, the encoder works by using escape sequences as follows:
+**
+**       0x00  ->  0x01 0x01
+**       0x01  ->  0x01 0x02
+**       0x27  ->  0x01 0x03
+**
+** If that were all the encoder did, it would work, but in certain cases
+** it could double the size of the encoded string.  For example, to
+** encode a string of 100 0x27 characters would require 100 instances of
+** the 0x01 0x03 escape sequence resulting in a 200-character output.
+** We would prefer to keep the size of the encoded string smaller than
+** this.
+**
+** To minimize the encoding size, we first add a fixed offset value to each 
+** byte in the sequence.  The addition is modulo 256.  (That is to say, if
+** the sum of the original character value and the offset exceeds 256, then
+** the higher order bits are truncated.)  The offset is chosen to minimize
+** the number of characters in the string that need to be escaped.  For
+** example, in the case above where the string was composed of 100 0x27
+** characters, the offset might be 0x01.  Each of the 0x27 characters would
+** then be converted into an 0x28 character which would not need to be
+** escaped at all and so the 100 character input string would be converted
+** into just 100 characters of output.  Actually 101 characters of output - 
+** we have to record the offset used as the first byte in the sequence so
+** that the string can be decoded.  Since the offset value is stored as
+** part of the output string and the output string is not allowed to contain
+** characters 0x00 or 0x27, the offset cannot be 0x00 or 0x27.
+**
+** Here, then, are the encoding steps:
+**
+**     (1)   Choose an offset value and make it the first character of
+**           output.
+**
+**     (2)   Copy each input character into the output buffer, one by
+**           one, adding the offset value as you copy.
+**
+**     (3)   If the value of an input character plus offset is 0x00, replace
+**           that one character by the two-character sequence 0x01 0x01.
+**           If the sum is 0x01, replace it with 0x01 0x02.  If the sum
+**           is 0x27, replace it with 0x01 0x03.
+**
+**     (4)   Put a 0x00 terminator at the end of the output.
+**
+** Decoding is obvious:
+**
+**     (5)   Copy encoded characters except the first into the decode 
+**           buffer.  Set the first encoded character aside for use as
+**           the offset in step 7 below.
+**
+**     (6)   Convert each 0x01 0x01 sequence into a single character 0x00.
+**           Convert 0x01 0x02 into 0x01.  Convert 0x01 0x03 into 0x27.
+**
+**     (7)   Subtract the offset value that was the first character of
+**           the encoded buffer from all characters in the output buffer.
+**
+** The only tricky part is step (1) - how to compute an offset value to
+** minimize the size of the output buffer.  This is accomplished by testing
+** all offset values and picking the one that results in the fewest number
+** of escapes.  To do that, we first scan the entire input and count the
+** number of occurrences of each character value in the input.  Suppose
+** the number of 0x00 characters is N(0), the number of occurrences of 0x01
+** is N(1), and so forth up to the number of occurrences of 0xff is N(255).
+** An offset of 0 is not allowed so we don't have to test it.  The number
+** of escapes required for an offset of 1 is N(1)+N(2)+N(40).  The number
+** of escapes required for an offset of 2 is N(2)+N(3)+N(41).  And so forth.
+** In this way we find the offset that gives the minimum number of escapes,
+** and thus minimizes the length of the output string.
+*/
+
+/*
+** Encode a binary buffer "in" of size n bytes so that it contains
+** no instances of characters '\'' or '\000'.  The output is 
+** null-terminated and can be used as a string value in an INSERT
+** or UPDATE statement.  Use sqlite_decode_binary() to convert the
+** string back into its original binary.
+**
+** The result is written into a preallocated output buffer "out".
+** "out" must be able to hold at least 2 +(257*n)/254 bytes.
+** In other words, the output will be expanded by as much as 3
+** bytes for every 254 bytes of input plus 2 bytes of fixed overhead.
+** (This is approximately 2 + 1.0118*n or about a 1.2% size increase.)
+**
+** The return value is the number of characters in the encoded
+** string, excluding the "\000" terminator.
+*/
+int sqlite_encode_binary(const unsigned char *in, int n, unsigned char *out){
+  int i, j, e = 0, m;
+  int cnt[256];
+  if( n<=0 ){
+    out[0] = 'x';
+    out[1] = 0;
+    return 1;
+  }
+  memset(cnt, 0, sizeof(cnt));
+  for(i=n-1; i>=0; i--){ cnt[in[i]]++; }
+  m = n;
+  for(i=1; i<256; i++){
+    int sum;
+    if( i=='\'' ) continue;
+    sum = cnt[i] + cnt[(i+1)&0xff] + cnt[(i+'\'')&0xff];
+    if( sum<m ){
+      m = sum;
+      e = i;
+      if( m==0 ) break;
+    }
+  }
+  out[0] = e;
+  j = 1;
+  for(i=0; i<n; i++){
+    int c = (in[i] - e)&0xff;
+    if( c==0 ){
+      out[j++] = 1;
+      out[j++] = 1;
+    }else if( c==1 ){
+      out[j++] = 1;
+      out[j++] = 2;
+    }else if( c=='\'' ){
+      out[j++] = 1;
+      out[j++] = 3;
+    }else{
+      out[j++] = c;
+    }
+  }
+  out[j] = 0;
+  return j;
+}
+
+/*
+** Decode the string "in" into binary data and write it into "out".
+** This routine reverses the encoding created by sqlite_encode_binary().
+** The output will always be a few bytes less than the input.  The number
+** of bytes of output is returned.  If the input is not a well-formed
+** encoding, -1 is returned.
+**
+** The "in" and "out" parameters may point to the same buffer in order
+** to decode a string in place.
+*/
+int sqlite_decode_binary(const unsigned char *in, unsigned char *out){
+  int i, c, e;
+  e = *(in++);
+  i = 0;
+  while( (c = *(in++))!=0 ){
+    if( c==1 ){
+      c = *(in++);
+      if( c==1 ){
+        c = 0;
+      }else if( c==2 ){
+        c = 1;
+      }else if( c==3 ){
+        c = '\'';
+      }else{
+        return -1;
+      }
+    }
+    out[i++] = (c + e)&0xff;
+  }
+  return i;
+}
+
+#ifdef ENCODER_TEST
+/*
+** The subroutines above are not tested by the usual test suite.  To test
+** these routines, compile just this one file with a -DENCODER_TEST=1 option
+** and run the result.
+*/
+int main(int argc, char **argv){
+  int i, j, n, m, nOut;
+  unsigned char in[30000];
+  unsigned char out[33000];
+
+  for(i=0; i<sizeof(in); i++){
+    printf("Test %d: ", i+1);
+    n = rand() % (i+1);
+    if( i%100==0 ){
+      int k;
+      for(j=k=0; j<n; j++){
+        /* if( k==0 || k=='\'' ) k++; */
+        in[j] = k;
+        k = (k+1)&0xff;
+      }
+    }else{
+      for(j=0; j<n; j++) in[j] = rand() & 0xff;
+    }
+    nOut = sqlite_encode_binary(in, n, out);
+    if( nOut!=strlen(out) ){
+      printf(" ERROR return value is %d instead of %d\n", nOut, strlen(out));
+      exit(1);
+    }
+    m = (256*n + 1262)/253;
+    printf("size %d->%d (max %d)", n, strlen(out)+1, m);
+    if( strlen(out)+1>m ){
+      printf(" ERROR output too big\n");
+      exit(1);
+    }
+    for(j=0; out[j]; j++){
+      if( out[j]=='\'' ){
+        printf(" ERROR contains (')\n");
+        exit(1);
+      }
+    }
+    j = sqlite_decode_binary(out, out);
+    if( j!=n ){
+      printf(" ERROR decode size %d\n", j);
+      exit(1);
+    }
+    if( memcmp(in, out, n)!=0 ){
+      printf(" ERROR decode mismatch\n");
+      exit(1);
+    }
+    printf(" OK\n");
+  }
+}
+#endif /* ENCODER_TEST */
diff --git a/examples/converters.py b/examples/converters.py
new file mode 100644 (file)
index 0000000..7a34cb7
--- /dev/null
@@ -0,0 +1,39 @@
+import os\r
+import sqlite\r
+\r
+# Ok, let's define a user-defined type we can use with the SQLite database\r
+class Point:\r
+    def __init__(self, x, y):\r
+        self.x, self.y = x, y\r
+\r
+    # The _quote function is currently the way a PySQLite user-defined type\r
+    # returns its string representation to write to the database.\r
+    def _quote(self):\r
+        return "'%f,%f'" % (self.x, self.y)\r
+\r
+    def __str__(self):\r
+        return "Point(%f, %f)" % (self.x, self.y)\r
+\r
+# The conversion callable needs to accept a string, parse it and return an\r
+# instance of your user-defined type.\r
+def pointConverter(s):\r
+    x, y = s.split(",")\r
+    return Point(float(x), float(y))\r
+\r
+# Ensure we have an empty database\r
+if os.path.exists("db"): os.remove("db")\r
+\r
+cx = sqlite.connect("db", converters={"point": pointConverter}) \r
+cu = cx.cursor()\r
+cu.execute("create table test(p point, n int)")\r
+cu.execute("insert into test(p, n) values (%s, %s)", (Point(-3.2, 4.5), 25))\r
+\r
+# For user-defined types, and for statements which return anything but direct\r
+# columns, you need to use the "-- types" feature of PySQLite:\r
+cu.execute("-- types point, int")\r
+cu.execute("select p, n from test")\r
+row = cu.fetchone()\r
+\r
+print "p:", row.p       # .columnname instead of [0] is a PySQLite\r
+print "n:", row.n       # extension to the DB-API! \r
+cx.close()\r
diff --git a/examples/dbapi_transactions.py b/examples/dbapi_transactions.py
new file mode 100644 (file)
index 0000000..4e8f113
--- /dev/null
@@ -0,0 +1,107 @@
+import sys\r
+import sqlite\r
+\r
+# The shared connection object\r
+cx = None\r
+\r
+def getCon():\r
+    # All code gets the connection object via this function\r
+    global cx\r
+    return cx\r
+\r
+def createSchema():\r
+    # Create the schema and make sure we're not accessing an old, incompatible schema\r
+    cu = getCon().cursor()\r
+    cu.execute("select tbl_name from sqlite_master where type='table' order by tbl_name")\r
+    tables = []\r
+    for row in cu.fetchall():\r
+        tables.append(row.tbl_name)\r
+    if tables != ["customer", "orders"]:\r
+        if tables == []:\r
+            # ok, database is empty\r
+            cu.execute("""\r
+                create table customer (\r
+                    cust_id integer primary key,\r
+                    cust_firstname text not null,\r
+                    cust_lastname text not null,\r
+                    cust_no text not null\r
+                )\r
+                """)\r
+            cu.execute("""\r
+                create table orders (\r
+                    ord_id integer primary key,\r
+                    ord_customer int,\r
+                    ord_item text not null,\r
+                    ord_quantity integer\r
+                )\r
+            """)\r
+            getCon().commit()\r
+        else:\r
+            print "We have an unknown schema here. Please fix manually."\r
+            sys.exit(1)\r
+\r
+def createCustomer(firstname, lastname, customerNo):\r
+    # Create a new customer and return the primary key id.\r
+    cu = getCon().cursor()\r
+    cu.execute("""\r
+        insert into customer(cust_firstname, cust_lastname, cust_no)\r
+            values (%s, %s, %s)\r
+            """, (firstname, lastname, customerNo))\r
+    getCon().commit()\r
+    return cu.lastrowid\r
+\r
+def createOrder(cust_id, ord_item, ord_quantity):\r
+    # Create a new order for the customer identified by cust_id and return the\r
+    # primary key of the created order row.\r
+    cu = getCon().cursor()\r
+    cu.execute("""\r
+        insert into orders (ord_customer, ord_item, ord_quantity)\r
+            values (%s, %s, %s)\r
+            """, (cust_id, ord_item, ord_quantity))\r
+    getCon().commit()\r
+    return cu.lastrowid\r
+\r
+def deleteOrder(ord_id):\r
+    # Delete an order.\r
+    cu = getCon().cursor()\r
+    cu.execute("delete from order where ord_id=%s", (ord_id,))\r
+    getCon().commit()\r
+\r
+def deleteCustomer(cust_id):\r
+    # Delete the customer identified by cust_id and all its orders (recursive\r
+    # delete).\r
+\r
+    # So now, finally, here we have an example where you *really* need\r
+    # transactions. We either want this to happen all or not at all. So all of\r
+    # these SQL statements need to be atomic, i. e. we need a transaction here.\r
+\r
+    # This will send the BEGIN to SQLite, as soon as the first non-SELECT is\r
+    # sent.\r
+    cu = getCon().cursor()\r
+\r
+    # So, before the next 'delete' statement, a 'BEGIN' is sent\r
+    cu.execute("delete from orders where ord_customer=%s", (cust_id,))\r
+    cu.execute("delete from customer where cust_id=%s", (cust_id,))\r
+\r
+    # This will send the "COMMIT" statement to the library.\r
+    getCon().commit()\r
+\r
+def main():\r
+    global cx\r
+    cx = sqlite.connect("customerdb")\r
+    createSchema()\r
+\r
+    # Create a customer\r
+    cust_id = createCustomer("Jane", "Doe", "JD0001")\r
+\r
+    # Create two orders for the customer\r
+    ord_id = createOrder(cust_id, "White Towel", 2)\r
+    ord_id = createOrder(cust_id, "Blue Cup", 5)\r
+\r
+    # Delete the customer, and all her orders.\r
+    deleteCustomer(cust_id)\r
+\r
+    cx.close()\r
+\r
+if __name__ == "__main__":\r
+    main()\r
diff --git a/examples/manual_transactions.py b/examples/manual_transactions.py
new file mode 100644 (file)
index 0000000..114ecbf
--- /dev/null
@@ -0,0 +1,102 @@
+import sys\r
+import sqlite\r
+\r
+# The shared connection object\r
+cx = None\r
+\r
+def getCon():\r
+    # All code gets the connection object via this function\r
+    global cx\r
+    return cx\r
+\r
+def createSchema():\r
+    # Create the schema and make sure we're not accessing an old, incompatible schema\r
+    cu = getCon().cursor()\r
+    cu.execute("select tbl_name from sqlite_master where type='table' order by tbl_name")\r
+    tables = []\r
+    for row in cu.fetchall():\r
+        tables.append(row.tbl_name)\r
+    if tables != ["customer", "orders"]:\r
+        if tables == []:\r
+            # ok, database is empty\r
+            cu.execute("begin")\r
+            cu.execute("""\r
+                create table customer (\r
+                    cust_id integer primary key,\r
+                    cust_firstname text not null,\r
+                    cust_lastname text not null,\r
+                    cust_no text not null\r
+                )\r
+                """)\r
+            cu.execute("""\r
+                create table orders (\r
+                    ord_id integer primary key,\r
+                    ord_customer int,\r
+                    ord_item text not null,\r
+                    ord_quantity integer\r
+                )\r
+            """)\r
+            cu.execute("commit")\r
+        else:\r
+            print "We have an unknown schema here. Please fix manually."\r
+            sys.exit(1)\r
+\r
+def createCustomer(firstname, lastname, customerNo):\r
+    # Create a new customer and return the primary key id.\r
+    cu = getCon().cursor()\r
+    cu.execute("""\r
+        insert into customer(cust_firstname, cust_lastname, cust_no)\r
+            values (%s, %s, %s)\r
+            """, (firstname, lastname, customerNo))\r
+    return cu.lastrowid\r
+\r
+def createOrder(cust_id, ord_item, ord_quantity):\r
+    # Create a new order for the customer identified by cust_id and return the\r
+    # primary key of the created order row.\r
+    cu = getCon().cursor()\r
+    cu.execute("""\r
+        insert into orders (ord_customer, ord_item, ord_quantity)\r
+            values (%s, %s, %s)\r
+            """, (cust_id, ord_item, ord_quantity))\r
+    return cu.lastrowid\r
+\r
+def deleteOrder(ord_id):\r
+    # Delete an order.\r
+    cu = getCon().cursor()\r
+    cu.execute("delete from order where ord_id=%s", (ord_id,))\r
+\r
+def deleteCustomer(cust_id):\r
+    # Delete the customer identified by cust_id and all its orders (recursive\r
+    # delete).\r
+\r
+    # So now, finally, here we have an example where you *really* need\r
+    # transactions. We either want this to happen all or not at all. So all of\r
+    # these SQL statements need to be atomic, i. e. we need a transaction here.\r
+    cu = getCon().cursor()\r
+\r
+    cu.execute("begin")\r
+    cu.execute("delete from orders where ord_customer=%s", (cust_id,))\r
+    cu.execute("delete from customer where cust_id=%s", (cust_id,))\r
+    cu.execute("commit")\r
+\r
+def main():\r
+    global cx\r
+    # Open the connection in autocommit mode, because we believe we have reason\r
+    # to :-/\r
+    cx = sqlite.connect("customerdb", autocommit=1)\r
+    createSchema()\r
+\r
+    # Create a customer\r
+    cust_id = createCustomer("Jane", "Doe", "JD0001")\r
+\r
+    # Create two orders for the customer\r
+    ord_id = createOrder(cust_id, "White Towel", 2)\r
+    ord_id = createOrder(cust_id, "Blue Cup", 5)\r
+\r
+    # Delete the customer, and all her orders.\r
+    deleteCustomer(cust_id)\r
+\r
+    cx.close()\r
+\r
+if __name__ == "__main__":\r
+    main()\r
diff --git a/misc/multithreading_crash.py b/misc/multithreading_crash.py
new file mode 100644 (file)
index 0000000..35516f9
--- /dev/null
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# This is a test case I got from a user that will crash PySQLite 0.5.0.
+# It stress-tests PySQLite in multithreaded mode.
+
+import os
+import sys
+import threading
+import time
+import random
+import sqlite
+
+dbname = "test.db"
+
+#MECHANISM = "no timeout"
+#MECHANISM = "use timeout"
+#MECHANISM = "use slow busy handler"
+MECHANISM = "use fast busy handler"
+
+class Modifier(threading.Thread):
+    def __init__(self, dbname):
+        threading.Thread.__init__(self)
+        self.dbname = dbname
+    def run(self):
+        print "Modifier: start"
+        cx = sqlite.connect(self.dbname)
+        cu = cx.cursor()
+        print "Modifier: INSERTing"
+        cu.execute("INSERT INTO meta (name, value) VALUES (%s, %s)",
+                   "foo", "blah blah blah")
+        for i in range(5):
+            print "Modifier: sleeping %d" % i
+            time.sleep(1)
+        print "Modifier: committing"
+        cx.commit()
+        print "Modifier: committed"
+        cu.close()
+        cx.close()
+        print "Modifier: end"
+
+class Reader(threading.Thread):
+    def __init__(self, name, dbname):
+        threading.Thread.__init__(self, name=name)
+        self.dbname = dbname
+    def busyHandler(self, delay, table, numAttempts):
+        print "Reader %s: busyHandler(delay=%r, table=%r, numAttempts=%r)"\
+              % (self.getName(), delay, table, numAttempts)
+        time.sleep(delay)
+        return 1
+    def run(self):
+        print "Reader %s: start" % self.getName()
+        if MECHANISM == "no timeout":
+            cx = sqlite.connect(self.dbname)
+        elif MECHANISM == "use timeout":
+            cx = sqlite.connect(self.dbname, timeout=5000)
+        elif MECHANISM == "use slow busy handler":
+            cx = sqlite.connect(self.dbname)
+            cx.db.sqlite_busy_handler(self.busyHandler, 1.0)
+        elif MECHANISM == "use fast busy handler":
+            cx = sqlite.connect(self.dbname, Xtimeout=5000.0)
+            cx.db.sqlite_busy_handler(self.busyHandler, 0.1)
+        else:
+            raise ValueError("MECHANISM is not one of the expected values")
+        sleepFor = random.randint(0, 3)
+        print "Reader %s: sleeping for %d seconds" % (self.getName(), sleepFor)
+        time.sleep(sleepFor)
+        print "Reader %s: waking up" % self.getName()
+        cu = cx.cursor()
+        print "Reader %s: SELECTing" % self.getName()
+        cu.execute("SELECT name, value FROM meta WHERE name='%s'" % self.getName())
+        print "Reader %s: SELECTed %s" % (self.getName(), cu.fetchone())
+        cu.close()
+        cx.close()
+        print "Reader %s: end" % self.getName()
+
+def test_sqlite_busy():
+    """Test handling of SQL_BUSY "errors" as discussed here:
+        http://www.hwaci.com/sw/sqlite/faq.html#q7
+        http://www.sqlite.org/cvstrac/wiki?p=MultiThreading
+    
+    Algorithm:
+        - start one thread that will open the database and start modifying it
+          then sleep for a while so other threads can get in there
+        - have other thread(s) do selects from the database and see if they
+          error out, if they block, if they timeout (play with timeout
+          .connect() argument)
+    """
+    # Create a fresh starting database.
+    if os.path.exists(dbname):
+        os.remove(dbname)
+    journal = dbname+"-journal"
+    if os.path.exists(journal):
+        os.remove(journal)
+    cx = sqlite.connect(dbname)
+    cu = cx.cursor()
+    cu.execute("CREATE TABLE meta (name STRING, value STRING)")
+    cx.commit()
+    cu.close()
+    cx.close()
+    
+    modifier = Modifier(dbname)
+    readerNames = ("foo",) #XXX "bar", "baz")
+    readers = [Reader(name, dbname) for name in readerNames]
+    modifier.start()
+    for reader in readers:
+        reader.start()
+    modifier.join()
+    for reader in readers:
+        reader.join()
+
+
+if __name__ == "__main__":
+    test_sqlite_busy()
diff --git a/port/strsep.c b/port/strsep.c
new file mode 100644 (file)
index 0000000..b027589
--- /dev/null
@@ -0,0 +1,73 @@
+/*-
+ * Copyright (c) 1990, 1993
+ *     The Regents of the University of California.  All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ *    must display the following acknowledgement:
+ *     This product includes software developed by the University of
+ *     California, Berkeley and its contributors.
+ * 4. Neither the name of the University nor the names of its contributors
+ *    may be used to endorse or promote products derived from this software
+ *    without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <string.h>
+#include <stdio.h>
+
+/*
+ * Get next token from string *stringp, where tokens are possibly-empty
+ * strings separated by characters from delim.
+ *
+ * Writes NULs into the string at *stringp to end tokens.
+ * delim need not remain constant from call to call.
+ * On return, *stringp points past the last NUL written (if there might
+ * be further tokens), or is NULL (if there are definitely no more tokens).
+ *
+ * If *stringp is NULL, strsep returns NULL.
+ */
+char *
+pysqlite_strsep(char** stringp, const char* delim)
+{
+       char *s;
+       const char *spanp;
+       int c, sc;
+       char *tok;
+
+       if ((s = *stringp) == NULL)
+               return (NULL);
+       for (tok = s;;) {
+               c = *s++;
+               spanp = delim;
+               do {
+                       if ((sc = *spanp++) == c) {
+                               if (c == 0)
+                                       s = NULL;
+                               else
+                                       s[-1] = 0;
+                               *stringp = s;
+                               return (tok);
+                       }
+               } while (sc != 0);
+       }
+       /* NOTREACHED */
+}
diff --git a/port/strsep.h b/port/strsep.h
new file mode 100644 (file)
index 0000000..a6dd640
--- /dev/null
@@ -0,0 +1 @@
+char* pysqlite_strsep(char** stringp, const char* delim);\r
diff --git a/setup.py b/setup.py
new file mode 100644 (file)
index 0000000..86c0d3f
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+import os, sys
+from distutils.core import setup
+from distutils.extension import Extension
+
+__version__ = "1.0.1"
+
+sqlite = "sqlite"
+sources = ["_sqlite.c", "encode.c", "port/strsep.c"]
+macros = []
+
+if sys.platform in ("linux-i386", "linux2"): # most Linux
+    include_dirs = ['/usr/include/sqlite']
+    library_dirs = ['/usr/lib/']
+    libraries = [sqlite]
+    runtime_library_dirs = []
+    extra_objects = []
+elif sys.platform in ("freebsd4", "freebsd5", "openbsd2", "cygwin", "darwin"):
+    if sys.platform == "darwin":
+        LOCALBASE = os.environ.get("LOCALBASE", "/opt/local")
+    else:
+        LOCALBASE = os.environ.get("LOCALBASE", "/usr/local")
+    include_dirs = ['%s/include' % LOCALBASE]
+    library_dirs = ['%s/lib/' % LOCALBASE]
+    libraries = [sqlite]
+    runtime_library_dirs = []
+    extra_objects = []
+elif sys.platform == "win32":
+    include_dirs = [r'..\sqlite']
+    library_dirs = [r'..\sqlite']
+    libraries = ["sqlite"]
+    runtime_library_dirs = []
+    extra_objects = []
+elif os.name == "posix": # most Unixish platforms
+    include_dirs = ['/usr/local/include']
+    library_dirs = ['/usr/local/lib']
+    libraries = [sqlite]
+    # On some platforms, this can be used to find the shared libraries
+    # at runtime, if they are in a non-standard location. Doesn't
+    # work for Linux gcc.
+    ## runtime_library_dirs = library_dirs
+    runtime_library_dirs = []
+    # This can be used on Linux to force use of static sqlite lib
+    ## extra_objects = ['/usr/lib/sqlite/libsqlite.a']
+    extra_objects = []
+else:
+    raise "UnknownPlatform", "sys.platform=%s, os.name=%s" % \
+          (sys.platform, os.name)
+    
+long_description = \
+"""Python interface to SQLite
+
+pysqlite is an interface to the SQLite embedded database engine for Python. It aims to be
+fully compliant with Python database API version 2.0 while also exploiting the
+unique features of SQLite.
+
+"""
+
+def main():
+    py_modules = ["sqlite.main"]
+                          
+    # patch distutils if it can't cope with the "classifiers" keyword
+    if sys.version < '2.2.3':
+        from distutils.dist import DistributionMetadata
+        DistributionMetadata.classifiers = None
+        DistributionMetadata.download_url = None
+    
+    setup ( # Distribution meta-data
+            name = "pysqlite",
+            version = __version__,
+            description = "An interface to SQLite",
+            long_description=long_description,
+            author = "PySQLite developers",
+            author_email = "pysqlite-devel@lists.sourceforge.net",
+            license = "Python license",
+            platforms = "ALL",
+            url = "http://pysqlite.sourceforge.net/",
+
+            # Description of the modules and packages in the distribution
+            py_modules = py_modules,
+
+            ext_modules = [Extension( name='_sqlite',
+                                      sources=sources,
+                                      include_dirs=include_dirs,
+                                      library_dirs=library_dirs,
+                                      runtime_library_dirs=runtime_library_dirs,
+                                      libraries=libraries,
+                                      extra_objects=extra_objects,
+                                      define_macros=macros
+                                      )],
+            classifiers = [
+            "Development Status :: 5 - Production/Stable",
+            "Intended Audience :: Developers",
+            "License :: OSI Approved :: MIT License",
+            "Operating System :: Microsoft :: Windows :: Windows NT/2000",
+            "Operating System :: POSIX",
+            "Programming Language :: C",
+            "Programming Language :: Python",
+            "Topic :: Database :: Database Engines/Servers",
+            "Topic :: Database :: Front-Ends"]
+            )
+
+if __name__ == "__main__":
+    main()
diff --git a/sqlite/__init__.py b/sqlite/__init__.py
new file mode 100644 (file)
index 0000000..58f6ac5
--- /dev/null
@@ -0,0 +1,72 @@
+import _sqlite
+
+"""Python interface to the SQLite embedded database engine."""
+
+#-------------------------------------------------------------------------------
+# Module Information
+#-------------------------------------------------------------------------------
+
+__revision__ = """$Revision: 1.22 $"""[11:-2]
+
+threadsafety = 1
+apilevel = "2.0"
+paramstyle = "pyformat"
+
+# This is the version string for the current PySQLite version.
+version = "1.0.1"
+
+# This is a tuple with the same digits as the version string, but it's
+# suitable for comparisons of various versions.
+version_info = (1, 0, 1)
+
+#-------------------------------------------------------------------------------
+# Data type support
+#-------------------------------------------------------------------------------
+
+from main import DBAPITypeObject, Cursor, Connection, PgResultSet
+
+STRING    = DBAPITypeObject(_sqlite.STRING)
+
+BINARY    = DBAPITypeObject(_sqlite.BINARY)
+
+INT       = DBAPITypeObject(_sqlite.INTEGER)
+
+NUMBER    = DBAPITypeObject(_sqlite.INTEGER,
+                            _sqlite.FLOAT)
+
+DATE      = DBAPITypeObject(_sqlite.DATE)
+
+TIME      = DBAPITypeObject(_sqlite.TIME)
+
+TIMESTAMP = DBAPITypeObject(_sqlite.TIMESTAMP)
+
+ROWID     = DBAPITypeObject()
+
+# Nonstandard extension:
+UNICODESTRING = DBAPITypeObject(_sqlite.UNICODESTRING)
+
+#-------------------------------------------------------------------------------
+# Exceptions
+#-------------------------------------------------------------------------------
+
+from _sqlite import Warning, Error, InterfaceError, \
+    DatabaseError, DataError, OperationalError, IntegrityError, InternalError, \
+    ProgrammingError, NotSupportedError
+
+#-------------------------------------------------------------------------------
+# Global Functions
+#-------------------------------------------------------------------------------
+
+def connect(*args, **kwargs):
+    return Connection(*args, **kwargs)
+
+from _sqlite import encode, decode
+
+Binary = encode
+
+__all__ = ['connect','IntegrityError', 'InterfaceError', 'InternalError',
+           'NotSupportedError', 'OperationalError',
+           'ProgrammingError', 'Warning',
+           'Connection', 'Cursor', 'PgResultSet',
+           'apilevel', 'paramstyle', 'threadsafety', 'version', 'version_info',
+           'Binary', 'decode']
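
Together with sqlite/main.py below, this gives the usual DB-API 2.0 surface. A minimal usage sketch (the database file name is invented; because paramstyle is "pyformat" and quoting is done by the module, SQL strings use bare %s placeholders and values are passed separately):

    import sqlite

    cnx = sqlite.connect("demo.db")
    cur = cnx.cursor()
    cur.execute("create table people (id integer primary key, name)")
    cur.execute("insert into people(name) values (%s)", ("Alice",))
    cnx.commit()
    cur.execute("select id, name from people")
    row = cur.fetchone()                    # a PgResultSet by default
    print row.name, row["NAME"], row[1]     # attribute, key and index access
    cnx.close()
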
diff --git a/sqlite/main.py b/sqlite/main.py
new file mode 100644 (file)
index 0000000..d859475
--- /dev/null
@@ -0,0 +1,574 @@
+from __future__ import nested_scopes
+import _sqlite
+
+import copy, new, sys, weakref
+from types import *
+
+try:
+    from mx import DateTime
+    have_datetime = 1
+except ImportError:
+    have_datetime = 0
+
+if have_datetime:
+    # Make the required Date/Time constructors visible in the PySQLite module.
+    Date = DateTime.Date
+    Time = DateTime.Time
+    Timestamp = DateTime.Timestamp
+    DateFromTicks = DateTime.DateFromTicks
+    TimeFromTicks = DateTime.TimeFromTicks
+    TimestampFromTicks = DateTime.TimestampFromTicks
+
+    # And also the DateTime types
+    DateTimeType = DateTime.DateTimeType
+    DateTimeDeltaType = DateTime.DateTimeDeltaType
+
+class DBAPITypeObject:
+    def __init__(self,*values):
+        self.values = values
+
+    def __cmp__(self,other):
+        if other in self.values:
+            return 0
+        if other < self.values:
+            return 1
+        else:
+            return -1
+
+def _quote(value):
+    """_quote(value) -> string
+
+    This function transforms the Python value into a string suitable to send to
+    the SQLite database in a SQL statement.  This function is automatically
+    applied to all parameters sent with an execute() call.  Because of this a
+    SQL statement string in an execute() call should only use '%s' [or
+    '%(name)s'] for variable substitution without any quoting."""
+
+    if value is None:
+        return 'NULL'
+    elif type(value) in (IntType, LongType, FloatType):
+        return value
+    elif isinstance(value, StringType):
+        return "'%s'" % value.replace("'", "''")
+    elif hasattr(value, '__quote__'):
+        return value.__quote__()
+    elif hasattr(value, '_quote'):
+        return value._quote()
+    elif have_datetime and type(value) in \
+            (DateTime.DateTimeType, DateTime.DateTimeDeltaType):
+        return "'%s'" % value
+    else:
+        return repr(value)
+
+def _quoteall(vdict):
+    """_quoteall(vdict)->dict
+    Quotes all elements in a list or dictionary to make them suitable for
+    insertion in a SQL statement."""
+
+    if type(vdict) is DictType or isinstance(vdict, PgResultSet):
+        t = {}
+        for k, v in vdict.items():
+            t[k]=_quote(v)
+    elif isinstance(vdict, StringType) or isinstance(vdict, UnicodeType):
+        # Note: a string is a SequenceType, but is treated as a single
+        #    entity, not a sequence of characters.
+        t = (_quote(vdict), )
+    elif type(vdict) in (ListType, TupleType):
+        t = tuple(map(_quote, vdict))
+    else:
+        raise TypeError, \
+              "argument to _quoteall must be a sequence or dictionary!"
+
+    return t
+
+class PgResultSet:
+    """A DB-API query result set for a single row.
+    This class emulates a sequence with the added feature of being able to
+    reference a column as attribute or with dictionary access in addition to a
+    zero-based numeric index."""
+
+    def __init__(self, value):
+        self.__dict__['baseObj'] = value
+
+    def __getattr__(self, key):
+        key = key.upper()
+        if self._xlatkey.has_key(key):
+            return self.baseObj[self._xlatkey[key]]
+        raise AttributeError, key
+
+    def __len__(self):
+        return len(self.baseObj)
+
+    def __getitem__(self, key):
+        if isinstance(key, StringType):
+            key = self.__class__._xlatkey[key.upper()]
+        return self.baseObj[key]
+
+    def __contains__(self, key):
+        return self.has_key(key)
+
+    def __getslice__(self, i, j):
+        klass = make_PgResultSetClass(self._desc_[i:j])
+        obj = klass(self.baseObj[i:j])
+        return obj
+
+    def __repr__(self):
+        return repr(self.baseObj)
+
+    def __str__(self):
+        return str(self.baseObj)
+
+    def __cmp__(self, other):
+        return cmp(self.baseObj, other)
+
+    def description(self):
+        return self._desc_
+
+    def keys(self):
+        _k = []
+        for _i in self._desc_:
+            _k.append(_i[0])
+        return _k
+
+    def values(self):
+        return self.baseObj[:]
+
+    def items(self):
+        _items = []
+        for i in range(len(self.baseObj)):
+            _items.append((self._desc_[i][0], self.baseObj[i]))
+
+        return _items
+
+    def has_key(self, key):
+        return self._xlatkey.has_key(key.upper())
+
+    def get(self, key, defaultval=None):
+        if self.has_key(key):
+            return self[key]
+        else:
+            return defaultval
+
+def make_PgResultSetClass(description):
+    NewClass = new.classobj("PgResultSetConcreteClass", (PgResultSet,), {})
+    NewClass.__dict__['_desc_'] = description
+
+    NewClass.__dict__['_xlatkey'] = {}
+
+    for _i in range(len(description)):
+        NewClass.__dict__['_xlatkey'][description[_i][0].upper()] = _i
+
+    return NewClass
+
+class Cursor:
+    """Abstract cursor class implementing what all cursor classes have in
+    common."""
+
+    def __init__(self, conn, rowclass=PgResultSet):
+        self.arraysize = 1
+
+        # Add ourselves to the list of cursors for our owning connection.
+        self.con = weakref.proxy(conn)
+        self.con.cursors[id(self)] = self
+
+        self.rowclass = rowclass
+
+        self._reset()
+        self.current_recnum = -1
+
+    def _reset(self):
+        # closed is a trinary variable:
+        #     == None => Cursor has not been opened.
+        #     ==    0 => Cursor is open.
+        #     ==    1 => Cursor is closed.
+        self.closed = None
+        self.rowcount = -1
+        self._real_rowcount = 0
+        self.description = None
+        self.rs = None
+        self.current_recnum = 0
+
+    def _checkNotClosed(self, methodname=None):
+        if self.closed:
+            raise _sqlite.ProgrammingError, \
+                "%s failed - the cursor is closed." % (methodname or "")
+
+    def _unicodeConvert(self, obj):
+        """Encode all unicode strings that can be found in obj into
+        byte-strings using the encoding specified in the connection's
+        constructor, available here as self.con.encoding."""
+
+        if isinstance(obj, StringType):
+            return obj
+        elif isinstance(obj, UnicodeType):
+            return obj.encode(*self.con.encoding)
+        elif isinstance(obj, ListType) or isinstance(obj, TupleType):
+            converted_obj = []
+            for item in obj:
+                if isinstance(item, UnicodeType):
+                    converted_obj.append(item.encode(*self.con.encoding))
+                else:
+                    converted_obj.append(item)
+            return converted_obj
+        elif isinstance(obj, DictType):
+            converted_obj = {}
+            for k, v in obj.items():
+                if isinstance(v, UnicodeType):
+                    converted_obj[k] = v.encode(*self.con.encoding)
+                else:
+                    converted_obj[k] = v
+            return converted_obj
+        elif isinstance(obj, PgResultSet):
+            obj = copy.copy(obj)
+            for k, v in obj.items():
+                if isinstance(v, UnicodeType):
+                    obj[k] = v.encode(*self.con.encoding)
+            return obj
+        else:
+            return obj
+
+    def execute(self, SQL, *parms):
+        self._checkNotClosed("execute")
+
+        if self.con.autocommit:
+            pass
+        else:
+            if not(self.con.inTransaction or SQL[:6].upper() in ("SELECT","VACUUM","DETACH")):
+                self.con._begin()
+                self.con.inTransaction = 1
+
+        SQL = self._unicodeConvert(SQL)
+
+        if len(parms) == 0:
+            # If there are no parameters, just execute the query.
+            self.rs = self.con.db.execute(SQL)
+        else:
+            if len(parms) == 1 and \
+               (type(parms[0]) in (DictType, ListType, TupleType) or \
+                        isinstance(parms[0], PgResultSet)):
+                parms = (self._unicodeConvert(parms[0]),)
+                parms = _quoteall(parms[0])
+            else:
+                parms = self._unicodeConvert(parms)
+                parms = tuple(map(_quote, parms))
+
+            self.rs = self.con.db.execute(SQL % parms)
+
+        self.closed = 0
+        self.current_recnum = 0
+
+        self.rowcount, self._real_rowcount = [len(self.rs.row_list)] * 2
+        if self.rowcount == 0:
+            self.rowcount = self.con.db.sqlite_changes()
+
+        self.description = self.rs.col_defs
+
+        if issubclass(self.rowclass, PgResultSet):
+            self.rowclass = make_PgResultSetClass(self.description[:])
+
+    def executemany(self, query, parm_sequence):
+        self._checkNotClosed("executemany")
+
+        if self.con is None:
+            raise _sqlite.ProgrammingError, "connection is closed."
+
+        for _i in parm_sequence:
+            self.execute(query, _i)
+
+    def close(self):
+        if self.con and self.con.closed:
+            raise _sqlite.ProgrammingError, \
+                  "This cursor's connection is already closed."
+        if self.closed:
+            raise _sqlite.ProgrammingError, \
+                  "This cursor is already closed."
+        self.closed = 1
+
+        # Disassociate ourselves from our connection.
+        try:
+            cursors = self.con.cursors
+            del cursors.data[id(self)]
+        except:
+            pass
+
+    def __del__(self):
+        # Disassociate ourselves from our connection.
+        try:
+            cursors = self.con.cursors
+            del cursors.data[id(self)]
+        except:
+            pass
+
+    def setinputsizes(self, sizes):
+        """Does nothing, required by DB API."""
+        self._checkNotClosed("setinputsizes")
+
+    def setoutputsize(self, size, column=None):
+        """Does nothing, required by DB API."""
+        self._checkNotClosed("setoutputsize")
+
+    #
+    # DB-API methods:
+    #
+
+    def fetchone(self):
+        self._checkNotClosed("fetchone")
+
+        # If there are no records
+        if self._real_rowcount == 0:
+            return None
+
+        # If we have reached the last record
+        if self.current_recnum >= self._real_rowcount:
+            return None
+
+        if type(self.rowclass) is TupleType:
+            retval = self.rs.row_list[self.current_recnum]
+        else:
+            retval = self.rowclass(self.rs.row_list[self.current_recnum])
+        self.current_recnum += 1
+
+        return retval
+
+    def fetchmany(self, howmany=None):
+        self._checkNotClosed("fetchmany")
+
+        if howmany is None:
+            howmany = self.arraysize
+
+        # If there are no records
+        if self._real_rowcount == 0:
+            return []
+
+        # If we have reached the last record
+        if self.current_recnum >= self._real_rowcount:
+            return []
+
+        if type(self.rowclass) is TupleType:
+            retval = self.rs.row_list[self.current_recnum:self.current_recnum + howmany]
+        else:
+            retval = [self.rowclass(row) for row in self.rs.row_list[self.current_recnum:self.current_recnum + howmany]]
+
+        self.current_recnum += howmany
+        if self.current_recnum > self._real_rowcount:
+            self.current_recnum = self._real_rowcount
+
+        return retval
+
+    def fetchall(self):
+        self._checkNotClosed("fetchall")
+
+        # If there are no records
+        if self._real_rowcount == 0:
+            return []
+
+        # If we have reached the last record
+        if self.current_recnum >= self._real_rowcount:
+            return []
+
+        if type(self.rowclass) is TupleType:
+            retval = self.rs.row_list[self.current_recnum:]
+        else:
+            retval = [self.rowclass(row) for row in self.rs.row_list[self.current_recnum:]]
+
+        self.current_recnum = self._real_rowcount
+
+        return retval
+
+    #
+    # Optional DB-API extensions from PEP 0249:
+    #
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        item = self.fetchone()
+        if item is None:
+            if sys.version_info[:2] >= (2,2):
+                raise StopIteration
+            else:
+                raise IndexError
+        else:
+            return item
+
+    def scroll(self, value, mode="relative"):
+        if mode == "relative":
+            new_recnum = self.current_recnum + value
+        elif mode == "absolute":
+            new_recnum = value
+        else:
+            raise ValueError, "invalid mode parameter"
+        if new_recnum >= 0 and new_recnum < self.rowcount:
+            self.current_recnum = new_recnum
+        else:
+            raise IndexError
+
+    def __getattr__(self, key):
+        if self.__dict__.has_key(key):
+            return self.__dict__[key]
+        elif key == "sql":
+            # The sql attribute is a PySQLite extension.
+            return self.con.db.sql
+        elif key == "rownumber":
+            return self.current_recnum
+        elif key == "lastrowid":
+            return self.con.db.sqlite_last_insert_rowid()
+        elif key == "connection":
+            return self.con
+        else:
+            raise AttributeError, key
+
+class UnicodeConverter:
+    def __init__(self, encoding):
+        self.encoding = encoding
+
+    def __call__(self, val):
+        return unicode(val, *self.encoding)
+
+class Connection:
+
+    def __init__(self, database=None, mode=0755, converters={}, autocommit=0, encoding=None, timeout=None, command_logfile=None, *arg, **kwargs):
+        # Old parameter names, for backwards compatibility
+        database = database or kwargs.get("db")
+        encoding = encoding or kwargs.get("client_encoding")
+
+        # Set these here, to prevent an attribute access error in __del__
+        # in case the connect fails.
+        self.closed = 0
+        self.db = None
+        self.inTransaction = 0
+        self.autocommit = autocommit
+        self.cursors = weakref.WeakValueDictionary()
+        self.rowclass = PgResultSet
+
+        self.db = _sqlite.connect(database, mode)
+
+        if type(encoding) not in (TupleType, ListType):
+            self.encoding = (encoding or sys.getdefaultencoding(),)
+        else:
+            self.encoding = encoding
+
+        register = self.db.register_converter
+        # These are the converters we provide by default ...
+        register("str", str)
+        register("int", int)
+        register("long", long)
+        register("float", float)
+        register("unicode", UnicodeConverter(self.encoding))
+        register("binary", _sqlite.decode)
+
+        # ... and DateTime/DateTimeDelta, if we have the mx.DateTime module.
+        if have_datetime:
+            register("date", DateTime.DateFrom)
+            register("time", DateTime.TimeFrom)
+            register("timestamp", DateTime.DateTimeFrom)
+            register("interval", DateTime.DateTimeDeltaFrom)
+
+        for typename, conv in converters.items():
+            register(typename, conv)
+
+        if timeout is not None:
+            self.db.sqlite_busy_timeout(timeout)
+
+        self.db.set_command_logfile(command_logfile)
+
+    def __del__(self):
+        if not self.closed:
+            self.close()
+
+    def _checkNotClosed(self, methodname):
+        if self.closed:
+            raise _sqlite.ProgrammingError, \
+                  "%s failed - Connection is closed." % methodname
+
+    def __anyCursorsLeft(self):
+        return len(self.cursors.data.keys()) > 0
+
+    def __closeCursors(self, doclose=0):
+        """__closeCursors() - closes all cursors associated with this connection"""
+        if self.__anyCursorsLeft():
+            cursors = map(lambda x: x(), self.cursors.data.values())
+
+            for cursor in cursors:
+                try:
+                    if doclose:
+                        cursor.close()
+                    else:
+                        cursor._reset()
+                except weakref.ReferenceError:
+                    pass
+
+    def _begin(self):
+        self.db.execute("BEGIN")
+        self.inTransaction = 1
+
+    #
+    # PySQLite extensions:
+    #
+
+    def create_function(self, name, nargs, func):
+        self.db.create_function(name, nargs, func)
+
+    def create_aggregate(self, name, nargs, agg_class):
+        self.db.create_aggregate(name, nargs, agg_class)
+
+    #
+    # DB-API methods:
+    #
+
+    def commit(self):
+        self._checkNotClosed("commit")
+        if self.autocommit:
+            # Ignore .commit(), according to the DB-API spec.
+            return
+
+        if self.inTransaction:
+            self.db.execute("COMMIT")
+            self.inTransaction = 0
+
+    def rollback(self):
+        self._checkNotClosed("rollback")
+        if self.autocommit:
+            raise _sqlite.ProgrammingError, "Rollback failed - autocommit is on."
+
+        if self.inTransaction:
+            self.db.execute("ROLLBACK")
+            self.inTransaction = 0
+
+    def close(self):
+        self._checkNotClosed("close")
+
+        self.__closeCursors(1)
+
+        if self.inTransaction:
+            self.rollback()
+
+        if self.db:
+            self.db.close()
+        self.closed = 1
+
+    def cursor(self):
+        self._checkNotClosed("cursor")
+        return Cursor(self, self.rowclass)
+
+    #
+    # Optional DB-API extensions from PEP 0249:
+    #
+
+    def __getattr__(self, key):
+        if key in self.__dict__.keys():
+            return self.__dict__[key]
+        elif key in ('IntegrityError', 'InterfaceError', 'InternalError',
+                     'NotSupportedError', 'OperationalError',
+                     'ProgrammingError', 'Warning'):
+            return getattr(_sqlite, key)
+        else:
+            raise AttributeError, key
+
+    #
+    # MySQLdb compatibility stuff
+    #
+
+    def insert_id(self):
+        return self.db.sqlite_last_insert_rowid()
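
The _begin()/commit()/rollback() trio above implements the DB-API's implicit transactions: unless autocommit is set, a BEGIN is sent before the first statement that is not SELECT, VACUUM or DETACH, and the transaction stays open until commit() or rollback(). A short sketch with a hypothetical database name:

    import sqlite

    cnx = sqlite.connect("txn_demo.db")
    cur = cnx.cursor()
    cur.execute("create table t (x)")                 # implicit BEGIN is sent first
    cur.execute("insert into t(x) values (%s)", (1,))
    cnx.rollback()                                    # ROLLBACK undoes both statements
    cur.execute("create table t (x)")                 # a fresh transaction starts here
    cnx.commit()                                      # COMMIT makes it permanent
    cnx.close()
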
diff --git a/test/all_tests.py b/test/all_tests.py
new file mode 100644 (file)
index 0000000..df1a03d
--- /dev/null
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+"""
+This combines all PySQLite test suites into one big one.
+"""
+
+import unittest, sys
+import api_tests, logging_tests, lowlevel_tests, pgresultset_tests, type_tests
+import userfunction_tests, transaction_tests
+
+def suite():
+    suite = unittest.TestSuite((lowlevel_tests.suite(), api_tests.suite(),
+            type_tests.suite(), userfunction_tests.suite(),
+            transaction_tests.suite(), pgresultset_tests.suite(),
+            logging_tests.suite()))
+
+    return suite
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/api_tests.py b/test/api_tests.py
new file mode 100644 (file)
index 0000000..9246215
--- /dev/null
@@ -0,0 +1,517 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest, weakref
+import sqlite
+
+class DBAPICompliance(unittest.TestCase):
+    def CheckAPILevel(self):
+        self.assertEqual(sqlite.apilevel, '2.0',
+                         'apilevel is %s, should be 2.0' % sqlite.apilevel)
+
+    def CheckThreadSafety(self):
+        self.assertEqual(sqlite.threadsafety, 1,
+                         'threadsafety is %d, should be 1' % sqlite.threadsafety)
+
+    def CheckParamStyle(self):
+        self.assertEqual(sqlite.paramstyle, 'pyformat',
+                         'paramstyle is "%s", should be "pyformat"' %
+                         sqlite.paramstyle)
+
+    def CheckWarning(self):
+        self.assert_(issubclass(sqlite.Warning, StandardError),
+                     'Warning is not a subclass of StandardError')
+
+    def CheckError(self):
+        self.failUnless(issubclass(sqlite.Error, StandardError),
+                        'Error is not a subclass of StandardError')
+
+    def CheckInterfaceError(self):
+        self.failUnless(issubclass(sqlite.InterfaceError, sqlite.Error),
+                        'InterfaceError is not a subclass of Error')
+
+    def CheckDatabaseError(self):
+        self.failUnless(issubclass(sqlite.DatabaseError, sqlite.Error),
+                        'DatabaseError is not a subclass of Error')
+
+    def CheckDataError(self):
+        self.failUnless(issubclass(sqlite.DataError, sqlite.DatabaseError),
+                        'DataError is not a subclass of DatabaseError')
+
+    def CheckOperationalError(self):
+        self.failUnless(issubclass(sqlite.OperationalError, sqlite.DatabaseError),
+                        'OperationalError is not a subclass of DatabaseError')
+
+    def CheckIntegrityError(self):
+        self.failUnless(issubclass(sqlite.IntegrityError, sqlite.DatabaseError),
+                        'IntegrityError is not a subclass of DatabaseError')
+
+    def CheckInternalError(self):
+        self.failUnless(issubclass(sqlite.InternalError, sqlite.DatabaseError),
+                        'InternalError is not a subclass of DatabaseError')
+
+    def CheckProgrammingError(self):
+        self.failUnless(issubclass(sqlite.ProgrammingError, sqlite.DatabaseError),
+                        'ProgrammingError is not a subclass of DatabaseError')
+
+    def CheckNotSupportedError(self):
+        self.failUnless(issubclass(sqlite.NotSupportedError,
+                                   sqlite.DatabaseError),
+                        'NotSupportedError is not a subclass of DatabaseError')
+
+class moduleTestCases(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename)
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckConnectionObject(self):
+        self.assert_(isinstance(self.cnx, sqlite.Connection),
+                     'sqlite.connect did not return a Connection object')
+
+    def CheckConnectionClose(self):
+        self.assert_(hasattr(self.cnx, 'close') and
+                     type(self.cnx.close) == types.MethodType,
+                     'close is not a method of Connection')
+        self.cnx.close()
+        self.removefile()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.close)
+
+    def CheckConnectionCommit(self):
+        self.assert_(hasattr(self.cnx, "commit") and
+                     type(self.cnx.commit) == types.MethodType,
+                     'commit is not a method of Connection')
+        self.cnx.close()
+        self.removefile()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.commit)
+
+    def CheckConnectionRollback(self):
+        self.assert_(hasattr(self.cnx, "rollback") and
+                     type(self.cnx.rollback) == types.MethodType,
+                     'rollback is not a method of Connection')
+        self.cnx.close()
+        self.removefile()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.rollback)
+
+    def CheckConnectionCursor(self):
+        self.assert_(hasattr(self.cnx, "cursor") and
+                     type(self.cnx.cursor) == types.MethodType,
+                     'cursor is not a method of Connection')
+        self.cnx.close()
+        self.removefile()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.cursor)
+
+    def CheckCloseConnection(self):
+        self.cnx.close()
+        self.removefile()
+
+    def CheckCursorObject(self):
+        self.assert_(isinstance(self.cur, sqlite.Cursor),
+                     'cnx.cursor() did not return a Cursor instance')
+
+    def CheckCursorArraysize(self):
+        self.assert_(self.cur.arraysize == 1,
+                     'cur.arraysize is %d, it should be 1' %
+                     self.cur.arraysize)
+
+    def CheckCursorDescription(self):
+        self.assert_(self.cur.description == None,
+                     "cur.description should be None at this point, it isn't.")
+
+    def CheckCursorRowcount(self):
+        self.assert_(self.cur.rowcount == -1,
+                     'cur.rowcount is %d, should be -1' % self.cur.rowcount)
+
+    def CheckCursorClose(self):
+        self.assert_(hasattr(self.cur, "close") and
+                     type(self.cur.close) == types.MethodType,
+                     'close is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cur.close)
+
+    def CheckCursorExecute(self):
+        self.assert_(hasattr(self.cur, "execute") and
+                     type(self.cur.execute) == types.MethodType,
+                     'execute is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.execute, 'SELECT max(3,4)')
+
+    def CheckCursorExecutemany(self):
+        self.assert_(hasattr(self.cur, "executemany") and
+                     type(self.cur.executemany) == types.MethodType,
+                     'executemany is not a method of the Cursor object')
+
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.executemany, 'SELECT max(3,4)', [1,2])
+
+    def CheckCursorFetchone(self):
+        self.assert_(hasattr(self.cur, "fetchone") and
+                     type(self.cur.fetchone) == types.MethodType,
+                     'fetchone is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cur.fetchone)
+
+    def CheckCursorFetchMany(self):
+        self.failUnless(hasattr(self.cur, "fetchmany") and
+                        type(self.cur.fetchmany) == types.MethodType,
+                        'fetchmany is not a method of the Cursor object')
+
+        cursor = self.cnx.cursor()
+        cursor.execute("create table test(id int)")
+        cursor.executemany("insert into test(id) values (%s)", range(10))
+        cursor.execute("select id from test")
+        res = cursor.fetchmany()
+        self.failUnlessEqual(len(res), 1, """fetchmany should have returned a
+            list of length 1, but the list was %i elements long""" % len(res))
+        res = cursor.fetchmany(2)
+        self.failUnlessEqual(len(res), 2, """fetchmany should have returned a
+            list of length 2, but the list was %i elements long""" % len(res))
+        cursor.arraysize = 5
+        res = cursor.fetchmany()
+        self.failUnlessEqual(len(res), 5, """fetchmany should have returned a
+            list of length 5, but the list was %i elements long""" % len(res))
+
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.fetchmany, 10)
+
+    def CheckCursorFetchall(self):
+        self.failUnless(hasattr(self.cur, "fetchall") and
+                        type(self.cur.fetchall) == types.MethodType,
+                        'fetchall is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.fetchall)
+
+    def CheckCursorSetoutputsize(self):
+        self.failUnless(hasattr(self.cur, "setoutputsize") and
+                        type(self.cur.setoutputsize) == types.MethodType,
+                        'setoutputsize is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.setoutputsize, 1024)
+
+    def CheckCursorSetinputsizes(self):
+        self.failUnless(hasattr(self.cur, "setinputsizes") and
+                        type(self.cur.setinputsizes) == types.MethodType,
+                        'setinputsizes is not a method of the Cursor object')
+        self.cur.close()
+        self.failUnlessRaises(sqlite.ProgrammingError,
+                              self.cur.setinputsizes, [1, 2, 3])
+
+    def CheckExecuteWithSingleton(self):
+        """Test execute() with a singleton string as the parameter."""
+        try:
+            self.cur.execute("select max(3,4)")
+        except StandardError, msg:
+            self.fail(msg)
+
+        self.assertEqual(type(self.cur.description), types.TupleType,
+                         "cur.description should be a tuple, but isn't.")
+
+        clen = len(self.cur.description)
+        self.assertEqual(clen, 1,
+                         "Length of cur.description is %d, it should be %d." %
+                         (clen, 1))
+
+
+        self.assertEqual(len(self.cur.description[0]), 7,
+                         "Length of cur.description[0] is %d, it should be 7." %
+                         len(self.cur.description[0]))
+
+
+        self.failUnless(self.cur.description[0][0] == "max(3,4)"    and
+                        self.cur.description[0][1] == sqlite.NUMBER and
+                        self.cur.description[0][2] == None          and
+                        self.cur.description[0][3] == None          and
+                        self.cur.description[0][4] == None          and
+                        self.cur.description[0][5] == None          and
+                        self.cur.description[0][6] == None,
+                        "cur.description[0] does not match the query.")
+        self.cur.close()
+
+    def CheckExecuteWithTuple(self):
+        """Test execute() with a tuple as the parameter."""
+        try:
+            self.cur.execute("select max(%s, %s)", (4, 5))
+        except StandardError, msg:
+            self.fail(msg)
+
+        # Empty tuple
+        try:
+            self.cur.execute("select 3+4", ())
+        except StandardError, msg:
+            self.fail(msg)
+        self.cur.close()
+
+    def CheckExecuteWithDictionary(self):
+        """Test execute() with a dictionary as the parameter."""
+        try:
+            self.cur.execute("select max(%(n1)s, %(n2)s)", {"n1": 5, "n2": 6})
+        except StandardError, msg:
+            self.fail(msg)
+        self.cur.close()
+
+    def CheckQuotingOfLong(self):
+        """Test wether longs are quoted properly for SQL."""
+        try:
+            self.cur.execute("-- types long")
+            self.cur.execute("select %s + %s as x", (5L, 6L))
+        except StandardError, msg:
+            self.fail(msg)
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.x, 11L,
+            "The addition of long should have returned %i, returned %i"
+                % (11L, res.x))
+
+    def CheckCursorIterator(self):
+        self.cur.execute("create table test (id, name)")
+        self.cur.executemany("insert into test (id) values (%s)",
+                            [(1,), (2,), (3,)])
+        self.cur.execute("-- types int")
+        self.cur.execute("select id from test")
+
+        if sys.version_info[:2] >= (2,2):
+            counter = 0
+            for row in self.cur:
+                if counter == 0:
+                    self.failUnlessEqual(row.id, 1,
+                        "row.id should have been 1, was %i" % row.id)
+                elif counter == 1:
+                    self.failUnlessEqual(row.id, 2,
+                        "row.id should have been 2, was %i" % row.id)
+                elif counter == 2:
+                    self.failUnlessEqual(row.id, 3,
+                        "row.id should have been 3, was %i" % row.id)
+                else:
+                    self.fail("Iterated over too many rows.")
+                counter += 1
+        else:
+            # Python 2.1
+            counter = 0
+            try:
+                while 1:
+                    row = self.cur.next()
+                    if counter == 0:
+                        self.failUnlessEqual(row.id, 1,
+                            "row.id should have been 1, was %i" % row.id)
+                    elif counter == 1:
+                        self.failUnlessEqual(row.id, 2,
+                            "row.id should have been 2, was %i" % row.id)
+                    elif counter == 2:
+                        self.failUnlessEqual(row.id, 3,
+                            "row.id should have been 3, was %i" % row.id)
+                    else:
+                        self.fail("Iterated over too many rows.")
+                    counter += 1
+            except IndexError:
+                pass
+            self.failUnlessEqual(counter, 3,
+                "Should have iterated over 3 items, was: %i" % counter)
+
+    def CheckCursorScrollAndRownumber(self):
+        self.cur.execute("create table test (id, name)")
+        values = [("foo",)] * 20
+        self.cur.executemany("insert into test (name) values (%s)", values)
+        self.cur.execute("select name from test")
+        self.failUnlessEqual(self.cur.rownumber, 0,
+            "Directly after execute, rownumber must be 0, is: %i"
+                % self.cur.rownumber)
+
+        self.cur.scroll(1, "absolute")
+        self.cur.scroll(5, "absolute")
+        self.failUnlessEqual(self.cur.rownumber, 5,
+            "rownumber should be 5, is: %i"
+                % self.cur.rownumber)
+
+        self.cur.scroll(1, "relative")
+        self.failUnlessEqual(self.cur.rownumber, 6,
+            "rownumber should be 6, is: %i"
+                % self.cur.rownumber)
+
+        self.cur.scroll(-2, "relative")
+        self.failUnlessEqual(self.cur.rownumber, 4,
+            "rownumber should be 4, is: %i"
+                % self.cur.rownumber)
+
+        self.failUnlessRaises(IndexError, self.cur.scroll, -2, "absolute")
+        self.failUnlessRaises(IndexError, self.cur.scroll, 1000, "absolute")
+
+        self.cur.scroll(10, "absolute")
+        self.failUnlessRaises(IndexError, self.cur.scroll, -11, "relative")
+
+        self.cur.scroll(10, "absolute")
+        self.failUnlessRaises(IndexError, self.cur.scroll, 30, "relative")
+
+    def CheckCursorConnection(self):
+        if not isinstance(self.cur.connection, weakref.ProxyType) and \
+           not isinstance(self.cur.connection, weakref.CallableProxyType):
+            fail("cursor.connection doesn't return the correct type")
+
+    def CheckCursorLastRowID(self):
+        self.cur.execute("create table test (id integer primary key, name)")
+
+        self.cur.execute("insert into test(name) values ('foo')")
+        self.failUnlessEqual(self.cur.lastrowid, 1,
+            "lastrowid should be 1, is %i" % self.cur.lastrowid)
+
+        # Test MySQLdb compatibility function
+        self.failUnlessEqual(self.cur.lastrowid, self.cnx.insert_id())
+
+        self.cur.execute("insert into test(name) values ('foo')")
+        self.failUnlessEqual(self.cur.lastrowid, 2,
+            "lastrowid should be 2, is %i" % self.cur.lastrowid)
+
+    def CheckResultObject(self):
+        try:
+            self.cur.execute("select max(3,4)")
+            self.assertEqual(self.cur.rowcount, 1,
+                             "cur.rowcount is %d, it should be 1." %
+                             self.cur.rowcount)
+            self.res = self.cur.fetchall()
+        except StandardError, msg:
+            self.fail(msg)
+
+        self.assertEqual(type(self.res), types.ListType,
+                         'cur.fetchall() did not return a sequence.')
+
+        self.assertEqual(len(self.res), 1,
+                         'Length of the list of results is %d, it should be 1' %
+                         len(self.res))
+
+        self.failUnless(isinstance(self.res[0], sqlite.PgResultSet),
+                        'cur.fetchall() did not return a list of PgResultSets.')
+
+    def CheckResultFetchone(self):
+        try:
+            self.cur.execute("select max(3,4)")
+            self.res = self.cur.fetchone()
+            self.assertEqual(self.cur.rowcount, 1,
+                             'cur.rowcount is %d, it should be 1.' %
+                             self.cur.rowcount)
+        except StandardError, msg:
+            self.fail(msg)
+
+        self.failUnless(isinstance(self.res, sqlite.PgResultSet),
+                        "cur.fetchone() does not return a PgResultSet.")
+
+        try:
+            self.res = self.cur.fetchone()
+            self.assertEqual(self.res, None,
+                             "res should be None at this point, but it isn't.")
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckRowCountAfterInsert(self):
+        try:
+            self.cur.execute("create table test(a)")
+            self.cur.execute("insert into test(a) values (5)")
+            self.assertEqual(self.cur.rowcount, 1,
+                            'cur.rowcount is %d, it should be 1.' %
+                            self.cur.rowcount)
+        except StandardError, msg:
+            self.fail(msg)
+    
+    def CheckRowCountAfterUpdate(self):
+        try:
+            self.cur.execute("create table test(a, b)")
+            self.cur.execute("insert into test(a, b) values (1, 2)")
+            self.cur.execute("insert into test(a, b) values (1, 3)")
+            self.cur.execute("insert into test(a, b) values (1, 4)")
+            self.cur.execute("update test set b=1 where a=1")
+            self.assertEqual(self.cur.rowcount, 3,
+                            'cur.rowcount is %d, it should be 3.' %
+                            self.cur.rowcount)
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckRowCountAfterDelete(self):
+        try:
+            self.cur.execute("create table test(a, b)")
+            self.cur.execute("insert into test(a, b) values (1, 2)")
+            self.cur.execute("insert into test(a, b) values (1, 3)")
+            self.cur.execute("insert into test(a, b) values (2, 4)")
+            self.cur.execute("delete from test where a=1")
+            self.assertEqual(self.cur.rowcount, 2,
+                            'cur.rowcount is %d, it should be 2.' %
+                            self.cur.rowcount)
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckSelectOfNonPrintableString(self):
+        try:
+            a = '\x01\x02\x03\x04'
+            self.cur.execute('select %s as a', a)
+            r = self.cur.fetchone()
+            self.assertEqual(len(r.a), len(a),
+                             "Length of result is %d, it should be %d."  %
+                             (len(r.a), len(a)))
+            self.failUnless(r.a == a,
+                             "Result is '%s', it should be '%s'" % (r.a, a))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingIntWithPercentS(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%s)", (5,))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingLongWithPercentS(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%s)", (50000000L,))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingFloatWithPercentS(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%s)", (-3.24,))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingIntWithPyQuoting(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%i)", (5,))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingLongWithPyQuoting(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%i)", (50000000L,))
+        except StandardError, msg:
+            self.fail(msg)
+
+    def CheckQuotingFloatWithPyQuoting(self):
+        try:
+            self.cur.execute("create table test(a number)")
+            self.cur.execute("insert into test(a) values (%f)", (-3.24,))
+        except StandardError, msg:
+            self.fail(msg)
+
+def suite():
+    dbapi_suite = unittest.makeSuite(DBAPICompliance, "Check")
+    module_suite = unittest.makeSuite(moduleTestCases, "Check")
+    test_suite = unittest.TestSuite((dbapi_suite, module_suite))
+    return test_suite
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
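
Several tests above issue a statement such as cur.execute("-- types int") right before a SELECT. As the tests and the converter registration in Connection.__init__ suggest, this comment pragma names the converters ("str", "int", "long", "float", "unicode", "binary", and the mx.DateTime ones when available) to apply to the columns of the following query. A hedged sketch, with an invented file name:

    import sqlite

    cnx = sqlite.connect("types_demo.db")
    cur = cnx.cursor()
    cur.execute("create table test (id, name)")
    cur.execute("insert into test(id, name) values (%s, %s)", (5, "Alice"))
    cur.execute("-- types int, str")                 # converters for the next SELECT
    cur.execute("select id, name from test")
    row = cur.fetchone()
    print type(row.id), type(row.name)               # <type 'int'> <type 'str'>
    cnx.close()
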
diff --git a/test/logging_tests.py b/test/logging_tests.py
new file mode 100644 (file)
index 0000000..ed7bf24
--- /dev/null
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+import testsupport
+import StringIO, unittest
+import sqlite
+
+class LogFileTemplate:
+    def write(self, s):
+        pass
+
+class LogFile:
+    def __init__(self):
+        pass
+
+def init_LogFile():
+    LogFile.write = LogFileTemplate.write
+
+class CommandLoggingTests(unittest.TestCase, testsupport.TestSupport):
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckNoWrite(self):
+        init_LogFile()
+        del LogFile.write
+        logger = LogFile()
+        try:
+            self.cnx = sqlite.connect(self.getfilename(),
+                command_logfile=logger)
+
+            self.fail("ValueError not raised")
+        except ValueError:
+            pass
+
+    def CheckWriteNotCallable(self):
+        logger = LogFile()
+        logger.write = 5
+        try:
+            self.cnx = sqlite.connect(self.getfilename(),
+                command_logfile=logger)
+
+            self.fail("ValueError not raised")
+        except ValueError:
+            pass
+
+    def CheckLoggingWorks(self):
+        logger = StringIO.StringIO()
+
+        expected_output = "\n".join([
+            "BEGIN", "CREATE TABLE TEST(FOO INTEGER)",
+            "INSERT INTO TEST(FOO) VALUES (5)",
+            "ROLLBACK"]) + "\n"
+
+        self.cnx = sqlite.connect(self.getfilename(),
+            command_logfile=logger)
+        cu = self.cnx.cursor()
+        cu.execute("CREATE TABLE TEST(FOO INTEGER)")
+        cu.execute("INSERT INTO TEST(FOO) VALUES (%i)", (5,))
+        self.cnx.rollback()
+
+        logger.seek(0)
+        real_output = logger.read()
+        
+        if expected_output != real_output:
+            self.fail("Logging didn't produce expected output.")
+
+def suite():
+    command_logging_suite = unittest.makeSuite(CommandLoggingTests, "Check")
+    return command_logging_suite
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/lowlevel_tests.py b/test/lowlevel_tests.py
new file mode 100644 (file)
index 0000000..7a1d537
--- /dev/null
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+"""
+These are the tests for the low-level module _sqlite.
+
+They try to execute as much of the low-level _sqlite module as possible to
+facilitate coverage testing with the help of gcov.
+"""
+
+from __future__ import nested_scopes
+import testsupport
+import os, unittest, re
+import _sqlite
+from sqlite import ProgrammingError
+
+class lowlevelTestCases(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = _sqlite.connect(self.filename)
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+            del self.cnx
+        except AttributeError:
+            pass
+        except ProgrammingError:
+            pass
+
+    def CheckModuleAttributeAccess(self):
+        for attr in dir(_sqlite):
+            _sqlite.__dict__[attr]
+
+    def CheckConnectionAttributes(self):
+        self.cnx.filename
+        self.cnx.sql
+        self.cnx.sqlite_changes()
+        self.cnx.sqlite_last_insert_rowid()
+
+        try:
+            self.cnx.foo = 7
+            self.fail("Could set attribute. Connection object should be read-only.")
+        except TypeError:
+            pass
+
+    def CheckSQLiteExec(self):
+        self.cnx.execute("create table test(id int, name varchar(20))")
+        self.cnx.execute("insert into test(id, name) values (1, 'foo')")
+        self.cnx.execute("insert into test(id, name) values (2, 'bar')")
+
+        expected_colnames = ('id', 'name')
+        expected_values = [('1', 'foo'), ('2', 'bar')]
+        failures = []
+
+        def callback(arg1, items, colnames):
+            if colnames != expected_colnames:
+                failures.append("expected colnames %s, got %s"
+                                % (repr(expected_colnames), repr(colnames)))
+            if items not in expected_values:
+                failures.append("%s not in expected_values %s"
+                                % (repr(items), repr(expected_values)))
+            else:
+                expected_values.pop(0)
+
+        self.cnx.sqlite_exec("select * from test", callback, None)
+        if len(failures) > 0:
+            for failure in failures:
+                self.fail(failure)
+
+    def CheckSQLiteLastInsertRowID(self):
+        self.cnx.execute("create table test(id integer primary key, name varchar(20))")
+        self.cnx.execute("insert into test(id, name) values (NULL, 'foo')")
+        self.cnx.execute("insert into test(id, name) values (NULL, 'bar')")
+        rowid = self.cnx.sqlite_last_insert_rowid()
+        self.failUnlessEqual(rowid, 2,
+                            "last inserted rowid should have been %i, was %i"
+                            % (2, rowid))
+
+    def CheckSQLiteChanges(self):
+        self.cnx.execute("create table test(id integer primary key, name varchar(20))")
+        self.cnx.execute("insert into test(id, name) values (NULL, 'foo')")
+        self.cnx.execute("insert into test(id, name) values (NULL, 'bar')")
+        self.cnx.execute("insert into test(id, name) values (NULL, 'baz')")
+        self.cnx.execute("delete from test where name='baz'")
+        changed = self.cnx.sqlite_changes()
+        self.failUnlessEqual(changed, 1,
+                            "changed rows should have been %i, was %i"
+                            % (1, changed))
+        self.cnx.execute("update test set name='foobar' where id < 10")
+        changed = self.cnx.sqlite_changes()
+        self.failUnlessEqual(changed, 2,
+                            "changed rows should have been %i, was %i"
+                            % (2, changed))
+
+    def CheckConnectionForProgrammingError(self):
+        self.cnx.close()
+        self.removefile()
+
+        self.failUnlessRaises(ProgrammingError, self.cnx.close)
+        self.failUnlessRaises(ProgrammingError, self.cnx.execute, "")
+
+    def CheckConnectionForNumberOfArguments(self):
+        self.failUnlessRaises(TypeError, self.cnx.close, None)
+        self.failUnlessRaises(TypeError, self.cnx.execute, None, None)
+        self.failUnlessRaises(TypeError, self.cnx.sqlite_changes, None)
+        self.failUnlessRaises(TypeError, self.cnx.sqlite_exec, None)
+        self.failUnlessRaises(TypeError, self.cnx.sqlite_last_insert_rowid, None)
+
+    def CheckConnectionDestructor(self):
+        del self.cnx
+        self.removefile()
+
+    def CheckResultObject(self):
+        create_statement = "create table test(id INTEGER, name TEXT)"
+        self.cnx.execute(create_statement)
+
+        self.failUnlessEqual(create_statement, self.cnx.sql,
+            ".sql should have been %s, was %s" % (create_statement, self.cnx.sql))
+
+        self.cnx.execute("insert into test(id, name) values (4, 'foo')")
+        self.cnx.execute("insert into test(id, name) values (5, 'bar')")
+
+        res = self.cnx.execute("select id, name from test")
+        self.failUnless(res.rowcount == 2, "Should have returned 2 rows, but was %i" % res.rowcount)
+
+        correct_col_defs = (('id', _sqlite.INTEGER, None, None, None, None, None), \
+                            ('name', _sqlite.STRING, None, None, None, None, None))
+        self.assertEqual(res.col_defs, correct_col_defs,
+            "col_defs should have been %s, was %s" % (repr(correct_col_defs), repr(res.col_defs)))
+
+        correct_row_list = [(4, 'foo'), (5, 'bar')]
+        self.assertEqual(res.row_list, correct_row_list,
+            "rowlist should have been %s, was %s" % (repr(correct_row_list), repr(res.row_list)))
+
+    def CheckResultAttributes(self):
+        res = self.cnx.execute("select NULL, max(4,5)")
+        try:
+            res.foo = 7
+
+        except TypeError:
+            pass
+
+    def CheckSQLiteVersion(self):
+        try:
+            ver = _sqlite.sqlite_version()
+        except:
+            self.fail('sqlite_version() failed')
+        pat = re.compile(r'\d*\.\d*\.\d*')
+        if not re.match(pat,ver):
+            self.fail('Incorrect sqlite_version() format, '
+                'should be digits.digits.digits, was %s'%ver)
+
+
+def suite():
+    return unittest.makeSuite(lowlevelTestCases, "Check")
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/pgresultset_tests.py b/test/pgresultset_tests.py
new file mode 100644 (file)
index 0000000..81f1d46
--- /dev/null
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+import testsupport
+import os, unittest, sys
+import sqlite
+
+class PgResultSetTests(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename)
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.ProgrammingError:
+            pass
+
+    def getResult(self):
+        try:
+            self.cur.execute("DROP TABLE TEST")
+        except sqlite.DatabaseError, reason:
+            pass
+
+        self.cur.execute("CREATE TABLE TEST (id, name, age)")
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
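+        # "-- types ..." is pysqlite's pragma for declaring the column types
+        # expected from the next SELECT (here: int, str, int).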
+        self.cur.execute("-- types int, str, int")
+        self.cur.execute("SELECT id, name, age FROM TEST")
+        return self.cur.fetchone()
+
+    def CheckAttributeAccess(self):
+        res = self.getResult()
+        if not hasattr(res, "id"):
+            self.fail("Resultset doesn't have attribute 'id'")
+        if not hasattr(res, "ID"):
+            self.fail("Resultset doesn't have attribute 'ID'")
+
+    def CheckAttributeValue(self):
+        res = self.getResult()
+        if res.id != 5:
+            self.fail("id should be 5, is %i" % res.id)
+        if res.ID != 5:
+            self.fail("ID should be 5, is %i" % res.ID)
+
+    def CheckKeyAccess(self):
+        res = self.getResult()
+        if not "id" in res:
+            self.fail("Resultset doesn't have item 'id'")
+        if not "ID" in res:
+            self.fail("Resultset doesn't have item 'ID'")
+
+    def CheckKeyValue(self):
+        res = self.getResult()
+        if res["id"] != 5:
+            self.fail("id should be 5, is %i" % res.id)
+        if res["ID"] != 5:
+            self.fail("ID should be 5, is %i" % res.ID)
+
+    def CheckIndexValue(self):
+        res = self.getResult()
+        if res[0] != 5:
+            self.fail("item 0 should be 5, is %i" % res.id)
+
+    def Check_haskey(self):
+        res = self.getResult()
+        if not res.has_key("id"):
+            self.fail("resultset should have key 'id'")
+        if not res.has_key("ID"):
+            self.fail("resultset should have key 'ID'")
+        if not res.has_key("Id"):
+            self.fail("resultset should have key 'Id'")
+
+    def Check_len(self):
+        l = len(self.getResult())
+        if l != 3:
+            self.fail("length of resultset should be 3, is %i", l)
+
+    def Check_keys(self):
+        res = self.getResult()
+        if res.keys() != ["id", "name", "age"]:
+            self.fail("keys() should return %s, returns %s" %
+                        (["id", "name", "age"], res.keys()))
+
+    def Check_values(self):
+        val = self.getResult().values()
+        if val != (5, 'Alice', 29):
+            self.fail("Wrong values(): %s" % val)
+
+    def Check_items(self):
+        it = self.getResult().items()
+        if it != [("id", 5), ("name", 'Alice'), ("age", 29)]:
+            self.fail("Wrong items(): %s" % it)
+
+    def Check_get(self):
+        res = self.getResult()
+        v = res.get("id")
+        if v != 5:
+            self.fail("Wrong result for get [1]")
+
+        v = res.get("ID")
+        if v != 5:
+            self.fail("Wrong result for get [2]")
+
+        v = res.get("asdf")
+        if v is not None:
+            self.fail("Wrong result for get [3]")
+
+        v = res.get("asdf", 6)
+        if v != 6:
+            self.fail("Wrong result for get [4]")
+
+class TupleResultTests(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename)
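+        # Request plain tuples for rows instead of the default PgResultSet
+        # objects exercised by the tests above.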
+        self.cnx.rowclass = tuple
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.ProgrammingError:
+            pass
+
+    def getOneResult(self):
+        try:
+            self.cur.execute("DROP TABLE TEST")
+        except sqlite.DatabaseError, reason:
+            pass
+
+        self.cur.execute("CREATE TABLE TEST (id, name, age)")
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("-- types int, str, int")
+        self.cur.execute("SELECT id, name, age FROM TEST")
+        return self.cur.fetchone()
+
+    def getManyResults(self):
+        try:
+            self.cur.execute("DROP TABLE TEST")
+        except sqlite.DatabaseError, reason:
+            pass
+
+        self.cur.execute("CREATE TABLE TEST (id, name, age)")
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("-- types int, str, int")
+        self.cur.execute("SELECT id, name, age FROM TEST")
+        return self.cur.fetchmany(2)
+
+    def getAllResults(self):
+        try:
+            self.cur.execute("DROP TABLE TEST")
+        except sqlite.DatabaseError, reason:
+            pass
+
+        self.cur.execute("CREATE TABLE TEST (id, name, age)")
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("INSERT INTO TEST (id, name, age) VALUES (%s, %s, %s)",
+                            (5, 'Alice', 29))
+        self.cur.execute("-- types int, str, int")
+        self.cur.execute("SELECT id, name, age FROM TEST")
+        return self.cur.fetchall()
+
+    def CheckRowTypeIsTupleFetchone(self):
+        res = self.getOneResult()
+        self.failUnless(type(res) is tuple, "Result type of row isn't a tuple")
+
+    def CheckRowTypeIsTupleFetchmany(self):
+        res = self.getManyResults()
+        self.failUnless(type(res[1]) is tuple, "Result type of row isn't a tuple")
+
+    def CheckRowTypeIsTupleFetchall(self):
+        res = self.getAllResults()
+        self.failUnless(type(res[2]) is tuple, "Result type of row isn't a tuple")
+
+def suite():
+    tests = [unittest.makeSuite(PgResultSetTests, "Check")]
+    if sys.version_info >= (2,2):
+        tests.append(unittest.makeSuite(TupleResultTests, "Check"))
+    return unittest.TestSuite(tests)
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/testsupport.py b/test/testsupport.py
new file mode 100644 (file)
index 0000000..18d9ff4
--- /dev/null
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+import _sqlite
+import os, tempfile, unittest
+
+class TestSupport:
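+    # Mixin shared by the test cases: use an in-memory database when the
+    # installed SQLite is new enough, otherwise fall back to a temporary file
+    # that removefile() deletes afterwards.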
+    def getfilename(self):
+        if _sqlite.sqlite_version_info() >= (2, 8, 2):
+            return ":memory:"
+        else:
+            return tempfile.mktemp()
+
+    def removefile(self):
+        if self.filename != ":memory:":
+            os.remove(self.filename)
diff --git a/test/transaction_tests.py b/test/transaction_tests.py
new file mode 100644 (file)
index 0000000..57bc70f
--- /dev/null
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+
+class TransactionTests(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename)
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckValueInTransaction(self):
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test (a) values (%s)", "foo")
+        self.cur.execute("-- types int")
+        self.cur.execute("select count(a) as count from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.count, 1,
+                             "Wrong number of rows during transaction.")
+
+    def CheckValueAfterCommit(self):
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test (a) values (%s)", "foo")
+        self.cur.execute("-- types int")
+        self.cur.execute("select count(a) as count from test")
+        self.cnx.commit()
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.count, 1,
+                             "Wrong number of rows after commit.")
+
+    def CheckValueAfterRollback(self):
+        self.cur.execute("create table test (a)")
+        self.cnx.commit()
+        self.cur.execute("insert into test (a) values (%s)", "foo")
+        self.cnx.rollback()
+        self.cur.execute("-- types int")
+        self.cur.execute("select count(a) as count from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.count, 0,
+                             "Wrong number of rows after rollback.")
+
+    def CheckImmediateCommit(self):
+        try:
+            self.cnx.commit()
+        except:
+            self.fail("Immediate commit raises exeption.")
+
+    def CheckImmediateRollback(self):
+        try:
+            self.cnx.rollback()
+        except:
+            self.fail("Immediate rollback raises exeption.")
+
+class AutocommitTests(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename, autocommit=1)
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckCommit(self):
+        self.cur.execute("select abs(5)")
+        try:
+            self.cnx.commit()
+        except:
+            self.fail(".commit() raised an exception")
+
+    def CheckRollback(self):
+        self.cur.execute("select abs(5)")
+        self.failUnlessRaises(sqlite.ProgrammingError, self.cnx.rollback)
+
+class ChangeAutocommitTests(unittest.TestCase):
+    pass
+
+def suite():
+    transaction_tests = unittest.makeSuite(TransactionTests, "Check")
+    autocommit_tests = unittest.makeSuite(AutocommitTests, "Check")
+    change_autocommit_tests = unittest.makeSuite(ChangeAutocommitTests, "Check")
+
+    test_suite = unittest.TestSuite((transaction_tests, autocommit_tests,
+                                    change_autocommit_tests))
+    return test_suite
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/type_tests.py b/test/type_tests.py
new file mode 100644 (file)
index 0000000..0521dfd
--- /dev/null
@@ -0,0 +1,342 @@
+#!/usr/bin/env python
+#-*- coding: ISO-8859-1 -*-
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+import _sqlite
+
+try:
+    from mx.DateTime import Date, Time, DateTime, DateTimeDelta, DateFrom, \
+            TimeFrom, DateTimeDeltaFrom
+    have_datetime = 1
+except ImportError:
+    have_datetime = 0
+
+def sqlite_is_at_least(major, minor, micro):
+    # map() returns a list in Python 2; compare tuples so the comparison is
+    # element-wise rather than by type name.
+    version = tuple(map(int, _sqlite.sqlite_version().split(".")))
+    return version >= (major, minor, micro)
+
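+# Custom type with the old-style _quote() hook, which pysqlite calls to get
+# the SQL literal for the value when it is bound into a statement.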
+class MyType:
+    def __init__(self, val):
+        self.val = int(val)
+
+    def _quote(self):
+        return str(self.val)
+
+    def __repr__(self):
+        return "MyType(%s)" % self.val
+
+    def __cmp__(self, other):
+        assert(isinstance(other, MyType))
+        return cmp(self.val, other.val)
+
+class MyTypeNew(MyType):
+    def __quote__(self):
+        return str(self.val)
+
+    def __getattr__(self, key):
+        # Forbid access to the old-style _quote method
+        if key == "_quote":
+            raise AttributeError
+        else:
+            return self.__dict__[key]
+
+class ExpectedTypes(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
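+        # Register MyType as the converter for the custom "mytype" name used
+        # in the "-- types" pragmas below.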
+        self.cnx = sqlite.connect(self.filename, converters={"mytype": MyType})
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckExpectedTypesStandardTypes(self):
+        self.cur.execute("create table test (a, b, c)")
+        self.cur.execute("insert into test(a, b, c) values (5, 6.3, 'hello')")
+        self.cur.execute("-- types int, float, str")
+        self.cur.execute("select * from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.a, types.IntType),
+                        "The built-in int converter didn't work.")
+        self.failUnless(isinstance(res.b, types.FloatType),
+                        "The built-in float converter didn't work.")
+        self.failUnless(isinstance(res.c, types.StringType),
+                        "The built-in string converter didn't work.")
+
+    def CheckExpectedTypesStandardTypesNull(self):
+        self.cur.execute("create table test (a, b, c)")
+        self.cur.execute("insert into test(a, b, c) values (NULL, NULL, NULL)")
+        self.cur.execute("-- types int, float, str")
+        self.cur.execute("select * from test")
+        res = self.cur.fetchone()
+        self.failUnless(res.a == None,
+                        "The built-in int converter should have returned None.")
+        self.failUnless(res.b == None,
+                        "The built-in float converter should have returned None.")
+        self.failUnless(res.c == None,
+                        "The built-in string converter should have returned None.")
+
+    def CheckExpectedTypesCustomTypes(self):
+        value = MyType(10)
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", value)
+        self.cur.execute("-- types mytype")
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+
+        self.failUnless(isinstance(res.a, MyType),
+                        "The converter did return the wrong type.")
+        self.failUnlessEqual(value, res.a,
+                             "The returned value and the inserted one are different.")
+
+    def CheckNewQuoteMethod(self):
+        value = MyTypeNew(10)
+        self.cur.execute("create table test (a integer)")
+        self.cur.execute("insert into test(a) values (%s)", value)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+
+        self.failUnlessEqual(10, res.a,
+                             "The returned value and the inserted one are different.")
+
+    def CheckExpectedTypesCustomTypesNull(self):
+        value = None
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", value)
+        self.cur.execute("-- types mytype")
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+
+        self.failUnless(res.a == None,
+                        "The converter should have returned None.")
+
+    def CheckResetExpectedTypes(self):
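+        # The "-- types" pragma only applies to the next statement, so the
+        # second SELECT should fall back to returning plain strings.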
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (5)")
+        self.cur.execute("-- types int")
+        self.cur.execute("select a from test")
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.assert_(isinstance(res.a, types.StringType),
+                     "'resetting types' didn't succeed.")
+
+    if have_datetime:
+        def CheckDateTypes(self):
+            dt = DateTime(2002, 6, 15)
+            dtd = DateTimeDelta(0, 0, 0, 1)
+
+            self.cur.execute("create table test (t timestamp)")
+            self.cur.execute("insert into test(t) values (%s)", (dt,))
+            self.cur.execute("select t from test")
+            res = self.cur.fetchone()
+
+            self.failUnlessEqual(dt, res.t,
+                "DateTime object should have been %s, was %s"
+                    % (repr(dt), repr(res.t)))
+
+            self.cur.execute("drop table test")
+            self.cur.execute("create table test(i interval)")
+            self.cur.execute("insert into test(i) values (%s)", (dtd,))
+            self.cur.execute("select i from test")
+            res = self.cur.fetchone()
+
+            self.failUnlessEqual(dtd, res.i,
+                "DateTimeDelta object should have been %s, was %s"
+                    % (repr(dtd), repr(res.i)))
+
+class UnicodeTestsLatin1(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename, encoding=("iso-8859-1",))
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckGetSameBack(self):
+        test_str = unicode("Österreich", "latin1")
+        self.cur.execute("create table test (a UNICODE)")
+        self.cur.execute("insert into test(a) values (%s)", test_str)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type(test_str), type(res.a),
+            "Something other than a Unicode string was fetched: %s"
+                % (str(type(res.a))))
+        self.failUnlessEqual(test_str, res.a,
+            "Fetching the unicode string doesn't return the inserted one.")
+
+class UnicodeTestsUtf8(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename, encoding="utf-8")
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckGetSameBack(self):
+        # PREZIDENT ROSSI'SKO' FEDERACII Österreich
+        test_str = unicode("ПРЕЗИДЕНТ РОССИЙСКОЙ ФЕДЕРАЦИИ Österreich", "utf-8")
+
+        self.cur.execute("create table test (a UNICODE)")
+        self.cur.execute("insert into test(a) values (%s)", test_str)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type(test_str), type(res.a),
+            "Something other than a Unicode string was fetched: %s"
+                % (str(type(res.a))))
+        self.failUnlessEqual(test_str, res.a,
+            "Fetching the unicode string doesn't return the inserted one.")
+
+class UnicodeTestsKOI8R(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename, encoding="koi8-r")
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckGetSameBack(self):
+        # PREZIDENT ROSSI'SKO' FEDERACII
+        # (President of the Russian Federation)
+        test_str = unicode("ðòåúéäåîô òïóóéêóëïê æåäåòáãéé", "koi8-r")
+
+        self.cur.execute("create table test (a UNICODE)")
+        self.cur.execute("insert into test(a) values (%s)", test_str)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type(test_str), type(res.a),
+            "Something other than a Unicode string was fetched: %s"
+                % (str(type(res.a))))
+        self.failUnlessEqual(test_str, res.a,
+            "Fetching the unicode string doesn't return the inserted one.")
+
+class SQLiteBuiltinTypeSupport(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename, encoding="koi8-r")
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckInt(self):
+        self.cur.execute("create table test (a INTEGER)")
+        self.cur.execute("insert into test(a) values (%s)", 5)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type(5), type(res.a),
+            "Something other than an INTEGER was fetched: %s"
+                % (str(type(res.a))))
+
+    def CheckFloat(self):
+        self.cur.execute("create table test (a FLOAT)")
+        self.cur.execute("insert into test(a) values (%s)", 5.7)
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type(5.7), type(res.a),
+            "Something other than a FLOAT was fetched: %s"
+                % (str(type(res.a))))
+
+    def CheckString(self):
+        self.cur.execute("create table test (a VARCHAR(20))")
+        self.cur.execute("insert into test(a) values (%s)", "foo")
+        self.cur.execute("select a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(type("foo"), type(res.a),
+            "Something other than a VARCHAR was fetched: %s"
+                % (str(type(res.a))))
+
+    def CheckBinary(self):
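+        # Wrap the value in sqlite.Binary so binary data (including NUL
+        # bytes) survives the round trip unchanged.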
+        bindata = "".join([chr(x) for x in range(256)])
+        self.cur.execute("create table test(b BINARY)")
+        self.cur.execute("insert into test(b) values (%s)", sqlite.Binary(bindata))
+        self.cur.execute("select b from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(bindata, res.b, "Binary roundtrip didn't produce original string")
+        self.failUnlessEqual(self.cur.description[0][1], sqlite.BINARY, "Wrong type code")
+
+    if have_datetime:
+        def CheckDate(self):
+            self.cur.execute("create table test (a DATE)")
+            d = DateFrom("2002-05-07")
+            self.cur.execute("insert into test(a) values (%s)", d)
+            self.cur.execute("select a from test")
+            res = self.cur.fetchone()
+            if res.a != d:
+                self.fail("didn't get back the same DATE")
+
+        def CheckTime(self):
+            self.cur.execute("create table test (a TIME)")
+            t = TimeFrom("22:15:00")
+            self.cur.execute("insert into test(a) values (%s)", t)
+            self.cur.execute("select a from test")
+            res = self.cur.fetchone()
+            if res.a != t:
+                self.fail("didn't get back the same TIME")
+
+        def CheckTimestamp(self):
+            self.cur.execute("create table test (a TIMESTAMP)")
+            d = DateFrom("2002-05-07 22:15:00")
+            self.cur.execute("insert into test(a) values (%s)", d)
+            self.cur.execute("select a from test")
+            res = self.cur.fetchone()
+            if res.a != d:
+                self.fail("didn't get back the same TIMESTAMP")
+
+        def CheckInterval(self):
+            self.cur.execute("create table test (a INTERVAL)")
+            d = DateTimeDeltaFrom("02:00:00")
+            self.cur.execute("insert into test(a) values (%s)", d)
+            self.cur.execute("select a from test")
+            res = self.cur.fetchone()
+            if res.a != d:
+                self.fail("didn't get back the same INTERVAL")
+
+def suite():
+    expected_suite = unittest.makeSuite(ExpectedTypes, "Check")
+    unicode_suite1 = unittest.makeSuite(UnicodeTestsLatin1, "Check")
+    unicode_suite2 = unittest.makeSuite(UnicodeTestsUtf8, "Check")
+    unicode_suite3 = unittest.makeSuite(UnicodeTestsKOI8R, "Check")
+    builtin_suite = unittest.makeSuite(SQLiteBuiltinTypeSupport, "Check")
+
+    return unittest.TestSuite((expected_suite, unicode_suite1, unicode_suite2,
+        unicode_suite3, builtin_suite))
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()
diff --git a/test/userfunction_tests.py b/test/userfunction_tests.py
new file mode 100644 (file)
index 0000000..ce70387
--- /dev/null
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+import testsupport
+import os, string, sys, types, unittest
+import sqlite
+
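+# Scalar callbacks registered via create_function() below; each takes one
+# argument and returns the type its name suggests (or raises, for the
+# error-handling tests).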
+def intreturner(x):
+    return int(x) * 2
+
+def floatreturner(x):
+    return float(x) * 2.0
+
+def stringreturner(x):
+    return "[%s]" % x
+
+def nullreturner(x):
+    return None
+
+def exceptionreturner(x):
+    return 5 / 0
+
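+# Aggregate classes for create_aggregate(): pysqlite calls step() once per
+# row and finalize() to produce the aggregate's result.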
+class MySum:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.sum = 0
+
+    def step(self, x):
+        self.sum += int(x)
+
+    def finalize(self):
+        val = self.sum
+        self.reset()
+        return val
+
+class MySumFloat:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.sum = 0.0
+
+    def step(self, x):
+        self.sum += float(x)
+
+    def finalize(self):
+        val = self.sum
+        self.reset()
+        return val
+
+class MySumReturnNull:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.sum = 0
+
+    def step(self, x):
+        self.sum += int(x)
+
+    def finalize(self):
+        return None
+
+class MySumStepException:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.sum = 0
+
+    def step(self, x):
+        self.sum += int(x) / 0
+
+    def finalize(self):
+        val = self.sum
+        self.reset()
+        return val
+
+class MySumFinalizeException:
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.sum = 0
+
+    def step(self, x):
+        self.sum += int(x)
+
+    def finalize(self):
+        val = self.sum / 0
+        self.reset()
+        return val
+
+class UserFunctions(unittest.TestCase, testsupport.TestSupport):
+    def setUp(self):
+        self.filename = self.getfilename()
+        self.cnx = sqlite.connect(self.filename)
+
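+        # Turn off callback debugging in the low-level module; several tests
+        # below deliberately raise exceptions inside callbacks.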
+        sqlite._sqlite.enable_callback_debugging(0)
+
+        self.cnx.create_function("intreturner", 1, intreturner)
+        self.cnx.create_function("floatreturner", 1, floatreturner)
+        self.cnx.create_function("stringreturner", 1, stringreturner)
+        self.cnx.create_function("nullreturner", 1, nullreturner)
+        self.cnx.create_function("exceptionreturner", 1, exceptionreturner)
+
+        self.cnx.create_aggregate("mysum", 1, MySum)
+        self.cnx.create_aggregate("mysumfloat", 1, MySumFloat)
+        self.cnx.create_aggregate("mysumreturnnull", 1, MySumReturnNull )
+        self.cnx.create_aggregate("mysumstepexception", 1, MySumStepExeption)
+        self.cnx.create_aggregate("mysumfinalizeexception", 1, MySumFinalizeExeption)
+        self.cur = self.cnx.cursor()
+
+    def tearDown(self):
+        try:
+            self.cnx.close()
+            self.removefile()
+        except AttributeError:
+            pass
+        except sqlite.InterfaceError:
+            pass
+
+    def CheckIntFunction(self):
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", 5)
+        self.cur.execute("-- types int")
+        self.cur.execute("select intreturner(a) as a from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.a, types.IntType),
+                        "The result should have been an int.")
+        self.failUnlessEqual(res.a, 10,
+                        "The function returned the wrong result.")
+
+    def CheckFloatFunction(self):
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", 5.0)
+        self.cur.execute("-- types float")
+        self.cur.execute("select floatreturner(a) as a from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.a, types.FloatType),
+                        "The result should have been a float.")
+        self.failUnlessEqual(res.a, 5.0 * 2.0,
+                        "The function returned the wrong result.")
+
+    def CheckStringFunction(self):
+        mystr = "test"
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", mystr)
+        self.cur.execute("-- types str")
+        self.cur.execute("select stringreturner(a) as a from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.a, types.StringType),
+                        "The result should have been a string.")
+        self.failUnlessEqual(res.a, "[%s]" % mystr,
+                        "The function returned the wrong result.")
+
+    def CheckNullFunction(self):
+        mystr = "test"
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", mystr)
+        self.cur.execute("-- types str")
+        self.cur.execute("select nullreturner(a) as a from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.a, None,
+                        "The result should have been None.")
+
+    def CheckFunctionWithNullArgument(self):
+        mystr = "test"
+        self.cur.execute("-- types str")
+        self.cur.execute("select nullreturner(NULL) as a")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.a, None,
+                        "The result should have been None.")
+
+
+    def CheckExceptionFunction(self):
+        mystr = "test"
+        self.cur.execute("create table test (a)")
+        self.cur.execute("insert into test(a) values (%s)", mystr)
+        self.cur.execute("-- types str")
+        try:
+            self.cur.execute("select exceptionreturner(a) as a from test")
+        except sqlite.DatabaseError, reason:
+            pass
+        except Exception, reason:
+            self.fail("Wrong exception raised: %s", sys.exc_info()[0])
+
+    def CheckAggregateBasic(self):
+        self.cur.execute("create table test (a)")
+        self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+        self.cur.execute("-- types int")
+        self.cur.execute("select mysum(a) as sum from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.sum, types.IntType),
+                        "The result should have been an int.")
+        self.failUnlessEqual(res.sum, 60,
+                        "The function returned the wrong result.")
+
+    def CheckAggregateFloat(self):
+        self.cur.execute("create table test (a)")
+        self.cur.executemany("insert into test(a) values (%s)", [(10.0,), (20.0,), (30.0,)])
+        self.cur.execute("-- types float")
+        self.cur.execute("select mysumfloat(a) as sum from test")
+        res = self.cur.fetchone()
+        self.failUnless(isinstance(res.sum, types.FloatType),
+                        "The result should have been a float.")
+        if res.sum <= 59.9 or res.sum >= 60.1:
+            self.fail("The function returned the wrong result.")
+
+    def CheckAggregateReturnNull(self):
+        self.cur.execute("create table test (a)")
+        self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+        self.cur.execute("-- types int")
+        self.cur.execute("select mysumreturnnull(a) as sum from test")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.sum, None,
+                        "The result should have been None.")
+
+    def CheckAggregateStepException(self):
+        self.cur.execute("create table test (a)")
+        self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+        self.cur.execute("-- types int")
+        try:
+            self.cur.execute("select mysumstepexception(a) as sum from test")
+        except sqlite.DatabaseError, reason:
+            pass
+        except Exception, reason:
+            self.fail("Wrong exception raised: %s" % sys.exc_info()[0])
+
+    def CheckAggregateFinalizeException(self):
+        self.cur.execute("create table test (a)")
+        self.cur.executemany("insert into test(a) values (%s)", [(10,), (20,), (30,)])
+        self.cur.execute("-- types int")
+        try:
+            self.cur.execute("select mysumfinalizeexception(a) as sum from test")
+        except sqlite.DatabaseError, reason:
+            pass
+        except Exception, reason:
+            self.fail("Wrong exception raised: %s", sys.exc_info()[0])
+
+    def CheckAggregateStepNullArgument(self):
+        self.cur.execute("-- types int")
+        self.cur.execute("select mysum(NULL) as a")
+        res = self.cur.fetchone()
+        self.failUnlessEqual(res.a, 0,
+                        "The result should have been 0.")
+
+
+def suite():
+    user_functions = unittest.makeSuite(UserFunctions, "Check")
+    test_suite = unittest.TestSuite((user_functions,))
+    return test_suite
+
+def main():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    main()