"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
* Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
* Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
* Denes
* Chris Clark
* clach05
* Denes Lengyel
* and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
...          folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
...                            groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:
help(DAL)
help(Field)
"""




__all__ = ['DAL', 'Field']

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
TIMINGSSIZE = 100
SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }
DEFAULT_URI = 'sqlite://dummy.db'

import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name','fields',
     'common_filter','polymodel','table_class','on_define','actual_name'))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable', 'orderby_on_limitby'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join



try:
    from gluon.utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    from gluon import serializers
    have_serializers = True
except ImportError:
    have_serializers = False
try:
    import json as simplejson
except ImportError:
    try:
        import gluon.contrib.simplejson as simplejson
    except ImportError:
        simplejson = None

LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda: 0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()


REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)')


DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite driver pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite driver sqlite3')

    try:
        try:
            import gluon.contrib.pymysql as pymysql
            # monkeypatch the escaping tables of the bundled driver
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLdb')

    try:
        import mysql.connector as mysqlconnector
        DRIVERS.append("MySQL(mysqlconnector)")
    except ImportError:
        LOGGER.debug("no driver mysql.connector")

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        try:
            import gluon.contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import gluon.contrib.pypyodbc as pyodbc
            except Exception, e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')

    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql

        from org.sqlite import JDBC
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no CouchDB driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
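
# The pluralize helper that consumes PLURALIZE_RULES was lost in this
# excerpt; the sketch below is a minimal illustration (not the original
# implementation) of how the rules are meant to be applied: take the first
# (match, substitution, replacement) triple whose match-regex hits the
# singular form. The last rule matches everything, so it always returns.
def _pluralize_sketch(singular, rules=PLURALIZE_RULES):
    for re_search, re_sub, replace in rules:
        if re_search.search(singular):
            # e.g. 'child' -> 'children', 'leaf' -> 'leaves', 'cat' -> 'cats'
            return re_sub.sub(replace, singular)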

def IDENTITY(x): return x


def use_common_filters(query):
    return (query and hasattr(query,'ignore_common_filters') and \
                not query.ignore_common_filters)


def quote_keyword(a, keyword='timestamp'):
    regex = re.compile('\.keyword(?=\w)')
    a = regex.sub('."%s"' % keyword,a)
    return a

if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                        % self.name)


class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # this allows gluon to commit/rollback tasks

    def close(self, action='commit', really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # if pooling is enabled, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters """
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self, 'connection', None) != None:
            return
        if f is None:
            f = self.connector

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()

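# Pooling in practice (illustrative, using the DAL class defined later in
# this module): DAL('postgres://u:p@localhost/test', pool_size=10) keeps up
# to 10 open connections per URI in ConnectionPool.POOLS; reconnect() first
# revalidates a pooled connection with "SELECT 1;" before reusing it, and
# falls back to opening a fresh connection when the pool is empty.
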
class BaseAdapter(ConnectionPool):

    native_json = False
    driver = None
    driver_name = None
    drivers = ()
    connection = None
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True
    dbpath = None
    folder = None

    TRUE = 'T'
    FALSE = 'F'
    T_SEP = ' '
    QUOTE_TEMPLATE = '"%s"'

    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',

        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

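    # Illustration of how the templates above are instantiated (plain string
    # substitution, no extra API):
    #   types['string'] % dict(length=512)  ->  'CHAR(512)'
    #   types['reference'] % dict(foreign_key='person (id)',
    #                             on_delete_action='CASCADE')
    #     ->  'INTEGER REFERENCES person (id) ON DELETE CASCADE'
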
    def isOperationalError(self, exception):
        if not hasattr(self.driver, "OperationalError"):
            return None
        return isinstance(exception, self.driver.OperationalError)

    def isProgrammingError(self, exception):
        if not hasattr(self.driver, "ProgrammingError"):
            return None
        return isinstance(exception, self.driver.ProgrammingError)

    def id_query(self, table):
        pkeys = getattr(table, '_primarykey', None)
        if pkeys:
            return table[pkeys[0]] != None
        else:
            return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj, (int, float)):
            return str(obj)
        return self.adapt(str(obj))

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename, mode)
        else:
            fileobj = open(filename, mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def find_driver(self, adapter_args, uri=None):
        self.adapter_args = adapter_args
        if getattr(self, 'driver', None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://', 1)[0].split(':')
            request_driver = items[1] if len(items) > 1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))

    def log(self, message, table=None):
        """ Logs migrations

        It will not log changes if logfile is not specified. Defaults
        to sql.log
        """

        isabs = None
        logfilename = self.adapter_args.get('logfile','sql.log')
        writelog = bool(logfilename)
        if writelog:
            isabs = os.path.isabs(logfilename)

        if table and table._dbt and writelog and self.folder:
            if isabs:
                table._loggername = logfilename
            else:
                table._loggername = pjoin(self.folder, logfilename)
            logfile = self.file_open(table._loggername, 'a')
            logfile.write(message)
            self.file_close(logfile)


    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_sequence' % tablename

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be a primary-key reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table-level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                    constraint_name = constraint_name,
                                    foreign_key = '%s (%s)' % (rtablename,
                                                               rfieldname),
                                    table_name = tablename,
                                    field_name = field_name,
                                    on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else:
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to the list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and
                # sql_fields_aux is used for create tables;
                # the reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # PostGIS geometry fields are added after the table
            # has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                table_name = tablename,
                field_name=', '.join(fkeys),
                foreign_table = rtablename,
                foreign_key = ', '.join(pkeys),
                on_delete_action = field.ondelete)

        if getattr(table,'_primarykey',None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                self.log('timestamp: %s\n%s\n'
                         % (datetime.datetime.today().isoformat(),
                            query), table)
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # PostGIS geometry fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log('faked!\n', table)
                else:
                    self.log('success!\n', table)
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, None,
                                   fake_migrate=fake_migrate)
        return query

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):

        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of a case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                             (tablename, key,
                              sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if (self.dbengine in ('postgres',) and
                    ftype.startswith('geometry')):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [("SELECT DropGeometryColumn ('%(schema)s', "
                              "'%(table)s', '%(field)s');") %
                             dict(schema=schema, table=tablename, field=key,)]
                elif self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP COLUMN %s;' %
                             (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                  and not (key in table.fields and
                           isinstance(table[key].type, SQLCustomType)) \
                  and not sql_fields[key]['type'].startswith('reference')\
                  and not sql_fields[key]['type'].startswith('double')\
                  and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if self.dbengine in ('firebird',):
                    drop_expr = 'ALTER TABLE %s DROP %s;'
                else:
                    drop_expr = 'ALTER TABLE %s DROP COLUMN %s;'
                key_tmp = key + '__tmp'
                query = ['ALTER TABLE %s ADD %s %s;' % (t, key_tmp, tt),
                         'UPDATE %s SET %s=%s;' % (t, key_tmp, key),
                         drop_expr % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s;' % (t, key, key_tmp),
                         drop_expr % (t, key_tmp)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                self.log('timestamp: %s\n'
                         % datetime.datetime.today().isoformat(), table)
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    self.log(sub_query + '\n', table)
                    if fake_migrate:
                        if db._adapter.commit_on_alter_table:
                            self.save_dbt(table,sql_fields_current)
                        self.log('faked!\n', table)
                    else:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird
                        # do not allow multiple ALTER TABLE statements
                        # in one transaction, so we must commit
                        # partial transactions and
                        # update table._dbt after each ALTER TABLE.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            self.save_dbt(table,sql_fields_current)
                            self.log('success!\n', table)

            elif metadata_change:
                self.save_dbt(table,sql_fields_current)

        if metadata_change and not (query and db._adapter.commit_on_alter_table):
            db.commit()
            self.save_dbt(table,sql_fields_current)
            self.log('success!\n', table)

    def save_dbt(self, table, sql_fields_current):
        tfile = self.file_open(table._dbt, 'w')
        pickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def LENGTH(self, first):
        return "LENGTH(%s)" % self.expand(first)

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                self.log(query + '\n', table)
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            self.log('success!\n', table)

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table

    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if hasattr(table,'_on_insert_error'):
                return table._on_insert_error(table,fields,e)
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        if not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

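    # e.g. BELONGS(db.person.id, [1, 2, 3]) renders '(person.id IN (1,2,3))',
    # an empty candidate list renders the always-false '(1=0)', and a string
    # argument is treated as a nested-select SQL fragment (its trailing ';'
    # is stripped).
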
    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        if first.type in ('string','text', 'json'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT('%',Expression(
                    None,self.REPLACE(second,('%','%%'))),'%'))
            else:
                second = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT(
                    '%|',Expression(None,self.REPLACE(
                        Expression(None,self.REPLACE(
                            second,('%','%%'))),('|','||'))),'|%'))
            else:
                second = '%|'+str(second).replace('%','%%')\
                    .replace('|','||')+'|%'
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first,second)

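    # 'list:' values are stored bar-encoded ('|a|b|c|'), so e.g.
    # CONTAINS(db.person.tags, 'ab') becomes a LIKE against '%|ab|%' after
    # escaping any '%' and '|' in the needle; plain text fields get '%ab%'.
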
    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def is_numerical_type(self, ftype):
        return ftype in ('integer','boolean','double','bigint') or \
            ftype.startswith('decimal')

    def REPLACE(self, first, (second, third)):
        return 'REPLACE(%s,%s,%s)' % (self.expand(first,'string'),
                                      self.expand(second,'string'),
                                      self.expand(third,'string'))

    def CONCAT(self, *items):
        return '(%s)' % ' || '.join(self.expand(x,'string') for x in items)

    def ADD(self, first, second):
        if self.is_numerical_type(first.type):
            return '(%s + %s)' % (self.expand(first),
                                  self.expand(second, first.type))
        else:
            return self.CONCAT(first, second)

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        if use_common_filters(second):
            second = self.common_filter(second,[first._tablename])
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def CAST(self, first, second):
        return 'CAST(%s AS %s)' % (first, second)

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            out = '%s.%s' % (expression.table._tablename, expression.name)
            if field_type == 'string' and not expression.type in (
                'string','text','json','password'):
                out = self.CAST(out, self.types['text'])
            return out
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                out = op(first, second, **optional_args)
            elif not first is None:
                out = op(first,**optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                out = '(%s)' % op
            else:
                out = op()
            return out
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)

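    # e.g. expand(db.person.name == 'Max') walks the Query tree (op=EQ,
    # first=Field, second='Max') and yields "(person.name = 'Max')";
    # plain values are rendered through represent() for the given type.
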
    def table_alias(self, tbl):
        return str(tbl)

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._ot or other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=''):
        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                self.log(query + '\n', table)
                self.execute(query)
            table._db.commit()
            self.log('success!\n', table)
        finally:
            pass

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name,
                                     self.expand(value, field.type)) \
                              for (field, value) in fields])
        tablename = "%s" % self.db[tablename]
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

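    # e.g. _update('person', db.person.id > 3, [(db.person.name, 'Max')])
    # returns roughly "UPDATE person SET name='Max' WHERE (person.id > 3);"
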
    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        try:
            self.execute(sql)
        except Exception:
            e = sys.exc_info()[1]
            table = self.db[tablename]
            if hasattr(table,'_on_update_error'):
                return table._on_update_error(table,query,fields,e)
            raise e
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)

        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]

        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None

        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type=='reference '+table._tablename \
                        and field.ondelete=='CASCADE':
                    db(field.belongs(deleted)).delete()

        return counter

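    # SQLite does not enforce web2py's ON DELETE CASCADE declarations, so
    # delete() above emulates them: it snapshots the matching ids before the
    # DELETE and then recursively deletes the referencing rows afterwards.
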
    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames)==1:
            return tablenames[0]
        elif len(tablenames)<1:
            raise RuntimeError("No table selected")
        else:
            raise RuntimeError("Too many tables selected")

    def expand_all(self, fields, tablenames):
        db = self.db
        new_fields = []
        append = new_fields.append
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            elif isinstance(item,str):
                if REGEX_TABLE_DOT_FIELD.match(item):
                    tablename,fieldname = item.split('.')
                    append(db[tablename][fieldname])
                else:
                    append(Expression(db,lambda item=item:item))
            else:
                append(item)
        # ensure there is at least one field
        if not new_fields:
            for table in tablenames:
                for field in db[table]:
                    append(field)
        return new_fields

    def _select(self, query, fields, attributes):
        tables = self.tables
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        args_get = attributes.get
        tablenames = tables(query)
        tablenames_for_common_filters = tablenames
        for field in fields:
            if isinstance(field, basestring) \
                    and REGEX_TABLE_DOT_FIELD.match(field):
                tn,fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)

        if len(tablenames) < 1:
            raise SyntaxError('Set: no tables selected')
        self._colnames = map(self.expand, fields)
        def geoexpand(field):
            if isinstance(field.type,str) and field.type.startswith('geometry'):
                field = field.st_astext()
            return self.expand(field)
        sql_f = ', '.join(map(geoexpand, fields))
        sql_o = ''
        sql_s = ''
        left = args_get('left', False)
        inner_join = args_get('join', False)
        distinct = args_get('distinct', False)
        groupby = args_get('groupby', False)
        orderby = args_get('orderby', False)
        having = args_get('having', False)
        limitby = args_get('limitby', False)
        orderby_on_limitby = args_get('orderby_on_limitby', True)
        for_update = args_get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError('invalid select attribute: for_update')
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join
                      if not isinstance(t,Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge={}
            [itables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont
             if t in itables_to_merge]
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames
                         if not t in iimportant_tablenames]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join
                     if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge={}
            [tables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            tablenames_for_common_filters = [t for t in tablenames
                                             if not t in joinont ]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames
                        if not t in important_tablenames ]
        else:
            excluded = tablenames

        if use_common_filters(query):
            query = self.common_filter(query,tablenames_for_common_filters)
        sql_w = ' WHERE ' + self.expand(query) if query else ''

        if inner_join and not left:
            sql_t = ', '.join([self.table_alias(t) for t in iexcluded + \
                                   itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
        elif not inner_join and left:
            sql_t = ', '.join([self.table_alias(t) for t in excluded + \
                                   tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                      iimportant_tablenames + \
                                      tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = \
                all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([self.table_alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        else:
            sql_t = ', '.join(self.table_alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if (limitby and not groupby and tablenames and orderby_on_limitby and not orderby):
            sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in (hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey or [self.db[t]._id.name])])

        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)

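    # e.g. limitby=(10, 30) renders as '... LIMIT 20 OFFSET 10'; adapters
    # for engines with a different paging syntax (TOP on MSSQL, ROWNUM on
    # Oracle, and so on) override this method.
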
    def _fetchall(self):
        return self.cursor.fetchall()

    def _select_aux(self, sql, fields, attributes):
        args_get = attributes.get
        cache = args_get('cache',None)
        if not cache:
            self.execute(sql)
            rows = self._fetchall()
        else:
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql + '/rows'
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            def _select_aux2():
                self.execute(sql)
                return self._fetchall()
            rows = cache_model(key,_select_aux2,time_expire)
        if isinstance(rows,tuple):
            rows = list(rows)
        limitby = args_get('limitby', None) or (0,)
        rows = self.rowslice(rows,limitby[0],None)
        processor = args_get('processor',self.parse)
        cacheable = args_get('cacheable',False)
        return processor(rows,fields,self._colnames,cacheable=cacheable)

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, possibly empty.
        """
        sql = self._select(query, fields, attributes)
        cache = attributes.get('cache', None)
        if cache and attributes.get('cacheable',False):
            del attributes['cache']
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            args = (sql,fields,attributes)
            return cache_model(
                key,
                lambda self=self,args=args:self._select_aux(*args),
                time_expire)
        else:
            return self._select_aux(sql,fields,attributes)

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, tablenames)
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(self.table_alias(t) for t in tablenames)
        if distinct:
            if isinstance(distinct,(list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
                (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]

    def commit(self):
        if self.connection:
            return self.connection.commit()

    def rollback(self):
        if self.connection:
            return self.connection.rollback()

    def concat_add(self, tablename):
        return ', ADD '

    def create_sequence_and_triggers(self, query, table, **args):
        self.execute(query)

    def constraint_name(self, table, fieldname):
        return '%s_%s__constraint' % (table,fieldname)

    def log_execute(self, *a, **b):
        if not self.connection: return None
        command = a[0]
        if hasattr(self,'filter_sql_command'):
            command = self.filter_sql_command(command)
        if self.db._debug:
            LOGGER.debug('SQL: %s' % command)
        self.db._lastsql = command
        t0 = time.time()
        ret = self.cursor.execute(command, *a[1:], **b)
        self.db._timings.append((command,time.time()-t0))
        del self.db._timings[:-TIMINGSSIZE]
        return ret

    def execute(self, *a, **b):
        return self.log_execute(*a, **b)

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            value = fieldtype.encoder(obj)
            if fieldtype.type in ('string','text', 'json'):
                return self.adapt(value)
            return value
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if field_is_type('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str,obj)
            else:
                obj = map(int,[o for o in obj if o != ''])
        # obj can be a list of (bar-encoded) values or a single value
        if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if not r is None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in '0F':
                return self.smart_adapt(self.TRUE)
            else:
                return self.smart_adapt(self.FALSE)
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(long(obj))
        if field_is_type('decimal'):
            return str(obj)
        elif field_is_type('reference'):
            if fieldtype.find('.')>0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(long(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat(self.T_SEP)[:19]
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+self.T_SEP+'00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'json':
            if not self.native_json:
                if have_serializers:
                    obj = serializers.json(obj)
                elif simplejson:
                    obj = simplejson.dumps(obj)
                else:
                    raise RuntimeError("missing simplejson")
        if not isinstance(obj,bytes):
            obj = bytes(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return self.adapt(obj)

    def represent_exceptions(self, obj, fieldtype):
        return None

    def lastrowid(self, table):
        return None

    def rowslice(self, rows, minimum=0, maximum=None):
        """
        By default this function does nothing;
        overload when db does not do slicing.
        """
        return rows

    def parse_value(self, value, field_type, blob_decode=True):
        if field_type != 'blob' and isinstance(value, str):
            try:
                value = value.decode(self.db._db_codec)
            except Exception:
                pass
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        if isinstance(field_type, SQLCustomType):
            value = field_type.decoder(value)
        if not isinstance(field_type, str) or value is None:
            return value
        elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
            return value
        elif field_type.startswith('geo'):
            return value
        elif field_type == 'blob' and not blob_decode:
            return value
        else:
            key = REGEX_TYPE.match(field_type).group(0)
            return self.parsemap[key](value,field_type)

    def parse_reference(self, value, field_type):
        referee = field_type[10:].strip()
        if not '.' in referee:
            value = Reference(value)
            value._table, value._record = self.db[referee], None
        return value

    def parse_boolean(self, value, field_type):
        return value == self.TRUE or str(value)[:1].lower() == 't'

    def parse_date(self, value, field_type):
        if isinstance(value, datetime.datetime):
            return value.date()
        if not isinstance(value, (datetime.date,datetime.datetime)):
            (y, m, d) = map(int, str(value)[:10].strip().split('-'))
            value = datetime.date(y, m, d)
        return value

    def parse_time(self, value, field_type):
        if not isinstance(value, datetime.time):
            time_items = map(int,str(value)[:8].strip().split(':')[:3])
            if len(time_items) == 3:
                (h, mi, s) = time_items
            else:
                (h, mi, s) = time_items + [0]
            value = datetime.time(h, mi, s)
        return value

    def parse_datetime(self, value, field_type):
        if not isinstance(value, datetime.datetime):
            value = str(value)
            date_part,time_part,timezone = value[:10],value[11:19],value[19:]
            if '+' in timezone:
                ms,tz = timezone.split('+')
                h,m = tz.split(':')
                dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
            elif '-' in timezone:
                ms,tz = timezone.split('-')
                h,m = tz.split(':')
                dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
            else:
                dt = None
            (y, m, d) = map(int,date_part.split('-'))
            time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
            while len(time_parts)<3: time_parts.append(0)
            time_items = map(int,time_parts)
            (h, mi, s) = time_items
            value = datetime.datetime(y, m, d, h, mi, s)
            if dt:
                value = value + dt
        return value

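    # parse_datetime accepts both ' ' and 'T' as the date/time separator and
    # folds a trailing '+HH:MM'/'-HH:MM' offset into the naive result, e.g.
    # '2012-01-01T10:30:00+02:00' -> datetime.datetime(2012, 1, 1, 12, 30, 0).
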
    def parse_blob(self, value, field_type):
        return base64.b64decode(str(value))

    def parse_decimal(self, value, field_type):
        decimals = int(field_type[8:-1].split(',')[-1])
        if self.dbengine in ('sqlite', 'spatialite'):
            value = ('%.' + str(decimals) + 'f') % value
        if not isinstance(value, decimal.Decimal):
            value = decimal.Decimal(str(value))
        return value

    def parse_id(self, value, field_type):
        return long(value)

    def parse_integer(self, value, field_type):
        return long(value)

    def parse_double(self, value, field_type):
        return float(value)

    def parse_json(self, value, field_type):
        if not self.native_json:
            if not isinstance(value, basestring):
                raise RuntimeError('json data not a string')
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            if have_serializers:
                value = serializers.loads_json(value)
            elif simplejson:
                value = simplejson.loads(value)
            else:
                raise RuntimeError("missing simplejson")
        return value

    def build_parsemap(self):
        self.parsemap = {
            'id':self.parse_id,
            'integer':self.parse_integer,
            'bigint':self.parse_integer,
            'float':self.parse_double,
            'double':self.parse_double,
            'reference':self.parse_reference,
            'boolean':self.parse_boolean,
            'date':self.parse_date,
            'time':self.parse_time,
            'datetime':self.parse_datetime,
            'blob':self.parse_blob,
            'decimal':self.parse_decimal,
            'json':self.parse_json,
            'list:integer':self.parse_list_integers,
            'list:reference':self.parse_list_references,
            'list:string':self.parse_list_strings,
            }

    def parse(self, rows, fields, colnames, blob_decode=True,
              cacheable = False):
        db = self.db
        virtualtables = []
        new_rows = []
        tmps = []
        for colname in colnames:
            if not REGEX_TABLE_DOT_FIELD.match(colname):
                tmps.append(None)
            else:
                (tablename, _the_sep_, fieldname) = colname.partition('.')
                table = db[tablename]
                field = table[fieldname]
                ft = field.type
                tmps.append((tablename,fieldname,table,field,ft))
        for (i,row) in enumerate(rows):
            new_row = Row()
            for (j,colname) in enumerate(colnames):
                value = row[j]
                tmp = tmps[j]
                if tmp:
                    (tablename,fieldname,table,field,ft) = tmp
                    if tablename in new_row:
                        colset = new_row[tablename]
                    else:
                        colset = new_row[tablename] = Row()
                    if tablename not in virtualtables:
                        virtualtables.append(tablename)
                    value = self.parse_value(value,ft,blob_decode)
                    if field.filter_out:
                        value = field.filter_out(value)
                    colset[fieldname] = value

                    # backward compatibility for custom-named id fields
                    if ft=='id' and fieldname!='id' and \
                            not 'id' in table.fields:
                        colset['id'] = value

                    if ft == 'id' and not cacheable:
                        # GAE datastore values need their numeric key
                        # extracted before building record helpers
                        if isinstance(self, GoogleDatastoreAdapter):
                            id = value.key().id_or_name()
                            colset[fieldname] = id
                            colset.gae_item = value
                        else:
                            id = value
                        colset.update_record = RecordUpdater(colset,table,id)
                        colset.delete_record = RecordDeleter(table,id)
                        if table._db._lazy_tables:
                            colset['__get_lazy_reference__'] = LazyReferenceGetter(table, id)
                        for rfield in table._referenced_by:
                            referee_link = db._referee_name and \
                                db._referee_name % dict(
                                table=rfield.tablename,field=rfield.name)
                            if referee_link and not referee_link in colset:
                                colset[referee_link] = LazySet(rfield,id)
                else:
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colname] = \
                        self.parse_value(value,
                                         fields[j].type,blob_decode)
                    new_column_name = \
                        REGEX_SELECT_AS_PARSER.search(colname)
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row,column_name[0],value)
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)

        # new-style virtual fields
        for tablename in virtualtables:
            table = db[tablename]
            fields_virtual = [(f,v) for (f,v) in table.iteritems()
                              if isinstance(v,FieldVirtual)]
            fields_lazy = [(f,v) for (f,v) in table.iteritems()
                           if isinstance(v,FieldMethod)]
            if fields_virtual or fields_lazy:
                for row in rowsobj.records:
                    box = row[tablename]
                    for f,v in fields_virtual:
                        try:
                            box[f] = v.f(row)
                        except AttributeError:
                            pass
                    for f,v in fields_lazy:
                        try:
                            box[f] = (v.handler or VirtualCommand)(v.f,row)
                        except AttributeError:
                            pass

            # old-style virtual fields
            for item in table.virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename:item})
                except (KeyError, AttributeError):
                    # this happens when only a partial select was done
                    pass
        return rowsobj

    def common_filter(self, query, tablenames):
        tenant_fieldname = self.db._request_tenant

        for tablename in tablenames:
            table = self.db[tablename]

            # apply user-provided common filters
            if table._common_filter is not None:
                query = query & table._common_filter(query)

            # apply multi-tenancy filters
            if tenant_fieldname in table:
                default = table[tenant_fieldname].default
                if default is not None:
                    newquery = table[tenant_fieldname] == default
                    if query is None:
                        query = newquery
                    else:
                        query = query & newquery
        return query

    def CASE(self, query, t, f):
        def represent(x):
            types = {type(True): 'boolean', type(0): 'integer', type(1.0): 'double'}
            if x is None:
                return 'NULL'
            elif isinstance(x, Expression):
                return str(x)
            else:
                return self.represent(x, types.get(type(x), 'string'))
        return Expression(self.db, 'CASE WHEN %s THEN %s ELSE %s END' % \
                          (self.expand(query), represent(t), represent(f)))
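
    # Usage sketch (illustrative, assuming a defined table db.person):
    #   expr = db._adapter.CASE(db.person.name == 'James', 1, 0)
    #   db(db.person).select(expr)
    # renders as SELECT CASE WHEN person.name = 'James' THEN 1 ELSE 0 END ...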

###################################################################################
# List of all the available adapters; they all extend BaseAdapter.
###################################################################################

class SQLiteAdapter(BaseAdapter):
    drivers = ('sqlite2', 'sqlite3')

    can_select_for_update = None  # emulated below with BEGIN IMMEDIATE TRANSACTION

    def EXTRACT(self, field, what):
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            if lookup != 'epoch':
                (i, j) = table[lookup]
                return int(s[i:j])
            else:
                return time.mktime(datetime.datetime.strptime(
                    s, '%Y-%m-%d %H:%M:%S').timetuple())
        except:
            return None

    @staticmethod
    def web2py_regexp(expression, item):
        return re.compile(expression).search(item) is not None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            self.dbpath = ':memory:'
        else:
            self.dbpath = uri.split('://', 1)[1]
            if self.dbpath[0] != '/':
                if PYTHON_VERSION == 2:
                    self.dbpath = pjoin(
                        self.folder.decode(path_encoding).encode('utf8'),
                        self.dbpath)
                else:
                    self.dbpath = pjoin(self.folder, self.dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=self.dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def REGEXP(self, first, second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second, 'string'))

    def select(self, query, fields, attributes):
        """
        Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
        Note that the entire database, rather than one record, is locked
        (it will be locked eventually anyway by the following UPDATE).
        """
        if attributes.get('for_update', False) and not 'cache' in attributes:
            self.execute('BEGIN IMMEDIATE TRANSACTION;')
        return super(SQLiteAdapter, self).select(query, fields, attributes)
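
    # Usage sketch (illustrative): db(db.person.id > 0).select(for_update=True)
    # issues BEGIN IMMEDIATE TRANSACTION before the SELECT on SQLite.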

class SpatiaLiteAdapter(SQLiteAdapter):
    drivers = ('sqlite3', 'sqlite2')

    types = copy.copy(BaseAdapter.types)
    types.update(geometry='GEOMETRY')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "spatialite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        self.srid = srid
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('spatialite:memory'):
            self.dbpath = ':memory:'
        else:
            self.dbpath = uri.split('://', 1)[1]
            if self.dbpath[0] != '/':
                self.dbpath = pjoin(
                    self.folder.decode(path_encoding).encode('utf8'),
                    self.dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=self.dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def ST_ASGEOJSON(self, first, second):
        return 'AsGeoJSON(%s,%s,%s)' % (self.expand(first),
                                        second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        return 'AsText(%s)' % (self.expand(first))

    def ST_SIMPLIFY(self, first, second):
        return 'Simplify(%s,%s)' % (self.expand(first),
                                    self.expand(second, 'double'))

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326  # the SpatiaLite default
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            value = "ST_GeomFromText('%s',%s)" % (obj, srid)
            return value
        return BaseAdapter.represent(self, obj, fieldtype)

class JDBCSQLiteAdapter(SQLiteAdapter):
    drivers = ('zxJDBC_sqlite',)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        # (connection setup elided in this excerpt)
        self.connector = connector
        if do_connect: self.reconnect()

class MySQLAdapter(BaseAdapter):
    drivers = ('MySQLdb', 'pymysql', 'mysqlconnector')

    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'json': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
        'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    QUOTE_TEMPLATE = "`%s`"

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
                                        parameters[0], parameters[1])

    def EPOCH(self, first):
        return "UNIX_TIMESTAMP(%s)" % self.expand(first)

    def CONCAT(self, *items):
        return 'CONCAT(%s)' % ','.join(self.expand(x, 'string') for x in items)

    def REGEXP(self, first, second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second, 'string'))

    def _drop(self, table, mode):
        # without disabling foreign key checks MySQL refuses to drop
        # tables that are referenced by others
        return ['SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table,
                'SET FOREIGN_KEY_CHECKS=1;']

    def _insert_empty(self, table):
        return 'INSERT INTO %s VALUES (DEFAULT);' % table

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(db=db,
                           user=credential_decoder(user),
                           passwd=credential_decoder(password),
                           host=host,
                           port=port,
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self, table):
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])

class PostgreSQLAdapter(BaseAdapter):
    drivers = ('psycopg2', 'pg8000')

    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    QUOTE_TEMPLATE = '%s'

    def varquote(self, name):
        return varquote_aux(name, '"%s"')

    def adapt(self, obj):
        if self.driver_name == 'psycopg2':
            return psycopg2_adapt(obj).getquoted()
        elif self.driver_name == 'pg8000':
            return "'%s'" % str(obj).replace("%", "%%").replace("'", "''")
        else:
            return "'%s'" % str(obj).replace("'", "''")

    def sequence_name(self, table):
        return '%s_id_Seq' % table

    def ADD(self, first, second):
        t = first.type
        if t in ('text', 'string', 'password', 'json', 'upload', 'blob'):
            return '(%s || %s)' % (self.expand(first), self.expand(second, t))
        else:
            return '(%s + %s)' % (self.expand(first), self.expand(second, t))

    def prepare(self, key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self, key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self, key):
        self.execute("ROLLBACK PREPARED '%s';" % key)
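
    # Usage sketch (illustrative): these hooks back web2py's two-phase
    # commit, e.g. DAL.distributed_transaction_commit(db1, db2), which
    # PREPAREs on every participating connection before committing.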

    def create_sequence_and_triggers(self, query, table, **args):
        # with PostgreSQL the sequence is created implicitly by the
        # SERIAL/BIGSERIAL id column, so only the table DDL is executed
        self.execute(query)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                % (db, user, host, port, password)

        if self.driver:
            self.__version__ = "%s %s" % (self.driver.__name__,
                                          self.driver.__version__)
        else:
            self.__version__ = None
        def connector(msg=msg, driver_args=driver_args):
            return self.driver.connect(msg, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.connection.set_client_encoding('UTF8')
        self.execute("SET standard_conforming_strings=on;")
        self.try_json()

    def lastrowid(self, table):
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def try_json(self):
        # enable the native JSON type when the server supports it
        # (PostgreSQL >= 9.2); note that pg8000 and zxJDBC report the
        # server version as a string, while psycopg2 reports an integer
        if self.driver_name == "pg8000":
            supports_json = self.connection.server_version >= "9.2.0"
        elif (self.driver_name == "psycopg2") and \
             (self.driver.__version__ >= "2.0.12"):
            supports_json = self.connection.server_version >= 90200
        elif self.driver_name == "zxJDBC":
            supports_json = self.connection.dbversion >= "9.2.0"
        else:
            supports_json = None
        if supports_json:
            self.types["json"] = "JSON"
            self.native_json = True
        else:
            LOGGER.debug("Your database version does not support the JSON "
                         "data type (using TEXT instead)")

    def LIKE(self, first, second):
        args = (self.expand(first), self.expand(second, 'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(%s LIKE %s)' % (
                self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
        else:
            return '(%s LIKE %s)' % args

    def ILIKE(self, first, second):
        args = (self.expand(first), self.expand(second, 'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(%s LIKE %s)' % (
                self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
        else:
            return '(%s ILIKE %s)' % args

    def REGEXP(self, first, second):
        return '(%s ~ %s)' % (self.expand(first),
                              self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand(second + '%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand('%' + second, 'string'))
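
    # Sketch of the generated SQL (illustrative): on PostgreSQL
    #   db.person.name.startswith('J')  ->  (person.name ILIKE 'J%')
    # i.e. startswith/endswith are case-insensitive here, while LIKE is not.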

    def ST_ASGEOJSON(self, first, second):
        """
        http://postgis.org/docs/ST_AsGeoJSON.html
        """
        return 'ST_AsGeoJSON(%s,%s,%s,%s)' % (second['version'],
            self.expand(first), second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        """
        http://postgis.org/docs/ST_AsText.html
        """
        return 'ST_AsText(%s)' % (self.expand(first))

    def ST_X(self, first):
        """
        http://postgis.org/docs/ST_X.html
        """
        return 'ST_X(%s)' % (self.expand(first))

    def ST_Y(self, first):
        """
        http://postgis.org/docs/ST_Y.html
        """
        return 'ST_Y(%s)' % (self.expand(first))

    def ST_CONTAINS(self, first, second):
        """
        http://postgis.org/docs/ST_Contains.html
        """
        return 'ST_Contains(%s,%s)' % (self.expand(first),
                                       self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        """
        http://postgis.org/docs/ST_Distance.html
        """
        return 'ST_Distance(%s,%s)' % (self.expand(first),
                                       self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        """
        http://postgis.org/docs/ST_Equals.html
        """
        return 'ST_Equals(%s,%s)' % (self.expand(first),
                                     self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        """
        http://postgis.org/docs/ST_Intersects.html
        """
        return 'ST_Intersects(%s,%s)' % (self.expand(first),
                                         self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        """
        http://postgis.org/docs/ST_Overlaps.html
        """
        return 'ST_Overlaps(%s,%s)' % (self.expand(first),
                                       self.expand(second, first.type))

    def ST_SIMPLIFY(self, first, second):
        """
        http://postgis.org/docs/ST_Simplify.html
        """
        return 'ST_Simplify(%s,%s)' % (self.expand(first),
                                       self.expand(second, 'double'))

    def ST_TOUCHES(self, first, second):
        """
        http://postgis.org/docs/ST_Touches.html
        """
        return 'ST_Touches(%s,%s)' % (self.expand(first),
                                      self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        """
        http://postgis.org/docs/ST_Within.html
        """
        return 'ST_Within(%s,%s)' % (self.expand(first),
                                     self.expand(second, first.type))
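
    # Usage sketch (illustrative): with a geometry field db.place.loc and the
    # st_* Expression helpers exposed by the DAL, a query such as
    #   db(db.place.loc.st_contains('POINT(1 2)')).select()
    # expands to ... WHERE ST_Contains(place.loc, ST_GeomFromText(...)) ...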

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            if field_is_type('geometry'):
                value = "ST_GeomFromText('%s',%s)" % (obj, srid)
            elif field_is_type('geography'):
                value = "ST_GeogFromText('SRID=%s;%s')" % (srid, obj)
            else:
                raise SyntaxError('Invalid field type %s' % fieldtype)
            return value
        return BaseAdapter.represent(self, obj, fieldtype)

class NewPostgreSQLAdapter(PostgreSQLAdapter):
    drivers = ('psycopg2', 'pg8000')

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BIGINT[]',
        'list:string': 'TEXT[]',
        'list:reference': 'BIGINT[]',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def parse_list_integers(self, value, field_type):
        return value

    def parse_list_references(self, value, field_type):
        return [self.parse_reference(r, field_type[5:]) for r in value]

    def parse_list_strings(self, value, field_type):
        return value

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if field_is_type('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str, obj)
            else:
                obj = map(int, obj)
            return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
        return BaseAdapter.represent(self, obj, fieldtype)
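
    # Sketch (illustrative): with native arrays the driver already returns
    # Python lists, so the parse_list_* overrides are pass-throughs, and
    #   represent([1, 2, 3], 'list:integer')  ->  "ARRAY[1,2,3]"
    #   represent('x', 'list:string')         ->  "ARRAY['x']"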

class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
    drivers = ('zxJDBC',)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connector(msg=msg, driver_args=driver_args):
            return self.driver.connect(*msg, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.connection.set_client_encoding('UTF8')
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.try_json()

class OracleAdapter(BaseAdapter):
    drivers = ('cx_Oracle',)

    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'bigint': 'NUMBER',
        'float': 'FLOAT',
        'double': 'BINARY_DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'NUMBER PRIMARY KEY',
        'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'dbms_random.value'

    def NOT_NULL(self, default, field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default, field_type)

    def _drop(self, table, mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode),
                'DROP SEQUENCE %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
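
    # Sketch: Oracle (pre-12c) has no LIMIT/OFFSET, so limitby=(10, 20)
    # nests the query as
    #   SELECT ... FROM (SELECT w_tmp.*, ROWNUM w_row FROM (...) w_tmp
    #                    WHERE ROWNUM<=20) ... WHERE w_row > 10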

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + ' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        if not 'threaded' in driver_args:
            driver_args['threaded'] = True
        def connector(uri=ruri, driver_args=driver_args):
            return self.driver.connect(uri, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command, args=None):
        args = args or []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        if command[-1:] == ';':
            command = command[:-1]
        return self.log_execute(command, args)
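
    # Sketch: a statement carrying :CLOB('...') literals is rewritten so the
    # CLOB payloads travel as bind variables, e.g. (illustrative)
    #   "UPDATE t SET b=:CLOB('abc');" -> log_execute("UPDATE t SET b=:1", ['abc'])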

    def create_sequence_and_triggers(self, query, table, **args):
        tablename = table._tablename
        id_name = table._id.name
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
        self.execute("""
        CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
        DECLARE
            curr_val NUMBER;
            diff_val NUMBER;
            PRAGMA autonomous_transaction;
        BEGIN
            IF :NEW.%(id)s IS NOT NULL THEN
                EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
                diff_val := :NEW.%(id)s - curr_val - 1;
                IF diff_val != 0 THEN
                  EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
                  EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
                  EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
                END IF;
            END IF;
            SELECT %(sequence_name)s.nextval INTO :NEW.%(id)s FROM DUAL;
        END;
        """ % dict(trigger_name=trigger_name, tablename=tablename,
                   sequence_name=sequence_name, id=id_name))

    def fetchall(self):
        # CLOB columns must be read explicitly, or the LOB handles expire
        if any(x[1] == cx_Oracle.CLOB for x in self.cursor.description):
            return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c)
                           for c in r]) for r in self.cursor]
        else:
            return self.cursor.fetchall()

class MSSQLAdapter(BaseAdapter):
    drivers = ('pyodbc',)
    T_SEP = 'T'

    QUOTE_TEMPLATE = "[%s]"

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def concat_add(self, tablename):
        return '; ALTER TABLE %s ADD ' % tablename

    def EXTRACT(self, field, what):
        return "DATEPART(%s,%s)" % (what, self.expand(field))

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def CAST(self, first, second):
        return first  # apparently no cast necessary in MSSQL

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY CLUSTERED (%s)' % key

    def AGGREGATE(self, first, what):
        if what == 'LENGTH':
            what = 'LEN'
        return "%s(%s)" % (what, self.expand(first))

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    TRUE = 1
    FALSE = 0

    REGEX_DSN = re.compile('^(?P<dsn>.+)$')
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
    REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        # if no credentials are given, the remainder of the URI is an ODBC DSN
        ruri = uri.split('://', 1)[1]
        if '@' not in ruri:
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
            cnxn = dsn
        else:
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'
            # collect the optional ?key=value pairs into ODBC connection
            # keywords (keys are uppercased; some drivers insist on it)
            argsdict = {'DRIVER': '{SQL Server}'}
            urlargs = m.group('urlargs') or ''
            for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connector(cnxn=cnxn, driver_args=driver_args):
            return self.driver.connect(cnxn, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def lastrowid(self, table):
        self.execute('SELECT SCOPE_IDENTITY();')
        return long(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]
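
    # Note (illustrative): TOP alone cannot skip rows, so for
    # limitby=(lmin, lmax) the adapter fetches the first lmax rows and the
    # base select path then calls rowslice(rows, lmin, None) to drop the
    # first lmin of them client-side.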

    def EPOCH(self, first):
        return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)

    def ST_ASTEXT(self, first):
        return '%s.STAsText()' % (self.expand(first))

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if field_is_type('geometry'):
            srid = 0
            geotype, parms = fieldtype[:-1].split('(')
            if parms:
                srid = parms
            return "geometry::STGeomFromText('%s',%s)" % (obj, srid)
        elif fieldtype == 'geography':
            srid = 4326
            geotype, parms = fieldtype[:-1].split('(')
            if parms:
                srid = parms
            return "geography::STGeomFromText('%s',%s)" % (obj, srid)
        return BaseAdapter.represent(self, obj, fieldtype)

3348 """ experimental support for pagination in MSSQL"""
3349 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3350 if limitby:
3351 (lmin, lmax) = limitby
3352 if lmin == 0:
3353 sql_s += ' TOP %i' % lmax
3354 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3355 lmin += 1
3356 sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
3357 sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
3358 sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
3359 sql_f_inner = [f for f in sql_f.split(',')]
3360 sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
3361 sql_f_iproxy = ', '.join(sql_f_iproxy)
3362 sql_f_oproxy = ', '.join(sql_f_outer)
3363 return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
3364 return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
3365 - def rowslice(self,rows,minimum=0,maximum=None):
3367
3369 """ support for true pagination in MSSQL >= 2012"""
3370
3371 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3372 if not sql_o:
3373
3374
3375 sql_o += ' ORDER BY %s' % self.RANDOM()
3376 if limitby:
3377 (lmin, lmax) = limitby
3378 sql_o += ' OFFSET %i ROWS FETCH NEXT %i ROWS ONLY' % (lmin, lmax - lmin)
3379 return 'SELECT %s %s FROM %s%s%s;' % \
3380 (sql_s, sql_f, sql_t, sql_w, sql_o)
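
    # Sketch: limitby=(10, 20) renders as
    #   SELECT ... ORDER BY ... OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY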

    def rowslice(self, rows, minimum=0, maximum=None):
        return rows

class MSSQL2Adapter(MSSQLAdapter):
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'json': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def represent(self, obj, fieldtype):
        value = BaseAdapter.represent(self, obj, fieldtype)
        if fieldtype in ('string', 'text', 'json') and value[:1] == "'":
            value = 'N' + value
        return value

class VerticaAdapter(MSSQLAdapter):
    drivers = ('pyodbc',)
    T_SEP = ' '

    types = {
        'boolean': 'BOOLEAN',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BYTEA',
        'json': 'VARCHAR(%(length)s)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'IDENTITY',
        'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BYTEA',
        'list:string': 'BYTEA',
        'list:reference': 'BYTEA',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def EXTRACT(self, first, what):
        return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE %s %s;' % (tablename, mode or '')]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)

    def lastrowid(self, table):
        self.execute('SELECT LAST_INSERT_ID();')
        return long(self.cursor.fetchone()[0])

class SybaseAdapter(MSSQLAdapter):
    drivers = ('Sybase',)

    types = {
        'boolean': 'BIT',
        'string': 'CHAR VARYING(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR VARYING(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'CHAR VARYING(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "sybase"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()

        ruri = uri.split('://', 1)[1]
        if '@' not in ruri:
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
        else:
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'

            dsn = 'sybase:host=%s:%s;dbname=%s' % (host, port, db)

            driver_args.update(user=credential_decoder(user),
                               password=credential_decoder(password))

        def connector(dsn=dsn, driver_args=driver_args):
            return self.driver.connect(dsn, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

class FireBirdAdapter(BaseAdapter):
    drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')

    commit_on_alter_table = False
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'json': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'big-id': 'BIGINT PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self, tablename):
        return 'genid_%s' % tablename

    def trigger_name(self, tablename):
        return 'trg_id_%s' % tablename

    def EPOCH(self, first):
        return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)

    def NOT_NULL(self, default, field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default, field_type)

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def CONTAINS(self, first, second, case_sensitive=False):
        if first.type.startswith('list:'):
            second = Expression(None, self.CONCAT('|', Expression(
                None, self.expand(second, 'string')), '|'))
        return '(%s CONTAINING %s)' % (self.expand(first),
                                       self.expand(second, 'string'))

    def _drop(self, table, mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode),
                'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
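
    # Sketch: limitby=(10, 20) becomes SELECT FIRST 10 SKIP 10 ... in
    # Firebird's dialect.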

    def _truncate(self, table, mode=''):
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        charset = m.group('charset') or 'UTF8'
        driver_args.update(dsn='%s/%s:%s' % (host, port, db),
                           user=credential_decoder(user),
                           password=credential_decoder(password),
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

class FireBirdEmbeddedAdapter(FireBirdAdapter):
    drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError('Path required')
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        host = ''
        driver_args.update(host=host,
                           database=pathdb,
                           user=credential_decoder(user),
                           password=credential_decoder(password),
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

# (InformixAdapter and InformixSEAdapter are elided from this excerpt)

class DB2Adapter(BaseAdapter):
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return "BLOB('%s')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', '-').replace(':', '.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + '-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        # (connection setup elided in this excerpt)
        self.connector = connector
        if do_connect: self.reconnect()

    def execute(self, command):
        if command[-1:] == ';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self, table):
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return long(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]

class TeradataAdapter(BaseAdapter):
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        # simplified constraint syntax for Teradata,
        # which does not support ON DELETE actions
        'id': 'INT GENERATED ALWAYS AS IDENTITY',
        'reference': 'INT',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',
        'big-reference': 'BIGINT',
        'reference FK': ' REFERENCES %(foreign_key)s',
        'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
        }

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        # (connection setup elided in this excerpt)
        self.connector = connector
        if do_connect: self.reconnect()

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['DELETE FROM %s ALL;' % (tablename)]

INGRES_SEQNAME = 'ii***lineitemsequence'  # deliberately not a valid object
                                          # name; replaced per-table below

class IngresAdapter(BaseAdapter):
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                # OFFSET requires Ingres 9.2+
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "ingres"
        self._driver = pyodbc
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        connstr = uri.split(':', 1)[1]
        # simple URI processing
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        if '=' in connstr:
            # assume a full ODBC connection string was passed
            ruri = connstr
        else:
            # assume only a (local) database name was passed in
            database_name = connstr
            default_driver_name = 'Ingres'
            vnode = '(local)'
            servertype = 'ingres'
            ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
        def connector(cnxn=ruri, driver_args=driver_args):
            return self.driver.connect(cnxn, **driver_args)

        self.connector = connector

        if do_connect: self.reconnect()

    def create_sequence_and_triggers(self, query, table, **args):
        # post-CREATE TABLE step: set up the id sequence (if needed) and
        # reorganize the table to btree for performance
        if hasattr(table, '_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table.primarykey]))
            self.execute(modify_tbl_sql)
        else:
            tmp_seqname = '%s_iisq' % table._tablename
            query = query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))

    def lastrowid(self, table):
        tmp_seqname = '%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return long(self.cursor.fetchone()[0])


class IngresUnicodeAdapter(IngresAdapter):
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'json': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'INTEGER4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        'big-id': 'BIGINT not null unique with default next value for %s' % INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }
4158
4160 drivers = ('sapdb',)
4161
4162 support_distributed_transaction = False
4163 types = {
4164 'boolean': 'CHAR(1)',
4165 'string': 'VARCHAR(%(length)s)',
4166 'text': 'LONG',
4167 'json': 'LONG',
4168 'password': 'VARCHAR(%(length)s)',
4169 'blob': 'LONG',
4170 'upload': 'VARCHAR(%(length)s)',
4171 'integer': 'INT',
4172 'bigint': 'BIGINT',
4173 'float': 'FLOAT',
4174 'double': 'DOUBLE PRECISION',
4175 'decimal': 'FIXED(%(precision)s,%(scale)s)',
4176 'date': 'DATE',
4177 'time': 'TIME',
4178 'datetime': 'TIMESTAMP',
4179 'id': 'INT PRIMARY KEY',
4180 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4181 'list:integer': 'LONG',
4182 'list:string': 'LONG',
4183 'list:reference': 'LONG',
4184 'big-id': 'BIGINT PRIMARY KEY',
4185 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4186 }
4187
4188 - def sequence_name(self,table):
4189 return '%s_id_Seq' % table
4190
4191 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4192 if limitby:
4193 (lmin, lmax) = limitby
4194 if len(sql_w) > 1:
4195 sql_w_row = sql_w + ' AND w_row > %i' % lmin
4196 else:
4197 sql_w_row = 'WHERE w_row > %i' % lmin
4198 return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
4199 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4200
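# Rough shape of the paginated SQL built by select_limitby() above for
# limitby=(10, 20) (illustrative only; f/t/w/o stand for the field, table,
# where and order-by fragments):
#
#     SELECT f FROM (SELECT w_tmp.*, ROWNO w_row
#                    FROM (SELECT f FROM t w o) w_tmp
#                    WHERE ROWNO=20) t WHERE w_row > 10 o;
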
4201 - def create_sequence_and_triggers(self, query, table, **args):
4202 
4203 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
4204 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
4205 % (table._tablename, table._id.name, table._sequence_name))
4206 self.execute(query)
4207
4208 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
4209
4210
4211 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4212 credential_decoder=IDENTITY, driver_args={},
4213 adapter_args={}, do_connect=True, after_connection=None):
4214 self.db = db
4215 self.dbengine = "sapdb"
4216 self.uri = uri
4217 if do_connect: self.find_driver(adapter_args,uri)
4218 self.pool_size = pool_size
4219 self.folder = folder
4220 self.db_codec = db_codec
4221 self._after_connection = after_connection
4222 self.find_or_make_work_folder()
4223 ruri = uri.split('://',1)[1]
4224 m = self.REGEX_URI.match(ruri)
4225 if not m:
4226 raise SyntaxError("Invalid URI string in DAL")
4227 user = credential_decoder(m.group('user'))
4228 if not user:
4229 raise SyntaxError('User required')
4230 password = credential_decoder(m.group('password'))
4231 if not password:
4232 password = ''
4233 host = m.group('host')
4234 if not host:
4235 raise SyntaxError('Host name required')
4236 db = m.group('db')
4237 if not db:
4238 raise SyntaxError('Database name required')
4239 def connector(user=user, password=password, database=db,
4240 host=host, driver_args=driver_args):
4241 return self.driver.Connection(user, password, database,
4242 host, **driver_args)
4243 self.connector = connector
4244 if do_connect: self.reconnect()
4245
4246 - def lastrowid(self,table):
4247 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
4248 return long(self.cursor.fetchone()[0])
4249
4250 -class CubridAdapter(MySQLAdapter):
4251 drivers = ('cubriddb',)
4252
4253 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
4254
4255 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
4256 credential_decoder=IDENTITY, driver_args={},
4257 adapter_args={}, do_connect=True, after_connection=None):
4258 self.db = db
4259 self.dbengine = "cubrid"
4260 self.uri = uri
4261 if do_connect: self.find_driver(adapter_args,uri)
4262 self.pool_size = pool_size
4263 self.folder = folder
4264 self.db_codec = db_codec
4265 self._after_connection = after_connection
4266 self.find_or_make_work_folder()
4267 ruri = uri.split('://',1)[1]
4268 m = self.REGEX_URI.match(ruri)
4269 if not m:
4270 raise SyntaxError(
4271 "Invalid URI string in DAL: %s" % self.uri)
4272 user = credential_decoder(m.group('user'))
4273 if not user:
4274 raise SyntaxError('User required')
4275 password = credential_decoder(m.group('password'))
4276 if not password:
4277 password = ''
4278 host = m.group('host')
4279 if not host:
4280 raise SyntaxError('Host name required')
4281 db = m.group('db')
4282 if not db:
4283 raise SyntaxError('Database name required')
4284 port = int(m.group('port') or '30000')
4285 charset = m.group('charset') or 'utf8'
4288 def connector(host=host,port=port,db=db,
4289 user=user,passwd=password,driver_args=driver_args):
4290 return self.driver.connect(host,port,db,user,passwd,**driver_args)
4291 self.connector = connector
4292 if do_connect: self.reconnect()
4293
4294 - def after_connection(self):
4295 self.execute('SET FOREIGN_KEY_CHECKS=1;')
4296 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4297
4301 -class DatabaseStoredFile:
4302 
4303 web2py_filesystem = False
4304
4305 - def escape(self,obj):
4306 return self.db._adapter.escape(obj)
4307
4308 - def __init__(self,db,filename,mode):
4309 if not db._adapter.dbengine in ('mysql', 'postgres', 'sqlite'):
4310 raise RuntimeError("only MySQL/Postgres/SQLite can store metadata .table files in database for now")
4311 self.db = db
4312 self.filename = filename
4313 self.mode = mode
4314 if not self.web2py_filesystem:
4315 if db._adapter.dbengine == 'mysql':
4316 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
4317 elif db._adapter.dbengine in ('postgres', 'sqlite'):
4318 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
4319 self.db.executesql(sql)
4320 DatabaseStoredFile.web2py_filesystem = True
4321 self.p=0
4322 self.data = ''
4323 if mode in ('r','rw','a'):
4324 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
4325 % filename
4326 rows = self.db.executesql(query)
4327 if rows:
4328 self.data = rows[0][0]
4329 elif exists(filename):
4330 datafile = open(filename, 'r')
4331 try:
4332 self.data = datafile.read()
4333 finally:
4334 datafile.close()
4335 elif mode in ('r','rw'):
4336 raise RuntimeError("File %s does not exist" % filename)
4337
4338 - def read(self, bytes):
4339 data = self.data[self.p:self.p+bytes]
4340 self.p += len(data)
4341 return data
4342
4343 - def readline(self):
4344 i = self.data.find('\n',self.p)+1
4345 if i>0:
4346 data, self.p = self.data[self.p:i], i
4347 else:
4348 data, self.p = self.data[self.p:], len(self.data)
4349 return data
4350
4351 - def write(self,data):
4352 self.data += data
4353 
4354 - def close_connection(self):
4355 if self.db is not None:
4356 self.db.executesql(
4357 "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
4358 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
4359 % (self.filename, self.data.replace("'","''"))
4360 self.db.executesql(query)
4361 self.db.commit()
4362 self.db = None
4363
4364 - def close(self):
4365 pass
4366 
4367 @staticmethod
4368 - def exists(db, filename):
4369 if exists(filename):
4370 return True
4371 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
4372 try:
4373 if db.executesql(query):
4374 return True
4375 except Exception, e:
4376 if not (db._adapter.isOperationalError(e) or
4377 db._adapter.isProgrammingError(e)):
4378 raise
4379
4380 tb = traceback.format_exc()
4381 LOGGER.error("Could not retrieve %s\n%s" % (filename, tb))
4382 return False
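
# A minimal sketch of how this class is meant to be used (hypothetical
# filename; the adapters below drive it internally for .table metadata):
#
#     f = DatabaseStoredFile(db, 'person.table', 'w')
#     f.write(metadata_string)
#     f.close_connection()
#     DatabaseStoredFile.exists(db, 'person.table')    # -> True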
4383
4385 -class UseDatabaseStoredFile:
4386 
4387 - def file_exists(self, filename):
4388 return DatabaseStoredFile.exists(self.db, filename)
4389 
4390 - def file_open(self, filename, mode='rb', lock=True):
4391 return DatabaseStoredFile(self.db, filename, mode)
4392 
4393 - def file_close(self, fileobj):
4394 fileobj.close_connection()
4395 
4396 - def file_delete(self, filename):
4397 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
4398 self.db.executesql(query)
4399 self.db.commit()
4400
4401 -class GoogleSQLAdapter(UseDatabaseStoredFile, MySQLAdapter):
4402 uploads_in_blob = True
4403
4404 REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
4405
4406 - def __init__(self, db, uri='google:sql://realm:domain/database',
4407 pool_size=0, folder=None, db_codec='UTF-8',
4408 credential_decoder=IDENTITY, driver_args={},
4409 adapter_args={}, do_connect=True, after_connection=None):
4410
4411 self.db = db
4412 self.dbengine = "mysql"
4413 self.uri = uri
4414 self.pool_size = pool_size
4415 self.db_codec = db_codec
4416 self._after_connection = after_connection
4417 if do_connect: self.find_driver(adapter_args, uri)
4418 self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
4419 os.sep+'applications'+os.sep,1)[1])
4420 ruri = uri.split("://")[1]
4421 m = self.REGEX_URI.match(ruri)
4422 if not m:
4423 raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
4424 instance = credential_decoder(m.group('instance'))
4425 self.dbstring = db = credential_decoder(m.group('db'))
4426 driver_args['instance'] = instance
4427 if not 'charset' in driver_args:
4428 driver_args['charset'] = 'utf8'
4429 self.createdb = createdb = adapter_args.get('createdb',True)
4430 if not createdb:
4431 driver_args['database'] = db
4432 def connector(driver_args=driver_args):
4433 return rdbms.connect(**driver_args)
4434 self.connector = connector
4435 if do_connect: self.reconnect()
4436
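# A hedged connection sketch for this adapter (project, instance and
# database names are placeholders; requires the App Engine rdbms driver):
#
#     db = DAL('google:sql://my-project:my-instance/mydatabase')
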
4437 - def after_connection(self):
4438 if self.createdb:
4439
4440 self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
4441 self.execute('USE %s' % self.dbstring)
4442 self.execute("SET FOREIGN_KEY_CHECKS=1;")
4443 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4444
4445 - def execute(self, command, *a, **b):
4446 return self.log_execute(command.decode('utf8'), *a, **b)
4447 
4448 - def find_driver(self, adapter_args, uri=None):
4449 self.adapter_args = adapter_args
4450 self.driver = "google"
4451
4452 -class NoSQLAdapter(BaseAdapter):
4453 can_select_for_update = False
4454
4455 @staticmethod
4456 - def to_unicode(obj):
4457 if isinstance(obj, str):
4458 return obj.decode('utf8')
4459 elif not isinstance(obj, unicode):
4460 return unicode(obj)
4461 return obj
4462
4463 - def id_query(self, table):
4464 return table._id > 0
4465
4466 - def represent(self, obj, fieldtype):
4467 field_is_type = fieldtype.startswith
4468 if isinstance(obj, CALLABLETYPES):
4469 obj = obj()
4470 if isinstance(fieldtype, SQLCustomType):
4471 return fieldtype.encoder(obj)
4472 if isinstance(obj, (Expression, Field)):
4473 raise SyntaxError("not supported on GAE")
4474 if self.dbengine == 'google:datastore':
4475 if isinstance(fieldtype, gae.Property):
4476 return obj
4477 is_string = isinstance(fieldtype,str)
4478 is_list = is_string and field_is_type('list:')
4479 if is_list:
4480 if not obj:
4481 obj = []
4482 if not isinstance(obj, (list, tuple)):
4483 obj = [obj]
4484 if obj == '' and not \
4485 (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
4486 return None
4487 if not obj is None:
4488 if isinstance(obj, list) and not is_list:
4489 obj = [self.represent(o, fieldtype) for o in obj]
4490 elif fieldtype in ('integer','bigint','id'):
4491 obj = long(obj)
4492 elif fieldtype == 'double':
4493 obj = float(obj)
4494 elif is_string and field_is_type('reference'):
4495 if isinstance(obj, (Row, Reference)):
4496 obj = obj['id']
4497 obj = long(obj)
4498 elif fieldtype == 'boolean':
4499 if obj and not str(obj)[0].upper() in '0F':
4500 obj = True
4501 else:
4502 obj = False
4503 elif fieldtype == 'date':
4504 if not isinstance(obj, datetime.date):
4505 (y, m, d) = map(int,str(obj).strip().split('-'))
4506 obj = datetime.date(y, m, d)
4507 elif isinstance(obj,datetime.datetime):
4508 (y, m, d) = (obj.year, obj.month, obj.day)
4509 obj = datetime.date(y, m, d)
4510 elif fieldtype == 'time':
4511 if not isinstance(obj, datetime.time):
4512 time_items = map(int,str(obj).strip().split(':')[:3])
4513 if len(time_items) == 3:
4514 (h, mi, s) = time_items
4515 else:
4516 (h, mi, s) = time_items + [0]
4517 obj = datetime.time(h, mi, s)
4518 elif fieldtype == 'datetime':
4519 if not isinstance(obj, datetime.datetime):
4520 (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
4521 time_items = map(int,str(obj)[11:].strip().split(':')[:3])
4522 while len(time_items)<3:
4523 time_items.append(0)
4524 (h, mi, s) = time_items
4525 obj = datetime.datetime(y, m, d, h, mi, s)
4526 elif fieldtype == 'blob':
4527 pass
4528 elif fieldtype == 'json':
4529 if isinstance(obj, basestring):
4530 obj = self.to_unicode(obj)
4531 if have_serializers:
4532 obj = serializers.loads_json(obj)
4533 elif simplejson:
4534 obj = simplejson.loads(obj)
4535 else:
4536 raise RuntimeError("missing simplejson")
4537 elif is_string and field_is_type('list:string'):
4538 return map(self.to_unicode,obj)
4539 elif is_list:
4540 return map(int,obj)
4541 else:
4542 obj = self.to_unicode(obj)
4543 return obj
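
# Illustrative conversions performed by represent() above (Python values,
# not SQL literals, come back on NoSQL backends):
#
#     adapter.represent('123', 'integer')        # -> 123L
#     adapter.represent('2012-01-31', 'date')    # -> datetime.date(2012, 1, 31)
#     adapter.represent('', 'integer')           # -> None (empty non-text field)
#     adapter.represent('T', 'boolean')          # -> True ('0'/'F...' -> False)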
4544
4545 - def _insert(self,table,fields):
4546 return 'insert %s in %s' % (fields, table)
4547
4548 - def _count(self,query,distinct=None):
4549 return 'count %s' % repr(query)
4550
4551 - def _select(self,query,fields,attributes):
4552 return 'select %s where %s' % (repr(fields), repr(query))
4553
4554 - def _delete(self,tablename, query):
4555 return 'delete %s where %s' % (repr(tablename),repr(query))
4556
4557 - def _update(self,tablename,query,fields):
4558 return 'update %s (%s) where %s' % (repr(tablename),
4559 repr(fields),repr(query))
4560
4562 """
4563 remember: no transactions on many NoSQL
4564 """
4565 pass
4566
4568 """
4569 remember: no transactions on many NoSQL
4570 """
4571 pass
4572
4574 """
4575 remember: no transactions on many NoSQL
4576 """
4577 pass
4578
4579
4580 # these functions should never be called!
4581 - def OR(self,first,second): raise SyntaxError("Not supported")
4582 - def AND(self,first,second): raise SyntaxError("Not supported")
4583 - def AS(self,first,second): raise SyntaxError("Not supported")
4584 - def ON(self,first,second): raise SyntaxError("Not supported")
4585 - def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
4586 - def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
4587 - def ADD(self,first,second): raise SyntaxError("Not supported")
4588 - def SUB(self,first,second): raise SyntaxError("Not supported")
4589 - def MUL(self,first,second): raise SyntaxError("Not supported")
4590 - def DIV(self,first,second): raise SyntaxError("Not supported")
4591 - def LOWER(self,first): raise SyntaxError("Not supported")
4592 - def UPPER(self,first): raise SyntaxError("Not supported")
4593 - def EXTRACT(self,first,what): raise SyntaxError("Not supported")
4594 - def LENGTH(self, first): raise SyntaxError("Not supported")
4595 - def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
4596 - def LEFT_JOIN(self): raise SyntaxError("Not supported")
4597 - def RANDOM(self): raise SyntaxError("Not supported")
4598 - def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
4599 - def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
4600 - def ILIKE(self,first,second): raise SyntaxError("Not supported")
4601 - def drop(self,table,mode): raise SyntaxError("Not supported")
4602 - def alias(self,table,alias): raise SyntaxError("Not supported")
4603 - def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
4604 - def distributed_transaction_begin(self,key): raise SyntaxError("Not supported")
4605 - def prepare(self,key): raise SyntaxError("Not supported")
4606 - def commit_prepared(self,key): raise SyntaxError("Not supported")
4607 - def rollback_prepared(self,key): raise SyntaxError("Not supported")
4608 - def concat_add(self,table): raise SyntaxError("Not supported")
4609 - def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
4610 - def create_sequence_and_triggers(self,query,table,**args): pass
4611 - def log_execute(self,*a,**b): raise SyntaxError("Not supported")
4612 - def execute(self,*a,**b): raise SyntaxError("Not supported")
4613 - def represent_exceptions(self, obj, fieldtype): raise SyntaxError("Not supported")
4614 - def lastrowid(self,table): raise SyntaxError("Not supported")
4615 - def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
4616
4617
4618 -class GAEF(object):
4619 - def __init__(self,name,op,value,apply):
4620 self.name=name=='id' and '__key__' or name
4621 self.op=op
4622 self.value=value
4623 self.apply=apply
4624 - def __repr__(self):
4625 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
4626
4627 -class GoogleDatastoreAdapter(NoSQLAdapter):
4628 uploads_in_blob = True
4629 types = {}
4630
4631 - def file_exists(self, filename): pass
4632 - def file_open(self, filename, mode='rb', lock=True): pass
4633 - def file_close(self, fileobj): pass
4634
4635 REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
4636
4637 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4638 credential_decoder=IDENTITY, driver_args={},
4639 adapter_args={}, do_connect=True, after_connection=None):
4640 self.types.update({
4641 'boolean': gae.BooleanProperty,
4642 'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
4643 'text': gae.TextProperty,
4644 'json': gae.TextProperty,
4645 'password': gae.StringProperty,
4646 'blob': gae.BlobProperty,
4647 'upload': gae.StringProperty,
4648 'integer': gae.IntegerProperty,
4649 'bigint': gae.IntegerProperty,
4650 'float': gae.FloatProperty,
4651 'double': gae.FloatProperty,
4652 'decimal': GAEDecimalProperty,
4653 'date': gae.DateProperty,
4654 'time': gae.TimeProperty,
4655 'datetime': gae.DateTimeProperty,
4656 'id': None,
4657 'reference': gae.IntegerProperty,
4658 'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
4659 'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4660 'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4661 })
4662 self.db = db
4663 self.uri = uri
4664 self.dbengine = 'google:datastore'
4665 self.folder = folder
4666 db['_lastsql'] = ''
4667 self.db_codec = 'UTF-8'
4668 self._after_connection = after_connection
4669 self.pool_size = 0
4670 match = self.REGEX_NAMESPACE.match(uri)
4671 if match:
4672 namespace_manager.set_namespace(match.group('namespace'))
4673
4674 - def parse_id(self, value, field_type):
4675 return value
4676 
4677 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
4678 myfields = {}
4679 for field in table:
4680 if isinstance(polymodel,Table) and field.name in polymodel.fields():
4681 continue
4682 attr = {}
4683 if isinstance(field.custom_qualifier, dict):
4684
4685 attr = field.custom_qualifier
4686 field_type = field.type
4687 if isinstance(field_type, SQLCustomType):
4688 ftype = self.types[field_type.native or field_type.type](**attr)
4689 elif isinstance(field_type, gae.Property):
4690 ftype = field_type
4691 elif field_type.startswith('id'):
4692 continue
4693 elif field_type.startswith('decimal'):
4694 precision, scale = field_type[7:].strip('()').split(',')
4695 precision = int(precision)
4696 scale = int(scale)
4697 ftype = GAEDecimalProperty(precision, scale, **attr)
4698 elif field_type.startswith('reference'):
4699 if field.notnull:
4700 attr = dict(required=True)
4701 referenced = field_type[10:].strip()
4702 ftype = self.types[field_type[:9]](referenced, **attr)
4703 elif field_type.startswith('list:reference'):
4704 if field.notnull:
4705 attr['required'] = True
4706 referenced = field_type[15:].strip()
4707 ftype = self.types[field_type[:14]](**attr)
4708 elif field_type.startswith('list:'):
4709 ftype = self.types[field_type](**attr)
4710 elif not field_type in self.types\
4711 or not self.types[field_type]:
4712 raise SyntaxError('Field: unknown field type: %s' % field_type)
4713 else:
4714 ftype = self.types[field_type](**attr)
4715 myfields[field.name] = ftype
4716 if not polymodel:
4717 table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
4718 elif polymodel==True:
4719 table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
4720 elif isinstance(polymodel,Table):
4721 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
4722 else:
4723 raise SyntaxError("polymodel must be None, True, a table or a tablename")
4724 return None
4725
4726 - def expand(self,expression,field_type=None):
4727 if isinstance(expression,Field):
4728 if expression.type in ('text', 'blob', 'json'):
4729 raise SyntaxError('AppEngine does not index by: %s' % expression.type)
4730 return expression.name
4731 elif isinstance(expression, (Expression, Query)):
4732 if not expression.second is None:
4733 return expression.op(expression.first, expression.second)
4734 elif not expression.first is None:
4735 return expression.op(expression.first)
4736 else:
4737 return expression.op()
4738 elif field_type:
4739 return self.represent(expression,field_type)
4740 elif isinstance(expression,(list,tuple)):
4741 return ','.join([self.represent(item,field_type) for item in expression])
4742 else:
4743 return str(expression)
4744
4745
4746 - def AND(self,first,second):
4747 a = self.expand(first)
4748 b = self.expand(second)
4749 a += b
4750 return a
4751 
4752
4753 - def EQ(self,first,second=None):
4754 if isinstance(second, Key):
4755 return [GAEF(first.name,'=',second,lambda a,b:a==b)]
4756 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
4757
4758 - def NE(self,first,second=None):
4759 if first.type != 'id':
4760 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
4761 else:
4762 if not second is None:
4763 second = Key.from_path(first._tablename, long(second))
4764 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
4765
4766 - def LT(self,first,second=None):
4767 if first.type != 'id':
4768 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
4769 else:
4770 second = Key.from_path(first._tablename, long(second))
4771 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
4772
4773 - def LE(self,first,second=None):
4774 if first.type != 'id':
4775 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
4776 else:
4777 second = Key.from_path(first._tablename, long(second))
4778 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
4779
4780 - def GT(self,first,second=None):
4781 if first.type != 'id' or second==0 or second == '0':
4782 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
4783 else:
4784 second = Key.from_path(first._tablename, long(second))
4785 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
4786
4787 - def GE(self,first,second=None):
4788 if first.type != 'id':
4789 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
4790 else:
4791 second = Key.from_path(first._tablename, long(second))
4792 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
4793
4796
4797 - def COMMA(self,first,second):
4799
4800 - def BELONGS(self,first,second=None):
4801 if not isinstance(second,(list, tuple)):
4802 raise SyntaxError("Not supported")
4803 if first.type != 'id':
4804 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
4805 else:
4806 second = [Key.from_path(first._tablename, int(i)) for i in second]
4807 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
4808
4809 - def CONTAINS(self,first,second,case_sensitive=False):
4810 # silently ignoring: GAE can only do case sensitive matches!
4811 if not first.type.startswith('list:'):
4812 raise SyntaxError("Not supported")
4813 return [GAEF(first.name,'=',self.expand(second,first.type[5:]),lambda a,b:b in a)]
4814
4815 - def NOT(self,first):
4816 nops = { self.EQ: self.NE,
4817 self.NE: self.EQ,
4818 self.LT: self.GE,
4819 self.GT: self.LE,
4820 self.LE: self.GT,
4821 self.GE: self.LT}
4822 if not isinstance(first,Query):
4823 raise SyntaxError("Not supported")
4824 nop = nops.get(first.op,None)
4825 if not nop:
4826 raise SyntaxError("Not supported %s" % first.op.__name__)
4827 first.op = nop
4828 return self.expand(first)
4829
4830 - def truncate(self,table,mode):
4831 self.db(self.db._adapter.id_query(table)).delete()
4832 
4833 - def select_raw(self,query,fields=None,attributes=None):
4834 db = self.db
4835 fields = fields or []
4836 attributes = attributes or {}
4837 args_get = attributes.get
4838 new_fields = []
4839 for item in fields:
4840 if isinstance(item,SQLALL):
4841 new_fields += item._table
4842 else:
4843 new_fields.append(item)
4844 fields = new_fields
4845 if query:
4846 tablename = self.get_table(query)
4847 elif fields:
4848 tablename = fields[0].tablename
4849 query = db._adapter.id_query(fields[0].table)
4850 else:
4851 raise SyntaxError("Unable to determine a tablename")
4852
4853 if query:
4854 if use_common_filters(query):
4855 query = self.common_filter(query,[tablename])
4856
4857
4858 tableobj = db[tablename]._tableobj
4859 filters = self.expand(query)
4860
4861 projection = None
4862 if len(db[tablename].fields) == len(fields):
4863
4864 projection = None
4865 elif args_get('projection') == True:
4866 projection = []
4867 for f in fields:
4868 if f.type in ['text', 'blob', 'json']:
4869 raise SyntaxError(
4870 "text and blob field types not allowed in projection queries")
4871 else:
4872 projection.append(f.name)
4873 elif args_get('filterfields') == True:
4874 projection = []
4875 for f in fields:
4876 projection.append(f.name)
4877
4878
4879
4880 query_projection = [
4881 p for p in projection if \
4882 p != db[tablename]._id.name] if projection and \
4883 args_get('projection') == True\
4884 else None
4885
4886 cursor = None
4887 if isinstance(args_get('reusecursor'), str):
4888 cursor = args_get('reusecursor')
4889 items = gae.Query(tableobj, projection=query_projection,
4890 cursor=cursor)
4891
4892 for filter in filters:
4893 if args_get('projection') == True and \
4894 filter.name in query_projection and \
4895 filter.op in ['=', '<=', '>=']:
4896 raise SyntaxError(
4897 "projection fields cannot have equality filters")
4898 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
4899 continue
4900 elif filter.name=='__key__' and filter.op=='=':
4901 if filter.value==0:
4902 items = []
4903 elif isinstance(filter.value, Key):
4904
4905
4906
4907 item = tableobj.get(filter.value)
4908 items = (item and [item]) or []
4909 else:
4910
4911
4912
4913 item = tableobj.get_by_id(filter.value)
4914 items = (item and [item]) or []
4915 elif isinstance(items,list):
4916 items = [i for i in items if filter.apply(
4917 getattr(i,filter.name),filter.value)]
4918 else:
4919 if filter.name=='__key__' and filter.op != 'in':
4920 items.order('__key__')
4921 items = items.filter('%s %s' % (filter.name,filter.op),
4922 filter.value)
4923 if not isinstance(items,list):
4924 if args_get('left', None):
4925 raise SyntaxError('Set: no left join in appengine')
4926 if args_get('groupby', None):
4927 raise SyntaxError('Set: no groupby in appengine')
4928 orderby = args_get('orderby', False)
4929 if orderby:
4930
4931 if isinstance(orderby, (list, tuple)):
4932 orderby = xorify(orderby)
4933 if isinstance(orderby,Expression):
4934 orderby = self.expand(orderby)
4935 orders = orderby.split(', ')
4936 for order in orders:
4937 order={'-id':'-__key__','id':'__key__'}.get(order,order)
4938 items = items.order(order)
4939 if args_get('limitby', None):
4940 (lmin, lmax) = attributes['limitby']
4941 (limit, offset) = (lmax - lmin, lmin)
4942 rows = items.fetch(limit,offset=offset)
4943
4944
4945 if args_get('reusecursor'):
4946 db['_lastcursor'] = items.cursor()
4947 items = rows
4948 return (items, tablename, projection or db[tablename].fields)
4949
4950 - def select(self,query,fields,attributes):
4951 """
4952 This is the GAE version of select. Some notes to consider:
4953 - db['_lastsql'] is not set because there is no SQL statement string
4954 for a GAE query
4955 - 'nativeRef' is a magical fieldname used for self references on GAE
4956 - optional attribute 'projection' when set to True will trigger
4957 use of the GAE projection queries. Note that there are rules for
4958 what is accepted imposed by GAE: each field must be indexed,
4959 projection queries cannot contain blob or text fields, and you
4960 cannot use == and also select that same field. See https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
4961 - optional attribute 'filterfields' when set to True web2py will only
4962 parse the explicitly listed fields into the Rows object, even though
4963 all fields are returned in the query. This can be used to reduce
4964 memory usage in cases where true projection queries are not
4965 usable.
4966 - optional attribute 'reusecursor' allows use of cursor with queries
4967 that have the limitby attribute. Set the attribute to True for the
4968 first query, set it to the value of db['_lastcursor'] to continue
4969 a previous query. The user must save the cursor value between
4970 requests, and the filters must be identical. It is up to the user
4971 to follow google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
4972 """
4973
4974 (items, tablename, fields) = self.select_raw(query,fields,attributes)
4975
4976 rows = [[(t==self.db[tablename]._id.name and item) or \
4977 (t=='nativeRef' and item) or getattr(item, t) \
4978 for t in fields] for item in items]
4979 colnames = ['%s.%s' % (tablename, t) for t in fields]
4980 processor = attributes.get('processor',self.parse)
4981 return processor(rows,fields,colnames,False)
4982
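# A hedged sketch of the select() attributes documented above ('person'
# is a hypothetical table):
#
#     rows = db(db.person.name > '').select(
#         db.person.name, db.person.birthdate, projection=True)
#     page1 = db(db.person).select(limitby=(0, 10), reusecursor=True)
#     page2 = db(db.person).select(limitby=(0, 10),
#                                  reusecursor=db['_lastcursor'])
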
4983 - def count(self,query,distinct=None,limit=None):
4984 if distinct:
4985 raise RuntimeError("COUNT DISTINCT not supported")
4986 (items, tablename, fields) = self.select_raw(query)
4987
4988 try:
4989 return len(items)
4990 except TypeError:
4991 return items.count(limit=limit)
4992
4993 - def delete(self,tablename, query):
4994 """
4995 This function was changed on 2010-05-04 because according to
4996 http://code.google.com/p/googleappengine/issues/detail?id=3119
4997 GAE no longer supports deleting more than 1000 records.
4998 """
4999
5000 (items, tablename, fields) = self.select_raw(query)
5001
5002 if not isinstance(items,list):
5003
5004
5005 leftitems = items.fetch(1000, keys_only=True)
5006 counter = 0
5007 while len(leftitems):
5008 counter += len(leftitems)
5009 gae.delete(leftitems)
5010 leftitems = items.fetch(1000, keys_only=True)
5011 else:
5012 counter = len(items)
5013 gae.delete(items)
5014 return counter
5015
5016 - def update(self,tablename,query,update_fields):
5017
5018 (items, tablename, fields) = self.select_raw(query)
5019 counter = 0
5020 for item in items:
5021 for field, value in update_fields:
5022 setattr(item, field.name, self.represent(value,field.type))
5023 item.put()
5024 counter += 1
5025 LOGGER.info(str(counter))
5026 return counter
5027
5028 - def insert(self,table,fields):
5029 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
5030
5031 tmp = table._tableobj(**dfields)
5032 tmp.put()
5033 rid = Reference(tmp.key().id())
5034 (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key())
5035 return rid
5036
5037 - def bulk_insert(self,table,items):
5038 parsed_items = []
5039 for item in items:
5040 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
5041 parsed_items.append(table._tableobj(**dfields))
5042 gae.put(parsed_items)
5043 return True
5044
5045 -def uuid2int(uuidv):
5046 return uuid.UUID(uuidv).int
5047
5048 -def int2uuid(n):
5049 return str(uuid.UUID(int=n))
5050
5051 -class CouchDBAdapter(NoSQLAdapter):
5052 drivers = ('couchdb',)
5053
5054 uploads_in_blob = True
5055 types = {
5056 'boolean': bool,
5057 'string': str,
5058 'text': str,
5059 'json': str,
5060 'password': str,
5061 'blob': str,
5062 'upload': str,
5063 'integer': long,
5064 'bigint': long,
5065 'float': float,
5066 'double': float,
5067 'date': datetime.date,
5068 'time': datetime.time,
5069 'datetime': datetime.datetime,
5070 'id': long,
5071 'reference': long,
5072 'list:string': list,
5073 'list:integer': list,
5074 'list:reference': list,
5075 }
5076
5077 - def file_exists(self, filename): pass
5078 - def file_open(self, filename, mode='rb', lock=True): pass
5079 - def file_close(self, fileobj): pass
5080
5081 - def expand(self,expression,field_type=None):
5082 if isinstance(expression,Field):
5083 if expression.type=='id':
5084 return "%s._id" % expression.tablename
5085 return BaseAdapter.expand(self,expression,field_type)
5086
5087 - def AND(self,first,second):
5088 return '(%s && %s)' % (self.expand(first), self.expand(second))
5089
5090 - def OR(self,first,second):
5091 return '(%s || %s)' % (self.expand(first), self.expand(second))
5092
5093 - def EQ(self,first,second):
5094 if second is None:
5095 return '(%s == null)' % self.expand(first)
5096 return '(%s == %s)' % (self.expand(first), self.expand(second, first.type))
5097
5098 - def NE(self,first,second):
5099 if second is None:
5100 return '(%s != null)' % self.expand(first)
5101 return '(%s != %s)' % (self.expand(first), self.expand(second, first.type))
5102
5103 - def COMMA(self,first,second):
5104 return '%s + %s' % (self.expand(first), self.expand(second))
5105
5106 - def represent(self, obj, fieldtype):
5107 value = NoSQLAdapter.represent(self, obj, fieldtype)
5108 if fieldtype=='id':
5109 return repr(str(long(value)))
5110 elif fieldtype in ('date','time','datetime','boolean'):
5111 return serializers.json(value)
5112 return repr(not isinstance(value,unicode) and value \
5113 or value and value.encode('utf8'))
5114
5115 - def __init__(self,db,uri='couchdb://127.0.0.1:5984',
5116 pool_size=0,folder=None,db_codec ='UTF-8',
5117 credential_decoder=IDENTITY, driver_args={},
5118 adapter_args={}, do_connect=True, after_connection=None):
5119 self.db = db
5120 self.uri = uri
5121 if do_connect: self.find_driver(adapter_args)
5122 self.dbengine = 'couchdb'
5123 self.folder = folder
5124 db['_lastsql'] = ''
5125 self.db_codec = 'UTF-8'
5126 self._after_connection = after_connection
5127 self.pool_size = pool_size
5128
5129 url='http://'+uri[10:]
5130 def connector(url=url,driver_args=driver_args):
5131 return self.driver.Server(url,**driver_args)
5132 self.reconnect(connector,cursor=False)
5133
5134 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
5135 if migrate:
5136 try:
5137 self.connection.create(table._tablename)
5138 except:
5139 pass
5140
5141 - def insert(self,table,fields):
5142 id = uuid2int(web2py_uuid())
5143 ctable = self.connection[table._tablename]
5144 values = dict((k.name, self.represent(v, k.type)) for k, v in fields)
5145 values['_id'] = str(id)
5146 ctable.save(values)
5147 return id
5148
5149 - def _select(self,query,fields,attributes):
5150 if not isinstance(query,Query):
5151 raise SyntaxError("Not Supported")
5152 for key in set(attributes.keys())-SELECT_ARGS:
5153 raise SyntaxError('invalid select attribute: %s' % key)
5154 new_fields=[]
5155 for item in fields:
5156 if isinstance(item,SQLALL):
5157 new_fields += item._table
5158 else:
5159 new_fields.append(item)
5160 def uid(fd):
5161 return fd=='id' and '_id' or fd
5162 def get(row,fd):
5163 return fd=='id' and long(row['_id']) or row.get(fd,None)
5164 fields = new_fields
5165 tablename = self.get_table(query)
5166 fieldnames = [f.name for f in (fields or self.db[tablename])]
5167 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
5168 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
5169 fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
5170 dict(t=tablename,
5171 query=self.expand(query),
5172 order='%s._id' % tablename,
5173 fields=fields)
5174 return fn, colnames
5175
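# Illustrative map function produced by _select() above for the query
# db(db.person.name == 'James').select(db.person.name) (reindented;
# 'person' is a hypothetical table):
#
#     (function(person){
#         if ((person.name == 'James'))
#             emit(person._id, [person.name]);
#     })
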
5176 - def select(self,query,fields,attributes):
5177 if not isinstance(query,Query):
5178 raise SyntaxError("Not Supported")
5179 fn, colnames = self._select(query,fields,attributes)
5180 tablename = colnames[0].split('.')[0]
5181 ctable = self.connection[tablename]
5182 rows = [cols['value'] for cols in ctable.query(fn)]
5183 processor = attributes.get('processor',self.parse)
5184 return processor(rows,fields,colnames,False)
5185
5186 - def delete(self,tablename,query):
5187 if not isinstance(query,Query):
5188 raise SyntaxError("Not Supported")
5189 if query.first.type=='id' and query.op==self.EQ:
5190 id = query.second
5191 tablename = query.first.tablename
5192 assert(tablename == query.first.tablename)
5193 ctable = self.connection[tablename]
5194 try:
5195 del ctable[str(id)]
5196 return 1
5197 except couchdb.http.ResourceNotFound:
5198 return 0
5199 else:
5200 tablename = self.get_table(query)
5201 rows = self.select(query,[self.db[tablename]._id],{})
5202 ctable = self.connection[tablename]
5203 for row in rows:
5204 del ctable[str(row.id)]
5205 return len(rows)
5206
5207 - def update(self,tablename,query,fields):
5208 if not isinstance(query,Query):
5209 raise SyntaxError("Not Supported")
5210 if query.first.type=='id' and query.op==self.EQ:
5211 id = query.second
5212 tablename = query.first.tablename
5213 ctable = self.connection[tablename]
5214 try:
5215 doc = ctable[str(id)]
5216 for key,value in fields:
5217 doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
5218 ctable.save(doc)
5219 return 1
5220 except couchdb.http.ResourceNotFound:
5221 return 0
5222 else:
5223 tablename = self.get_table(query)
5224 rows = self.select(query,[self.db[tablename]._id],{})
5225 ctable = self.connection[tablename]
5226 table = self.db[tablename]
5227 for row in rows:
5228 doc = ctable[str(row.id)]
5229 for key,value in fields:
5230 doc[key.name] = self.represent(value,table[key.name].type)
5231 ctable.save(doc)
5232 return len(rows)
5233
5234 - def count(self,query,distinct=None):
5235 if distinct:
5236 raise RuntimeError("COUNT DISTINCT not supported")
5237 if not isinstance(query,Query):
5238 raise SyntaxError("Not Supported")
5239 tablename = self.get_table(query)
5240 rows = self.select(query,[self.db[tablename]._id],{})
5241 return len(rows)
5242
5244 """
5245 validates that the given text is clean: only contains [0-9a-zA-Z_]
5246 """
5247 if not REGEX_ALPHANUMERIC.match(text):
5248 raise SyntaxError('invalid table or field name: %s' % text)
5249 return text
5250
5251 -class MongoDBAdapter(NoSQLAdapter):
5252 native_json = True
5253 drivers = ('pymongo',)
5254
5255 uploads_in_blob = True
5256
5257 types = {
5258 'boolean': bool,
5259 'string': str,
5260 'text': str,
5261 'json': str,
5262 'password': str,
5263 'blob': str,
5264 'upload': str,
5265 'integer': long,
5266 'bigint': long,
5267 'float': float,
5268 'double': float,
5269 'date': datetime.date,
5270 'time': datetime.time,
5271 'datetime': datetime.datetime,
5272 'id': long,
5273 'reference': long,
5274 'list:string': list,
5275 'list:integer': list,
5276 'list:reference': list,
5277 }
5278
5279 error_messages = {"javascript_needed": "This must still be replaced" +
5280 " with javascript in order to work."}
5281
5282 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
5283 pool_size=0, folder=None, db_codec ='UTF-8',
5284 credential_decoder=IDENTITY, driver_args={},
5285 adapter_args={}, do_connect=True, after_connection=None):
5286
5287 self.db = db
5288 self.uri = uri
5289 if do_connect: self.find_driver(adapter_args)
5290 import random
5291 from bson.objectid import ObjectId
5292 from bson.son import SON
5293 import pymongo.uri_parser
5294
5295 m = pymongo.uri_parser.parse_uri(uri)
5296
5297 self.SON = SON
5298 self.ObjectId = ObjectId
5299 self.random = random
5300
5301 self.dbengine = 'mongodb'
5302 self.folder = folder
5303 db['_lastsql'] = ''
5304 self.db_codec = 'UTF-8'
5305 self._after_connection = after_connection
5306 self.pool_size = pool_size
5307
5308
5309 self.minimumreplication = adapter_args.get('minimumreplication',0)
5310
5311
5312
5313
5314 self.safe = adapter_args.get('safe',True)
5315
5316 if isinstance(m,tuple):
5317 m = {"database" : m[1]}
5318 if m.get('database')==None:
5319 raise SyntaxError("Database is required!")
5320
5321 def connector(uri=self.uri,m=m):
5322
5323 if hasattr(self.driver, "MongoClient"):
5324 Connection = self.driver.MongoClient
5325 else:
5326 Connection = self.driver.Connection
5327 return Connection(uri)[m.get('database')]
5328
5329 self.reconnect(connector,cursor=False)
5330
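# A minimal connection sketch (assumes a reachable MongoDB server and the
# pymongo driver; 'test' is a hypothetical database name):
#
#     db = DAL('mongodb://127.0.0.1:27017/test')
#     db.define_table('person', Field('name'))
#     db.person.insert(name='James')
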
5332 """ Convert input to a valid Mongodb ObjectId instance
5333
5334 self.object_id("<random>") -> ObjectId (not unique) instance """
5335 if not arg:
5336 arg = 0
5337 if isinstance(arg, basestring):
5338
5339 rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
5340 if arg.isdigit() and (not rawhex):
5341 arg = int(arg)
5342 elif arg == "<random>":
5343 arg = int("0x%sL" % \
5344 "".join([self.random.choice("0123456789abcdef") \
5345 for x in range(24)]), 0)
5346 elif arg.isalnum():
5347 if not arg.startswith("0x"):
5348 arg = "0x%s" % arg
5349 try:
5350 arg = int(arg, 0)
5351 except ValueError, e:
5352 raise ValueError(
5353 "invalid objectid argument string: %s" % e)
5354 else:
5355 raise ValueError("Invalid objectid argument string. " +
5356 "Requires an integer or base 16 value")
5357 elif isinstance(arg, self.ObjectId):
5358 return arg
5359
5360 if not isinstance(arg, (int, long)):
5361 raise TypeError("object_id argument must be of type " +
5362 "ObjectId or an objectid representable integer")
5363 if arg == 0:
5364 hexvalue = "".zfill(24)
5365 else:
5366 hexvalue = hex(arg)[2:].replace("L", "").rjust(24, "0")
5367 return self.ObjectId(hexvalue)
5368
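# Illustrative inputs accepted by object_id() above:
#
#     adapter.object_id(None)          # -> ObjectId('000000000000000000000000')
#     adapter.object_id('123')         # decimal string -> int -> padded hex
#     adapter.object_id('<random>')    # pseudo-random id, NOT unique; tests only
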
5369 - def parse_reference(self, value, field_type):
5370 
5371 if isinstance(value, self.ObjectId):
5372 value = long(str(value), 16)
5373 return super(MongoDBAdapter,
5374 self).parse_reference(value, field_type)
5375
5376 - def parse_id(self, value, field_type):
5377 if isinstance(value, self.ObjectId):
5378 value = long(str(value), 16)
5379 return super(MongoDBAdapter,
5380 self).parse_id(value, field_type)
5381
5382 - def represent(self, obj, fieldtype):
5383 
5384 if isinstance(obj, self.ObjectId):
5385 value = obj
5386 else:
5387 value = NoSQLAdapter.represent(self, obj, fieldtype)
5388
5389 if fieldtype =='date':
5390 if value == None:
5391 return value
5392
5393 t = datetime.time(0, 0, 0)
5394
5395
5396 return datetime.datetime.combine(value, t)
5397 elif fieldtype == 'time':
5398 if value == None:
5399 return value
5400
5401 d = datetime.date(2000, 1, 1)
5402
5403
5404 return datetime.datetime.combine(d, value)
5405 elif fieldtype == "blob":
5406 from bson import Binary
5407 if not isinstance(value, Binary):
5408 return Binary(value)
5409 return value
5410 elif (isinstance(fieldtype, basestring) and
5411 fieldtype.startswith('list:')):
5412 if fieldtype.startswith('list:reference'):
5413 newval = []
5414 for v in value:
5415 newval.append(self.object_id(v))
5416 return newval
5417 return value
5418 elif ((isinstance(fieldtype, basestring) and
5419 fieldtype.startswith("reference")) or
5420 (isinstance(fieldtype, Table)) or fieldtype=="id"):
5421 value = self.object_id(value)
5422 return value
5423
5424 - def create_table(self, table, migrate=True, fake_migrate=False,
5425 polymodel=None, isCapped=False):
5426 if isCapped:
5427 raise RuntimeError("Not implemented")
5428
5429 - def count(self, query, distinct=None, snapshot=True):
5430 if distinct:
5431 raise RuntimeError("COUNT DISTINCT not supported")
5432 if not isinstance(query,Query):
5433 raise SyntaxError("Not Supported")
5434 tablename = self.get_table(query)
5435 return long(self.select(query,[self.db[tablename]._id], {},
5436 count=True,snapshot=snapshot)['count'])
5437
5438
5439
5440
5441
5442 - def expand(self, expression, field_type=None):
5443 if isinstance(expression, Query):
5444
5445
5446
5447
5448
5449 if isinstance(expression.first,Field) and \
5450 ((expression.first.type == 'id') or \
5451 ("reference" in expression.first.type)):
5452 if expression.first.type == 'id':
5453 expression.first.name = '_id'
5454
5455 if isinstance(expression.second, (tuple, list, set)):
5456 expression.second = [self.object_id(item) for
5457 item in expression.second]
5458 else:
5459 expression.second = self.object_id(expression.second)
5460 result = expression.op(expression.first, expression.second)
5461
5462 if isinstance(expression, Field):
5463 if expression.type=='id':
5464 result = "_id"
5465 else:
5466 result = expression.name
5467 elif isinstance(expression, (Expression, Query)):
5468 if not expression.second is None:
5469 result = expression.op(expression.first, expression.second)
5470 elif not expression.first is None:
5471 result = expression.op(expression.first)
5472 elif not isinstance(expression.op, str):
5473 result = expression.op()
5474 else:
5475 result = expression.op
5476 elif field_type:
5477 result = self.represent(expression,field_type)
5478 elif isinstance(expression,(list,tuple)):
5479 result = ','.join(self.represent(item,field_type) for
5480 item in expression)
5481 else:
5482 result = expression
5483 return result
5484
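# Illustrative filter documents produced by expand() above ('person' is a
# hypothetical table):
#
#     adapter.expand(db.person.name == 'James')   # -> {'name': 'James'}
#     adapter.expand(db.person.id == 1)           # -> {'_id': ObjectId('...001')}
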
5485 - def drop(self, table, mode=''):
5486 ctable = self.connection[table._tablename]
5487 ctable.drop()
5488
5489 - def truncate(self, table, mode, safe=None):
5490 if safe == None:
5491 safe=self.safe
5492 ctable = self.connection[table._tablename]
5493 ctable.remove(None, safe=safe)
5494
5495 - def _select(self, query, fields, attributes):
5496 if 'for_update' in attributes:
5497 logging.warn('mongodb does not support for_update')
5498 for key in set(attributes.keys())-set(('limitby',
5499 'orderby','for_update')):
5500 if attributes[key]!=None:
5501 logging.warn('select attribute not implemented: %s' % key)
5502
5503 new_fields=[]
5504 mongosort_list = []
5505
5506
5507 orderby = attributes.get('orderby', False)
5508 limitby = attributes.get('limitby', False)
5509
5510 if orderby:
5511 if isinstance(orderby, (list, tuple)):
5512 orderby = xorify(orderby)
5513
5514
5515 for f in self.expand(orderby).split(','):
5516 if f.startswith('-'):
5517 mongosort_list.append((f[1:], -1))
5518 else:
5519 mongosort_list.append((f, 1))
5520 if limitby:
5521 limitby_skip, limitby_limit = limitby[0], int(limitby[1])
5522 else:
5523 limitby_skip = limitby_limit = 0
5524
5525 mongofields_dict = self.SON()
5526 mongoqry_dict = {}
5527 for item in fields:
5528 if isinstance(item, SQLALL):
5529 new_fields += item._table
5530 else:
5531 new_fields.append(item)
5532 fields = new_fields
5533 if isinstance(query,Query):
5534 tablename = self.get_table(query)
5535 elif len(fields) != 0:
5536 tablename = fields[0].tablename
5537 else:
5538 raise SyntaxError("The table name could not be found in " +
5539 "the query nor from the select statement.")
5540 mongoqry_dict = self.expand(query)
5541 fields = fields or self.db[tablename]
5542 for field in fields:
5543 mongofields_dict[field.name] = 1
5544
5545 return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
5546 limitby_limit, limitby_skip
5547
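# Shape of the tuple returned by _select() above (illustrative values):
#
#     ('person',                        # tablename
#      {'name': 'James'},               # mongoqry_dict (filter document)
#      SON([('name', 1), ('id', 1)]),   # mongofields_dict (projection)
#      [('name', 1)],                   # mongosort_list (field, direction)
#      10, 0)                           # limitby_limit, limitby_skip
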
5548 - def select(self, query, fields, attributes, count=False,
5549 snapshot=False):
5550
5551 tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
5552 limitby_limit, limitby_skip = self._select(query, fields, attributes)
5553 ctable = self.connection[tablename]
5554
5555 if count:
5556 return {'count' : ctable.find(
5557 mongoqry_dict, mongofields_dict,
5558 skip=limitby_skip, limit=limitby_limit,
5559 sort=mongosort_list, snapshot=snapshot).count()}
5560 else:
5561
5562 mongo_list_dicts = ctable.find(mongoqry_dict,
5563 mongofields_dict, skip=limitby_skip,
5564 limit=limitby_limit, sort=mongosort_list,
5565 snapshot=snapshot)
5566 rows = []
5567
5568
5569 colnames = []
5570 newnames = []
5571 for field in fields:
5572 colname = str(field)
5573 colnames.append(colname)
5574 tablename, fieldname = colname.split(".")
5575 if fieldname == "_id":
5576
5577 field.name = "id"
5578 newnames.append(".".join((tablename, field.name)))
5579
5580 for record in mongo_list_dicts:
5581 row=[]
5582 for colname in colnames:
5583 tablename, fieldname = colname.split(".")
5584
5585
5586 if fieldname == "id": fieldname = "_id"
5587 if fieldname in record:
5588 value = record[fieldname]
5589 else:
5590 value = None
5591 row.append(value)
5592 rows.append(row)
5593
5594 processor = attributes.get('processor', self.parse)
5595 result = processor(rows, fields, newnames, False)
5596 return result
5597
5598 - def _insert(self, table, fields):
5599 values = dict()
5600 for k, v in fields:
5601 if not (k.name in ["id", "safe"]):
5602 fieldname = k.name
5603 fieldtype = table[k.name].type
5604 values[fieldname] = self.represent(v, fieldtype)
5605 return values
5606
5607
5608
5609
5610 - def insert(self, table, fields, safe=None):
5611 if safe==None:
5612 safe = self.safe
5613 ctable = self.connection[table._tablename]
5614 values = self._insert(table, fields)
5615 ctable.insert(values, safe=safe)
5616 return long(str(values['_id']), 16)
5617
5618
5619 - def _update(self, tablename, query, fields):
5620 if not isinstance(query, Query):
5621 raise SyntaxError("Not Supported")
5622 filter = None
5623 if query:
5624 filter = self.expand(query)
5625
5626 modify = {'$set': dict((k.name, self.represent(v, k.type)) for
5627 k, v in fields if (not k.name in ("_id", "id")))}
5628 return modify, filter
5629
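# Illustrative output of _update() above ('person' is a hypothetical table):
#
#     modify, filter = adapter._update('person', db.person.id == 1,
#                                      [(db.person.name, 'Jim')])
#     # modify -> {'$set': {'name': 'Jim'}}
#     # filter -> {'_id': ObjectId('...001')}
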
5630 - def update(self, tablename, query, fields, safe=None):
5631 if safe == None:
5632 safe = self.safe
5633
5634
5635 if not isinstance(query, Query):
5636 raise RuntimeError("Not implemented")
5637 amount = self.count(query, False)
5638 modify, filter = self._update(tablename, query, fields)
5639 try:
5640 result = self.connection[tablename].update(filter,
5641 modify, multi=True, safe=safe)
5642 if safe:
5643 try:
5644
5645 return result["n"]
5646 except (KeyError, AttributeError, TypeError):
5647 return amount
5648 else:
5649 return amount
5650 except Exception, e:
5651
5652 raise RuntimeError("uncaught exception when updating rows: %s" % e)
5653
5654 - def _delete(self, tablename, query):
5655 if not isinstance(query, Query):
5656 raise RuntimeError("query type %s is not supported" % \
5657 type(query))
5658 return self.expand(query)
5659
5660 - def delete(self, tablename, query, safe=None):
5661 if safe is None:
5662 safe = self.safe
5663 amount = 0
5664 amount = self.count(query, False)
5665 filter = self._delete(tablename, query)
5666 self.connection[tablename].remove(filter, safe=safe)
5667 return amount
5668
5669 - def bulk_insert(self, table, items):
5670 return [self.insert(table,item) for item in items]
5671
5672
5676
5677
5678 - def NOT(self, first):
5679 result = {}
5680 result["$not"] = self.expand(first)
5681 return result
5682
5683 - def AND(self,first,second):
5684 f = self.expand(first)
5685 s = self.expand(second)
5686 f.update(s)
5687 return f
5688
5689 - def OR(self,first,second):
5690
5691 result = {}
5692 f = self.expand(first)
5693 s = self.expand(second)
5694 result['$or'] = [f,s]
5695 return result
5696
5697 - def BELONGS(self, first, second):
5698 if isinstance(second, str):
5699 return {self.expand(first) : {"$in" : [ second[:-1]]} }
5700 elif second==[] or second==() or second==set():
5701 return {1:0}
5702 items = [self.expand(item, first.type) for item in second]
5703 return {self.expand(first) : {"$in" : items} }
5704
5705 - def EQ(self,first,second=None):
5706 result = {}
5707 result[self.expand(first)] = self.expand(second)
5708 return result
5709
5710 - def NE(self, first, second=None):
5711 result = {}
5712 result[self.expand(first)] = {'$ne': self.expand(second)}
5713 return result
5714
5715 - def LT(self,first,second=None):
5716 if second is None:
5717 raise RuntimeError("Cannot compare %s < None" % first)
5718 result = {}
5719 result[self.expand(first)] = {'$lt': self.expand(second)}
5720 return result
5721
5722 - def LE(self,first,second=None):
5723 if second is None:
5724 raise RuntimeError("Cannot compare %s <= None" % first)
5725 result = {}
5726 result[self.expand(first)] = {'$lte': self.expand(second)}
5727 return result
5728
5729 - def GT(self,first,second):
5730 result = {}
5731 result[self.expand(first)] = {'$gt': self.expand(second)}
5732 return result
5733
5734 - def GE(self,first,second=None):
5735 if second is None:
5736 raise RuntimeError("Cannot compare %s >= None" % first)
5737 result = {}
5738 result[self.expand(first)] = {'$gte': self.expand(second)}
5739 return result
5740
5741 - def ADD(self, first, second):
5745
5746 - def SUB(self, first, second):
5750
5751 - def MUL(self, first, second):
5755
5756 - def DIV(self, first, second):
5760
5761 - def MOD(self, first, second):
5765
5766 - def AS(self, first, second):
5767 raise NotImplementedError(self.error_messages["javascript_needed"])
5768 return '%s AS %s' % (self.expand(first), second)
5769
5770
5771
5772
5773 - def ON(self, first, second):
5774 raise NotImplementedError("This is not possible in NoSQL" +
5775 " but can be simulated with a wrapper.")
5776 return '%s ON %s' % (self.expand(first), self.expand(second))
5777
5778
5779
5780
5781 - def COMMA(self, first, second):
5783
5784 - def LIKE(self, first, second):
5788
5789 - def STARTSWITH(self, first, second):
5790 
5791 return {self.expand(first): ('/^%s/' % \
5792 self.expand(second, 'string'))}
5793
5794 - def ENDSWITH(self, first, second):
5795 
5796 return {self.expand(first): ('/%s$/' % \
5797 self.expand(second, 'string'))}
5798
5799 - def CONTAINS(self, first, second, case_sensitive=False):
5800
5801
5802
5803 val = second if isinstance(second,self.ObjectId) else \
5804 {'$regex':".*" + re.escape(self.expand(second, 'string')) + ".*"}
5805 return {self.expand(first) : val}
5806
5807 - def LIKE(self, first, second):
5812
5813
5814 - def STARTSWITH(self, first, second):
5815 
5816 import re
5817 return {self.expand(first): {'$regex' : '^' +
5818 re.escape(self.expand(second,
5819 'string'))}}
5820
5821
5822 - def ENDSWITH(self, first, second):
5823 
5824
5825
5826
5827 import re
5828 return {self.expand(first): {'$regex': \
5829 re.escape(self.expand(second, 'string')) + '$'}}
5830
5831
5832 - def CONTAINS(self, first, second, case_sensitive=False):
5833
5834
5835
5836
5837 return {self.expand(first) : {'$regex': \
5838 ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
5839
5841 -class IMAPAdapter(NoSQLAdapter):
5842 drivers = ('imaplib',)
5843
5844 """ IMAP server adapter
5845
5846 This class is intended as an interface with
5847 email IMAP servers to perform simple queries in the
5848 web2py DAL query syntax, so email read, search and
5849 other related IMAP mail services (such as those implemented
5850 by providers like Google(r) and Yahoo!(r))
5851 can be managed from web2py applications.
5852
5853 The code uses examples by Yuji Tomita on this post:
5854 http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
5855 and is based in docs for Python imaplib, python email
5856 and email IETF's (i.e. RFC2060 and RFC3501)
5857
5858 This adapter was tested with a small set of operations with Gmail(r). Other
5859 services' requests could raise command syntax and response data issues.
5860
5861 It creates its table and field names "statically",
5862 meaning that the developer should leave the table and field
5863 definitions to the DAL instance by calling the adapter's
5864 .define_tables() method. The tables are defined with the
5865 IMAP server mailbox list information.
5866
5867 .define_tables() returns a dictionary mapping dal tablenames
5868 to the server mailbox names with the following structure:
5869
5870 {<tablename>: str <server mailbox name>}
5871
5872 Here is a list of supported fields:
5873
5874 Field Type Description
5875 ################################################################
5876 uid string
5877 answered boolean Flag
5878 created date
5879 content list:string A list of dict text or html parts
5880 to string
5881 cc string
5882 bcc string
5883 size integer the number of octets of the message*
5884 deleted boolean Flag
5885 draft boolean Flag
5886 flagged boolean Flag
5887 sender string
5888 recent boolean Flag
5889 seen boolean Flag
5890 subject string
5891 mime string The mime header declaration
5892 email string The complete RFC822 message**
5893 attachments <type list> Each non text part as dict
5894 encoding string The main detected encoding
5895
5896 *At the application side it is measured as the length of the RFC822
5897 message string
5898
5899 WARNING: As row id's are mapped to email sequence numbers,
5900 make sure your imap client web2py app does not delete messages
5901 during select or update actions, to prevent
5902 updating or deleting different messages.
5903 Sequence numbers change whenever the mailbox is updated.
5904 To avoid these sequence number issues, it is recommended to use
5905 uid fields in query references (although the update and delete
5906 in separate actions rule still applies).
5907
5908 # This is the code recommended to start imap support
5909 # at the app's model:
5910
5911 imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
5912 imapdb.define_tables()
5913
5914 Here is an (incomplete) list of possible imap commands:
5915
5916 # Count today's unseen messages
5917 # smaller than 6000 octets from the
5918 # inbox mailbox
5919
5920 q = imapdb.INBOX.seen == False
5921 q &= imapdb.INBOX.created == datetime.date.today()
5922 q &= imapdb.INBOX.size < 6000
5923 unread = imapdb(q).count()
5924
5925 # Fetch last query messages
5926 rows = imapdb(q).select()
5927
5928 # it is also possible to filter query select results with limitby and
5929 # sequences of mailbox fields
5930
5931 set.select(<fields sequence>, limitby=(<int>, <int>))
5932
5933 # Mark last query messages as seen
5934 messages = [row.uid for row in rows]
5935 seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
5936
5937 # Delete messages in the imap database that have mails from mr. Gumby
5938
5939 deleted = 0
5940 for mailbox in imapdb.tables:
5941 deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
5942
5943 # It is also possible to mark messages for deletion instead of erasing them
5944 # directly with set.update(deleted=True)
5945
5946
5947 # This object gives access
5948 # to the adapter's auto mailbox
5949 # mapped names (which native
5950 # mailbox has which table name)
5951
5952 imapdb.mailboxes <dict> # tablename, server native name pairs
5953
5954 # To retrieve a table native mailbox name use:
5955 imapdb.<table>.mailbox
5956
5957 ### New features v2.4.1:
5958
5959 # Declare mailboxes statically with tablename, name pairs
5960 # This avoids the extra server names retrieval
5961
5962 imapdb.define_tables({"inbox": "INBOX"})
5963
5964 # Selects without content/attachments/email columns will only
5965 # fetch header and flags
5966
5967 imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
5968 """
5969
5970 types = {
5971 'string': str,
5972 'text': str,
5973 'date': datetime.date,
5974 'datetime': datetime.datetime,
5975 'id': long,
5976 'boolean': bool,
5977 'integer': int,
5978 'bigint': long,
5979 'blob': str,
5980 'list:string': str,
5981 }
5982
5983 dbengine = 'imap'
5984
5985 REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
5986
5987 - def __init__(self,
5988 db,
5989 uri,
5990 pool_size=0,
5991 folder=None,
5992 db_codec ='UTF-8',
5993 credential_decoder=IDENTITY,
5994 driver_args={},
5995 adapter_args={},
5996 do_connect=True,
5997 after_connection=None):
5998
5999
6000
6001
6002 self.db = db
6003 self.uri = uri
6004 if do_connect: self.find_driver(adapter_args)
6005 self.pool_size=pool_size
6006 self.folder = folder
6007 self.db_codec = db_codec
6008 self._after_connection = after_connection
6009 self.credential_decoder = credential_decoder
6010 self.driver_args = driver_args
6011 self.adapter_args = adapter_args
6012 self.mailbox_size = None
6013 self.static_names = None
6014 self.charset = sys.getfilesystemencoding()
6015
6016 self.imap4 = None
6017 uri = uri.split("://")[1]
6018
6019 """ MESSAGE is an identifier for sequence number"""
6020
6021 self.flags = {'deleted': '\\Deleted', 'draft': '\\Draft',
6022 'flagged': '\\Flagged', 'recent': '\\Recent',
6023 'seen': '\\Seen', 'answered': '\\Answered'}
6024 self.search_fields = {
6025 'id': 'MESSAGE', 'created': 'DATE',
6026 'uid': 'UID', 'sender': 'FROM',
6027 'to': 'TO', 'cc': 'CC',
6028 'bcc': 'BCC', 'content': 'TEXT',
6029 'size': 'SIZE', 'deleted': '\\Deleted',
6030 'draft': '\\Draft', 'flagged': '\\Flagged',
6031 'recent': '\\Recent', 'seen': '\\Seen',
6032 'subject': 'SUBJECT', 'answered': '\\Answered',
6033 'mime': None, 'email': None,
6034 'attachments': None
6035 }
6036
6037 db['_lastsql'] = ''
6038
6039 m = self.REGEX_URI.match(uri)
6040 user = m.group('user')
6041 password = m.group('password')
6042 host = m.group('host')
6043 port = int(m.group('port'))
6044 over_ssl = False
6045 if port==993:
6046 over_ssl = True
6047
6048 driver_args.update(host=host,port=port, password=password, user=user)
6049 def connector(driver_args=driver_args):
6050
6051
6052 if over_ssl:
6053 self.imap4 = self.driver.IMAP4_SSL
6054 else:
6055 self.imap4 = self.driver.IMAP4
6056 connection = self.imap4(driver_args["host"], driver_args["port"])
6057 data = connection.login(driver_args["user"], driver_args["password"])
6058
6059
6060 connection.mailbox_names = None
6061
6062
6063 connection.cursor = lambda : True
6064
6065 return connection
6066
6067 self.db.define_tables = self.define_tables
6068 self.connector = connector
6069 if do_connect: self.reconnect()
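# Usage sketch (hypothetical credentials, not part of the original source):
#   imapdb = DAL('imap://user:password@imap.example.com:993', pool_size=1)
#   imapdb.define_tables()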
6070
6115
6116 - def get_last_message(self, tablename):
6117 last_message = None
6118
6119
6120 if not isinstance(self.connection.mailbox_names, dict):
6121 self.get_mailboxes()
6122 try:
6123 result = self.connection.select(self.connection.mailbox_names[tablename])
6124 last_message = int(result[1][0])
6125 except (IndexError, ValueError, TypeError, KeyError):
6126 e = sys.exc_info()[1]
6127 LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
6128 return last_message
6129
6130 - def get_uid_bounds(self, tablename):
6131 if not isinstance(self.connection.mailbox_names, dict):
6132 self.get_mailboxes()
6133
6134
6135 last_message = self.get_last_message(tablename)
6136 result, data = self.connection.uid("search", None, "(ALL)")
6137 uid_list = data[0].strip().split()
6138 if len(uid_list) <= 0:
6139 return None
6140 else:
6141 return (uid_list[0], uid_list[-1])
6142
6143 - def convert_date(self, date, add=None, imf=False):
6144 """ Convert a date object to a string
6145 with d-Mon-Y style for IMAP or the inverse
6146 case
6147
6148 add <timedelta> adds to the date object
6149 """
6150 if add is None:
6151 add = datetime.timedelta()
6152 months = [None, "JAN","FEB","MAR","APR","MAY","JUN",
6153 "JUL", "AUG","SEP","OCT","NOV","DEC"]
6154 if isinstance(date, basestring):
6155
6156 try:
6157 if "," in date:
6158 dayname, datestring = date.split(",")
6159 else:
6160 dayname, datestring = None, date
6161 date_list = datestring.strip().split()
6162 year = int(date_list[2])
6163 month = months.index(date_list[1].upper())
6164 day = int(date_list[0])
6165 hms = map(int, date_list[3].split(":"))
6166 return datetime.datetime(year, month, day,
6167 hms[0], hms[1], hms[2]) + add
6168 except (ValueError, AttributeError, IndexError), e:
6169 LOGGER.error("Could not parse date text: %s. %s" %
6170 (date, e))
6171 return None
6172 elif isinstance(date, (datetime.date, datetime.datetime)):
6173 if imf: date_format = "%a, %d %b %Y %H:%M:%S %z"
6174 else: date_format = "%d-%b-%Y"
6175 return (date + add).strftime(date_format)
6176 else:
6177 return None
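# Usage sketch (illustrative values):
#   convert_date("Tue, 15 Aug 2012 09:30:00 +0000")
#       -> datetime.datetime(2012, 8, 15, 9, 30)
#   convert_date(datetime.date(2012, 8, 15))
#       -> "15-Aug-2012" (IMAP d-Mon-Y style)
#   convert_date(datetime.datetime(2012, 8, 15, 9, 30), imf=True)
#       -> an RFC 2822 style string for message headers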
6178
6179 @staticmethod
6180 - def header_represent(f, r):
6181 from email.header import decode_header
6182 text, encoding = decode_header(f)[0]
6183 if encoding:
6184 text = text.decode(encoding).encode('utf-8')
6185 return text
6186
6187 - def encode_text(self, text, charset, errors="replace"):
6188 """ convert text for mail to unicode"""
6189 if text is None:
6190 text = ""
6191 else:
6192 if isinstance(text, str):
6193 if charset is None:
6194 text = unicode(text, "utf-8", errors)
6195 else:
6196 text = unicode(text, charset, errors)
6197 else:
6198 raise Exception("Unsupported mail text type %s" % type(text))
6199 return text.encode("utf-8")
6200
6201 - def get_charset(self, message):
6202 charset = message.get_content_charset()
6203 return charset
6204
6205 - def get_mailboxes(self):
6206 """ Query the mail database for mailbox names """
6207 if self.static_names:
6208
6209 self.connection.mailbox_names = self.static_names
6210 return self.static_names.keys()
6211
6212 mailboxes_list = self.connection.list()
6213 self.connection.mailbox_names = dict()
6214 mailboxes = list()
6215 x = 0
6216 for item in mailboxes_list[1]:
6217 x = x + 1
6218 item = item.strip()
6219 if not "NOSELECT" in item.upper():
6220 sub_items = item.split("\"")
6221 sub_items = [sub_item for sub_item in sub_items \
6222 if len(sub_item.strip()) > 0]
6223
6224 mailbox = sub_items[-1]
6225
6226
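# Normalization sketch: separators become underscores, non-word
# characters and leading digits/underscores are stripped,
# e.g. '[Gmail]/Sent Mail' -> 'Gmail_Sent_Mail' (illustrative value)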
6227 mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
6228 mailboxes.append(mailbox_name)
6229 self.connection.mailbox_names[mailbox_name] = mailbox
6230
6231 return mailboxes
6232
6233 - def get_query_mailbox(self, query):
6234 nofield = True
6235 tablename = None
6236 attr = query
6237 while nofield:
6238 if hasattr(attr, "first"):
6239 attr = attr.first
6240 if isinstance(attr, Field):
6241 return attr.tablename
6242 elif isinstance(attr, Query):
6243 pass
6244 else:
6245 return None
6246 else:
6247 return None
6248 return tablename
6249
6250 - def is_flag(self, flag):
6251 if self.search_fields.get(flag, None) in self.flags.values():
6252 return True
6253 else:
6254 return False
6255
6256 - def define_tables(self, mailbox_names=None):
6257 """
6258 Auto create common IMAP fields
6259
6260 This function creates field definitions "statically",
6261 meaning that custom fields, as in other adapters, are
6262 not supported and definitions are handled on a
6263 service/mode basis (local syntax for Gmail(r), Ymail(r))
6264
6265 Returns a dictionary with tablename, server native mailbox name
6266 pairs.
6267 """
6268 if mailbox_names:
6269
6270 self.static_names = mailbox_names
6271 else:
6272 self.static_names = None
6273 if not isinstance(self.connection.mailbox_names, dict):
6274 self.get_mailboxes()
6275
6276 names = self.connection.mailbox_names.keys()
6277
6278 for name in names:
6279 self.db.define_table("%s" % name,
6280 Field("uid", "string", writable=False),
6281 Field("answered", "boolean"),
6282 Field("created", "datetime", writable=False),
6283 Field("content", list, writable=False),
6284 Field("to", "string", writable=False),
6285 Field("cc", "string", writable=False),
6286 Field("bcc", "string", writable=False),
6287 Field("size", "integer", writable=False),
6288 Field("deleted", "boolean"),
6289 Field("draft", "boolean"),
6290 Field("flagged", "boolean"),
6291 Field("sender", "string", writable=False),
6292 Field("recent", "boolean", writable=False),
6293 Field("seen", "boolean"),
6294 Field("subject", "string", writable=False),
6295 Field("mime", "string", writable=False),
6296 Field("email", "string", writable=False, readable=False),
6297 Field("attachments", list, writable=False, readable=False),
6298 Field("encoding", writable=False)
6299 )
6300
6301
6302
6303 self.db[name].mailbox = \
6304 self.connection.mailbox_names[name]
6305
6306
6307 self.db[name].to.represent = self.db[name].cc.represent = \
6308 self.db[name].bcc.represent = self.db[name].sender.represent = \
6309 self.db[name].subject.represent = self.header_represent
6310
6311
6312 self.db.mailboxes = self.connection.mailbox_names
6313 return self.db.mailboxes
6314
6319
6320 - def _select(self, query, fields, attributes):
6324
6325 - def select(self, query, fields, attributes):
6326 """ Search and Fetch records and return web2py rows
6327 """
6328
6329 if use_common_filters(query):
6330 query = self.common_filter(query, [self.get_query_mailbox(query),])
6331
6332 import email
6333
6334
6335 tablename = None
6336 fetch_results = list()
6337
6338 if isinstance(query, Query):
6339 tablename = self.get_table(query)
6340 mailbox = self.connection.mailbox_names.get(tablename, None)
6341 if mailbox is None:
6342 raise ValueError("Mailbox name not found: %s" % tablename)
6343 else:
6344
6345 result, selected = self.connection.select(mailbox, True)
6346 if result != "OK":
6347 raise Exception("IMAP error: %s" % selected)
6348 self.mailbox_size = int(selected[0])
6349 search_query = "(%s)" % str(query).strip()
6350 search_result = self.connection.uid("search", None, search_query)
6351
6352 if search_result[0] == "OK":
6353
6354
6355
6356
6357 limitby = attributes.get('limitby', None)
6358 messages_set = search_result[1][0].split()
6359
6360 messages_set.reverse()
6361 if limitby is not None:
6362
6363 messages_set = messages_set[int(limitby[0]):int(limitby[1])]
6364
6365
6366 if any([(field.name in ["content", "size",
6367 "attachments", "email"]) for
6368 field in fields]):
6369 imap_fields = "(RFC822 FLAGS)"
6370 else:
6371 imap_fields = "(RFC822.HEADER FLAGS)"
6372
6373 if len(messages_set) > 0:
6374
6375
6376
6377
6378 for uid in messages_set:
6379
6380 typ, data = self.connection.uid("fetch", uid, imap_fields)
6381 if typ == "OK":
6382 fr = {"message": int(data[0][0].split()[0]),
6383 "uid": long(uid),
6384 "email": email.message_from_string(data[0][1]),
6385 "raw_message": data[0][1]}
6386 fr["multipart"] = fr["email"].is_multipart()
6387
6388 fr["flags"] = self.driver.ParseFlags(data[1])
6389 fetch_results.append(fr)
6390 else:
6391
6392 raise Exception("IMAP error retrieving the body: %s" % data)
6393 else:
6394 raise Exception("IMAP search error: %s" % search_result[1])
6395 elif isinstance(query, (Expression, basestring)):
6396 raise NotImplementedError()
6397 else:
6398 raise TypeError("Unexpected query type")
6399
6400 imapqry_dict = {}
6401 imapfields_dict = {}
6402
6403 if len(fields) == 1 and isinstance(fields[0], SQLALL):
6404 allfields = True
6405 elif len(fields) == 0:
6406 allfields = True
6407 else:
6408 allfields = False
6409 if allfields:
6410 colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
6411 else:
6412 colnames = ["%s.%s" % (tablename, field.name) for field in fields]
6413
6414 for k in colnames:
6415 imapfields_dict[k] = k
6416
6417 imapqry_list = list()
6418 imapqry_array = list()
6419 for fr in fetch_results:
6420 attachments = []
6421 content = []
6422 size = 0
6423 n = int(fr["message"])
6424 item_dict = dict()
6425 message = fr["email"]
6426 uid = fr["uid"]
6427 charset = self.get_charset(message)
6428 flags = fr["flags"]
6429 raw_message = fr["raw_message"]
6430
6431
6432
6433
6434
6435
6436
6437
6438
6439
6440 if "%s.id" % tablename in colnames:
6441 item_dict["%s.id" % tablename] = n
6442 if "%s.created" % tablename in colnames:
6443 item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
6444 if "%s.uid" % tablename in colnames:
6445 item_dict["%s.uid" % tablename] = uid
6446 if "%s.sender" % tablename in colnames:
6447
6448
6449
6450 item_dict["%s.sender" % tablename] = message["From"]
6451 if "%s.to" % tablename in colnames:
6452 item_dict["%s.to" % tablename] = message["To"]
6453 if "%s.cc" % tablename in colnames:
6454 if "Cc" in message.keys():
6455 item_dict["%s.cc" % tablename] = message["Cc"]
6456 else:
6457 item_dict["%s.cc" % tablename] = ""
6458 if "%s.bcc" % tablename in colnames:
6459 if "Bcc" in message.keys():
6460 item_dict["%s.bcc" % tablename] = message["Bcc"]
6461 else:
6462 item_dict["%s.bcc" % tablename] = ""
6463 if "%s.deleted" % tablename in colnames:
6464 item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
6465 if "%s.draft" % tablename in colnames:
6466 item_dict["%s.draft" % tablename] = "\\Draft" in flags
6467 if "%s.flagged" % tablename in colnames:
6468 item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
6469 if "%s.recent" % tablename in colnames:
6470 item_dict["%s.recent" % tablename] = "\\Recent" in flags
6471 if "%s.seen" % tablename in colnames:
6472 item_dict["%s.seen" % tablename] = "\\Seen" in flags
6473 if "%s.subject" % tablename in colnames:
6474 item_dict["%s.subject" % tablename] = message["Subject"]
6475 if "%s.answered" % tablename in colnames:
6476 item_dict["%s.answered" % tablename] = "\\Answered" in flags
6477 if "%s.mime" % tablename in colnames:
6478 item_dict["%s.mime" % tablename] = message.get_content_type()
6479 if "%s.encoding" % tablename in colnames:
6480 item_dict["%s.encoding" % tablename] = charset
6481
6482
6483
6484
6485
6486
6487 if "%s.email" % tablename in colnames:
6488
6489 item_dict["%s.email" % tablename] = raw_message
6490
6491
6492
6493
6494
6495
6496 for part in message.walk():
6497 maintype = part.get_content_maintype()
6498 if ("%s.attachments" % tablename in colnames) or \
6499 ("%s.content" % tablename in colnames):
6500 payload = part.get_payload(decode=True)
6501 if payload:
6502 filename = part.get_filename()
6503 values = {"mime": part.get_content_type()}
6504 if ((filename or not "text" in maintype) and
6505 ("%s.attachments" % tablename in colnames)):
6506 values.update({"payload": payload,
6507 "filename": filename,
6508 "encoding": part.get_content_charset(),
6509 "disposition": part["Content-Disposition"]})
6510 attachments.append(values)
6511 elif (("text" in maintype) and
6512 ("%s.content" % tablename in colnames)):
6513 values.update({"text": self.encode_text(payload,
6514 self.get_charset(part))})
6515 content.append(values)
6516
6517 if "%s.size" % tablename in colnames:
6518 if part is not None:
6519 size += len(str(part))
6520 item_dict["%s.content" % tablename] = content
6521 item_dict["%s.attachments" % tablename] = attachments
6522 item_dict["%s.size" % tablename] = size
6523 imapqry_list.append(item_dict)
6524
6525
6526
6527 for item_dict in imapqry_list:
6528 imapqry_array_item = list()
6529 for fieldname in colnames:
6530 imapqry_array_item.append(item_dict[fieldname])
6531 imapqry_array.append(imapqry_array_item)
6532
6533
6534
6535 processor = attributes.get('processor',self.parse)
6536 return processor(imapqry_array, fields, colnames)
6537
6538 - def _insert(self, table, fields):
6539 def add_payload(message, obj):
6540 payload = Message()
6541 encoding = obj.get("encoding", "utf-8")
6542 if encoding and (encoding.upper() in
6543 ("BASE64", "7BIT", "8BIT", "BINARY")):
6544 payload.add_header("Content-Transfer-Encoding", encoding)
6545 else:
6546 payload.set_charset(encoding)
6547 mime = obj.get("mime", None)
6548 if mime:
6549 payload.set_type(mime)
6550 if "text" in obj:
6551 payload.set_payload(obj["text"])
6552 elif "payload" in obj:
6553 payload.set_payload(obj["payload"])
6554 if "filename" in obj and obj["filename"]:
6555 payload.add_header("Content-Disposition",
6556 "attachment", filename=obj["filename"])
6557 message.attach(payload)
6558
6559 mailbox = table.mailbox
6560 d = dict(((k.name, v) for k, v in fields))
6561 date_time = d.get("created", datetime.datetime.now())
6562 struct_time = date_time.timetuple()
6563 if len(d) > 0:
6564 message = d.get("email", None)
6565 attachments = d.get("attachments", [])
6566 content = d.get("content", [])
6567 flags = " ".join(["\\%s" % flag.capitalize() for flag in
6568 ("answered", "deleted", "draft", "flagged",
6569 "recent", "seen") if d.get(flag, False)])
6570 if not message:
6571 from email.message import Message
6572 mime = d.get("mime", None)
6573 charset = d.get("encoding", None)
6574 message = Message()
6575 message["from"] = d.get("sender", "")
6576 message["subject"] = d.get("subject", "")
6577 message["date"] = self.convert_date(date_time, imf=True)
6578
6579 if mime:
6580 message.set_type(mime)
6581 if charset:
6582 message.set_charset(charset)
6583 for item in ("to", "cc", "bcc"):
6584 value = d.get(item, "")
6585 if isinstance(value, basestring):
6586 message[item] = value
6587 else:
6588 message[item] = ";".join([i for i in
6589 value])
6590 if (not message.is_multipart() and
6591 (not message.get_content_type().startswith(
6592 "multipart"))):
6593 if isinstance(content, basestring):
6594 message.set_payload(content)
6595 elif len(content) > 0:
6596 message.set_payload(content[0]["text"])
6597 else:
6598 [add_payload(message, c) for c in content]
6599 [add_payload(message, a) for a in attachments]
6600 message = message.as_string()
6601 return (mailbox, flags, struct_time, message)
6602 else:
6603 raise NotImplementedError("IMAP empty insert is not implemented")
6604
6605 - def insert(self, table, fields):
6606 values = self._insert(table, fields)
6607 result, data = self.connection.append(*values)
6608 if result == "OK":
6609 uid = int(re.findall("\d+", str(data))[-1])
6610 return self.db(table.uid==uid).select(table.id).first().id
6611 else:
6612 raise Exception("IMAP message append failed: %s" % data)
6613
6614 - def _update(self, tablename, query, fields, commit=False):
6615
6616 commands = list()
6617 if use_common_filters(query):
6618 query = self.common_filter(query, [tablename,])
6619 mark = []
6620 unmark = []
6621 if query:
6622 for item in fields:
6623 field = item[0]
6624 name = field.name
6625 value = item[1]
6626 if self.is_flag(name):
6627 flag = self.search_fields[name]
6628 if (value is not None) and (flag != "\\Recent"):
6629 if value:
6630 mark.append(flag)
6631 else:
6632 unmark.append(flag)
6633 result, data = self.connection.select(
6634 self.connection.mailbox_names[tablename])
6635 string_query = "(%s)" % query
6636 result, data = self.connection.search(None, string_query)
6637 store_list = [item.strip() for item in data[0].split()
6638 if item.strip().isdigit()]
6639
6640 for number in store_list:
6641 result = None
6642 if len(mark) > 0:
6643 commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
6644 if len(unmark) > 0:
6645 commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
6646 return commands
6647
6648 - def update(self, tablename, query, fields):
6649 rowcount = 0
6650 commands = self._update(tablename, query, fields)
6651 for command in commands:
6652 result, data = self.connection.store(*command)
6653 if result == "OK":
6654 rowcount += 1
6655 else:
6656 raise Exception("IMAP storing error: %s" % data)
6657 return rowcount
6658
6659 - def _count(self, query, distinct=None):
6660 raise NotImplementedError()
6661
6662 - def count(self,query,distinct=None):
6674
6675 - def delete(self, tablename, query):
6676 counter = 0
6677 if query:
6678 if use_common_filters(query):
6679 query = self.common_filter(query, [tablename,])
6680 result, data = self.connection.select(self.connection.mailbox_names[tablename])
6681 string_query = "(%s)" % query
6682 result, data = self.connection.search(None, string_query)
6683 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
6684 for number in store_list:
6685 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
6686 if result == "OK":
6687 counter += 1
6688 else:
6689 raise Exception("IMAP store error: %s" % data)
6690 if counter > 0:
6691 result, data = self.connection.expunge()
6692 return counter
6693
6694 - def BELONGS(self, first, second):
6695 result = None
6696 name = self.search_fields[first.name]
6697 if name == "MESSAGE":
6698 values = [str(val) for val in second if str(val).isdigit()]
6699 result = "%s" % ",".join(values).strip()
6700
6701 elif name == "UID":
6702 values = [str(val) for val in second if str(val).isdigit()]
6703 result = "UID %s" % ",".join(values).strip()
6704
6705 else:
6706 raise Exception("Operation not supported")
6707
6708 return result
6709
6710 - def CONTAINS(self, first, second, case_sensitive=False):
6711
6712 result = None
6713 name = self.search_fields[first.name]
6714
6715 if name in ("FROM", "TO", "SUBJECT", "TEXT"):
6716 result = "%s \"%s\"" % (name, self.expand(second))
6717 else:
6718 if first.name in ("cc", "bcc"):
6719 result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
6720 elif first.name == "mime":
6721 result = "HEADER Content-Type \"%s\"" % self.expand(second)
6722 else:
6723 raise Exception("Operation not supported")
6724 return result
6725
6726 - def GT(self, first, second):
6727 result = None
6728 name = self.search_fields[first.name]
6729 if name == "MESSAGE":
6730 last_message = self.get_last_message(first.tablename)
6731 result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
6732 elif name == "UID":
6733
6734
6735
6736 try:
6737 pedestal, threshold = self.get_uid_bounds(first.tablename)
6738 except TypeError:
6739 e = sys.exc_info()[1]
6740 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6741 return ""
6742 try:
6743 lower_limit = int(self.expand(second)) + 1
6744 except (ValueError, TypeError):
6745 e = sys.exc_info()[1]
6746 raise Exception("Operation not supported (non integer UID)")
6747 result = "UID %s:%s" % (lower_limit, threshold)
6748 elif name == "DATE":
6749 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
6750 elif name == "SIZE":
6751 result = "LARGER %s" % self.expand(second)
6752 else:
6753 raise Exception("Operation not supported")
6754 return result
6755
6756 - def GE(self, first, second):
6757 result = None
6758 name = self.search_fields[first.name]
6759 if name == "MESSAGE":
6760 last_message = self.get_last_message(first.tablename)
6761 result = "%s:%s" % (self.expand(second), last_message)
6762 elif name == "UID":
6763
6764
6765
6766 try:
6767 pedestal, threshold = self.get_uid_bounds(first.tablename)
6768 except TypeError:
6769 e = sys.exc_info()[1]
6770 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6771 return ""
6772 lower_limit = self.expand(second)
6773 result = "UID %s:%s" % (lower_limit, threshold)
6774 elif name == "DATE":
6775 result = "SINCE %s" % self.convert_date(second)
6776 else:
6777 raise Exception("Operation not supported")
6778 return result
6779
6780 - def LT(self, first, second):
6781 result = None
6782 name = self.search_fields[first.name]
6783 if name == "MESSAGE":
6784 result = "%s:%s" % (1, int(self.expand(second)) - 1)
6785 elif name == "UID":
6786 try:
6787 pedestal, threshold = self.get_uid_bounds(first.tablename)
6788 except TypeError:
6789 e = sys.exc_info()[1]
6790 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6791 return ""
6792 try:
6793 upper_limit = int(self.expand(second)) - 1
6794 except (ValueError, TypeError):
6795 e = sys.exc_info()[1]
6796 raise Exception("Operation not supported (non integer UID)")
6797 result = "UID %s:%s" % (pedestal, upper_limit)
6798 elif name == "DATE":
6799 result = "BEFORE %s" % self.convert_date(second)
6800 elif name == "SIZE":
6801 result = "SMALLER %s" % self.expand(second)
6802 else:
6803 raise Exception("Operation not supported")
6804 return result
6805
6806 - def LE(self, first, second):
6807 result = None
6808 name = self.search_fields[first.name]
6809 if name == "MESSAGE":
6810 result = "%s:%s" % (1, self.expand(second))
6811 elif name == "UID":
6812 try:
6813 pedestal, threshold = self.get_uid_bounds(first.tablename)
6814 except TypeError:
6815 e = sys.exc_info()[1]
6816 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6817 return ""
6818 upper_limit = int(self.expand(second))
6819 result = "UID %s:%s" % (pedestal, upper_limit)
6820 elif name == "DATE":
6821 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
6822 else:
6823 raise Exception("Operation not supported")
6824 return result
6825
6826 - def NE(self, first, second=None):
6827 if (second is None) and isinstance(first, Field):
6828
6829 if first.type == "id":
6830 return self.GE(first, 1)
6831 result = self.NOT(self.EQ(first, second))
6832 result = result.replace("NOT NOT", "").strip()
6833 return result
6834
6835 - def EQ(self,first,second):
6836 name = self.search_fields[first.name]
6837 result = None
6838 if name is not None:
6839 if name == "MESSAGE":
6840
6841 result = "%s" % self.expand(second)
6842 elif name == "UID":
6843 result = "UID %s" % self.expand(second)
6844 elif name == "DATE":
6845 result = "ON %s" % self.convert_date(second)
6846
6847 elif name in self.flags.values():
6848 if second:
6849 result = "%s" % (name.upper()[1:])
6850 else:
6851 result = "NOT %s" % (name.upper()[1:])
6852 else:
6853 raise Exception("Operation not supported")
6854 else:
6855 raise Exception("Operation not supported")
6856 return result
6857
6858 - def AND(self, first, second):
6861
6862 - def OR(self, first, second):
6865
6866 - def NOT(self, first):
6867 result = "NOT %s" % self.expand(first)
6868 return result
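# Sketch of the IMAP search strings these operators build
# (INBOX is an illustrative table name; values are hypothetical):
#   imapdb.INBOX.uid.belongs((1, 2, 3))                 -> "UID 1,2,3"
#   imapdb.INBOX.seen == True                           -> "SEEN"
#   imapdb.INBOX.created >= datetime.date(2012, 8, 15)  -> "SINCE 15-Aug-2012"
#   ~(imapdb.INBOX.deleted == True)                     -> "NOT DELETED"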
6869
6870
6871
6872
6873
6874 ADAPTERS = {
6875 'sqlite': SQLiteAdapter,
6876 'spatialite': SpatiaLiteAdapter,
6877 'sqlite:memory': SQLiteAdapter,
6878 'spatialite:memory': SpatiaLiteAdapter,
6879 'mysql': MySQLAdapter,
6880 'postgres': PostgreSQLAdapter,
6881 'postgres:psycopg2': PostgreSQLAdapter,
6882 'postgres:pg8000': PostgreSQLAdapter,
6883 'postgres2:psycopg2': NewPostgreSQLAdapter,
6884 'postgres2:pg8000': NewPostgreSQLAdapter,
6885 'oracle': OracleAdapter,
6886 'mssql': MSSQLAdapter,
6887 'mssql2': MSSQL2Adapter,
6888 'mssql3': MSSQL3Adapter,
6889 'mssql4' : MSSQL4Adapter,
6890 'vertica': VerticaAdapter,
6891 'sybase': SybaseAdapter,
6892 'db2': DB2Adapter,
6893 'teradata': TeradataAdapter,
6894 'informix': InformixAdapter,
6895 'informix-se': InformixSEAdapter,
6896 'firebird': FireBirdAdapter,
6897 'firebird_embedded': FireBirdAdapter,
6898 'ingres': IngresAdapter,
6899 'ingresu': IngresUnicodeAdapter,
6900 'sapdb': SAPDBAdapter,
6901 'cubrid': CubridAdapter,
6902 'jdbc:sqlite': JDBCSQLiteAdapter,
6903 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
6904 'jdbc:postgres': JDBCPostgreSQLAdapter,
6905 'gae': GoogleDatastoreAdapter,
6906 'google:datastore': GoogleDatastoreAdapter,
6907 'google:sql': GoogleSQLAdapter,
6908 'couchdb': CouchDBAdapter,
6909 'mongodb': MongoDBAdapter,
6910 'imap': IMAPAdapter
6911 }
6913 -def sqlhtml_validators(field):
6914 """
6915 Field type validation, using web2py's validators mechanism.
6916
6917 Makes sure the content of a field is in line with the declared
6918 fieldtype.
6919 """
6920 db = field.db
6921 try:
6922 from gluon import validators
6923 except ImportError:
6924 return []
6925 field_type, field_length = field.type, field.length
6926 if isinstance(field_type, SQLCustomType):
6927 if hasattr(field_type, 'validator'):
6928 return field_type.validator
6929 else:
6930 field_type = field_type.type
6931 elif not isinstance(field_type,str):
6932 return []
6933 requires=[]
6934 def ff(r,id):
6935 row=r(id)
6936 if not row:
6937 return id
6938 elif hasattr(r, '_format') and isinstance(r._format,str):
6939 return r._format % row
6940 elif hasattr(r, '_format') and callable(r._format):
6941 return r._format(row)
6942 else:
6943 return id
6944 if field_type in (('string', 'text', 'password')):
6945 requires.append(validators.IS_LENGTH(field_length))
6946 elif field_type == 'json':
6947 requires.append(validators.IS_EMPTY_OR(validators.IS_JSON(native_json=field.db._adapter.native_json)))
6948 elif field_type == 'double' or field_type == 'float':
6949 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
6950 elif field_type == 'integer':
6951 requires.append(validators.IS_INT_IN_RANGE(-2**31, 2**31))
6952 elif field_type == 'bigint':
6953 requires.append(validators.IS_INT_IN_RANGE(-2**63, 2**63))
6954 elif field_type.startswith('decimal'):
6955 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
6956 elif field_type == 'date':
6957 requires.append(validators.IS_DATE())
6958 elif field_type == 'time':
6959 requires.append(validators.IS_TIME())
6960 elif field_type == 'datetime':
6961 requires.append(validators.IS_DATETIME())
6962 elif db and field_type.startswith('reference') and \
6963 field_type.find('.') < 0 and \
6964 field_type[10:] in db.tables:
6965 referenced = db[field_type[10:]]
6966 def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
6967 field.represent = field.represent or repr_ref
6968 if hasattr(referenced, '_format') and referenced._format:
6969 requires = validators.IS_IN_DB(db,referenced._id,
6970 referenced._format)
6971 if field.unique:
6972 requires._and = validators.IS_NOT_IN_DB(db,field)
6973 if field.tablename == field_type[10:]:
6974 return validators.IS_EMPTY_OR(requires)
6975 return requires
6976 elif db and field_type.startswith('list:reference') and \
6977 field_type.find('.') < 0 and \
6978 field_type[15:] in db.tables:
6979 referenced = db[field_type[15:]]
6980 def list_ref_repr(ids, row=None, r=referenced, f=ff):
6981 if not ids:
6982 return None
6983 refs = None
6984 db, id = r._db, r._id
6985 if isinstance(db._adapter, GoogleDatastoreAdapter):
6986 def count(values): return db(id.belongs(values)).select(id)
6987 rx = range(0, len(ids), 30)
6988 refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
6989 else:
6990 refs = db(id.belongs(ids)).select(id)
6991 return (refs and ', '.join(f(r,x.id) for x in refs) or '')
6992 field.represent = field.represent or list_ref_repr
6993 if hasattr(referenced, '_format') and referenced._format:
6994 requires = validators.IS_IN_DB(db,referenced._id,
6995 referenced._format,multiple=True)
6996 else:
6997 requires = validators.IS_IN_DB(db,referenced._id,
6998 multiple=True)
6999 if field.unique:
7000 requires._and = validators.IS_NOT_IN_DB(db,field)
7001 if not field.notnull:
7002 requires = validators.IS_EMPTY_OR(requires)
7003 return requires
7004 elif field_type.startswith('list:'):
7005 def repr_list(values, row=None): return ', '.join(str(v) for v in (values or []))
7006 field.represent = field.represent or repr_list
7007 if field.unique:
7008 requires.insert(0,validators.IS_NOT_IN_DB(db,field))
7009 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
7010 if field.notnull and not field_type[:2] in sff:
7011 requires.insert(0, validators.IS_NOT_EMPTY())
7012 elif not field.notnull and field_type[:2] in sff and requires:
7013 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
7014 return requires
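# Sketch of the default validators this function assigns
# (assuming gluon.validators is importable):
#   Field('name', 'string', length=32) -> [IS_LENGTH(32)]
#   Field('age', 'integer')            -> [IS_INT_IN_RANGE(-2**31, 2**31)]
#   Field('born', 'date')              -> [IS_DATE()]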
7015
7017 -def bar_escape(item):
7018 return str(item).replace('|', '||')
7019
7022
7023 -def bar_decode_integer(value):
7024 if not hasattr(value,'split') and hasattr(value,'read'):
7025 value = value.read()
7026 return [long(x) for x in value.split('|') if x.strip()]
7027
7031
7032
7033 -class Row(object):
7034
7035 """
7036 a dictionary that lets you do d['a'] as well as d.a
7037 this is only used to store a Row
7038 """
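# Access sketch: r = Row(name='Jim'); r['name'] == r.name == r('name') == 'Jim'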
7039
7040 __init__ = lambda self,*args,**kwargs: self.__dict__.update(*args,**kwargs)
7041
7042 - def __getitem__(self, k):
7043 key=str(k)
7044 _extra = self.__dict__.get('_extra', None)
7045 if _extra is not None:
7046 v = _extra.get(key, DEFAULT)
7047 if v != DEFAULT:
7048 return v
7049 m = REGEX_TABLE_DOT_FIELD.match(key)
7050 if m:
7051 try:
7052 return ogetattr(self, m.group(1))[m.group(2)]
7053 except (KeyError,AttributeError,TypeError):
7054 key = m.group(2)
7055 try:
7056 return ogetattr(self, key)
7057 except (KeyError,AttributeError,TypeError), ae:
7058 try:
7059 self[key] = ogetattr(self,'__get_lazy_reference__')(key)
7060 return self[key]
7061 except:
7062 raise ae
7063
7064 __setitem__ = lambda self, key, value: setattr(self, str(key), value)
7065
7066 __delitem__ = object.__delattr__
7067
7068 __copy__ = lambda self: Row(self)
7069
7070 __call__ = __getitem__
7071
7072
7073 - def get(self, key, default=None):
7074 try:
7075 return self.__getitem__(key)
7076 except(KeyError, AttributeError, TypeError):
7077 return self.__dict__.get(key,default)
7078
7079 has_key = __contains__ = lambda self, key: key in self.__dict__
7080
7081 __nonzero__ = lambda self: len(self.__dict__)>0
7082
7083 update = lambda self, *args, **kwargs: self.__dict__.update(*args, **kwargs)
7084
7085 keys = lambda self: self.__dict__.keys()
7086
7087 items = lambda self: self.__dict__.items()
7088
7089 values = lambda self: self.__dict__.values()
7090
7091 __iter__ = lambda self: self.__dict__.__iter__()
7092
7093 iteritems = lambda self: self.__dict__.iteritems()
7094
7095 __str__ = __repr__ = lambda self: '<Row %s>' % self.as_dict()
7096
7097 __int__ = lambda self: object.__getattribute__(self,'id')
7098
7099 __long__ = lambda self: long(object.__getattribute__(self,'id'))
7100
7101 __getattr__ = __getitem__
7102
7103
7104
7105
7106
7107
7108
7109
7110
7111
7112 - def __eq__(self, other):
7113 try:
7114 return self.as_dict() == other.as_dict()
7115 except AttributeError:
7116 return False
7117
7118 - def __ne__(self, other):
7119 return not (self == other)
7120
7122 return Row(dict(self))
7123
7124 - def as_dict(self, datetime_to_str=False, custom_types=None):
7125 SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
7126 if isinstance(custom_types,(list,tuple,set)):
7127 SERIALIZABLE_TYPES += custom_types
7128 elif custom_types:
7129 SERIALIZABLE_TYPES.append(custom_types)
7130 d = dict(self)
7131 for k in copy.copy(d.keys()):
7132 v=d[k]
7133 if d[k] is None:
7134 continue
7135 elif isinstance(v,Row):
7136 d[k]=v.as_dict()
7137 elif isinstance(v,Reference):
7138 d[k]=long(v)
7139 elif isinstance(v,decimal.Decimal):
7140 d[k]=float(v)
7141 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
7142 if datetime_to_str:
7143 d[k] = v.isoformat().replace('T',' ')[:19]
7144 elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
7145 del d[k]
7146 return d
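# Sketch: Row(id=1, name='Jim').as_dict() -> {'id': 1, 'name': 'Jim'}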
7147
7148 - def as_xml(self, row_name="row", colnames=None, indent=' '):
7149 def f(row,field,indent=' '):
7150 if isinstance(row,Row):
7151 spc = indent+' \n'
7152 items = [f(row[x],x,indent+' ') for x in row]
7153 return '%s<%s>\n%s\n%s</%s>' % (
7154 indent,
7155 field,
7156 spc.join(item for item in items if item),
7157 indent,
7158 field)
7159 elif not callable(row):
7160 if REGEX_ALPHANUMERIC.match(field):
7161 return '%s<%s>%s</%s>' % (indent,field,row,field)
7162 else:
7163 return '%s<extra name="%s">%s</extra>' % \
7164 (indent,field,row)
7165 else:
7166 return None
7167 return f(self, row_name, indent=indent)
7168
7169 - def as_json(self, mode="object", default=None, colnames=None,
7170 serialize=True, **kwargs):
7171 """
7172 serializes the row to a JSON object
7173 kwargs are passed to .as_dict method
7174 only "object" mode supported
7175
7176 serialize = False used by Rows.as_json
7177 TODO: return array mode with query column order
7178
7179 mode and colnames are not implemented
7180 """
7181
7182 item = self.as_dict(**kwargs)
7183 if serialize:
7184 if have_serializers:
7185 return serializers.json(item,
7186 default=default or
7187 serializers.custom_json)
7188 elif simplejson:
7189 return simplejson.dumps(item)
7190 else:
7191 raise RuntimeError("missing simplejson")
7192 else:
7193 return item
7194
7204
7205 -def smart_query(fields, text):
7206 if not isinstance(fields,(list,tuple)):
7207 fields = [fields]
7208 new_fields = []
7209 for field in fields:
7210 if isinstance(field,Field):
7211 new_fields.append(field)
7212 elif isinstance(field,Table):
7213 for ofield in field:
7214 new_fields.append(ofield)
7215 else:
7216 raise RuntimeError("fields must be a list of fields")
7217 fields = new_fields
7218 field_map = {}
7219 for field in fields:
7220 n = field.name.lower()
7221 if not n in field_map:
7222 field_map[n] = field
7223 n = str(field).lower()
7224 if not n in field_map:
7225 field_map[n] = field
7226 constants = {}
7227 i = 0
7228 while True:
7229 m = REGEX_CONST_STRING.search(text)
7230 if not m: break
7231 text = text[:m.start()]+('#%i' % i)+text[m.end():]
7232 constants[str(i)] = m.group()[1:-1]
7233 i+=1
7234 text = re.sub('\s+',' ',text).lower()
7235 for a,b in [('&','and'),
7236 ('|','or'),
7237 ('~','not'),
7238 ('==','='),
7239 ('<','<'),
7240 ('>','>'),
7241 ('<=','<='),
7242 ('>=','>='),
7243 ('<>','!='),
7244 ('=<','<='),
7245 ('=>','>='),
7246 ('=','='),
7247 (' less or equal than ','<='),
7248 (' greater or equal than ','>='),
7249 (' equal or less than ','<='),
7250 (' equal or greater than ','>='),
7251 (' less or equal ','<='),
7252 (' greater or equal ','>='),
7253 (' equal or less ','<='),
7254 (' equal or greater ','>='),
7255 (' not equal to ','!='),
7256 (' not equal ','!='),
7257 (' equal to ','='),
7258 (' equal ','='),
7259 (' equals ','='),
7260 (' less than ','<'),
7261 (' greater than ','>'),
7262 (' starts with ','startswith'),
7263 (' ends with ','endswith'),
7264 (' not in ' , 'notbelongs'),
7265 (' in ' , 'belongs'),
7266 (' is ','=')]:
7267 if a[0]==' ':
7268 text = text.replace(' is'+a,' %s ' % b)
7269 text = text.replace(a,' %s ' % b)
7270 text = re.sub('\s+',' ',text).lower()
7271 text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
7272 query = field = neg = op = logic = None
7273 for item in text.split():
7274 if field is None:
7275 if item == 'not':
7276 neg = True
7277 elif not neg and not logic and item in ('and','or'):
7278 logic = item
7279 elif item in field_map:
7280 field = field_map[item]
7281 else:
7282 raise RuntimeError("Invalid syntax")
7283 elif not field is None and op is None:
7284 op = item
7285 elif not op is None:
7286 if item.startswith('#'):
7287 if not item[1:] in constants:
7288 raise RuntimeError("Invalid syntax")
7289 value = constants[item[1:]]
7290 else:
7291 value = item
7292 if field.type in ('text', 'string', 'json'):
7293 if op == '=': op = 'like'
7294 if op == '=': new_query = field==value
7295 elif op == '<': new_query = field<value
7296 elif op == '>': new_query = field>value
7297 elif op == '<=': new_query = field<=value
7298 elif op == '>=': new_query = field>=value
7299 elif op == '!=': new_query = field!=value
7300 elif op == 'belongs': new_query = field.belongs(value.split(','))
7301 elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
7302 elif field.type in ('text', 'string', 'json'):
7303 if op == 'contains': new_query = field.contains(value)
7304 elif op == 'like': new_query = field.like(value)
7305 elif op == 'startswith': new_query = field.startswith(value)
7306 elif op == 'endswith': new_query = field.endswith(value)
7307 else: raise RuntimeError("Invalid operation")
7308 elif field._db._adapter.dbengine=='google:datastore' and \
7309 field.type in ('list:integer', 'list:string', 'list:reference'):
7310 if op == 'contains': new_query = field.contains(value)
7311 else: raise RuntimeError("Invalid operation")
7312 else: raise RuntimeError("Invalid operation")
7313 if neg: new_query = ~new_query
7314 if query is None:
7315 query = new_query
7316 elif logic == 'and':
7317 query &= new_query
7318 elif logic == 'or':
7319 query |= new_query
7320 field = op = neg = logic = None
7321 return query
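# Usage sketch (hypothetical 'person' table):
#   smart_query([db.person], "person.name contains 'Jim' and person.id > 3")
# builds a Query equivalent to:
#   db.person.name.contains('Jim') & (db.person.id > 3)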
7322
7323 -class DAL(object):
7324
7325 """
7326 an instance of this class represents a database connection
7327
7328 Example::
7329
7330 db = DAL('sqlite://test.db')
7331
7332 or
7333
7334 db = DAL(**{"uri": ..., "tables": [...]...}) # experimental
7335
7336 db.define_table('tablename', Field('fieldname1'),
7337 Field('fieldname2'))
7338 """
7339
7340 - def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
7367
7368 @staticmethod
7369 - def set_folder(folder):
7370 """
7371 # ## this allows gluon to set a folder for this thread
7372 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
7373 """
7374 BaseAdapter.set_folder(folder)
7375
7376 @staticmethod
7377 - def get_instances():
7378 """
7379 Returns a dictionary with uri as key with timings and defined tables
7380 {'sqlite://storage.sqlite': {
7381 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
7382 'dbtables': {
7383 'defined': ['auth_cas', 'auth_event', 'auth_group',
7384 'auth_membership', 'auth_permission', 'auth_user'],
7385 'lazy': '[]'
7386 }
7387 }
7388 }
7389 """
7390 dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
7391 infos = {}
7392 for db_uid, db_group in dbs:
7393 for db in db_group:
7394 if not db._uri:
7395 continue
7396 k = hide_password(db._uri)
7397 infos[k] = dict(dbstats = [(row[0], row[1]) for row in db._timings],
7398 dbtables = {'defined':
7399 sorted(list(set(db.tables) -
7400 set(db._LAZY_TABLES.keys()))),
7401 'lazy': sorted(db._LAZY_TABLES.keys())}
7402 )
7403 return infos
7404
7405 @staticmethod
7418
7419 @staticmethod
7441
7442 - def __init__(self, uri=DEFAULT_URI,
7443 pool_size=0, folder=None,
7444 db_codec='UTF-8', check_reserved=None,
7445 migrate=True, fake_migrate=False,
7446 migrate_enabled=True, fake_migrate_all=False,
7447 decode_credentials=False, driver_args=None,
7448 adapter_args=None, attempts=5, auto_import=False,
7449 bigint_id=False, debug=False, lazy_tables=False,
7450 db_uid=None, do_connect=True,
7451 after_connection=None, tables=None):
7452 """
7453 Creates a new Database Abstraction Layer instance.
7454
7455 Keyword arguments:
7456
7457 :uri: string that contains information for connecting to a database.
7458 (default: 'sqlite://dummy.db')
7459
7460 experimental: you can specify a dictionary as uri
7461 parameter i.e. with
7462 db = DAL({"uri": "sqlite://storage.sqlite",
7463 "tables": {...}, ...})
7464
7465 for an example of dict input you can check the output
7466 of the scaffolding db model with
7467
7468 db.as_dict()
7469
7470 Note that for compatibility with Python older than
7471 version 2.6.5 you should cast your dict input keys
7472 to str due to a syntax limitation on kwarg names.
7473 for proper DAL dictionary input you can use one of:
7474
7475 obj = serializers.cast_keys(dict, [encoding="utf-8"])
7476
7477 or else (for parsing json input)
7478
7479 obj = serializers.loads_json(data, unicode_keys=False)
7480
7481 :pool_size: How many open connections to make to the database object.
7482 :folder: where .table files will be created.
7483 automatically set within web2py
7484 use an explicit path when using DAL outside web2py
7485 :db_codec: string encoding of the database (default: 'UTF-8')
7486 :check_reserved: list of adapters to check tablenames and column names
7487 against sql/nosql reserved keywords. (Default None)
7488
7489 * 'common' List of sql keywords that are common to all database types
7490 such as "SELECT, INSERT". (recommended)
7491 * 'all' Checks against all known SQL keywords. (not recommended)
7492 * '<adaptername>' Checks against the specific adapter's list of keywords
7493 (recommended)
7494 * '<adaptername>_nonreserved' Checks against the specific adapter's
7495 list of nonreserved keywords. (if available)
7496 :migrate (defaults to True) sets default migrate behavior for all tables
7497 :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
7498 :migrate_enabled (defaults to True). If set to False disables ALL migrations
7499 :fake_migrate_all (defaults to False). If set to True fake migrates ALL tables
7500 :attempts (defaults to 5). Number of times to attempt connecting
7501 :auto_import (defaults to False). If set, import automatically table definitions from the
7502 databases folder
7503 :bigint_id (defaults to False): If set, turn on bigint instead of int for id fields
7504 :lazy_tables (defaults to False): delay table definition until table access
7505 :after_connection (defaults to None): a callable that will be executed after the connection
7506 """
7507 if uri == '<zombie>' and db_uid is not None: return
7508 if not decode_credentials:
7509 credential_decoder = lambda cred: cred
7510 else:
7511 credential_decoder = lambda cred: urllib.unquote(cred)
7512 self._folder = folder
7513 if folder:
7514 self.set_folder(folder)
7515 self._uri = uri
7516 self._pool_size = pool_size
7517 self._db_codec = db_codec
7518 self._lastsql = ''
7519 self._timings = []
7520 self._pending_references = {}
7521 self._request_tenant = 'request_tenant'
7522 self._common_fields = []
7523 self._referee_name = '%(table)s'
7524 self._bigint_id = bigint_id
7525 self._debug = debug
7526 self._migrated = []
7527 self._LAZY_TABLES = {}
7528 self._lazy_tables = lazy_tables
7529 self._tables = SQLCallableList()
7530 self._driver_args = driver_args
7531 self._adapter_args = adapter_args
7532 self._check_reserved = check_reserved
7533 self._decode_credentials = decode_credentials
7534 self._attempts = attempts
7535 self._do_connect = do_connect
7536
7537 if not str(attempts).isdigit() or attempts < 0:
7538 attempts = 5
7539 if uri:
7540 uris = isinstance(uri,(list,tuple)) and uri or [uri]
7541 error = ''
7542 connected = False
7543 for k in range(attempts):
7544 for uri in uris:
7545 try:
7546 if is_jdbc and not uri.startswith('jdbc:'):
7547 uri = 'jdbc:'+uri
7548 self._dbname = REGEX_DBNAME.match(uri).group()
7549 if not self._dbname in ADAPTERS:
7550 raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
7551
7552
7553 kwargs = dict(db=self,uri=uri,
7554 pool_size=pool_size,
7555 folder=folder,
7556 db_codec=db_codec,
7557 credential_decoder=credential_decoder,
7558 driver_args=driver_args or {},
7559 adapter_args=adapter_args or {},
7560 do_connect=do_connect,
7561 after_connection=after_connection)
7562 self._adapter = ADAPTERS[self._dbname](**kwargs)
7563 types = ADAPTERS[self._dbname].types
7564
7565 self._adapter.types = copy.copy(types)
7566 self._adapter.build_parsemap()
7567 if bigint_id:
7568 if 'big-id' in types and 'reference' in types:
7569 self._adapter.types['id'] = types['big-id']
7570 self._adapter.types['reference'] = types['big-reference']
7571 connected = True
7572 break
7573 except SyntaxError:
7574 raise
7575 except Exception:
7576 tb = traceback.format_exc()
7577 sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
7578 if connected:
7579 break
7580 else:
7581 time.sleep(1)
7582 if not connected:
7583 raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
7584 else:
7585 self._adapter = BaseAdapter(db=self,pool_size=0,
7586 uri='None',folder=folder,
7587 db_codec=db_codec, after_connection=after_connection)
7588 migrate = fake_migrate = False
7589 adapter = self._adapter
7590 self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
7591 self.check_reserved = check_reserved
7592 if self.check_reserved:
7593 from reserved_sql_keywords import ADAPTERS as RSK
7594 self.RSK = RSK
7595 self._migrate = migrate
7596 self._fake_migrate = fake_migrate
7597 self._migrate_enabled = migrate_enabled
7598 self._fake_migrate_all = fake_migrate_all
7599 if auto_import or tables:
7600 self.import_table_definitions(adapter.folder,
7601 tables=tables)
7602
7603 @property
7604 - def tables(self):
7605 return self._tables
7606
7607 - def import_table_definitions(self, path, migrate=False,
7608 fake_migrate=False, tables=None):
7609 pattern = pjoin(path,self._uri_hash+'_*.table')
7610 if tables:
7611 for table in tables:
7612 self.define_table(**table)
7613 else:
7614 for filename in glob.glob(pattern):
7615 tfile = self._adapter.file_open(filename, 'r')
7616 try:
7617 sql_fields = pickle.load(tfile)
7618 name = filename[len(pattern)-7:-6]
7619 mf = [(value['sortable'],
7620 Field(key,
7621 type=value['type'],
7622 length=value.get('length',None),
7623 notnull=value.get('notnull',False),
7624 unique=value.get('unique',False))) \
7625 for key, value in sql_fields.iteritems()]
7626 mf.sort(lambda a,b: cmp(a[0],b[0]))
7627 self.define_table(name,*[item[1] for item in mf],
7628 **dict(migrate=migrate,
7629 fake_migrate=fake_migrate))
7630 finally:
7631 self._adapter.file_close(tfile)
7632
7633 - def check_reserved_keyword(self, name):
7634 """
7635 Validates ``name`` against SQL keywords.
7636 Uses self.check_reserved, which is a list of
7637 backends to check, e.g.:
7638 self.check_reserved
7639 ['common', 'postgres', 'mysql']
7640 self.check_reserved
7641 ['all']
7642 """
7643 for backend in self.check_reserved:
7644 if name.upper() in self.RSK[backend]:
7645 raise SyntaxError(
7646 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
7647
7648 - def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
7649 """
7650 EXAMPLE:
7651
7652 db.define_table('person',Field('name'),Field('info'))
7653 db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))
7654
7655 @request.restful()
7656 def index():
7657 def GET(*args,**vars):
7658 patterns = [
7659 "/friends[person]",
7660 "/{person.name}/:field",
7661 "/{person.name}/pets[pet.ownedby]",
7662 "/{person.name}/pets[pet.ownedby]/{pet.name}",
7663 "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
7664 ("/dogs[pet]", db.pet.info=='dog'),
7665 ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
7666 ]
7667 parser = db.parse_as_rest(patterns,args,vars)
7668 if parser.status == 200:
7669 return dict(content=parser.response)
7670 else:
7671 raise HTTP(parser.status,parser.error)
7672
7673 def POST(table_name,**vars):
7674 if table_name == 'person':
7675 return db.person.validate_and_insert(**vars)
7676 elif table_name == 'pet':
7677 return db.pet.validate_and_insert(**vars)
7678 else:
7679 raise HTTP(400)
7680 return locals()
7681 """
7682
7683 db = self
7684 re1 = REGEX_SEARCH_PATTERN
7685 re2 = REGEX_SQUARE_BRACKETS
7686
7687 def auto_table(table,base='',depth=0):
7688 patterns = []
7689 for field in db[table].fields:
7690 if base:
7691 tag = '%s/%s' % (base,field.replace('_','-'))
7692 else:
7693 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
7694 f = db[table][field]
7695 if not f.readable: continue
7696 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
7697 tag += '/{%s.%s}' % (table,field)
7698 patterns.append(tag)
7699 patterns.append(tag+'/:field')
7700 elif f.type.startswith('boolean'):
7701 tag += '/{%s.%s}' % (table,field)
7702 patterns.append(tag)
7703 patterns.append(tag+'/:field')
7704 elif f.type in ('float','double','integer','bigint'):
7705 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
7706 patterns.append(tag)
7707 patterns.append(tag+'/:field')
7708 elif f.type.startswith('list:'):
7709 tag += '/{%s.%s.contains}' % (table,field)
7710 patterns.append(tag)
7711 patterns.append(tag+'/:field')
7712 elif f.type in ('date','datetime'):
7713 tag+= '/{%s.%s.year}' % (table,field)
7714 patterns.append(tag)
7715 patterns.append(tag+'/:field')
7716 tag+='/{%s.%s.month}' % (table,field)
7717 patterns.append(tag)
7718 patterns.append(tag+'/:field')
7719 tag+='/{%s.%s.day}' % (table,field)
7720 patterns.append(tag)
7721 patterns.append(tag+'/:field')
7722 if f.type in ('datetime','time'):
7723 tag+= '/{%s.%s.hour}' % (table,field)
7724 patterns.append(tag)
7725 patterns.append(tag+'/:field')
7726 tag+='/{%s.%s.minute}' % (table,field)
7727 patterns.append(tag)
7728 patterns.append(tag+'/:field')
7729 tag+='/{%s.%s.second}' % (table,field)
7730 patterns.append(tag)
7731 patterns.append(tag+'/:field')
7732 if depth>0:
7733 for f in db[table]._referenced_by:
7734 tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
7735 patterns.append(tag)
7736 patterns += auto_table(table,base=tag,depth=depth-1)
7737 return patterns
7738
7739 if patterns == 'auto':
7740 patterns=[]
7741 for table in db.tables:
7742 if not table.startswith('auth_'):
7743 patterns.append('/%s[%s]' % (table,table))
7744 patterns += auto_table(table,base='',depth=1)
7745 else:
7746 i = 0
7747 while i<len(patterns):
7748 pattern = patterns[i]
7749 if not isinstance(pattern,str):
7750 pattern = pattern[0]
7751 tokens = pattern.split('/')
7752 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
7753 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
7754 '/'.join(tokens[:-1]))
7755 patterns = patterns[:i]+new_patterns+patterns[i+1:]
7756 i += len(new_patterns)
7757 else:
7758 i += 1
7759 if '/'.join(args) == 'patterns':
7760 return Row({'status':200,'pattern':'list',
7761 'error':None,'response':patterns})
7762 for pattern in patterns:
7763 basequery, exposedfields = None, []
7764 if isinstance(pattern,tuple):
7765 if len(pattern)==2:
7766 pattern, basequery = pattern
7767 elif len(pattern)>2:
7768 pattern, basequery, exposedfields = pattern[0:3]
7769 otable=table=None
7770 if not isinstance(queries,dict):
7771 dbset=db(queries)
7772 if basequery is not None:
7773 dbset = dbset(basequery)
7774 i=0
7775 tags = pattern[1:].split('/')
7776 if len(tags)!=len(args):
7777 continue
7778 for tag in tags:
7779 if re1.match(tag):
7780
7781 tokens = tag[1:-1].split('.')
7782 table, field = tokens[0], tokens[1]
7783 if not otable or table == otable:
7784 if len(tokens)==2 or tokens[2]=='eq':
7785 query = db[table][field]==args[i]
7786 elif tokens[2]=='ne':
7787 query = db[table][field]!=args[i]
7788 elif tokens[2]=='lt':
7789 query = db[table][field]<args[i]
7790 elif tokens[2]=='gt':
7791 query = db[table][field]>args[i]
7792 elif tokens[2]=='ge':
7793 query = db[table][field]>=args[i]
7794 elif tokens[2]=='le':
7795 query = db[table][field]<=args[i]
7796 elif tokens[2]=='year':
7797 query = db[table][field].year()==args[i]
7798 elif tokens[2]=='month':
7799 query = db[table][field].month()==args[i]
7800 elif tokens[2]=='day':
7801 query = db[table][field].day()==args[i]
7802 elif tokens[2]=='hour':
7803 query = db[table][field].hour()==args[i]
7804 elif tokens[2]=='minute':
7805 query = db[table][field].minutes()==args[i]
7806 elif tokens[2]=='second':
7807 query = db[table][field].seconds()==args[i]
7808 elif tokens[2]=='startswith':
7809 query = db[table][field].startswith(args[i])
7810 elif tokens[2]=='contains':
7811 query = db[table][field].contains(args[i])
7812 else:
7813 raise RuntimeError("invalid pattern: %s" % pattern)
7814 if len(tokens)==4 and tokens[3]=='not':
7815 query = ~query
7816 elif len(tokens)>=4:
7817 raise RuntimeError("invalid pattern: %s" % pattern)
7818 if not otable and isinstance(queries,dict):
7819 dbset = db(queries[table])
7820 if basequery is not None:
7821 dbset = dbset(basequery)
7822 dbset=dbset(query)
7823 else:
7824 raise RuntimeError("missing relation in pattern: %s" % pattern)
7825 elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
7826 ref = tag[tag.find('[')+1:-1]
7827 if '.' in ref and otable:
7828 table,field = ref.split('.')
7829 selfld = '_id'
7830 if db[table][field].type.startswith('reference '):
7831 refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
7832 else:
7833 refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
7834 if refs:
7835 selfld = refs[0]
7836 if nested_select:
7837 try:
7838 dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
7839 except ValueError:
7840 return Row({'status':400,'pattern':pattern,
7841 'error':'invalid path','response':None})
7842 else:
7843 items = [item.id for item in dbset.select(db[otable][selfld])]
7844 dbset=db(db[table][field].belongs(items))
7845 else:
7846 table = ref
7847 if not otable and isinstance(queries,dict):
7848 dbset = db(queries[table])
7849 dbset=dbset(db[table])
7850 elif tag==':field' and table:
7851
7852 field = args[i]
7853 if not field in db[table]: break
7854
7855 if not db[table][field].readable:
7856 return Row({'status':418,'pattern':pattern,
7857 'error':'I\'m a teapot','response':None})
7858 try:
7859 distinct = vars.get('distinct', False) == 'True'
7860 offset = long(vars.get('offset',None) or 0)
7861 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
7862 except ValueError:
7863 return Row({'status':400,'error':'invalid limits','response':None})
7864 items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
7865 if items:
7866 return Row({'status':200,'response':items,
7867 'pattern':pattern})
7868 else:
7869 return Row({'status':404,'pattern':pattern,
7870 'error':'no record found','response':None})
7871 elif tag != args[i]:
7872 break
7873 otable = table
7874 i += 1
7875 if i==len(tags) and table:
7876 ofields = vars.get('order',db[table]._id.name).split('|')
7877 try:
7878 orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
7879 except (KeyError, AttributeError):
7880 return Row({'status':400,'error':'invalid orderby','response':None})
7881 if exposedfields:
7882 fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
7883 else:
7884 fields = [field for field in db[table] if field.readable]
7885 count = dbset.count()
7886 try:
7887 offset = long(vars.get('offset',None) or 0)
7888 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
7889 except ValueError:
7890 return Row({'status':400,'error':'invalid limits','response':None})
7891 if count > limits[1]-limits[0]:
7892 return Row({'status':400,'error':'too many records','response':None})
7893 try:
7894 response = dbset.select(limitby=limits,orderby=orderby,*fields)
7895 except ValueError:
7896 return Row({'status':400,'pattern':pattern,
7897 'error':'invalid path','response':None})
7898 return Row({'status':200,'response':response,
7899 'pattern':pattern,'count':count})
7900 return Row({'status':400,'error':'no matching pattern','response':None})
7901
7902 - def define_table(
7903 self,
7904 tablename,
7905 *fields,
7906 **args
7907 ):
7908 if not fields and 'fields' in args:
7909 fields = args.get('fields',())
7910 if not isinstance(tablename, str):
7911 if isinstance(tablename, unicode):
7912 try:
7913 tablename = str(tablename)
7914 except UnicodeEncodeError:
7915 raise SyntaxError("invalid unicode table name")
7916 else:
7917 raise SyntaxError("missing table name")
7918 elif hasattr(self,tablename) or tablename in self.tables:
7919 if not args.get('redefine',False):
7920 raise SyntaxError('table already defined: %s' % tablename)
7921 elif tablename.startswith('_') or hasattr(self,tablename) or \
7922 REGEX_PYTHON_KEYWORDS.match(tablename):
7923 raise SyntaxError('invalid table name: %s' % tablename)
7924 elif self.check_reserved:
7925 self.check_reserved_keyword(tablename)
7926 else:
7927 invalid_args = set(args)-TABLE_ARGS
7928 if invalid_args:
7929 raise SyntaxError('invalid table "%s" attributes: %s' \
7930 % (tablename,invalid_args))
7931 if self._lazy_tables and not tablename in self._LAZY_TABLES:
7932 self._LAZY_TABLES[tablename] = (tablename,fields,args)
7933 table = None
7934 else:
7935 table = self.lazy_define_table(tablename,*fields,**args)
7936 if not tablename in self.tables:
7937 self.tables.append(tablename)
7938 return table
7939
7940 - def lazy_define_table(
7941 self,
7942 tablename,
7943 *fields,
7944 **args
7945 ):
7946 args_get = args.get
7947 common_fields = self._common_fields
7948 if common_fields:
7949 fields = list(fields) + list(common_fields)
7950
7951 table_class = args_get('table_class',Table)
7952 table = table_class(self, tablename, *fields, **args)
7953 table._actual = True
7954 self[tablename] = table
7955
7956 table._create_references()
7957 for field in table:
7958 if field.requires == DEFAULT:
7959 field.requires = sqlhtml_validators(field)
7960
7961 migrate = self._migrate_enabled and args_get('migrate',self._migrate)
7962 if migrate and not self._uri in (None,'None') \
7963 or self._adapter.dbengine=='google:datastore':
7964 fake_migrate = self._fake_migrate_all or \
7965 args_get('fake_migrate',self._fake_migrate)
7966 polymodel = args_get('polymodel',None)
7967 try:
7968 GLOBAL_LOCKER.acquire()
7969 self._lastsql = self._adapter.create_table(
7970 table,migrate=migrate,
7971 fake_migrate=fake_migrate,
7972 polymodel=polymodel)
7973 finally:
7974 GLOBAL_LOCKER.release()
7975 else:
7976 table._dbt = None
7977 on_define = args_get('on_define',None)
7978 if on_define: on_define(table)
7979 return table
7980
7981 - def as_dict(self, flat=False, sanitize=True):
7982 db_uid = uri = None
7983 if not sanitize:
7984 uri, db_uid = (self._uri, self._db_uid)
7985 db_as_dict = dict(tables=[], uri=uri, db_uid=db_uid,
7986 **dict([(k, getattr(self, "_" + k, None))
7987 for k in 'pool_size','folder','db_codec',
7988 'check_reserved','migrate','fake_migrate',
7989 'migrate_enabled','fake_migrate_all',
7990 'decode_credentials','driver_args',
7991 'adapter_args', 'attempts',
7992 'bigint_id','debug','lazy_tables',
7993 'do_connect']))
7994 for table in self:
7995 db_as_dict["tables"].append(table.as_dict(flat=flat,
7996 sanitize=sanitize))
7997 return db_as_dict
7998
7999 - def as_xml(self, sanitize=True):
8004
8005 - def as_json(self, sanitize=True):
8010
8011 - def as_yaml(self, sanitize=True):
8016
    def __contains__(self, tablename):
        try:
            return tablename in self.tables
        except AttributeError:
            # the instance has no .tables attribute yet
            return False
8023
8024 has_key = __contains__
8025
    def get(self, key, default=None):
        return self.__dict__.get(key, default)
8028
    def __iter__(self):
        for tablename in self.tables:
            yield self[tablename]
8032

    def __getattr__(self, key):
        if ogetattr(self, '_lazy_tables') and \
                key in ogetattr(self, '_LAZY_TABLES'):
            tablename, fields, args = self._LAZY_TABLES.pop(key)
            return self.lazy_define_table(tablename, *fields, **args)
        return ogetattr(self, key)
8042
    def __setitem__(self, key, value):
        osetattr(self, str(key), value)
8045
    def __setattr__(self, key, value):
        if key[:1] != '_' and key in self:
            raise SyntaxError(
                'Object %s exists and cannot be redefined' % key)
        osetattr(self, key, value)
8051
8052 __delitem__ = object.__delattr__
8053
    def __repr__(self):
        if hasattr(self, '_uri'):
            return '<DAL uri="%s">' % hide_password(str(self._uri))
        else:
            return '<DAL db_uid="%s">' % self._db_uid
8059
8062
8063 - def __call__(self, query=None, ignore_common_filters=None):
8064 if isinstance(query,Table):
8065 query = self._adapter.id_query(query)
8066 elif isinstance(query,Field):
8067 query = query!=None
8068 elif isinstance(query, dict):
8069 icf = query.get("ignore_common_filters")
8070 if icf: ignore_common_filters = icf
8071 return Set(self, query, ignore_common_filters=ignore_common_filters)
8072
    def commit(self):
        self._adapter.commit()

    def rollback(self):
        self._adapter.rollback()

    def close(self):
        self._adapter.close()
        if self._db_uid in THREAD_LOCAL.db_instances:
            db_group = THREAD_LOCAL.db_instances[self._db_uid]
            db_group.remove(self)
            if not db_group:
                del THREAD_LOCAL.db_instances[self._db_uid]
8086
    def executesql(self, query, placeholders=None, as_dict=False,
                   fields=None, colnames=None):
        """
        "placeholders" is optional and defaults to None. When the SQL is
        generated by the DAL it is always None; with raw SQL, placeholders
        may be a sequence of values to be substituted in, or (if supported
        by the DB driver) a dictionary with keys matching named
        placeholders in your SQL.

        "as_dict" (optional, added 2009-12-05) defaults to False and is
        not needed when using the DAL. With raw SQL it can be set to True,
        in which case the results cursor returned by the DB driver is
        converted to a sequence of dictionaries keyed with the db field
        names. Tested with SQLite, but it should work with any database
        since cursor.description, used to get the field names, is part of
        the Python DB API 2.0 spec. Results returned with as_dict=True
        are the same as those returned when applying .to_list() to a DAL
        query:

        [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
8106
8107 Added 2012-08-24 "fields" and "colnames" optional arguments. If either
8108 is provided, the results cursor returned by the DB driver will be
8109 converted to a DAL Rows object using the db._adapter.parse() method.
8110
8111 The "fields" argument is a list of DAL Field objects that match the
8112 fields returned from the DB. The Field objects should be part of one or
8113 more Table objects defined on the DAL object. The "fields" list can
8114 include one or more DAL Table objects in addition to or instead of
8115 including Field objects, or it can be just a single table (not in a
8116 list). In that case, the Field objects will be extracted from the
8117 table(s).
8118
8119 Instead of specifying the "fields" argument, the "colnames" argument
8120 can be specified as a list of field names in tablename.fieldname format.
8121 Again, these should represent tables and fields defined on the DAL
8122 object.
8123
8124 It is also possible to specify both "fields" and the associated
8125 "colnames". In that case, "fields" can also include DAL Expression
8126 objects in addition to Field objects. For Field objects in "fields",
8127 the associated "colnames" must still be in tablename.fieldname format.
8128 For Expression objects in "fields", the associated "colnames" can
8129 be any arbitrary labels.
8130
8131 Note, the DAL Table objects referred to by "fields" or "colnames" can
8132 be dummy tables and do not have to represent any real tables in the
8133 database. Also, note that the "fields" and "colnames" must be in the
8134 same order as the fields in the results cursor returned from the DB.
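
        Example (a sketch; a defined 'person' table is assumed):

            rows = db.executesql('SELECT id, name FROM person;',
                                 fields=[db.person.id, db.person.name])
            # rows is a DAL Rows object, parsed as if returned by a select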
8135 """
8136 adapter = self._adapter
8137 if placeholders:
8138 adapter.execute(query, placeholders)
8139 else:
8140 adapter.execute(query)
        if as_dict:
            if not hasattr(adapter.cursor, 'description'):
                raise RuntimeError("database does not support executesql(...,as_dict=True)")
            # Non-DAL legacy behaviour: convert the raw cursor results
            # into a list of dictionaries.
            # cursor.description is a sequence of 7-item sequences;
            # the first item of each is the column name
            columns = adapter.cursor.description
            fields = [f[0] for f in columns]
            data = adapter._fetchall()
            # convert each row into a dictionary so it is easier to work
            # with: row['field_name'] rather than row[0]
            return [dict(zip(fields, row)) for row in data]
8155 try:
8156 data = adapter._fetchall()
8157 except:
8158 return None
8159 if fields or colnames:
8160 fields = [] if fields is None else fields
8161 if not isinstance(fields, list):
8162 fields = [fields]
8163 extracted_fields = []
8164 for field in fields:
8165 if isinstance(field, Table):
8166 extracted_fields.extend([f for f in field])
8167 else:
8168 extracted_fields.append(field)
8169 if not colnames:
8170 colnames = ['%s.%s' % (f.tablename, f.name)
8171 for f in extracted_fields]
8172 data = adapter.parse(
8173 data, fields=extracted_fields, colnames=colnames)
8174 return data
8175
    def _remove_references_to(self, thistable):
        for table in self:
            table._referenced_by = [field for field in table._referenced_by
                                    if not field.table == thistable]
8180
    def export_to_csv_file(self, ofile, *args, **kwargs):
        step = long(kwargs.get('max_fetch_rows', 500))
        write_colnames = kwargs['write_colnames'] = \
            kwargs.get("write_colnames", True)
        for table in self.tables:
            ofile.write('TABLE %s\r\n' % table)
            query = self._adapter.id_query(self[table])
            nrows = self(query).count()
            kwargs['write_colnames'] = write_colnames
            for k in range(0, nrows, step):
                self(query).select(limitby=(k, k+step)).export_to_csv_file(
                    ofile, *args, **kwargs)
                kwargs['write_colnames'] = False
            ofile.write('\r\n\r\n')
        ofile.write('END')
8196
    def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
                             unique='uuid', map_tablenames=None,
                             ignore_missing_tables=False,
                             *args, **kwargs):
8201
8202 id_offset = {}
8203 map_tablenames = map_tablenames or {}
8204 for line in ifile:
8205 line = line.strip()
8206 if not line:
8207 continue
8208 elif line == 'END':
8209 return
8210 elif not line.startswith('TABLE ') or \
8211 not line[6:] in self.tables:
8212 raise SyntaxError('invalid file format')
8213 else:
8214 tablename = line[6:]
8215 tablename = map_tablenames.get(tablename,tablename)
8216 if tablename is not None and tablename in self.tables:
8217 self[tablename].import_from_csv_file(
8218 ifile, id_map, null, unique, id_offset,
8219 *args, **kwargs)
            elif tablename is None or ignore_missing_tables:
                # skip all the lines of this table block
                for line in ifile:
                    if not line.strip():
                        break
8225 else:
8226 raise RuntimeError("Unable to import table that does not exist.\nTry db.import_from_csv_file(..., map_tablenames={'table':'othertable'},ignore_missing_tables=True)")
8227
def DAL_unpickler(db_uid):
    return DAL('<zombie>', db_uid=db_uid)

def DAL_pickler(db):
    return (DAL_unpickler, (db._db_uid,))

copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)

class SQLALL(object):
    """
    Helper class providing a comma-separated string having all the field names
    (prefixed by table name and '.')

    normally only called from within gluon.sql
    """

    def __init__(self, table):
        self._table = table

    def __repr__(self):
        return ', '.join([str(field) for field in self._table])
8250

class Reference(long):

    def __allocate(self):
        if not self._record:
            self._record = self._table[long(self)]
        if not self._record:
            raise RuntimeError(
                "Using a recursive select but encountered a broken reference: %s %d" % (self._table, long(self)))
8260
    def __getattr__(self, key):
        if key == 'id':
            return long(self)
        if key in self._table:
            self.__allocate()
        if self._record:
            return self._record.get(key, None)
        else:
            return None
8270
8271 - def get(self, key, default=None):
8273
8280
    def __getitem__(self, key):
        if key == 'id':
            return long(self)
        self.__allocate()
        return self._record.get(key, None)
8286
    def __setitem__(self, key, value):
        self.__allocate()
        self._record[key] = value
8290
def Reference_unpickler(data):
    return marshal.loads(data)

def Reference_pickler(data):
    try:
        marshal_dump = marshal.dumps(long(data))
    except AttributeError:
        marshal_dump = 'i%s' % struct.pack('<i', long(data))
    return (Reference_unpickler, (marshal_dump,))
8301
8302 copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
class MethodAdder(object):
    def __init__(self, table):
        self.table = table
    def __call__(self):
        return self.register()
    def __getattr__(self, method_name):
        return self.register(method_name)
    def register(self, method_name=None):
        def _decorated(f):
            instance = self.table
            import types
            method = types.MethodType(f, instance, instance.__class__)
            name = method_name or f.func_name
            setattr(instance, name, method)
            return f
        return _decorated
8320
class Table(object):
8323 """
8324 an instance of this class represents a database table
8325
8326 Example::
8327
8328 db = DAL(...)
8329 db.define_table('users', Field('name'))
8330 db.users.insert(name='me') # print db.users._insert(...) to see SQL
8331 db.users.drop()
8332 """
8333
    def __init__(self, db, tablename, *fields, **args):
        """
        Initializes the table and performs checking on the provided fields.

        Each table will automatically have an 'id' field.

        If a field is of type Table, the fields (excluding 'id') from that
        table will be used instead.

        :raises SyntaxError: when a supplied field is of incorrect type.
        """
8351 self._actual = False
8352 self._tablename = tablename
8353 self._ot = args.get('actual_name')
8354 self._sequence_name = args.get('sequence_name') or \
8355 db and db._adapter.sequence_name(tablename)
8356 self._trigger_name = args.get('trigger_name') or \
8357 db and db._adapter.trigger_name(tablename)
8358 self._common_filter = args.get('common_filter')
8359 self._format = args.get('format')
8360 self._singular = args.get(
8361 'singular',tablename.replace('_',' ').capitalize())
8362 self._plural = args.get(
8363 'plural',pluralize(self._singular.lower()).capitalize())
8364
8365 if 'primarykey' in args and args['primarykey'] is not None:
8366 self._primarykey = args.get('primarykey')
8367
8368 self._before_insert = []
8369 self._before_update = [Set.delete_uploaded_files]
8370 self._before_delete = [Set.delete_uploaded_files]
8371 self._after_insert = []
8372 self._after_update = []
8373 self._after_delete = []
8374
8375 self.add_method = MethodAdder(self)
8376
8377 fieldnames,newfields=set(),[]
8378 _primarykey = getattr(self, '_primarykey', None)
8379 if _primarykey is not None:
8380 if not isinstance(_primarykey, list):
8381 raise SyntaxError(
8382 "primarykey must be a list of fields from table '%s'" \
8383 % tablename)
8384 if len(_primarykey)==1:
8385 self._id = [f for f in fields if isinstance(f,Field) \
8386 and f.name==_primarykey[0]][0]
8387 elif not [f for f in fields if (isinstance(f,Field) and
8388 f.type=='id') or (isinstance(f, dict) and
8389 f.get("type", None)=="id")]:
8390 field = Field('id', 'id')
8391 newfields.append(field)
8392 fieldnames.add('id')
8393 self._id = field
8394 virtual_fields = []
8395 def include_new(field):
8396 newfields.append(field)
8397 fieldnames.add(field.name)
8398 if field.type=='id':
8399 self._id = field
8400 for field in fields:
8401 if isinstance(field, (FieldMethod, FieldVirtual)):
8402 virtual_fields.append(field)
8403 elif isinstance(field, Field) and not field.name in fieldnames:
8404 if field.db is not None:
8405 field = copy.copy(field)
8406 include_new(field)
8407 elif isinstance(field, dict) and not field['fieldname'] in fieldnames:
8408 include_new(Field(**field))
8409 elif isinstance(field, Table):
8410 table = field
8411 for field in table:
8412 if not field.name in fieldnames and not field.type=='id':
8413 t2 = not table._actual and self._tablename
8414 include_new(field.clone(point_self_references_to=t2))
8415 elif not isinstance(field, (Field, Table)):
8416 raise SyntaxError(
8417 'define_table argument is not a Field or Table: %s' % field)
8418 fields = newfields
8419 self._db = db
8420 tablename = tablename
8421 self._fields = SQLCallableList()
8422 self.virtualfields = []
8423 fields = list(fields)
8424
8425 if db and db._adapter.uploads_in_blob==True:
8426 uploadfields = [f.name for f in fields if f.type=='blob']
8427 for field in fields:
8428 fn = field.uploadfield
8429 if isinstance(field, Field) and field.type == 'upload'\
8430 and fn is True:
8431 fn = field.uploadfield = '%s_blob' % field.name
8432 if isinstance(fn,str) and not fn in uploadfields:
8433 fields.append(Field(fn,'blob',default='',
8434 writable=False,readable=False))
8435
8436 lower_fieldnames = set()
8437 reserved = dir(Table) + ['fields']
8438 if (db and db.check_reserved):
8439 check_reserved = db.check_reserved_keyword
8440 else:
8441 def check_reserved(field_name):
8442 if field_name in reserved:
8443 raise SyntaxError("field name %s not allowed" % field_name)
8444 for field in fields:
8445 field_name = field.name
8446 check_reserved(field_name)
8447 fn_lower = field_name.lower()
8448 if fn_lower in lower_fieldnames:
8449 raise SyntaxError("duplicate field %s in table %s" \
8450 % (field_name, tablename))
8451 else:
8452 lower_fieldnames.add(fn_lower)
8453
8454 self.fields.append(field_name)
8455 self[field_name] = field
8456 if field.type == 'id':
8457 self['id'] = field
8458 field.tablename = field._tablename = tablename
8459 field.table = field._table = self
8460 field.db = field._db = db
8461 self.ALL = SQLALL(self)
8462
8463 if _primarykey is not None:
8464 for k in _primarykey:
8465 if k not in self.fields:
                    raise SyntaxError(
                        "primarykey must be a list of fields from table '%s'" % tablename)
8468 else:
8469 self[k].notnull = True
8470 for field in virtual_fields:
8471 self[field.name] = field
8472
    @property
    def fields(self):
        return self._fields

8477 - def update(self,*args,**kwargs):
8478 raise RuntimeError("Syntax Not Supported")
8479
8480 - def _enable_record_versioning(self,
8481 archive_db=None,
8482 archive_name = '%(tablename)s_archive',
8483 is_active = 'is_active',
8484 current_record = 'current_record',
8485 current_record_label = None):
8486 db = self._db
8487 archive_db = archive_db or db
8488 archive_name = archive_name % dict(tablename=self._tablename)
8489 if archive_name in archive_db.tables():
8490 return
8491 fieldnames = self.fields()
8492 same_db = archive_db is db
8493 field_type = self if same_db else 'bigint'
8494 clones = []
8495 for field in self:
8496 nfk = same_db or not field.type.startswith('reference')
8497 clones.append(field.clone(
8498 unique=False, type=field.type if nfk else 'bigint'))
8499 archive_db.define_table(
8500 archive_name,
8501 Field(current_record,field_type,label=current_record_label),
8502 *clones,**dict(format=self._format))
8503
8504 self._before_update.append(
8505 lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
8506 archive_record(qset,fs,db[an],cn))
8507 if is_active and is_active in fieldnames:
8508 self._before_delete.append(
8509 lambda qset: qset.update(is_active=False))
8510 newquery = lambda query, t=self, name=self._tablename: \
8511 reduce(AND,[db[tn].is_active == True
8512 for tn in db._adapter.tables(query)
8513 if tn==name or getattr(db[tn],'_ot',None)==name])
8514 query = self._common_filter
8515 if query:
8516 newquery = query & newquery
8517 self._common_filter = newquery
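
    # Usage sketch (a 'thing' table is assumed): after
    #
    #     db.thing._enable_record_versioning()
    #
    # every update archives the previous version into 'thing_archive' and,
    # if the table has an 'is_active' field, deletes become soft-deletes.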
8518

    def _create_references(self):
        db = self._db
8529 pr = db._pending_references
8530 self._referenced_by = []
8531 self._references = []
8532 for field in self:
8533 fieldname = field.name
8534 field_type = field.type
8535 if isinstance(field_type,str) and field_type[:10] == 'reference ':
8536 ref = field_type[10:].strip()
                if not ref:
                    raise SyntaxError('Table: reference to nothing: %s' % ref)
8539 if '.' in ref:
8540 rtablename, throw_it,rfieldname = ref.partition('.')
8541 else:
8542 rtablename, rfieldname = ref, None
8543 if not rtablename in db:
8544 pr[rtablename] = pr.get(rtablename,[]) + [field]
8545 continue
8546 rtable = db[rtablename]
8547 if rfieldname:
8548 if not hasattr(rtable,'_primarykey'):
8549 raise SyntaxError(
8550 'keyed tables can only reference other keyed tables (for now)')
8551 if rfieldname not in rtable.fields:
8552 raise SyntaxError(
8553 "invalid field '%s' for referenced table '%s' in table '%s'" \
8554 % (rfieldname, rtablename, self._tablename))
8555 rfield = rtable[rfieldname]
8556 else:
8557 rfield = rtable._id
8558 rtable._referenced_by.append(field)
8559 field.referent = rfield
8560 self._references.append(field)
8561 else:
8562 field.referent = None
8563 for referee in pr.get(self._tablename,[]):
8564 self._referenced_by.append(referee)
8565
    def _filter_fields(self, record, id=False):
        return dict([(k, v) for (k, v) in record.iteritems() if k
                     in self.fields and (self[k].type != 'id' or id)])
8569
    def _build_query(self, key):
        """ for keyed table only """
8572 query = None
8573 for k,v in key.iteritems():
8574 if k in self._primarykey:
8575 if query:
8576 query = query & (self[k] == v)
8577 else:
8578 query = (self[k] == v)
8579 else:
8580 raise SyntaxError(
8581 'Field %s is not part of the primary key of %s' % \
8582 (k,self._tablename))
8583 return query
8584
    def __getitem__(self, key):
        if not key:
8587 return None
8588 elif isinstance(key, dict):
8589 """ for keyed table """
8590 query = self._build_query(key)
8591 return self._db(query).select(limitby=(0,1), orderby_on_limitby=False).first()
8592 elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
8593 return self._db(self._id == key).select(limitby=(0,1), orderby_on_limitby=False).first()
8594 elif key:
8595 return ogetattr(self, str(key))
8596
    def __call__(self, key=DEFAULT, **kwargs):
        for_update = kwargs.get('_for_update', False)
8599 if '_for_update' in kwargs: del kwargs['_for_update']
8600
8601 orderby = kwargs.get('_orderby',None)
8602 if '_orderby' in kwargs: del kwargs['_orderby']
8603
8604 if not key is DEFAULT:
8605 if isinstance(key, Query):
8606 record = self._db(key).select(
8607 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8608 elif not str(key).isdigit():
8609 record = None
8610 else:
8611 record = self._db(self._id == key).select(
8612 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8613 if record:
8614 for k,v in kwargs.iteritems():
8615 if record[k]!=v: return None
8616 return record
8617 elif kwargs:
8618 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
8619 return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8620 else:
8621 return None
8622
    def __setitem__(self, key, value):
        if isinstance(key, dict) and isinstance(value, dict):
            """ option for keyed table """
8626 if set(key.keys()) == set(self._primarykey):
8627 value = self._filter_fields(value)
8628 kv = {}
8629 kv.update(value)
8630 kv.update(key)
8631 if not self.insert(**kv):
8632 query = self._build_query(key)
8633 self._db(query).update(**self._filter_fields(value))
8634 else:
8635 raise SyntaxError(
8636 'key must have all fields from primary key: %s'%\
8637 (self._primarykey))
8638 elif str(key).isdigit():
8639 if key == 0:
8640 self.insert(**self._filter_fields(value))
8641 elif self._db(self._id == key)\
8642 .update(**self._filter_fields(value)) is None:
8643 raise SyntaxError('No such record: %s' % key)
8644 else:
8645 if isinstance(key, dict):
8646 raise SyntaxError(
8647 'value must be a dictionary: %s' % value)
8648 osetattr(self, str(key), value)
8649
8650 __getattr__ = __getitem__
8651
    def __setattr__(self, key, value):
        if key[:1] != '_' and key in self:
8654 raise SyntaxError('Object exists and cannot be redefined: %s' % key)
8655 osetattr(self,key,value)
8656
    def __delitem__(self, key):
        if isinstance(key, dict):
8659 query = self._build_query(key)
8660 if not self._db(query).delete():
8661 raise SyntaxError('No such record: %s' % key)
8662 elif not str(key).isdigit() or \
8663 not self._db(self._id == key).delete():
8664 raise SyntaxError('No such record: %s' % key)
8665
    def __contains__(self, key):
        return hasattr(self, key)
8668
8669 has_key = __contains__
8670
    def items(self):
        return self.__dict__.items()
8673
    def __iter__(self):
        for fieldname in self.fields:
8676 yield self[fieldname]
8677
8680
8681
    def __repr__(self):
        return '<Table %s (%s)>' % (self._tablename, ','.join(self.fields()))
8684
    def __str__(self):
        if self._ot is not None:
8687 ot = self._db._adapter.QUOTE_TEMPLATE % self._ot
8688 if 'Oracle' in str(type(self._db._adapter)):
8689 return '%s %s' % (ot, self._tablename)
8690 return '%s AS %s' % (ot, self._tablename)
8691 return self._tablename
8692
8693 - def _drop(self, mode = ''):
8694 return self._db._adapter._drop(self, mode)
8695
8696 - def drop(self, mode = ''):
8697 return self._db._adapter.drop(self,mode)
8698
    def _listify(self, fields, update=False):
        new_fields = {}  # format: new_fields[name] = (field, value)

        # store all fields passed as input (keys are fieldnames)
8703 for name in fields:
8704 if not name in self.fields:
8705 if name != 'id':
8706 raise SyntaxError(
8707 'Field %s does not belong to the table' % name)
8708 else:
8709 field = self[name]
8710 value = fields[name]
8711 if field.filter_in:
8712 value = field.filter_in(value)
8713 new_fields[name] = (field,value)

        # check all fields that should be in the table but are not passed
        to_compute = []
8717 for ofield in self:
8718 name = ofield.name
8719 if not name in new_fields:
8720
8721 if ofield.compute:
8722 to_compute.append((name,ofield))
8723
8724 elif not update and not ofield.default is None:
8725 value = ofield.default
8726 fields[name] = value
8727 new_fields[name] = (ofield,value)
8728
8729 elif update and not ofield.update is None:
8730 value = ofield.update
8731 fields[name] = value
8732 new_fields[name] = (ofield,value)
8733
8734 elif not update and ofield.required:
8735 raise RuntimeError(
8736 'Table: missing required field: %s' % name)
8737
8738 if to_compute:
8739 row = Row(fields)
8740 for name,ofield in to_compute:
8741
8742 try:
8743 row[name] = new_value = ofield.compute(row)
8744 new_fields[name] = (ofield, new_value)
8745 except (KeyError, AttributeError):
8746
8747 if ofield.required:
8748 raise SyntaxError('unable to compute field: %s' % name)
8749 return new_fields.values()
8750
8752 for field in self:
8753 if field.type=='upload' and field.name in fields:
8754 value = fields[field.name]
8755 if value is not None and not isinstance(value,str):
8756 if hasattr(value,'file') and hasattr(value,'filename'):
8757 new_name = field.store(value.file,filename=value.filename)
8758 elif hasattr(value,'read') and hasattr(value,'name'):
8759 new_name = field.store(value,filename=value.name)
8760 else:
8761 raise RuntimeError("Unable to handle upload")
8762 fields[field.name] = new_name
8763
    def _defaults(self, fields):
        "If there are no fields/values specified, return table defaults"
8766 if not fields:
8767 fields = {}
8768 for field in self:
8769 if field.type != "id":
8770 fields[field.name] = field.default
8771 return fields
8772
8776
8786
8802
8828
    def update_or_insert(self, _key=DEFAULT, **values):
        if _key is DEFAULT:
8831 record = self(**values)
8832 elif isinstance(_key,dict):
8833 record = self(**_key)
8834 else:
8835 record = self(_key)
8836 if record:
8837 record.update_record(**values)
8838 newid = None
8839 else:
8840 newid = self.insert(**values)
8841 return newid
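
    # Example (a sketch; assumes a 'person' table with a 'name' field):
    #
    #     db.person.update_or_insert(db.person.name == 'James', name='James')
    #     # returns the new id on insert, None when an existing record was updated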
8842
    def bulk_insert(self, items):
        """
        here items is a list of dictionaries
        """
8847 items = [self._listify(item) for item in items]
8848 if any(f(item) for item in items for f in self._before_insert):return 0
8849 ret = self._db._adapter.bulk_insert(self,items)
8850 ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
8851 return ret
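
    # Example (a sketch; assumes a 'person' table):
    #
    #     db.person.bulk_insert([{'name': 'Alex'}, {'name': 'Bo'}])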
8852
    def _truncate(self, mode=''):
        return self._db._adapter._truncate(self, mode)
8855
    def truncate(self, mode=''):
        return self._db._adapter.truncate(self, mode)
8858
    def import_from_csv_file(
        self,
        csvfile,
        id_map=None,
        null='<NULL>',
        unique='uuid',
        id_offset=None,
        *args, **kwargs
        ):
        """
        Import records from csv file.
        Column headers must have the same names as the table fields.
        Field 'id' is ignored.
        If column names read 'table.field' the 'table.' prefix is ignored.
        The 'unique' argument is a field which must be unique
        (typically a uuid field).
        The 'restore' argument defaults to False;
        if set to True old values in the table are removed first.
        If 'id_map' is set to None ids will not be mapped: the import
        keeps the id numbers of the restored records. This assumes that
        there is a field of type id that is an integer and in
        incrementing order.
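
        Example::

            # a sketch; assumes a 'person' table exported earlier
            db.person.import_from_csv_file(open('person.csv', 'rb'), restore=True)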
8882 """
8883
8884 delimiter = kwargs.get('delimiter', ',')
8885 quotechar = kwargs.get('quotechar', '"')
8886 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
8887 restore = kwargs.get('restore', False)
8888 if restore:
8889 self._db[self].truncate()
8890
8891 reader = csv.reader(csvfile, delimiter=delimiter,
8892 quotechar=quotechar, quoting=quoting)
8893 colnames = None
8894 if isinstance(id_map, dict):
8895 if not self._tablename in id_map:
8896 id_map[self._tablename] = {}
8897 id_map_self = id_map[self._tablename]
8898
8899 def fix(field, value, id_map, id_offset):
8900 list_reference_s='list:reference'
8901 if value == null:
8902 value = None
8903 elif field.type=='blob':
8904 value = base64.b64decode(value)
8905 elif field.type=='double' or field.type=='float':
8906 if not value.strip():
8907 value = None
8908 else:
8909 value = float(value)
8910 elif field.type in ('integer','bigint'):
8911 if not value.strip():
8912 value = None
8913 else:
8914 value = long(value)
8915 elif field.type.startswith('list:string'):
8916 value = bar_decode_string(value)
8917 elif field.type.startswith(list_reference_s):
8918 ref_table = field.type[len(list_reference_s):].strip()
8919 if id_map is not None:
8920 value = [id_map[ref_table][long(v)] \
8921 for v in bar_decode_string(value)]
8922 else:
8923 value = [v for v in bar_decode_string(value)]
8924 elif field.type.startswith('list:'):
8925 value = bar_decode_integer(value)
8926 elif id_map and field.type.startswith('reference'):
8927 try:
8928 value = id_map[field.type[9:].strip()][long(value)]
8929 except KeyError:
8930 pass
8931 elif id_offset and field.type.startswith('reference'):
8932 try:
8933 value = id_offset[field.type[9:].strip()]+long(value)
8934 except KeyError:
8935 pass
8936 return (field.name, value)
8937
8938 def is_id(colname):
8939 if colname in self:
8940 return self[colname].type == 'id'
8941 else:
8942 return False
8943
8944 first = True
8945 unique_idx = None
8946 for lineno, line in enumerate(reader):
8947 if not line:
8948 break
            if not colnames:
                # the first line of a block contains the column names
                colnames = [x.split('.',1)[-1] for x in line][:len(line)]
8952 cols, cid = [], None
8953 for i,colname in enumerate(colnames):
8954 if is_id(colname):
8955 cid = i
8956 elif colname in self.fields:
8957 cols.append((i,self[colname]))
8958 if colname == unique:
8959 unique_idx = i
            else:
                # every subsequent line contains a record to import
                items = []
8963 for i, field in cols:
8964 try:
8965 items.append(fix(field, line[i], id_map, id_offset))
8966 except ValueError:
8967 raise RuntimeError("Unable to parse line:%s field:%s value:'%s'"
8968 % (lineno+1,field,line[i]))
8969
                if not (id_map or cid is None or id_offset is None or unique_idx):
                    csv_id = long(line[cid])
                    curr_id = self.insert(**dict(items))
                    if first:
                        first = False
                        # if the newly inserted id is ahead of the csv id,
                        # we are extending the table, not restoring it,
                        # so remember the offset between the two
                        id_offset[self._tablename] = (curr_id-csv_id) \
                            if curr_id > csv_id else 0
                    # insert and delete until the table's auto-increment
                    # reaches csv_id + offset, so ids stay consistent
                    while curr_id < csv_id+id_offset[self._tablename]:
                        self._db(self._db[self][colnames[cid]] == curr_id).delete()
                        curr_id = self.insert(**dict(items))
                # check for a duplicate of the 'unique' value and,
                # if present, update instead of insert
                elif not unique_idx:
8987 new_id = self.insert(**dict(items))
8988 else:
8989 unique_value = line[unique_idx]
8990 query = self._db[self][unique] == unique_value
8991 record = self._db(query).select().first()
8992 if record:
8993 record.update_record(**dict(items))
8994 new_id = record[self._id.name]
8995 else:
8996 new_id = self.insert(**dict(items))
8997 if id_map and cid is not None:
8998 id_map_self[long(line[cid])] = new_id
8999
9000 - def as_dict(self, flat=False, sanitize=True):
9012
9013 - def as_xml(self, sanitize=True):
9018
9019 - def as_json(self, sanitize=True):
9024
9025 - def as_yaml(self, sanitize=True):
9030
9033
9034 - def on(self, query):
9035 return Expression(self._db,self._db._adapter.ON,self,query)
9036
def archive_record(qset, fs, archive_table, current_record):
    tablenames = qset.db._adapter.tables(qset.query)
9039 if len(tablenames)!=1: raise RuntimeError("cannot update join")
9040 table = qset.db[tablenames[0]]
9041 for row in qset.select():
9042 fields = archive_table._filter_fields(row)
9043 fields[current_record] = row.id
9044 archive_table.insert(**fields)
9045 return False
9046
class Expression(object):
9051 - def __init__(
9052 self,
9053 db,
9054 op,
9055 first=None,
9056 second=None,
9057 type=None,
9058 **optional_args
9059 ):
9060
9061 self.db = db
9062 self.op = op
9063 self.first = first
9064 self.second = second
9065 self._table = getattr(first,'_table',None)
9066
9067 if not type and first and hasattr(first,'type'):
9068 self.type = first.type
9069 else:
9070 self.type = type
9071 self.optional_args = optional_args
9072
9076
9080
9084
9088
9092
9096
9100
9104
9108
9112
9116
9120
9124
9128
9132
9136
9140
9144
    def __getslice__(self, start, stop):
        db = self.db
9147 if start < 0:
9148 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
9149 else:
9150 pos0 = start + 1
9151
9152 if stop < 0:
9153 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
9154 elif stop == sys.maxint:
9155 length = self.len()
9156 else:
9157 length = '(%s - %s)' % (stop + 1, pos0)
9158 return Expression(db,db._adapter.SUBSTRING,
9159 self, (pos0, length), self.type)
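
    # Example (a sketch; assumes a 'person' table): slicing a string field
    # expands to a SQL SUBSTRING, e.g.
    #
    #     db(db.person.name[:1] == 'J').select()  # names starting with 'J'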
9160
    def __getitem__(self, i):
        return self[i:i + 1]
9163
    def __str__(self):
        return self.db._adapter.expand(self, self.type)
9166
    def __or__(self, other):  # for use in sortby
        db = self.db
        return Expression(db, db._adapter.COMMA, self, other, self.type)
9170
9176
9180
    def __sub__(self, other):
        db = self.db
9183 if self.type in ('integer','bigint'):
9184 result_type = 'integer'
9185 elif self.type in ['date','time','datetime','double','float']:
9186 result_type = 'double'
9187 elif self.type.startswith('decimal('):
9188 result_type = self.type
9189 else:
9190 raise SyntaxError("subtraction operation not supported for type")
9191 return Expression(db,db._adapter.SUB,self,other,result_type)
9192
9196
9200
9204
9208
9212
9216
9220
9224
9228
9229 - def like(self, value, case_sensitive=False):
9230 db = self.db
9231 op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
9232 return Query(db, op, self, value)
9233
9237
9238 - def belongs(self, *value, **kwattr):
9239 """
9240 Accepts the following inputs:
9241 field.belongs(1,2)
9242 field.belongs((1,2))
9243 field.belongs(query)
9244
9245 Does NOT accept:
9246 field.belongs(1)
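
        Example (a sketch; assumes a 'person' table):
            db(db.person.id.belongs(1, 2)).select()
            # a Query argument becomes a nested select on the queried table:
            db(db.person.id.belongs(db.person.name == 'James')).select()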
9247 """
9248 db = self.db
9249 if len(value) == 1:
9250 value = value[0]
9251 if isinstance(value,Query):
9252 value = db(value)._select(value.first._table._id)
9253 elif not isinstance(value, basestring):
9254 value = set(value)
9255 if kwattr.get('null') and None in value:
9256 value.remove(None)
9257 return (self == None) | Query(db, db._adapter.BELONGS, self, value)
9258 return Query(db, db._adapter.BELONGS, self, value)
9259
    def startswith(self, value):
        db = self.db
9262 if not self.type in ('string', 'text', 'json', 'upload'):
9263 raise SyntaxError("startswith used with incompatible field type")
9264 return Query(db, db._adapter.STARTSWITH, self, value)
9265
    def endswith(self, value):
        db = self.db
9268 if not self.type in ('string', 'text', 'json', 'upload'):
9269 raise SyntaxError("endswith used with incompatible field type")
9270 return Query(db, db._adapter.ENDSWITH, self, value)
9271
9272 - def contains(self, value, all=False, case_sensitive=False):
9273 """
        The case_sensitive parameter is only useful for PostgreSQL.
        For other RDBMSs it is ignored and contains is always case-insensitive.
        For MongoDB and GAE contains is always case sensitive.
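
        Example (a sketch; assumes an 'article' table with a
        'list:string' field 'tags'):
            db(db.article.tags.contains(['web', 'db'], all=True)).select()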
9277 """
9278 db = self.db
9279 if isinstance(value,(list, tuple)):
9280 subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
9281 for v in value if str(v).strip()]
9282 if not subqueries:
9283 return self.contains('')
9284 else:
9285 return reduce(all and AND or OR,subqueries)
9286 if not self.type in ('string', 'text', 'json', 'upload') and not self.type.startswith('list:'):
9287 raise SyntaxError("contains used with incompatible field type")
9288 return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)
9289
9293
9294
9295
9296 - def st_asgeojson(self, precision=15, options=0, version=1):
9300
9301 - def st_astext(self):
9302 db = self.db
9303 return Expression(db, db._adapter.ST_ASTEXT, self, type='string')
9304
9308
9312
9316
9320
9321
9322
9326
9330
9334
9338
9342
9346

class SQLCustomType(object):
    """
    allows defining of custom SQL types
9353
9354 Example::
9355
9356 decimal = SQLCustomType(
9357 type ='double',
9358 native ='integer',
9359 encoder =(lambda x: int(float(x) * 100)),
9360 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
9361 )
9362
9363 db.define_table(
9364 'example',
9365 Field('value', type=decimal)
9366 )
9367
9368 :param type: the web2py type (default = 'string')
9369 :param native: the backend type
9370 :param encoder: how to encode the value to store it in the backend
9371 :param decoder: how to decode the value retrieved from the backend
9372 :param validator: what validators to use ( default = None, will use the
9373 default validator for type)
9374 """
9375
9376 - def __init__(
9377 self,
9378 type='string',
9379 native=None,
9380 encoder=None,
9381 decoder=None,
9382 validator=None,
9383 _class=None,
9384 ):
9385
9386 self.type = type
9387 self.native = native
9388 self.encoder = encoder or (lambda x: x)
9389 self.decoder = decoder or (lambda x: x)
9390 self.validator = validator
9391 self._class = _class or type
9392
    def startswith(self, text=None):
        try:
            return self.type.startswith(text)
        except TypeError:
            return False
9398
9401
9404
9407
9409 - def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
9424 return '%s.%s' % (self.tablename, self.name)
9425
9427 - def __init__(self, name, f=None, handler=None):
9431
def list_represent(x, r=None):
    return ', '.join(str(y) for y in x or [])
9434
class Field(Expression):
9436
9437 Virtual = FieldVirtual
9438 Method = FieldMethod
9439 Lazy = FieldMethod
9440
9441 """
9442 an instance of this class represents a database field
9443
9444 example::
9445
        a = Field(name, 'string', length=32, default=None, required=False,
            requires=IS_NOT_EMPTY(), ondelete='CASCADE',
            notnull=False, unique=False,
            widget=None, label=None, comment=None,
            uploadfield=True,     # True means store on disk,
                                  # 'a_field_name' means store in this field in db
                                  # False means file content will be discarded.
            writable=True, readable=True, update=None, authorize=None,
            autodelete=False, represent=None, uploadfolder=None,
            uploadseparate=False, # upload to separate directories by uuid_keys
                                  # first 2 character and tablename.fieldname
                                  # False - old behavior
                                  # True - put uploaded file in
                                  #   <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
                                  # directory
            uploadfs=None)        # a pyfilesystem where to store upload
9462
9463 to be used as argument of DAL.define_table
9464
9465 allowed field types:
9466 string, boolean, integer, double, text, blob,
9467 date, time, datetime, upload, password
9468
9469 """
9470
9471 - def __init__(
9472 self,
9473 fieldname,
9474 type='string',
9475 length=None,
9476 default=DEFAULT,
9477 required=False,
9478 requires=DEFAULT,
9479 ondelete='CASCADE',
9480 notnull=False,
9481 unique=False,
9482 uploadfield=True,
9483 widget=None,
9484 label=None,
9485 comment=None,
9486 writable=True,
9487 readable=True,
9488 update=None,
9489 authorize=None,
9490 autodelete=False,
9491 represent=None,
9492 uploadfolder=None,
9493 uploadseparate=False,
9494 uploadfs=None,
9495 compute=None,
9496 custom_store=None,
9497 custom_retrieve=None,
9498 custom_retrieve_file_properties=None,
9499 custom_delete=None,
9500 filter_in = None,
9501 filter_out = None,
9502 custom_qualifier = None,
9503 map_none = None,
9504 ):
9505 self._db = self.db = None
9506 self.op = None
9507 self.first = None
9508 self.second = None
9509 if isinstance(fieldname, unicode):
9510 try:
9511 fieldname = str(fieldname)
9512 except UnicodeEncodeError:
9513 raise SyntaxError('Field: invalid unicode field name')
9514 self.name = fieldname = cleanup(fieldname)
9515 if not isinstance(fieldname, str) or hasattr(Table, fieldname) or \
9516 fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
9517 raise SyntaxError('Field: invalid field name: %s' % fieldname)
9518 self.type = type if not isinstance(type, (Table,Field)) else 'reference %s' % type
9519 self.length = length if not length is None else DEFAULTLENGTH.get(self.type,512)
9520 self.default = default if default!=DEFAULT else (update or None)
9521 self.required = required
9522 self.ondelete = ondelete.upper()
9523 self.notnull = notnull
9524 self.unique = unique
9525 self.uploadfield = uploadfield
9526 self.uploadfolder = uploadfolder
9527 self.uploadseparate = uploadseparate
9528 self.uploadfs = uploadfs
9529 self.widget = widget
9530 self.comment = comment
9531 self.writable = writable
9532 self.readable = readable
9533 self.update = update
9534 self.authorize = authorize
9535 self.autodelete = autodelete
9536 self.represent = list_represent if \
9537 represent==None and type in ('list:integer','list:string') else represent
9538 self.compute = compute
9539 self.isattachment = True
9540 self.custom_store = custom_store
9541 self.custom_retrieve = custom_retrieve
9542 self.custom_retrieve_file_properties = custom_retrieve_file_properties
9543 self.custom_delete = custom_delete
9544 self.filter_in = filter_in
9545 self.filter_out = filter_out
9546 self.custom_qualifier = custom_qualifier
9547 self.label = label if label!=None else fieldname.replace('_',' ').title()
9548 self.requires = requires if requires!=None else []
9549 self.map_none = map_none
9550
9552 self.__dict__.update(*args,**attributes)
9553
    def clone(self, point_self_references_to=False, **args):
        field = copy.copy(self)
        if point_self_references_to and \
                field.type == 'reference %s' % field._tablename:
            field.type = 'reference %s' % point_self_references_to
        field.__dict__.update(args)
        return field
9561
9562 - def store(self, file, filename=None, path=None):
9563 if self.custom_store:
9564 return self.custom_store(file,filename,path)
9565 if isinstance(file, cgi.FieldStorage):
9566 filename = filename or file.filename
9567 file = file.file
9568 elif not filename:
9569 filename = file.name
9570 filename = os.path.basename(filename.replace('/', os.sep)\
9571 .replace('\\', os.sep))
9572 m = REGEX_STORE_PATTERN.search(filename)
9573 extension = m and m.group('e') or 'txt'
9574 uuid_key = web2py_uuid().replace('-', '')[-16:]
9575 encoded_filename = base64.b16encode(filename).lower()
9576 newfilename = '%s.%s.%s.%s' % \
9577 (self._tablename, self.name, uuid_key, encoded_filename)
9578 newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
9579 self_uploadfield = self.uploadfield
9580 if isinstance(self_uploadfield,Field):
9581 blob_uploadfield_name = self_uploadfield.uploadfield
9582 keys={self_uploadfield.name: newfilename,
9583 blob_uploadfield_name: file.read()}
9584 self_uploadfield.table.insert(**keys)
9585 elif self_uploadfield == True:
9586 if path:
9587 pass
9588 elif self.uploadfolder:
9589 path = self.uploadfolder
9590 elif self.db._adapter.folder:
9591 path = pjoin(self.db._adapter.folder, '..', 'uploads')
9592 else:
9593 raise RuntimeError(
9594 "you must specify a Field(...,uploadfolder=...)")
9595 if self.uploadseparate:
9596 if self.uploadfs:
9597 raise RuntimeError("not supported")
9598 path = pjoin(path,"%s.%s" %(self._tablename, self.name),
9599 uuid_key[:2])
9600 if not exists(path):
9601 os.makedirs(path)
9602 pathfilename = pjoin(path, newfilename)
9603 if self.uploadfs:
9604 dest_file = self.uploadfs.open(newfilename, 'wb')
9605 else:
9606 dest_file = open(pathfilename, 'wb')
9607 try:
9608 shutil.copyfileobj(file, dest_file)
9609 except IOError:
9610 raise IOError(
9611 'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename)
9612 dest_file.close()
9613 return newfilename
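
    # Example (a sketch; assumes a 'doc' table with an upload field
    # 'attachment' and a configured uploadfolder):
    #
    #     stored = db.doc.attachment.store(open('report.pdf', 'rb'),
    #                                      filename='report.pdf')
    #     db.doc.insert(attachment=stored)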
9614
9615 - def retrieve(self, name, path=None, nameonly=False):
9616 """
9617 if nameonly==True return (filename, fullfilename) instead of
9618 (filename, stream)
9619 """
9620 self_uploadfield = self.uploadfield
9621 if self.custom_retrieve:
9622 return self.custom_retrieve(name, path)
9623 import http
9624 if self.authorize or isinstance(self_uploadfield, str):
9625 row = self.db(self == name).select().first()
9626 if not row:
9627 raise http.HTTP(404)
9628 if self.authorize and not self.authorize(row):
9629 raise http.HTTP(403)
9630 file_properties = self.retrieve_file_properties(name,path)
9631 filename = file_properties['filename']
9632 if isinstance(self_uploadfield, str):
9633 stream = StringIO.StringIO(row[self_uploadfield] or '')
9634 elif isinstance(self_uploadfield,Field):
9635 blob_uploadfield_name = self_uploadfield.uploadfield
9636 query = self_uploadfield == name
9637 data = self_uploadfield.table(query)[blob_uploadfield_name]
9638 stream = StringIO.StringIO(data)
9639 elif self.uploadfs:
9640
9641 stream = self.uploadfs.open(name, 'rb')
9642 else:
9643
9644
9645
9646 fullname = pjoin(file_properties['path'],name)
9647 if nameonly:
9648 return (filename, fullname)
9649 stream = open(fullname,'rb')
9650 return (filename, stream)
9651
    def retrieve_file_properties(self, name, path=None):
        m = REGEX_UPLOAD_PATTERN.match(name)
9654 if not m or not self.isattachment:
9655 raise TypeError('Can\'t retrieve %s file properties' % name)
9656 self_uploadfield = self.uploadfield
9657 if self.custom_retrieve_file_properties:
9658 return self.custom_retrieve_file_properties(name, path)
9659 if m.group('name'):
9660 try:
9661 filename = base64.b16decode(m.group('name'), True)
9662 filename = REGEX_CLEANUP_FN.sub('_', filename)
9663 except (TypeError, AttributeError):
9664 filename = name
9665 else:
9666 filename = name
9667
9668 if isinstance(self_uploadfield, (str, Field)):
9669 return dict(path=None,filename=filename)
9670
9671 if not path:
9672 if self.uploadfolder:
9673 path = self.uploadfolder
9674 else:
9675 path = pjoin(self.db._adapter.folder, '..', 'uploads')
9676 if self.uploadseparate:
9677 t = m.group('table')
9678 f = m.group('field')
9679 u = m.group('uuidkey')
9680 path = pjoin(path,"%s.%s" % (t,f),u[:2])
9681 return dict(path=path,filename=filename)
9682
9683
9699
9711
9712 - def count(self, distinct=None):
9714
9715 - def as_dict(self, flat=False, sanitize=True):
9716 attrs = ("name", 'authorize', 'represent', 'ondelete',
9717 'custom_store', 'autodelete', 'custom_retrieve',
9718 'filter_out', 'uploadseparate', 'widget', 'uploadfs',
9719 'update', 'custom_delete', 'uploadfield', 'uploadfolder',
9720 'custom_qualifier', 'unique', 'writable', 'compute',
9721 'map_none', 'default', 'type', 'required', 'readable',
9722 'requires', 'comment', 'label', 'length', 'notnull',
9723 'custom_retrieve_file_properties', 'filter_in')
9724 serializable = (int, long, basestring, float, tuple,
9725 bool, type(None))
9726
9727 def flatten(obj):
9728 if isinstance(obj, dict):
9729 return dict((flatten(k), flatten(v)) for k, v in
9730 obj.items())
9731 elif isinstance(obj, (tuple, list, set)):
9732 return [flatten(v) for v in obj]
9733 elif isinstance(obj, serializable):
9734 return obj
9735 elif isinstance(obj, (datetime.datetime,
9736 datetime.date, datetime.time)):
9737 return str(obj)
9738 else:
9739 return None
9740
9741 d = dict()
9742 if not (sanitize and not (self.readable or self.writable)):
9743 for attr in attrs:
9744 if flat:
9745 d.update({attr: flatten(getattr(self, attr))})
9746 else:
9747 d.update({attr: getattr(self, attr)})
9748 d["fieldname"] = d.pop("name")
9749 return d
9750
9751 - def as_xml(self, sanitize=True):
9758
9759 - def as_json(self, sanitize=True):
9766
9767 - def as_yaml(self, sanitize=True):
9773
9776
    def __str__(self):
        try:
            return '%s.%s' % (self.tablename, self.name)
        except:
            return '<no table>.%s' % self.name
9782
9783
class Query(object):
9785
9786 """
9787 a query object necessary to define a set.
9788 it can be stored or can be passed to DAL.__call__() to obtain a Set
9789
9790 Example::
9791
9792 query = db.users.name=='Max'
9793 set = db(query)
9794 records = set.select()
9795
9796 """
9797
9798 - def __init__(
9799 self,
9800 db,
9801 op,
9802 first=None,
9803 second=None,
9804 ignore_common_filters = False,
9805 **optional_args
9806 ):
9807 self.db = self._db = db
9808 self.op = op
9809 self.first = first
9810 self.second = second
9811 self.ignore_common_filters = ignore_common_filters
9812 self.optional_args = optional_args
9813
9816
    def __str__(self):
        return self.db._adapter.expand(self)

    def __and__(self, other):
        return Query(self.db, self.db._adapter.AND, self, other)

    __rand__ = __and__
9824
    def __or__(self, other):
        return Query(self.db, self.db._adapter.OR, self, other)
9827
9828 __ror__ = __or__
9829
    def __invert__(self):
        if self.op == self.db._adapter.NOT:
9832 return self.first
9833 return Query(self.db,self.db._adapter.NOT,self)
9834
    def __eq__(self, other):
        return repr(self) == repr(other)
9837
    def __ne__(self, other):
        return not (self == other)
9840
9841 - def case(self,t=1,f=0):
9842 return self.db._adapter.CASE(self,t,f)
9843
9844 - def as_dict(self, flat=False, sanitize=True):
9845 """Experimental stuff
9846
9847 This allows to return a plain dictionary with the basic
9848 query representation. Can be used with json/xml services
9849 for client-side db I/O
9850
9851 Example:
9852 >>> q = db.auth_user.id != 0
9853 >>> q.as_dict(flat=True)
9854 {"op": "NE", "first":{"tablename": "auth_user",
9855 "fieldname": "id"},
9856 "second":0}
9857 """
9858
9859 SERIALIZABLE_TYPES = (tuple, dict, set, list, int, long, float,
9860 basestring, type(None), bool)
9861 def loop(d):
9862 newd = dict()
9863 for k, v in d.items():
9864 if k in ("first", "second"):
9865 if isinstance(v, self.__class__):
9866 newd[k] = loop(v.__dict__)
9867 elif isinstance(v, Field):
9868 newd[k] = {"tablename": v._tablename,
9869 "fieldname": v.name}
9870 elif isinstance(v, Expression):
9871 newd[k] = loop(v.__dict__)
9872 elif isinstance(v, SERIALIZABLE_TYPES):
9873 newd[k] = v
9874 elif isinstance(v, (datetime.date,
9875 datetime.time,
9876 datetime.datetime)):
9877 newd[k] = unicode(v)
9878 elif k == "op":
9879 if callable(v):
9880 newd[k] = v.__name__
9881 elif isinstance(v, basestring):
9882 newd[k] = v
9883 else: pass
9884 elif isinstance(v, SERIALIZABLE_TYPES):
9885 if isinstance(v, dict):
9886 newd[k] = loop(v)
9887 else: newd[k] = v
9888 return newd
9889
9890 if flat:
9891 return loop(self.__dict__)
9892 else: return self.__dict__
9893
9894
9895 - def as_xml(self, sanitize=True):
9902
9903 - def as_json(self, sanitize=True):
9910
def xorify(orderby):
    if not orderby:
9913 return None
9914 orderby2 = orderby[0]
9915 for item in orderby[1:]:
9916 orderby2 = orderby2 | item
9917 return orderby2
9918
def use_common_filters(query):
    return (query and hasattr(query, 'ignore_common_filters') and \
            not query.ignore_common_filters)
9922

class Set(object):

    """
    a Set represents a set of records in the database;
    the records are identified by the query=Query(...) object.
    normally the Set is generated by DAL.__call__(Query(...))

    given a set, for example
        set = db(db.users.name=='Max')
    you can:
        set.update(name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
        subset = set(db.users.id<5)
    """
9939
9940 - def __init__(self, db, query, ignore_common_filters = None):
9941 self.db = db
9942 self._db = db
        self.dquery = None

        # if query is a dict, parse it
        if isinstance(query, dict):
            query = self.parse(query)
9948
9949 if not ignore_common_filters is None and \
9950 use_common_filters(query) == ignore_common_filters:
9951 query = copy.copy(query)
9952 query.ignore_common_filters = ignore_common_filters
9953 self.query = query
9954
9957
9958 - def __call__(self, query, ignore_common_filters=False):
9959 if query is None:
9960 return self
9961 elif isinstance(query,Table):
9962 query = self.db._adapter.id_query(query)
9963 elif isinstance(query,str):
9964 query = Expression(self.db,query)
9965 elif isinstance(query,Field):
9966 query = query!=None
9967 if self.query:
9968 return Set(self.db, self.query & query,
9969 ignore_common_filters=ignore_common_filters)
9970 else:
9971 return Set(self.db, query,
9972 ignore_common_filters=ignore_common_filters)
9973
9974 - def _count(self,distinct=None):
9975 return self.db._adapter._count(self.query,distinct)
9976
9977 - def _select(self, *fields, **attributes):
9978 adapter = self.db._adapter
9979 tablenames = adapter.tables(self.query,
9980 attributes.get('join',None),
9981 attributes.get('left',None),
9982 attributes.get('orderby',None),
9983 attributes.get('groupby',None))
9984 fields = adapter.expand_all(fields, tablenames)
9985 return adapter._select(self.query,fields,attributes)
9986
    def _delete(self):
        db = self.db
9989 tablename = db._adapter.get_table(self.query)
9990 return db._adapter._delete(tablename,self.query)
9991
9992 - def _update(self, **update_fields):
9997
9998 - def as_dict(self, flat=False, sanitize=True):
9999 if flat:
10000 uid = dbname = uri = None
10001 codec = self.db._db_codec
10002 if not sanitize:
10003 uri, dbname, uid = (self.db._dbname, str(self.db),
10004 self.db._db_uid)
10005 d = {"query": self.query.as_dict(flat=flat)}
10006 d["db"] = {"uid": uid, "codec": codec,
10007 "name": dbname, "uri": uri}
10008 return d
10009 else: return self.__dict__
10010
10011 - def as_xml(self, sanitize=True):
10018
10019 - def as_json(self, sanitize=True):
10026
10027 - def parse(self, dquery):
10028 "Experimental: Turn a dictionary into a Query object"
10029 self.dquery = dquery
10030 return self.build(self.dquery)
10031
    def build(self, d):
        "Experimental: see .parse()"
10034 op, first, second = (d["op"], d["first"],
10035 d.get("second", None))
10036 left = right = built = None
10037
10038 if op in ("AND", "OR"):
10039 if not (type(first), type(second)) == (dict, dict):
10040 raise SyntaxError("Invalid AND/OR query")
10041 if op == "AND":
10042 built = self.build(first) & self.build(second)
10043 else: built = self.build(first) | self.build(second)
10044
10045 elif op == "NOT":
10046 if first is None:
10047 raise SyntaxError("Invalid NOT query")
10048 built = ~self.build(first)
10049 else:
10050
10051 for k, v in {"left": first, "right": second}.items():
10052 if isinstance(v, dict) and v.get("op"):
10053 v = self.build(v)
10054 if isinstance(v, dict) and ("tablename" in v):
10055 v = self.db[v["tablename"]][v["fieldname"]]
10056 if k == "left": left = v
10057 else: right = v
10058
10059 if hasattr(self.db._adapter, op):
10060 opm = getattr(self.db._adapter, op)
10061
10062 if op == "EQ": built = left == right
10063 elif op == "NE": built = left != right
10064 elif op == "GT": built = left > right
10065 elif op == "GE": built = left >= right
10066 elif op == "LT": built = left < right
10067 elif op == "LE": built = left <= right
10068 elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
10069 built = Expression(self.db, opm)
10070 elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
10071 "COALESCE_ZERO", "RAW", "INVERT"):
10072 built = Expression(self.db, opm, left)
10073 elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
10074 "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
10075 "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
10076 "MOD", "AS", "ON", "COMMA", "NOT_NULL",
10077 "COALESCE", "CONTAINS", "BELONGS"):
10078 built = Expression(self.db, opm, left, right)
10079
10080 elif not (left or right): built = Expression(self.db, op)
10081 else:
10082 raise SyntaxError("Operator not supported: %s" % op)
10083
10084 return built
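
    # Example (a sketch; assumes a 'person' table): a Query serialized
    # with as_dict(flat=True) can be parsed back and executed:
    #
    #     d = (db.person.id > 0).as_dict(flat=True)
    #     rows = db(d).select(db.person.ALL)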
10085
    def isempty(self):
        return not self.select(limitby=(0,1), orderby_on_limitby=False)
10088
10089 - def count(self,distinct=None, cache=None):
10090 db = self.db
10091 if cache:
10092 cache_model, time_expire = cache
10093 sql = self._count(distinct=distinct)
10094 key = db._uri + '/' + sql
10095 if len(key)>200: key = hashlib_md5(key).hexdigest()
10096 return cache_model(
10097 key,
10098 (lambda self=self,distinct=distinct: \
10099 db._adapter.count(self.query,distinct)),
10100 time_expire)
10101 return db._adapter.count(self.query,distinct)
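
    # Example (a sketch; cache.ram here stands for web2py's RAM cache):
    #
    #     n = db(db.person).count(cache=(cache.ram, 3600))
    #     # the count is cached for an hour, keyed on the generated SQL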
10102
10103 - def select(self, *fields, **attributes):
10104 adapter = self.db._adapter
10105 tablenames = adapter.tables(self.query,
10106 attributes.get('join',None),
10107 attributes.get('left',None),
10108 attributes.get('orderby',None),
10109 attributes.get('groupby',None))
10110 fields = adapter.expand_all(fields, tablenames)
10111 return adapter.select(self.query,fields,attributes)
10112
10115
    def delete(self):
        db = self.db
10118 tablename = db._adapter.get_table(self.query)
10119 table = db[tablename]
10120 if any(f(self) for f in table._before_delete): return 0
10121 ret = db._adapter.delete(tablename,self.query)
10122 ret and [f(self) for f in table._after_delete]
10123 return ret
10124
10125 - def update(self, **update_fields):
10126 db = self.db
10127 tablename = db._adapter.get_table(self.query)
10128 table = db[tablename]
10129 table._attempt_upload(update_fields)
10130 if any(f(self,update_fields) for f in table._before_update):
10131 return 0
10132 fields = table._listify(update_fields,update=True)
10133 if not fields:
10134 raise SyntaxError("No fields to update")
10135 ret = db._adapter.update("%s" % table,self.query,fields)
10136 ret and [f(self,update_fields) for f in table._after_update]
10137 return ret
10138
    def update_naive(self, **update_fields):
        """
10141 same as update but does not call table._before_update and _after_update
10142 """
10143 tablename = self.db._adapter.get_table(self.query)
10144 table = self.db[tablename]
10145 fields = table._listify(update_fields,update=True)
10146 if not fields: raise SyntaxError("No fields to update")
10147
10148 ret = self.db._adapter.update("%s" % table,self.query,fields)
10149 return ret
10150
    def validate_and_update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
10153 response = Row()
10154 response.errors = Row()
10155 new_fields = copy.copy(update_fields)
10156 for key,value in update_fields.iteritems():
10157 value,error = self.db[tablename][key].validate(value)
10158 if error:
10159 response.errors[key] = error
10160 else:
10161 new_fields[key] = value
10162 table = self.db[tablename]
10163 if response.errors:
10164 response.updated = None
10165 else:
10166 if not any(f(self,new_fields) for f in table._before_update):
10167 fields = table._listify(new_fields,update=True)
10168 if not fields: raise SyntaxError("No fields to update")
10169 ret = self.db._adapter.update(tablename,self.query,fields)
10170 ret and [f(self,new_fields) for f in table._after_update]
10171 else:
10172 ret = 0
10173 response.updated = ret
10174 return response
10175
    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]
10178
10179 if upload_fields:
10180 fields = upload_fields.keys()
10181 else:
10182 fields = table.fields
10183 fields = [f for f in fields if table[f].type == 'upload'
10184 and table[f].uploadfield == True
10185 and table[f].autodelete]
10186 if not fields:
10187 return False
10188 for record in self.select(*[table[f] for f in fields]):
10189 for fieldname in fields:
10190 field = table[fieldname]
10191 oldname = record.get(fieldname, None)
10192 if not oldname:
10193 continue
10194 if upload_fields and oldname == upload_fields[fieldname]:
10195 continue
10196 if field.custom_delete:
10197 field.custom_delete(oldname)
10198 else:
10199 uploadfolder = field.uploadfolder
10200 if not uploadfolder:
10201 uploadfolder = pjoin(
10202 self.db._adapter.folder, '..', 'uploads')
10203 if field.uploadseparate:
10204 items = oldname.split('.')
10205 uploadfolder = pjoin(
10206 uploadfolder,
10207 "%s.%s" % (items[0], items[1]),
10208 items[2][:2])
10209 oldpath = pjoin(uploadfolder, oldname)
10210 if exists(oldpath):
10211 os.unlink(oldpath)
10212 return False
10213
class RecordUpdater(object):
    def __init__(self, colset, table, id):
10216 self.colset, self.db, self.tablename, self.id = \
10217 colset, table._db, table._tablename, id
10218
    def __call__(self, **fields):
        colset, db, tablename, id = self.colset, self.db, self.tablename, self.id
10221 table = db[tablename]
10222 newfields = fields or dict(colset)
10223 for fieldname in newfields.keys():
10224 if not fieldname in table.fields or table[fieldname].type=='id':
10225 del newfields[fieldname]
10226 table._db(table._id==id,ignore_common_filters=True).update(**newfields)
10227 colset.update(newfields)
10228 return colset
10229
class RecordDeleter(object):
    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id
    def __call__(self):
        return self.db(self.db[self.tablename]._id==self.id).delete()

class LazyReferenceGetter(object):
    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id
    def __call__(self, other_tablename):
        if self.db._lazy_tables is False:
            raise AttributeError()
        table = self.db[self.tablename]
        other_table = self.db[other_tablename]
        for rfield in table._referenced_by:
            if rfield.table == other_table:
                return LazySet(rfield, self.id)
        raise AttributeError()

class LazySet(object):
    def __init__(self, field, id):
        self.db, self.tablename, self.fieldname, self.id = \
            field.db, field._tablename, field.name, id
    def _getset(self):
        query = self.db[self.tablename][self.fieldname]==self.id
        return Set(self.db,query)
    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)
    def _count(self,distinct=None):
        return self._getset()._count(distinct)
    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields,**attributes)
    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)
    def count(self,distinct=None, cache=None):
        return self._getset().count(distinct,cache)
    def select(self, *fields, **attributes):
        return self._getset().select(*fields,**attributes)
    def update(self, **update_fields):
        return self._getset().update(**update_fields)

class VirtualCommand(object):
    def __init__(self,method,row):
        self.method=method
        self.row=row
    def __call__(self,*args,**kwargs):
        return self.method(self.row,*args,**kwargs)

def lazy_virtualfield(f):
    f.__lazy__ = True
    return f

class Rows(object):

    """
    A wrapper for the return value of a select. It essentially represents
    a table: it is iterable, and each row behaves like a dictionary.
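
    Example (illustrative; assumes a 'person' table with a 'name' field):

        rows = db(db.person).select()
        for row in rows:
            print row.name
        first = rows.first()   # first Row, or None if the result is empty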
    """

    def __init__(
        self,
        db=None,
        records=None,
        colnames=None,
        compact=True,
        rawrows=None
        ):
        self.db = db
        # use fresh lists to avoid sharing mutable default arguments
        self.records = records if records is not None else []
        self.colnames = colnames if colnames is not None else []
        self.compact = compact
        self.response = rawrows

    def __repr__(self):
        return '<Rows (%s)>' % len(self.records)

10326 """
10327 db.define_table('x',Field('number','integer'))
10328 if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
10329
10330 from gluon.dal import lazy_virtualfield
10331
10332 class MyVirtualFields(object):
10333 # normal virtual field (backward compatible, discouraged)
10334 def normal_shift(self): return self.x.number+1
10335 # lazy virtual field (because of @staticmethod)
10336 @lazy_virtualfield
10337 def lazy_shift(instance,row,delta=4): return row.x.number+delta
10338 db.x.virtualfields.append(MyVirtualFields())
10339
10340 for row in db(db.x).select():
10341 print row.number, row.normal_shift, row.lazy_shift(delta=7)
10342 """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename,virtualfields) in keyed_virtualfields.iteritems():
                attributes = dir(virtualfields)
                if not tablename in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                updated = False
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields,attribute)
                        if hasattr(method,'__lazy__'):
                            box[attribute]=VirtualCommand(method,row)
                        elif type(method)==types.MethodType:
                            if not updated:
                                virtualfields.__dict__.update(row)
                                updated = True
                            box[attribute]=method()
        return self

    def __and__(self,other):
        if self.colnames!=other.colnames:
            raise Exception('Cannot & incompatible Rows objects')
        records = self.records+other.records
        return Rows(self.db,records,self.colnames)

    def __or__(self,other):
        if self.colnames!=other.colnames:
            raise Exception('Cannot | incompatible Rows objects')
        # work on a copy so that | does not mutate the left operand
        records = list(self.records)
        records += [record for record in other.records
                    if not record in records]
        return Rows(self.db,records,self.colnames)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db,self.records[a:b],self.colnames,compact=self.compact)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row

    def __iter__(self):
        """
        iterator over records
        """
        for i in xrange(len(self)):
            yield self[i]

    def __str__(self):
        """
        serializes the table into a csv string
        """
        s = StringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()

    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]

    def find(self,f,limitby=None):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
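
        Example (illustrative; assumes a 'person' table with a 'name' field):

            rows = db(db.person).select()
            js = rows.find(lambda row: row.name.startswith('J'),
                           limitby=(0, 10))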
        """
        if not self:
            return Rows(self.db, [], self.colnames)
        records = []
        if limitby:
            a,b = limitby
        else:
            a,b = 0,len(self)
        k = 0
        for row in self:
            if f(row):
                if a<=k: records.append(row)
                k += 1
                if k==b: break
        return Rows(self.db, records, self.colnames)


    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the
        function f, and returns a new Rows object containing the removed
        elements
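
        Example (illustrative; assumes a 'person' table with a 'name' field):

            rows = db(db.person).select()
            removed = rows.exclude(lambda row: row.name == 'Jim')
            # rows now holds only the records that did not match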
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i=0
        while i<len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)

    def sort(self, f, reverse=False):
        """
        returns a new Rows object with the records sorted by the key
        function f (the original object is not sorted in place)
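
        Example (illustrative; assumes a 'person' table with a 'name' field):

            rows = db(db.person).select()
            by_name_desc = rows.sort(lambda row: row.name, reverse=True)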
        """
        rows = Rows(self.db,[],self.colnames,compact=False)
        rows.records = sorted(self,key=f,reverse=reverse)
        return rows

    def group_by_value(self, *fields, **args):
        """
        regroups the rows by the values of one or more of their fields
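
        Example (illustrative; assumes a 'person' table with a 'name' field):

            rows = db(db.person).select()
            groups = rows.group_by_value('name')
            # groups maps each distinct name to a list of matching rows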
        """
        one_result = False
        if 'one_result' in args:
            one_result = args['one_result']

        def build_fields_struct(row, fields, num, groups):
            '''
            helper function: recursively files `row` under `groups`,
            keyed by the values of the listed fields
            '''
            if num > len(fields)-1:
                if one_result:
                    return row
                else:
                    return [row]

            key = fields[num]
            value = row[key]

            if value not in groups:
                groups[value] = build_fields_struct(row, fields, num+1, {})
            else:
                struct = build_fields_struct(row, fields, num+1, groups[value])

                if type(struct) == type(dict()):
                    # struct is groups[value] itself, already updated in place
                    groups[value].update()
                elif type(struct) == type(list()):
                    # deepest level with one_result off: extend the list
                    groups[value] += struct
                else:
                    # deepest level with one_result on: keep the single row
                    groups[value] = struct

            return groups

        if len(fields) == 0:
            return self

        # if the select returned no results
        if not self.records:
            return {}

        grouped_row_group = dict()

        for row in self:
            build_fields_struct(row, fields, 0, grouped_row_group)

        return grouped_row_group

    def render(self, i=None, fields=None):
        """
        Takes an index and returns a copy of the indexed row with values
        transformed via the "represent" attributes of the associated fields.

        If no index is specified, a generator is returned for iteration
        over all the rows.

        fields -- a list of fields to transform (if None, all fields with
        "represent" attributes will be transformed).
        """
        if i is None:
            return (self.render(i, fields=fields) for i in range(len(self)))
        import sqlhtml
        row = copy.deepcopy(self.records[i])
        keys = row.keys()
        tables = [f.tablename for f in fields] if fields \
            else [k for k in keys if k != '_extra']
        for table in tables:
            repr_fields = [f.name for f in fields if f.tablename == table] \
                if fields else [k for k in row[table].keys()
                                if (hasattr(self.db[table], k) and
                                    isinstance(self.db[table][k], Field)
                                    and self.db[table][k].represent)]
            for field in repr_fields:
                row[table][field] = sqlhtml.represent(
                    self.db[table][field], row[table][field], row[table])
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row

    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a list of rows
        :param storage_to_dict: when True each row is converted to a plain dict (default True)
        :param datetime_to_str: convert datetime fields to strings (default False)
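
        Example (illustrative; assumes a 'person' table):

            rows = db(db.person).select()
            data = rows.as_list(datetime_to_str=True)  # list of plain dicts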
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str, custom_types) for item in self]
        else:
            items = [item for item in self]
        self.compact = oc  # restore the original compact flag
        return items


    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)

        :param key: the name of the field to be used as dict key, normally the id
        :param compact: whether to use the compact row representation (default True)
        :param storage_to_dict: when True each row is converted to a plain dict (default True)
        :param datetime_to_str: convert datetime fields to strings (default False)
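
        Example (illustrative; assumes a 'person' table with a 'name' field):

            rows = db(db.person).select()
            by_id = rows.as_dict()              # {1: {...}, 2: {...}, ...}
            by_name = rows.as_dict(key='name')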
        """
        # if the rows contain joined tables, a plain field name cannot
        # serve as the key, so auto-incrementing integer keys are used
        multi = False
        f = self.first()
        if f and isinstance(key, basestring):
            multi = any([isinstance(v, f.__class__) for v in f.values()])
            if (not "." in key) and multi:

                def new_key():
                    i = 0
                    while True:
                        yield i
                        i += 1
                key_generator = new_key()
                key = lambda r: key_generator.next()

        rows = self.as_list(compact, storage_to_dict, datetime_to_str, custom_types)
        if isinstance(key,str) and key.count('.')==1:
            (table, field) = key.split('.')
            return dict([(r[table][field],r) for r in rows])
        elif isinstance(key,str):
            return dict([(r[key],r) for r in rows])
        else:
            return dict([(key(r),r) for r in rows])

10614 """
10615 export data to csv, the first line contains the column names
10616
10617 :param ofile: where the csv must be exported to
10618 :param null: how null values must be represented (default '<NULL>')
10619 :param delimiter: delimiter to separate values (default ',')
10620 :param quotechar: character to use to quote string values (default '"')
10621 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
10622 :param represent: use the fields .represent value (default False)
10623 :param colnames: list of column names to use (default self.colnames)
10624 This will only work when exporting rows objects!!!!
10625 DO NOT use this with db.export_to_csv()
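
        Example (illustrative; 'rows' is any Rows object):

            stream = StringIO.StringIO()
            rows.export_to_csv_file(stream, represent=True, delimiter=';')
            csv_text = stream.getvalue()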
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames',True)

        if write_colnames:
            writer.writerow(colnames)

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as utf-8
            - None values are replaced with the given representation (default <NULL>)
            """
            if value is None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value,Reference):
                return long(value)
            elif hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list,tuple)):
                return bar_encode(value)
            return value

        for record in self:
            row = []
            for col in colnames:
                if not REGEX_TABLE_DOT_FIELD.match(col):
                    row.append(record._extra[col])
                else:
                    (t, f) = col.split('.')
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row,dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type=='blob' and not value is None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        value = field.represent(value)
                    row.append(none_exception(value))
            writer.writerow(row)

    def xml(self,strict=False,row_name='row',rows_name='rows'):
        """
        serializes the table using sqlhtml.SQLTABLE (if present);
        with strict=True, plain XML is generated instead
        """
        if strict:
            return '<%s>\n%s\n</%s>' % (rows_name,
                '\n'.join(row.as_xml(row_name=row_name,
                                     colnames=self.colnames) for
                          row in self), rows_name)

        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()

    def as_xml(self,row_name='row',rows_name='rows'):
        return self.xml(strict=True, row_name=row_name, rows_name=rows_name)

    def as_json(self, mode='object', default=None):
        """
        serializes the rows to a JSON list of objects;
        mode='object' is not implemented (it should return a nested
        object structure)
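
        Example (illustrative; assumes a 'person' table):

            rows = db(db.person).select()
            json_list = rows.as_json()  # '[{"id": 1, "name": "Max"}, ...]'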
        """
        items = [record.as_json(mode=mode, default=default,
                                serialize=False,
                                colnames=self.colnames) for
                 record in self]

        if have_serializers:
            return serializers.json(items,
                                    default=default or
                                    serializers.custom_json)
        elif simplejson:
            return simplejson.dumps(items)
        else:
            raise RuntimeError("missing simplejson")

    as_csv = __str__
    json = as_json



def test_all():
10725 """
10726
10727 >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
10728 >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
10729 >>> tmp = db.define_table('users',\
10730 Field('stringf', 'string', length=32, required=True),\
10731 Field('booleanf', 'boolean', default=False),\
10732 Field('passwordf', 'password', notnull=True),\
10733 Field('uploadf', 'upload'),\
10734 Field('blobf', 'blob'),\
10735 Field('integerf', 'integer', unique=True),\
10736 Field('doublef', 'double', unique=True,notnull=True),\
10737 Field('jsonf', 'json'),\
10738 Field('datef', 'date', default=datetime.date.today()),\
10739 Field('timef', 'time'),\
10740 Field('datetimef', 'datetime'),\
10741 migrate='test_user.table')
10742
10743 Insert a field
10744
10745 >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
10746 uploadf=None, integerf=5, doublef=3.14,\
10747 jsonf={"j": True},\
10748 datef=datetime.date(2001, 1, 1),\
10749 timef=datetime.time(12, 30, 15),\
10750 datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
10751 1
10752
10753 Drop the table
10754
10755 >>> db.users.drop()
10756
10757 Examples of insert, select, update, delete
10758
10759 >>> tmp = db.define_table('person',\
10760 Field('name'),\
10761 Field('birth','date'),\
10762 migrate='test_person.table')
10763 >>> person_id = db.person.insert(name='Marco',birth='2005-06-22')
10764 >>> person_id = db.person.insert(name='Massimo',birth='1971-12-21')
10765
10766 commented len(db().select(db.person.ALL))
10767 commented 2
10768
10769 >>> me = db(db.person.id==person_id).select()[0] # test select
10770 >>> me.name
10771 'Massimo'
10772 >>> db.person[2].name
10773 'Massimo'
10774 >>> db.person(2).name
10775 'Massimo'
10776 >>> db.person(name='Massimo').name
10777 'Massimo'
10778 >>> db.person(db.person.name=='Massimo').name
10779 'Massimo'
10780 >>> row = db.person[2]
10781 >>> row.name == row['name'] == row['person.name'] == row('person.name')
10782 True
10783 >>> db(db.person.name=='Massimo').update(name='massimo') # test update
10784 1
10785 >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
10786 1
10787
10788 Update a single record
10789
10790 >>> me.update_record(name="Max")
10791 <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
10792 >>> me.name
10793 'Max'
10794
10795 Examples of complex search conditions
10796
10797 >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
10798 1
10799 >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
10800 1
10801 >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
10802 1
10803 >>> me = db(db.person.id==person_id).select(db.person.name)[0]
10804 >>> me.name
10805 'Max'
10806
10807 Examples of search conditions using extract from date/datetime/time
10808
10809 >>> len(db(db.person.birth.month()==12).select())
10810 1
10811 >>> len(db(db.person.birth.year()>1900).select())
10812 1
10813
10814 Example of usage of NULL
10815
10816 >>> len(db(db.person.birth==None).select()) ### test NULL
10817 0
10818 >>> len(db(db.person.birth!=None).select()) ### test NULL
10819 1
10820
10821 Examples of search conditions using lower, upper, and like
10822
10823 >>> len(db(db.person.name.upper()=='MAX').select())
10824 1
10825 >>> len(db(db.person.name.like('%ax')).select())
10826 1
10827 >>> len(db(db.person.name.upper().like('%AX')).select())
10828 1
10829 >>> len(db(~db.person.name.upper().like('%AX')).select())
10830 0
10831
10832 orderby, groupby and limitby
10833
10834 >>> people = db().select(db.person.name, orderby=db.person.name)
10835 >>> order = db.person.name|~db.person.birth
10836 >>> people = db().select(db.person.name, orderby=order)
10837
10838 >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
10839
10840 >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))
10841
10842 Example of one 2 many relation
10843
10844 >>> tmp = db.define_table('dog',\
10845 Field('name'),\
10846 Field('birth','date'),\
10847 Field('owner',db.person),\
10848 migrate='test_dog.table')
10849 >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
10850 1
10851
10852 A simple JOIN
10853
10854 >>> len(db(db.dog.owner==db.person.id).select())
10855 1
10856
10857 >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
10858 1
10859
10860 Drop tables
10861
10862 >>> db.dog.drop()
10863 >>> db.person.drop()
10864
10865 Example of many 2 many relation and Set
10866
10867 >>> tmp = db.define_table('author', Field('name'),\
10868 migrate='test_author.table')
10869 >>> tmp = db.define_table('paper', Field('title'),\
10870 migrate='test_paper.table')
10871 >>> tmp = db.define_table('authorship',\
10872 Field('author_id', db.author),\
10873 Field('paper_id', db.paper),\
10874 migrate='test_authorship.table')
10875 >>> aid = db.author.insert(name='Massimo')
10876 >>> pid = db.paper.insert(title='QCD')
10877 >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)
10878
10879 Define a Set
10880
10881 >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
10882 >>> rows = authored_papers.select(db.author.name, db.paper.title)
10883 >>> for row in rows: print row.author.name, row.paper.title
10884 Massimo QCD
10885
10886 Example of search condition using belongs
10887
10888 >>> set = (1, 2, 3)
10889 >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
10890 >>> print rows[0].title
10891 QCD
10892
10893 Example of search condition using nested select
10894
10895 >>> nested_select = db()._select(db.authorship.paper_id)
10896 >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
10897 >>> print rows[0].title
10898 QCD
10899
10900 Example of expressions
10901
10902 >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
10903 >>> db(mynumber).delete()
10904 0
10905 >>> for i in range(10): tmp = mynumber.insert(x=i)
10906 >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
10907 45
10908
10909 >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
10910 5
10911
10912 Output in csv
10913
10914 >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
10915 author.name,paper.title\r
10916 Massimo,QCD
10917
10918 Delete all leftover tables
10919
10920 >>> DAL.distributed_transaction_commit(db)
10921
10922 >>> db.mynumber.drop()
10923 >>> db.authorship.drop()
10924 >>> db.author.drop()
10925 >>> db.paper.drop()
10926 """


# legacy aliases kept for backward compatibility with the old SQL* API
SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field
DAL.Table = Table


def geoPoint(x,y):
    return "POINT (%f %f)" % (x,y)

def geoLine(*line):
    return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)

def geoPolygon(*line):
    return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)


if __name__ == '__main__':
    import doctest
    doctest.testmod()