Columns: index: int64 (0 – 731k); package: string (2 – 98 chars); name: string (1 – 76 chars); docstring: string (0 – 281k chars); code: string (4 – 1.07M chars); signature: string (2 – 42.8k chars)
27,701
sqlobject.col
_maxdbType
null
def _maxdbType(self):
    raise TypeError("Enum type is not supported on MAX DB")
(self)
27,702
sqlobject.col
_mssqlType
null
def _mssqlType(self):
    return self._postgresType()
(self)
27,703
sqlobject.col
_mysqlType
null
def _mysqlType(self):
    # We need to map None in the enum expression to an appropriate
    # condition on NULL
    if None in self.enumValues:
        return "ENUM(%s)" % ', '.join(
            [sqlbuilder.sqlrepr(v, 'mysql')
             for v in self.enumValues if v is not None])
    else:
        return "ENUM(%s) NOT NULL" % ', '.join(
            [sqlbuilder.sqlrepr(v, 'mysql') for v in self.enumValues])
(self)
27,704
sqlobject.col
_postgresType
null
def _postgresType(self):
    length = max(map(self._getlength, self.enumValues))
    enumValues = ', '.join(
        [sqlbuilder.sqlrepr(v, 'postgres') for v in self.enumValues])
    checkConstraint = "CHECK (%s in (%s))" % (self.dbName, enumValues)
    return "VARCHAR(%i) %s" % (length, checkConstraint)
(self)
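Taken together, the hooks above render one enum column declaration differently per backend: MySQL gets a native ENUM (NOT NULL when None is absent from enumValues), PostgreSQL a VARCHAR with a CHECK constraint. A minimal usage sketch, assuming sqlobject is installed; the Shirt class and the SQLite in-memory URI are made up for illustration:

from sqlobject import SQLObject, EnumCol, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')

class Shirt(SQLObject):
    # Per _mysqlType above, MySQL would render this as
    # ENUM('S', 'M', 'L') NOT NULL; per _postgresType, PostgreSQL as
    # VARCHAR(1) CHECK (size in ('S', 'M', 'L')).
    size = EnumCol(enumValues=['S', 'M', 'L'])

Shirt.createTable()
print(Shirt.createTableSQL()[0])  # inspect the generated DDL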
27,709
sqlobject.col
autoConstraints
null
def autoConstraints(self):
    return [constrs.isString, constrs.InList(self.enumValues)]
(self)
27,711
sqlobject.col
createValidators
null
def createValidators(self):
    return [EnumValidator(name=self.name,
                          enumValues=self.enumValues,
                          notNone=self.notNone)] + \
        super(SOEnumCol, self).createValidators()
(self)
27,720
sqlobject.col
SOFloatCol
null
class SOFloatCol(SOCol):
    # 3-03 @@: support precision (e.g., DECIMAL)

    def autoConstraints(self):
        return [constrs.isFloat]

    def createValidators(self):
        return [FloatValidator(name=self.name)] + \
            super(SOFloatCol, self).createValidators()

    def _sqlType(self):
        return 'FLOAT'

    def _mysqlType(self):
        return "DOUBLE PRECISION"
(name, soClass, creationOrder, dbName=None, default=<class 'sqlobject.sqlbuilder.NoDefault'>, defaultSQL=None, foreignKey=None, alternateID=False, alternateMethodName=None, constraints=None, notNull=<class 'sqlobject.sqlbuilder.NoDefault'>, notNone=<class 'sqlobject.sqlbuilder.NoDefault'>, unique=<class 'sqlobject.sqlbuilder.NoDefault'>, sqlType=None, columnDef=None, validator=None, validator2=None, immutable=False, cascade=None, lazy=False, noCache=False, forceDBName=False, title=None, tags=[], origName=None, dbEncoding=None, extra_vars=None)
27,733
sqlobject.col
_mysqlType
null
def _mysqlType(self):
    return "DOUBLE PRECISION"
(self)
27,736
sqlobject.col
_sqlType
null
def _sqlType(self):
    return 'FLOAT'
(self)
27,739
sqlobject.col
autoConstraints
null
def autoConstraints(self):
    return [constrs.isFloat]
(self)
27,741
sqlobject.col
createValidators
null
def createValidators(self):
    return [FloatValidator(name=self.name)] + \
        super(SOFloatCol, self).createValidators()
(self)
27,750
sqlobject.col
SOForeignKey
null
class SOForeignKey(SOKeyCol):

    def __init__(self, **kw):
        foreignKey = kw['foreignKey']
        style = kw['soClass'].sqlmeta.style
        if kw.get('name'):
            kw['origName'] = kw['name']
            kw['name'] = style.instanceAttrToIDAttr(kw['name'])
        else:
            kw['name'] = style.instanceAttrToIDAttr(
                style.pythonClassToAttr(foreignKey))
        super(SOForeignKey, self).__init__(**kw)

    def createValidators(self):
        return [ForeignKeyValidator(name=self.name)] + \
            super(SOForeignKey, self).createValidators()

    def _idType(self):
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        return other.sqlmeta.idType

    def sqliteCreateSQL(self):
        sql = SOKeyCol.sqliteCreateSQL(self)
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        if self.cascade is not None:
            if self.cascade == 'null':
                action = 'ON DELETE SET NULL'
            elif self.cascade:
                action = 'ON DELETE CASCADE'
            else:
                action = 'ON DELETE RESTRICT'
        else:
            action = ''
        constraint = ('CONSTRAINT %(colName)s_exists '
                      # 'FOREIGN KEY(%(colName)s) '
                      'REFERENCES %(tName)s(%(idName)s) '
                      '%(action)s' %
                      {'tName': tName,
                       'colName': self.dbName,
                       'idName': idName,
                       'action': action})
        sql = ' '.join([sql, constraint])
        return sql

    def postgresCreateSQL(self):
        sql = SOKeyCol.postgresCreateSQL(self)
        return sql

    def postgresCreateReferenceConstraint(self):
        sTName = self.soClass.sqlmeta.table
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        if self.cascade is not None:
            if self.cascade == 'null':
                action = 'ON DELETE SET NULL'
            elif self.cascade:
                action = 'ON DELETE CASCADE'
            else:
                action = 'ON DELETE RESTRICT'
        else:
            action = ''
        constraint = ('ALTER TABLE %(sTName)s '
                      'ADD CONSTRAINT %(colName)s_exists '
                      'FOREIGN KEY (%(colName)s) '
                      'REFERENCES %(tName)s (%(idName)s) '
                      '%(action)s' %
                      {'tName': tName,
                       'colName': self.dbName,
                       'idName': idName,
                       'action': action,
                       'sTName': sTName})
        return constraint

    def mysqlCreateReferenceConstraint(self):
        sTName = self.soClass.sqlmeta.table
        sTLocalName = sTName.split('.')[-1]
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        if self.cascade is not None:
            if self.cascade == 'null':
                action = 'ON DELETE SET NULL'
            elif self.cascade:
                action = 'ON DELETE CASCADE'
            else:
                action = 'ON DELETE RESTRICT'
        else:
            action = ''
        constraint = ('ALTER TABLE %(sTName)s '
                      'ADD CONSTRAINT %(sTLocalName)s_%(colName)s_exists '
                      'FOREIGN KEY (%(colName)s) '
                      'REFERENCES %(tName)s (%(idName)s) '
                      '%(action)s' %
                      {'tName': tName,
                       'colName': self.dbName,
                       'idName': idName,
                       'action': action,
                       'sTName': sTName,
                       'sTLocalName': sTLocalName})
        return constraint

    def mysqlCreateSQL(self, connection=None):
        return SOKeyCol.mysqlCreateSQL(self, connection)

    def sybaseCreateSQL(self):
        sql = SOKeyCol.sybaseCreateSQL(self)
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        reference = ('REFERENCES %(tName)s(%(idName)s) ' %
                     {'tName': tName, 'idName': idName})
        sql = ' '.join([sql, reference])
        return sql

    def sybaseCreateReferenceConstraint(self):
        # @@: Code from above should be moved here
        return None

    def mssqlCreateSQL(self, connection=None):
        sql = SOKeyCol.mssqlCreateSQL(self, connection)
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        reference = ('REFERENCES %(tName)s(%(idName)s) ' %
                     {'tName': tName, 'idName': idName})
        sql = ' '.join([sql, reference])
        return sql

    def mssqlCreateReferenceConstraint(self):
        # @@: Code from above should be moved here
        return None

    def maxdbCreateSQL(self):
        other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
        fidName = self.dbName
        # I assume that foreign key name is identical
        # to the id of the reference table
        sql = ' '.join([fidName, self._maxdbType()])
        tName = other.sqlmeta.table
        idName = self.refColumn or other.sqlmeta.idName
        sql += ',\nFOREIGN KEY (%s) REFERENCES %s(%s)' % (fidName, tName,
                                                          idName)
        return sql

    def maxdbCreateReferenceConstraint(self):
        # @@: Code from above should be moved here
        return None
(**kw)
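The cascade logic repeated in each backend method above maps the column's cascade option onto an ON DELETE action. A minimal sketch of declaring such a key, assuming sqlobject is installed; Author, Book, and the SQLite in-memory URI are illustrative:

from sqlobject import SQLObject, StringCol, ForeignKey, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')

class Author(SQLObject):
    name = StringCol(length=100)

class Book(SQLObject):
    title = StringCol(length=100)
    # cascade=True -> ON DELETE CASCADE, cascade='null' -> ON DELETE SET NULL,
    # cascade=False -> ON DELETE RESTRICT (see the action logic above).
    # __init__ above renames the attribute, so it is accessed as book.authorID.
    author = ForeignKey('Author', cascade=True)

Author.createTable()
Book.createTable()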
27,753
sqlobject.col
__init__
null
def __init__(self, **kw):
    foreignKey = kw['foreignKey']
    style = kw['soClass'].sqlmeta.style
    if kw.get('name'):
        kw['origName'] = kw['name']
        kw['name'] = style.instanceAttrToIDAttr(kw['name'])
    else:
        kw['name'] = style.instanceAttrToIDAttr(
            style.pythonClassToAttr(foreignKey))
    super(SOForeignKey, self).__init__(**kw)
(self, **kw)
27,757
sqlobject.col
_firebirdType
null
def _firebirdType(self):
    key_type = {int: "INT", str: "VARCHAR(255)"}
    return key_type[self._idType()]
(self)
27,761
sqlobject.col
_idType
null
def _idType(self):
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    return other.sqlmeta.idType
(self)
27,763
sqlobject.col
_mssqlType
null
def _mssqlType(self):
    key_type = {int: "INT", str: "TEXT"}
    return key_type[self._idType()]
(self)
27,767
sqlobject.col
_sqlType
null
def _sqlType(self):
    return self.key_type[self._idType()]
(self)
27,769
sqlobject.col
_sybaseType
null
def _sybaseType(self):
    key_type = {int: "NUMERIC(18,0)", str: "TEXT"}
    return key_type[self._idType()]
(self)
27,772
sqlobject.col
createValidators
null
def createValidators(self):
    return [ForeignKeyValidator(name=self.name)] + \
        super(SOForeignKey, self).createValidators()
(self)
27,775
sqlobject.col
maxdbCreateReferenceConstraint
null
def maxdbCreateReferenceConstraint(self):
    # @@: Code from above should be moved here
    return None
(self)
27,776
sqlobject.col
maxdbCreateSQL
null
def maxdbCreateSQL(self):
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    fidName = self.dbName
    # I assume that foreign key name is identical
    # to the id of the reference table
    sql = ' '.join([fidName, self._maxdbType()])
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    sql += ',\nFOREIGN KEY (%s) REFERENCES %s(%s)' % (fidName, tName, idName)
    return sql
(self)
27,777
sqlobject.col
mssqlCreateReferenceConstraint
null
def mssqlCreateReferenceConstraint(self):
    # @@: Code from above should be moved here
    return None
(self)
27,778
sqlobject.col
mssqlCreateSQL
null
def mssqlCreateSQL(self, connection=None):
    sql = SOKeyCol.mssqlCreateSQL(self, connection)
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    reference = ('REFERENCES %(tName)s(%(idName)s) ' %
                 {'tName': tName, 'idName': idName})
    sql = ' '.join([sql, reference])
    return sql
(self, connection=None)
27,779
sqlobject.col
mysqlCreateReferenceConstraint
null
def mysqlCreateReferenceConstraint(self):
    sTName = self.soClass.sqlmeta.table
    sTLocalName = sTName.split('.')[-1]
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    if self.cascade is not None:
        if self.cascade == 'null':
            action = 'ON DELETE SET NULL'
        elif self.cascade:
            action = 'ON DELETE CASCADE'
        else:
            action = 'ON DELETE RESTRICT'
    else:
        action = ''
    constraint = ('ALTER TABLE %(sTName)s '
                  'ADD CONSTRAINT %(sTLocalName)s_%(colName)s_exists '
                  'FOREIGN KEY (%(colName)s) '
                  'REFERENCES %(tName)s (%(idName)s) '
                  '%(action)s' %
                  {'tName': tName,
                   'colName': self.dbName,
                   'idName': idName,
                   'action': action,
                   'sTName': sTName,
                   'sTLocalName': sTLocalName})
    return constraint
(self)
27,780
sqlobject.col
mysqlCreateSQL
null
def mysqlCreateSQL(self, connection=None):
    return SOKeyCol.mysqlCreateSQL(self, connection)
(self, connection=None)
27,781
sqlobject.col
postgresCreateReferenceConstraint
null
def postgresCreateReferenceConstraint(self):
    sTName = self.soClass.sqlmeta.table
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    if self.cascade is not None:
        if self.cascade == 'null':
            action = 'ON DELETE SET NULL'
        elif self.cascade:
            action = 'ON DELETE CASCADE'
        else:
            action = 'ON DELETE RESTRICT'
    else:
        action = ''
    constraint = ('ALTER TABLE %(sTName)s '
                  'ADD CONSTRAINT %(colName)s_exists '
                  'FOREIGN KEY (%(colName)s) '
                  'REFERENCES %(tName)s (%(idName)s) '
                  '%(action)s' %
                  {'tName': tName,
                   'colName': self.dbName,
                   'idName': idName,
                   'action': action,
                   'sTName': sTName})
    return constraint
(self)
27,782
sqlobject.col
postgresCreateSQL
null
def postgresCreateSQL(self):
    sql = SOKeyCol.postgresCreateSQL(self)
    return sql
(self)
27,783
sqlobject.col
sqliteCreateSQL
null
def sqliteCreateSQL(self):
    sql = SOKeyCol.sqliteCreateSQL(self)
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    if self.cascade is not None:
        if self.cascade == 'null':
            action = 'ON DELETE SET NULL'
        elif self.cascade:
            action = 'ON DELETE CASCADE'
        else:
            action = 'ON DELETE RESTRICT'
    else:
        action = ''
    constraint = ('CONSTRAINT %(colName)s_exists '
                  # 'FOREIGN KEY(%(colName)s) '
                  'REFERENCES %(tName)s(%(idName)s) '
                  '%(action)s' %
                  {'tName': tName,
                   'colName': self.dbName,
                   'idName': idName,
                   'action': action})
    sql = ' '.join([sql, constraint])
    return sql
(self)
27,784
sqlobject.col
sybaseCreateReferenceConstraint
null
def sybaseCreateReferenceConstraint(self):
    # @@: Code from above should be moved here
    return None
(self)
27,785
sqlobject.col
sybaseCreateSQL
null
def sybaseCreateSQL(self):
    sql = SOKeyCol.sybaseCreateSQL(self)
    other = findClass(self.foreignKey, self.soClass.sqlmeta.registry)
    tName = other.sqlmeta.table
    idName = self.refColumn or other.sqlmeta.idName
    reference = ('REFERENCES %(tName)s(%(idName)s) ' %
                 {'tName': tName, 'idName': idName})
    sql = ' '.join([sql, reference])
    return sql
(self)
27,786
sqlobject.col
SOIntCol
null
class SOIntCol(SOCol):
    # 3-03 @@: support precision, maybe max and min directly

    def __init__(self, **kw):
        self.length = kw.pop('length', None)
        self.unsigned = bool(kw.pop('unsigned', None))
        self.zerofill = bool(kw.pop('zerofill', None))
        SOCol.__init__(self, **kw)

    def autoConstraints(self):
        return [constrs.isInt]

    def createValidators(self):
        return [IntValidator(name=self.name)] + \
            super(SOIntCol, self).createValidators()

    def addSQLAttrs(self, str):
        _ret = str
        if str is None or len(str) < 1:
            return None
        if self.length and self.length >= 1:
            _ret = "%s(%d)" % (_ret, self.length)
        if self.unsigned:
            _ret += " UNSIGNED"
        if self.zerofill:
            _ret += " ZEROFILL"
        return _ret

    def _sqlType(self):
        return self.addSQLAttrs("INT")
(**kw)
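addSQLAttrs() decorates the base integer type with the MySQL-style display width and modifiers. A sketch of what a declaration maps to; the Counter class is hypothetical:

from sqlobject import SQLObject, IntCol

class Counter(SQLObject):
    # With these options _sqlType() above returns "INT(10) UNSIGNED ZEROFILL";
    # a plain IntCol() would return just "INT".
    hits = IntCol(length=10, unsigned=True, zerofill=True)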
27,802
sqlobject.col
_sqlType
null
def _sqlType(self): return self.addSQLAttrs("INT")
(self)
27,817
sqlobject.col
SOJSONCol
null
class SOJSONCol(SOStringCol):
    def createValidators(self):
        return [JSONValidator(name=self.name)]

    # Doesn't work, especially with Postgres
    # def _sqlType(self):
    #     return 'JSON'
(**kw)
27,820
sqlobject.col
__init__
null
def __init__(self, **kw):
    self.length = kw.pop('length', None)
    self.varchar = kw.pop('varchar', 'auto')
    self.char_binary = kw.pop('char_binary', None)  # A hack for MySQL
    if not self.length:
        assert self.varchar == 'auto' or not self.varchar, \
            "Without a length strings are treated as TEXT, not varchar"
        self.varchar = False
    elif self.varchar == 'auto':
        self.varchar = True
    super(SOStringLikeCol, self).__init__(**kw)
(self, **kw)
27,839
sqlobject.col
createValidators
null
def createValidators(self):
    return [JSONValidator(name=self.name)]
(self)
27,848
sqlobject.col
SOJsonbCol
null
class SOJsonbCol(SOCol):
    def createValidators(self):
        return [JsonbValidator(name=self.name)] + \
            super(SOJsonbCol, self).createValidators()

    def _postgresType(self):
        return 'JSONB'
(name, soClass, creationOrder, dbName=None, default=<class 'sqlobject.sqlbuilder.NoDefault'>, defaultSQL=None, foreignKey=None, alternateID=False, alternateMethodName=None, constraints=None, notNull=<class 'sqlobject.sqlbuilder.NoDefault'>, notNone=<class 'sqlobject.sqlbuilder.NoDefault'>, unique=<class 'sqlobject.sqlbuilder.NoDefault'>, sqlType=None, columnDef=None, validator=None, validator2=None, immutable=False, cascade=None, lazy=False, noCache=False, forceDBName=False, title=None, tags=[], origName=None, dbEncoding=None, extra_vars=None)
27,862
sqlobject.col
_postgresType
null
def _postgresType(self):
    return 'JSONB'
(self)
27,869
sqlobject.col
createValidators
null
def createValidators(self):
    return [JsonbValidator(name=self.name)] + \
        super(SOJsonbCol, self).createValidators()
(self)
27,878
sqlobject.col
SOKeyCol
null
class SOKeyCol(SOCol):
    key_type = {int: "INT", str: "TEXT"}

    # 3-03 @@: this should have a simplified constructor
    # Should provide foreign key information for other DBs.

    def __init__(self, **kw):
        self.refColumn = kw.pop('refColumn', None)
        super(SOKeyCol, self).__init__(**kw)

    def _idType(self):
        return self.soClass.sqlmeta.idType

    def _sqlType(self):
        return self.key_type[self._idType()]

    def _sybaseType(self):
        key_type = {int: "NUMERIC(18,0)", str: "TEXT"}
        return key_type[self._idType()]

    def _mssqlType(self):
        key_type = {int: "INT", str: "TEXT"}
        return key_type[self._idType()]

    def _firebirdType(self):
        key_type = {int: "INT", str: "VARCHAR(255)"}
        return key_type[self._idType()]
(**kw)
27,881
sqlobject.col
__init__
null
def __init__(self, **kw):
    self.refColumn = kw.pop('refColumn', None)
    super(SOKeyCol, self).__init__(**kw)
(self, **kw)
27,889
sqlobject.col
_idType
null
def _idType(self):
    return self.soClass.sqlmeta.idType
(self)
27,909
sqlobject.col
SOMediumIntCol
null
class SOMediumIntCol(SOIntCol):
    def _sqlType(self):
        return self.addSQLAttrs("MEDIUMINT")
(**kw)
27,925
sqlobject.col
_sqlType
null
def _sqlType(self): return self.addSQLAttrs("MEDIUMINT")
(self)
27,940
sqlobject.col
SOPickleCol
null
class SOPickleCol(SOBLOBCol):

    def __init__(self, **kw):
        self.pickleProtocol = kw.pop('pickleProtocol',
                                     pickle.HIGHEST_PROTOCOL)
        super(SOPickleCol, self).__init__(**kw)

    def createValidators(self):
        return [PickleValidator(name=self.name,
                                pickleProtocol=self.pickleProtocol)] + \
            super(SOPickleCol, self).createValidators()

    def _mysqlType(self):
        length = self.length
        if length:
            if length >= 2 ** 24:
                return "LONGBLOB"
            if length >= 2 ** 16:
                return "MEDIUMBLOB"
        return "BLOB"
(**kw)
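Because _mysqlType sizes the blob type from the declared length, a column meant to hold large pickles should say so up front. A sketch; the Session class is hypothetical and assumes sqlobject is installed:

import pickle
from sqlobject import SQLObject, PickleCol

class Session(SQLObject):
    # length >= 2**16 selects MEDIUMBLOB on MySQL, >= 2**24 LONGBLOB;
    # anything smaller (or no length) falls back to BLOB.
    state = PickleCol(length=2 ** 16, pickleProtocol=pickle.HIGHEST_PROTOCOL)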
27,943
sqlobject.col
__init__
null
def __init__(self, **kw):
    self.pickleProtocol = kw.pop('pickleProtocol', pickle.HIGHEST_PROTOCOL)
    super(SOPickleCol, self).__init__(**kw)
(self, **kw)
27,954
sqlobject.col
_mysqlType
null
def _mysqlType(self):
    length = self.length
    if length:
        if length >= 2 ** 24:
            return "LONGBLOB"
        if length >= 2 ** 16:
            return "MEDIUMBLOB"
    return "BLOB"
(self)
27,962
sqlobject.col
createValidators
null
def createValidators(self):
    return [PickleValidator(name=self.name,
                            pickleProtocol=self.pickleProtocol)] + \
        super(SOPickleCol, self).createValidators()
(self)
27,971
sqlobject.col
SOSetCol
null
class SOSetCol(SOCol):
    def __init__(self, **kw):
        self.setValues = kw.pop('setValues', None)
        assert self.setValues is not None, \
            'You must provide a setValues keyword argument'
        super(SOSetCol, self).__init__(**kw)

    def autoConstraints(self):
        return [constrs.isString, constrs.InList(self.setValues)]

    def createValidators(self):
        return [SetValidator(name=self.name, setValues=self.setValues)] + \
            super(SOSetCol, self).createValidators()

    def _mysqlType(self):
        return "SET(%s)" % ', '.join(
            [sqlbuilder.sqlrepr(v, 'mysql') for v in self.setValues])
(**kw)
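SOSetCol mirrors SOEnumCol but targets MySQL's multi-valued SET type. A sketch; the Pizza class is hypothetical:

from sqlobject import SQLObject, SetCol

class Pizza(SQLObject):
    # _mysqlType above renders this as SET('cheese', 'ham', 'mushrooms').
    toppings = SetCol(setValues=['cheese', 'ham', 'mushrooms'])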
27,974
sqlobject.col
__init__
null
def __init__(self, **kw):
    self.setValues = kw.pop('setValues', None)
    assert self.setValues is not None, \
        'You must provide a setValues keyword argument'
    super(SOSetCol, self).__init__(**kw)
(self, **kw)
27,984
sqlobject.col
_mysqlType
null
def _mysqlType(self): return "SET(%s)" % ', '.join( [sqlbuilder.sqlrepr(v, 'mysql') for v in self.setValues])
(self)
27,990
sqlobject.col
autoConstraints
null
def autoConstraints(self):
    return [constrs.isString, constrs.InList(self.setValues)]
(self)
27,992
sqlobject.col
createValidators
null
def createValidators(self):
    return [SetValidator(name=self.name, setValues=self.setValues)] + \
        super(SOSetCol, self).createValidators()
(self)
28,001
sqlobject.col
SOSmallIntCol
null
class SOSmallIntCol(SOIntCol):
    def _sqlType(self):
        return self.addSQLAttrs("SMALLINT")
(**kw)
28,017
sqlobject.col
_sqlType
null
def _sqlType(self): return self.addSQLAttrs("SMALLINT")
(self)
28,032
sqlobject.col
SOStringCol
null
class SOStringCol(SOStringLikeCol):
    def createValidators(self, dataType=None):
        return [StringValidator(name=self.name, dataType=dataType)] + \
            super(SOStringCol, self).createValidators()
(**kw)
28,054
sqlobject.col
createValidators
null
def createValidators(self, dataType=None):
    return [StringValidator(name=self.name, dataType=dataType)] + \
        super(SOStringCol, self).createValidators()
(self, dataType=None)
28,063
sqlobject.col
SOStringLikeCol
A common ancestor for SOStringCol and SOUnicodeCol
class SOStringLikeCol(SOCol): """A common ancestor for SOStringCol and SOUnicodeCol""" def __init__(self, **kw): self.length = kw.pop('length', None) self.varchar = kw.pop('varchar', 'auto') self.char_binary = kw.pop('char_binary', None) # A hack for MySQL if not self.length: assert self.varchar == 'auto' or not self.varchar, \ "Without a length strings are treated as TEXT, not varchar" self.varchar = False elif self.varchar == 'auto': self.varchar = True super(SOStringLikeCol, self).__init__(**kw) def autoConstraints(self): constraints = [constrs.isString] if self.length is not None: constraints += [constrs.MaxLength(self.length)] return constraints def _sqlType(self): if self.customSQLType is not None: return self.customSQLType if not self.length: return 'TEXT' elif self.varchar: return 'VARCHAR(%i)' % self.length else: return 'CHAR(%i)' % self.length def _check_case_sensitive(self, db): if self.char_binary: raise ValueError("%s does not support " "binary character columns" % db) def _mysqlType(self): type = self._sqlType() if self.char_binary: type += " BINARY" return type def _postgresType(self): self._check_case_sensitive("PostgreSQL") return super(SOStringLikeCol, self)._postgresType() def _sqliteType(self): self._check_case_sensitive("SQLite") return super(SOStringLikeCol, self)._sqliteType() def _sybaseType(self): self._check_case_sensitive("SYBASE") type = self._sqlType() return type def _mssqlType(self): if self.customSQLType is not None: return self.customSQLType if not self.length: if self.connection and self.connection.can_use_max_types(): type = 'VARCHAR(MAX)' else: type = 'VARCHAR(4000)' elif self.varchar: type = 'VARCHAR(%i)' % self.length else: type = 'CHAR(%i)' % self.length return type def _firebirdType(self): self._check_case_sensitive("FireBird") if not self.length: return 'BLOB SUB_TYPE TEXT' else: return self._sqlType() def _maxdbType(self): self._check_case_sensitive("SAP DB/MaxDB") if not self.length: return 'LONG ASCII' else: return self._sqlType()
(**kw)
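The length/varchar pair drives the whole type-decision tree in _sqlType above. A sketch of the three common declarations; the Person class is hypothetical:

from sqlobject import SQLObject, StringCol

class Person(SQLObject):
    name = StringCol(length=100)                  # VARCHAR(100)
    country = StringCol(length=2, varchar=False)  # CHAR(2)
    bio = StringCol()                             # no length, so TEXT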
28,094
sqlobject.col
SOTimeCol
null
class SOTimeCol(SOCol):
    timeFormat = '%H:%M:%S.%f'

    def __init__(self, **kw):
        timeFormat = kw.pop('timeFormat', None)
        if timeFormat:
            self.timeFormat = timeFormat
        super(SOTimeCol, self).__init__(**kw)

    def createValidators(self):
        _validators = super(SOTimeCol, self).createValidators()
        if default_datetime_implementation == DATETIME_IMPLEMENTATION:
            validatorClass = TimeValidator
        elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION:
            validatorClass = MXDateTimeValidator
        elif default_datetime_implementation == ZOPE_DATETIME_IMPLEMENTATION:
            validatorClass = ZopeDateTimeValidator
        if default_datetime_implementation:
            _validators.insert(0, validatorClass(name=self.name,
                                                 format=self.timeFormat))
        return _validators

    def _mysqlType(self):
        if self.connection and self.connection.can_use_microseconds():
            return 'TIME(6)'
        else:
            return 'TIME'

    def _postgresType(self):
        return 'TIME'

    def _sybaseType(self):
        return 'TIME'

    def _mssqlType(self):
        if self.connection and self.connection.can_use_microseconds():
            return 'TIME(6)'
        else:
            return 'TIME'

    def _sqliteType(self):
        return 'TIME'

    def _firebirdType(self):
        return 'TIME'

    def _maxdbType(self):
        return 'TIME'
(**kw)
28,097
sqlobject.col
__init__
null
def __init__(self, **kw):
    timeFormat = kw.pop('timeFormat', None)
    if timeFormat:
        self.timeFormat = timeFormat
    super(SOTimeCol, self).__init__(**kw)
(self, **kw)
28,101
sqlobject.col
_firebirdType
null
def _firebirdType(self):
    return 'TIME'
(self)
28,105
sqlobject.col
_maxdbType
null
def _maxdbType(self):
    return 'TIME'
(self)
28,106
sqlobject.col
_mssqlType
null
def _mssqlType(self):
    if self.connection and self.connection.can_use_microseconds():
        return 'TIME(6)'
    else:
        return 'TIME'
(self)
28,107
sqlobject.col
_mysqlType
null
def _mysqlType(self):
    if self.connection and self.connection.can_use_microseconds():
        return 'TIME(6)'
    else:
        return 'TIME'
(self)
28,108
sqlobject.col
_postgresType
null
def _postgresType(self):
    return 'TIME'
(self)
28,111
sqlobject.col
_sqliteType
null
def _sqliteType(self):
    return 'TIME'
(self)
28,112
sqlobject.col
_sybaseType
null
def _sybaseType(self):
    return 'TIME'
(self)
28,115
sqlobject.col
createValidators
null
def createValidators(self):
    _validators = super(SOTimeCol, self).createValidators()
    if default_datetime_implementation == DATETIME_IMPLEMENTATION:
        validatorClass = TimeValidator
    elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION:
        validatorClass = MXDateTimeValidator
    elif default_datetime_implementation == ZOPE_DATETIME_IMPLEMENTATION:
        validatorClass = ZopeDateTimeValidator
    if default_datetime_implementation:
        _validators.insert(0, validatorClass(name=self.name,
                                             format=self.timeFormat))
    return _validators
(self)
28,124
sqlobject.col
SOTimedeltaCol
null
class SOTimedeltaCol(SOCol):
    def _postgresType(self):
        return 'INTERVAL'

    def createValidators(self):
        return [TimedeltaValidator(name=self.name)] + \
            super(SOTimedeltaCol, self).createValidators()
(name, soClass, creationOrder, dbName=None, default=<class 'sqlobject.sqlbuilder.NoDefault'>, defaultSQL=None, foreignKey=None, alternateID=False, alternateMethodName=None, constraints=None, notNull=<class 'sqlobject.sqlbuilder.NoDefault'>, notNone=<class 'sqlobject.sqlbuilder.NoDefault'>, unique=<class 'sqlobject.sqlbuilder.NoDefault'>, sqlType=None, columnDef=None, validator=None, validator2=None, immutable=False, cascade=None, lazy=False, noCache=False, forceDBName=False, title=None, tags=[], origName=None, dbEncoding=None, extra_vars=None)
28,138
sqlobject.col
_postgresType
null
def _postgresType(self):
    return 'INTERVAL'
(self)
28,145
sqlobject.col
createValidators
null
def createValidators(self):
    return [TimedeltaValidator(name=self.name)] + \
        super(SOTimedeltaCol, self).createValidators()
(self)
28,154
sqlobject.col
SOTimestampCol
Necessary to support MySQL's use of TIMESTAMP versus DATETIME types
class SOTimestampCol(SODateTimeCol): """ Necessary to support MySQL's use of TIMESTAMP versus DATETIME types """ def __init__(self, **kw): if 'default' not in kw: kw['default'] = None SOCol.__init__(self, **kw) def _mysqlType(self): if self.connection and self.connection.can_use_microseconds(): return 'TIMESTAMP(6)' else: return 'TIMESTAMP'
(**kw)
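The default=None injected by __init__ leaves the column value to the database, which is what lets MySQL auto-populate a TIMESTAMP. A sketch; the AuditEntry class is hypothetical:

from sqlobject import SQLObject, TimestampCol

class AuditEntry(SQLObject):
    # No explicit default: __init__ above supplies default=None, so MySQL
    # fills the TIMESTAMP itself; where the server supports it, _mysqlType
    # returns TIMESTAMP(6) for microsecond precision.
    modified = TimestampCol()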
28,157
sqlobject.col
__init__
null
def __init__(self, **kw):
    if 'default' not in kw:
        kw['default'] = None
    SOCol.__init__(self, **kw)
(self, **kw)
28,167
sqlobject.col
_mysqlType
null
def _mysqlType(self):
    if self.connection and self.connection.can_use_microseconds():
        return 'TIMESTAMP(6)'
    else:
        return 'TIMESTAMP'
(self)
28,184
sqlobject.col
SOTinyIntCol
null
class SOTinyIntCol(SOIntCol):
    def _sqlType(self):
        return self.addSQLAttrs("TINYINT")
(**kw)
28,200
sqlobject.col
_sqlType
null
def _sqlType(self): return self.addSQLAttrs("TINYINT")
(self)
28,215
sqlobject.col
SOUnicodeCol
null
class SOUnicodeCol(SOStringLikeCol):
    def _mssqlType(self):
        if self.customSQLType is not None:
            return self.customSQLType
        return 'N' + super(SOUnicodeCol, self)._mssqlType()

    def createValidators(self):
        return [UnicodeStringValidator(name=self.name)] + \
            super(SOUnicodeCol, self).createValidators()
(**kw)
28,228
sqlobject.col
_mssqlType
null
def _mssqlType(self):
    if self.customSQLType is not None:
        return self.customSQLType
    return 'N' + super(SOUnicodeCol, self)._mssqlType()
(self)
28,237
sqlobject.col
createValidators
null
def createValidators(self):
    return [UnicodeStringValidator(name=self.name)] + \
        super(SOUnicodeCol, self).createValidators()
(self)
28,246
sqlobject.col
SOUuidCol
null
class SOUuidCol(SOCol):
    def createValidators(self):
        return [UuidValidator(name=self.name)] + \
            super(SOUuidCol, self).createValidators()

    def _sqlType(self):
        return 'VARCHAR(36)'

    def _postgresType(self):
        return 'UUID'
(name, soClass, creationOrder, dbName=None, default=<class 'sqlobject.sqlbuilder.NoDefault'>, defaultSQL=None, foreignKey=None, alternateID=False, alternateMethodName=None, constraints=None, notNull=<class 'sqlobject.sqlbuilder.NoDefault'>, notNone=<class 'sqlobject.sqlbuilder.NoDefault'>, unique=<class 'sqlobject.sqlbuilder.NoDefault'>, sqlType=None, columnDef=None, validator=None, validator2=None, immutable=False, cascade=None, lazy=False, noCache=False, forceDBName=False, title=None, tags=[], origName=None, dbEncoding=None, extra_vars=None)
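SOUuidCol stores the canonical 36-character text form everywhere except PostgreSQL, where it uses the native UUID type. A sketch; the Asset class is hypothetical, and the callable default is an assumption based on SQLObject's general support for callable column defaults:

import uuid
from sqlobject import SQLObject, UuidCol

class Asset(SQLObject):
    guid = UuidCol(default=uuid.uuid4)  # VARCHAR(36), or UUID on PostgreSQL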
28,260
sqlobject.col
_postgresType
null
def _postgresType(self):
    return 'UUID'
(self)
28,262
sqlobject.col
_sqlType
null
def _sqlType(self):
    return 'VARCHAR(36)'
(self)
28,267
sqlobject.col
createValidators
null
def createValidators(self):
    return [UuidValidator(name=self.name)] + \
        super(SOUuidCol, self).createValidators()
(self)
28,276
sqlobject.joins
SQLMultipleJoin
null
class SQLMultipleJoin(Join):
    baseClass = SOSQLMultipleJoin
(otherClass=None, **kw)
28,281
sqlobject.main
SQLObject
null
class SQLObject(with_metaclass(declarative.DeclarativeMeta, object)):

    _connection = sqlhub

    sqlmeta = sqlmeta

    # DSM: The _inheritable attribute controls whether the class can be
    # DSM: inherited 'logically' with a foreignKey and a back reference.
    _inheritable = False  # Is this class inheritable?
    _parent = None  # A reference to the parent instance
    childName = None  # Children name (to be able to get a subclass)

    # The law of Demeter: the class should not call other classes by name
    SelectResultsClass = SelectResults

    def __classinit__(cls, new_attrs):
        # This is true if we're initializing the SQLObject class,
        # instead of a subclass:
        is_base = cls.__bases__ == (object,)

        cls._SO_setupSqlmeta(new_attrs, is_base)

        implicitColumns = _collectAttributes(cls, new_attrs, col.Col)
        implicitJoins = _collectAttributes(cls, new_attrs, joins.Join)
        implicitIndexes = _collectAttributes(cls, new_attrs,
                                             index.DatabaseIndex)

        if not is_base:
            cls._SO_cleanDeprecatedAttrs(new_attrs)

        if '_connection' in new_attrs:
            connection = new_attrs['_connection']
            del cls._connection
            assert 'connection' not in new_attrs
        elif 'connection' in new_attrs:
            connection = new_attrs['connection']
            del cls.connection
        else:
            connection = None

        cls._SO_finishedClassCreation = False

        ######################################################
        # Set some attributes to their defaults, if necessary.
        # First we get the connection:
        if not connection and not getattr(cls, '_connection', None):
            mod = sys.modules[cls.__module__]
            # See if there's a __connection__ global in
            # the module, use it if there is.
            if hasattr(mod, '__connection__'):
                connection = mod.__connection__

        # Do not check hasattr(cls, '_connection') here - it is possible
        # SQLObject parent class has a connection attribute that came
        # from sqlhub, e.g.; check __dict__ only.
        if connection and ('_connection' not in cls.__dict__):
            cls.setConnection(connection)

        sqlmeta = cls.sqlmeta

        # We have to check if there are columns in the inherited
        # _columns where the attribute has been set to None in this
        # class.  If so, then we need to remove that column from
        # _columns.
        for key in sqlmeta.columnDefinitions.keys():
            if (key in new_attrs and new_attrs[key] is None):
                del sqlmeta.columnDefinitions[key]

        for column in sqlmeta.columnDefinitions.values():
            sqlmeta.addColumn(column)

        for column in implicitColumns:
            sqlmeta.addColumn(column)

        # Now the class is in an essentially OK-state, so we can
        # set up any magic attributes:
        declarative.setup_attributes(cls, new_attrs)

        if sqlmeta.fromDatabase:
            sqlmeta.addColumnsFromDatabase()

        for j in implicitJoins:
            sqlmeta.addJoin(j)
        for i in implicitIndexes:
            sqlmeta.addIndex(i)

        def order_getter(o):
            return o.creationOrder
        sqlmeta.columnList.sort(key=order_getter)
        sqlmeta.indexes.sort(key=order_getter)
        sqlmeta.indexDefinitions.sort(key=order_getter)
        # Joins cannot be sorted because addJoin created accessors
        # that remember indexes.
        # sqlmeta.joins.sort(key=order_getter)
        sqlmeta.joinDefinitions.sort(key=order_getter)

        # We don't setup the properties until we're finished with the
        # batch adding of all the columns...
        cls._notifyFinishClassCreation()
        cls._SO_finishedClassCreation = True
        makeProperties(cls)

        # We use the magic "q" attribute for accessing lazy
        # SQL where-clause generation.  See the sql module for
        # more.
        if not is_base:
            cls.q = sqlbuilder.SQLObjectTable(cls)
            cls.j = sqlbuilder.SQLObjectTableWithJoins(cls)

        classregistry.registry(sqlmeta.registry).addClass(cls)

    @classmethod
    def _SO_setupSqlmeta(cls, new_attrs, is_base):
        """
        This fixes up the sqlmeta attribute.  It handles both the case
        where no sqlmeta was given (in which we need to create another
        subclass), or the sqlmeta given doesn't have the proper
        inheritance.  Lastly it calls sqlmeta.setClass, which handles
        much of the setup.
        """
        if ('sqlmeta' not in new_attrs and not is_base):
            # We have to create our own subclass, usually.
            # type(className, bases_tuple, attr_dict) creates a new subclass.
            cls.sqlmeta = type('sqlmeta', (cls.sqlmeta,), {})
        if not issubclass(cls.sqlmeta, sqlmeta):
            # We allow no superclass and an object superclass, instead
            # of inheriting from sqlmeta; but in that case we replace
            # the class and just move over its attributes:
            assert cls.sqlmeta.__bases__ in ((), (object,)), (
                "If you do not inherit your sqlmeta class from "
                "sqlobject.sqlmeta, it must not inherit from any other "
                "class (your sqlmeta inherits from: %s)"
                % cls.sqlmeta.__bases__)
            for base in cls.__bases__:
                superclass = getattr(base, 'sqlmeta', None)
                if superclass:
                    break
            else:
                assert 0, (
                    "No sqlmeta class could be found in any superclass "
                    "(while fixing up sqlmeta %r inheritance)"
                    % cls.sqlmeta)
            values = dict(cls.sqlmeta.__dict__)
            for key in list(values.keys()):
                if key.startswith('__') and key.endswith('__'):
                    # Magic values shouldn't be passed through:
                    del values[key]
            cls.sqlmeta = type('sqlmeta', (superclass,), values)
        if not is_base:  # Do not pollute the base sqlmeta class
            cls.sqlmeta.setClass(cls)

    @classmethod
    def _SO_cleanDeprecatedAttrs(cls, new_attrs):
        """
        This removes attributes on SQLObject subclasses that have been
        deprecated; they are moved to the sqlmeta class, and a
        deprecation warning is given.
        """
        for attr in ():
            if attr in new_attrs:
                deprecated("%r is deprecated and read-only; please do "
                           "not use it in your classes until it is fully "
                           "deprecated" % attr, level=1, stacklevel=5)

    @classmethod
    def get(cls, id, connection=None, selectResults=None):

        assert id is not None, \
            'None is not a possible id for %s' % cls.__name__

        id = cls.sqlmeta.idType(id)

        if connection is None:
            cache = cls._connection.cache
        else:
            cache = connection.cache

        # This whole sequence comes from Cache.CacheFactory's
        # behavior, where a None returned means a cache miss.
        val = cache.get(id, cls)
        if val is None:
            try:
                val = cls(_SO_fetch_no_create=1)
                val._SO_validatorState = sqlbuilder.SQLObjectState(val)
                val._init(id, connection, selectResults)
                cache.put(id, cls, val)
            finally:
                cache.finishPut(cls)
        elif selectResults and not val.sqlmeta.dirty:
            val._SO_writeLock.acquire()
            try:
                val._SO_selectInit(selectResults)
                val.sqlmeta.expired = False
            finally:
                val._SO_writeLock.release()
        return val

    @classmethod
    def _notifyFinishClassCreation(cls):
        pass

    def _init(self, id, connection=None, selectResults=None):
        assert id is not None
        # This function gets called only when the object is
        # created, unlike __init__ which would be called
        # anytime the object was returned from cache.
        self.id = id
        self._SO_writeLock = threading.Lock()

        # If no connection was given, we'll inherit the class
        # instance variable which should have a _connection
        # attribute.
        if (connection is not None) and \
                (getattr(self, '_connection', None) is not connection):
            self._connection = connection
            # Sometimes we need to know if this instance is
            # global or tied to a particular connection.
            # This flag tells us that:
            self.sqlmeta._perConnection = True

        if not selectResults:
            dbNames = [col.dbName for col in self.sqlmeta.columnList]
            selectResults = self._connection._SO_selectOne(self, dbNames)
            if not selectResults:
                raise SQLObjectNotFound(
                    "The object %s by the ID %s does not exist" % (
                        self.__class__.__name__, self.id))
        self._SO_selectInit(selectResults)
        self._SO_createValues = {}
        self.sqlmeta.dirty = False

    def _SO_loadValue(self, attrName):
        try:
            return getattr(self, attrName)
        except AttributeError:
            try:
                self._SO_writeLock.acquire()
                try:
                    # Maybe, just in the moment since we got the lock,
                    # some other thread did a _SO_loadValue and we
                    # have the attribute!  Let's try and find out!  We
                    # can keep trying this all day and still beat the
                    # performance on the database call (okay, we can
                    # keep trying this for a few msecs at least)...
                    result = getattr(self, attrName)
                except AttributeError:
                    pass
                else:
                    return result
                self.sqlmeta.expired = False
                dbNames = [col.dbName for col in self.sqlmeta.columnList]
                selectResults = self._connection._SO_selectOne(self, dbNames)
                if not selectResults:
                    raise SQLObjectNotFound(
                        "The object %s by the ID %s has been deleted" % (
                            self.__class__.__name__, self.id))
                self._SO_selectInit(selectResults)
                result = getattr(self, attrName)
                return result
            finally:
                self._SO_writeLock.release()

    def sync(self):
        if self.sqlmeta.lazyUpdate and self._SO_createValues:
            self.syncUpdate()
        self._SO_writeLock.acquire()
        try:
            dbNames = [col.dbName for col in self.sqlmeta.columnList]
            selectResults = self._connection._SO_selectOne(self, dbNames)
            if not selectResults:
                raise SQLObjectNotFound(
                    "The object %s by the ID %s has been deleted" % (
                        self.__class__.__name__, self.id))
            self._SO_selectInit(selectResults)
            self.sqlmeta.expired = False
        finally:
            self._SO_writeLock.release()

    def syncUpdate(self):
        if not self._SO_createValues:
            return
        self._SO_writeLock.acquire()
        try:
            if self.sqlmeta.columns:
                columns = self.sqlmeta.columns
                values = [(columns[v[0]].dbName, v[1])
                          for v in sorted(
                              self._SO_createValues.items(),
                              key=lambda c: columns[c[0]].creationOrder)]
                self._connection._SO_update(self, values)
            self.sqlmeta.dirty = False
            self._SO_createValues = {}
        finally:
            self._SO_writeLock.release()

        post_funcs = []
        self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs)
        for func in post_funcs:
            func(self)

    def expire(self):
        if self.sqlmeta.expired:
            return
        self._SO_writeLock.acquire()
        try:
            if self.sqlmeta.expired:
                return
            for column in self.sqlmeta.columnList:
                delattr(self, instanceName(column.name))
            self.sqlmeta.expired = True
            self._connection.cache.expire(self.id, self.__class__)
            self._SO_createValues = {}
        finally:
            self._SO_writeLock.release()

    def _SO_setValue(self, name, value, from_python, to_python):
        # This is the place where we actually update the
        # database.

        # If we are _creating, the object doesn't yet exist
        # in the database, and we can't insert it until all
        # the parts are set.  So we just keep them in a
        # dictionary until later:
        d = {name: value}
        if not self.sqlmeta._creating and \
                not getattr(self.sqlmeta, "row_update_sig_suppress", False):
            self.sqlmeta.send(events.RowUpdateSignal, self, d)
        if len(d) != 1 or name not in d:
            # Already called RowUpdateSignal, don't call it again
            # inside .set()
            self.sqlmeta.row_update_sig_suppress = True
            self.set(**d)
            del self.sqlmeta.row_update_sig_suppress
        value = d[name]
        if from_python:
            dbValue = from_python(value, self._SO_validatorState)
        else:
            dbValue = value
        if to_python:
            value = to_python(dbValue, self._SO_validatorState)
        if self.sqlmeta._creating or self.sqlmeta.lazyUpdate:
            self.sqlmeta.dirty = True
            self._SO_createValues[name] = dbValue
            setattr(self, instanceName(name), value)
            return

        self._connection._SO_update(
            self, [(self.sqlmeta.columns[name].dbName, dbValue)])

        if self.sqlmeta.cacheValues:
            setattr(self, instanceName(name), value)

        post_funcs = []
        self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs)
        for func in post_funcs:
            func(self)

    def set(self, _suppress_set_sig=False, **kw):
        if not self.sqlmeta._creating and \
                not getattr(self.sqlmeta, "row_update_sig_suppress", False) \
                and not _suppress_set_sig:
            self.sqlmeta.send(events.RowUpdateSignal, self, kw)
        # set() is used to update multiple values at once,
        # potentially with one SQL statement if possible.

        # Filter out items that don't map to column names.
        # Those will be set directly on the object using
        # setattr(obj, name, value).
        def is_column(_c):
            return _c in self.sqlmeta._plainSetters

        def f_is_column(item):
            return is_column(item[0])

        def f_not_column(item):
            return not is_column(item[0])

        items = kw.items()
        extra = dict(filter(f_not_column, items))
        kw = dict(filter(f_is_column, items))

        # _creating is special, see _SO_setValue
        if self.sqlmeta._creating or self.sqlmeta.lazyUpdate:
            for name, value in kw.items():
                from_python = getattr(self, '_SO_from_python_%s' % name, None)
                if from_python:
                    kw[name] = dbValue = from_python(value,
                                                     self._SO_validatorState)
                else:
                    dbValue = value
                to_python = getattr(self, '_SO_to_python_%s' % name, None)
                if to_python:
                    value = to_python(dbValue, self._SO_validatorState)
                setattr(self, instanceName(name), value)

            self._SO_createValues.update(kw)

            for name, value in extra.items():
                try:
                    getattr(self.__class__, name)
                except AttributeError:
                    if name not in self.sqlmeta.columns:
                        raise TypeError(
                            "%s.set() got an unexpected keyword argument "
                            "%s" % (self.__class__.__name__, name))
                try:
                    setattr(self, name, value)
                except AttributeError as e:
                    raise AttributeError('%s (with attribute %r)' % (e, name))

            self.sqlmeta.dirty = True
            return

        self._SO_writeLock.acquire()

        try:
            # We have to go through and see if the setters are
            # "plain", that is, if the user has changed their
            # definition in any way (put in something that
            # normalizes the value or checks for consistency,
            # for instance).  If so then we have to use plain
            # old setattr() to change the value, since we can't
            # read the user's mind.  We'll combine everything
            # else into a single UPDATE, if necessary.
            toUpdate = {}
            for name, value in kw.items():
                from_python = getattr(self, '_SO_from_python_%s' % name, None)
                if from_python:
                    dbValue = from_python(value, self._SO_validatorState)
                else:
                    dbValue = value
                to_python = getattr(self, '_SO_to_python_%s' % name, None)
                if to_python:
                    value = to_python(dbValue, self._SO_validatorState)
                if self.sqlmeta.cacheValues:
                    setattr(self, instanceName(name), value)
                toUpdate[name] = dbValue
            for name, value in extra.items():
                try:
                    getattr(self.__class__, name)
                except AttributeError:
                    if name not in self.sqlmeta.columns:
                        raise TypeError(
                            "%s.set() got an unexpected keyword argument "
                            "%s" % (self.__class__.__name__, name))
                try:
                    setattr(self, name, value)
                except AttributeError as e:
                    raise AttributeError('%s (with attribute %r)' % (e, name))

            if toUpdate:
                toUpdate = sorted(
                    toUpdate.items(),
                    key=lambda c: self.sqlmeta.columns[c[0]].creationOrder)
                args = [(self.sqlmeta.columns[name].dbName, value)
                        for name, value in toUpdate]
                self._connection._SO_update(self, args)
        finally:
            self._SO_writeLock.release()

        post_funcs = []
        self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs)
        for func in post_funcs:
            func(self)

    def _SO_selectInit(self, row):
        for _col, colValue in zip(self.sqlmeta.columnList, row):
            if _col.to_python:
                colValue = _col.to_python(colValue, self._SO_validatorState)
            setattr(self, instanceName(_col.name), colValue)

    def _SO_getValue(self, name):
        # Retrieves a single value from the database.  Simple.
        assert not self.sqlmeta._obsolete, (
            "%s with id %s has become obsolete"
            % (self.__class__.__name__, self.id))
        # @@: do we really need this lock?
        # self._SO_writeLock.acquire()
        column = self.sqlmeta.columns[name]
        results = self._connection._SO_selectOne(self, [column.dbName])
        # self._SO_writeLock.release()
        assert results is not None, "%s with id %s is not in the database" % (
            self.__class__.__name__, self.id)
        value = results[0]
        if column.to_python:
            value = column.to_python(value, self._SO_validatorState)
        return value

    def _SO_foreignKey(self, value, joinClass, idName=None):
        if value is None:
            return None
        if self.sqlmeta._perConnection:
            connection = self._connection
        else:
            connection = None
        if idName is None:  # Get by id
            return joinClass.get(value, connection=connection)
        return joinClass.select(
            getattr(joinClass.q, idName) == value,
            connection=connection).getOne()

    def __init__(self, **kw):
        # If we are the outermost constructor of a hierarchy of
        # InheritableSQLObjects (or simply _the_ constructor of a "normal"
        # SQLObject), we create a threadlocal list that collects the
        # RowCreatedSignals, and executes them if this very constructor
        # is left
        try:
            _postponed_local.postponed_calls
            postponed_created = False
        except AttributeError:
            _postponed_local.postponed_calls = []
            postponed_created = True

        try:
            # We shadow the sqlmeta class with an instance of sqlmeta
            # that points to us (our sqlmeta buddy object; where the
            # sqlmeta class is our class's buddy class)
            self.sqlmeta = self.__class__.sqlmeta(self)
            # The get() classmethod/constructor uses a magic keyword
            # argument when it wants an empty object, fetched from the
            # database.  So we have nothing more to do in that case:
            if '_SO_fetch_no_create' in kw:
                return

            post_funcs = []
            self.sqlmeta.send(events.RowCreateSignal, self, kw, post_funcs)

            # Pass the connection object along if we were given one.
            if 'connection' in kw:
                connection = kw.pop('connection')
                if getattr(self, '_connection', None) is not connection:
                    self._connection = connection
                    self.sqlmeta._perConnection = True

            self._SO_writeLock = threading.Lock()

            if 'id' in kw:
                id = self.sqlmeta.idType(kw['id'])
                del kw['id']
            else:
                id = None

            self._create(id, **kw)

            for func in post_funcs:
                func(self)
        finally:
            # if we are the creator of the tl-storage, we
            # have to execute and under all circumstances
            # remove the tl-storage
            if postponed_created:
                try:
                    for func in _postponed_local.postponed_calls:
                        func()
                finally:
                    del _postponed_local.postponed_calls

    def _create(self, id, **kw):
        self.sqlmeta._creating = True
        self._SO_createValues = {}
        self._SO_validatorState = sqlbuilder.SQLObjectState(self)

        # First we do a little fix-up on the keywords we were
        # passed:
        for column in self.sqlmeta.columnList:

            # Then we check if the column wasn't passed in, and
            # if not we try to get the default.
            if column.name not in kw and column.foreignName not in kw:
                default = column.default

                # If we don't get it, it's an error:
                # If we specified an SQL DEFAULT, then we should use that
                if default is NoDefault:
                    if column.defaultSQL is None:
                        raise TypeError(
                            "%s() did not get expected keyword argument "
                            "'%s'" % (self.__class__.__name__, column.name))
                    else:
                        # There is defaultSQL for the column -
                        # do not put the column to kw
                        # so that the backend creates the value.
                        continue

                # Otherwise we put it in as though they did pass
                # that keyword:
                kw[column.name] = default

        self.set(**kw)

        # Then we finalize the process:
        self._SO_finishCreate(id)

    def _SO_finishCreate(self, id=None):
        # Here's where an INSERT is finalized.
        # These are all the column values that were supposed
        # to be set, but were delayed until now:
        setters = self._SO_createValues.items()
        setters = sorted(
            setters,
            key=lambda c: self.sqlmeta.columns[c[0]].creationOrder)
        # Here's their database names:
        names = [self.sqlmeta.columns[v[0]].dbName for v in setters]
        values = [v[1] for v in setters]
        # Get rid of _SO_create*, we aren't creating anymore.
        # Doesn't have to be threadsafe because we're still in
        # new(), which doesn't need to be threadsafe.
        self.sqlmeta.dirty = False
        if not self.sqlmeta.lazyUpdate:
            del self._SO_createValues
        else:
            self._SO_createValues = {}
        del self.sqlmeta._creating

        # Do the insert -- most of the SQL in this case is left
        # up to DBConnection, since getting a new ID is
        # non-standard.
        id = self._connection.queryInsertID(self, id, names, values)
        cache = self._connection.cache
        cache.created(id, self.__class__, self)
        self._init(id)

        post_funcs = []
        kw = dict([('class', self.__class__), ('id', id)])

        def _send_RowCreatedSignal():
            self.sqlmeta.send(events.RowCreatedSignal, self, kw, post_funcs)
            for func in post_funcs:
                func(self)
        _postponed_local.postponed_calls.append(_send_RowCreatedSignal)

    def _SO_getID(self, obj, refColumn=None):
        return getID(obj, refColumn)

    @classmethod
    def _findAlternateID(cls, name, dbName, value, connection=None):
        if isinstance(name, str):
            name = (name,)
            value = (value,)
        if len(name) != len(value):
            raise ValueError(
                "'column' and 'value' tuples must be of the same size")
        new_value = []
        for n, v in zip(name, value):
            from_python = getattr(cls, '_SO_from_python_' + n)
            if from_python:
                v = from_python(
                    v, sqlbuilder.SQLObjectState(cls, connection=connection))
            new_value.append(v)
        condition = sqlbuilder.AND(
            *[getattr(cls.q, _n) == _v for _n, _v in zip(name, new_value)])
        return (connection or cls._connection)._SO_selectOneAlt(
            cls,
            [cls.sqlmeta.idName] +
            [column.dbName for column in cls.sqlmeta.columnList],
            condition), None

    @classmethod
    def _SO_fetchAlternateID(cls, name, dbName, value, connection=None,
                             idxName=None):
        result, obj = cls._findAlternateID(name, dbName, value, connection)
        if not result:
            if idxName is None:
                raise SQLObjectNotFound(
                    "The %s by alternateID %s = %s does not exist" % (
                        cls.__name__, name, repr(value)))
            else:
                names = []
                for i in range(len(name)):
                    names.append("%s = %s" % (name[i], repr(value[i])))
                names = ', '.join(names)
                raise SQLObjectNotFound(
                    "The %s by unique index %s(%s) does not exist" % (
                        cls.__name__, idxName, names))
        if obj:
            return obj
        if connection:
            obj = cls.get(result[0], connection=connection,
                          selectResults=result[1:])
        else:
            obj = cls.get(result[0], selectResults=result[1:])
        return obj

    @classmethod
    def _SO_depends(cls):
        return findDependencies(cls.__name__, cls.sqlmeta.registry)

    @classmethod
    def select(cls, clause=None, clauseTables=None,
               orderBy=NoDefault, limit=None,
               lazyColumns=False, reversed=False,
               distinct=False, connection=None,
               join=None, forUpdate=False):
        return cls.SelectResultsClass(
            cls, clause,
            clauseTables=clauseTables,
            orderBy=orderBy,
            limit=limit,
            lazyColumns=lazyColumns,
            reversed=reversed,
            distinct=distinct,
            connection=connection,
            join=join, forUpdate=forUpdate)

    @classmethod
    def selectBy(cls, connection=None, **kw):
        conn = connection or cls._connection
        return cls.SelectResultsClass(
            cls,
            conn._SO_columnClause(cls, kw),
            connection=conn)

    @classmethod
    def tableExists(cls, connection=None):
        conn = connection or cls._connection
        return conn.tableExists(cls.sqlmeta.table)

    @classmethod
    def dropTable(cls, ifExists=False, dropJoinTables=True, cascade=False,
                  connection=None):
        conn = connection or cls._connection
        if ifExists and not cls.tableExists(connection=conn):
            return
        extra_sql = []
        post_funcs = []
        cls.sqlmeta.send(events.DropTableSignal, cls, connection,
                         extra_sql, post_funcs)
        conn.dropTable(cls.sqlmeta.table, cascade)
        if dropJoinTables:
            cls.dropJoinTables(ifExists=ifExists, connection=conn)
        for sql in extra_sql:
            connection.query(sql)
        for func in post_funcs:
            func(cls, conn)

    @classmethod
    def createTable(cls, ifNotExists=False, createJoinTables=True,
                    createIndexes=True, applyConstraints=True,
                    connection=None):
        conn = connection or cls._connection
        if ifNotExists and cls.tableExists(connection=conn):
            return
        extra_sql = []
        post_funcs = []
        cls.sqlmeta.send(events.CreateTableSignal, cls, connection,
                         extra_sql, post_funcs)
        constraints = conn.createTable(cls)
        if applyConstraints:
            for constraint in constraints:
                conn.query(constraint)
        else:
            extra_sql.extend(constraints)
        if createJoinTables:
            cls.createJoinTables(ifNotExists=ifNotExists,
                                 connection=conn)
        if createIndexes:
            cls.createIndexes(ifNotExists=ifNotExists,
                              connection=conn)
        for func in post_funcs:
            func(cls, conn)
        return extra_sql

    @classmethod
    def createTableSQL(cls, createJoinTables=True, createIndexes=True,
                       connection=None):
        conn = connection or cls._connection
        sql, constraints = conn.createTableSQL(cls)
        if createJoinTables:
            join_sql = cls.createJoinTablesSQL(connection=conn)
            if join_sql:
                sql += ';\n' + join_sql
        if createIndexes:
            index_sql = cls.createIndexesSQL(connection=conn)
            if index_sql:
                sql += ';\n' + index_sql
        return sql, constraints

    @classmethod
    def createJoinTables(cls, ifNotExists=False, connection=None):
        conn = connection or cls._connection
        for join in cls._getJoinsToCreate():
            if (ifNotExists and
                    conn.tableExists(join.intermediateTable)):
                continue
            conn._SO_createJoinTable(join)

    @classmethod
    def createJoinTablesSQL(cls, connection=None):
        conn = connection or cls._connection
        sql = []
        for join in cls._getJoinsToCreate():
            sql.append(conn._SO_createJoinTableSQL(join))
        return ';\n'.join(sql)

    @classmethod
    def createIndexes(cls, ifNotExists=False, connection=None):
        conn = connection or cls._connection
        for _index in cls.sqlmeta.indexes:
            if not _index:
                continue
            conn._SO_createIndex(cls, _index)

    @classmethod
    def createIndexesSQL(cls, connection=None):
        conn = connection or cls._connection
        sql = []
        for _index in cls.sqlmeta.indexes:
            if not _index:
                continue
            sql.append(conn.createIndexSQL(cls, _index))
        return ';\n'.join(sql)

    @classmethod
    def _getJoinsToCreate(cls):
        joins = []
        for join in cls.sqlmeta.joins:
            if not join:
                continue
            if not join.hasIntermediateTable() or \
                    not getattr(join, 'createRelatedTable', True):
                continue
            if join.soClass.__name__ > join.otherClass.__name__:
                continue
            joins.append(join)
        return joins

    @classmethod
    def dropJoinTables(cls, ifExists=False, connection=None):
        conn = connection or cls._connection
        for join in cls.sqlmeta.joins:
            if not join:
                continue
            if not join.hasIntermediateTable() or \
                    not getattr(join, 'createRelatedTable', True):
                continue
            if join.soClass.__name__ > join.otherClass.__name__:
                continue
            if ifExists and \
                    not conn.tableExists(join.intermediateTable):
                continue
            conn._SO_dropJoinTable(join)

    @classmethod
    def clearTable(cls, connection=None, clearJoinTables=True):
        # 3-03 @@: Maybe this should check the cache... but it's
        # kind of crude anyway, so...
        conn = connection or cls._connection
        conn.clearTable(cls.sqlmeta.table)
        if clearJoinTables:
            for join in cls._getJoinsToCreate():
                conn.clearTable(join.intermediateTable)

    def destroySelf(self):
        post_funcs = []
        self.sqlmeta.send(events.RowDestroySignal, self, post_funcs)
        # Kills this object.  Kills it dead!

        klass = self.__class__

        # Free related joins on the base class
        for join in klass.sqlmeta.joins:
            if isinstance(join, joins.SORelatedJoin):
                q = "DELETE FROM %s WHERE %s=%d" % (join.intermediateTable,
                                                    join.joinColumn, self.id)
                self._connection.query(q)

        depends = self._SO_depends()
        for k in depends:
            # Free related joins
            for join in k.sqlmeta.joins:
                if isinstance(join, joins.SORelatedJoin) and \
                        join.otherClassName == klass.__name__:
                    q = "DELETE FROM %s WHERE %s=%d" % (
                        join.intermediateTable, join.otherColumn, self.id)
                    self._connection.query(q)

            cols = findDependantColumns(klass.__name__, k)

            # Don't confuse the rest of the process
            if len(cols) == 0:
                continue

            query = []
            restrict = False
            for _col in cols:
                query.append(getattr(k.q, _col.name) == self.id)
                if _col.cascade is False:
                    # Found a restriction
                    restrict = True
            query = sqlbuilder.OR(*query)
            results = k.select(query, connection=self._connection)
            if restrict and results.count():
                # Restrictions only apply if there are
                # matching records on the related table
                raise SQLObjectIntegrityError(
                    "Tried to delete %s::%s but "
                    "table %s has a restriction against it" %
                    (klass.__name__, self.id, k.__name__))

            setnull = {}
            for _col in cols:
                if _col.cascade == 'null':
                    setnull[_col.name] = None
            if setnull:
                for row in results:
                    clear = {}
                    for name in setnull:
                        if getattr(row, name) == self.id:
                            clear[name] = None
                    row.set(**clear)

            delete = False
            for _col in cols:
                if _col.cascade is True:
                    delete = True
            assert delete or setnull or restrict, (
                "Class %s depends on %s according to "
                "findDependantColumns, but this seems inaccurate"
                % (k, klass))
            if delete:
                for row in results:
                    row.destroySelf()

        self.sqlmeta._obsolete = True
        self._connection._SO_delete(self)
        self._connection.cache.expire(self.id, self.__class__)

        for func in post_funcs:
            func(self)

        post_funcs = []
        self.sqlmeta.send(events.RowDestroyedSignal, self, post_funcs)
        for func in post_funcs:
            func(self)

    @classmethod
    def delete(cls, id, connection=None):
        obj = cls.get(id, connection=connection)
        obj.destroySelf()

    @classmethod
    def deleteMany(cls, where=NoDefault, connection=None):
        conn = connection or cls._connection
        conn.query(conn.sqlrepr(sqlbuilder.Delete(cls.sqlmeta.table, where)))

    @classmethod
    def deleteBy(cls, connection=None, **kw):
        conn = connection or cls._connection
        conn.query(conn.sqlrepr(sqlbuilder.Delete(
            cls.sqlmeta.table, conn._SO_columnClause(cls, kw))))

    def __repr__(self):
        if not hasattr(self, 'id'):
            # Object initialization not finished.  No attributes can be read.
            return '<%s (not initialized)>' % self.__class__.__name__
        return '<%s %r %s>' \
            % (self.__class__.__name__,
               self.id,
               ' '.join(
                   ['%s=%s' % (name, repr(value))
                    for name, value in self._reprItems()]))

    def __sqlrepr__(self, db):
        return str(self.id)

    @classmethod
    def sqlrepr(cls, value, connection=None):
        return (connection or cls._connection).sqlrepr(value)

    @classmethod
    def coerceID(cls, value):
        if isinstance(value, cls):
            return value.id
        else:
            return cls.sqlmeta.idType(value)

    def _reprItems(self):
        items = []
        for _col in self.sqlmeta.columnList:
            value = getattr(self, _col.name)
            r = repr(value)
            if len(r) > 20:
                value = r[:17] + "..." + r[-1]
            items.append((_col.name, value))
        return items

    @classmethod
    def setConnection(cls, value):
        if isinstance(value, string_type):
            value = dbconnection.connectionForURI(value)
        cls._connection = value

    def tablesUsedImmediate(self):
        return [self.__class__.q]

    # hash implementation

    def __hash__(self):
        # We hash on class name and id, since that should be
        # unique
        return hash((self.__class__.__name__, self.id))

    # Comparison

    def __eq__(self, other):
        if self.__class__ is other.__class__:
            if self.id == other.id:
                return True
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return NotImplemented

    def __le__(self, other):
        return NotImplemented

    def __gt__(self, other):
        return NotImplemented

    def __ge__(self, other):
        return NotImplemented

    # (De)serialization (pickle, etc.)

    def __getstate__(self):
        if self.sqlmeta._perConnection:
            from pickle import PicklingError
            raise PicklingError(
                'Cannot pickle an SQLObject instance '
                'that has a per-instance connection')
        if self.sqlmeta.lazyUpdate and self._SO_createValues:
            self.syncUpdate()
        d = self.__dict__.copy()
        del d['sqlmeta']
        del d['_SO_validatorState']
        del d['_SO_writeLock']
        del d['_SO_createValues']
        return d

    def __setstate__(self, d):
        self.__init__(_SO_fetch_no_create=1)
        self._SO_validatorState = sqlbuilder.SQLObjectState(self)
        self._SO_writeLock = threading.Lock()
        self._SO_createValues = {}
        self.__dict__.update(d)
        cls = self.__class__
        cache = self._connection.cache
        if cache.tryGet(self.id, cls) is not None:
            raise ValueError(
                "Cannot unpickle %s row with id=%s - "
                "a different instance with the id already exists "
                "in the cache" % (cls.__name__, self.id))
        cache.created(self.id, cls, self)
(**kw)
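A minimal usage sketch of the deletion entry points above (destroySelf, delete, deleteBy, deleteMany), assuming a hypothetical Person table over an in-memory SQLite connection:

from sqlobject import SQLObject, StringCol, sqlhub, connectionForURI

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')

class Person(SQLObject):
    name = StringCol()

Person.createTable()
Person(name='Alice').destroySelf()            # delete one fetched row, honoring cascades
Person.delete(Person(name='Bob').id)          # get() by id, then destroySelf()
Person(name='Carol')
Person.deleteBy(name='Carol')                 # DELETE ... WHERE name = 'Carol'
Person.deleteMany(Person.q.name == 'Dave')    # DELETE with an arbitrary WHERE clause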
28,282
sqlobject.main
_SO_finishCreate
null
def _SO_finishCreate(self, id=None): # Here's where an INSERT is finalized. # These are all the column values that were supposed # to be set, but were delayed until now: setters = self._SO_createValues.items() setters = sorted( setters, key=lambda c: self.sqlmeta.columns[c[0]].creationOrder) # Here's their database names: names = [self.sqlmeta.columns[v[0]].dbName for v in setters] values = [v[1] for v in setters] # Get rid of _SO_create*, we aren't creating anymore. # Doesn't have to be threadsafe because we're still in # new(), which doesn't need to be threadsafe. self.sqlmeta.dirty = False if not self.sqlmeta.lazyUpdate: del self._SO_createValues else: self._SO_createValues = {} del self.sqlmeta._creating # Do the insert -- most of the SQL in this case is left # up to DBConnection, since getting a new ID is # non-standard. id = self._connection.queryInsertID(self, id, names, values) cache = self._connection.cache cache.created(id, self.__class__, self) self._init(id) post_funcs = [] kw = dict([('class', self.__class__), ('id', id)]) def _send_RowCreatedSignal(): self.sqlmeta.send(events.RowCreatedSignal, self, kw, post_funcs) for func in post_funcs: func(self) _postponed_local.postponed_calls.append(_send_RowCreatedSignal)
(self, id=None)
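_SO_finishCreate defers the INSERT until every column value has been collected, then fires RowCreatedSignal through the postponed-call list. A hedged sketch of observing this, reusing the hypothetical Person table from the earlier sketch (the handler signature is inferred from the send() call above):

from sqlobject.events import listen, RowCreatedSignal

def on_created(instance, kwargs, post_funcs):
    # Runs only after queryInsertID() has assigned the new id.
    print('created', instance.id)

listen(on_created, Person, RowCreatedSignal)
Person(name='Eve')   # prints once the postponed calls execute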
28,283
sqlobject.main
_SO_foreignKey
null
def _SO_foreignKey(self, value, joinClass, idName=None): if value is None: return None if self.sqlmeta._perConnection: connection = self._connection else: connection = None if idName is None: # Get by id return joinClass.get(value, connection=connection) return joinClass.select( getattr(joinClass.q, idName) == value, connection=connection).getOne()
(self, value, joinClass, idName=None)
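_SO_foreignKey is what attribute access on a ForeignKey column ultimately calls. A hedged sketch, again reusing the hypothetical Person table:

from sqlobject import ForeignKey

class Pet(SQLObject):
    owner = ForeignKey('Person')

Pet.createTable()
alice = Person(name='Alice')
pet = Pet(owner=alice)
assert pet.owner.id == alice.id   # resolved via joinClass.get(value)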
28,284
sqlobject.main
_SO_getID
null
def _SO_getID(self, obj, refColumn=None): return getID(obj, refColumn)
(self, obj, refColumn=None)
28,285
sqlobject.main
_SO_getValue
null
def _SO_getValue(self, name): # Retrieves a single value from the database. Simple. assert not self.sqlmeta._obsolete, ( "%s with id %s has become obsolete" % (self.__class__.__name__, self.id)) # @@: do we really need this lock? # self._SO_writeLock.acquire() column = self.sqlmeta.columns[name] results = self._connection._SO_selectOne(self, [column.dbName]) # self._SO_writeLock.release() assert results is not None, "%s with id %s is not in the database" % ( self.__class__.__name__, self.id) value = results[0] if column.to_python: value = column.to_python(value, self._SO_validatorState) return value
(self, name)
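_SO_getValue is the per-attribute fallback used when column values are not cached. A hedged sketch that forces it by disabling cacheValues on a hypothetical Entry table:

class Entry(SQLObject):
    class sqlmeta:
        cacheValues = False
    body = StringCol()

Entry.createTable()
e = Entry(body='hi')
e.body   # each read issues SELECT body FROM entry WHERE id = ...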
28,286
sqlobject.main
_SO_loadValue
null
def _SO_loadValue(self, attrName): try: return getattr(self, attrName) except AttributeError: try: self._SO_writeLock.acquire() try: # Maybe, just in the moment since we got the lock, # some other thread did a _SO_loadValue and we # have the attribute! Let's try and find out! We # can keep trying this all day and still beat the # performance on the database call (okay, we can # keep trying this for a few msecs at least)... result = getattr(self, attrName) except AttributeError: pass else: return result self.sqlmeta.expired = False dbNames = [col.dbName for col in self.sqlmeta.columnList] selectResults = self._connection._SO_selectOne(self, dbNames) if not selectResults: raise SQLObjectNotFound( "The object %s by the ID %s has been deleted" % ( self.__class__.__name__, self.id)) self._SO_selectInit(selectResults) result = getattr(self, attrName) return result finally: self._SO_writeLock.release()
(self, attrName)
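_SO_loadValue repopulates an expired row under the write lock. A hedged sketch driving it through the public expire() method:

p = Person(name='Frank')
p.expire()   # drop cached column values; marks the row expired
p.name       # _SO_loadValue re-SELECTs every column and calls _SO_selectInit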
28,287
sqlobject.main
_SO_selectInit
null
def _SO_selectInit(self, row): for _col, colValue in zip(self.sqlmeta.columnList, row): if _col.to_python: colValue = _col.to_python(colValue, self._SO_validatorState) setattr(self, instanceName(_col.name), colValue)
(self, row)
28,288
sqlobject.main
_SO_setValue
null
def _SO_setValue(self, name, value, from_python, to_python): # This is the place where we actually update the # database. # If we are _creating, the object doesn't yet exist # in the database, and we can't insert it until all # the parts are set. So we just keep them in a # dictionary until later: d = {name: value} if not self.sqlmeta._creating and \ not getattr(self.sqlmeta, "row_update_sig_suppress", False): self.sqlmeta.send(events.RowUpdateSignal, self, d) if len(d) != 1 or name not in d: # Already called RowUpdateSignal, don't call it again # inside .set() self.sqlmeta.row_update_sig_suppress = True self.set(**d) del self.sqlmeta.row_update_sig_suppress value = d[name] if from_python: dbValue = from_python(value, self._SO_validatorState) else: dbValue = value if to_python: value = to_python(dbValue, self._SO_validatorState) if self.sqlmeta._creating or self.sqlmeta.lazyUpdate: self.sqlmeta.dirty = True self._SO_createValues[name] = dbValue setattr(self, instanceName(name), value) return self._connection._SO_update( self, [(self.sqlmeta.columns[name].dbName, dbValue)]) if self.sqlmeta.cacheValues: setattr(self, instanceName(name), value) post_funcs = [] self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs) for func in post_funcs: func(self)
(self, name, value, from_python, to_python)
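_SO_setValue either issues an UPDATE immediately or, under lazyUpdate, parks the converted value in _SO_createValues. A hedged sketch of the lazy path, on a hypothetical Draft table:

class Draft(SQLObject):
    class sqlmeta:
        lazyUpdate = True
    title = StringCol()

Draft.createTable()
d = Draft(title='one')
d.title = 'two'    # marks sqlmeta.dirty; no UPDATE is sent yet
d.syncUpdate()     # flushes the pending UPDATE in one statement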
28,289
sqlobject.main
__classinit__
null
def __classinit__(cls, new_attrs): # This is true if we're initializing the SQLObject class, # instead of a subclass: is_base = cls.__bases__ == (object,) cls._SO_setupSqlmeta(new_attrs, is_base) implicitColumns = _collectAttributes(cls, new_attrs, col.Col) implicitJoins = _collectAttributes(cls, new_attrs, joins.Join) implicitIndexes = _collectAttributes(cls, new_attrs, index.DatabaseIndex) if not is_base: cls._SO_cleanDeprecatedAttrs(new_attrs) if '_connection' in new_attrs: connection = new_attrs['_connection'] del cls._connection assert 'connection' not in new_attrs elif 'connection' in new_attrs: connection = new_attrs['connection'] del cls.connection else: connection = None cls._SO_finishedClassCreation = False ###################################################### # Set some attributes to their defaults, if necessary. # First we get the connection: if not connection and not getattr(cls, '_connection', None): mod = sys.modules[cls.__module__] # See if there's a __connection__ global in # the module, use it if there is. if hasattr(mod, '__connection__'): connection = mod.__connection__ # Do not check hasattr(cls, '_connection') here - it is possible # SQLObject parent class has a connection attribute that came # from sqlhub, e.g.; check __dict__ only. if connection and ('_connection' not in cls.__dict__): cls.setConnection(connection) sqlmeta = cls.sqlmeta # We have to check if there are columns in the inherited # _columns where the attribute has been set to None in this # class. If so, then we need to remove that column from # _columns. for key in sqlmeta.columnDefinitions.keys(): if (key in new_attrs and new_attrs[key] is None): del sqlmeta.columnDefinitions[key] for column in sqlmeta.columnDefinitions.values(): sqlmeta.addColumn(column) for column in implicitColumns: sqlmeta.addColumn(column) # Now the class is in an essentially OK-state, so we can # set up any magic attributes: declarative.setup_attributes(cls, new_attrs) if sqlmeta.fromDatabase: sqlmeta.addColumnsFromDatabase() for j in implicitJoins: sqlmeta.addJoin(j) for i in implicitIndexes: sqlmeta.addIndex(i) def order_getter(o): return o.creationOrder sqlmeta.columnList.sort(key=order_getter) sqlmeta.indexes.sort(key=order_getter) sqlmeta.indexDefinitions.sort(key=order_getter) # Joins cannot be sorted because addJoin created accessors # that remember indexes. # sqlmeta.joins.sort(key=order_getter) sqlmeta.joinDefinitions.sort(key=order_getter) # We don't setup the properties until we're finished with the # batch adding of all the columns... cls._notifyFinishClassCreation() cls._SO_finishedClassCreation = True makeProperties(cls) # We use the magic "q" attribute for accessing lazy # SQL where-clause generation. See the sql module for # more. if not is_base: cls.q = sqlbuilder.SQLObjectTable(cls) cls.j = sqlbuilder.SQLObjectTableWithJoins(cls) classregistry.registry(sqlmeta.registry).addClass(cls)
(cls, new_attrs)
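__classinit__ is also what lets a subclass drop an inherited column by assigning None, and what installs the magic q attribute. A hedged sketch with hypothetical Base/NoNote classes:

class Base(SQLObject):
    note = StringCol()

class NoNote(Base):
    note = None   # deleted from sqlmeta.columnDefinitions during class init

Base.q.note       # SQLBuilder column expression installed by __classinit__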
28,290
sqlobject.main
__eq__
null
def __eq__(self, other): if self.__class__ is other.__class__: if self.id == other.id: return True return False
(self, other)
28,291
sqlobject.main
__ge__
null
def __ge__(self, other): return NotImplemented
(self, other)
28,292
sqlobject.main
__getstate__
null
def __getstate__(self): if self.sqlmeta._perConnection: from pickle import PicklingError raise PicklingError( 'Cannot pickle an SQLObject instance ' 'that has a per-instance connection') if self.sqlmeta.lazyUpdate and self._SO_createValues: self.syncUpdate() d = self.__dict__.copy() del d['sqlmeta'] del d['_SO_validatorState'] del d['_SO_writeLock'] del d['_SO_createValues'] return d
(self)
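__getstate__ makes pickling possible by flushing pending lazy updates and stripping locks and validator state. A hedged sketch (the cache restriction comes from __setstate__ above):

import pickle

p = Person(name='Hank')
blob = pickle.dumps(p)   # raises PicklingError for per-connection instances
# pickle.loads(blob) raises ValueError while the original instance is still
# alive in the connection cache; unpickle in a fresh process (or after the
# original has been garbage-collected) instead.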
28,293
sqlobject.main
__gt__
null
def __gt__(self, other): return NotImplemented
(self, other)
28,294
sqlobject.main
__hash__
null
def __hash__(self): # We hash on class name and id, since that should be # unique return hash((self.__class__.__name__, self.id))
(self)
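Together with __eq__ above, rows hash and compare by (class name, id), so two fetches of the same row are interchangeable in sets and dicts. A hedged sketch:

pid = Person(name='Gina').id
a = Person.get(pid)
b = Person.get(pid)
assert a == b and hash(a) == hash(b)
assert len({a, b}) == 1   # one element: equal hash and equal value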
28,295
sqlobject.main
__init__
null
def __init__(self, **kw): # If we are the outermost constructor of a hierarchy of # InheritableSQLObjects (or simply _the_ constructor of a "normal" # SQLObject), we create a threadlocal list that collects the # RowCreatedSignals, and executes them when this very constructor is left try: _postponed_local.postponed_calls postponed_created = False except AttributeError: _postponed_local.postponed_calls = [] postponed_created = True try: # We shadow the sqlmeta class with an instance of sqlmeta # that points to us (our sqlmeta buddy object; where the # sqlmeta class is our class's buddy class) self.sqlmeta = self.__class__.sqlmeta(self) # The get() classmethod/constructor uses a magic keyword # argument when it wants an empty object, fetched from the # database. So we have nothing more to do in that case: if '_SO_fetch_no_create' in kw: return post_funcs = [] self.sqlmeta.send(events.RowCreateSignal, self, kw, post_funcs) # Pass the connection object along if we were given one. if 'connection' in kw: connection = kw.pop('connection') if getattr(self, '_connection', None) is not connection: self._connection = connection self.sqlmeta._perConnection = True self._SO_writeLock = threading.Lock() if 'id' in kw: id = self.sqlmeta.idType(kw['id']) del kw['id'] else: id = None self._create(id, **kw) for func in post_funcs: func(self) finally: # if we are the creator of the tl-storage, we have to # execute the postponed calls and, under all circumstances, # remove the tl-storage if postponed_created: try: for func in _postponed_local.postponed_calls: func() finally: del _postponed_local.postponed_calls
(self, **kw)
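__init__ also accepts the magic connection keyword, flipping the instance into per-connection mode. A hedged sketch with a second in-memory database:

other = connectionForURI('sqlite:/:memory:')
Person.createTable(connection=other)
z = Person(name='Zoe', connection=other)   # sets sqlmeta._perConnection = True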
28,296
sqlobject.main
__le__
null
def __le__(self, other): return NotImplemented
(self, other)
28,297
sqlobject.main
__lt__
null
def __lt__(self, other): return NotImplemented
(self, other)