function: stringlengths 11–56k
repo_name: stringlengths 5–60
features: sequence
def RemoveTransaction(self, transaction):
    return self.RemoveTransactions([transaction])
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def Purge(self):
    """
    Call this when the account is being deleted and should purge itself.
    This will delete all transactions but NOT any linked transactions in other
    accounts, because when you close a bank account it doesn't erase the
    historical transfers in other accounts.
    """
    self.RemoveTransactions(self.Transactions, removeLinkedTransactions=False)
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def MoveTransactions(self, transactions, destAccount):
    Publisher.sendMessage("batch.start")
    sources = self.RemoveTransactions(transactions)
    destAccount.AddTransactions(transactions, sources)
    Publisher.sendMessage("batch.end")
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def GetMintId(self):
    return self._MintId
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def SetMintId(self, mintId):
    if mintId:
        mintId = int(mintId)
    self._MintId = mintId
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def IsMintEnabled(self):
    return self.MintId is not None
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def IsOutOfSync(self):
    """Returns true only if explicitly out of sync: set to sync and able
    to get a balance, and balance is different."""
    if self.IsMintEnabled():
        if Mint.IsLoggedIn():
            return not self.IsInSync()
    return False
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def IsInSync(self):
    if self.MintId is None:
        raise bankexceptions.MintIntegrationException("This account has no MintId.")
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def GetSyncString(self):
    if self.MintId is None:
        raise bankexceptions.MintIntegrationException("This account has no MintId.")
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def onTransactionAmountChanged(self, message):
    transaction = message.data
    if transaction.Parent == self:
        debug.debug("Updating balance because I am %s: %s" % (self.Name, transaction))
        self.Balance = sum((t.Amount for t in self.Transactions))
    else:
        debug.debug("Ignoring transaction because I am %s: %s" % (self.Name, transaction))
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def __cmp__(self, other):
    return cmp(self.Name, other.Name)
mrooney/wxbanker
[ 17, 9, 17, 7, 1337971002 ]
def get_event_times(event, n_procs):
    result = [s[event][j]["time"] for j in range(n_procs)]
    max = np.max(result)
    min = np.min(result)
    if max > 0:
        return max, min, min / max
    else:
        return max, min, 0.0
pism/pism
[ 82, 34, 82, 29, 1320433417 ]
def get_data(event_list):
    "Get event data from the time-stepping loop stage."
    return {e: get_event_times(e, n_procs) for e in event_list if e in list(s.keys())}
pism/pism
[ 82, 34, 82, 29, 1320433417 ]
def plot(data, total, grand_total):
    events = [(e, data[e][0]) for e in data]
    events.sort(key=lambda x: x[1])

    def better_name(n):
        if n in list(better_names.keys()):
            return better_names[n]
        else:
            return n

    names = [e[0] for e in events]
    times = [e[1] for e in events]
    times_percent = [100.0 * t / float(total) for t in times]

    if grand_total is not None:
        comments = ["(%3.1f s, %3.1f%%)" % (time, 100.0 * time / grand_total)
                    for time in times]
    else:
        comments = ["(%3.1f s)" % time for time in times]

    labels = [better_name(name) + " " + comment
              for name, comment in zip(names, comments)]

    explode = [0.05] * len(times)
    plt.pie(times_percent, autopct="%3.1f%%", labels=labels, colors=colors,
            startangle=0.0, explode=explode)
    plt.margins(x=0.2, y=0.1)
    plt.axis('equal')
pism/pism
[ 82, 34, 82, 29, 1320433417 ]
def __init__(self):
    self.session = None
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def update(self, data, scene):
    if not self.session:
        if self.is_preview:
            cscene = bpy.context.scene.cycles
            use_osl = cscene.shading_system and cscene.device == 'CPU'
            engine.create(self, data, scene, None, None, None, use_osl)
        else:
            engine.create(self, data, scene)
    else:
        engine.reset(self, data, scene)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def bake(self, scene, obj, pass_type, pass_filter, object_id, pixel_array,
         num_pixels, depth, result):
    engine.bake(self, obj, pass_type, pass_filter, object_id, pixel_array,
                num_pixels, depth, result)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def view_update(self, context):
    if not self.session:
        engine.create(self, context.blend_data, context.scene,
                      context.region, context.space_data, context.region_data)
    engine.update(self, context.blend_data, context.scene)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def update_script_node(self, node):
    if engine.with_osl():
        from . import osl
        osl.update_script_node(node, self.report)
    else:
        self.report({'ERROR'}, "OSL support disabled in this build.")
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def register():
    from . import ui
    from . import properties
    from . import presets
    import atexit

    # Make sure we only registered the callback once.
    atexit.unregister(engine_exit)
    atexit.register(engine_exit)

    engine.init()

    properties.register()
    ui.register()
    presets.register()

    bpy.utils.register_module(__name__)

    bpy.app.handlers.version_update.append(version_update.do_versions)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def main(arguments=None):
    """
    The main function used when ``yaml_to_database.py`` is installed as a cl tool
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName=False
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    from fundamentals.mysql import sqlite2mysql
    converter = sqlite2mysql(
        log=log,
        settings=settings,
        pathToSqlite=pathToSqliteDB,
        tablePrefix=tablePrefix,
        dbConn=dbConn
    )
    converter.convert_sqlite_to_mysql()

    return
thespacedoctor/fundamentals
[ 3, 3, 3, 3, 1456484053 ]
def __init__( self, log, pathToSqlite, tablePrefix="", settings=False, dbConn=False
thespacedoctor/fundamentals
[ 3, 3, 3, 3, 1456484053 ]
def convert_sqlite_to_mysql(
        self):
    """*copy the contents of the sqlite database into the mysql database*

    See class docstring for usage
    """
    from fundamentals.renderer import list_of_dictionaries
    from fundamentals.mysql import directory_script_runner
    self.log.debug('starting the ``convert_sqlite_to_mysql`` method')

    con = lite.connect(self.pathToSqlite)
    con.row_factory = lite.Row
    cur = con.cursor()

    # GET ALL TABLE NAMES
    cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
    tables = cur.fetchall()

    createStatements = []
    inserts = []
    for table in tables:
        table = table['name']
        if table == "sqlite_sequence":
            continue

        # CREATE TABLE collection_books (folder_id, fingerprint, primary key(folder_id, fingerprint));
        # GENERATE THE MYSQL CREATE STATEMENTS FOR EACH TABLE
        cur.execute(
            "SELECT sql FROM sqlite_master WHERE name = '%(table)s';" % locals())
        createStatement = cur.fetchone()
        createStatement = createStatement[0].replace('"', '`') + ";"
        if "DEFAULT" not in createStatement:
            if "primary key(" in createStatement:
                tmp = createStatement.split("primary key(")
                tmp[0] = tmp[0].replace(
                    ",", " varchar(150) DEFAULT NULL,")
                createStatement = ("primary key(").join(tmp)
            if "primary key," in createStatement:
                tmp = createStatement.split("primary key,")
                tmp[1] = tmp[1].replace(
                    ",", " varchar(150) DEFAULT NULL,")
                tmp[1] = tmp[1].replace(
                    ");", " varchar(150) DEFAULT NULL);")
                createStatement = ("primary key,").join(tmp)
        createStatement = createStatement.replace(
            "INTEGER PRIMARY KEY", "INTEGER AUTO_INCREMENT PRIMARY KEY")
        createStatement = createStatement.replace(
            "AUTOINCREMENT", "AUTO_INCREMENT")
        createStatement = createStatement.replace(
            "DEFAULT 't'", "DEFAULT '1'")
        createStatement = createStatement.replace(
            "DEFAULT 'f'", "DEFAULT '0'")
        createStatement = createStatement.replace(",'t'", ",'1'")
        createStatement = createStatement.replace(",'f'", ",'0'")
        if "CREATE TABLE `" in createStatement:
            createStatement = createStatement.replace(
                "CREATE TABLE `", "CREATE TABLE IF NOT EXISTS `" + self.tablePrefix)
        else:
            createStatement = createStatement.replace(
                "CREATE TABLE ", "CREATE TABLE IF NOT EXISTS " + self.tablePrefix)
        if ", primary key(" in createStatement:
            createStatement = createStatement.replace(", primary key(", """,
thespacedoctor/fundamentals
[ 3, 3, 3, 3, 1456484053 ]
def _iter_filename_over_mountpoints(self, filename):
    '''iterate absolute filename over self.mountpoints and self.root'''
    for mountpoint in self.mountpoints + [self.root]:
        _drivename, _filename = os.path.splitdrive(filename)
        _filename = _filename.lstrip(os.path.sep)
        yield os.path.join(_drivename + mountpoint, _filename)
RedHatQE/python-moncov
[ 1, 2, 1, 4, 1358171954 ]
def raw_data(self, arg_ignored):
    '''moncov patched raw data method to fetch stats from moncov data store'''
    import moncov
    db = moncov.conf.get_db(dbhost=dbhost, dbport=dbport, dbname=dbname)
    if hasattr(self, '_moncov_data_cache'):
        return self._moncov_data_cache
    data = {
        'arcs': {},
        'lines': {}
    }
    for filename in moncov.data.filenames(db):
        data['arcs'][filename] = list()
        data['lines'][filename] = list()
        for arc in moncov.data.filename_arcs(db, filename):
            data['arcs'][filename].append(moncov.data.arc2tuple(arc))
            data['lines'][filename].append(moncov.data.arc2line(arc))
        # duplicate with various mountpoints
        try:
            for mount_filename in self._iter_filename_over_mountpoints(filename):
                data['arcs'][mount_filename] = data['arcs'][filename]
                data['lines'][mount_filename] = data['lines'][filename]
        except Exception as e:
            import sys
            print sys.exc_info()
    self._moncov_data_cache = data
    return self._moncov_data_cache
RedHatQE/python-moncov
[ 1, 2, 1, 4, 1358171954 ]
def _execute(self, unitOfWork):
    raise NotImplementedError("Override this function in a subclass")
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, itemId):
    self.__itemId = itemId
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def _execute(self, uow):
    self._session = uow.session
    sql = '''select tags.id, tags.name, tags.synonym_code
               from tags
               left join items_tags on tags.id = items_tags.tag_id
              where items_tags.item_id is NULL
          '''
    hangingTags = self._session.query(db.Tag).from_statement(sql).all()
    count = len(hangingTags)
    for tag in hangingTags:
        self._session.delete(tag)
    if count > 0:
        self._session.commit()
    return count
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def _execute(self, uow):
    self._session = uow.session
    sql = '''select fields.id, fields.name
               from fields
               left join items_fields on fields.id = items_fields.field_id
              where items_fields.item_id is NULL
          '''
    hangingFields = self._session.query(db.Field).from_statement(sql).all()
    count = len(hangingFields)
    for field in hangingFields:
        self._session.delete(field)
    if count > 0:
        self._session.commit()
    return count
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, data_ref_id, thumbnail):
    self.__dataRefId = data_ref_id
    self.__thumbnail = thumbnail
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, limit=0, page=1, order_by=""):
    self.__limit = limit
    self.__page = page
    self.__orderBy = order_by
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __getUntaggedItems(self, limit, page, order_by):
    order_by_1 = ""
    order_by_2 = ""
    for col, direction in order_by:
        if order_by_1:
            order_by_1 += ", "
        if order_by_2:
            order_by_2 += ", "
        order_by_2 += col + " " + direction + " "
        if col == "title":
            order_by_1 += col + " " + direction + " "
    if order_by_1:
        order_by_1 = " ORDER BY " + order_by_1
    if order_by_2:
        order_by_2 = " ORDER BY " + order_by_2

    thumbnail_default_size = UserConfig().get("thumbnail_size", consts.THUMBNAIL_DEFAULT_SIZE)

    if page < 1:
        raise ValueError("Page number cannot be negative or zero.")
    if limit < 0:
        raise ValueError("Limit cannot be negative number.")

    limit_offset = ""
    if limit > 0:
        offset = (page - 1) * limit
        limit_offset += "LIMIT {0} OFFSET {1}".format(limit, offset)

    sql = '''select sub.*, ''' + \
        db.Item_Tag._sql_from() + ", " + \
        db.Tag._sql_from() + ", " + \
        db.Item_Field._sql_from() + ", " + \
        db.Field._sql_from() + \
        ''' from (select i.*, ''' + \
        db.DataRef._sql_from() + ", " + \
        db.Thumbnail._sql_from() + \
        ''' from items i
            left join items_tags it on i.id = it.item_id
            left join data_refs on i.data_ref_id = data_refs.id
            left join thumbnails on data_refs.id = thumbnails.data_ref_id
                 and thumbnails.size = ''' + str(thumbnail_default_size) + '''
            where it.item_id is null AND i.alive
        ''' + order_by_1 + " " + limit_offset + '''
        ) as sub
        left join items_tags on sub.id = items_tags.item_id
        left join tags on tags.id = items_tags.tag_id
        left join items_fields on sub.id = items_fields.item_id
        left join fields on fields.id = items_fields.field_id
    ''' + order_by_2

    items = []
    try:
        items = self._session.query(db.Item)\
            .options(contains_eager("data_ref"), \
                     contains_eager("data_ref.thumbnails"), \
                     contains_eager("item_tags"), \
                     contains_eager("item_tags.tag"), \
                     contains_eager("item_fields"),\
                     contains_eager("item_fields.field"))\
            .from_statement(sql).all()
        for item in items:
            self._session.expunge(item)
    except ResourceClosedError:
        pass

    return items
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, query_tree, limit=0, page=1, order_by=[]):
    self.__queryTree = query_tree
    self.__limit = limit
    self.__page = page
    self.__orderBy = order_by
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __queryItemsByParseTree(self, query_tree, limit, page, order_by):
    order_by_1 = ""
    order_by_2 = ""
    for col, direction in order_by:
        if order_by_1:
            order_by_1 += ", "
        if order_by_2:
            order_by_2 += ", "
        order_by_2 += col + " " + direction + " "
        if col == "title":
            order_by_1 += col + " " + direction + " "
    if order_by_1:
        order_by_1 = " ORDER BY " + order_by_1
    if order_by_2:
        order_by_2 = " ORDER BY " + order_by_2

    sub_sql = query_tree.interpret()

    if page < 1:
        raise ValueError("Page number cannot be negative or zero.")
    if limit < 0:
        raise ValueError("Limit cannot be negative number.")

    limit_offset = ""
    if limit > 0:
        offset = (page - 1) * limit
        limit_offset += "LIMIT {0} OFFSET {1}".format(limit, offset)

    sql = '''select sub.*,
    ''' + db.Item_Tag._sql_from() + ''',
    ''' + db.Tag._sql_from() + ''',
    ''' + db.Thumbnail._sql_from() + ''',
    ''' + db.Item_Field._sql_from() + ''',
    ''' + db.Field._sql_from() + '''
    from (''' + sub_sql + " " + order_by_1 + " " + limit_offset + ''') as sub
    left join items_tags on sub.id = items_tags.item_id
    left join tags on tags.id = items_tags.tag_id
    left join items_fields on sub.id = items_fields.item_id
    left join fields on fields.id = items_fields.field_id
    left join thumbnails on thumbnails.data_ref_id = sub.data_refs_id
         and thumbnails.size = ''' + str(UserConfig().get("thumbnail_size", consts.THUMBNAIL_DEFAULT_SIZE)) + '''
    where sub.alive
    ''' + order_by_2

    items = []
    try:
        items = self._session.query(db.Item)\
            .options(contains_eager("data_ref"), \
                     contains_eager("data_ref.thumbnails"), \
                     contains_eager("item_tags"), \
                     contains_eager("item_tags.tag"), \
                     contains_eager("item_fields"),\
                     contains_eager("item_fields.field"))\
            .from_statement(sql).all()
        for item in items:
            self._session.expunge(item)
    except ResourceClosedError:
        pass

    return items
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, path, objType=None, status=None):
    assert not hlp.is_none_or_empty(path)
    assert objType is not None or status is not None

    # remove trailing slashes in the path
    while path.endswith(os.sep):
        path = path[0:-1]

    self.path = path
    self.status = status
    self.type = objType
    self.tags = []
    self.fields = []
    self.itemIds = []
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def fileBaseName(self):
    return os.path.basename(self.path)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def isDir(self):
    return self.type == FileInfo.DIR
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, relPath):
    self.__relPath = relPath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __getFileInfo(self, path):
    try:
        data_ref = self._session.query(db.DataRef)\
            .filter(db.DataRef.url_raw==hlp.to_db_format(path))\
            .options(joinedload_all("items"))\
            .options(joinedload_all("items.item_tags.tag"))\
            .options(joinedload_all("items.item_fields.field"))\
            .one()
        self._session.expunge(data_ref)

        finfo = FileInfo(data_ref.url, objType=FileInfo.FILE, status=FileInfo.STORED)
        for item in data_ref.items:
            finfo.itemIds.append(item.id)
            for item_tag in item.item_tags:
                finfo.tags.append(item_tag.tag.name)
            for item_field in item.item_fields:
                finfo.fields.append((item_field.field.name, item_field.field_value))
        return finfo
    except NoResultFound:
        return FileInfo(self.__relPath, status=FileInfo.UNTRACKED)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, login, password):
    self.__login = login
    self.__password = password
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __loginUser(self, login, password):
    if hlp.is_none_or_empty(login):
        raise err.LoginError("User login cannot be empty.")
    user = self._session.query(db.User).get(login)
    if user is None:
        raise err.LoginError("User {} doesn't exist.".format(login))
    if user.password != password:
        raise err.LoginError("Password incorrect.")
    self._session.expunge(user)
    return user
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, userLogin, newPasswordHash):
    self.__userLogin = userLogin
    self.__newPasswordHash = newPasswordHash
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, user):
    self.__user = user
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __saveNewUser(self, user):
    # TODO: have to do some basic check of user object validity
    self._session.add(user)
    self._session.commit()
    self._session.refresh(user)
    self._session.expunge(user)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def _execute(self, uow):
    self._session = uow.session
    return self.__getNamesOfAllTagsAndFields()
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, dirRelPath):
    self._dirRelPath = dirRelPath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, tag_names=[], user_logins=[], limit=0):
    self.__tag_names = tag_names
    self.__user_logins = user_logins
    self.__limit = limit
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __getRelatedTags(self, tag_names, user_logins, limit):
    # TODO user_logins is not used yet..
    if len(tag_names) == 0:
        # TODO The more items in the repository, the slower this query is performed.
        # I think, I should store in database some statistics information, such as number of items
        # tagged with each tag. With this information, the query can be rewritten and became much faster.
        if limit > 0:
            sql = '''
            select name, c from
            (select t.name as name, count(*) as c
               from items i, tags t
               join items_tags it on it.tag_id = t.id and it.item_id = i.id and i.alive
              where 1
              group by t.name
              ORDER BY c DESC LIMIT ''' + str(limit) + ''') as sub
            ORDER BY name
            '''
        else:
            sql = '''
            --get_related_tags() query
            select t.name as name, count(*) as c
              from items i, tags t
              join items_tags it on it.tag_id = t.id and it.item_id = i.id and i.alive
             where 1
             group by t.name
             ORDER BY t.name
            '''
        # ResourceClosedError could be raised when there are no related tags
        # for given list of tags.
        try:
            return self._session.query("name", "c").from_statement(sql).all()
        except ResourceClosedError:
            return []
    else:
        # First we get a list of item ids
        sql = '''--get_related_tags(): getting list of id for all selected tags
        select * from tags t
         where t.name in (''' + hlp.to_commalist(tag_names, lambda x: "'" + x + "'") + ''')
         order by t.id'''
        try:
            tags = self._session.query(db.Tag).from_statement(sql).all()
        except ResourceClosedError:
            tags = []
        tag_ids = []
        for tag in tags:
            tag_ids.append(tag.id)
        if len(tag_ids) == 0:
            # TODO Maybe raise an exception?
            return []

        sub_from = ""
        for i in range(len(tag_ids)):
            if i == 0:
                sub_from = sub_from + " items_tags it{} ".format(i + 1)
            else:
                sub_from = sub_from + \
                    (" join items_tags it{1} on it{1}.item_id=it{0}.item_id " + \
                     " AND it{1}.tag_id > it{0}.tag_id ").format(i, i + 1)

        sub_where = ""
        for i in range(len(tag_ids)):
            if i == 0:
                sub_where = sub_where + \
                    " it{0}.tag_id = {1} ".format(i + 1, tag_ids[i])
            else:
                sub_where = sub_where + \
                    " AND it{0}.tag_id = {1} ".format(i + 1, tag_ids[i])

        where = ""
        for i in range(len(tag_ids)):
            where = where + \
                " AND t.id <> {0} ".format(tag_ids[i])

        sql = '''--get_related_tags() query
        select t.name as name, count(*) as c
          from tags t
          join items_tags it on it.tag_id = t.id
          join items i on i.id = it.item_id
         where it.item_id IN (
            select it1.item_id
              from ''' + sub_from + '''
             where ''' + sub_where + '''
         ) ''' + where + ''' AND i.alive
         -- It is important that these ids followed in the order of raising
         group by t.name
         ORDER BY t.name'''
        try:
            return self._session.query("name", "c").from_statement(sql).all()
        except ResourceClosedError:
            return []
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, item_id, user_login, delete_physical_file=True):
    self.__itemId = item_id
    self.__userLogin = user_login
    self.__deletePhysicalFile = delete_physical_file
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __deleteItem(self, item_id, user_login, delete_physical_file=True):
    # We should not delete Item objects from database, because
    # we do not want hanging references in HistoryRec table.
    # So we just mark Items as deleted.

    # DataRef objects are deleted from database, if there are no references
    # to it from other alive Items.

    # TODO: Make admin users able to delete any files, owned by anybody.

    item = self._session.query(db.Item).get(item_id)
    if item.user_login != user_login:
        raise err.AccessError("Cannot delete item id={0} because it is owned by another user {1}."
                              .format(item_id, item.user_login))

    if item.hasTagsExceptOf(user_login):
        raise err.AccessError("Cannot delete item id={0} because another user tagged it."
                              .format(item_id))

    if item.hasFieldsExceptOf(user_login):
        raise err.AccessError("Cannot delete item id={0} because another user attached a field to it."
                              .format(item_id))

    data_ref = item.data_ref
    item.data_ref = None
    item.data_ref_id = None
    item.alive = False
    self._session.flush()
    # All bounded ItemTag and ItemField objects stay in database with the Item

    # If data_ref is not referenced by other Items, we delete it
    delete_data_ref = data_ref is not None

    # We should not delete DataRef if it is owned by another user
    if delete_data_ref and data_ref.user_login != user_login:
        delete_data_ref = False

    if delete_data_ref:
        another_item = self._session.query(db.Item).filter(db.Item.data_ref==data_ref).first()
        if another_item:
            delete_data_ref = False

    if delete_data_ref:
        is_file = (data_ref.type == db.DataRef.FILE)
        abs_path = os.path.join(self._repoBasePath, data_ref.url)

        self._session.delete(data_ref)
        self._session.flush()

        if is_file and delete_physical_file and os.path.exists(abs_path):
            os.remove(abs_path)

    self._session.commit()
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, item, srcAbsPath=None, dstRelPath=None):
    self.__item = item
    self.__srcAbsPath = srcAbsPath
    self.__dstRelPath = dstRelPath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __saveNewItem(self, item, srcAbsPath=None, dstRelPath=None):
    # We do not need any info that can be in item.data_ref object at this point.
    # But if it is not None, it may break the creation of tags/fields.
    # Don't worry, item.data_ref will be assigned a new DataRef instance later (if required).
    item.data_ref = None

    user_login = item.user_login
    if hlp.is_none_or_empty(user_login):
        raise err.AccessError("Argument user_login shouldn't be null or empty.")

    user = self._session.query(db.User).get(user_login)
    if user is None:
        raise err.AccessError("User with login {} doesn't exist.".format(user_login))

    # Making copies of tags and fields
    item_tags_copy = item.item_tags[:]
    item_fields_copy = item.item_fields[:]

    # Storing the item without tags and fields (for now)
    del item.item_tags[:]
    del item.item_fields[:]
    self._session.add(item)
    self._session.flush()

    tagNamesToAdd = map(lambda itag: itag.tag.name, item_tags_copy)
    operations.ItemOperations.addTags(self._session, item, tagNamesToAdd, user_login)

    nameValuePairsToAdd = map(lambda ifield: (ifield.field.name, ifield.field_value),
                              item_fields_copy)
    operations.ItemOperations.addOrUpdateFields(self._session, item, nameValuePairsToAdd,
                                                user_login)

    isDataRefRequired = not hlp.is_none_or_empty(srcAbsPath)
    if isDataRefRequired:
        operations.ItemOperations.addUntrackedFile(self._session, item,
                                                   self._repoBasePath,
                                                   srcAbsPath, dstRelPath,
                                                   user_login)

    self._session.commit()
    item_id = item.id
    self._session.expunge(item)
    return item_id
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, item, newSrcAbsPath, newDstRelPath, userLogin):
    '''
    Modifies Item and returns detached updated item or raises an exception,
    if something goes wrong.

    item --- is a detached object, representing a new state for stored item
    with id == item.id.
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def _execute(self, uow):
    self._session = uow.session
    self._repoBasePath = uow._repo_base_path
    self.__updateExistingItem(self.__item, self.__newSrcAbsPath,
                              self.__newDstRelPath, self.__userLogin)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __updateDataRefAndFilesystem(self, item, persistentItem, newSrcAbsPath, newDstRelPath, userLogin):
    origSrcAbsPath = None if persistentItem.data_ref is None \
        else os.path.join(self._repoBasePath, persistentItem.data_ref.url)
    assert origSrcAbsPath is None or len(origSrcAbsPath) > 0
    assert origSrcAbsPath is None or os.path.isabs(origSrcAbsPath)

    if origSrcAbsPath is None and newSrcAbsPath is None:
        # Item is not linked with a file neither before nor after update. Do nothing.
        assert newDstRelPath is None
        return

    if origSrcAbsPath is None and newSrcAbsPath is not None:
        # Item was without a file, and now it is linked with a file
        self.__addStoredOrUntrackedFile(persistentItem, newSrcAbsPath, newDstRelPath, userLogin)
        return

    if origSrcAbsPath is not None and newSrcAbsPath is None:
        # Item was linked with a file, now it is left without a file
        assert newDstRelPath is None
        operations.ItemOperations.removeFile(self._session, persistentItem)
        return

    if origSrcAbsPath is not None and origSrcAbsPath != newSrcAbsPath:
        # Item was with file1, now it is linked with a file2
        operations.ItemOperations.removeFile(self._session, persistentItem)
        self.__addStoredOrUntrackedFile(persistentItem, newSrcAbsPath, newDstRelPath, userLogin)
        return

    if origSrcAbsPath is not None and origSrcAbsPath == newSrcAbsPath:
        # Item was with a file, and after update it should be with the same file, but
        # it could be renamed or moved somewhere else within repository.
        origFileRelPath = os.path.relpath(origSrcAbsPath, self._repoBasePath)
        if newDstRelPath != origFileRelPath:
            # Item was with a file, but this file is going to be renamed or moved
            operations.ItemOperations.moveFile(self._session, persistentItem,
                                               self._repoBasePath, newDstRelPath)
        else:
            # Nothing has changed, do nothing
            pass
        return
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __updatePlainDataMembers(self, item, persistentItem):
    # Here we update simple data members of the item
    persistentItem.title = item.title
    persistentItem.user_login = item.user_login
    self._session.flush()
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __updateFields(self, item, persistentItem, user_login):
    newFieldNames = set(map(lambda ifield: ifield.field.name,
                            [ifield for ifield in item.item_fields]))
    oldFieldNames = set(map(lambda ifield: ifield.field.name,
                            [ifield for ifield in persistentItem.item_fields]))

    fieldNamesToRemove = oldFieldNames - newFieldNames
    operations.ItemOperations.removeFields(self._session, persistentItem, fieldNamesToRemove)

    fieldNamesToStay = newFieldNames - fieldNamesToRemove
    itemFieldsToStay = [ifield for ifield in item.item_fields
                        if (ifield.field.name in fieldNamesToStay)]
    nameValuePairsToAdd = map(lambda ifield: (ifield.field.name, ifield.field_value),
                              itemFieldsToStay)
    operations.ItemOperations.addOrUpdateFields(self._session, persistentItem,
                                                nameValuePairsToAdd, user_login)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, fileAbsPath):
    super(DeleteFileCommand, self).__init__()
    self._fileAbsPath = fileAbsPath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, srcFileAbsPath, dstFileAbsPath):
    super(MoveFileCommand, self).__init__()
    self._srcFileAbsPath = srcFileAbsPath
    self._dstFileAbsPath = dstFileAbsPath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, fileAbsPath, newFilename):
    dstFileAbsPath = os.path.join(os.path.dirname(fileAbsPath), newFilename)
    super(RenameFileCommand, self).__init__(fileAbsPath, dstFileAbsPath)
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, dirAbsPath, newDirName):
    super(RenameDirectoryCommand, self).__init__()
    self._dirAbsPath = dirAbsPath.rstrip(os.sep)
    self._newDirName = newDirName.strip(os.sep)
    if '/' in self._newDirName or '\\' in self._newDirName:
        raise ValueError("OS pathname separator should not be in '{}'".format(self._newDirName))
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def __init__(self, item, repoBasePath):
    super(CheckItemIntegrityCommand, self).__init__()
    self.__item = item
    self.__repoBasePath = repoBasePath
vlkv/reggata
[ 28, 3, 28, 90, 1289021990 ]
def write_config(config):
    """Writes the config to a file"""
    with open(CONFIG_FILE, 'wb') as conf:
        config.write(conf)
callowayproject/django-app-skeleton
[ 76, 17, 76, 1, 1282242043 ]
def set_config_value(sec, opt, value):
    """Set a config value.

    Only sets the value if the value is not found or empty.
    """
    config = ConfigParser.RawConfigParser()
    config.read(CONFIG_FILE)
    if not config.has_option(sec, opt):
        config.set(sec, opt, value)
    elif config.get(sec, opt) == '':
        config.set(sec, opt, value)
    write_config(config)
callowayproject/django-app-skeleton
[ 76, 17, 76, 1, 1282242043 ]
def ensure_venv():
    """Ensure virtualenv and virtualenvwrapper are installed"""
    has_venv = bool(subprocess.Popen(
        ['which', 'virtualenv'],
        stdout=subprocess.PIPE).communicate()[0])
    if not has_venv:
        print 'virtualenv is required to run this script. Please install it ' \
              'with\n easy_install virtualenv\n\nor\n\n pip install virtualenv'
        sys.exit(1)

    has_venv_wrapper = bool(subprocess.Popen(
        ['which', 'virtualenvwrapper.sh'],
        stdout=subprocess.PIPE).communicate()[0])
    if not has_venv_wrapper:
        print 'virtualenvwrapper is required to run this script. Please ' \
              'install it with\n easy_install virtualenvwrapper\n\nor\n\n ' \
              'pip install virtualenvwrapper'
        sys.exit(1)
callowayproject/django-app-skeleton
[ 76, 17, 76, 1, 1282242043 ]
def replace(opts, text):
    """Replace certain strings with the supplied text

    `opts` is a dictionary of variables that will be replaced. Similar to
    django, it will look for {{..}} and replace it with the variable value.

    Since we want to maintain compatibility with django's `startapp` command
    we need to also replace `app_name` folders with the supplied value.
    """
    if IS_WINDOWS:
        text = text.replace('\\app_name', '\\{0}'.format(opts['APP_NAME']))
        text = text.replace('\\gitignore', '\\.gitignore')
    else:
        text = text.replace('/app_name', '/{0}'.format(opts['APP_NAME']))
        text = text.replace('/gitignore', '/.gitignore')
    for key, value in opts.iteritems():
        if not value:
            continue
        text = text.replace('{{%s}}' % (key.lower(),), value)
    return text
callowayproject/django-app-skeleton
[ 76, 17, 76, 1, 1282242043 ]
def main(options):
    config = get_config()
    cur_user = ''
    if IS_WINDOWS and win32api:
        cur_user = win32api.GetUserName()
    elif not IS_WINDOWS:
        cur_user = os.getlogin()

    # Default options
    opts = {
        'APP_NAME': None,
        'PKG_NAME': None,
        'PKG_AUTHOR': None,
        'PKG_AUTHOR_EMAIL': None,
        'PKG_URL': None,
        'VENV': None,
        'SECRET_KEY': ''.join([random.choice(CHARS) for i in xrange(50)]),
        'DEST_DIR': None,
        'TMPL_DIR': None,
        'USE_VENV': None
    }
    # Update the default options with the config values
    opts.update(config)

    def prompt(attr, text, default=None):
        """Prompt the user for certain values"""
        if hasattr(options, attr):
            if getattr(options, attr):
                return getattr(options, attr)
        default_text = default and ' [%s]: ' % default or ': '
        new_val = None
        while not new_val:
            new_val = raw_input(text + default_text) or default
        return new_val

    # Package/App Information
    opts['PKG_NAME'] = prompt('pkg_name', 'Package Name')
    opts['APP_NAME'] = prompt(
        'app_name', 'App Name', opts['PKG_NAME'].replace('django-', ''))
    opts['PKG_URL'] = prompt('pkg_url', 'Project URL')

    # Author Information
    opts['PKG_AUTHOR'] = prompt(
        'pkg_author', 'Author Name', opts['PKG_AUTHOR'] or cur_user)
    opts['PKG_AUTHOR_EMAIL'] = prompt(
        'pkg_author_email', 'Author Email', opts['PKG_AUTHOR_EMAIL'])
    set_config_value('main', 'author', opts['PKG_AUTHOR'])
    set_config_value('main', 'author_email', opts['PKG_AUTHOR_EMAIL'])

    # Destination and template directories
    opts['DEST_DIR'] = prompt(
        'destination', 'Destination DIR', opts['DEST_DIR'])
    opts['DEST_DIR'] = os.path.join(opts['DEST_DIR'], opts['PKG_NAME'])
    opts['TMPL_DIR'] = prompt('template', 'Template DIR', opts['TMPL_DIR'])
    tmpl_dir = os.path.realpath(os.path.expanduser(opts['TMPL_DIR']))
    if tmpl_dir[-1] != '/':
        tmpl_dir = tmpl_dir + "/"
    opts['TMPL_DIR'] = tmpl_dir

    # Copy the template and replace the proper values
    mk_pkg(opts, opts['DEST_DIR'], opts['TMPL_DIR'])

    # Virtualenv
    opts['USE_VENV'] = prompt('use_venv', 'Use Virtualenv', 'n')
    if opts['USE_VENV'].lower() in ['y', 'yes', '1']:
        opts['VENV'] = prompt('venv', 'Virtualenv Name', opts['PKG_NAME'])
        mk_virtual_env(opts['VENV'], opts['DEST_DIR'])
callowayproject/django-app-skeleton
[ 76, 17, 76, 1, 1282242043 ]
def _main():
    for s in ["NA.TO", "XBB.TO", "NOU.V", "AP-UN.TO", "BRK-A", "AAPL"]:
        print("=============================================")
        print("s: {}".format(s))
        print("get_name: {}".format(ysq.get_name(s)))
        print("get_price: {}".format(ysq.get_price(s)))
        print("get_volume: {}".format(ysq.get_volume(s)))
        print("get_stock_exchange: {}".format(ysq.get_stock_exchange(s)))
        print("get_market_cap: {}".format(ysq.get_market_cap(s)))
        print("get_dividend_yield: {}".format(ysq.get_dividend_yield(s)))
        print("get_price_earnings_ratio: {}".format(ysq.get_price_earnings_ratio(s)))
        print("get_52_week_low: {}".format(ysq.get_52_week_low(s)))
        print("get_52_week_high: {}".format(ysq.get_52_week_high(s)))
        print("get_currency: {}".format(ysq.get_currency(s)))
mathieugouin/tradesim
[ 5, 2, 5, 2, 1488071799 ]
def create_base():
    base = RigidFragment()
    base.add_atom("O", np.array([-0.4, 0., 0.]), 1.)
    base.add_atom("H", np.array([0.4, 0., 0.]), 1.)
    base.finalize_setup(shift_com=False)
    # callers such as setup_aatopology use the returned fragment
    return base
js850/PyGMIN
[ 6, 2, 6, 18, 1333032365 ]
def setup_aatopology(self):
    GMIN.initialize()
    pot = GMINPotential(GMIN)
    coords = pot.getCoords()
    nrigid = coords.size / 6
    print "I have %d water molecules in the system" % nrigid
    print "The initial energy is", pot.getEnergy(coords)

    water = create_base()
js850/PyGMIN
[ 6, 2, 6, 18, 1333032365 ]
def get_potential(self):
    return self.potential
js850/PyGMIN
[ 6, 2, 6, 18, 1333032365 ]
def PRIMARYLANGID(lgid):
    return ((lgid) & 1023)
calexil/FightstickDisplay
[ 32, 2, 32, 5, 1489193659 ]
def IMAGE_SNAP_BY_ORDINAL(Ordinal):
    return ((Ordinal & IMAGE_ORDINAL_FLAG) != 0)
calexil/FightstickDisplay
[ 32, 2, 32, 5, 1489193659 ]
def PALETTEINDEX(i):
    return ((16777216 | (i)))
calexil/FightstickDisplay
[ 32, 2, 32, 5, 1489193659 ]
def GetRValue(rgb):
    return rgb & 0xff
calexil/FightstickDisplay
[ 32, 2, 32, 5, 1489193659 ]
def GetBValue(rgb):
    return (rgb >> 16) & 0xff
calexil/FightstickDisplay
[ 32, 2, 32, 5, 1489193659 ]
def b(x):
    return x
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def b(x):
    return codecs.latin_1_encode(x)[0]
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def __init__(self, device):
    """
    device (file, fileno)
        A file object or a fileno of an open hidraw device node.
    """
    self._device = device
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def getRawReportDescriptor(self):
    """
    Return a binary string containing the raw HID report descriptor.
    """
    descriptor = _hidraw_report_descriptor()
    size = ctypes.c_uint()
    self._ioctl(_HIDIOCGRDESCSIZE, size, True)
    descriptor.size = size
    self._ioctl(_HIDIOCGRDESC, descriptor, True)
    return b''.join(chr(x) for x in descriptor.value[:size.value])
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def getInfo(self):
    """
    Returns a DevInfo instance, a named tuple with the following items:
    - bustype: one of BUS_USB, BUS_HIL, BUS_BLUETOOTH or BUS_VIRTUAL
    - vendor: device's vendor number
    - product: device's product number
    """
    devinfo = _hidraw_devinfo()
    self._ioctl(_HIDIOCGRAWINFO, devinfo, True)
    return DevInfo(devinfo.bustype, devinfo.vendor, devinfo.product)
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def getPhysicalAddress(self, length=512):
    """
    Returns device physical address as a string.
    See hidraw documentation for value signification, as it depends on
    device's bus type.
    """
    name = ctypes.create_string_buffer(length)
    self._ioctl(_HIDIOCGRAWPHYS(length), name, True)
    return name.value
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def sendFeatureReport(self, report, report_num=0):
    """
    Send a feature report.
    """
    length = len(report) + 1
    buf = ctypes.create_string_buffer(
        b(struct.pack("B", report_num) + report), length)
    self._ioctl(_HIDIOCSFEATURE(length), buf, True)
    print(_HIDIOCSFEATURE(length))
sircuri/GoodWeUSBLogger
[ 9, 6, 9, 4, 1504533143 ]
def run(self):
    DistUtilsExtra.auto.install_auto.run(self)
    # after DistUtilsExtra automatically copied data/org.soundconverter.gschema.xml
    # to /usr/share/glib-2.0/schemas/ it doesn't seem to compile them.
    glib_schema_path = os.path.join(self.install_data, 'share/glib-2.0/schemas/')
    cmd = 'glib-compile-schemas {}'.format(glib_schema_path)
    print('running {}'.format(cmd))
    os.system(cmd)
kassoulet/soundconverter
[ 195, 48, 195, 2, 1207081526 ]
def setup_class(self, monkeypatch):
    monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler())
satanas/libturpial
[ 27, 18, 27, 30, 1300121942 ]
def test_new_image(self, monkeypatch):
    media = Media.new_image('foo', 'binary', path='/path/to/ble', info='lorem ipsum')
    assert isinstance(media, Media)
satanas/libturpial
[ 27, 18, 27, 30, 1300121942 ]
def raise_ex():
    raise KeyError
satanas/libturpial
[ 27, 18, 27, 30, 1300121942 ]
def setUp(self):
    self.config_complete = create_config_from_yaml(TEST_YAML_CONTENT_OK)
pytroll/pytroll-aapp-runner
[ 1, 6, 1, 8, 1478765185 ]
def test_check_if_scene_is_unique_return_value(self, config):
    """Test checking if the current scene is unique or if it has been processed earlier."""
    config.return_value = self.config_complete
    myfilename = "/tmp/mytestfile"

    aapp_run_config = AappRunnerConfig(myfilename, 'norrkoping', 'xl-band')
    aapp_config = AappL1Config(aapp_run_config.config, 'xl-band')

    aapp_config['platform_name'] = 'metop03'
    aapp_config['collection_area_id'] = 'euron1'
    aapp_config['starttime'] = datetime(2022, 1, 8, 12, 49, 50)
    aapp_config['endtime'] = datetime(2022, 1, 8, 13, 0, 26)
    aapp_config.job_register = {}

    result = check_if_scene_is_unique(aapp_config)
    assert result

    aapp_config.job_register = {'metop03': [(datetime(2022, 1, 8, 12, 49, 50),
                                             datetime(2022, 1, 8, 13, 0, 26),
                                             'euron1')]}
    # An EARS scene (same platform and overlapping time interval and over
    # the same area of interest) arrives shortly after:
    aapp_config['platform_name'] = 'metop03'
    aapp_config['collection_area_id'] = 'euron1'
    aapp_config['starttime'] = datetime(2022, 1, 8, 12, 50)
    aapp_config['endtime'] = datetime(2022, 1, 8, 13, 0)

    result = check_if_scene_is_unique(aapp_config)
    assert not result
pytroll/pytroll-aapp-runner
[ 1, 6, 1, 8, 1478765185 ]
def main():
    import sys
    infile = sys.argv[1]
    attempts = int(sys.argv[2])
    outfile = sys.argv[3]

    with open(infile, "r", encoding="utf-8") as f:
        templates = read_opdata_file(f)

    buf = []
    for template in templates:
        for _ in range(attempts):
            buf.append(template.create_entry())

    with open(outfile, "w", encoding="utf-8") as f:
        for dynasm, gas in buf:
            f.write("{}\t{}\n".format(dynasm, gas))
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self, template, constraints):
    self.template = template
    self.gas_template = FIX_GAS_RE.sub(
        lambda m: ".{}{}".format(m.group(2), m.group(1)), template)
    self.constraints = constraints
    self.args = parse_template(template)
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def reformat_reglist(m):
    start = int(m.group(1))
    format = m.group(2)
    amount = int(m.group(3))
    items = []
    for i in range(amount):
        items.append("v{}{}".format((start + i) % 32, format))
    return "{{{}}}".format(", ".join(items))
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self):
    self.values = []
    self.emitted = []
    self.gas = []
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def parse_template(template):
    matches = []
    for argty, argidx in SUBSTITUTION_RE.findall(template):
        if argty == "Imm":
            arg = Immediate()
        elif argty == "Ident":
            arg = Ident()
        elif argty == "Mod":
            arg = Modifier()
        elif argty == "Off":
            arg = Offset()
        elif argty in "WXBHSDQV" or argty in ("WSP", "XSP", "WX"):
            arg = Register(argty)
        else:
            raise NotImplementedError(argty)
        matches.append((arg, int(argidx)))
    return matches
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self):
    pass
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self, *args):
    self.options = args
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def create_value(self, history):
    prev = history.emitted[-1]
    if prev.startswith("X") or prev.startswith("x"):
        return random.choice(("LSL", "SXTX"))
    else:
        return random.choice(("UXTW", "SXTW"))
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self, min, max, scale):
    self.min = min
    self.max = max
    self.scale = scale
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def create_value(self, history):
    prev = history.values[-1]
    return random.randrange(1, self.max - prev, self.scale)
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]
def __init__(self, count, scale=1):
    self.count = count
    self.scale = scale
CensoredUsername/dynasm-rs
[ 586, 43, 586, 11, 1467672562 ]