text_prompt: stringlengths 100 to 17.7k
code_prompt: stringlengths 7 to 9.86k
<SYSTEM_TASK:> Processes the management section from a configuration data dict. <END_TASK> <USER_TASK:> Description: def process_management_config_section(config, management_config): """ Processes the management section from a configuration data dict. :param config: The config reference of the object that will hold the configuration data from the config_data. :param management_config: Management section from a config data dict. """
if 'commands' in management_config: for command in management_config['commands']: config.management['commands'].append(command)
<SYSTEM_TASK:> Processes the session section from the configuration data dict. <END_TASK> <USER_TASK:> Description: def process_session_config_section(config, session_config): """ Processes the session section from the configuration data dict. :param config: The config reference of the object that will hold the configuration data from the config_data. :param session_config: Session configuration section from a config data dict. """
# Setting session type as file by default config.session['type'] = 'file' if 'enabled' in session_config: config.session['enabled'] = session_config['enabled'] if 'type' in session_config: config.session['type'] = session_config['type'] if config.session['type'] == 'file': if 'path' in session_config: config.session['file']['path'] = session_config['path'] if config.session['type'] == 'redis': if 'data' in session_config: if 'source' in session_config['data']: config.session['redis']['data']['source'] = session_config[ 'data']['source'] if 'handlers' in session_config: for handler in session_config['handlers']: handler_class_x = handler['class'].split('.') handler['class'] = handler_class_x[-1] handler['module'] = '.'.join(handler_class_x[:-1][:]) config.session['handlers'][handler['name']] = handler del config.session['handlers'][handler['name']]['name'] if 'encoders' in session_config: for encoder in session_config['encoders']: encoder_class_x = encoder['class'].split('.') encoder['encoder'] = encoder_class_x[-1] encoder['class'] = encoder_class_x[-1] encoder['module'] = '.'.join(encoder_class_x[:-1][:]) config.session['encoders'][encoder['name']] = encoder del config.session['encoders'][encoder['name']]['name'] if 'id_generators' in session_config: for generator in session_config['id_generators']: generator_ref_x = generator['function'].split('.') generator['function'] = generator_ref_x[-1] generator['module'] = '.'.join(generator_ref_x[:-1][:]) config.session['id_generators'][generator['name']] = generator del config.session['id_generators'][generator['name']]['name'] if 'name' in session_config: config.session['name'] = session_config['name'] if 'life_time' in session_config: config.session['life_time'] = session_config['life_time'] if 'callback_hiccup' in session_config: config.session['callback_hiccup'] = session_config['callback_hiccup'] if 'callback_time' in session_config: config.session['callback_time'] = session_config['callback_time'] if 'purge_limit' in session_config: config.session['purge_limit'] = session_config['purge_limit']
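A minimal sketch of the kind of session section this function consumes. The config stub, module path and values below are illustrative assumptions, not Firenado's real defaults:

class _Config(object):
    # Hypothetical stand-in for the real config object, pre-seeded with the
    # nested defaults that the function writes into.
    def __init__(self):
        self.session = {
            'enabled': False, 'type': 'file', 'name': None, 'life_time': 0,
            'file': {'path': ''}, 'redis': {'data': {'source': ''}},
            'handlers': {}, 'encoders': {}, 'id_generators': {},
            'callback_hiccup': 0, 'callback_time': 0, 'purge_limit': 0,
        }

config = _Config()
session_config = {
    'enabled': True,
    'type': 'redis',
    'data': {'source': 'session'},
    'life_time': 1800,
    'id_generators': [
        {'name': 'uuid', 'function': 'myapp.session.uuid_generator'},
    ],
}
process_session_config_section(config, session_config)
# config.session['id_generators']['uuid'] is now
# {'function': 'uuid_generator', 'module': 'myapp.session'}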
<SYSTEM_TASK:> Handles simple, SQL specific connection creation. This will not <END_TASK> <USER_TASK:> Description: def _open(self, db, writeAccess=False): """ Handles simple, SQL specific connection creation. This will not have to manage thread information as it is already managed within the main open method for the SQLBase class. :param db | <orb.Database> :return <variant> | backend specific database connection """
if not pymysql: raise orb.errors.BackendNotFound('pymysql is not installed.') # create the python connection try: return pymysql.connect(db=db.name(), user=db.username(), passwd=db.password(), host=(db.writeHost() if writeAccess else db.host()) or 'localhost', port=db.port() or 3306, cursorclass=pymysql.cursors.DictCursor) except pymysql.OperationalError as err: log.exception('Failed to connect to MySQL') raise orb.errors.ConnectionFailed()
<SYSTEM_TASK:> Run a script file using a valid sqlalchemy session. <END_TASK> <USER_TASK:> Description: def run_script(script_path, session, handle_command=None, handle_line=None): """ Run a script file using a valid sqlalchemy session. Based on https://bit.ly/2CToAhY. See also sqlalchemy transaction control: https://bit.ly/2yKso0A :param script_path: The path where the script is located :param session: A sqlalchemy session to execute the sql commands from the script :param handle_command: Function to handle a valid command :param handle_line: Function to handle a valid line :return: """
logger.debug("Opening script %s." % script_path) with open(script_path, "r") as stream: sql_command = "" for line in stream: # Ignore commented lines if not line.startswith("--") and line.strip("\n"): # Append line to the command string if handle_line is not None: logger.debug("Calling the handle line function for: " "%s." % line) line = handle_line(line) sql_command = "%s%s" % (sql_command, line.strip("\n")) # If the command string ends with ";", it is a full statement if sql_command.endswith(";"): # Try to execute statement and commit it try: if handle_command is not None: logger.debug("Calling the handle command function " "for: %s." % sql_command) sql_command = handle_command(sql_command) session.execute(text(sql_command)) # Assert in case of error except Exception as e: session.rollback() raise e # Finally, clear command string finally: sql_command = "" session.commit()
<SYSTEM_TASK:> Run Firenado's management commands from a command line <END_TASK> <USER_TASK:> Description: def run_from_command_line(): """ Run Firenado's management commands from a command line """
for commands_conf in firenado.conf.management['commands']: logger.debug("Loading %s commands from %s." % ( commands_conf['name'], commands_conf['module'] )) exec('import %s' % commands_conf['module']) command_index = 1 for arg in sys.argv[1:]: command_index += 1 if arg[0] != "-": break parser = FirenadoArgumentParser(prog=os.path.split(sys.argv[0])[1], add_help=False) parser.add_argument("-h", "--help", default=argparse.SUPPRESS) parser.add_argument("command", default="help", help="Command to be executed") try: namespace = parser.parse_args(sys.argv[1:command_index]) if not command_exists(namespace.command): show_command_line_usage(parser) else: run_command(namespace.command, sys.argv[command_index-1:]) except FirenadoArgumentError: show_command_line_usage(parser, True)
<SYSTEM_TASK:> Return the command line header <END_TASK> <USER_TASK:> Description: def get_command_header(parser, usage_message="", usage=False): """ Return the command line header :param parser: :param usage_message: :param usage: :return: The command header """
loader = template.Loader(os.path.join( firenado.conf.ROOT, 'management', 'templates', 'help')) return loader.load("header.txt").generate( parser=parser, usage_message=usage_message, usage=usage, firenado_version=".".join(map(str, firenado.__version__))).decode( sys.stdout.encoding)
<SYSTEM_TASK:> Show the command line help <END_TASK> <USER_TASK:> Description: def show_command_line_usage(parser, usage=False): """ Show the command line help """
help_header_message = get_command_header(parser, "command", usage) loader = template.Loader(os.path.join( firenado.conf.ROOT, 'management', 'templates', 'help')) command_template = " {0.name:15}{0.description:40}" help_message = loader.load("main_command_help.txt").generate( command_categories=command_categories, command_template=command_template ).decode(sys.stdout.encoding) # TODO: This print has to go. Use proper stream instead(stdout or stderr) print(''.join([help_header_message, help_message]))
<SYSTEM_TASK:> Check if the given command was registered. In other words, if it <END_TASK> <USER_TASK:> Description: def command_exists(command): """ Check if the given command was registered. In other words, if it exists. """
for category, commands in iteritems(command_categories): for existing_command in commands: if existing_command.match(command): return True return False
<SYSTEM_TASK:> Run all tasks registered in a command. <END_TASK> <USER_TASK:> Description: def run_command(command, args): """ Run all tasks registered in a command. """
for category, commands in iteritems(command_categories): for existing_command in commands: if existing_command.match(command): existing_command.run(args)
<SYSTEM_TASK:> See whether the TypeVar is bound for the first time <END_TASK> <USER_TASK:> Description: def check(self, value, namespace): """ See whether the TypeVar is bound for the first time or is met with _exactly_ the same type as previously. That type must also obey the TypeVar's bound, if any. Everything else is a type error. """
return namespace.is_compatible(self.typevar, type(value))
<SYSTEM_TASK:> check lines have less than a maximum number of characters. <END_TASK> <USER_TASK:> Description: def check_lines(self, lines, i): """ check lines have less than a maximum number of characters. It ignores lines with long URLs. """
maxChars = self.config.max_line_length for line in lines.splitlines(): if len(line) > maxChars: if 'http://' in line or 'https://' in line: continue self.add_message('C0301', line=i, args=(len(line), maxChars)) i += 1
<SYSTEM_TASK:> Represent data for the field. <END_TASK> <USER_TASK:> Description: def to_representation(self, obj): """ Represent data for the field. """
many = isinstance(obj, collections.Iterable) \ or isinstance(obj, models.Manager) \ and not isinstance(obj, dict) assert self.serializer is not None \ and issubclass(self.serializer, serializers.ModelSerializer), ( "Bad serializer defined %s" % self.serializer ) extra_params = {} if issubclass(self.serializer, ModelPermissionsSerializer): extra_params['cached_allowed_fields'] =\ self.parent.cached_allowed_fields ser = self.serializer(obj, context=self.context, many=many, **extra_params) return ser.data
<SYSTEM_TASK:> Sets the remove action that should be taken when a model is removed from the collection generated by <END_TASK> <USER_TASK:> Description: def setRemoveAction(self, action): """ Sets the remove action that should be taken when a model is removed from the collection generated by this reverse lookup. Valid actions are "unset" or "delete", any other values will raise an exception. :param action: <str> """
if action not in ('unset', 'delete'): raise orb.errors.ValidationError('The remove action must be either "unset" or "delete"') else: self.__removeAction = action
<SYSTEM_TASK:> Binds typevar to the type its_type. <END_TASK> <USER_TASK:> Description: def bind(self, typevar, its_type): """ Binds typevar to the type its_type. Binding occurs on the instance if the typevar is a TypeVar of the generic type of the instance, on call level otherwise. """
assert type(typevar) == tg.TypeVar if self.is_generic_in(typevar): self.bind_to_instance(typevar, its_type) else: self._ns[typevar] = its_type
<SYSTEM_TASK:> Returns the type the typevar is bound to, or None. <END_TASK> <USER_TASK:> Description: def binding_of(self, typevar): """Returns the type the typevar is bound to, or None."""
if typevar in self._ns: return self._ns[typevar] if self._instance_ns and typevar in self._instance_ns: return self._instance_ns[typevar] return None
<SYSTEM_TASK:> Check whether the module has copyright header. <END_TASK> <USER_TASK:> Description: def _checkCopyright(self, text, node): """ Check whether the module has copyright header. @param text: codes of the module @param node: node of the module """
if not re.search(br"%s\s*\n\s*%s" % self.commentsCopyright, text): self.add_message('W9001', node=node)
<SYSTEM_TASK:> Manage message of different type and in the context of path. <END_TASK> <USER_TASK:> Description: def handle_message(self, msg): """ Manage message of different type and in the context of path. """
if msg.msg_id in self.messagesAllowed: super(LimitedReporter, self).handle_message(msg)
<SYSTEM_TASK:> Unregisters the object from the system. If None is supplied, then <END_TASK> <USER_TASK:> Description: def unregister(self, obj=None): """ Unregisters the object from the system. If None is supplied, then all objects will be unregistered :param obj: <str> or <orb.Database> or <orb.Schema> or None """
if obj is None: self.__databases.clear() self.__schemas.clear() elif isinstance(obj, orb.Schema): self.__schemas.pop(obj.name(), None) elif isinstance(obj, orb.Database): if obj == self.__current_db: self.__current_db = None self.__databases.pop(obj.name(), None) else: self.__current_db = None self.__schemas.pop(obj, None) self.__databases.pop(obj, None)
<SYSTEM_TASK:> Returns the records for the current page, or the specified page number. <END_TASK> <USER_TASK:> Description: def page(self, number, **context): """ Returns the records for the current page, or the specified page number. If a page size is not specified, then this record sets page size will be used. :param pageno | <int> pageSize | <int> :return <orb.RecordSet> """
size = max(0, self.context(**context).pageSize) if not size: return self.copy() else: return self.copy(page=number, pageSize=size)
<SYSTEM_TASK:> Processes a load event by setting the properties of this record <END_TASK> <USER_TASK:> Description: def _load(self, event): """ Processes a load event by setting the properties of this record to the data restored from the database. :param event: <orb.events.LoadEvent> """
if not event.data: return context = self.context() schema = self.schema() dbname = schema.dbname() clean = {} for col, value in event.data.items(): try: model_dbname, col_name = col.split('.') except ValueError: col_name = col model_dbname = dbname # make sure the value we're setting is specific to this model try: column = schema.column(col_name) except orb.errors.ColumnNotFound: column = None if model_dbname != dbname or (column in clean and isinstance(clean[column], Model)): continue # look for preloaded reverse lookups and pipes elif not column: self.__preload[col_name] = value # extract the value from the database else: value = column.dbRestore(value, context=context) clean[column] = value # update the local values with WriteLocker(self.__dataLock): for col, val in clean.items(): default = val if not isinstance(val, dict) else val.copy() self.__values[col.name()] = (default, val) self.__loaded.add(col) if self.processEvent(event): self.onLoad(event)
<SYSTEM_TASK:> Returns a dictionary of changes that have been made <END_TASK> <USER_TASK:> Description: def changes(self, columns=None, recurse=True, flags=0, inflated=False): """ Returns a dictionary of changes that have been made to the data from this record. :return { <orb.Column>: ( <variant> old, <variant> new), .. } """
output = {} is_record = self.isRecord() schema = self.schema() columns = [schema.column(c) for c in columns] if columns else \ schema.columns(recurse=recurse, flags=flags).values() context = self.context(inflated=inflated) with ReadLocker(self.__dataLock): for col in columns: old, curr = self.__values.get(col.name(), (None, None)) if col.testFlag(col.Flags.ReadOnly): continue elif not is_record: old = None check_old = col.restore(old, context) check_curr = col.restore(curr, context) try: different = check_old != check_curr except StandardError: different = True if different: output[col] = (check_old, check_curr) return output
<SYSTEM_TASK:> Returns the lookup options for this record. This will track the options that were <END_TASK> <USER_TASK:> Description: def context(self, **context): """ Returns the lookup options for this record. This will track the options that were used when looking this record up from the database. :return <orb.LookupOptions> """
output = orb.Context(context=self.__context) if self.__context is not None else orb.Context() output.update(context) return output
<SYSTEM_TASK:> Removes this record from the database. If the dryRun \ <END_TASK> <USER_TASK:> Description: def delete(self, **context): """ Removes this record from the database. If the dryRun \ flag is specified then the command will be logged and \ not executed. :note From version 0.6.0 on, this method now accepts a mutable keyword dictionary of values. You can supply any member value for either the <orb.LookupOptions> or <orb.Context>, as well as the keyword 'lookup' to an instance of <orb.LookupOptions> and 'options' for an instance of the <orb.Context> :return <int> """
if not self.isRecord(): return 0 event = orb.events.DeleteEvent(record=self, context=self.context(**context)) if self.processEvent(event): self.onDelete(event) if event.preventDefault: return 0 if self.__delayed: self.__delayed = False self.read() with WriteLocker(self.__dataLock): self.__loaded.clear() context = self.context(**context) conn = context.db.connection() _, count = conn.delete([self], context) # clear out the old values if count == 1: col = self.schema().column(self.schema().idColumn()) with WriteLocker(self.__dataLock): self.__values[col.name()] = (None, None) return count
<SYSTEM_TASK:> Tells the model to treat the given columns as though they had been loaded from the database. <END_TASK> <USER_TASK:> Description: def markLoaded(self, *columns): """ Tells the model to treat the given columns as though they had been loaded from the database. :param columns: (<str>, ..) """
schema = self.schema() columns = {schema.column(col) for col in columns} column_names = {col.name() for col in columns} with WriteLocker(self.__dataLock): for key, (old_value, new_value) in self.__values.items(): if key in column_names: self.__values[key] = (new_value, new_value) self.__loaded.update(columns)
<SYSTEM_TASK:> Returns whether or not this database table record exists <END_TASK> <USER_TASK:> Description: def isRecord(self, db=None): """ Returns whether or not this database table record exists in the database. :return <bool> """
if db is not None: same_db = db == self.context().db if db is None or same_db: col = self.schema().idColumn() with ReadLocker(self.__dataLock): return (col in self.__loaded) and (self.__values[col.name()][0] is not None) else: return None
<SYSTEM_TASK:> Commits the current change set information to the database, <END_TASK> <USER_TASK:> Description: def save(self, values=None, after=None, before=None, **context): """ Commits the current change set information to the database, or inserts this object as a new record into the database. This method will only update the database if the record has any local changes to it, otherwise, no commit will take place. If the dryRun flag is set, then the SQL will be logged but not executed. :param values: None or dictionary of values to update before save :param after: <orb.Model> || None (optional) if provided, this save call will be delayed until after the given record has been saved, triggering a PostSaveEvent callback :param before: <orb.Model> || None (optional) if provided, this save call will be delayed until before the given record is about to be saved, triggering a PreSaveEvent callback :note From version 0.6.0 on, this method now accepts a mutable keyword dictionary of values. You can supply any member value for either the <orb.LookupOptions> or <orb.Context>, 'options' for an instance of the <orb.Context> :return <bool> success """
# specify that this save call should be performed after the save of # another record, useful for chaining events if after is not None: callback = orb.events.Callback(self.save, values=values, **context) after.addCallback(orb.events.PostSaveEvent, callback, record=after, once=True) return callback # specify that this save call should be performed before the save # of another record, useful for chaining events elif before is not None: callback = orb.events.Callback(self.save, values=values, **context) before.addCallback(orb.events.PreSaveEvent, callback, record=before, once=True) return callback if values is not None: self.update(values, **context) # create the commit options context = self.context(**context) new_record = not self.isRecord() # create the pre-commit event changes = self.changes(columns=context.columns) event = orb.events.PreSaveEvent(record=self, context=context, newRecord=new_record, changes=changes) if self.processEvent(event): self.onPreSave(event) if event.preventDefault: return event.result # check to see if we have any modifications to store if not (self.isModified() and self.validate()): return False conn = context.db.connection() if not self.isRecord(): records, _ = conn.insert([self], context) if records: event = orb.events.LoadEvent(record=self, data=records[0]) self._load(event) else: conn.update([self], context) # mark all the data as committed cols = [self.schema().column(c).name() for c in context.columns or []] with WriteLocker(self.__dataLock): for col_name, (_, value) in self.__values.items(): if not cols or col_name in cols: self.__values[col_name] = (value, value) # create post-commit event event = orb.events.PostSaveEvent(record=self, context=context, newRecord=new_record, changes=changes) if self.processEvent(event): self.onPostSave(event) return True
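A sketch of the after/before chaining described in the docstring; the Order and Invoice models and their constructor arguments are hypothetical:

order = Order({'status': 'open'})
invoice = Invoice({'total': 100})

invoice.save(after=order)   # registers a PostSaveEvent callback and returns it
order.save()                # saving the order now triggers invoice.save()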
<SYSTEM_TASK:> Validates the current record object to make sure it is ok to commit to the database. If <END_TASK> <USER_TASK:> Description: def validate(self, columns=None): """ Validates the current record object to make sure it is ok to commit to the database. If the optional override dictionary is passed in, then it will use the given values vs. the one stored with this record object which can be useful to check to see if the record will be valid before it is committed. :param overrides | <dict> :return <bool> """
schema = self.schema() if not columns: ignore_flags = orb.Column.Flags.Virtual | orb.Column.Flags.ReadOnly columns = schema.columns(flags=~ignore_flags).values() use_indexes = True else: use_indexes = False # validate the column values values = self.values(key='column', columns=columns) for col, value in values.items(): if not col.validate(value): return False # validate the index values if use_indexes: for index in self.schema().indexes().values(): if not index.validate(self, values): return False return True
<SYSTEM_TASK:> Adds a callback method to the class. When an event of the given type is triggered, any registered <END_TASK> <USER_TASK:> Description: def addCallback(cls, eventType, func, record=None, once=False): """ Adds a callback method to the class. When an event of the given type is triggered, any registered callback will be executed. :param eventType: <str> :param func: <callable> """
callbacks = cls.callbacks() callbacks.setdefault(eventType, []) callbacks[eventType].append((func, record, once))
<SYSTEM_TASK:> Returns a list of callback methods that can be invoked whenever an event is processed. <END_TASK> <USER_TASK:> Description: def callbacks(cls, eventType=None): """ Returns a list of callback methods that can be invoked whenever an event is processed. :return: {subclass of <Event>: <list>, ..} """
key = '_{0}__callbacks'.format(cls.__name__) try: callbacks = getattr(cls, key) except AttributeError: callbacks = {} setattr(cls, key, callbacks) return callbacks.get(eventType, []) if eventType is not None else callbacks
<SYSTEM_TASK:> Shortcut for creating a new record for this table. <END_TASK> <USER_TASK:> Description: def create(cls, values, **context): """ Shortcut for creating a new record for this table. :param values | <dict> :return <orb.Table> """
schema = cls.schema() model = cls # check for creating inherited classes from a sub class polymorphic_columns = schema.columns(flags=orb.Column.Flags.Polymorphic) if polymorphic_columns: polymorphic_column = polymorphic_columns.values()[0] schema_name = values.get(polymorphic_column.name(), schema.name()) if schema_name and schema_name != schema.name(): schema = orb.system.schema(schema_name) if not schema: raise orb.errors.ModelNotFound(schema=schema_name) else: model = schema.model() column_values = {} collector_values = {} for key, value in values.items(): obj = schema.collector(key) or schema.column(key) if isinstance(obj, orb.Collector): collector_values[key] = value else: column_values[key] = value # create the new record with column values (values stored on this record) record = model(context=orb.Context(**context)) record.update(column_values) record.save() # save any collector values after the model is generated (values stored on other records) record.update(collector_values) return record
<SYSTEM_TASK:> Defines a new record for the given class based on the <END_TASK> <USER_TASK:> Description: def ensureExists(cls, values, defaults=None, **context): """ Defines a new record for the given class based on the inputted set of keywords. If a record already exists for the query, the first found record is returned, otherwise a new record is created and returned. :param values | <dict> """
# require at least some arguments to be set if not values: return cls() # lookup the record from the database q = orb.Query() for key, value in values.items(): column = cls.schema().column(key) if not column: raise orb.errors.ColumnNotFound(schema=cls.schema(), column=key) elif column.testFlag(column.Flags.Virtual): continue if (isinstance(column, orb.AbstractStringColumn) and not column.testFlag(column.Flags.CaseSensitive) and not column.testFlag(column.Flags.I18n) and isinstance(value, (str, unicode))): q &= orb.Query(key).lower() == value.lower() else: q &= orb.Query(key) == value record = cls.select(where=q).first() if record is None: record = cls(context=orb.Context(**context)) record.update(values) record.update(defaults or {}) record.save() return record
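A usage sketch with a hypothetical User model and fields:

# Finds an existing User by username (case-insensitively for plain string
# columns), or creates one with the given defaults if no match exists.
user = User.ensureExists({'username': 'jdoe'}, defaults={'is_active': True})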
<SYSTEM_TASK:> Processes the given event by dispatching it to any waiting callbacks. <END_TASK> <USER_TASK:> Description: def processEvent(cls, event): """ Processes the given event by dispatching it to any waiting callbacks. :param event: <orb.Event> """
callbacks = cls.callbacks(type(event)) keep_going = True remove_callbacks = [] for callback, record, once in callbacks: if record is not None and record != event.record: continue callback(event) if once: remove_callbacks.append((callback, record)) if event.preventDefault: keep_going = False break for callback, record in remove_callbacks: cls.removeCallback(type(event), callback, record=record) return keep_going
<SYSTEM_TASK:> Looks up a record based on the given key. This will use the <END_TASK> <USER_TASK:> Description: def fetch(cls, key, **context): """ Looks up a record based on the given key. This will use the default id field, as well as any keyable properties if the given key is a string. :param key: <variant> :param context: <orb.Context> :return: <orb.Model> || None """
# include any keyable columns for lookup if isinstance(key, basestring) and not key.isdigit(): keyable_columns = cls.schema().columns(flags=orb.Column.Flags.Keyable) if keyable_columns: base_q = orb.Query() for col in keyable_columns: base_q |= orb.Query(col) == key context.setdefault('where', base_q) else: context.setdefault('where', orb.Query(cls) == key) else: context.setdefault('where', orb.Query(cls) == key) # don't have slicing for lookup by id context['page'] = None context['pageSize'] = None context['start'] = None context['limit'] = None return cls.select(**context).first()
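A usage sketch, again with a hypothetical User model:

by_id = User.fetch(42)        # looked up against the id column
by_key = User.fetch('jdoe')   # non-numeric string: matched against Keyable columns, if any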
<SYSTEM_TASK:> Returns a new record instance for the given class with the values <END_TASK> <USER_TASK:> Description: def inflate(cls, values, **context): """ Returns a new record instance for the given class with the values defined from the database. :param cls | <subclass of orb.Table> values | <dict> values :return <orb.Table> """
context = orb.Context(**context) # inflate values from the database into the given class type if isinstance(values, Model): record = values values = dict(values) else: record = None schema = cls.schema() polymorphs = schema.columns(flags=orb.Column.Flags.Polymorphic).values() column = polymorphs[0] if polymorphs else None # attempt to expand the class to its defined polymorphic type if column and column.field() in values: morph_cls_name = values.get(column.name(), values.get(column.field())) morph_cls = orb.system.model(morph_cls_name) id_col = schema.idColumn().name() if morph_cls and morph_cls != cls: try: record = morph_cls(values[id_col], context=context) except KeyError: raise orb.errors.RecordNotFound(schema=morph_cls.schema(), column=values.get(id_col)) if record is None: event = orb.events.LoadEvent(record=record, data=values) record = cls(loadEvent=event, context=context) return record
<SYSTEM_TASK:> Removes a callback from the model's event callbacks. <END_TASK> <USER_TASK:> Description: def removeCallback(cls, eventType, func, record=None): """ Removes a callback from the model's event callbacks. :param eventType: <str> :param func: <callable> """
callbacks = cls.callbacks() callbacks.setdefault(eventType, []) for i in xrange(len(callbacks[eventType])): my_func, my_record, _ = callbacks[eventType][i] if func == my_func and record == my_record: del callbacks[eventType][i] break
<SYSTEM_TASK:> Selects records for the class based on the inputted \ <END_TASK> <USER_TASK:> Description: def select(cls, **context): """ Selects records for the class based on the inputted \ options. If no db is specified, then the current \ global database will be used. If the inflated flag is specified, then \ the results will be inflated to class instances. If the flag is left as None, then results will be auto-inflated if no columns were supplied. If columns were supplied, then the results will not be inflated by default. If the groupBy flag is specified, then the groupBy columns will be added to the beginning of the ordered search (to ensure proper paging). See the Table.groupRecords methods for more details. :note From version 0.6.0 on, this method now accepts a mutable keyword dictionary of values. You can supply any member value for either the <orb.LookupOptions> or <orb.Context>, as well as the keyword 'lookup' to an instance of <orb.LookupOptions> and 'context' for an instance of the <orb.Context> :return [ <cls>, .. ] || { <variant> grp: <variant> result, .. } """
rset_type = getattr(cls, 'Collection', orb.Collection) return rset_type(model=cls, **context)
<SYSTEM_TASK:> Partial render of jinja templates. This is useful if you want to re-render <END_TASK> <USER_TASK:> Description: def partial_jinja_template(template_name, name='data', mimetype="text/html"): """ Partial render of jinja templates. This is useful if you want to re-render the template in the output middleware phase. These templates are rendered in a way that all undefined variables will be kept intact in the template. """
def partial_jinja_renderer(result, errors): template = get_jinja_template(template_name) old = template.environment.undefined template.environment.undefined = DebugUndefined context = {name: result or Mock(), 'errors': errors} rendered = template.render(**context) template.environment.undefined = old return {'body': rendered, 'mimetype': mimetype} return partial_jinja_renderer
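A sketch of the partial-render behaviour, assuming an illustrative template name and contents:

# Suppose 'greeting.html' contains: Hello {{ data.name }}, today is {{ today }}
renderer = partial_jinja_template('greeting.html')
out = renderer({'name': 'Ada'}, None)
# out['body'] still contains the literal "{{ today }}" because of DebugUndefined,
# so the template can be rendered again in the output middleware phase.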
<SYSTEM_TASK:> Jinja template renderer that does not render the template at all. <END_TASK> <USER_TASK:> Description: def lazy_jinja_template(template_name, name='data', mimetype='text/html'): """ Jinja template renderer that does not render the template at all. Instead, it returns the context and template object blended together. Make sure to add ``giotto.middleware.RenderLazytemplate`` to the output middleware stream of any program that uses this renderer. """
def lazy_jinja_renderer(result, errors): template = get_jinja_template(template_name) context = {name: result or Mock(), 'errors': errors} data = ('jinja2', template, context) return {'body': data, 'mimetype': mimetype} return lazy_jinja_renderer
<SYSTEM_TASK:> Go through the passed in list of attributes and register those renderers <END_TASK> <USER_TASK:> Description: def _register_renderers(self, attrs): """ Go through the passed in list of attributes and register those renderers in the render map. """
for method in attrs: func = getattr(self, method) mimetypes = getattr(func, 'mimetypes', []) for mimetype in mimetypes: if not '/' in mimetype: self.reject_map[mimetype] = func if mimetype not in self.render_map: self.render_map[mimetype] = func else: # about to redefine an already defined renderer. # make sure this new render method is not on a base class. base_classes = self.__class__.mro()[1:] from_baseclass = any([x for x in base_classes if func.__name__ in dir(x)]) if not from_baseclass: self.render_map[mimetype] = func
<SYSTEM_TASK:> Render a model result into `mimetype` format. <END_TASK> <USER_TASK:> Description: def render(self, result, mimetype, errors=None): """ Render a model result into `mimetype` format. """
available_mimetypes = [x for x in self.render_map.keys() if '/' in x] render_func = None if '/' not in mimetype: # naked superformat (does not correspond to a mimetype) render_func = self.reject_map.get(mimetype, None) if not render_func: raise NoViewMethod("Unknown Superformat: %s" % mimetype) if not render_func and available_mimetypes: target_mimetype = mimeparse.best_match(available_mimetypes, mimetype) render_func = self.render_map.get(target_mimetype, None) if not render_func: raise NoViewMethod("%s not supported for this program" % mimetype) principle_mimetype = render_func.mimetypes[0] if GiottoControl in render_func.__class__.mro(): # redirection defined as view (not wrapped in lambda) return {'body': render_func, 'persist': render_func.persist} if callable(self.persist): # persist (cookie data) can be either an object or a callable persist = self.persist(result) else: persist = self.persist # render functions can take either one or two arguments, both are # supported by the API arg_names = inspect.getargspec(render_func).args num_args = len(set(arg_names) - set(['self', 'cls'])) if num_args == 2: data = render_func(result, errors or Mock()) else: # if the renderer only has one argument, don't pass in the 2nd arg. data = render_func(result) if GiottoControl in data.__class__.mro(): # render function returned a control object return {'body': data, 'persist': persist} if not hasattr(data, 'items'): # view returned string data = {'body': data, 'mimetype': principle_mimetype} else: # result is a dict in the form {body: XX, mimetype: xx} if not 'mimetype' in data and target_mimetype == '*/*': data['mimetype'] = '' if not 'mimetype' in data: data['mimetype'] = target_mimetype data['persist'] = persist return data
<SYSTEM_TASK:> Try to display any object in sensible HTML. <END_TASK> <USER_TASK:> Description: def generic_html(self, result, errors): """ Try to display any object in sensible HTML. """
h1 = htmlize(type(result)) out = [] result = pre_process_json(result) if not hasattr(result, 'items'): # result is a non-container header = "<tr><th>Value</th></tr>" if type(result) is list: result = htmlize_list(result) else: result = htmlize(result) out = ["<tr><td>" + result + "</td></tr>"] elif hasattr(result, 'lower'): out = ["<tr><td>" + result + "</td></tr>"] else: # object is a dict header = "<tr><th>Key</th><th>Value</th></tr>" for key, value in result.items(): v = htmlize(value) row = "<tr><td>{0}</td><td>{1}</td></tr>".format(key, v) out.append(row) env = Environment(loader=PackageLoader('giotto')) template = env.get_template('generic.html') rendered = template.render({'header': h1, 'table_header': header, 'table_body': out}) return {'body': rendered, 'mimetype': 'text/html'}
<SYSTEM_TASK:> If twisted is available, make `emit' return a DeferredList <END_TASK> <USER_TASK:> Description: def install_twisted(): """ If twisted is available, make `emit' return a DeferredList This has been successfully tested with Twisted 14.0 and later. """
global emit, _call_partial try: from twisted.internet import defer emit = _emit_twisted _call_partial = defer.maybeDeferred return True except ImportError: _call_partial = lambda fn, *a, **kw: fn(*a, **kw) return False
<SYSTEM_TASK:> Calls a callback with optional args and keyword args lists. This method exists so <END_TASK> <USER_TASK:> Description: def _call(callback, args=[], kwargs={}): """ Calls a callback with optional args and keyword args lists. This method exists so we can inspect the `_max_calls` attribute that's set by `_on`. If this value is None, the callback is considered to have no limit. Otherwise, an integer value is expected and decremented until there are no remaining calls """
if not hasattr(callback, '_max_calls'): callback._max_calls = None # None implies no callback limit if callback._max_calls is None: return _call_partial(callback, *args, **kwargs) # Should the signal be disconnected? if callback._max_calls <= 0: return disconnect(callback) callback._max_calls -= 1 return _call_partial(callback, *args, **kwargs)
<SYSTEM_TASK:> Proxy for `smokesignal.on`, which is compatible as both a function call and <END_TASK> <USER_TASK:> Description: def _on(on_signals, callback, max_calls=None): """ Proxy for `smokesignal.on`, which is compatible as both a function call and a decorator. This method cannot be used as a decorator. :param signals: A single signal or list/tuple of signals that callback should respond to :param callback: A callable that should respond to supplied signal(s) :param max_calls: Integer maximum calls for callback. None for no limit. """
if not callable(callback): raise AssertionError('Signal callbacks must be callable') # Support for lists of signals if not isinstance(on_signals, (list, tuple)): on_signals = [on_signals] callback._max_calls = max_calls # Register the callback for signal in on_signals: receivers[signal].add(callback) # Setup responds_to partial for use later if not hasattr(callback, 'responds_to'): callback.responds_to = partial(responds_to, callback) # Setup signals partial for use later. if not hasattr(callback, 'signals'): callback.signals = partial(signals, callback) # Setup disconnect partial for user later if not hasattr(callback, 'disconnect'): callback.disconnect = partial(disconnect, callback) # Setup disconnect_from partial for user later if not hasattr(callback, 'disconnect_from'): callback.disconnect_from = partial(disconnect_from, callback) return callback
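A sketch of how the corresponding public smokesignal API is typically used; the signal name and callback are illustrative:

import smokesignal

@smokesignal.on('user_created', max_calls=1)
def send_welcome(user=None):
    print('welcome', user)

smokesignal.emit('user_created', user='jdoe')    # callback fires once
smokesignal.emit('user_created', user='other')   # ignored: max_calls exhausted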
<SYSTEM_TASK:> Removes a callback from specified signal registries and prevents it from responding <END_TASK> <USER_TASK:> Description: def disconnect_from(callback, signals): """ Removes a callback from specified signal registries and prevents it from responding to any emitted signal. :param callback: A callable registered with smokesignal :param signals: A single signal or list/tuple of signals """
# Support for lists of signals if not isinstance(signals, (list, tuple)): signals = [signals] # Remove callback from receiver list if it responds to the signal for signal in signals: if responds_to(callback, signal): receivers[signal].remove(callback)
<SYSTEM_TASK:> Clears all callbacks for a particular signal or signals <END_TASK> <USER_TASK:> Description: def clear(*signals): """ Clears all callbacks for a particular signal or signals """
signals = signals if signals else receivers.keys() for signal in signals: receivers[signal].clear()
<SYSTEM_TASK:> Validates whether or not this index's requirements are satisfied by the inputted record and <END_TASK> <USER_TASK:> Description: def validate(self, record, values): """ Validates whether or not this index's requirements are satisfied by the inputted record and values. If this index fails validation, a ValidationError will be raised. :param record | subclass of <orb.Table> values | {<orb.Column>: <variant>, ..} :return <bool> """
schema = record.schema() columns = self.columns() try: column_values = [values[col] for col in columns] except KeyError as err: msg = 'Missing {0} from {1}.{2} index'.format(err[0].name(), record.schema().name(), self.name()) raise errors.InvalidIndexArguments(self.schema(), msg=msg) # # ensure a unique record is preserved # if self.unique(): # lookup = getattr(record, self.name()) # other = lookup(*column_values) # if other and other != record: # msg = 'A record already exists with the same {0} combination.'.format(', '.join(self.columnNames())) # raise errors.IndexValidationError(self, msg=msg) return True
<SYSTEM_TASK:> Meta program for serving any file based on the path <END_TASK> <USER_TASK:> Description: def StaticServe(base_path='/views/static/'): """ Meta program for serving any file based on the path """
def get_file(path=RAW_INVOCATION_ARGS): fullpath = get_config('project_path') + os.path.join(base_path, path) try: mime, encoding = mimetypes.guess_type(fullpath) return open(fullpath, 'rb'), mime or 'application/octet-stream' except IOError: raise DataNotFound("File does not exist") class StaticServe(Program): controllers = ['http-get'] model = [get_file] view = FileView() return StaticServe()
<SYSTEM_TASK:> Meta program for serving a single file. Useful for favicon.ico and robots.txt <END_TASK> <USER_TASK:> Description: def SingleStaticServe(file_path): """ Meta program for serving a single file. Useful for favicon.ico and robots.txt """
def get_file(): mime, encoding = mimetypes.guess_type(file_path) fullpath = os.path.join(get_config('project_path'), file_path) return open(fullpath, 'rb'), mime or 'application/octet-stream' class SingleStaticServe(Program): controllers = ['http-get'] model = [get_file] view = FileView() return SingleStaticServe()
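A sketch of wiring both meta-programs into an application; the Manifest import path and mount names are assumptions:

from giotto.programs import Manifest

manifest = Manifest({
    'static': StaticServe('/views/static/'),               # GET /static/<path>
    'favicon.ico': SingleStaticServe('views/favicon.ico'),
})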
<SYSTEM_TASK:> Initializes the database by defining any additional structures that are required during selection. <END_TASK> <USER_TASK:> Description: def onSync(self, event): """ Initializes the database by defining any additional structures that are required during selection. """
SETUP = self.statement('SETUP') if SETUP: sql, data = SETUP(self.database()) if event.context.dryRun: print sql % data else: self.execute(sql, data, writeAccess=True)
<SYSTEM_TASK:> Closes the connection to the database for this connection. <END_TASK> <USER_TASK:> Description: def close(self): """ Closes the connection to the database for this connection. :return <bool> closed """
for pool in self.__pool.values(): while not pool.empty(): conn = pool.get_nowait() try: self._close(conn) except Exception: pass # reset the pool size after closing all connections self.__poolSize.clear()
<SYSTEM_TASK:> Returns the count of records that will be loaded for the inputted <END_TASK> <USER_TASK:> Description: def count(self, model, context): """ Returns the count of records that will be loaded for the inputted information. :param model | <subclass of orb.Model> context | <orb.Context> :return <int> """
SELECT_COUNT = self.statement('SELECT COUNT') try: sql, data = SELECT_COUNT(model, context) except orb.errors.QueryIsNull: return 0 else: if context.dryRun: print sql % data return 0 else: try: rows, _ = self.execute(sql, data) except orb.errors.EmptyCommand: rows = [] return sum([row['count'] for row in rows])
<SYSTEM_TASK:> Commits the changes to the current database connection. <END_TASK> <USER_TASK:> Description: def commit(self): """ Commits the changes to the current database connection. :return <bool> success """
with self.native(writeAccess=True) as conn: if not self._closed(conn): return self._commit(conn)
<SYSTEM_TASK:> Creates a new table in the database based off the inputted <END_TASK> <USER_TASK:> Description: def createModel(self, model, context, owner='', includeReferences=True): """ Creates a new table in the database based off the inputted schema information. If the dryRun flag is specified, then the SQL will only be logged to the current logger, and not actually executed in the database. :param model | <orb.Model> context | <orb.Context> :return <bool> success """
CREATE = self.statement('CREATE') sql, data = CREATE(model, includeReferences=includeReferences, owner=owner) if not sql: log.error('Failed to create {0}'.format(model.schema().dbname())) return False else: if context.dryRun: print sql % data else: self.execute(sql, data, writeAccess=True) log.info('Created {0}'.format(model.schema().dbname())) return True
<SYSTEM_TASK:> Removes the inputted record from the database. <END_TASK> <USER_TASK:> Description: def delete(self, records, context): """ Removes the inputted record from the database. :param records | <orb.Collection> context | <orb.Context> :return <int> number of rows removed """
# include various schema records to remove DELETE = self.statement('DELETE') sql, data = DELETE(records, context) if context.dryRun: print sql % data return 0 else: return self.execute(sql, data, writeAccess=True)
<SYSTEM_TASK:> Inserts the table instance into the database. If the <END_TASK> <USER_TASK:> Description: def insert(self, records, context): """ Inserts the table instance into the database. If the dryRun flag is specified, then the command will be logged but not executed. :param records | <orb.Table> lookup | <orb.LookupOptions> options | <orb.Context> :return <dict> changes """
INSERT = self.statement('INSERT') sql, data = INSERT(records) if context.dryRun: print sql, data return [], 0 else: return self.execute(sql, data, writeAccess=True)
<SYSTEM_TASK:> Returns whether or not this connection is currently <END_TASK> <USER_TASK:> Description: def isConnected(self): """ Returns whether or not this connection is currently active. :return <bool> connected """
for pool in self.__pool.values(): if not pool.empty(): return True return False
<SYSTEM_TASK:> Opens a new database connection to the database defined <END_TASK> <USER_TASK:> Description: def native(self, writeAccess=False, isolation_level=None): """ Opens a new database connection to the database defined by the inputted database. :return <variant> native connection """
host = self.database().writeHost() if writeAccess else self.database().host() conn = self.open(writeAccess=writeAccess) try: if isolation_level is not None: if conn.isolation_level == isolation_level: isolation_level = None else: conn.set_isolation_level(isolation_level) yield conn except Exception: if self._closed(conn): conn = None self.close() else: conn = self._rollback(conn) raise else: if not self._closed(conn): self._commit(conn) finally: if conn is not None and not self._closed(conn): if isolation_level is not None: conn.set_isolation_level(isolation_level) self.__pool[host].put(conn)
<SYSTEM_TASK:> Returns the sqlite database for the current thread. <END_TASK> <USER_TASK:> Description: def open(self, writeAccess=False): """ Returns the sqlite database for the current thread. :return <variant> || None """
host = self.database().writeHost() if writeAccess else self.database().host() pool = self.__pool[host] if self.__poolSize[host] >= self.__maxSize or pool.qsize(): if pool.qsize() == 0: log.warning('Waiting for connection to database!!!') return pool.get() else: db = self.database() # process a pre-connect event event = orb.events.ConnectionEvent() db.onPreConnect(event) self.__poolSize[host] += 1 try: conn = self._open(self.database(), writeAccess=writeAccess) except Exception: self.__poolSize[host] -= 1 raise else: event = orb.events.ConnectionEvent(success=conn is not None, native=conn) db.onPostConnect(event) return conn
<SYSTEM_TASK:> Rolls back changes to this database. <END_TASK> <USER_TASK:> Description: def rollback(self): """ Rolls back changes to this database. """
with self.native(writeAccess=True) as conn: return self._rollback(conn)
<SYSTEM_TASK:> Updates the modified data in the database for the <END_TASK> <USER_TASK:> Description: def update(self, records, context): """ Updates the modified data in the database for the inputted record. If the dryRun flag is specified then the command will be logged but not executed. :param record | <orb.Table> lookup | <orb.LookupOptions> options | <orb.Context> :return <dict> changes """
UPDATE = self.statement('UPDATE') sql, data = UPDATE(records) if context.dryRun: print sql, data return [], 0 else: return self.execute(sql, data, writeAccess=True)
<SYSTEM_TASK:> Type of a valid object. <END_TASK> <USER_TASK:> Description: def type(self): """ Type of a valid object. Type may be a JSON type name or a list of such names. Valid JSON type names are ``string``, ``number``, ``integer``, ``boolean``, ``object``, ``array``, ``any`` (default). """
value = self._schema.get("type", "any") if not isinstance(value, (basestring, dict, list)): raise SchemaError( "type value {0!r} is not a simple type name, nested " "schema nor a list of those".format(value)) if isinstance(value, list): type_list = value # Union types have to have at least two alternatives if len(type_list) < 2: raise SchemaError( "union type {0!r} is too short".format(value)) else: type_list = [value] seen = set() for js_type in type_list: if isinstance(js_type, dict): # no nested validation here pass elif isinstance(js_type, list): # no nested validation here pass else: if js_type in seen: raise SchemaError( ("type value {0!r} contains duplicate element" " {1!r}").format(value, js_type)) else: seen.add(js_type) if js_type not in ( "string", "number", "integer", "boolean", "object", "array", "null", "any"): raise SchemaError( "type value {0!r} is not a simple type " "name".format(js_type)) return value
<SYSTEM_TASK:> Schema for particular properties of the object. <END_TASK> <USER_TASK:> Description: def properties(self): """Schema for particular properties of the object."""
value = self._schema.get("properties", {}) if not isinstance(value, dict): raise SchemaError( "properties value {0!r} is not an object".format(value)) return value
<SYSTEM_TASK:> Schema or a list of schemas describing particular elements of the object. <END_TASK> <USER_TASK:> Description: def items(self): """ Schema or a list of schemas describing particular elements of the object. A single schema applies to all the elements. Each element of the object must match that schema. A list of schemas describes particular elements of the object. """
value = self._schema.get("items", {}) if not isinstance(value, (list, dict)): raise SchemaError( "items value {0!r} is neither a list nor an object". format(value)) return value
<SYSTEM_TASK:> Flag indicating an optional property. <END_TASK> <USER_TASK:> Description: def optional(self): """Flag indicating an optional property."""
value = self._schema.get("optional", False) if value is not False and value is not True: raise SchemaError( "optional value {0!r} is not a boolean".format(value)) return value
<SYSTEM_TASK:> Schema for all additional properties, or False. <END_TASK> <USER_TASK:> Description: def additionalProperties(self): """Schema for all additional properties, or False."""
value = self._schema.get("additionalProperties", {}) if not isinstance(value, dict) and value is not False: raise SchemaError( "additionalProperties value {0!r} is neither false nor" " an object".format(value)) return value
<SYSTEM_TASK:> Additional object or objects required by this object. <END_TASK> <USER_TASK:> Description: def requires(self): """Additional object or objects required by this object."""
# NOTE: spec says this can also be a list of strings value = self._schema.get("requires", {}) if not isinstance(value, (basestring, dict)): raise SchemaError( "requires value {0!r} is neither a string nor an" " object".format(value)) return value
<SYSTEM_TASK:> Flag indicating if the minimum value is inclusive or exclusive. <END_TASK> <USER_TASK:> Description: def minimumCanEqual(self): """Flag indicating if the minimum value is inclusive or exclusive."""
if self.minimum is None: raise SchemaError("minimumCanEqual requires presence of minimum") value = self._schema.get("minimumCanEqual", True) if value is not True and value is not False: raise SchemaError( "minimumCanEqual value {0!r} is not a boolean".format( value)) return value
<SYSTEM_TASK:> Flag indicating if the maximum value is inclusive or exclusive. <END_TASK> <USER_TASK:> Description: def maximumCanEqual(self): """Flag indicating if the maximum value is inclusive or exclusive."""
if self.maximum is None: raise SchemaError("maximumCanEqual requires presence of maximum") value = self._schema.get("maximumCanEqual", True) if value is not True and value is not False: raise SchemaError( "maximumCanEqual value {0!r} is not a boolean".format( value)) return value
<SYSTEM_TASK:> Regular expression describing valid objects. <END_TASK> <USER_TASK:> Description: def pattern(self): """ Regular expression describing valid objects. .. note:: The JSON schema specification says that this value SHOULD follow the ``ECMA 262/Perl 5`` format. We cannot support this so we support python regular expressions instead. This is still valid but should be noted for clarity. :returns: None or compiled regular expression """
value = self._schema.get("pattern", None) if value is None: return try: return re.compile(value) except re.error as ex: raise SchemaError( "pattern value {0!r} is not a valid regular expression:" " {1}".format(value, str(ex)))
<SYSTEM_TASK:> Enumeration of allowed object values. <END_TASK> <USER_TASK:> Description: def enum(self): """ Enumeration of allowed object values. The enumeration must not contain duplicates. """
value = self._schema.get("enum", None) if value is None: return if not isinstance(value, list): raise SchemaError( "enum value {0!r} is not a list".format(value)) if len(value) == 0: raise SchemaError( "enum value {0!r} does not contain any" " elements".format(value)) seen = set() for item in value: if item in seen: raise SchemaError( "enum value {0!r} contains duplicate element" " {1!r}".format(value, item)) else: seen.add(item) return value
<SYSTEM_TASK:> Title of the object. <END_TASK> <USER_TASK:> Description: def title(self): """ Title of the object. This schema element is purely informative. """
value = self._schema.get("title", None) if value is None: return if not isinstance(value, basestring): raise SchemaError( "title value {0!r} is not a string".format(value)) return value
<SYSTEM_TASK:> Integer that divides the object without remainder. <END_TASK> <USER_TASK:> Description: def divisibleBy(self): """Integer that divides the object without remainder."""
value = self._schema.get("divisibleBy", 1) if value is None: return if not isinstance(value, NUMERIC_TYPES): raise SchemaError( "divisibleBy value {0!r} is not a numeric type". format(value)) if value < 0: raise SchemaError( "divisibleBy value {0!r} cannot be" " negative".format(value)) return value
<SYSTEM_TASK:> Description of disallowed objects. <END_TASK> <USER_TASK:> Description: def disallow(self): """ Description of disallowed objects. Disallow must be a type name, a nested schema or a list of those. Type name must be one of ``string``, ``number``, ``integer``, ``boolean``, ``object``, ``array``, ``null`` or ``any``. """
value = self._schema.get("disallow", None) if value is None: return if not isinstance(value, (basestring, dict, list)): raise SchemaError( "disallow value {0!r} is not a simple type name, nested " "schema nor a list of those".format(value)) if isinstance(value, list): disallow_list = value else: disallow_list = [value] seen = set() for js_disallow in disallow_list: if isinstance(js_disallow, dict): # no nested validation here pass else: if js_disallow in seen: raise SchemaError( "disallow value {0!r} contains duplicate element" " {1!r}".format(value, js_disallow)) else: seen.add(js_disallow) if js_disallow not in ( "string", "number", "integer", "boolean", "object", "array", "null", "any"): raise SchemaError( "disallow value {0!r} is not a simple type" " name".format(js_disallow)) return disallow_list
<SYSTEM_TASK:> Validate specified JSON text with specified schema. <END_TASK> <USER_TASK:> Description: def validate(schema_text, data_text, deserializer=_default_deserializer): """ Validate specified JSON text with specified schema. Both arguments are converted to JSON objects with :func:`simplejson.loads`, if present, or :func:`json.loads`. :param schema_text: Text of the JSON schema to check against :type schema_text: :class:`str` :param data_text: Text of the JSON object to check :type data_text: :class:`str` :param deserializer: Function to convert the schema and data to JSON objects :type deserializer: :class:`callable` :returns: Same as :meth:`json_schema_validator.validator.Validator.validate` :raises: Whatever may be raised by simplejson (in particular :class:`simplejson.decoder.JSONDecoderError`, a subclass of :class:`ValueError`) or json :raises: Whatever may be raised by :meth:`json_schema_validator.validator.Validator.validate`. In particular :class:`json_schema_validator.errors.ValidationError` and :class:`json_schema_validator.errors.SchemaError` """
schema = Schema(deserializer(schema_text)) data = deserializer(data_text) return Validator.validate(schema, data)
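A minimal usage sketch; the schema and documents are illustrative:

schema_text = '{"type": "object", "properties": {"name": {"type": "string"}}}'
good_text = '{"name": "example"}'
bad_text = '{"name": 5}'

validate(schema_text, good_text)     # passes
try:
    validate(schema_text, bad_text)
except Exception as err:             # ValidationError in practice
    print(err)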
<SYSTEM_TASK:> Validate an activation key and activate the corresponding <END_TASK> <USER_TASK:> Description: def activate_user(activation_key): """ Validate an activation key and activate the corresponding ``User`` if valid. If the key is valid and has not expired, return the ``User`` after activating. If the key is not valid or has expired, return ``False``. If the key is valid but the ``User`` is already active, return ``False``. To prevent reactivation of an account which has been deactivated by site administrators, the activation key is reset to the string constant ``RegistrationProfile.ACTIVATED`` after successful activation. """
# Make sure the key we're trying conforms to the pattern of a # SHA1 hash; if it doesn't, no point trying to look it up in # the database. if SHA1_RE.search(activation_key): try: profile = RegistrationProfile.objects.get( activation_key=activation_key) except RegistrationProfile.DoesNotExist: return False if not profile.activation_key_expired(): user = profile.user user.is_active = True user.save() profile.activation_key = RegistrationProfile.ACTIVATED profile.save() return user return False
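For context, a typical call site might look like the sketch below. The manager attachment follows from the RegistrationProfile.objects lookup in the body; the surrounding view code is hypothetical.

user = RegistrationProfile.objects.activate_user(activation_key)
if user:
    # Key was valid and unexpired; the account is now active.
    login_and_redirect(user)       # hypothetical helper
else:
    # Invalid, expired, or already-used key.
    render_activation_failed()     # hypothetical helper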
<SYSTEM_TASK:> Program control is returned from the subroutine to the calling program. <END_TASK> <USER_TASK:> Description: def instruction_RTS(self, opcode): """ Program control is returned from the subroutine to the calling program. The return address is pulled from the stack. source code forms: RTS CC bits "HNZVC": ----- """
ea = self.pull_word(self.system_stack_pointer) # log.info("%x|\tRTS to $%x \t| %s" % ( # self.last_op_address, # ea, # self.cfg.mem_info.get_shortest(ea) # )) self.program_counter.set(ea)
<SYSTEM_TASK:> Program control is transferred to the effective address after storing <END_TASK> <USER_TASK:> Description: def instruction_BSR_JSR(self, opcode, ea): """ Program control is transferred to the effective address after storing the return address on the hardware stack. A return from subroutine (RTS) instruction is used to reverse this process and must be the last instruction executed in a subroutine. source code forms: BSR dd; LBSR DDDD; JSR EA CC bits "HNZVC": ----- """
# log.info("%x|\tJSR/BSR to $%x \t| %s" % ( # self.last_op_address, # ea, self.cfg.mem_info.get_shortest(ea) # )) self.push_word(self.system_stack_pointer, self.program_counter.value) self.program_counter.set(ea)
<SYSTEM_TASK:> Causes a branch if the previous operation caused neither a carry nor a <END_TASK> <USER_TASK:> Description: def instruction_BHI(self, opcode, ea): """ Causes a branch if the previous operation caused neither a carry nor a zero result. When used after a subtract or compare operation on unsigned binary values, this instruction will branch if the register was higher than the memory register. Generally not useful after INC/DEC, LD/TST, and TST/CLR/COM instructions. source code forms: BHI dd; LBHI DDDD CC bits "HNZVC": ----- """
if self.C == 0 and self.Z == 0: # log.info("$%x BHI branch to $%x, because C==0 and Z==0 \t| %s" % ( # self.program_counter, ea, self.cfg.mem_info.get_shortest(ea) # )) self.program_counter.set(ea)
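To make the branch condition concrete: after an unsigned compare, carry is set when the register is below the operand (a borrow occurred) and zero is set when they are equal, so BHI fires only when both are clear. A small stand-alone sketch, not part of the emulator:

def bhi_taken(register_value, operand):
    # Flags as an unsigned CMP would leave them.
    c = 1 if register_value < operand else 0    # borrow sets carry
    z = 1 if register_value == operand else 0
    return c == 0 and z == 0                    # "higher" means strictly above

bhi_taken(0x80, 0x10)   # True  - register is higher
bhi_taken(0x10, 0x10)   # False - equal, Z is set
bhi_taken(0x01, 0x10)   # False - lower, C is set

BLS below is the exact complement: it branches when either flag is set.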
<SYSTEM_TASK:> Causes a branch if the previous operation caused either a carry or a <END_TASK> <USER_TASK:> Description: def instruction_BLS(self, opcode, ea): """ Causes a branch if the previous operation caused either a carry or a zero result. When used after a subtract or compare operation on unsigned binary values, this instruction will branch if the register was lower than or the same as the memory register. Generally not useful after INC/DEC, LD/ST, and TST/CLR/COM instructions. source code forms: BLS dd; LBLS DDDD CC bits "HNZVC": ----- """
# if (self.C|self.Z) == 0: if self.C == 1 or self.Z == 1: # log.info("$%x BLS branch to $%x, because C|Z==1 \t| %s" % ( # self.program_counter, ea, self.cfg.mem_info.get_shortest(ea) # )) self.program_counter.set(ea)
<SYSTEM_TASK:> Loads the contents of memory location M into the designated register. <END_TASK> <USER_TASK:> Description: def instruction_LD8(self, opcode, m, register): """ Loads the contents of memory location M into the designated register. source code forms: LDA P; LDB P CC bits "HNZVC": -aa0- """
# log.debug("$%x LD8 %s = $%x" % ( # self.program_counter, # register.name, m, # )) register.set(m) self.clear_NZV() self.update_NZ_8(m)
<SYSTEM_TASK:> Writes the contents of a 16-bit register into two consecutive memory <END_TASK> <USER_TASK:> Description: def instruction_ST16(self, opcode, ea, register): """ Writes the contents of a 16-bit register into two consecutive memory locations. source code forms: STD P; STX P; STY P; STS P; STU P CC bits "HNZVC": -aa0- """
value = register.value # log.debug("$%x ST16 store value $%x from %s at $%x \t| %s" % ( # self.program_counter, # value, register.name, ea, # self.cfg.mem_info.get_shortest(ea) # )) self.clear_NZV() self.update_NZ_16(value) return ea, value
<SYSTEM_TASK:> Writes the contents of an 8-bit register into a memory location. <END_TASK> <USER_TASK:> Description: def instruction_ST8(self, opcode, ea, register): """ Writes the contents of an 8-bit register into a memory location. source code forms: STA P; STB P CC bits "HNZVC": -aa0- """
value = register.value # log.debug("$%x ST8 store value $%x from %s at $%x \t| %s" % ( # self.program_counter, # value, register.name, ea, # self.cfg.mem_info.get_shortest(ea) # )) self.clear_NZV() self.update_NZ_8(value) return ea, value
<SYSTEM_TASK:> Remove password field when serializing an object <END_TASK> <USER_TASK:> Description: def to_native(self, obj): """Remove password field when serializing an object"""
ret = super(UserSerializer, self).to_native(obj) del ret['password'] return ret
<SYSTEM_TASK:> List of headers appropriate for the upgrade <END_TASK> <USER_TASK:> Description: def handshake_headers(self): """ List of headers appropriate for the upgrade handshake. """
headers = [ ('Host', self.host), ('Connection', 'Upgrade'), ('Upgrade', 'WebSocket'), ('Sec-WebSocket-Key', self.key.decode('utf-8')), # Origin is proxied from the downstream server, don't set it twice # ('Origin', self.url), ('Sec-WebSocket-Version', str(max(WS_VERSION))) ] if self.protocols: headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols))) if self.extra_headers: headers.extend(self.extra_headers) logger.info("Handshake headers: %s", headers) return headers
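For a rough idea of the result, with made-up values for the host, key, and requested protocol (and assuming the library's newest supported version is 13), the returned list would look roughly like this; the protocol entry only appears when protocols were requested:

[('Host', 'upstream.example.com:8080'),
 ('Connection', 'Upgrade'),
 ('Upgrade', 'WebSocket'),
 ('Sec-WebSocket-Key', 'dGhlIHNhbXBsZSBub25jZQ=='),
 ('Sec-WebSocket-Version', '13'),
 ('Sec-WebSocket-Protocol', 'chat')]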
<SYSTEM_TASK:> Push upstream messages to downstream. <END_TASK> <USER_TASK:> Description: def received_message(self, m): """Push upstream messages to downstream."""
# TODO: No support for binary messages m = str(m) logger.debug("Incoming upstream WS: %s", m) uwsgi.websocket_send(m) logger.debug("Send ok")
<SYSTEM_TASK:> Combine async uwsgi message loop with ws4py message loop. <END_TASK> <USER_TASK:> Description: def run(self): """Combine async uwsgi message loop with ws4py message loop. TODO: This could use some serious optimization and behave in an asynchronously correct way instead of just sleeping. """
self.sock.setblocking(False) try: while not self.terminated: logger.debug("Doing nothing") time.sleep(0.050) logger.debug("Asking for downstream msg") msg = uwsgi.websocket_recv_nb() if msg: logger.debug("Incoming downstream WS: %s", msg) self.send(msg) s = self.stream self.opened() logger.debug("Asking for upstream msg {s}".format(s=s)) try: bytes = self.sock.recv(self.reading_buffer_size) if bytes: self.process(bytes) except BlockingIOError: pass except Exception as e: logger.exception(e) finally: logger.info("Terminating WS proxy loop") self.terminate()
<SYSTEM_TASK:> Returns the content of a cached resource. <END_TASK> <USER_TASK:> Description: def get_content(self, url): """Returns the content of a cached resource. Args: url: The url of the resource Returns: The content of the cached resource or None if not in the cache """
cache_path = self._url_to_path(url) try: with open(cache_path, 'rb') as f: return f.read() except IOError: return None
<SYSTEM_TASK:> Returns the path of a cached resource. <END_TASK> <USER_TASK:> Description: def get_path(self, url): """Returns the path of a cached resource. Args: url: The url of the resource Returns: The path to the cached resource or None if not in the cache """
cache_path = self._url_to_path(url) if os.path.exists(cache_path): return cache_path return None
<SYSTEM_TASK:> Stores the content of a resource into the disk cache. <END_TASK> <USER_TASK:> Description: def put_content(self, url, content): """Stores the content of a resource into the disk cache. Args: url: The url of the resource content: The content of the resource Raises: CacheError: If the content cannot be put in cache """
cache_path = self._url_to_path(url) # Ensure that cache directories exist try: dir = os.path.dirname(cache_path) os.makedirs(dir) except OSError as e: if e.errno != errno.EEXIST: raise Error('Failed to create cache directories for %s' % cache_path) try: with open(cache_path, 'wb') as f: f.write(content) except IOError: raise Error('Failed to cache content as %s for %s' % (cache_path, url))
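Taken together with get_content() further above, a cache round trip might look like the following; the class name and constructor argument are assumptions, since only the method bodies are shown here.

cache = DiskCache(dir='/tmp/resource-cache')   # hypothetical class name and constructor
url = 'http://example.com/assets/logo.png'

cache.put_content(url, b'<png bytes>')
assert cache.get_content(url) == b'<png bytes>'
assert cache.get_content('http://example.com/missing') is None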
<SYSTEM_TASK:> Puts a resource already on disk into the disk cache. <END_TASK> <USER_TASK:> Description: def put_path(self, url, path): """Puts a resource already on disk into the disk cache. Args: url: The original url of the resource path: The resource already available on disk Raises: CacheError: If the file cannot be put in cache """
cache_path = self._url_to_path(url) # Ensure that cache directories exist try: dir = os.path.dirname(cache_path) os.makedirs(dir) except OSError as e: if e.errno != errno.EEXIST: raise Error('Failed to create cache directories for %s' % cache_path) # Remove the resource if it already exists try: os.unlink(cache_path) except OSError: pass try: # First try hard link to avoid wasting disk space & overhead os.link(path, cache_path) except OSError: try: # Use file copy as fallback shutil.copyfile(path, cache_path) except IOError: raise Error('Failed to cache %s as %s for %s' % (path, cache_path, url))
<SYSTEM_TASK:> Returns the size of the cache in bytes. <END_TASK> <USER_TASK:> Description: def size(self): """Returns the size of the cache in bytes."""
total_size = 0 for dir_path, dir_names, filenames in os.walk(self.dir): for f in filenames: fp = os.path.join(dir_path, f) total_size += os.path.getsize(fp) return total_size
<SYSTEM_TASK:> Create a named notebook if one doesn't exist. <END_TASK> <USER_TASK:> Description: def create_named_notebook(fname, context): """Create a named notebook if one doesn't exist."""
if os.path.exists(fname): return from nbformat import v4 as nbf # Courtesy of http://nbviewer.ipython.org/gist/fperez/9716279 text = "Welcome to *pyramid_notebook!* Use *File* *>* *Shutdown* to close this." cells = [nbf.new_markdown_cell(text)] greeting = context.get("greeting") if greeting: cells.append(nbf.new_markdown_cell(greeting)) cells.append(nbf.new_code_cell('')) nb = nbf.new_notebook(cells=cells) with open(fname, 'w') as f: writer = JSONWriter() writer.write(nb, f)
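A short usage sketch; the greeting key is read from the context dict exactly as in the body above, and the filename is arbitrary:

create_named_notebook(
    "scratch.ipynb",
    {"greeting": "Objects from *myapp* have been imported for you."},
)
# A second call is a no-op because the file now exists.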
<SYSTEM_TASK:> Reconstruct the remote url from the given WSGI ``environ`` dictionary. <END_TASK> <USER_TASK:> Description: def reconstruct_url(environ, port): """Reconstruct the remote url from the given WSGI ``environ`` dictionary. :param environ: the WSGI environment :type environ: :class:`collections.MutableMapping` :returns: the remote url to proxy :rtype: :class:`basestring` """
# From WSGI spec, PEP 333 url = environ.get('PATH_INFO', '') if not url.startswith(('http://', 'https://')): url = '%s://%s%s' % ( environ['wsgi.url_scheme'], environ['HTTP_HOST'], url ) # Fix ;arg=value in url if '%3B' in url: url, arg = url.split('%3B', 1) url = ';'.join([url, arg.replace('%3D', '=')]) # Stick query string back in try: query_string = environ['QUERY_STRING'] except KeyError: pass else: url += '?' + query_string parsed = urlparse(url) replaced = parsed._replace(netloc="localhost:{}".format(port)) url = urlunparse(replaced) environ['reconstructed_url'] = url return url
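A worked example with a minimal WSGI environ (all values illustrative):

environ = {
    'wsgi.url_scheme': 'http',
    'HTTP_HOST': 'example.com',
    'PATH_INFO': '/notebook/default',
    'QUERY_STRING': 'token=abc123',
}
reconstruct_url(environ, 8899)
# -> 'http://localhost:8899/notebook/default?token=abc123'
# environ['reconstructed_url'] now holds the same value.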
<SYSTEM_TASK:> Generate presentation summaries in reverse chronological order. <END_TASK> <USER_TASK:> Description: def get_summaries(client, filter=None): """ Generate presentation summaries in reverse chronological order. A filter object can be supplied to filter summaries or to limit the fetching process. """
try: index = 0 while True: rb = _RightBarPage(client, index) summaries = rb.summaries() if filter is not None: summaries = filter.filter(summaries) for summary in summaries: yield summary index += len(summaries) except StopIteration: pass
<SYSTEM_TASK:> Return a list of all the presentation summaries contained in this page <END_TASK> <USER_TASK:> Description: def summaries(self): """Return a list of all the presentation summaries contained in this page"""
def create_summary(div): def get_id(div): return get_url(div).rsplit('/')[-1] def get_url(div): return client.get_url(div.find('h2', class_='itemtitle').a['href']) def get_desc(div): return div.p.get_text(strip=True) def get_auth(div): return div.find('span', class_='author').a['title'] def get_date(div): str = div.find('span', class_='author').get_text() str = str.replace('\n', ' ') str = str.replace(six.u('\xa0'), ' ') match = re.search(r'on\s+(\w{3} [0-9]{1,2}, 20[0-9]{2})', str) return datetime.datetime.strptime(match.group(1), "%b %d, %Y") def get_title(div): return div.find('h2', class_='itemtitle').a['title'] return { 'id': get_id(div), 'url': get_url(div), 'desc': get_desc(div), 'auth': get_auth(div), 'date': get_date(div), 'title': get_title(div), } videos = self.soup.findAll('div', {'class': 'news_type_video'}) return [create_summary(div) for div in videos]
<SYSTEM_TASK:> Populate notebook context with startup.py initialization file skeleton and greeting. <END_TASK> <USER_TASK:> Description: def make_startup(notebook_context, config_file, bootstrap_py=PYRAMID_BOOSTRAP, bootstrap_greeting=PYRAMID_GREETING, cwd=""): """Populate notebook context with startup.py initialization file skeleton and greeting. This will set up the context keys ``startup`` and ``greeting`` with their default values. :param notebook_context: Dictionary of notebook context info to be passed to NotebookManager :param config_file: The current .ini file used to start up Pyramid. It is passed to ``pyramid.paster.bootstrap()`` to initialize a dummy request object and the like. :param bootstrap_py: startup.py script header which sets up the environment :param bootstrap_greeting: Markdown snippet which starts the greeting text :param cwd: Optional forced working directory. If not set, use the directory of the config file. """
# Set up some default imports and variables nc = notebook_context add_greeting(nc, "\nAvailable variables and functions:") # http://docs.pylonsproject.org/projects/pyramid/en/1.1-branch/narr/commandline.html#writing-a-script if config_file is not None: assert type(config_file) == str, "Got bad config_file {}".format(config_file) config_file = os.path.abspath(config_file) assert os.path.exists(config_file), "Passed in bad config file: {}".format(config_file) add_script(nc, bootstrap_py.format(config_uri=config_file, cwd=cwd)) add_greeting(nc, bootstrap_greeting) add_script(nc, "import datetime") add_greeting(nc, "* **datetime** - Python [datetime module](https://docs.python.org/3.5/library/datetime.html)") add_script(nc, "import time") add_greeting(nc, "* **time** - Python [time module](https://docs.python.org/3.5/library/time.html)") try: # Commonly used with Pyramid applications import transaction # noQA add_script(nc, "import transaction\n") add_greeting(nc, "* **transaction** - Zope [transaction manager](http://zodb.readthedocs.org/en/latest/transactions.html), e.g. `transaction.commit()`") except ImportError: pass
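A hedged call-site sketch; add_script(), add_greeting() and the hand-off to NotebookManager live elsewhere in the package and are assumed here:

notebook_context = {}
make_startup(notebook_context, config_file="development.ini")
# notebook_context now carries 'startup' (the generated startup.py source)
# and 'greeting' (the Markdown shown at the top of the notebook).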
<SYSTEM_TASK:> Include all SQLAlchemy models in the script context. <END_TASK> <USER_TASK:> Description: def include_sqlalchemy_models(nc, Base): """Include all SQLAlchemy models in the script context. :param nc: notebook_context dictionary :param Base: SQLAlchemy model Base class from where the all models inherit. """
from sqlalchemy.ext.declarative.clsregistry import _ModuleMarker # Include all SQLAlchemy models in the local namespace for name, klass in Base._decl_class_registry.items(): print(name, klass) if isinstance(klass, _ModuleMarker): continue add_script(nc, get_import_statement(klass)) add_greeting(nc, "* **{}** - {}".format(klass.__name__, get_dotted_path(klass)))
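This is typically called right after make_startup(), with the application's declarative base; the model module below is hypothetical:

from myapp.models import Base   # hypothetical declarative base

notebook_context = {}
make_startup(notebook_context, config_file="development.ini")
include_sqlalchemy_models(notebook_context, Base)
# Every mapped class gets an import line in startup.py and a bullet in the greeting.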
<SYSTEM_TASK:> Performs an inclusive OR operation between the contents of accumulator A <END_TASK> <USER_TASK:> Description: def instruction_OR(self, opcode, m, register): """ Performs an inclusive OR operation between the contents of accumulator A or B and the contents of memory location M and the result is stored in accumulator A or B. source code forms: ORA P; ORB P CC bits "HNZVC": -aa0- """
a = register.value r = a | m register.set(r) self.clear_NZV() self.update_NZ_8(r)
<SYSTEM_TASK:> Performs a logical AND between the condition code register and the <END_TASK> <USER_TASK:> Description: def instruction_ANDCC(self, opcode, m, register): """ Performs a logical AND between the condition code register and the immediate byte specified in the instruction and places the result in the condition code register. source code forms: ANDCC #xx CC bits "HNZVC": ddddd """
assert register == self.cc_register old_cc = self.get_cc_value() new_cc = old_cc & m self.set_cc(new_cc)
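A common use of ANDCC is clearing interrupt masks: ANDCC #$AF clears the F (bit 6) and I (bit 4) flags, enabling FIRQ and IRQ, while leaving every other condition code untouched. A small stand-alone illustration of the masking itself:

old_cc = 0b11010101           # E F H I N Z V C = 1 1 0 1 0 1 0 1
new_cc = old_cc & 0xAF        # ANDCC #$AF
assert new_cc == 0b10000101   # F and I cleared, everything else preserved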