text_prompt
stringlengths
100
17.7k
code_prompt
stringlengths
7
9.86k
<SYSTEM_TASK:> Synchronize persistent identifier status. <END_TASK> <USER_TASK:> Description: def sync_status(self, status): """Synchronize persistent identifier status. Used when the provider uses an external service, which might have been modified outside of our system. :param status: The new status to set. :returns: `True` if the PID is successfully sync. """
# Fast path: nothing to do when the stored status already matches.
if self.status == status:
    return True
try:
    # Nested transaction so a failure rolls back only this change.
    with db.session.begin_nested():
        self.status = status
        db.session.add(self)
except SQLAlchemyError:
    logger.exception("Failed to sync status {0}.".format(status),
                     extra=dict(pid=self))
    raise
logger.info("Synced PID status to {0}.".format(status),
            extra=dict(pid=self))
return True
<SYSTEM_TASK:> Internal function to reset sequence to specific value. <END_TASK> <USER_TASK:> Description: def _set_sequence(cls, val): """Internal function to reset sequence to specific value. Note: this function is for PostgreSQL compatibility. :param val: The value to be set. """
# Only PostgreSQL keeps an explicit sequence behind SERIAL columns;
# other dialects (e.g. SQLite) need no manual reset.
if db.engine.dialect.name == 'postgresql':  # pragma: no cover
    db.session.execute(
        "SELECT setval(pg_get_serial_sequence("
        "'{0}', 'recid'), :newval)".format(
            cls.__tablename__), dict(newval=val))
<SYSTEM_TASK:> Insert a record identifier. <END_TASK> <USER_TASK:> Description: def insert(cls, val): """Insert a record identifier. :param val: The `recid` column value to insert. """
with db.session.begin_nested():
    obj = cls(recid=val)
    db.session.add(obj)
    # Bump the PostgreSQL sequence so later auto-generated recids do
    # not collide with the explicitly inserted value.
    cls._set_sequence(cls.max())
<SYSTEM_TASK:> Resolve a persistent identifier to an internal object. <END_TASK> <USER_TASK:> Description: def resolve(self, pid_value): """Resolve a persistent identifier to an internal object. :param pid_value: Persistent identifier. :returns: A tuple containing (pid, object). """
pid = PersistentIdentifier.get(self.pid_type, pid_value)
# Order of the state checks matters: unregistered, deleted,
# redirected, and finally missing-object.
if pid.is_new() or pid.is_reserved():
    raise PIDUnregistered(pid)
if pid.is_deleted():
    obj_id = pid.get_assigned_object(object_type=self.object_type)
    try:
        # Best effort: attach the (possibly still readable) object to
        # the exception so callers can render a tombstone page.
        obj = self.object_getter(obj_id) if obj_id else None
    except NoResultFound:
        obj = None
    raise PIDDeletedError(pid, obj)
if pid.is_redirected():
    raise PIDRedirectedError(pid, pid.get_redirect())
obj_id = pid.get_assigned_object(object_type=self.object_type)
if not obj_id:
    raise PIDMissingObjectError(self.pid_type, pid_value)
return pid, self.object_getter(obj_id)
<SYSTEM_TASK:> Create a sheet with the given title. This does not check if <END_TASK> <USER_TASK:> Description: def create_sheet(self, title): """ Create a sheet with the given title. This does not check if another sheet by the same name already exists. """
# New worksheets are created with a default 10x10 grid.
ws = self.conn.sheets_service.AddWorksheet(title, 10, 10, self.id)
# Invalidate the cached worksheet feed so it is re-fetched next time.
self._wsf = None
return Sheet(self, ws)
def open(cls, title, conn=None, google_user=None, google_password=None):
    """Open the spreadsheet named ``title``.

    If no spreadsheet with that name exists, a new one will be
    created.

    :param title: Spreadsheet title to look up.
    :returns: An existing spreadsheet when found, otherwise a newly
        created one.
    """
    existing = cls.by_title(title, conn=conn, google_user=google_user,
                            google_password=google_password)
    if existing is not None:
        return existing
    # No spreadsheet by that title: create a fresh one.
    return cls.create(title, conn=conn, google_user=google_user,
                      google_password=google_password)
<SYSTEM_TASK:> Create a new spreadsheet with the given ``title``. <END_TASK> <USER_TASK:> Description: def create(cls, title, conn=None, google_user=None, google_password=None): """ Create a new spreadsheet with the given ``title``. """
conn = Connection.connect(conn=conn, google_user=google_user,
                          google_password=google_password)
res = Resource(type='spreadsheet', title=title)
res = conn.docs_client.CreateResource(res)
# The resource id looks like '...%3A<key>'; keep only the key part.
id = res.id.text.rsplit('%3A', 1)[-1]
return cls(id, conn, resource=res)
<SYSTEM_TASK:> Open a spreadsheet via its resource ID. This is more precise <END_TASK> <USER_TASK:> Description: def by_id(cls, id, conn=None, google_user=None, google_password=None): """ Open a spreadsheet via its resource ID. This is more precise than opening a document by title, and should be used with preference. """
# Establish (or reuse) a connection, then wrap the raw resource id.
conn = Connection.connect(conn=conn, google_user=google_user,
                          google_password=google_password)
return cls(id=id, conn=conn)
<SYSTEM_TASK:> Open the first document with the given ``title`` that is <END_TASK> <USER_TASK:> Description: def by_title(cls, title, conn=None, google_user=None, google_password=None): """ Open the first document with the given ``title`` that is returned by document search. """
conn = Connection.connect(conn=conn, google_user=google_user,
                          google_password=google_password)
q = DocsQuery(categories=['spreadsheet'], title=title)
feed = conn.docs_client.GetResources(q=q)
for entry in feed.entry:
    # The query may match partial titles; require an exact match.
    if entry.title.text == title:
        id = entry.id.text.rsplit('%3A', 1)[-1]
        return cls.by_id(id, conn=conn)
# NOTE(review): implicitly returns None when no exact match exists.
<SYSTEM_TASK:> Handle email sending verification form. <END_TASK> <USER_TASK:> Description: def handle_verification_form(form): """Handle email sending verification form."""
# Bind the submitted form data, then re-send the confirmation email.
form.process(formdata=request.form)
if form.validate_on_submit():
    send_confirmation_instructions(current_user)
    # NOTE: Flash message.
    flash(_("Verification email sent."), category="success")
def send_text(self, text):
    """Send text message to telegram user.

    Text message should be markdown formatted.

    :param text: markdown formatted text.
    :return: status code on error.
    """
    if not self.is_token_set:
        raise ValueError('TelepythClient: Access token is not set!')
    # StringIO(text) yields a stream positioned at the start, ready
    # for the transport callable to read.
    buf = StringIO(text)
    return self(buf)
<SYSTEM_TASK:> Render matplotlib figure into temporary bytes buffer and then send <END_TASK> <USER_TASK:> Description: def send_figure(self, fig, caption=''): """Render matplotlib figure into temporary bytes buffer and then send it to telegram user. :param fig: matplotlib figure object. :param caption: text caption of picture. :return: status code on error. """
if not self.is_token_set:
    raise ValueError('TelepythClient: Access token is not set!')
# Render the figure into an in-memory PNG buffer.
figure = BytesIO()
fig.savefig(figure, format='png')
figure.seek(0)
# Build a multipart/form-data request: caption part + image part.
parts = [ContentDisposition('caption', caption),
         ContentDisposition('figure', figure,
                            filename="figure.png",
                            content_type='image/png')]
form = MultipartFormData(*parts)
content_type = 'multipart/form-data; boundary=%s' % form.boundary
url = self.base_url + self.access_token
req = Request(url, method='POST')
req.add_header('Content-Type', content_type)
req.add_header('User-Agent', __user_agent__ + '/' + __version__)
req.data = form().read()
res = urlopen(req)
return res.getcode()
def prune_clusters(clusters, index, n=3):
    """Delete clusters with fewer than *n* elements.

    :param clusters: Iterable of cluster objects exposing a ``size``
        attribute.
    :param index: Mapping of term -> set of clusters; pruned clusters
        are removed from every entry in place and entries left empty
        are deleted.
    :param n: Minimum cluster size to keep (default 3).
    :returns: Tuple ``(pruned_clusters, index)``.
    """
    torem = set(c for c in clusters if c.size < n)
    pruned_clusters = [c for c in clusters if c.size >= n]
    terms_torem = []
    # FIX: the original loop variable shadowed the ``clusters``
    # parameter; use a distinct name for clarity.
    for term, term_clusters in index.items():
        index[term] = term_clusters - torem
        if len(index[term]) == 0:
            terms_torem.append(term)
    # Deleting after iteration avoids mutating the dict mid-loop.
    for t in terms_torem:
        del index[t]
    return pruned_clusters, index
<SYSTEM_TASK:> Monitor `field` for change <END_TASK> <USER_TASK:> Description: def monitor(self, field, callback, poll_interval=None): """ Monitor `field` for change Will monitor ``field`` for change and execute ``callback`` when change is detected. Example usage:: def handle(resource, field, previous, current): print "Change from {} to {}".format(previous, current) switch = TapSwitch.objects.get(id=3) # Note that we monitor the entire state of the Hue Tap # switch rather than a specific field switch.monitor(lambda sw: sw.state.as_dict(), handle, poll_interval=0.2) # Execution will stop here and the API client will begin polling for changes hue_api.start_monitor_loop() Args: field (string): The name of the field to be monitored. This may also be a callable which will be called with the resource instance as its single argument and must return a value which can be compared to previous values. callback (callable): The callable to be called when a change is detected. It will be called with parameters as follows: * resource instance * field name, * previous value * current value. poll_interval (float): Interval between polling in seconds. Defaults to the API's `poll_interval` value (which defaults to 0.1 second. Returns: Monitor: """
# Fall back to the API-wide default polling interval when not given.
poll_interval = poll_interval or self.api.poll_interval
monitor = self.monitor_class(
    resource=self,
    field=field,
    callback=callback,
    poll_interval=poll_interval,
    event_queue=self.api.event_queue,
    poll_pool=self.api.poll_pool,
)
monitor.start()
return monitor
<SYSTEM_TASK:> Factory for creating an extended user registration form. <END_TASK> <USER_TASK:> Description: def register_form_factory(Form): """Factory for creating an extended user registration form."""
class CsrfDisabledProfileForm(ProfileForm):
    """Subclass of ProfileForm to disable CSRF token in the inner form.

    This class will always be a inner form field of the parent class
    `Form`. The parent will add/remove the CSRF token in the form.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the object by hardcoding CSRF token to false."""
        kwargs = _update_with_csrf_disabled(kwargs)
        super(CsrfDisabledProfileForm, self).__init__(*args, **kwargs)

class RegisterForm(Form):
    """RegisterForm extended with UserProfile details."""

    # Nested profile sub-form; '.' separator yields 'profile.<field>'.
    profile = FormField(CsrfDisabledProfileForm, separator='.')

return RegisterForm
<SYSTEM_TASK:> Factory for creating a confirm register form. <END_TASK> <USER_TASK:> Description: def confirm_register_form_factory(Form): """Factory for creating a confirm register form."""
class CsrfDisabledProfileForm(ProfileForm):
    """Subclass of ProfileForm to disable CSRF token in the inner form.

    This class will always be a inner form field of the parent class
    `Form`. The parent will add/remove the CSRF token in the form.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the object by hardcoding CSRF token to false."""
        kwargs = _update_with_csrf_disabled(kwargs)
        super(CsrfDisabledProfileForm, self).__init__(*args, **kwargs)

class ConfirmRegisterForm(Form):
    """RegisterForm extended with UserProfile details."""

    # Nested profile sub-form; '.' separator yields 'profile.<field>'.
    profile = FormField(CsrfDisabledProfileForm, separator='.')

return ConfirmRegisterForm
<SYSTEM_TASK:> Update the input dict with CSRF disabled depending on WTF-Form version. <END_TASK> <USER_TASK:> Description: def _update_with_csrf_disabled(d=None): """Update the input dict with CSRF disabled depending on WTF-Form version. From Flask-WTF 0.14.0, `csrf_enabled` param has been deprecated in favor of `meta={csrf: True/False}`. """
if d is None:
    d = {}
import flask_wtf
from pkg_resources import parse_version
# Flask-WTF >= 0.14 moved CSRF configuration into the ``meta`` dict
# and deprecated the ``csrf_enabled`` keyword.
supports_meta = parse_version(flask_wtf.__version__) >= parse_version(
    "0.14.0")
if supports_meta:
    d.setdefault('meta', {})
    d['meta'].update({'csrf': False})
else:
    d['csrf_enabled'] = False
return d
def filter_pages(pages, pagenum, pagename):
    """Choose pages by page number and/or page name.

    :param pages: Iterable of page objects exposing a ``name``
        attribute.
    :param pagenum: 1-based page number to select, or a falsy value
        to skip number filtering.
    :param pagename: Exact page name to select, or a falsy value to
        skip name filtering.
    :returns: List of matching pages.
    :raises IndexError: If the page number or name matches nothing.
    """
    if pagenum:
        pages = list(pages)
        # FIX: validate the range explicitly; plain indexing would let
        # 0 or negative numbers silently wrap around to the wrong page.
        if not 1 <= pagenum <= len(pages):
            raise IndexError('Invalid page number: %d' % pagenum)
        pages = [pages[pagenum - 1]]
    if pagename:
        pages = [page for page in pages if page.name == pagename]
        if not pages:
            raise IndexError('Page not found: pagename=%s' % pagename)
    return pages
<SYSTEM_TASK:> Exports images from visio file <END_TASK> <USER_TASK:> Description: def export_img(visio_filename, image_filename, pagenum=None, pagename=None): """ Exports images from visio file """
# visio requires absolute path image_pathname = os.path.abspath(image_filename) if not os.path.isdir(os.path.dirname(image_pathname)): msg = 'Could not write image file: %s' % image_filename raise IOError(msg) with VisioFile.Open(visio_filename) as visio: pages = filter_pages(visio.pages, pagenum, pagename) try: if len(pages) == 1: pages[0].Export(image_pathname) else: digits = int(log(len(pages), 10)) + 1 basename, ext = os.path.splitext(image_pathname) filename_format = "%s%%0%dd%s" % (basename, digits, ext) for i, page in enumerate(pages): filename = filename_format % (i + 1) page.Export(filename) except Exception: raise IOError('Could not write image: %s' % image_pathname)
<SYSTEM_TASK:> Imports providers classes by paths given in SITEMETRICS_PROVIDERS setting. <END_TASK> <USER_TASK:> Description: def get_custom_providers(): """Imports providers classes by paths given in SITEMETRICS_PROVIDERS setting."""
providers = getattr(settings, 'SITEMETRICS_PROVIDERS', False)
if not providers:
    return []
p_clss = []
for provider_path in providers:
    # Split 'pkg.module.ClassName' into module path and class name.
    path_splitted = provider_path.split('.')
    mod = import_module('.'.join(path_splitted[:-1]))
    p_cls = getattr(mod, path_splitted[-1])
    p_clss.append(p_cls)
return p_clss
def switch_onoff(self, device, status):
    """Switch a socket on or off.

    :param device: Device identifier passed through to the switch
        call.
    :param status: The markers ``1``, ``True`` or ``'1'`` switch the
        device on; any other value switches it off.
    :returns: Result of :meth:`switch_on` or :meth:`switch_off`.
    """
    # ``True == 1`` in Python, so this membership test also covers the
    # boolean that the old ``status == True`` comparison matched.
    if status in (1, '1'):
        return self.switch_on(device)
    return self.switch_off(device)
def switch_toggle(self, device):
    """Toggle the current state of the given device.

    :param device: Device identifier.
    :returns: Result of the switch call, or the raw state string when
        it is neither ``'0'`` nor ``'1'``.
    """
    state = self.get_state(device)
    if state == '1':
        return self.switch_off(device)
    if state == '0':
        return self.switch_on(device)
    # Unknown state (e.g. device unreachable): report it unchanged.
    return state
def get_device_names(self):
    """Return a dict mapping each device id to its display name."""
    return {device: self.get_device_name(device)
            for device in self.get_device_ids()}
def get_power_all(self):
    """Return the power in mW for all devices, keyed by device id."""
    # Iterating the dict directly yields its keys (the device ids).
    return {device: self.get_power_single(device)
            for device in self.get_device_names()}
<SYSTEM_TASK:> Call entry points exposed for the SECRET_KEY change. <END_TASK> <USER_TASK:> Description: def migrate_secret_key(old_key): """Call entry points exposed for the SECRET_KEY change."""
if 'SECRET_KEY' not in current_app.config or \
        current_app.config['SECRET_KEY'] is None:
    raise click.ClickException(
        'SECRET_KEY is not set in the configuration.')
# Let each registered entry point re-encrypt/migrate its own data.
for ep in iter_entry_points('invenio_base.secret_key'):
    try:
        ep.load()(old_key=old_key)
    except Exception:
        current_app.logger.error(
            'Failed to initialize entry point: {0}'.format(ep))
        raise
click.secho('Successfully changed secret key.', fg='green')
def get_provider_choices():
    """Return the available metrics providers as model field choices.

    :returns: List of ``(alias, title)`` tuples.
    """
    return [(provider.alias, provider.title)
            for provider in METRICS_PROVIDERS]
<SYSTEM_TASK:> Create a Flask application factory. <END_TASK> <USER_TASK:> Description: def create_app_factory(app_name, config_loader=None, extension_entry_points=None, extensions=None, blueprint_entry_points=None, blueprints=None, converter_entry_points=None, converters=None, wsgi_factory=None, **app_kwargs): """Create a Flask application factory. The application factory will load Flask extensions and blueprints specified using both entry points and directly in the arguments. Loading order of entry points are not guaranteed and can happen in any order. :param app_name: Flask application name. :param config_loader: Callable which will be invoked on application creation in order to load the Flask configuration. See example below. :param extension_entry_points: List of entry points, which specifies Flask extensions that will be initialized only by passing in the Flask application object :param extensions: List of Flask extensions that can be initialized only by passing in the Flask application object. :param blueprint_entry_points: List of entry points, which specifies Blueprints that will be registered on the Flask application. :param blueprints: List of Blueprints that will be registered on the Flask application. :param converter_entry_points: List of entry points, which specifies Werkzeug URL map converters that will be added to ``app.url_map.converters``. :param converters: Map of Werkzeug URL map converter classes that will be added to ``app.url_map.converters``. :param wsgi_factory: A callable that will be passed the Flask application object in order to overwrite the default WSGI application (e.g. to install ``DispatcherMiddleware``). :param app_kwargs: Keyword arguments passed to :py:meth:`base_app`. :returns: Flask application factory. Example of a configuration loader: .. code-block:: python def my_config_loader(app, **kwargs): app.config.from_module('mysite.config') app.config.update(**kwargs) .. 
note:: `Invenio-Config <https://pythonhosted.org/invenio-config>`_ provides a factory creating default configuration loader (see :func:`invenio_config.utils.create_config_loader`) which is sufficient for most cases. Example of a WSGI factory: .. code-block:: python def my_wsgi_factory(app): return DispatcherMiddleware(app.wsgi_app, {'/api': api_app}) .. versionadded: 1.0.0 """
def _create_app(**kwargs):
    # Build the bare application and announce its creation.
    app = base_app(app_name, **app_kwargs)
    app_created.send(_create_app, app=app)
    debug = kwargs.get('debug')
    if debug is not None:
        app.debug = debug
    # Load configuration
    if config_loader:
        config_loader(app, **kwargs)
    # Load URL converters.
    converter_loader(
        app,
        entry_points=converter_entry_points,
        modules=converters,
    )
    # Load application based on entrypoints.
    app_loader(
        app,
        entry_points=extension_entry_points,
        modules=extensions,
    )
    # Load blueprints
    blueprint_loader(
        app,
        entry_points=blueprint_entry_points,
        modules=blueprints,
    )
    app_loaded.send(_create_app, app=app)
    # Replace WSGI application using factory if provided (e.g. to install
    # WSGI middleware).
    if wsgi_factory:
        app.wsgi_app = wsgi_factory(app, **kwargs)
    return app
return _create_app
<SYSTEM_TASK:> Create CLI for ``inveniomanage`` command. <END_TASK> <USER_TASK:> Description: def create_cli(create_app=None): """Create CLI for ``inveniomanage`` command. :param create_app: Flask application factory. :returns: Click command group. .. versionadded: 1.0.0 """
def create_cli_app(info):
    """Application factory for CLI app.

    Internal function for creating the CLI. When invoked via
    ``inveniomanage`` FLASK_APP must be set.
    """
    if create_app is None:
        # Fallback to normal Flask behavior
        info.create_app = None
        app = info.load_app()
    else:
        app = create_app(debug=get_debug_flag())
    return app

@click.group(cls=FlaskGroup, create_app=create_cli_app)
def cli(**params):
    """Command Line Interface for Invenio."""
    pass

return cli
<SYSTEM_TASK:> Run default application loader. <END_TASK> <USER_TASK:> Description: def app_loader(app, entry_points=None, modules=None): """Run default application loader. :param entry_points: List of entry points providing to Flask extensions. :param modules: List of Flask extensions. .. versionadded: 1.0.0 """
# Each extension is initialized simply by calling it with the app.
_loader(app, lambda ext: ext(app), entry_points=entry_points,
        modules=modules)
<SYSTEM_TASK:> Run default blueprint loader. <END_TASK> <USER_TASK:> Description: def blueprint_loader(app, entry_points=None, modules=None): """Run default blueprint loader. The value of any entry_point or module passed can be either an instance of ``flask.Blueprint`` or a callable accepting a ``flask.Flask`` application instance as a single argument and returning an instance of ``flask.Blueprint``. :param entry_points: List of entry points providing to Blueprints. :param modules: List of Blueprints. .. versionadded: 1.0.0 """
url_prefixes = app.config.get('BLUEPRINTS_URL_PREFIXES', {})

def loader_init_func(bp_or_func):
    # Accept either a Blueprint instance or a factory taking the app.
    bp = bp_or_func(app) if callable(bp_or_func) else bp_or_func
    app.register_blueprint(bp, url_prefix=url_prefixes.get(bp.name))

_loader(app, loader_init_func, entry_points=entry_points, modules=modules)
<SYSTEM_TASK:> Run default converter loader. <END_TASK> <USER_TASK:> Description: def converter_loader(app, entry_points=None, modules=None): """Run default converter loader. :param entry_points: List of entry points providing to Blue. :param modules: Map of coverters. .. versionadded: 1.0.0 """
if entry_points:
    for entry_point in entry_points:
        for ep in pkg_resources.iter_entry_points(entry_point):
            try:
                # Register the converter under its entry point name.
                app.url_map.converters[ep.name] = ep.load()
            except Exception:
                app.logger.error(
                    'Failed to initialize entry point: {0}'.format(ep))
                raise
if modules:
    app.url_map.converters.update(**modules)
<SYSTEM_TASK:> Run generic loader. <END_TASK> <USER_TASK:> Description: def _loader(app, init_func, entry_points=None, modules=None): """Run generic loader. Used to load and initialize entry points and modules using an custom initialization function. .. versionadded: 1.0.0 """
# Entry points and plain modules go through the same init function;
# failures are logged and re-raised so startup aborts loudly.
if entry_points:
    for entry_point in entry_points:
        for ep in pkg_resources.iter_entry_points(entry_point):
            try:
                init_func(ep.load())
            except Exception:
                app.logger.error(
                    'Failed to initialize entry point: {0}'.format(ep))
                raise
if modules:
    for m in modules:
        try:
            init_func(m)
        except Exception:
            app.logger.error('Failed to initialize module: {0}'.format(m))
            raise
<SYSTEM_TASK:> Invenio base application factory. <END_TASK> <USER_TASK:> Description: def base_app(import_name, instance_path=None, static_folder=None, static_url_path='/static', template_folder='templates', instance_relative_config=True, app_class=Flask): """Invenio base application factory. If the instance folder does not exists, it will be created. :param import_name: The name of the application package. :param env_prefix: Environment variable prefix. :param instance_path: Instance path for Flask application. :param static_folder: Static folder path. :param app_class: Flask application class. :returns: Flask application instance. .. versionadded: 1.0.0 """
configure_warnings() # Create the Flask application instance app = app_class( import_name, instance_path=instance_path, instance_relative_config=instance_relative_config, static_folder=static_folder, static_url_path=static_url_path, template_folder=template_folder, ) # Create instance path if it doesn't exists try: if instance_path and not os.path.exists(instance_path): os.makedirs(instance_path) except Exception: # pragma: no cover app.logger.exception( 'Failed to create instance folder: "{0}"'.format(instance_path) ) return app
<SYSTEM_TASK:> Configure warnings by routing warnings to the logging system. <END_TASK> <USER_TASK:> Description: def configure_warnings(): """Configure warnings by routing warnings to the logging system. It also unhides ``DeprecationWarning``. .. versionadded: 1.0.0 """
# Respect explicit -W command line options: only take over when the
# user has not configured warnings themselves.
if not sys.warnoptions:
    # Route warnings through python logging
    logging.captureWarnings(True)
    # DeprecationWarning is by default hidden, hence we force the
    # 'default' behavior on deprecation warnings which is not to hide
    # errors.
    warnings.simplefilter('default', DeprecationWarning)
    warnings.simplefilter('ignore', PendingDeprecationWarning)
<SYSTEM_TASK:> Load all project Task Runs from Tasks. <END_TASK> <USER_TASK:> Description: def get_task_runs(self, json_file=None): """Load all project Task Runs from Tasks."""
# A project must have been loaded before task runs can be fetched.
if self.project is None:
    raise ProjectError
loader = create_task_runs_loader(self.project.id, self.tasks,
                                 json_file, self.all)
self.task_runs, self.task_runs_file = loader.load()
self._check_project_has_taskruns()
self.task_runs_df = dataframer.create_task_run_data_frames(
    self.tasks, self.task_runs)
def describe(self, element):  # pragma: no cover
    """Return tasks or task_runs Pandas describe.

    :param element: Either ``'tasks'`` or ``'task_runs'``.
    :returns: The matching data frame's ``describe()`` output, or an
        error string for an unknown element name.
    """
    if element == 'tasks':
        return self.tasks_df.describe()
    if element == 'task_runs':
        return self.task_runs_df.describe()
    return "ERROR: %s not found" % element
<SYSTEM_TASK:> Create a WSGI application factory. <END_TASK> <USER_TASK:> Description: def create_wsgi_factory(mounts_factories): """Create a WSGI application factory. Usage example: .. code-block:: python wsgi_factory = create_wsgi_factory({'/api': create_api}) :param mounts_factories: Dictionary of mount points per application factory. .. versionadded:: 1.0.0 """
def create_wsgi(app, **kwargs):
    # Instantiate each mounted sub-application under its URL prefix.
    mounts = {
        mount: factory(**kwargs)
        for mount, factory in mounts_factories.items()
    }
    return DispatcherMiddleware(app.wsgi_app, mounts)
return create_wsgi
<SYSTEM_TASK:> Fix ``REMOTE_ADDR`` based on ``X-Forwarded-For`` headers. <END_TASK> <USER_TASK:> Description: def wsgi_proxyfix(factory=None): """Fix ``REMOTE_ADDR`` based on ``X-Forwarded-For`` headers. .. note:: You must set ``WSGI_PROXIES`` to the correct number of proxies, otherwise you application is susceptible to malicious attacks. .. versionadded:: 1.0.0 """
def create_wsgi(app, **kwargs):
    # Chain with an inner factory when given, else wrap the app itself.
    wsgi_app = factory(app, **kwargs) if factory else app.wsgi_app
    if app.config.get('WSGI_PROXIES'):
        return ProxyFix(wsgi_app, num_proxies=app.config['WSGI_PROXIES'])
    return wsgi_app
return create_wsgi
def check_directory(self):
    """Check whether the migrations directory exists.

    :returns: ``True`` when ``self.directory`` exists; otherwise logs
        an error and returns ``False``.
    """
    if os.path.exists(self.directory):
        return True
    logger.error("No migrations directory found. Check your path or "
                 "create a migration first.")
    logger.error("Directory: %s" % self.directory)
    return False
<SYSTEM_TASK:> Set username. <END_TASK> <USER_TASK:> Description: def username(self, username): """Set username. .. note:: The username will be converted to lowercase. The display name will contain the original version. """
# Reject invalid usernames before storing anything.
validate_username(username)
# The canonical (lowercase) form is stored for lookups; the display
# name preserves the user's original capitalization.
self._username = username.lower()
self._displayname = username
<SYSTEM_TASK:> Get profile by username. <END_TASK> <USER_TASK:> Description: def get_by_username(cls, username): """Get profile by username. :param username: A username to query for (case insensitive). """
# Compare against the canonical lowercase column for a
# case-insensitive match; ``.one()`` raises if no profile exists.
return cls.query.filter(
    UserProfile._username == username.lower()
).one()
<SYSTEM_TASK:> Returns a nice name for class object or class instance. <END_TASK> <USER_TASK:> Description: def nice_classname(obj): """Returns a nice name for class object or class instance. >>> nice_classname(Exception()) # doctest: +ELLIPSIS '...Exception' >>> nice_classname(Exception) # doctest: +ELLIPSIS '...Exception' """
if inspect.isclass(obj): cls_name = obj.__name__ else: cls_name = obj.__class__.__name__ mod = inspect.getmodule(obj) if mod: name = mod.__name__ # jython if name.startswith('org.python.core.'): name = name[len('org.python.core.'):] return "%s.%s" % (name, cls_name) else: return cls_name
<SYSTEM_TASK:> Sets additional command line options. <END_TASK> <USER_TASK:> Description: def options(self, parser, env): """Sets additional command line options."""
# Register the base plugin options first, then our own flag.
Plugin.options(self, parser, env)
parser.add_option(
    '--html-file', action='store', dest='html_file',
    metavar="FILE",
    default=env.get('NOSE_HTML_FILE', 'nosetests.html'),
    help="Path to html file to store the report in. "
         "Default is nosetests.html in the working directory "
         "[NOSE_HTML_FILE]")
<SYSTEM_TASK:> Writes an Xunit-formatted XML file <END_TASK> <USER_TASK:> Description: def report(self, stream): """Writes an Xunit-formatted XML file The file includes a report of test errors and failures. """
from collections import OrderedDict self.stats['total'] = sum(self.stats.values()) for group in self.report_data.values(): group.stats['total'] = sum(group.stats.values()) self.report_file.write(self.jinja.get_template('report.html').render( report=OrderedDict(sorted(self.report_data.items())), stats=self.stats, )) self.report_file.close() if self.config.verbosity > 1: stream.writeln("-" * 70) stream.writeln("HTML: %s" % self.report_file.name)
<SYSTEM_TASK:> Add error output to Xunit report. <END_TASK> <USER_TASK:> Description: def addError(self, test, err, capt=None): """Add error output to Xunit report. """
exc_type, exc_val, tb = err tb = ''.join(traceback.format_exception( exc_type, exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val), tb )) name = id_split(test.id()) group = self.report_data[name[0]] if issubclass(err[0], SkipTest): type = 'skipped' self.stats['skipped'] += 1 group.stats['skipped'] += 1 else: type = 'error' self.stats['errors'] += 1 group.stats['errors'] += 1 group.tests.append({ 'name': name[-1], 'failed': True, 'type': type, 'errtype': nice_classname(err[0]), 'message': exc_message(err), 'tb': tb, })
<SYSTEM_TASK:> Gets a single list of messages from all storage backends. <END_TASK> <USER_TASK:> Description: def _get(self, *args, **kwargs): """ Gets a single list of messages from all storage backends. """
all_messages = [] for storage in self.storages: messages, all_retrieved = storage._get() # If the backend hasn't been used, no more retrieval is necessary. if messages is None: break if messages: self._used_storages.add(storage) all_messages.extend(messages) # If this storage class contained all the messages, no further # retrieval is necessary if all_retrieved: break return all_messages, all_retrieved
<SYSTEM_TASK:> Stores the messages, returning any unstored messages after trying all <END_TASK> <USER_TASK:> Description: def _store(self, messages, response, *args, **kwargs): """ Stores the messages, returning any unstored messages after trying all backends. For each storage backend, any messages not stored are passed on to the next backend. """
for storage in self.storages: if messages: messages = storage._store(messages, response, remove_oldest=False) # Even if there are no more messages, continue iterating to ensure # storages which contained messages are flushed. elif storage in self._used_storages: storage._store([], response) self._used_storages.remove(storage) return messages
<SYSTEM_TASK:> Return a queryset of messages for the request user <END_TASK> <USER_TASK:> Description: def _message_queryset(self, include_read=False): """ Return a queryset of messages for the request user """
expire = timezone.now() qs = PersistentMessage.objects.\ filter(user=self.get_user()).\ filter(Q(expires=None) | Q(expires__gt=expire)) if not include_read: qs = qs.exclude(read=True) return qs
<SYSTEM_TASK:> If its level is into persist levels, convert the message to models and save it <END_TASK> <USER_TASK:> Description: def process_message(self, message, *args, **kwargs): """ If its level is into persist levels, convert the message to models and save it """
if not message.level in PERSISTENT_MESSAGE_LEVELS: return message user = kwargs.get("user") or self.get_user() try: anonymous = user.is_anonymous() except TypeError: anonymous = user.is_anonymous if anonymous: raise NotImplementedError('Persistent message levels cannot be used for anonymous users.') message_persistent = PersistentMessage() message_persistent.level = message.level message_persistent.message = message.message message_persistent.extra_tags = message.extra_tags message_persistent.user = user if "expires" in kwargs: message_persistent.expires = kwargs["expires"] message_persistent.save() return None
<SYSTEM_TASK:> Queues a message to be stored. <END_TASK> <USER_TASK:> Description: def add(self, level, message, extra_tags='', *args, **kwargs): """ Queues a message to be stored. The message is only queued if it contained something and its level is not less than the recording level (``self.level``). """
if not message: return # Check that the message level is not less than the recording level. level = int(level) if level < self.level: return # Add the message. self.added_new = True message = Message(level, message, extra_tags=extra_tags) message = self.process_message(message, *args, **kwargs) if message: self._queued_messages.append(message)
<SYSTEM_TASK:> Delete all messages that are sticky and return the other messages <END_TASK> <USER_TASK:> Description: def _store(self, messages, response, *args, **kwargs): """ Delete all messages that are sticky and return the other messages This storage never save objects """
return [message for message in messages if not message.level in STICKY_MESSAGE_LEVELS]
<SYSTEM_TASK:> Get current user profile. <END_TASK> <USER_TASK:> Description: def _get_current_userprofile(): """Get current user profile. .. note:: If the user is anonymous, then a :class:`invenio_userprofiles.models.AnonymousUserProfile` instance is returned. :returns: The :class:`invenio_userprofiles.models.UserProfile` instance. """
if current_user.is_anonymous: return AnonymousUserProfile() profile = g.get( 'userprofile', UserProfile.get_by_userid(current_user.get_id())) if profile is None: profile = UserProfile(user_id=int(current_user.get_id())) g.userprofile = profile return profile
<SYSTEM_TASK:> Silences the excessive ibapi logging to the root logger. <END_TASK> <USER_TASK:> Description: def silence_ibapi_logging(levels=["DEBUG", "INFO"]): """ Silences the excessive ibapi logging to the root logger. """
levels = levels or ["DEBUG", "INFO"] for level in levels: if level not in ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"): raise ValueError("unknown log level: {0}".format(level)) for _, module_name, _ in pkgutil.iter_modules(ibapi.__path__): module = __import__("ibapi.{0}".format(module_name), fromlist="ibapi") if not hasattr(module, "logging"): continue for level in levels: setattr(module.logging, level.lower(), noop)
<SYSTEM_TASK:> Adds a persistant message with the ``DEBUG`` level. <END_TASK> <USER_TASK:> Description: def persistant_debug(request, message, extra_tags='', fail_silently=False, *args, **kwargs): """ Adds a persistant message with the ``DEBUG`` level. """
add_message(request, DEBUG_PERSISTENT, message, extra_tags=extra_tags, fail_silently=fail_silently, *args, **kwargs)
<SYSTEM_TASK:> Adds a persistant message with the ``INFO`` level. <END_TASK> <USER_TASK:> Description: def persistant_info(request, message, extra_tags='', fail_silently=False, *args, **kwargs): """ Adds a persistant message with the ``INFO`` level. """
add_message(request, INFO_PERSISTENT, message, extra_tags=extra_tags, fail_silently=fail_silently, *args, **kwargs)
<SYSTEM_TASK:> Adds a persistant message with the ``SUCCESS`` level. <END_TASK> <USER_TASK:> Description: def persistant_success(request, message, extra_tags='', fail_silently=False, *args, **kwargs): """ Adds a persistant message with the ``SUCCESS`` level. """
add_message(request, SUCCESS_PERSISTENT, message, extra_tags=extra_tags, fail_silently=fail_silently, *args, **kwargs)
<SYSTEM_TASK:> Adds a persistant message with the ``WARNING`` level. <END_TASK> <USER_TASK:> Description: def persistant_warning(request, message, extra_tags='', fail_silently=False, *args, **kwargs): """ Adds a persistant message with the ``WARNING`` level. """
add_message(request, WARNING_PERSISTENT, message, extra_tags=extra_tags, fail_silently=fail_silently, *args, **kwargs)
<SYSTEM_TASK:> Adds a persistant message with the ``ERROR`` level. <END_TASK> <USER_TASK:> Description: def persistant_error(request, message, extra_tags='', fail_silently=False, *args, **kwargs): """ Adds a persistant message with the ``ERROR`` level. """
add_message(request, ERROR_PERSISTENT, message, extra_tags=extra_tags, fail_silently=fail_silently, *args, **kwargs)
<SYSTEM_TASK:> Yield successive chunks from list \a items with a minimum size \a limit <END_TASK> <USER_TASK:> Description: def _chunks(self, items, limit): """ Yield successive chunks from list \a items with a minimum size \a limit """
for i in range(0, len(items), limit): yield items[i:i + limit]
<SYSTEM_TASK:> Return the week corresponding to the proleptic Gregorian ordinal, <END_TASK> <USER_TASK:> Description: def fromordinal(cls, ordinal): """Return the week corresponding to the proleptic Gregorian ordinal, where January 1 of year 1 starts the week with ordinal 1. """
if ordinal < 1: raise ValueError("ordinal must be >= 1") return super(Week, cls).__new__(cls, *(date.fromordinal((ordinal-1) * 7 + 1).isocalendar()[:2]))
<SYSTEM_TASK:> Return a week initialized from an ISO formatted string like "2011W08" or "2011-W08". <END_TASK> <USER_TASK:> Description: def fromstring(cls, isostring): """Return a week initialized from an ISO formatted string like "2011W08" or "2011-W08"."""
if isinstance(isostring, basestring) and len(isostring) == 7 and isostring[4] == 'W': return cls(int(isostring[0:4]), int(isostring[5:7])) elif isinstance(isostring, basestring) and len(isostring) == 8 and isostring[4:6] == '-W': return cls(int(isostring[0:4]), int(isostring[6:8])) else: raise ValueError("Week.tostring argument must be on the form <yyyy>W<ww>; got %r" % (isostring,))
<SYSTEM_TASK:> Return an iterator over the weeks of the given year. <END_TASK> <USER_TASK:> Description: def weeks_of_year(cls, year): """Return an iterator over the weeks of the given year. Years have either 52 or 53 weeks."""
w = cls(year, 1) while w.year == year: yield w w += 1
<SYSTEM_TASK:> Return the last week of the given year. <END_TASK> <USER_TASK:> Description: def last_week_of_year(cls, year): """Return the last week of the given year. This week with either have week-number 52 or 53. This will be the same as Week(year+1, 0), but will even work for year 9999 where this expression would overflow. The first week of a given year is simply Week(year, 1), so there is no dedicated classmethod for that. """
if year == cls.max.year: return cls.max return cls(year+1, 0)
<SYSTEM_TASK:> Return the given day of week as a date object. Day 0 is the Monday. <END_TASK> <USER_TASK:> Description: def day(self, num): """Return the given day of week as a date object. Day 0 is the Monday."""
d = date(self.year, 1, 4) # The Jan 4th must be in week 1 according to ISO return d + timedelta(weeks=self.week-1, days=-d.weekday() + num)
<SYSTEM_TASK:> Return a Week with either the year or week attribute value replaced <END_TASK> <USER_TASK:> Description: def replace(self, year=None, week=None): """Return a Week with either the year or week attribute value replaced"""
return self.__class__(self.year if year is None else year, self.week if week is None else week)
<SYSTEM_TASK:> Provides a sanitized & serializeable dict of the alert mainly for forward & backwards compatibility <END_TASK> <USER_TASK:> Description: def _serialized(self): """Provides a sanitized & serializeable dict of the alert mainly for forward & backwards compatibility"""
return {'title': self.title, 'summary': self.summary, 'areadesc': self.areadesc, 'event': self.event, 'samecodes': self.samecodes, 'zonecodes': self.zonecodes, 'expiration': self.expiration, 'updated': self.updated, 'effective': self.effective, 'published': self.published, 'severity': self.severity, 'category': self.category, 'urgency': self.urgency, 'msgtype': self.msgtype, 'link': self.link, }
<SYSTEM_TASK:> A lot of measurement types make use of a protocol value, so we handle <END_TASK> <USER_TASK:> Description: def clean_protocol(self, protocol): """ A lot of measurement types make use of a protocol value, so we handle that here. """
if protocol is not None: try: return self.PROTOCOL_MAP[protocol] except KeyError: self._handle_malformation( '"{protocol}" is not a recognised protocol'.format( protocol=protocol ) )
<SYSTEM_TASK:> Returns the median of values in the given list. <END_TASK> <USER_TASK:> Description: def calculate_median(given_list): """ Returns the median of values in the given list. """
median = None if not given_list: return median given_list = sorted(given_list) list_length = len(given_list) if list_length % 2: median = given_list[int(list_length / 2)] else: median = (given_list[int(list_length / 2)] + given_list[int(list_length / 2) - 1]) / 2.0 return median
<SYSTEM_TASK:> Return a list of Subject Alternative Name values for the given x509 <END_TASK> <USER_TASK:> Description: def _get_subject_alternative_names(self, ext): """ Return a list of Subject Alternative Name values for the given x509 extension object. """
values = [] for san in ext.value: if isinstance(san.value, string): # Pass on simple string SAN values values.append(san.value) elif isinstance(san.value, x509.Name): # In theory there there could be >1 RDN here... values.extend( self._name_attribute_to_string(rdn) for rdn in san.value.rdns ) return values
<SYSTEM_TASK:> Configure SPI controller with the SPI mode and operating frequency <END_TASK> <USER_TASK:> Description: def configure(self): """ Configure SPI controller with the SPI mode and operating frequency """
# Convert standard SPI sheme to USBISS scheme lookup_table = [0, 2, 1, 3] mode = lookup_table[self._mode] # Add signal for SPI switch iss_mode = self._usbiss.SPI_MODE + mode # Configure USB-ISS self._usbiss.mode = [iss_mode, self.sck_divisor]
<SYSTEM_TASK:> Perform SPI transaction. <END_TASK> <USER_TASK:> Description: def exchange(self, data): """ Perform SPI transaction. The first received byte is either ACK or NACK. :TODO: enforce rule that up to 63 bytes of data can be sent. :TODO: enforce rule that there is no gaps in data bytes (what define a gap?) :param data: List of bytes :returns: List of bytes :rtype: List of bytes """
self._usbiss.write_data([self._usbiss.SPI_CMD] + data) response = self._usbiss.read_data(1 + len(data)) if len(response) != 0: response = self._usbiss.decode(response) status = response.pop(0) if status == 0: raise USBISSError('SPI Transmission Error') return response else: raise USBISSError('SPI Transmission Error: No bytes received!')
<SYSTEM_TASK:> If a recent cache exists, return it, else return None <END_TASK> <USER_TASK:> Description: def _get_feed_cache(self): """If a recent cache exists, return it, else return None"""
feed_cache = None if os.path.exists(self._feed_cache_file): maxage = datetime.now() - timedelta(minutes=self._cachetime) file_ts = datetime.fromtimestamp(os.stat(self._feed_cache_file).st_mtime) if file_ts > maxage: try: with open(self._feed_cache_file, 'rb') as cache: feed_cache = cache.read() finally: pass return feed_cache
<SYSTEM_TASK:> Sets median values for rtt and the offset of result packets. <END_TASK> <USER_TASK:> Description: def _set_medians_and_extremes(self): """ Sets median values for rtt and the offset of result packets. """
rtts = sorted([p.rtt for p in self.packets if p.rtt is not None]) if rtts: self.rtt_min = rtts[0] self.rtt_max = rtts[-1] self.rtt_median = self.calculate_median(rtts) offsets = sorted( [p.offset for p in self.packets if p.offset is not None] ) if offsets: self.offset_min = offsets[0] self.offset_max = offsets[-1] self.offset_median = self.calculate_median(offsets)
<SYSTEM_TASK:> Given a county and state, return alerts <END_TASK> <USER_TASK:> Description: def county_state_alerts(self, county, state): """Given a county and state, return alerts"""
samecode = self.geo.lookup_samecode(county, state) return self.samecode_alerts(samecode)
<SYSTEM_TASK:> register a request <END_TASK> <USER_TASK:> Description: def start_request(req, collect=False, collector_addr='tcp://127.0.0.2:2345', prefix='my_app'): """ register a request registers a request in the internal request table, optionally also sends it to the collector :param req: request, can be mostly any hash-able object :param collect: whether to send the request started event to the collector (bool) :param collector_addr: collector address, in zeromq format (string, default tcp://127.0.0.2:2345) :param prefix: label under which to register the request (string, default my_app) """
if collect: collector = get_context().socket(zmq.PUSH) collector.connect(collector_addr) collector.send_multipart([prefix, '']) collector.close() requests[hash(req)] = time()
<SYSTEM_TASK:> registers the end of a request <END_TASK> <USER_TASK:> Description: def end_request(req, collector_addr='tcp://127.0.0.2:2345', prefix='my_app'): """ registers the end of a request registers the end of a request, computes elapsed time, sends it to the collector :param req: request, can be mostly any hash-able object :param collector_addr: collector address, in zeromq format (string, default tcp://127.0.0.2:2345) :param prefix: label under which to register the request (string, default my_app) """
req_end = time() hreq = hash(req) if hreq in requests: req_time = req_end - requests[hreq] req_time *= 1000 del requests[hreq] collector = get_context().socket(zmq.PUSH) collector.connect(collector_addr) collector.send_multipart([prefix, str(req_time)]) collector.close() return req_time
<SYSTEM_TASK:> Cleanup the raw target areas description string <END_TASK> <USER_TASK:> Description: def build_target_areas(entry): """Cleanup the raw target areas description string"""
target_areas = [] areas = str(entry['cap:areaDesc']).split(';') for area in areas: target_areas.append(area.strip()) return target_areas
<SYSTEM_TASK:> Public method that parses <END_TASK> <USER_TASK:> Description: def get_alerts(self): """ Public method that parses """
emptyfeed = "There are no active watches, warnings or advisories" alerts = [] if emptyfeed in str(self._raw_cap): pass else: main_dom = minidom.parseString(self._raw_cap) xml_entries = main_dom.getElementsByTagName('entry') # title is currently first so we can detect an empty cap feed for dom in xml_entries: # parse the entry to a temp 'entry' dict entry = self._parse_entry(dom) # perform some cleanup before creating an object # entry['locations'] = self.build_locations(entry) # FIXME: remove? entry['target_areas'] = build_target_areas(entry) alert = Alert(entry) alerts.append(alert) del entry del alert return alerts
<SYSTEM_TASK:> find a notebook, given its fully qualified name and an optional path <END_TASK> <USER_TASK:> Description: def find_notebook(fullname, path=None): """find a notebook, given its fully qualified name and an optional path This turns "foo.bar" into "foo/bar.ipynb" and tries turning "Foo_Bar" into "Foo Bar" if Foo_Bar does not exist. """
name = fullname.rsplit('.', 1)[-1] if not path: path = [''] for d in path: nb_path = os.path.join(d, name + ".ipynb") if os.path.isfile(nb_path): return nb_path # let import Notebook_Name find "Notebook Name.ipynb" nb_path = nb_path.replace("_", " ") if os.path.isfile(nb_path): return nb_path
<SYSTEM_TASK:> import a notebook as a module <END_TASK> <USER_TASK:> Description: def load_module(self, fullname): """import a notebook as a module"""
path = find_notebook(fullname, self.path) print ("importing Jupyter notebook from %s" % path) # load the notebook object with io.open(path, 'r', encoding='utf-8') as f: nb = read(f, 4) # create the module and add it to sys.modules if name in sys.modules: # return sys.modules[name] mod = types.ModuleType(fullname) mod.__file__ = path mod.__loader__ = self mod.__dict__['get_ipython'] = get_ipython sys.modules[fullname] = mod # extra work to ensure that magics that would affect the user_ns # actually affect the notebook module's ns save_user_ns = self.shell.user_ns self.shell.user_ns = mod.__dict__ try: for cell in nb.cells: if cell.cell_type == 'code': # transform the input to executable Python code = self.shell.input_transformer_manager.transform_cell(cell.source) # run the code in themodule exec(code, mod.__dict__) finally: self.shell.user_ns = save_user_ns return mod
<SYSTEM_TASK:> returns full location given samecode or county and state. Returns False if not valid. <END_TASK> <USER_TASK:> Description: def location_lookup(self, req_location): """ returns full location given samecode or county and state. Returns False if not valid. *currently locations are a dictionary, once other geo data is added, they will move to a location class/obj* """
location = False try: location = self.samecodes[req_location['code']] except Exception: pass try: location = self.lookup_samecode(req_location['local'], req_location['state']) except Exception: pass return location
<SYSTEM_TASK:> Given County, State return the SAME code for specified location. Return False if not found <END_TASK> <USER_TASK:> Description: def lookup_samecode(self, local, state): """Given County, State return the SAME code for specified location. Return False if not found"""
for location in self.samecodes: if state.lower() == self.samecodes[location]['state'].lower(): if local.lower() == self.samecodes[location]['local'].lower(): return self.samecodes[location] return False
<SYSTEM_TASK:> Given multiple SAME codes, determine if they are all in one state. If so, it returns that state. <END_TASK> <USER_TASK:> Description: def getfeedscope(self, geocodes): """Given multiple SAME codes, determine if they are all in one state. If so, it returns that state. Otherwise return 'US'. This is used to determine which NWS feed needs to be parsed to get all alerts for the requested SAME codes"""
states = self._get_states_from_samecodes(geocodes) if len(states) >= 2: return 'US' else: return states[0]
<SYSTEM_TASK:> Returns all states for a given list of SAME codes <END_TASK> <USER_TASK:> Description: def _get_states_from_samecodes(self, geocodes): """Returns all states for a given list of SAME codes *Shouldn't be used to determine feed scope, please use getfeedscope()* """
states = [] for code in geocodes: if not isinstance(geocodes, list): raise Exception("specified geocodes must be list") try: state = self.samecodes[code]['state'] except KeyError: raise Exception("Samecode Not Found") else: if state not in states: states.append(state) return states
<SYSTEM_TASK:> Loads the Same Codes into this object <END_TASK> <USER_TASK:> Description: def _load_same_codes(self, refresh=False): """Loads the Same Codes into this object"""
if refresh is True: self._get_same_codes() else: self._cached_same_codes()
<SYSTEM_TASK:> get SAME codes, load into a dict and cache <END_TASK> <USER_TASK:> Description: def _get_same_codes(self): """get SAME codes, load into a dict and cache"""
same = {} url = '''http://www.nws.noaa.gov/nwr/data/SameCode.txt''' # pylint: disable=E1103 raw = requests.get(url).content.decode('utf-8') # py3 compatibility for row in raw.split('\n'): try: code, local, state = str(row).strip().split(',') location = {'code': code, 'local': local, 'state': state.strip()} # when I contacted the nws to add a missing same code # they added a space before the state in the samecodes file # stripping it out same[code] = location finally: pass cache = open(self._same_cache_file, 'wb') cPickle.dump(same, cache) cache.close() return same
<SYSTEM_TASK:> If a cached copy is available, return it <END_TASK> <USER_TASK:> Description: def _cached_same_codes(self): """If a cached copy is available, return it"""
cache_file = self._same_cache_file if os.path.exists(cache_file): maxage = datetime.now() - timedelta(minutes=4320) file_ts = datetime.fromtimestamp(os.stat(cache_file).st_mtime) if file_ts > maxage: try: cache = open(cache_file, 'rb') self._samecodes = cPickle.load(cache) cache.close() return True finally: pass self.reload()
<SYSTEM_TASK:> Sets the flag if last hop responded. <END_TASK> <USER_TASK:> Description: def set_last_hop_responded(self, last_hop): """Sets the flag if last hop responded."""
for packet in last_hop.packets: if packet.rtt: self.last_hop_responded = True break
<SYSTEM_TASK:> Sets the flag if traceroute result is successful or not. <END_TASK> <USER_TASK:> Description: def set_is_success(self, last_hop): """Sets the flag if traceroute result is successful or not."""
for packet in last_hop.packets: if packet.rtt and not packet.is_error: self.is_success = True break else: self.set_last_hop_errors(last_hop)
<SYSTEM_TASK:> Returns just the IPs from the traceroute. <END_TASK> <USER_TASK:> Description: def ip_path(self): """ Returns just the IPs from the traceroute. """
r = [] for hop in self.hops: r.append([packet.origin for packet in hop.packets]) return r
<SYSTEM_TASK:> Get information about the USB-ISS <END_TASK> <USER_TASK:> Description: def get_iss_info(self): """ Get information about the USB-ISS Querying will return three bytes; - the module ID (7), - firmware version (currently 2), - the current operating mode. """
self.write_data([self.ISS_CMD, self.ISS_VERSION]) response = self.read_data(3) if len(response) == 3: response = self.decode(response) self.module = response[0] self.firmware = response[1] self._mode = response[2] else: raise USBISSError("Could not get version details")
<SYSTEM_TASK:> Set the operating protocol of the USB-ISS with additional <END_TASK> <USER_TASK:> Description: def mode(self, set_bytes): """Set the operating protocol of the USB-ISS with additional parameters for the protocol """
self._mode = set_bytes data = [self.ISS_CMD, self.ISS_SET_MODE] + set_bytes self.write_data(data) response = self.read_data(2) if response[0] == 0: error_dict = { 0x05: 'Unknown Command', 0x06: 'Internal Error 1', 0x07: 'Internal Error 2' } try: raise USBISSError(error_dict[response(1)]) except KeyError: raise USBISSError('Undocumented Error')
<SYSTEM_TASK:> Converts all the keys in an object to DateTime instances. <END_TASK> <USER_TASK:> Description: def keys_to_datetime(obj, *keys): """ Converts all the keys in an object to DateTime instances. Args: obj (dict): the JSON-like ``dict`` object to modify inplace. keys (str): keys of the object being converted into DateTime instances. Returns: dict: ``obj`` inplace. >>> keys_to_datetime(None) is None True >>> keys_to_datetime({}) {} >>> a = {} >>> id(keys_to_datetime(a)) == id(a) True >>> a = {'one': '2016-06-06T19:41:43.039284', 'two': '2016-06-06T19:41:43.039284'} >>> keys_to_datetime(a) == a True >>> keys_to_datetime(a, 'one')['one'] datetime.datetime(2016, 6, 6, 19, 41, 43, 39284) >>> keys_to_datetime(a, 'one')['two'] '2016-06-06T19:41:43.039284' """
if not keys: return obj for k in keys: if k not in obj: continue v = obj[k] if not isinstance(v, string_types): continue obj[k] = parse_datetime(v) return obj
<SYSTEM_TASK:> Returns a list of directories matching the path given. <END_TASK> <USER_TASK:> Description: def browse(self, path=None): """ Returns a list of directories matching the path given. Args: path (str): glob pattern. Returns: List[str] """
params = None if path: assert isinstance(path, string_types) params = {'current': path} return self.get('browse', params=params)
<SYSTEM_TASK:> Returns whether the config is in sync, i.e. whether the running <END_TASK> <USER_TASK:> Description: def config_insync(self): """ Returns whether the config is in sync, i.e. whether the running configuration is the same as that on disk. Returns: bool """
status = self.get('config/insync').get('configInSync', False) if status is None: status = False return status
<SYSTEM_TASK:> Returns the list of recent errors. <END_TASK> <USER_TASK:> Description: def errors(self): """ Returns the list of recent errors. Returns: list: of :obj:`.ErrorEvent` tuples. """
ret_errs = list() errors = self.get('error').get('errors', None) or list() assert isinstance(errors, list) for err in errors: when = parse_datetime(err.get('when', None)) msg = err.get('message', '') e = ErrorEvent(when, msg) ret_errs.append(e) return ret_errs
<SYSTEM_TASK:> Send an error message to the active client. The new error will be <END_TASK> <USER_TASK:> Description: def show_error(self, message): """ Send an error message to the active client. The new error will be displayed on any active GUI clients. Args: message (str): Plain-text message to display. Returns: None >>> s = _syncthing() >>> s.system.show_error('my error msg') >>> s.system.errors()[0] ... # doctest: +ELLIPSIS ErrorEvent(when=datetime.datetime(...), message='"my error msg"') >>> s.system.clear_errors() >>> s.system.errors() [] """
assert isinstance(message, string_types) self.post('error', data=message)
<SYSTEM_TASK:> Pause the given device. <END_TASK> <USER_TASK:> Description: def pause(self, device): """ Pause the given device. Args: device (str): Device ID. Returns: dict: with keys ``success`` and ``error``. """
resp = self.post('pause', params={'device': device}, return_response=True) error = resp.text if not error: error = None return {'success': resp.status_code == requests.codes.ok, 'error': error}
<SYSTEM_TASK:> Erase the database index from a given folder and restart Syncthing. <END_TASK> <USER_TASK:> Description: def reset_folder(self, folder): """ Erase the database index from a given folder and restart Syncthing. Args: folder (str): Folder ID. Returns: None """
warnings.warn('This is a destructive action that cannot be undone.') self.post('reset', data={}, params={'folder': folder})
<SYSTEM_TASK:> Returns the directory tree of the global model. <END_TASK> <USER_TASK:> Description: def browse(self, folder, levels=None, prefix=None): """ Returns the directory tree of the global model. Directories are always JSON objects (map/dictionary), and files are always arrays of modification time and size. The first integer is the files modification time, and the second integer is the file size. Args: folder (str): The root folder to traverse. levels (int): How deep within the tree we want to dwell down. (0 based, defaults to unlimited depth) prefix (str): Defines a prefix within the tree where to start building the structure. Returns: dict """
assert isinstance(levels, int) or levels is None assert isinstance(prefix, string_types) or prefix is None return self.get('browse', params={'folder': folder, 'levels': levels, 'prefix': prefix})