Dataset columns: text_prompt (string, 157 to 13.1k characters) and code_prompt (string, 7 to 19.8k characters).
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def push_front(self, value): '''Appends a copy of ``value`` to the beginning of the list.''' self.cache.push_front(self.value_pickler.dumps(value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def aggregate(self, kwargs): '''Aggregate lookup parameters.''' meta = self._meta fields = meta.dfields field_lookups = {} for name, value in iteritems(kwargs): bits = name.split(JSPLITTER) field_name = bits.pop(0) if field_name not in fields: raise QuerySetError('Could not filter on model "{0}".\ Field "{1}" does not exist.'.format(meta, field_name)) field = fields[field_name] attname = field.attname lookup = None if bits: bits = [n.lower() for n in bits] if bits[-1] == 'in': bits.pop() elif bits[-1] in range_lookups: lookup = bits.pop() remaining = JSPLITTER.join(bits) if lookup: # this is a range lookup attname, nested = field.get_lookup(remaining, QuerySetError) lookups = get_lookups(attname, field_lookups) lookups.append(lookup_value(lookup, (value, nested))) continue elif remaining: # Not a range lookup, must be a nested filter value = field.filter(self.session, remaining, value) lookups = get_lookups(attname, field_lookups) # If we are here the field must be an index if not field.index: raise QuerySetError("%s %s is not an index. Cannot query." % (field.__class__.__name__, field_name)) if not iterable(value): value = (value,) for v in value: if isinstance(v, Q): v = lookup_value('set', v.construct()) else: v = lookup_value('value', field.serialise(v, lookup)) lookups.append(v) return [queryset(self, name=name, underlying=field_lookups[name]) for name in sorted(field_lookups)]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def models_from_model(model, include_related=False, exclude=None): '''Generator of all models in ``model``.''' if exclude is None: exclude = set() if model and model not in exclude: exclude.add(model) if isinstance(model, ModelType) and not model._meta.abstract: yield model if include_related: exclude.add(model) for field in model._meta.fields: if hasattr(field, 'relmodel'): through = getattr(field, 'through', None) for rmodel in (field.relmodel, field.model, through): for m in models_from_model( rmodel, include_related=include_related, exclude=exclude): yield m for manytomany in model._meta.manytomany: related = getattr(model, manytomany) for m in models_from_model(related.model, include_related=include_related, exclude=exclude): yield m elif not isinstance(model, ModelType) and isclass(model): # This is a class which is not a ModelType yield model
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def unregister(self, model=None): '''Unregister a ``model`` if provided; otherwise unregister all registered models. Return a list of unregistered model managers, or ``None`` if no managers were removed.''' if model is not None: try: manager = self._registered_models.pop(model) except KeyError: return if self._registered_names.get(manager._meta.name) == manager: self._registered_names.pop(manager._meta.name) return [manager] else: managers = list(self._registered_models.values()) self._registered_models.clear() return managers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def execute_script(self, name, keys, *args, **options): '''Execute a script. makes sure all required scripts are loaded. ''' script = get_script(name) if not script: raise redis.RedisError('No such script "%s"' % name) address = self.address() if address not in all_loaded_scripts: all_loaded_scripts[address] = set() loaded = all_loaded_scripts[address] toload = script.required_scripts.difference(loaded) for name in toload: s = get_script(name) yield self.script_load(s.script) loaded.update(toload) yield script(self, keys, args, options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def query(self, model): '''Return a query for ``model`` when it needs to be indexed. ''' session = self.router.session() fields = tuple((f.name for f in model._meta.scalarfields if f.type == 'text')) qs = session.query(model).load_only(*fields) for related in self.get_related_fields(model): qs = qs.load_related(related) return qs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def intervals(self, startdate, enddate, parseinterval=None): '''Given ``startdate`` and ``enddate``, evaluate the date intervals for which data is not available. It returns a list of two-element tuples containing the start and end date of each interval. The list can contain 0, 1 or 2 tuples.''' return missing_intervals(startdate, enddate, self.data_start, self.data_end, dateconverter=self.todate, parseinterval=parseinterval)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def merged_series(cls, *series, **kwargs): '''Merge ``series`` and return the results without storing data in the backend server.''' router, backend = cls.check_router(None, *series) if backend: target = router.register(cls(), backend) router.session().add(target) target._merge(*series, **kwargs) backend = target.backend return backend.execute( backend.structure(target).irange_and_delete(), target.load_data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def backend_fields(self, fields): '''Return a two elements tuple containing a list of fields names and a list of field attribute names.''' dfields = self.dfields processed = set() names = [] atts = [] pkname = self.pkname() for name in fields: if name == pkname or name in processed: continue elif name in dfields: processed.add(name) field = dfields[name] names.append(field.name) atts.append(field.attname) else: bname = name.split(JSPLITTER)[0] if bname in dfields: field = dfields[bname] if field.type in ('json object', 'related object'): processed.add(name) names.append(name) atts.append(name) return names, atts
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def as_dict(self): '''Model metadata in a dictionary''' pk = self.pk id_type = 3 if pk.type == 'auto': id_type = 1 return {'id_name': pk.name, 'id_type': id_type, 'sorted': bool(self.ordering), 'autoincr': self.ordering and self.ordering.auto, 'multi_fields': [field.name for field in self.multifields], 'indices': dict(((idx.attname, idx.unique) for idx in self.indices))}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def loadedfields(self): '''Generator of fields loaded from database''' if self._loadedfields is None: for field in self._meta.scalarfields: yield field else: fields = self._meta.dfields processed = set() for name in self._loadedfields: if name in processed: continue if name in fields: processed.add(name) yield fields[name] else: name = name.split(JSPLITTER)[0] if name in fields and name not in processed: field = fields[name] if field.type == 'json object': processed.add(name) yield field
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def clone(self, **data): '''Utility method for cloning the instance as a new object. :parameter data: additional which override field data. :rtype: a new instance of this class. ''' meta = self._meta session = self.session pkname = meta.pkname() pkvalue = data.pop(pkname, None) fields = self.todict(exclude_cache=True) fields.update(data) fields.pop('__dbdata__', None) obj = self._meta.make_object((pkvalue, None, fields)) obj.session = session return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def title(self): ''' Returns the axis instances where the title will be printed ''' return self.title_left(on=False), self.title_center(on=False), \ self.title_right(on=False)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def footer(self): ''' Returns the axis instances where the footer will be printed ''' return self.footer_left(on=False), self.footer_center(on=False), \ self.footer_right(on=False)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def top_right(self): ''' Returns the axis instance at the top right of the page, where the postage stamp and aperture is displayed ''' res = self.body_top_right[self.tcount]() self.tcount += 1 return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def left(self): ''' Returns the current axis instance on the left side of the page where each successive light curve is displayed ''' res = self.body_left[self.lcount]() self.lcount += 1 return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def right(self): ''' Returns the current axis instance on the right side of the page, where cross-validation information is displayed ''' res = self.body_right[self.rcount]() self.rcount += 1 return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def body(self): ''' Returns the axis instance where the light curves will be shown ''' res = self._body[self.bcount]() self.bcount += 1 return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def hashmodel(model, library=None): '''Calculate the Hash id of metaclass ``meta``''' library = library or 'python-stdnet' meta = model._meta sha = hashlib.sha1(to_bytes('{0}({1})'.format(library, meta))) hash = sha.hexdigest()[:8] meta.hash = hash if hash in _model_dict: raise KeyError('Model "{0}" already in hash table.\ Rename your model or the module containing the model.'.format(meta)) _model_dict[hash] = model
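A minimal, self-contained sketch of the hashing scheme used by hashmodel above: the model hash is the first 8 hex characters of a SHA-1 digest over '<library>(<meta>)'. The model label below is a hypothetical placeholder, not a real stdnet model.

    import hashlib

    def short_hash(library, meta_repr):
        # First 8 hex characters of SHA-1 over "library(meta)"
        payload = '{0}({1})'.format(library, meta_repr).encode('utf-8')
        return hashlib.sha1(payload).hexdigest()[:8]

    print(short_hash('python-stdnet', 'myapp.mymodel'))  # prints an 8-character hex id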
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def bind(self, callback, sender=None): '''Bind a ``callback`` for a given ``sender``.''' key = (_make_id(callback), _make_id(sender)) self.callbacks.append((key, callback))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fire(self, sender=None, **params): '''Fire callbacks from a ``sender``.''' keys = (_make_id(None), _make_id(sender)) results = [] for (_, key), callback in self.callbacks: if key in keys: results.append(callback(self, sender, **params)) return results
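The two methods above implement a small publish/subscribe dispatcher keyed on (callback id, sender id). A self-contained sketch of the same pattern, with a simplified stand-in for _make_id (the real helper is not shown in this snippet):

    class Signal:
        def __init__(self):
            self.callbacks = []

        @staticmethod
        def _make_id(target):
            # Simplified stand-in: CPython object identity; id(None) acts as the wildcard
            return id(target)

        def bind(self, callback, sender=None):
            key = (self._make_id(callback), self._make_id(sender))
            self.callbacks.append((key, callback))

        def fire(self, sender=None, **params):
            keys = (self._make_id(None), self._make_id(sender))
            return [callback(self, sender, **params)
                    for (_, sender_key), callback in self.callbacks
                    if sender_key in keys]

    sig = Signal()
    sig.bind(lambda signal, sender, **kw: kw.get('value'))
    print(sig.fire(value=42))  # [42]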
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def Channel(EPIC, campaign=None): ''' Returns the channel number for a given EPIC target. ''' if campaign is None: campaign = Campaign(EPIC) if hasattr(campaign, '__len__'): raise AttributeError( "Please choose a campaign/season for this target: %s." % campaign) try: stars = GetK2Stars()[campaign] except KeyError: # Not sure what else to do here! log.warn("Unknown channel for target. Defaulting to channel 2.") return 2 i = np.argmax([s[0] == EPIC for s in stars]) return stars[i][2]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def Module(EPIC, campaign=None): ''' Returns the module number for a given EPIC target. ''' channel = Channel(EPIC, campaign=campaign) nums = {2: 1, 3: 5, 4: 9, 6: 13, 7: 17, 8: 21, 9: 25, 10: 29, 11: 33, 12: 37, 13: 41, 14: 45, 15: 49, 16: 53, 17: 57, 18: 61, 19: 65, 20: 69, 22: 73, 23: 77, 24: 81} for c in [channel, channel - 1, channel - 2, channel - 3]: if c in nums.values(): for mod, chan in nums.items(): if chan == c: return mod return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def Channels(module): ''' Returns the channels contained in the given K2 module. ''' nums = {2: 1, 3: 5, 4: 9, 6: 13, 7: 17, 8: 21, 9: 25, 10: 29, 11: 33, 12: 37, 13: 41, 14: 45, 15: 49, 16: 53, 17: 57, 18: 61, 19: 65, 20: 69, 22: 73, 23: 77, 24: 81} if module in nums: return [nums[module], nums[module] + 1, nums[module] + 2, nums[module] + 3] else: return None
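The nums table shared by Module and Channels maps each K2 module to its first channel; each module then owns four consecutive channels, which is why Module walks back up to three channels. A small self-contained check of that arithmetic, using the same table:

    nums = {2: 1, 3: 5, 4: 9, 6: 13, 7: 17, 8: 21, 9: 25, 10: 29,
            11: 33, 12: 37, 13: 41, 14: 45, 15: 49, 16: 53, 17: 57,
            18: 61, 19: 65, 20: 69, 22: 73, 23: 77, 24: 81}

    def module_for_channel(channel):
        # Walk back up to 3 channels to find the module's first channel
        for c in (channel, channel - 1, channel - 2, channel - 3):
            for mod, first in nums.items():
                if first == c:
                    return mod
        return None

    assert module_for_channel(37) == 12   # first channel of module 12
    assert module_for_channel(40) == 12   # last channel of module 12
    assert module_for_channel(81) == 24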
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def GetSources(ID, darcsec=None, stars_only=False): ''' Grabs the EPIC coordinates from the TPF and searches MAST for other EPIC targets within the same aperture. :param int ID: The 9-digit :py:obj:`EPIC` number of the target :param float darcsec: The search radius in arcseconds. \ Default is four times the largest dimension of the aperture. :param bool stars_only: If :py:obj:`True`, only returns objects \ explicitly designated as `"stars"` in MAST. Default :py:obj:`False` :returns: A list of :py:class:`Source` instances containing \ other :py:obj:`EPIC` targets within or close to this \ target's aperture ''' client = kplr.API() star = client.k2_star(ID) tpf = star.get_target_pixel_files()[0] with tpf.open() as f: crpix1 = f[2].header['CRPIX1'] crpix2 = f[2].header['CRPIX2'] crval1 = f[2].header['CRVAL1'] crval2 = f[2].header['CRVAL2'] cdelt1 = f[2].header['CDELT1'] cdelt2 = f[2].header['CDELT2'] pc1_1 = f[2].header['PC1_1'] pc1_2 = f[2].header['PC1_2'] pc2_1 = f[2].header['PC2_1'] pc2_2 = f[2].header['PC2_2'] pc = np.array([[pc1_1, pc1_2], [pc2_1, pc2_2]]) pc = np.linalg.inv(pc) crpix1p = f[2].header['CRPIX1P'] crpix2p = f[2].header['CRPIX2P'] crval1p = f[2].header['CRVAL1P'] crval2p = f[2].header['CRVAL2P'] cdelt1p = f[2].header['CDELT1P'] cdelt2p = f[2].header['CDELT2P'] if darcsec is None: darcsec = 4 * max(f[2].data.shape) epicid, ra, dec, kepmag = MASTRADec( star.k2_ra, star.k2_dec, darcsec, stars_only) sources = [] for i, epic in enumerate(epicid): dra = (ra[i] - crval1) * np.cos(np.radians(dec[i])) / cdelt1 ddec = (dec[i] - crval2) / cdelt2 sx = pc[0, 0] * dra + pc[0, 1] * ddec + crpix1 + crval1p - 1.0 sy = pc[1, 0] * dra + pc[1, 1] * ddec + crpix2 + crval2p - 1.0 sources.append(dict(ID=epic, x=sx, y=sy, mag=kepmag[i], x0=crval1p, y0=crval2p)) return sources
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def SaturationFlux(EPIC, campaign=None, **kwargs): ''' Returns the well depth for the target. If any of the target's pixels have flux larger than this value, they are likely to be saturated and cause charge bleeding. The well depths were obtained from Table 13 of the Kepler instrument handbook. We assume an exposure time of 6.02s. ''' channel, well_depth = np.loadtxt(os.path.join(EVEREST_SRC, 'missions', 'k2', 'tables', 'well_depth.tsv'), unpack=True) satflx = well_depth[channel == Channel(EPIC, campaign=campaign)][0] / 6.02 return satflx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def GetStars(campaign, module, model='nPLD', **kwargs): ''' Returns de-trended light curves for all stars on a given module in a given campaign. ''' # Get the channel numbers channels = Channels(module) assert channels is not None, "No channels available on this module." # Get the EPIC numbers all = GetK2Campaign(campaign) stars = np.array([s[0] for s in all if s[2] in channels and os.path.exists( os.path.join(EVEREST_DAT, 'k2', 'c%02d' % int(campaign), ('%09d' % s[0])[:4] + '00000', ('%09d' % s[0])[4:], model + '.npz'))], dtype=int) N = len(stars) assert N > 0, "No light curves found for campaign %d, module %d." % ( campaign, module) # Loop over all stars and store the fluxes in a list fluxes = [] errors = [] kpars = [] for n in range(N): # De-trended light curve file name nf = os.path.join(EVEREST_DAT, 'k2', 'c%02d' % int(campaign), ('%09d' % stars[n])[:4] + '00000', ('%09d' % stars[n])[4:], model + '.npz') # Get the data data = np.load(nf) t = data['time'] if n == 0: time = t breakpoints = data['breakpoints'] # Get de-trended light curve y = data['fraw'] - data['model'] err = data['fraw_err'] # De-weight outliers and bad timestamps m = np.array(list(set(np.concatenate([data['outmask'], data['badmask'], data['nanmask'], data['transitmask']]))), dtype=int) # Interpolate over the outliers y = np.interp(t, np.delete(t, m), np.delete(y, m)) err = np.interp(t, np.delete(t, m), np.delete(err, m)) # Append to our running lists fluxes.append(y) errors.append(err) kpars.append(data['kernel_params']) return time, breakpoints, np.array(fluxes), \ np.array(errors), np.array(kpars)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parse_info(response): '''Parse the response of Redis's INFO command into a Python dict. In doing so, convert byte data into unicode.''' info = {} response = response.decode('utf-8') def get_value(value): if ',' not in value or '=' not in value: return value sub_dict = {} for item in value.split(','): k, v = item.split('=') try: sub_dict[k] = int(v) except ValueError: sub_dict[k] = v return sub_dict data = info for line in response.splitlines(): keyvalue = line.split(':') if len(keyvalue) == 2: key, value = keyvalue try: data[key] = int(value) except ValueError: data[key] = get_value(value) else: data = {} info[line[2:]] = data return info
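A short, self-contained illustration of the sub-value rule inside get_value above: only values containing both a comma and an equals sign are expanded into a nested dict; everything else is returned unchanged. The sample values are made up for illustration.

    def get_value(value):
        # Only values like "keys=100,expires=5" become nested dicts
        if ',' not in value or '=' not in value:
            return value
        sub_dict = {}
        for item in value.split(','):
            k, v = item.split('=')
            try:
                sub_dict[k] = int(v)
            except ValueError:
                sub_dict[k] = v
        return sub_dict

    print(get_value('master'))                  # 'master' (kept as a plain string)
    print(get_value('keys=100,expires=5'))      # {'keys': 100, 'expires': 5}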
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def zdiffstore(self, dest, keys, withscores=False): '''Compute the difference of multiple sorted sets specified by ``keys`` and store it in a new sorted set at ``dest``. ''' keys = (dest,) + tuple(keys) wscores = 'withscores' if withscores else '' return self.execute_script('zdiffstore', keys, wscores, withscores=withscores)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def zpopbyrank(self, name, start, stop=None, withscores=False, desc=False): '''Pop a range by rank. ''' stop = stop if stop is not None else start return self.execute_script('zpop', (name,), 'rank', start, stop, int(desc), int(withscores), withscores=withscores)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lnprior(x): """Return the log prior given parameter vector `x`."""
per, t0, b = x if b < -1 or b > 1: return -np.inf elif per < 7 or per > 10: return -np.inf elif t0 < 1978 or t0 > 1979: return -np.inf else: return 0.
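The function above is a simple box prior on (period, transit time, impact parameter): log-probability 0 inside the allowed ranges and -inf outside. A quick standalone check of that behaviour; the function is restated here only so the snippet runs on its own, and the test vectors are arbitrary.

    import numpy as np

    def lnprior(x):
        # Box prior on (period, t0, impact parameter)
        per, t0, b = x
        if b < -1 or b > 1:
            return -np.inf
        elif per < 7 or per > 10:
            return -np.inf
        elif t0 < 1978 or t0 > 1979:
            return -np.inf
        return 0.

    print(lnprior([8.5, 1978.5, 0.2]))   # 0.0   (inside all bounds)
    print(lnprior([8.5, 1978.5, 1.5]))   # -inf  (impact parameter out of range)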
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lnlike(x, star): """Return the log likelihood given parameter vector `x`."""
ll = lnprior(x) if np.isinf(ll): return ll, (np.nan, np.nan) per, t0, b = x model = TransitModel('b', per=per, t0=t0, b=b, rhos=10.)(star.time) like, d, vard = star.lnlike(model, full_output=True) ll += like return ll, (d,)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def permitted_query(self, query, group, operations): '''Change the ``query`` so that only instances for which ``group`` has roles with permission on ``operations`` are returned.''' session = query.session models = session.router user = group.user if user.is_superuser: # super-users have all permissions return query roles = group.roles.query() # query on all roles for group # The through model for the Role/Permission relationship through_model = models.role.permissions.model models[through_model].filter(role=roles, permission__model_type=query.model, permission__operations=operations) # query on all relevant permissions permissions = models.permission.filter(model_type=query.model, level=operations) owner_query = query.filter(user=user) # all roles for the query model with appropriate permission level roles = models.role.filter(model_type=query.model, level__ge=level) # Now we need groups which have these roles groups = Role.groups.throughquery( session).filter(role=roles).get_field('group') # I need to know if user is in any of these groups if user.groups.filter(id=groups).count(): # it is, lets get the model with permissions less # or equal permission level permitted = models.instancerole.filter( role=roles).get_field('object_id') return owner_query.union(model.objects.filter(id=permitted)) else: return owner_query
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_app(self, app, session=None, parameters=None): """Initializes snow extension Set config default and find out which client type to use :param app: App passed from constructor or directly to init_app (factory) :param session: requests-compatible session to pass along to init_app :param parameters: `ParamsBuilder` object passed to `Client` after instantiation :raises: - ConfigError - if unable to determine client type """
if parameters is not None and not isinstance(parameters, ParamsBuilder): raise InvalidUsage("parameters should be a pysnow.ParamsBuilder object, not %r" % type(parameters).__name__) self._session = session self._parameters = parameters app.config.setdefault('SNOW_INSTANCE', None) app.config.setdefault('SNOW_HOST', None) app.config.setdefault('SNOW_USER', None) app.config.setdefault('SNOW_PASSWORD', None) app.config.setdefault('SNOW_OAUTH_CLIENT_ID', None) app.config.setdefault('SNOW_OAUTH_CLIENT_SECRET', None) app.config.setdefault('SNOW_USE_SSL', True) if app.config['SNOW_OAUTH_CLIENT_ID'] and app.config['SNOW_OAUTH_CLIENT_SECRET']: self._client_type_oauth = True elif self._session or (app.config['SNOW_USER'] and app.config['SNOW_PASSWORD']): self._client_type_basic = True else: raise ConfigError("You must supply user credentials, a session or OAuth credentials to use flask-snow")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connection(self): """Snow connection instance, stores a `pysnow.Client` instance and `pysnow.Resource` instances Creates a new :class:`pysnow.Client` object if it doesn't exist in the app slice of the context stack :returns: :class:`pysnow.Client` object """
ctx = stack.top.app if ctx is not None: if not hasattr(ctx, 'snow'): if self._client_type_oauth: if not self._token_updater: warnings.warn("No token updater has been set. Token refreshes will be ignored.") client = self._get_oauth_client() else: client = self._get_basic_client() if self._parameters: # Set parameters passed on app init client.parameters = self._parameters ctx.snow = client return ctx.snow
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def usage(): """Print out a usage message"""
global options l = len(options['long']) options['shortlist'] = [s for s in options['short'] if s != ":"] print("python -m behave2cucumber [-h] [-d level|--debug=level]") for i in range(l): print(" -{0}|--{1:20} {2}".format(options['shortlist'][i], options['long'][i], options['descriptions'][i]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def direction(theta, phi): '''Return the direction vector of a cylinder defined by the spherical coordinates theta and phi. ''' return np.array([np.cos(phi) * np.sin(theta), np.sin(phi) * np.sin(theta), np.cos(theta)])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def projection_matrix(w): '''Return the projection matrix of a direction w.''' return np.identity(3) - np.dot(np.reshape(w, (3,1)), np.reshape(w, (1, 3)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def skew_matrix(w): '''Return the skew matrix of a direction w.''' return np.array([[0, -w[2], w[1]], [w[2], 0, -w[0]], [-w[1], w[0], 0]])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def calc_A(Ys): '''Return the matrix A from a list of Y vectors.''' return sum(np.dot(np.reshape(Y, (3,1)), np.reshape(Y, (1, 3))) for Y in Ys)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def calc_A_hat(A, S): '''Return the A_hat matrix of A given the skew matrix S''' return np.dot(S, np.dot(A, np.transpose(S)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def G(w, Xs): '''Calculate the G function given a cylinder direction w and a list of data points Xs to be fitted.''' n = len(Xs) P = projection_matrix(w) Ys = [np.dot(P, X) for X in Xs] A = calc_A(Ys) A_hat = calc_A_hat(A, skew_matrix(w)) u = sum(np.dot(Y, Y) for Y in Ys) / n v = np.dot(A_hat, sum(np.dot(Y, Y) * Y for Y in Ys)) / np.trace(np.dot(A_hat, A)) return sum((np.dot(Y, Y) - u - 2 * np.dot(Y, v)) ** 2 for Y in Ys)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def C(w, Xs): '''Calculate the cylinder center given the cylinder direction and a list of data points. ''' n = len(Xs) P = projection_matrix(w) Ys = [np.dot(P, X) for X in Xs] A = calc_A(Ys) A_hat = calc_A_hat(A, skew_matrix(w)) return np.dot(A_hat, sum(np.dot(Y, Y) * Y for Y in Ys)) / np.trace(np.dot(A_hat, A))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def r(w, Xs): '''Calculate the radius given the cylinder direction and a list of data points. ''' n = len(Xs) P = projection_matrix(w) c = C(w, Xs) return np.sqrt(sum(np.dot(c - X, np.dot(P, c - X)) for X in Xs) / n)
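The helpers above combine into a full cylinder fit: direction turns spherical angles into an axis, G is the objective to minimise over that axis, and C and r recover the center and radius once the axis is known. A sketch of how they fit together on synthetic data, assuming the functions defined above are in scope; the geometry below is made up.

    import numpy as np
    # Assumes direction, C and r from the functions above are in scope.

    # Synthetic cylinder: axis along z, centered on the origin, radius 2
    rng = np.random.default_rng(0)
    angles = rng.uniform(0, 2 * np.pi, 200)
    heights = rng.uniform(-5, 5, 200)
    Xs = [np.array([2 * np.cos(a), 2 * np.sin(a), h])
          for a, h in zip(angles, heights)]

    w_true = direction(0.0, 0.0)   # theta = 0 gives the z axis
    print(r(w_true, Xs))           # should be close to 2.0
    print(C(w_true, Xs))           # should be close to the origin (in the x-y plane)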
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, request, key): """Validate an email with the given key"""
try: email_val = EmailAddressValidation.objects.get(validation_key=key) except EmailAddressValidation.DoesNotExist: messages.error(request, _('The email address you are trying to ' 'verify either has already been verified' ' or does not exist.')) return redirect('/') try: email = EmailAddress.objects.get(address=email_val.address) except EmailAddress.DoesNotExist: email = EmailAddress(address=email_val.address) if email.user and email.user.is_active: messages.error(request, _('The email address you are trying to ' 'verify is already an active email ' 'address.')) email_val.delete() return redirect('/') email.user = email_val.user email.save() email_val.delete() user = User.objects.get(username=email.user.username) user.is_active = True user.save() messages.success(request, _('Email address verified!')) return redirect('user_profile', username=email_val.user.username)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete(self, request, key): """Remove an email address, validated or not."""
request.DELETE = http.QueryDict(request.body) email_addr = request.DELETE.get('email') user_id = request.DELETE.get('user') if not email_addr: return http.HttpResponseBadRequest() try: email = EmailAddressValidation.objects.get(address=email_addr, user_id=user_id) except EmailAddressValidation.DoesNotExist: pass else: email.delete() return http.HttpResponse(status=204) try: email = EmailAddress.objects.get(address=email_addr, user_id=user_id) except EmailAddress.DoesNotExist: raise http.Http404 email.user = None email.save() return http.HttpResponse(status=204)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, request, key): """Set an email address as primary address."""
request.UPDATE = http.QueryDict(request.body) email_addr = request.UPDATE.get('email') user_id = request.UPDATE.get('user') if not email_addr: return http.HttpResponseBadRequest() try: email = EmailAddress.objects.get(address=email_addr, user_id=user_id) except EmailAddress.DoesNotExist: raise http.Http404 email.user.email = email_addr email.user.save() return http.HttpResponse(status=204)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_env_setting(setting): """ Get the environment setting or return exception """
try: return os.environ[setting] except KeyError: error_msg = "Set the %s env variable" % setting raise ImproperlyConfigured(error_msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_social_account(account, url): """Verifies if a social account is valid. Examples: True 'http://twitter.com') False """
request = urllib2.Request(urlparse.urljoin(url, account)) request.get_method = lambda: 'HEAD' try: response = urllib2.urlopen(request) except urllib2.HTTPError: return False return response.code == 200
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fitting_rmsd(w_fit, C_fit, r_fit, Xs): '''Calculate the RMSD of fitting.''' return np.sqrt(sum((geometry.point_line_distance(p, C_fit, w_fit) - r_fit) ** 2 for p in Xs) / len(Xs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def basic_parse(response, buf_size=ijson.backend.BUFSIZE): """ Iterator yielding unprefixed events. Parameters: - response: a stream response from requests """
lexer = iter(IncrementalJsonParser.lexer(response, buf_size)) for value in ijson.backend.parse_value(lexer): yield value try: next(lexer) except StopIteration: pass else: raise ijson.common.JSONError('Additional data')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def drop_connection(self, name, database=None): """ Force server to close current client subscription connection to the server @param str name: The name of the subscription @param str database: The name of the database """
request_executor = self._store.get_request_executor(database) command = DropSubscriptionConnectionCommand(name) request_executor.execute(command)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def execute_from_command_line(argv=None): """ A simple method that runs a ManagementUtility. """
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "colab.settings") from django.conf import settings if not hasattr(settings, 'SECRET_KEY') and 'initconfig' in sys.argv: command = initconfig.Command() command.handle() else: utility = ManagementUtility(argv) utility.execute()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def normalize(v): '''Normalize a vector based on its 2 norm.''' if 0 == np.linalg.norm(v): return v return v / np.linalg.norm(v)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def rotation_matrix_from_axis_and_angle(u, theta): '''Calculate a rotation matrix from an axis and an angle.''' x = u[0] y = u[1] z = u[2] s = np.sin(theta) c = np.cos(theta) return np.array([[c + x**2 * (1 - c), x * y * (1 - c) - z * s, x * z * (1 - c) + y * s], [y * x * (1 - c) + z * s, c + y**2 * (1 - c), y * z * (1 - c) - x * s ], [z * x * (1 - c) - y * s, z * y * (1 - c) + x * s, c + z**2 * (1 - c) ]])
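A quick sanity check of the axis-angle (Rodrigues) rotation matrix above: rotating the x unit vector by 90 degrees about the z axis should give the y unit vector. The snippet assumes the function defined above is in scope.

    import numpy as np
    # Assumes rotation_matrix_from_axis_and_angle from above is in scope.

    R = rotation_matrix_from_axis_and_angle(np.array([0.0, 0.0, 1.0]), np.pi / 2)
    v = R.dot(np.array([1.0, 0.0, 0.0]))
    print(np.round(v, 6))   # approximately [0. 1. 0.]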
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def point_line_distance(p, l_p, l_v): '''Calculate the distance between a point and a line defined by a point and a direction vector. ''' l_v = normalize(l_v) u = p - l_p return np.linalg.norm(u - np.dot(u, l_v) * l_v)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def raw_query(self, query, query_parameters=None): """ To get all the document that equal to the query @param str query: The rql query @param dict query_parameters: Add query parameters to the query {key : value} """
self.assert_no_raw_query() if len(self._where_tokens) != 0 or len(self._select_tokens) != 0 or len( self._order_by_tokens) != 0 or len(self._group_by_tokens) != 0: raise InvalidOperationException( "You can only use raw_query on a new query, without applying any operations " "(such as where, select, order_by, group_by, etc)") if query_parameters: self.query_parameters = query_parameters self._query = query return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where_equals(self, field_name, value, exact=False): """ To get all the document that equal to the value in the given field_name @param str field_name: The field name in the index you want to query. @param value: The value will be the fields value you want to query @param bool exact: If True getting exact match of the query """
if field_name is None: raise ValueError("None field_name is invalid") field_name = Query.escape_if_needed(field_name) self._add_operator_if_needed() token = "equals" if self.negate: self.negate = False token = "not_equals" self.last_equality = {field_name: value} token = _Token(field_name=field_name, value=self.add_query_parameter(value), token=token, exact=exact) token.write = self.rql_where_write(token) self._where_tokens.append(token) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where(self, exact=False, **kwargs): """ To get all the documents that equal the values within kwargs for the specific keys @param bool exact: If True getting exact match of the query @param kwargs: the keys of the kwargs will be the field names in the index you want to query. The values will be the field values you want to query (if kwargs[field_name] is a list it will behave as the where_in method) """
for field_name in kwargs: if isinstance(kwargs[field_name], list): self.where_in(field_name, kwargs[field_name], exact) else: self.where_equals(field_name, kwargs[field_name], exact) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def search(self, field_name, search_terms, operator=QueryOperator.OR): """ For more complex text searching @param str field_name: The field name in the index you want to query. :type str @param str search_terms: The terms you want to query @param QueryOperator operator: OR or AND """
if field_name is None: raise ValueError("None field_name is invalid") field_name = Query.escape_if_needed(field_name) self._add_operator_if_needed() self.negate_if_needed(field_name) self.last_equality = {field_name: "(" + search_terms + ")" if ' ' in search_terms else search_terms} token = _Token(field_name=field_name, token="search", value=self.add_query_parameter(search_terms), search_operator=operator) token.write = self.rql_where_write(token) self._where_tokens.append(token) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where_ends_with(self, field_name, value): """ To get all the documents where the given field_name ends with the value @param str field_name: The field name in the index you want to query. @param str value: The value you want to query """
if field_name is None: raise ValueError("None field_name is invalid") field_name = Query.escape_if_needed(field_name) self._add_operator_if_needed() self.negate_if_needed(field_name) self.last_equality = {field_name: value} token = _Token(field_name=field_name, token="endsWith", value=self.add_query_parameter(value)) token.write = self.rql_where_write(token) self._where_tokens.append(token) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where_in(self, field_name, values, exact=False): """ Check that the field has one of the specified values @param str field_name: Name of the field @param str values: The values we wish to query @param bool exact: Getting the exact query (ex. case sensitive) """
field_name = Query.escape_if_needed(field_name) self._add_operator_if_needed() self.negate_if_needed(field_name) token = _Token(field_name=field_name, value=self.add_query_parameter(list(Utils.unpack_iterable(values))), token="in", exact=exact) token.write = self.rql_where_write(token) self._where_tokens.append(token) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_facets(self, facets, start=0, page_size=None): """ Query the facets results for this query using the specified list of facets with the given start and pageSize @param List[Facet] facets: List of facets @param int start: Start index for paging @param page_size: Paging PageSize. If set, overrides Facet.max_result """
if len(facets) == 0: raise ValueError("Facets must contain at least one entry", "facets") str_query = self.__str__() facet_query = FacetQuery(str_query, None, facets, start, page_size, query_parameters=self.query_parameters, wait_for_non_stale_results=self.wait_for_non_stale_results, wait_for_non_stale_results_timeout=self.timeout, cutoff_etag=self.cutoff_etag) command = GetFacetsCommand(query=facet_query) return self.session.requests_executor.execute(command)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def show_G_distribution(data): '''Show the distribution of the G function.''' Xs, t = fitting.preprocess_data(data) Theta, Phi = np.meshgrid(np.linspace(0, np.pi, 50), np.linspace(0, 2 * np.pi, 50)) G = [] for i in range(len(Theta)): G.append([]) for j in range(len(Theta[i])): w = fitting.direction(Theta[i][j], Phi[i][j]) G[-1].append(fitting.G(w, Xs)) plt.imshow(G, extent=[0, np.pi, 0, 2 * np.pi], origin='lower') plt.show()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def show_fit(w_fit, C_fit, r_fit, Xs): '''Plot the fitting given the fitted axis direction, the fitted center, the fitted radius and the data points. ''' fig = plt.figure() ax = fig.gca(projection='3d') # Plot the data points ax.scatter([X[0] for X in Xs], [X[1] for X in Xs], [X[2] for X in Xs]) # Get the transformation matrix theta = np.arccos(np.dot(w_fit, np.array([0, 0, 1]))) phi = np.arctan2(w_fit[1], w_fit[0]) M = np.dot(rotation_matrix_from_axis_and_angle(np.array([0, 0, 1]), phi), rotation_matrix_from_axis_and_angle(np.array([0, 1, 0]), theta)) # Plot the cylinder surface delta = np.linspace(-np.pi, np.pi, 20) z = np.linspace(-10, 10, 20) Delta, Z = np.meshgrid(delta, z) X = r_fit * np.cos(Delta) Y = r_fit * np.sin(Delta) for i in range(len(X)): for j in range(len(X[i])): p = np.dot(M, np.array([X[i][j], Y[i][j], Z[i][j]])) + C_fit X[i][j] = p[0] Y[i][j] = p[1] Z[i][j] = p[2] ax.plot_surface(X, Y, Z, alpha=0.2) # Plot the center and direction ax.quiver(C_fit[0], C_fit[1], C_fit[2], r_fit * w_fit[0], r_fit * w_fit[1], r_fit * w_fit[2], color='red') plt.show()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_window(self, highlight_locations): """Getting the HIGHLIGHT_NUM_CHARS_BEFORE_MATCH setting to find how many characters before the first word found should be removed from the window """
if len(self.text_block) <= self.max_length: return (0, self.max_length) num_chars_before = getattr( settings, 'HIGHLIGHT_NUM_CHARS_BEFORE_MATCH', 0 ) best_start, best_end = super(ColabHighlighter, self).find_window( highlight_locations ) if best_start <= num_chars_before: best_end -= best_start best_start = 0 else: best_start -= num_chars_before best_end -= num_chars_before return (best_start, best_end)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login(self): """ Try to login and set the internal session id. Please note: - Any failed login resets all existing session ids, even of other users. - SIDs expire after some time """
response = self.session.get(self.base_url + '/login_sid.lua', timeout=10) xml = ET.fromstring(response.text) if xml.find('SID').text == "0000000000000000": challenge = xml.find('Challenge').text url = self.base_url + "/login_sid.lua" response = self.session.get(url, params={ "username": self.username, "response": self.calculate_response(challenge, self.password), }, timeout=10) xml = ET.fromstring(response.text) sid = xml.find('SID').text if xml.find('SID').text == "0000000000000000": blocktime = int(xml.find('BlockTime').text) exc = Exception("Login failed, please wait {} seconds".format( blocktime )) exc.blocktime = blocktime raise exc self.sid = sid return sid
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calculate_response(self, challenge, password): """Calculate response for the challenge-response authentication"""
to_hash = (challenge + "-" + password).encode("UTF-16LE") hashed = hashlib.md5(to_hash).hexdigest() return "{0}-{1}".format(challenge, hashed)
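A standalone sketch of the same challenge-response scheme: the challenge and password are joined with a dash, encoded as UTF-16LE, MD5-hashed, and the response is "challenge-hexdigest". The challenge and password values below are made up for illustration.

    import hashlib

    def calculate_response(challenge, password):
        # challenge + "-" + password, UTF-16LE encoded, then MD5
        to_hash = (challenge + "-" + password).encode("UTF-16LE")
        return "{0}-{1}".format(challenge, hashlib.md5(to_hash).hexdigest())

    print(calculate_response("1234567z", "secret"))  # "1234567z-" followed by 32 hex chars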
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_actors(self): """ Returns a list of Actor objects for querying SmartHome devices. This is currently the only working method for getting temperature data. """
devices = self.homeautoswitch("getdevicelistinfos") xml = ET.fromstring(devices) actors = [] for device in xml.findall('device'): actors.append(Actor(fritzbox=self, device=device)) return actors
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_actor_by_ain(self, ain): """ Return a actor identified by it's ain or return None """
for actor in self.get_actors(): if actor.actor_id == ain: return actor
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_actor_by_ain(self, ain): """ Return an actor identified by its ain, or None """
assert self.sid, "Not logged in" params = { 'switchcmd': cmd, 'sid': self.sid, } if param is not None: params['param'] = param if ain: params['ain'] = ain url = self.base_url + '/webservices/homeautoswitch.lua' response = self.session.get(url, params=params, timeout=10) response.raise_for_status() return response.text.strip().encode('utf-8')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_switch_actors(self): """ Get information about all actors This needs 1+(5n) requests where n = number of actors registered Deprecated, use get_actors instead. Returns a dict: [ain] = { 'name': Name of actor, 'state': Powerstate (boolean) 'present': Connected to server? (boolean) 'power': Current power consumption in mW 'energy': Used energy in Wh since last energy reset 'temperature': Current environment temperature in celsius } """
actors = {} for ain in self.homeautoswitch("getswitchlist").split(','): actors[ain] = { 'name': self.homeautoswitch("getswitchname", ain), 'state': bool(self.homeautoswitch("getswitchstate", ain)), 'present': bool(self.homeautoswitch("getswitchpresent", ain)), 'power': self.homeautoswitch("getswitchpower", ain), 'energy': self.homeautoswitch("getswitchenergy", ain), 'temperature': self.homeautoswitch("getswitchtemperature", ain), } return actors
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_devices(self): """ Return a list of devices. Deprecated, use get_actors instead. """
url = self.base_url + '/net/home_auto_query.lua' response = self.session.get(url, params={ 'sid': self.sid, 'command': 'AllOutletStates', 'xhr': 0, }, timeout=15) response.raise_for_status() data = response.json() count = int(data["Outlet_count"]) devices = [] for i in range(1, count + 1): device = Device( int(data["DeviceID_{0}".format(i)]), int(data["DeviceConnectState_{0}".format(i)]), int(data["DeviceSwitchState_{0}".format(i)]) ) devices.append(device) return devices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_consumption(self, deviceid, timerange="10"): """ Return all available energy consumption data for the device. You need to divide watt_values by 100 and volt_values by 1000 to get the "real" values. :return: dict """
tranges = ("10", "24h", "month", "year") if timerange not in tranges: raise ValueError( "Unknown timerange. Possible values are: {0}".format(tranges) ) url = self.base_url + "/net/home_auto_query.lua" response = self.session.get(url, params={ 'sid': self.sid, 'command': 'EnergyStats_{0}'.format(timerange), 'id': deviceid, 'xhr': 0, }, timeout=15) response.raise_for_status() data = response.json() result = {} # Single result values values_map = { 'MM_Value_Amp': 'mm_value_amp', 'MM_Value_Power': 'mm_value_power', 'MM_Value_Volt': 'mm_value_volt', 'EnStats_average_value': 'enstats_average_value', 'EnStats_max_value': 'enstats_max_value', 'EnStats_min_value': 'enstats_min_value', 'EnStats_timer_type': 'enstats_timer_type', 'sum_Day': 'sum_day', 'sum_Month': 'sum_month', 'sum_Year': 'sum_year', } for avm_key, py_key in values_map.items(): result[py_key] = int(data[avm_key]) # Stats counts count = int(data["EnStats_count"]) watt_values = [None for i in range(count)] volt_values = [None for i in range(count)] for i in range(1, count + 1): watt_values[i - 1] = int(data["EnStats_watt_value_{}".format(i)]) volt_values[i - 1] = int(data["EnStats_volt_value_{}".format(i)]) result['watt_values'] = watt_values result['volt_values'] = volt_values return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_logs(self): """ Return the system logs since the last reboot. """
assert BeautifulSoup, "Please install bs4 to use this method" url = self.base_url + "/system/syslog.lua" response = self.session.get(url, params={ 'sid': self.sid, 'stylemode': 'print', }, timeout=15) response.raise_for_status() entries = [] tree = BeautifulSoup(response.text) rows = tree.find('table').find_all('tr') for row in rows: columns = row.find_all("td") date = columns[0].string time = columns[1].string message = columns[2].find("a").string merged = "{} {} {}".format(date, time, message.encode("UTF-8")) msg_hash = hashlib.md5(merged).hexdigest() entries.append(LogEntry(date, time, message, msg_hash)) return entries
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def seen_nonce(id, nonce, timestamp): """ Returns True if the Hawk nonce has been seen already. """
key = '{id}:{n}:{ts}'.format(id=id, n=nonce, ts=timestamp) if cache.get(key): log.warning('replay attack? already processed nonce {k}' .format(k=key)) return True else: log.debug('caching nonce {k}'.format(k=key)) cache.set(key, True, # We only need the nonce until the message itself expires. # This also adds a little bit of padding. timeout=getattr(settings, 'HAWK_MESSAGE_EXPIRATION', default_message_expiration) + 5) return False
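seen_nonce above is a replay guard backed by Django's cache: a (id, nonce, timestamp) key is cached slightly longer than the Hawk message expiration, and any repeat lookup is treated as a replay. A minimal in-memory sketch of the same idea using a plain dict instead of the Django cache; the names here are illustrative, not the project's API.

    import time

    _seen = {}

    def seen_nonce(sender_id, nonce, timestamp, ttl=300 + 5):
        # Evict expired entries, then record or reject the nonce key
        now = time.time()
        for key, expires in list(_seen.items()):
            if expires < now:
                del _seen[key]
        key = '{0}:{1}:{2}'.format(sender_id, nonce, timestamp)
        if key in _seen:
            return True           # already processed: likely a replay
        _seen[key] = now + ttl
        return False

    print(seen_nonce('client-1', 'abc123', 1))   # False (first time seen)
    print(seen_nonce('client-1', 'abc123', 1))   # True  (replay)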
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cli(context, host, username, password): """ FritzBox SmartHome Tool \b Provides the following functions: - An easy to use library for querying SmartHome actors - This CLI tool for testing - A carbon client for piping data into graphite """
context.obj = FritzBox(host, username, password)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def actors(context): """Display a list of actors"""
fritz = context.obj fritz.login() for actor in fritz.get_actors(): click.echo("{} ({} {}; AIN {} )".format( actor.name, actor.manufacturer, actor.productname, actor.actor_id, )) if actor.has_temperature: click.echo("Temp: act {} target {}; battery (low): {}".format( actor.temperature, actor.target_temperature, actor.battery_low, )) click.echo("Temp (via get): act {} target {}".format( actor.get_temperature(), actor.get_target_temperature(), ))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def switch_on(context, ain): """Switch an actor's power to ON"""
context.obj.login() actor = context.obj.get_actor_by_ain(ain) if actor: click.echo("Switching {} on".format(actor.name)) actor.switch_on() else: click.echo("Actor not found: {}".format(ain))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def switch_state(context, ain): """Get an actor's power state"""
context.obj.login() actor = context.obj.get_actor_by_ain(ain) if actor: click.echo("State for {} is: {}".format(ain,'ON' if actor.get_state() else 'OFF')) else: click.echo("Actor not found: {}".format(ain))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def switch_toggle(context, ain): """Toggle an actor's power state"""
context.obj.login() actor = context.obj.get_actor_by_ain(ain) if actor: if actor.get_state(): actor.switch_off() click.echo("State for {} is now OFF".format(ain)) else: actor.switch_on() click.echo("State for {} is now ON".format(ain)) else: click.echo("Actor not found: {}".format(ain))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def logs(context, format): """Show system logs since last reboot"""
fritz = context.obj fritz.login() messages = fritz.get_logs() if format == "plain": for msg in messages: merged = "{} {} {}".format(msg.date, msg.time, msg.message.encode("UTF-8")) click.echo(merged) if format == "json": entries = [msg._asdict() for msg in messages] click.echo(json.dumps({ "entries": entries, }))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_temperature(self, temp): """ Sets the temperature in celcius """
# Temperature is sent to fritz.box in a slightly odd encoding param = 16 + ((temp - 8) * 2) if param < 16: param = 253 logger.info("Actor " + self.name + ": Temperature control set to off") elif param >= 56: param = 254 logger.info("Actor " + self.name + ": Temperature control set to on") else: logger.info("Actor " + self.name + ": Temperature control set to " + str(temp)) return self.box.homeautoswitch("sethkrtsoll", self.actor_id, param)
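The arithmetic above encodes a Celsius target into the AVM parameter scale: param = 16 + (temp - 8) * 2, which reduces to temp * 2, with 253 and 254 acting as "off" and "on" sentinels. A small self-contained sketch of the encoding and a hypothetical inverse (the decode helper is an assumption, not part of the library):

    def encode_temperature(temp):
        # Same mapping as set_temperature above: param = temp * 2 on the AVM scale
        param = 16 + (temp - 8) * 2
        if param < 16:
            return 253   # sentinel: temperature control off
        if param >= 56:
            return 254   # sentinel: temperature control on
        return param

    def decode_temperature(param):
        # Hypothetical inverse, for illustration only
        if param in (253, 254):
            return None
        return (param - 16) / 2 + 8

    assert encode_temperature(20) == 40
    assert decode_temperature(40) == 20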
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_builder_openshift_url(self): """ url of OpenShift where builder will connect """
key = "builder_openshift_url" url = self._get_deprecated(key, self.conf_section, key) if url is None: logger.warning("%r not found, falling back to get_openshift_base_uri()", key) url = self.get_openshift_base_uri() return url
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(self): """ Extract tabular data as |TableData| instances from a CSV file. |load_source_desc_file| :return: Loaded table data. |load_table_name_desc| =================== ======================================== Format specifier Value after the replacement =================== ======================================== ``%(filename)s`` |filename_desc| ``%(format_name)s`` ``"csv"`` ``%(format_id)s`` |format_id_desc| ``%(global_id)s`` |global_id| =================== ======================================== :rtype: |TableData| iterator :raises pytablereader.DataError: If the CSV data is invalid. .. seealso:: :py:func:`csv.reader` """
self._validate() self._logger.logging_load() self.encoding = get_file_encoding(self.source, self.encoding) if six.PY3: self._csv_reader = csv.reader( io.open(self.source, "r", encoding=self.encoding), delimiter=self.delimiter, quotechar=self.quotechar, strict=True, skipinitialspace=True, ) else: self._csv_reader = csv.reader( _utf_8_encoder(io.open(self.source, "r", encoding=self.encoding)), delimiter=self.delimiter, quotechar=self.quotechar, strict=True, skipinitialspace=True, ) formatter = CsvTableFormatter(self._to_data_matrix()) formatter.accept(self) return formatter.to_table_data()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(self):
    """
    Extract tabular data as |TableData| instances from a CSV text object.
    |load_source_desc_text|

    :return:
        Loaded table data.
        |load_table_name_desc|

        ===================  ========================================
        Format specifier     Value after the replacement
        ===================  ========================================
        ``%(filename)s``     ``""``
        ``%(format_name)s``  ``"csv"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ========================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the CSV data is invalid.

    .. seealso::
        :py:func:`csv.reader`
    """

self._validate()
self._logger.logging_load()

self._csv_reader = csv.reader(
    six.StringIO(self.source.strip()),
    delimiter=self.delimiter,
    quotechar=self.quotechar,
    strict=True,
    skipinitialspace=True,
)

formatter = CsvTableFormatter(self._to_data_matrix())
formatter.accept(self)

return formatter.to_table_data()
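A hedged usage sketch for the text loader above (the pytablereader module name and the CsvTableTextLoader class follow the upstream project; attribute names on the returned TableData objects can differ between versions, so the example only prints the object):

import pytablereader as ptr

loader = ptr.CsvTableTextLoader("a,b\n1,2\n3,4")
loader.table_name = "sample_table"
for table_data in loader.load():
    # Each TableData carries the table name, headers and rows parsed from the CSV text.
    print(table_data)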
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_params(self, **kwargs):
    """
    set parameters according to specification

    these parameters are accepted:

    :param pulp_secret: str, resource name of pulp secret
    :param koji_target: str, koji tag with packages used to build the image
    :param kojiroot: str, URL from which koji packages are fetched
    :param kojihub: str, URL of the koji hub
    :param koji_certs_secret: str, resource name of secret that holds the koji certificates
    :param koji_task_id: int, Koji Task that created this build config
    :param flatpak: if we should build a Flatpak OCI Image
    :param filesystem_koji_task_id: int, Koji Task that created the base filesystem
    :param pulp_registry: str, name of pulp registry in dockpulp.conf
    :param sources_command: str, command used to fetch dist-git sources
    :param architecture: str, architecture we are building for
    :param vendor: str, vendor name
    :param build_host: str, host the build will run on or None for auto
    :param authoritative_registry: str, the docker registry authoritative for this image
    :param distribution_scope: str, distribution scope for this image
                               (private, authoritative-source-only, restricted, public)
    :param use_auth: bool, use auth from atomic-reactor?
    :param platform_node_selector: dict, a nodeselector for a specific platform
    :param platform_descriptors: dict, platforms and their architectures and enable_v1 settings
    :param scratch_build_node_selector: dict, a nodeselector for scratch builds
    :param explicit_build_node_selector: dict, a nodeselector for explicit builds
    :param auto_build_node_selector: dict, a nodeselector for auto builds
    :param isolated_build_node_selector: dict, a nodeselector for isolated builds
    :param is_auto: bool, indicates if build is auto build
    :param parent_images_digests: dict, mapping image names with tags to platform
                                  specific digests, example:
                                  {'registry.fedorahosted.org/fedora:29': { }
    """

# Here we cater to the koji "scratch" build type, this will disable
# all plugins that might cause importing of data to koji
self.scratch = kwargs.pop('scratch', False)

# When true, it indicates build was automatically started by
# OpenShift via a trigger, for instance ImageChangeTrigger
self.is_auto = kwargs.pop('is_auto', False)

# An isolated build is meant to patch a certain release and not
# update transient tags in container registry
self.isolated = kwargs.pop('isolated', False)

self.validate_build_variation()

self.base_image = kwargs.get('base_image')
self.platform_node_selector = kwargs.get('platform_node_selector', {})
self.platform_descriptors = kwargs.get('platform_descriptors', {})
self.scratch_build_node_selector = kwargs.get('scratch_build_node_selector', {})
self.explicit_build_node_selector = kwargs.get('explicit_build_node_selector', {})
self.auto_build_node_selector = kwargs.get('auto_build_node_selector', {})
self.isolated_build_node_selector = kwargs.get('isolated_build_node_selector', {})

logger.debug("setting params '%s' for %s", kwargs, self.spec)
self.spec.set_params(**kwargs)
self.osbs_api = kwargs.pop('osbs_api')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def has_ist_trigger(self):
    """Return True if this BuildConfig has ImageStreamTag trigger."""

triggers = self.template['spec'].get('triggers', [])
if not triggers:
    return False

for trigger in triggers:
    if trigger['type'] == 'ImageChange' and \
            trigger['imageChange']['from']['kind'] == 'ImageStreamTag':
        return True

return False
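For reference, a minimal trigger entry that the check above would match (the field names follow the checks in the code; the image stream tag name is hypothetical):

trigger = {
    'type': 'ImageChange',
    'imageChange': {
        'from': {
            'kind': 'ImageStreamTag',
            'name': 'base-image:latest',  # hypothetical tag
        },
    },
}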
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_secret_for_plugin(self, secret, plugin=None, mount_path=None):
    """
    Sets secret for plugin, if no plugin specified it will also set
    general secret

    :param secret: str, secret name
    :param plugin: tuple, (plugin type, plugin name, argument name)
    :param mount_path: str, mount path of secret
    """

has_plugin_conf = False
if plugin is not None:
    has_plugin_conf = self.dj.dock_json_has_plugin_conf(plugin[0], plugin[1])

if 'secrets' in self.template['spec']['strategy']['customStrategy']:
    if not plugin or has_plugin_conf:
        custom = self.template['spec']['strategy']['customStrategy']
        if mount_path:
            secret_path = mount_path
        else:
            secret_path = os.path.join(SECRETS_PATH, secret)

        logger.info("Configuring %s secret at %s", secret, secret_path)
        existing = [secret_mount for secret_mount in custom['secrets']
                    if secret_mount['secretSource']['name'] == secret]
        if existing:
            logger.debug("secret %s already set", secret)
        else:
            custom['secrets'].append({
                'secretSource': {
                    'name': secret,
                },
                'mountPath': secret_path,
            })

        # there's no need to set args if no plugin secret specified
        # this is used in tag_and_push plugin, as it sets secret path
        # for each registry separately
        if plugin and plugin[2] is not None:
            self.dj.dock_json_set_arg(*(plugin + (secret_path,)))
    else:
        logger.debug("not setting secret for unused plugin %s", plugin[1])
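For illustration, the entry appended to customStrategy['secrets'] above has this shape ('my-secret' is a hypothetical secret name; the mount path is whatever os.path.join(SECRETS_PATH, secret) evaluates to when no mount_path is given):

secret_entry = {
    'secretSource': {
        'name': 'my-secret',
    },
    'mountPath': os.path.join(SECRETS_PATH, 'my-secret'),
}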
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def adjust_for_triggers(self):
    """Remove trigger-related plugins when needed

    If there are no triggers defined, it's assumed the feature is disabled
    and all trigger-related plugins are removed.

    If there are triggers defined, and this is a custom base image,
    some trigger-related plugins do not apply. Additionally, this method
    ensures that custom base images never have triggers since triggering
    a base image rebuild is not a valid scenario.
    """

triggers = self.template['spec'].get('triggers', [])
remove_plugins = [
    ("prebuild_plugins", "check_and_set_rebuild"),
    ("prebuild_plugins", "stop_autorebuild_if_disabled"),
]

should_remove = False
if triggers and (self.is_custom_base_image() or self.is_from_scratch_image()):
    if self.is_custom_base_image():
        msg = "removing %s from request because custom base image"
    elif self.is_from_scratch_image():
        msg = 'removing %s from request because FROM scratch image'

    del self.template['spec']['triggers']
    should_remove = True
elif not triggers:
    msg = "removing %s from request because there are no triggers"
    should_remove = True

if should_remove:
    for when, which in remove_plugins:
        logger.info(msg, which)
        self.dj.remove_plugin(when, which)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def adjust_for_custom_base_image(self):
    """
    Disable plugins to handle builds depending on whether
    or not this is a build from a custom base image.
    """

plugins = []
if self.is_custom_base_image():
    # Plugins irrelevant to building base images.
    plugins.append(("prebuild_plugins", "pull_base_image"))
    plugins.append(("prebuild_plugins", "koji_parent"))
    plugins.append(("prebuild_plugins", "inject_parent_image"))
    msg = "removing %s from custom image build request"
else:
    # Plugins not needed for building non base images.
    plugins.append(("prebuild_plugins", "add_filesystem"))
    msg = "removing %s from non custom image build request"

for when, which in plugins:
    logger.info(msg, which)
    self.dj.remove_plugin(when, which)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_koji(self):
    """
    if there is yum repo specified, don't pick stuff from koji
    """

phase = 'prebuild_plugins'
plugin = 'koji'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
    return

if self.spec.yum_repourls.value:
    logger.info("removing koji from request "
                "because there is yum repo specified")
    self.dj.remove_plugin(phase, plugin)
elif not (self.spec.koji_target.value and
          self.spec.kojiroot.value and
          self.spec.kojihub.value):
    logger.info("removing koji from request as not specified")
    self.dj.remove_plugin(phase, plugin)
else:
    self.dj.dock_json_set_arg(phase, plugin, "target", self.spec.koji_target.value)
    self.dj.dock_json_set_arg(phase, plugin, "root", self.spec.kojiroot.value)
    self.dj.dock_json_set_arg(phase, plugin, "hub", self.spec.kojihub.value)
    if self.spec.proxy.value:
        self.dj.dock_json_set_arg(phase, plugin, "proxy", self.spec.proxy.value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_sendmail(self):
    """
    if we have smtp_host and smtp_from, configure sendmail plugin,
    else remove it
    """

phase = 'exit_plugins'
plugin = 'sendmail'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
    return

if self.spec.smtp_host.value and self.spec.smtp_from.value:
    self.dj.dock_json_set_arg(phase, plugin, 'url',
                              self.spec.builder_openshift_url.value)
    self.dj.dock_json_set_arg(phase, plugin, 'smtp_host',
                              self.spec.smtp_host.value)
    self.dj.dock_json_set_arg(phase, plugin, 'from_address',
                              self.spec.smtp_from.value)
else:
    logger.info("removing sendmail from request, "
                "requires smtp_host and smtp_from")
    self.dj.remove_plugin(phase, plugin)
    return

if self.spec.kojihub.value and self.spec.kojiroot.value:
    self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', self.spec.kojihub.value)
    self.dj.dock_json_set_arg(phase, plugin, "koji_root", self.spec.kojiroot.value)

if self.spec.smtp_to_submitter.value:
    self.dj.dock_json_set_arg(phase, plugin, 'to_koji_submitter',
                              self.spec.smtp_to_submitter.value)
if self.spec.smtp_to_pkgowner.value:
    self.dj.dock_json_set_arg(phase, plugin, 'to_koji_pkgowner',
                              self.spec.smtp_to_pkgowner.value)
if self.spec.smtp_additional_addresses.value:
    self.dj.dock_json_set_arg(phase, plugin, 'additional_addresses',
                              self.spec.smtp_additional_addresses.value)
if self.spec.smtp_error_addresses.value:
    self.dj.dock_json_set_arg(phase, plugin, 'error_addresses',
                              self.spec.smtp_error_addresses.value)
if self.spec.smtp_email_domain.value:
    self.dj.dock_json_set_arg(phase, plugin, 'email_domain',
                              self.spec.smtp_email_domain.value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_fetch_maven_artifacts(self):
    """Configure fetch_maven_artifacts plugin"""

phase = 'prebuild_plugins'
plugin = 'fetch_maven_artifacts'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
    return

koji_hub = self.spec.kojihub.value
koji_root = self.spec.kojiroot.value

if not koji_hub and not koji_root:
    logger.info('Removing %s because kojihub and kojiroot were not specified', plugin)
    self.dj.remove_plugin(phase, plugin)
    return

self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', koji_hub)
self.dj.dock_json_set_arg(phase, plugin, "koji_root", koji_root)

if self.spec.artifacts_allowed_domains.value:
    self.dj.dock_json_set_arg(phase, plugin, 'allowed_domains',
                              self.spec.artifacts_allowed_domains.value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_pulp_pull(self):
    """
    If a pulp registry is specified, use pulp_pull plugin
    """

# pulp_pull is a multi-phase plugin
phases = ('postbuild_plugins', 'exit_plugins')
plugin = 'pulp_pull'
for phase in phases:
    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        continue

    pulp_registry = self.spec.pulp_registry.value
    if not pulp_registry:
        logger.info("removing %s from request, requires pulp_registry", plugin)
        self.dj.remove_plugin(phase, plugin)
        continue

    if not self.spec.kojihub.value:
        logger.info('Removing %s because no kojihub was specified', plugin)
        self.dj.remove_plugin(phase, plugin)
        continue

    if self.spec.prefer_schema1_digest.value is not None:
        self.dj.dock_json_set_arg(phase, 'pulp_pull', 'expect_v2schema2',
                                  not self.spec.prefer_schema1_digest.value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_pulp_sync(self):
    """
    If a pulp registry is specified, use the pulp plugin as well as the
    delete_from_registry to delete the image after sync
    """

if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):
    return

pulp_registry = self.spec.pulp_registry.value

# Find which registry to use
docker_registry = None
registry_secret = None
registries = zip_longest(self.spec.registry_uris.value,
                         self.spec.registry_secrets.value)
for registry, secret in registries:
    if registry.version == 'v2':
        # First specified v2 registry is the one we'll tell pulp
        # to sync from. Keep the http prefix -- pulp wants it.
        docker_registry = registry.uri
        registry_secret = secret
        logger.info("using docker v2 registry %s for pulp_sync", docker_registry)
        break

if pulp_registry and docker_registry:
    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'pulp_registry_name', pulp_registry)
    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'docker_registry', docker_registry)

    if registry_secret:
        self.set_secret_for_plugin(registry_secret,
                                   plugin=('postbuild_plugins', 'pulp_sync',
                                           'registry_secret_path'))

    # Verify we have a pulp secret
    if self.spec.pulp_secret.value is None:
        raise OsbsValidationException("Pulp registry specified "
                                      "but no auth config")

    source_registry = self.spec.source_registry_uri.value
    perform_delete = (source_registry is None or
                      source_registry.docker_uri != registry.docker_uri)
    if perform_delete:
        push_conf = self.dj.dock_json_get_plugin_conf('exit_plugins',
                                                      'delete_from_registry')
        args = push_conf.setdefault('args', {})
        delete_registries = args.setdefault('registries', {})
        placeholder = '{{REGISTRY_URI}}'

        # use passed in params like 'insecure' if available
        if placeholder in delete_registries:
            regdict = delete_registries[placeholder].copy()
            del delete_registries[placeholder]
        else:
            regdict = {}

        if registry_secret:
            regdict['secret'] = \
                os.path.join(SECRETS_PATH, registry_secret)
            # tag_and_push configured the registry secret, no need to set it again

        delete_registries[docker_registry] = regdict

        self.dj.dock_json_set_arg('exit_plugins', 'delete_from_registry',
                                  'registries', delete_registries)
    else:
        logger.info("removing delete_from_registry from request, "
                    "source and target registry are identical")
        self.dj.remove_plugin("exit_plugins", "delete_from_registry")
else:
    # If no pulp registry is specified, don't run the pulp plugin
    logger.info("removing pulp_sync+delete_from_registry from request, "
                "requires pulp_registry and a v2 registry")
    self.dj.remove_plugin("postbuild_plugins", "pulp_sync")
    self.dj.remove_plugin("exit_plugins", "delete_from_registry")
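Illustrative shape of the 'registries' mapping handed to delete_from_registry above; the registry URI and secret name are hypothetical, and the 'secret' key only appears when a registry secret was configured:

delete_registries = {
    'https://registry.example.com:8443': {
        'secret': os.path.join(SECRETS_PATH, 'v2-registry-secret'),
    },
}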
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_pulp_tag(self):
    """
    Configure the pulp_tag plugin.
    """

if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_tag'):
    return

pulp_registry = self.spec.pulp_registry.value
if pulp_registry:
    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_tag',
                              'pulp_registry_name', pulp_registry)

    # Verify we have either a secret or username/password
    if self.spec.pulp_secret.value is None:
        conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins', 'pulp_tag')
        args = conf.get('args', {})
        if 'username' not in args:
            raise OsbsValidationException("Pulp registry specified "
                                          "but no auth config")
else:
    # If no pulp registry is specified, don't run the pulp plugin
    logger.info("removing pulp_tag from request, "
                "requires pulp_registry")
    self.dj.remove_plugin("postbuild_plugins", "pulp_tag")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_group_manifests(self):
    """
    Configure the group_manifests plugin. Group is always set to false for now.
    """

if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'group_manifests'):
    return

push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins', 'group_manifests')
args = push_conf.setdefault('args', {})
# modify registries in place
registries = args.setdefault('registries', {})
placeholder = '{{REGISTRY_URI}}'
if placeholder in registries:
    for registry, secret in zip_longest(self.spec.registry_uris.value,
                                        self.spec.registry_secrets.value):
        if not registry.uri:
            continue

        regdict = registries[placeholder].copy()
        regdict['version'] = registry.version
        if secret:
            regdict['secret'] = os.path.join(SECRETS_PATH, secret)

        registries[registry.docker_uri] = regdict

    del registries[placeholder]

self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests', 'group',
                          self.spec.group_manifests.value)

goarch = {}
for platform, architecture in self.platform_descriptors.items():
    goarch[platform] = architecture['architecture']
self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests', 'goarch', goarch)
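A sketch of the goarch mapping built above from platform_descriptors (the platform and architecture names are illustrative, not taken from the source):

platform_descriptors = {
    'x86_64': {'architecture': 'amd64', 'enable_v1': True},
    'aarch64': {'architecture': 'arm64'},
}
goarch = {platform: descriptor['architecture']
          for platform, descriptor in platform_descriptors.items()}
# goarch == {'x86_64': 'amd64', 'aarch64': 'arm64'}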
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_customizations(self):
    """
    Customize prod_inner for site specific customizations
    """

disable_plugins = self.customize_conf.get('disable_plugins', [])
if not disable_plugins:
    logger.debug("No site-specific plugins to disable")
else:
    for plugin_dict in disable_plugins:
        try:
            self.dj.remove_plugin(
                plugin_dict['plugin_type'],
                plugin_dict['plugin_name']
            )
            logger.debug(
                "site-specific plugin disabled -> Type:{} Name:{}".format(
                    plugin_dict['plugin_type'],
                    plugin_dict['plugin_name']
                )
            )
        except KeyError:
            # Malformed config
            logger.debug("Invalid custom configuration found for disable_plugins")

enable_plugins = self.customize_conf.get('enable_plugins', [])
if not enable_plugins:
    logger.debug("No site-specific plugins to enable")
else:
    for plugin_dict in enable_plugins:
        try:
            self.dj.add_plugin(
                plugin_dict['plugin_type'],
                plugin_dict['plugin_name'],
                plugin_dict['plugin_args']
            )
            logger.debug(
                "site-specific plugin enabled -> Type:{} Name:{} Args: {}".format(
                    plugin_dict['plugin_type'],
                    plugin_dict['plugin_name'],
                    plugin_dict['plugin_args']
                )
            )
        except KeyError:
            # Malformed config
            logger.debug("Invalid custom configuration found for enable_plugins")
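A hypothetical customize_conf illustrating the two keys read above (the plugin names and arguments are made up; only the 'disable_plugins'/'enable_plugins' structure follows the code):

customize_conf = {
    "disable_plugins": [
        {"plugin_type": "postbuild_plugins", "plugin_name": "some_site_plugin"},
    ],
    "enable_plugins": [
        {"plugin_type": "prebuild_plugins",
         "plugin_name": "another_site_plugin",
         "plugin_args": {"option": "value"}},
    ],
}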
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setup_json_capture(osbs, os_conf, capture_dir):
    """
    Only used for setting up the testing framework.
    """

try:
    os.mkdir(capture_dir)
except OSError:
    pass
finally:
    osbs.os._con.request = ResponseSaver(capture_dir,
                                         os_conf.get_openshift_api_uri(),
                                         os_conf.get_k8s_api_uri(),
                                         osbs.os._con.request).request
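A minimal sketch of what a ResponseSaver-style wrapper could look like (the real helper lives elsewhere in the test framework; this is only to show why the connection's .request attribute is replaced above):

class ResponseSaver(object):
    def __init__(self, capture_dir, openshift_api_uri, k8s_api_uri, wrapped_request):
        self.capture_dir = capture_dir
        self.openshift_api_uri = openshift_api_uri
        self.k8s_api_uri = k8s_api_uri
        self.wrapped_request = wrapped_request

    def request(self, url, *args, **kwargs):
        # Delegate to the original connection method, then persist the
        # response under capture_dir for later replay in tests.
        response = self.wrapped_request(url, *args, **kwargs)
        # ... write the response body to a file in self.capture_dir here ...
        return response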