text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def handle_line(self, obj):
    """Handle a line event.

    Display one line of dialogue, then block for a period of time
    derived from the length of the line.

    :param obj: A :py:class:`~turberfield.dialogue.model.Model.Line` object.
    :return: The supplied object.
    """
    if obj.persona is None:
        return obj

    # A persona may not expose a name; fall back to an empty string.
    speaker = getattr(obj.persona, "_name", "")
    print(
        textwrap.indent(
            "{t.normal}{name}".format(name=speaker, t=self.terminal),
            " " * 2
        ),
        end="\n",
        file=self.terminal.stream
    )
    print(
        textwrap.indent(
            "{t.normal}{obj.text}".format(obj=obj, t=self.terminal),
            " " * 10
        ),
        end="\n" * 2,
        file=self.terminal.stream
    )

    # Pause plus one dwell per word (approximated by counting spaces).
    time.sleep(self.pause + self.dwell * obj.text.count(" "))
    return obj
def handle_memory(self, obj):
    """Handle a memory event.

    Write a record of state information, plus an optional note, to the
    internal database.

    :param obj: A :py:class:`~turberfield.dialogue.model.Model.Memory` object.
    :return: The supplied object.
    """
    if obj.subject is None:
        return obj

    # The connection doubles as a transaction context manager.
    with self.con as db:
        SchemaBase.note(
            db,
            obj.subject,
            obj.state,
            obj.object,
            text=obj.text,
            html=obj.html,
        )
    return obj
def handle_property(self, obj):
    """Handle a property event.

    Set an attribute on an object if the event requires it, then echo
    the assignment to the terminal.

    :param obj: A :py:class:`~turberfield.dialogue.model.Model.Property` object.
    :return: The supplied object.
    """
    if obj.object is not None:
        try:
            setattr(obj.object, obj.attr, obj.val)
        except AttributeError as e:
            # BUGFIX: exception args may be empty or contain non-strings;
            # the original joined them raw and could raise TypeError here.
            self.log.error(". ".join(str(arg) for arg in e.args or (e,)))
    try:
        print(
            "{t.dim}{obj.object._name}.{obj.attr} = {obj.val!s}{t.normal}".format(
                obj=obj, t=self.terminal
            ),
            end="\n" * 2,
            file=self.terminal.stream
        )
    except AttributeError as e:
        # The target may lack a _name attribute; log and carry on.
        self.log.error(". ".join(str(arg) for arg in e.args or (e,)))
    return obj
def handle_scene(self, obj):
    """Handle a scene event.

    Print the scene heading, then apply a blocking wait at the start of
    the scene.

    :param obj: A :py:class:`~turberfield.dialogue.model.Model.Shot` object.
    :return: The supplied object.
    """
    heading = "{t.dim}{scene}{t.normal}".format(
        scene=obj.scene.capitalize(), t=self.terminal
    )
    print(heading, end="\n" * 3, file=self.terminal.stream)
    time.sleep(self.pause)
    return obj
def handle_shot(self, obj):
    """Handle a shot event.

    Print the shot name as a heading.

    :param obj: A :py:class:`~turberfield.dialogue.model.Model.Shot` object.
    :return: The supplied object.
    """
    heading = "{t.dim}{shot}{t.normal}".format(
        shot=obj.name.capitalize(), t=self.terminal
    )
    print(heading, end="\n" * 3, file=self.terminal.stream)
    return obj
def process_request_thread(self, mainthread):
    """obtain request from queue instead of directly from server socket"""
    # Worker loop: each iteration services one queued request.  The
    # worker retires (and asks the main thread to start a replacement)
    # when it exceeds its maximum lifetime or request count.
    life_time = time.time()
    nb_requests = 0
    while not mainthread.killed():
        if self.max_life_time > 0:
            # Bounded lifetime: retire this worker once it is too old.
            if (time.time() - life_time) >= self.max_life_time:
                mainthread.add_worker(1)
                return
            try:
                # Poll with a timeout so the kill flag is re-checked at
                # least every 0.5 seconds.
                SocketServer.ThreadingTCPServer.process_request_thread(
                    self, *self.requests.get(True, 0.5))
            except Queue.Empty:
                continue
        else:
            # No lifetime limit: block until a request is available.
            SocketServer.ThreadingTCPServer.process_request_thread(
                self, *self.requests.get())
        # NOTE(review): nb_requests is logged before being incremented, so
        # the debug line reports the count prior to this request.
        LOG.debug("nb_requests: %d, max_requests: %d",
                  nb_requests, self.max_requests)
        nb_requests += 1
        if self.max_requests > 0 and nb_requests >= self.max_requests:
            mainthread.add_worker(1)
            return
def build_distribution():
    """Build distributions of the code."""
    result = invoke.run('python setup.py sdist bdist_egg bdist_wheel',
                        warn=True, hide=True)
    if not result.ok:
        print('[{}ERROR{}] Something broke trying to package your '
              'code...'.format(ERROR_COLOR, RESET_COLOR))
        print(result.stderr)
        sys.exit(1)
    print("[{}GOOD{}] Distribution built without errors."
          .format(GOOD_COLOR, RESET_COLOR))
def other_dependencies(ctx, server, environment):
    """Install things that need to be in place before installing the main package."""
    if 'extra_packages' in ctx.releaser:
        server = server.lower()
        extra_pkgs = []
        # Pick the extra-package list that matches the target server.
        if server in ["local"]:
            if 'local' in ctx.releaser.extra_packages:
                extra_pkgs.extend(ctx.releaser.extra_packages.local)
        elif server in ["testpypi", "pypitest"]:
            # these are packages not available on the test server, so install them
            # off the regular pypi server
            if 'test' in ctx.releaser.extra_packages and \
                    ctx.releaser.extra_packages.test is not None:
                extra_pkgs.extend(ctx.releaser.extra_packages.test)
        elif server in ["pypi"]:
            if 'pypi' in ctx.releaser.extra_packages and \
                    ctx.releaser.extra_packages.pypi is not None:
                extra_pkgs.extend(ctx.releaser.extra_packages.pypi)
        else:
            print("** Nothing more to install **")
        if extra_pkgs:
            print('** Other Dependencies, based on server', server, '**')
        # Install each extra package into the virtualenv's pip; abort on
        # the first failure.
        for pkg in extra_pkgs:
            result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install {3}'
                                .format(os.sep, environment, PIP_EXT, pkg),
                                hide=True)
            if result.ok:
                print('{}[{}GOOD{}] Installed {}'.format("", GOOD_COLOR,
                                                         RESET_COLOR, pkg))
            else:
                print('{}[{}ERROR{}] Something broke trying to install '
                      'package: {}'.format("", ERROR_COLOR, RESET_COLOR, pkg))
                print(result.stderr)
                sys.exit(1)
def check_local_install(ctx, version, ext, server="local"):
    """
    Upload and install works?

    Uploads a distribution to PyPI, and then tests to see if I can
    download and install it.

    Returns:
        str: string summazing operation
    """
    here = Path(ctx.releaser.here).resolve()
    dist_dir = here / 'dist'
    all_files = list(dist_dir.glob('*.{}'.format(ext)))
    # Pick the most recently modified distribution file of this type.
    the_file = all_files[0]
    for f in all_files[1:]:
        if f.stat().st_mtime > the_file.stat().st_mtime:
            the_file = f
    # this is the latest generated file of the given version

    environment = 'env-{}-{}-{}'.format(version, ext, server)

    if server == "local":
        pass
    else:
        # upload to server
        print("** Uploading to server **")
        cmd = 'twine upload {}'.format(the_file)
        # for PyPI, let twine pick the server
        if server != "pypi":
            cmd = cmd + ' -r {}'.format(server)
        result = invoke.run(cmd, warn=True)
        if result.failed:
            print(textwrap.fill("[{}ERROR{}] Something broke trying to upload "
                                "your package. This will be the case if you "
                                "have already uploaded it before. To upload "
                                "again, use a different version number "
                                "(or a different build by including a '+' "
                                "suffix to your version number)."
                                .format(ERROR_COLOR, RESET_COLOR),
                                width=text.get_terminal_size().columns - 1,
                                subsequent_indent=' '*8))
            # print(result.stderr)

    # remove directory if it exists
    if (here / 'env' / environment).exists():
        shutil.rmtree('env' + os.sep + environment)
    # Build a fresh virtualenv and pre-install anything the package needs.
    invoke.run('python -m venv env{}{}'.format(os.sep, environment))
    other_dependencies(ctx, server, environment)
    if server == "local":
        result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install {3} --no-cache'
                            .format(os.sep, environment, '.exe', the_file),
                            hide=True)
    else:
        # print(" **Install from server**")
        result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install -i {3} '
                            '{4}=={5} --no-cache'
                            .format(os.sep, environment, '.exe',
                                    server_url(server),
                                    ctx.releaser.module_name, version),
                            hide=True)
    if result.failed:
        print('[{}ERROR{}] Something broke trying to install your package.'
              .format(ERROR_COLOR, RESET_COLOR))
        print(result.stderr)
        sys.exit(1)

    print("** Test version of installed package **")
    # Import the freshly installed module and read back its version.
    result = invoke.run('env{0}{1}{0}Scripts{0}python{2} -c '
                        'exec("""import {3}\\nprint({3}.__version__)""")'
                        .format(os.sep, environment, '.exe',
                                (ctx.releaser.module_name).strip()))
    test_version = result.stdout.strip()
    # print(test_version, type(test_version), type(expected_version))
    if Version(test_version) == version:
        results = '{}{} install {} works!{}'.format(GOOD_COLOR, server, ext,
                                                    RESET_COLOR)
    else:
        results = '{}{} install {} broken{}'.format(ERROR_COLOR, server, ext,
                                                    RESET_COLOR)
    print(results)
    return results
def iscm_md_update_dict(self, keypath, data):
    """
    Update a metadata dictionary entry.

    :param keypath: Dot-separated path into the metadata dict; missing
        intermediate dicts are created on the way down.
    :param data: A dict merged (via ``dict.update``) into the entry at
        ``keypath``.
    """
    current = self.metadata
    # BUGFIX: str.split / "in" replace the Python-2-only string.split()
    # and dict.has_key(), which no longer exist in Python 3 (and the
    # method forms work identically on Python 2).
    for key in keypath.split("."):
        if key not in current:
            current[key] = {}
        current = current[key]
    current.update(data)
def iscm_md_append_array(self, arraypath, member):
    """
    Append a member to a metadata array entry.

    :param arraypath: Dot-separated path; the last component names the
        array, intermediate dicts are created as needed.
    :param member: Value appended to the array.
    :raises KeyError: if the entry at ``arraypath`` exists but is not a
        list.
    """
    # BUGFIX: str.split / "in" replace the Python-2-only string.split()
    # and dict.has_key(), which no longer exist in Python 3.
    parts = arraypath.split(".")
    array_key = parts.pop()
    current = self.metadata
    for key in parts:
        if key not in current:
            current[key] = {}
        current = current[key]
    if array_key not in current:
        current[array_key] = []
    # isinstance is the idiomatic form of the original type(...) == list.
    if not isinstance(current[array_key], list):
        raise KeyError("%s doesn't point to an array" % arraypath)
    current[array_key].append(member)
def context_lookup(self, vars):
    """
    Lookup the variables in the provided dictionary, resolve with
    entries in the context
    """
    # Resolve vars itself first, in case the whole mapping is an
    # expression (repeat until a plain mapping is obtained).
    while isinstance(vars, IscmExpr):
        vars = vars.resolve(self.context)
    #
    # Then resolve, in place, each individual value that is still an
    # expression; plain values pass through untouched.
    for (k, v) in vars.items():
        if isinstance(v, IscmExpr):
            vars[k] = v.resolve(self.context)
    return vars
def apply_to(self, launchable):
    """
    Apply this ISCM configuration into a launchable resource, such as an
    EC2 instance or an AutoScalingGroup LaunchConfig.
    """
    # Update user data
    if launchable.get_property("UserData") is not None:
        raise NotImplementedError("It's not yet supported to append SCM to existing userdata")
    # Assemble a bash bootstrap script as a CloudFormation Fn::Join so
    # that intrinsic references inside userdata_elems survive.
    user_data = {
        "Fn::Base64": {
            "Fn::Join": ["", [
                # Shell prologue with logging helpers.
                "\n".join([
                    r'#!/bin/bash',
                    r'FATAL() { code=$1; shift; echo "[FATAL] $*" >&2; exit $code; }',
                    r'ERROR() { echo "[ERROR] $*" >&2 ; }',
                    r'WARN() { echo "[WARNING] $*" >&2 ; }',
                    r'INFO() { echo "[INFO] $*" >&2 ; }',
                    "",
                ])
            ] + (self.wc_handle is not None and [
                # Optional wait-condition handle for cfn-signal below.
                cfnjoin("",
                        r'ISCM_WCHANDLE_URL="', self.wc_handle, '"\n')
            ] or []) + [
                "\n".join([
                    r'{',
                    r'INFO "CloudCast ISCM booting on $(date)"',
                    "\n\n"
                ])
            ] + self.userdata_elems + [
                # Epilogue: signal the wait condition (when configured)
                # with the script's exit status, and tee all output to a
                # log file on the instance.
                "\n".join([
                    "",
                    r'iscm_result=$?',
                    r'[ -n "$ISCM_WCHANDLE_URL" ] && [ -n "$(which cfn-signal)" ] && cfn-signal -e $iscm_result $ISCM_WCHANDLE_URL',
                    '\nINFO "CloudCast ISCM successfully completed on $(date)"',
                    '} 2>&1 | tee -a /iscm.log\n'
                ])
            ]]
        }
    }
    launchable.add_property("UserData", user_data)

    # Set meta data keys
    for k in self.metadata:
        if launchable.get_metadata_key(k) is not None:
            raise NotImplementedError("It's not yet supported to append to existing metadata keys")
        launchable.add_metadata_key(k, self.metadata[k])
def register_uri_backend(uri_scheme, create_method, module, c14n_uri_method,
                         escape, cast, is_connected):
    """
    This method is intended to be used by backends only.

    It lets them register their services, identified by the URI scheme,
    at import time.  The associated create_method must take one
    parameter: the complete requested RFC 3986 compliant URI.  The
    associated module must be compliant with DBAPI v2.0 but will not be
    directly used for other purposes than compatibility testing.
    c14n_uri_method must be a function that takes one string argument
    (the same form that the one that would be passed to connect_by_uri)
    and returns its canonicalized form in an implementation dependant
    way.  It can also be None, in which case the behavior is that of
    the identity function.  escape must be a function that takes one
    unescaped column name and returns an escaped version for use in an
    SQL query for this backend.

    :raises NotImplementedError: if the module is obviously not DBAPI
        2.0 compatible.
    """
    try:
        delta_api = __compare_api_level(module.apilevel, any_apilevel)
        mod_paramstyle = module.paramstyle
        mod_threadsafety = module.threadsafety
    except (AttributeError, NameError):
        # BUGFIX: a module that lacks apilevel/paramstyle/threadsafety
        # raises AttributeError; the original caught only NameError, so
        # non-DBAPI modules escaped with a raw AttributeError instead of
        # this explanatory error.
        raise NotImplementedError("This module does not support registration "
                                  "of non DBAPI services of at least apilevel 2.0")
    if delta_api < 0 or delta_api > 1:
        raise NotImplementedError("This module does not support registration "
                                  "of DBAPI services with a specified apilevel of %s"
                                  % module.apilevel)
    if mod_paramstyle not in ['pyformat', 'format', 'qmark']:
        raise NotImplementedError("This module only supports registration "
                                  "of DBAPI services with a 'pyformat', 'format' or 'qmark' "
                                  "paramstyle, not %r" % mod_paramstyle)
    if mod_threadsafety < any_threadsafety:
        raise NotImplementedError("This module does not support registration "
                                  "of DBAPI services of threadsafety %d (more generally under %d)"
                                  % (mod_threadsafety, any_threadsafety))
    if not urisup.valid_scheme(uri_scheme):
        raise urisup.InvalidSchemeError("Can't register an invalid URI scheme %r"
                                        % uri_scheme)
    __uri_create_methods[uri_scheme] = (create_method, module, c14n_uri_method,
                                        escape, cast, is_connected)
def setoutputsize(self, size, column=None):
    "As in DBAPI2.0"
    # Forward to the wrapped DBAPI2 cursor, preserving the one-arg /
    # two-arg call distinction.
    if column is not None:
        self.__dbapi2_cursor.setoutputsize(size, column)
    else:
        self.__dbapi2_cursor.setoutputsize(size)
def __connect(self):
    """ Connect to the database. """
    # Look up the backend's method table for this URI, keep it for
    # later calls, and open the DBAPI2 connection.
    methods = _get_methods_by_uri(self.sqluri)
    self.__methods = methods
    self.__dbapi2_conn = methods[METHOD_CONNECT](self.sqluri)
def reconnect(self, query=None, log_reconnect=False):
    """ Reconnect to the database. """
    # Rebuild the URI with the second authority field blanked before
    # logging it (presumably the password — confirm against urisup).
    parts = list(urisup.uri_help_split(self.sqluri))
    if parts[1]:
        authority = list(parts[1])
        if authority[1]:
            authority[1] = None
        parts[1] = authority
    if log_reconnect:
        LOG.warning('reconnecting to %r database (query: %r)',
                    urisup.uri_help_unsplit(parts), query)
    self.__connect()
def perform(self):
    """ Performs bulk operation """
    # Process every configured request in turn.
    requests = self._cfg[Integrator._CFG_KEY_REQUESTS]
    for request_name in requests:
        spec = requests[request_name]
        request_type = spec[Integrator._CFG_KEY_REQUEST_TYPE]
        request_cfg_file = spec[Integrator._CFG_KEY_REQUEST_CFG_FILE]
        self._logger.debug('{}'.format(request_cfg_file))
        self._process_request(request_name, request_type, request_cfg_file)
def get_session():
    """Gets a session. If there's no yet, creates one.

    :returns: a session
    """
    if hasattr(g, 'session'):
        return g.session
    sess = create_session(bind=current_app.config['DATABASE_ENGINE'])
    try:
        # Cache the session on the application context when possible.
        g.session = sess
    except RuntimeError:
        # Outside an application context the session cannot be cached;
        # hand back the uncached session instead.
        pass
    return sess
def _plugin_get(self, plugin_name):
    """
    Find plugins in controller

    :param plugin_name: Name of the plugin to find
    :type plugin_name: str | None
    :return: Plugin or None and error message
    :rtype: (settable_plugin.SettablePlugin | None, str)
    """
    if not plugin_name:
        return None, u"Plugin name not set"
    # First settable plugin whose name matches, or None.
    matches = (
        plugin for plugin in self.controller.plugins
        if isinstance(plugin, SettablePlugin) and plugin.name == plugin_name
    )
    found = next(matches, None)
    if found is not None:
        return found, ""
    return None, u"Settable plugin '{}' not found".format(plugin_name)
def _make_builder(config, current_target):
    """
    Create and return a Builder for a component.

    Arguments
    config - The configuration of the component.
    current_target - The target the builder should be created for.
    """
    # Resolve which build tool applies to this target, then construct
    # the matching builder from the registry.
    key = devpipeline_core.toolsupport.choose_tool_key(
        current_target, _BUILD_TOOL_KEYS)
    return devpipeline_core.toolsupport.tool_builder(
        config, key, devpipeline_build.BUILDERS, current_target)
def build_task(current_target):
    """
    Build a target.

    Arguments
    current_target - The target to build.
    """
    target = current_target.config
    try:
        # Configure and run the build in a dedicated build directory.
        builder = _make_builder(target, current_target)
        build_path = _get_build_path(target, builder)
        if not os.path.exists(build_path):
            os.makedirs(build_path)
        builder.configure(src_dir=target.get("dp.src_dir"),
                          build_dir=build_path)
        builder.build(build_dir=build_path)
        # Install unless the target opts out via a no-install key.
        no_install = devpipeline_core.toolsupport.choose_tool_key(
            current_target, _NO_INSTALL_KEYS)
        if no_install not in target:
            install_path = target.get(
                devpipeline_core.toolsupport.choose_tool_key(
                    current_target, _INSTALL_PATH_KEYS),
                fallback="install")
            builder.install(build_dir=build_path, install_dir=install_path)
            _find_file_paths(target, os.path.join(build_path, install_path))
    except devpipeline_core.toolsupport.MissingToolKey as mtk:
        # A missing tool configuration is a warning, not a hard failure.
        current_target.executor.warning(mtk)
def _get_registry(self, registry_path_or_url):
    '''Return a dict with objects mapped by their id from a CSV endpoint'''
    # Local file takes precedence; otherwise fetch over HTTP.
    if os.path.isfile(registry_path_or_url):
        with open(registry_path_or_url, 'r') as f:
            reader = compat.csv_dict_reader(f.readlines())
    else:
        res = requests.get(registry_path_or_url)
        res.raise_for_status()
        reader = compat.csv_dict_reader(StringIO(res.text))
    return {row['id']: row for row in reader}
def _load_json_file_or_url(self, json_path_or_url):
    '''Return the JSON at the local path or URL as a dict

    This method raises DataPackageRegistryException if there were any
    errors.
    '''
    try:
        # Local file takes precedence; otherwise fetch over HTTP.
        if os.path.isfile(json_path_or_url):
            with open(json_path_or_url, 'r') as f:
                return json.load(f)
        res = requests.get(json_path_or_url)
        res.raise_for_status()
        return res.json()
    except (ValueError, requests.exceptions.RequestException) as e:
        # Wrap parse and network errors, preserving the original cause.
        six.raise_from(DataPackageRegistryException(e), e)
def allow(self, comment, content_object, request):
    """Moderates comments."""
    # Build the Akismet comment-check payload from the comment and the
    # originating request.
    POST = urlencode({
        "blog": settings.AKISMET_BLOG.encode("utf-8"),
        "user_ip": comment.ip_address,
        "user_agent": request.META.get('HTTP_USER_AGENT', "").encode("utf-8"),
        "referrer": request.META.get('HTTP_REFERRER', "").encode("utf-8"),
        "comment_author": comment.user_name.encode("utf-8"),
        "comment_author_email": comment.user_email.encode("utf-8"),
        "comment_author_url": comment.user_url.encode("utf-8"),
        "comment_content": comment.comment.encode("utf-8")})
    connection = HTTPConnection(AKISMET_URL, AKISMET_PORT)
    connection.request("POST", AKISMET_PATH, POST,
                       {"User-Agent": AKISMET_USERAGENT,
                        "Content-type": "application/x-www-form-urlencoded"})
    response = connection.getresponse()
    # Akismet answers the literal strings "true" (spam) / "false" (ham).
    # NOTE(review): under Python 3, response.read() returns bytes, so the
    # string comparisons below would always fall through to the error
    # branch — this code appears to target Python 2; confirm.
    status, result = response.status, response.read()
    if result == "false":
        return True
    elif result == "true" and settings.DISCARD_SPAM:
        return False
    elif result == "true":
        # Keep spam in the database but hide it, flagged as removed.
        comment.is_removed = True
        comment.is_public = False
        return True
    else:
        raise AkismetError(status, result)
def get_dict_repr(self):
    """
    Return a dictionary representation of this phase.

    This will be used for checksumming, in order to uniquely compare
    instance images against their requirements.
    """
    return {
        "phase_name": self.phase_name,
        "phase_type": self.phase_type,
        "actions": self.actions,
    }
def discrete_index(self, indices):
    """get elements by discrete indices

    :param indices: list discrete indices
    :return: elements
    """
    return [self[i] for i in indices]
def register_images(im0, im1, *, rmMean=True, correctScale=True):
    """Finds the rotation, scaling and translation of im1 relative to im0

    Parameters
    ----------
    im0: First image
    im1: Second image
    rmMean: Set to true to remove the mean (Default)
    correctScale: Set to False to skip scale correction

    Returns
    -------
    angle: The angle difference
    scale: The scale difference
    [y, x]: The offset
    im2: The rotated and translated second image

    Notes
    -----
    The algorithm uses gaussian fit for subpixel precision.
    The best case would be to have two squares images of the same size.
    The algorithm is faster if the size is a power of 2.
    """
    # sanitize input
    im0 = np.asarray(im0, dtype=np.float32)
    im1 = np.asarray(im1, dtype=np.float32)
    if rmMean:
        # remove mean so the DFT magnitudes are not dominated by the DC term
        im0 = im0 - im0.mean()
        im1 = im1 - im1.mean()
    # Compute DFT (the images are resized to the same size)
    f0, f1 = dft_optsize_same(im0, im1)
    # Get rotation and scale
    angle, scale = find_rotation_scale(f0, f1, isccs=True)
    # Avoid fluctuations: optionally pin the scale to 1
    if not correctScale:
        if np.abs(1 - scale) > 0.05:
            warnings.warn("Scale should be corrected")
        scale = 1
    # apply rotation and scale
    im2 = rotate_scale(im1, angle, scale)
    f2 = dft_optsize(im2, shape=f0.shape)
    # Find offset between the original and the rectified second image
    y, x = find_shift_dft(f0, f2, isccs=True)
    return angle, scale, [y, x], im2
def find_rotation_scale(im0, im1, isccs=False):
    """Compares the images and return the best guess for the rotation
    angle, and scale difference.

    Parameters
    ----------
    im0: 2d array
        First image
    im1: 2d array
        Second image
    isccs: boolean, default False
        Set to True if the images are alredy DFT and in CCS representation

    Returns
    -------
    angle: number
        The angle difference
    scale: number
        The scale difference

    Notes
    -----
    Uses find_shift_dft
    """
    # sanitize input
    im0 = np.asarray(im0, dtype=np.float32)
    im1 = np.asarray(im1, dtype=np.float32)
    truesize = None
    # if ccs, convert to shifted dft before giving to polar_fft
    if isccs:
        truesize = im0.shape
        im0 = centered_mag_sq_ccs(im0)
        im1 = centered_mag_sq_ccs(im1)
    # Get log polar coordinates. choose the log base
    lp1, log_base = polar_fft(im1, logpolar=True, isshiftdft=isccs,
                              logoutput=True, truesize=truesize)
    lp0, log_base = polar_fft(im0, logpolar=True, isshiftdft=isccs,
                              logoutput=True, truesize=truesize,
                              nangle=lp1.shape[0], radiimax=lp1.shape[1])
    # Find the shift with log of the log-polar images,
    # to compensate for dft intensity repartition
    angle, scale = find_shift_dft(lp0, lp1)
    # get angle in correct units (rows of the log-polar image span pi)
    angle *= np.pi / lp1.shape[0]
    # get scale in linear units (columns are log-spaced radii)
    scale = log_base ** (scale)
    # return angle and scale
    return angle, scale
def find_shift_cc(im0, im1, ylim=None, xlim=None, subpix=True):
    """Finds the best shift between im0 and im1 using cross correlation

    Parameters
    ----------
    im0: 2d array
        First image
    im1: 2d array
        Second image
    ylim: 2 numbers, optional
        The y limits of the search (if None full range is searched)
    xlim: 2 numbers, optional
        Ibidem with x
    subpix: boolean, default True
        Gaussian fit around the peak for subpixel precision

    Returns
    -------
    [y, x]: 2 numbers
        The offset

    Notes
    -----
    The origin of im1 in the im0 referential is returned.
    ylim and xlim limit the possible output.
    """
    # sanitize input
    im0 = np.asarray(im0, dtype=np.float32)
    im1 = np.asarray(im1, dtype=np.float32)
    # Remove mean so the correlation is unbiased
    im0 = im0 - np.nanmean(im0)
    im1 = im1 - np.nanmean(im1)
    # Save shapes as np array
    shape0 = np.asarray(im0.shape)
    shape1 = np.asarray(im1.shape)
    # Compute the offset and the pad (yleft, yright, xtop, xbottom)
    offset = 1 - shape1
    pad = np.lib.pad(-offset, (1, 1), mode='edge')
    # apply limit on padding
    if ylim is not None:
        pad[0] = -ylim[0]
        pad[1] = ylim[1] + (shape1 - shape0)[0]
    if xlim is not None:
        pad[2] = -xlim[0]
        pad[3] = xlim[1] + (shape1 - shape0)[1]
    # pad image
    im0, offset = pad_img(im0, pad)
    # compute cross correlation matrix
    xc = cv2.matchTemplate(im0, im1, cv2.TM_CCORR)
    # Find maximum of abs (can be anticorrelated)
    idx = np.asarray(np.unravel_index(np.argmax(xc), xc.shape))
    # Return origin in im0 units
    if subpix:
        # update idx with a gaussian fit around the peak
        idx = np.asarray([get_peak_pos(xc[:, idx[1]], wrap=False),
                          get_peak_pos(xc[idx[0], :], wrap=False)])
    else:
        # restrict to reasonable values
        # BUGFIX: the original referenced an undefined name `shape` here
        # (NameError); the wrap bound must come from the correlation map
        # itself. TODO(review): confirm against upstream intent.
        shape = np.asarray(xc.shape)
        idx[idx > shape // 2] -= shape[idx > shape // 2]
    return idx + offset
def combine_images(imgs, register=True):
    """Combine similar images into one to reduce the noise

    Parameters
    ----------
    imgs: list of 2d array
        Series of images
    register: Boolean, default False
        True if the images should be register before combination

    Returns
    -------
    im: 2d array
        The result image

    Notes
    -----
    This is an example of the usage of the library
    """
    imgs = np.asarray(imgs, dtype="float")
    if register:
        # Register every image onto the first one before averaging.
        for i in range(1, imgs.shape[0]):
            ret = register_images(imgs[0, :, :], imgs[i, :, :])
            # NaN marks pixels that left the frame during rectification.
            imgs[i, :, :] = rotate_scale_shift(imgs[i, :, :], *ret[:3],
                                               np.nan)
    return np.mean(imgs, 0)
def dft_optsize(im, shape=None):
    """Resize image for optimal DFT and computes it

    Parameters
    ----------
    im: 2d array
        The image
    shape: 2 numbers, optional
        The shape of the output image (None will optimize the shape)

    Returns
    -------
    dft: 2d array
        The dft in CCS representation

    Notes
    -----
    The shape should be a product of 2, 3, and 5
    """
    im = np.asarray(im)
    # save shape
    initshape = im.shape
    # get optimal size
    if shape is None:
        ys = cv2.getOptimalDFTSize(initshape[0])
        xs = cv2.getOptimalDFTSize(initshape[1])
        shape = [ys, xs]
    # Add zeros to go to optimal size
    im = cv2.copyMakeBorder(im, 0, shape[0] - initshape[0],
                            0, shape[1] - initshape[1],
                            borderType=cv2.BORDER_CONSTANT, value=0)
    # Compute dft ignoring 0 rows (0 columns can not be optimized)
    f = cv2.dft(im, nonzeroRows=initshape[0])
    return f
def dft_optsize_same(im0, im1):
    """Resize 2 image same size for optimal DFT and computes it

    Parameters
    ----------
    im0: 2d array
        First image
    im1: 2d array
        Second image

    Returns
    -------
    dft0: 2d array
        The dft of the first image
    dft1: 2d array
        The dft of the second image

    Notes
    -----
    dft0 and dft1 will have the same size
    """
    im0 = np.asarray(im0)
    im1 = np.asarray(im1)
    # Pick a DFT-friendly size large enough for both images.
    ys = max(cv2.getOptimalDFTSize(im0.shape[0]),
             cv2.getOptimalDFTSize(im1.shape[0]))
    xs = max(cv2.getOptimalDFTSize(im0.shape[1]),
             cv2.getOptimalDFTSize(im1.shape[1]))
    common_shape = [ys, xs]
    return (dft_optsize(im0, shape=common_shape),
            dft_optsize(im1, shape=common_shape))
def rotate_scale(im, angle, scale, borderValue=0, interp=cv2.INTER_CUBIC):
    """Rotates and scales the image

    Parameters
    ----------
    im: 2d array
        The image
    angle: number
        The angle, in radians, to rotate
    scale: positive number
        The scale factor
    borderValue: number, default 0
        The value for the pixels outside the border (default 0)
    interp: OpenCV interpolation flag, default cv2.INTER_CUBIC

    Returns
    -------
    im: 2d array
        the rotated and scaled image

    Notes
    -----
    The output image has the same size as the input.
    Therefore the image may be cropped in the process.
    """
    im = np.asarray(im, dtype=np.float32)
    rows, cols = im.shape
    # Rotation about the image centre; note the radian-to-degree
    # conversion and the sign/scale inversions expected by OpenCV.
    M = cv2.getRotationMatrix2D((cols / 2, rows / 2),
                                -angle * 180 / np.pi, 1 / scale)
    im = cv2.warpAffine(im, M, (cols, rows),
                        borderMode=cv2.BORDER_CONSTANT,
                        flags=interp,
                        borderValue=borderValue)  # REPLICATE
    return im
def shift_image(im, shift, borderValue=0):
    """shift the image

    Parameters
    ----------
    im: 2d array
        The image
    shift: 2 numbers
        (y, x) the shift in y and x direction
    borderValue: number, default 0
        The value for the pixels outside the border (default 0)

    Returns
    -------
    im: 2d array
        The shifted image

    Notes
    -----
    The output image has the same size as the input.
    Therefore the image will be cropped in the process.
    """
    im = np.asarray(im, dtype=np.float32)
    rows, cols = im.shape
    dy, dx = shift
    # Pure-translation affine matrix.
    translation = np.asarray([[1, 0, dx],
                              [0, 1, dy]], dtype=np.float32)
    return cv2.warpAffine(im, translation, (cols, rows),
                          borderMode=cv2.BORDER_CONSTANT,
                          flags=cv2.INTER_CUBIC,
                          borderValue=borderValue)
def ccs_normalize(compIM, ccsnorm):
    """normalize the ccs representation

    Parameters
    ----------
    compIM: 2d array
        The CCS image in CCS representation
    ccsnorm: 2d array
        The normalization matrix in ccs representation

    Returns
    -------
    compIM: 2d array
        The normalized CCS image

    Notes
    -----
    (basically an element wise division for CCS)
    Should probably not be used from outside
    """
    compIM = np.asarray(compIM)
    ccsnorm = np.asarray(ccsnorm)
    ys = ccsnorm.shape[0]
    xs = ccsnorm.shape[1]
    # start with first column
    ccsnorm[2::2, 0] = ccsnorm[1:ys - 1:2, 0]
    # continue with middle columns
    ccsnorm[:, 2::2] = ccsnorm[:, 1:xs - 1:2]
    # finish with last column if the width is even
    # BUGFIX: the original tested `xs % 2 is 0` — an identity comparison
    # on an int, which is implementation-dependent (and a SyntaxWarning
    # on modern Python); use equality.
    if xs % 2 == 0:
        ccsnorm[2::2, xs - 1] = ccsnorm[1:ys - 1:2, xs - 1]
    # solve problem with 0/0: replace exact zeros with the smallest
    # positive value of the dtype
    ccsnorm[ccsnorm == 0] = np.nextafter(0., 1., dtype=ccsnorm.dtype)
    res = compIM / ccsnorm
    return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def gauss_fit(X, Y): """ Fit the function to a gaussian. Parameters X: 1d array X values Y: 1d array Y values Returns ------- (The return from scipy.optimize.curve_fit) popt : array Optimal values for the parameters pcov : 2d array The estimated covariance of popt. Notes ----- /!\ This uses a slow curve_fit function! do not use if need speed! """
def gauss_fit(X, Y):
    """Fit the (X, Y) data with a Gaussian curve.

    Parameters
    ----------
    X: 1d array
        X values.
    Y: 1d array
        Y values (negative samples are clipped to 0 before fitting).

    Returns
    -------
    (popt, pcov): the return of scipy.optimize.curve_fit.

    Notes
    -----
    This uses a slow curve_fit call; do not use if speed is needed.
    """
    X = np.asarray(X)
    # Fix: copy before clipping; the original zeroed negative values in
    # the caller's array whenever Y was already an ndarray.
    Y = np.array(Y, copy=True)
    # A Gaussian model cannot produce negative values.
    Y[Y < 0] = 0

    def gauss(x, a, x0, sigma):
        return a * np.exp(-(x - x0)**2 / (2 * sigma**2))

    # First estimation of the parameters from the data moments.
    mean = (X * Y).sum() / Y.sum()
    sigma = np.sqrt((Y * ((X - mean)**2)).sum() / Y.sum())
    height = Y.max()
    return curve_fit(gauss, X, Y, p0=[height, mean, sigma])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def gauss_fit_log(X, Y): """ Fit the log of the input to the log of a gaussian. Parameters X: 1d array X values Y: 1d array Y values Returns ------- mean: number The mean of the gaussian curve var: number The variance of the gaussian curve Notes ----- The least square method is used. As this is a log, make sure the amplitude is >> noise See the gausslog_sympy.py file for explaination """
def gauss_fit_log(X, Y):
    """Least-squares fit of log(Y) with the log of a Gaussian (a parabola).

    Parameters
    ----------
    X: 1d array
        X values.
    Y: 1d array
        Y values.

    Returns
    -------
    (mean, var): the Gaussian mean and variance, or (nan, nan) when the
    data cannot be described by a Gaussian.

    Notes
    -----
    Works in log space: make sure the amplitude is well above the noise.
    See the gausslog_sympy.py file for the derivation.
    """
    X = np.asarray(X)
    Y = np.asarray(Y)
    logY = np.log(Y)
    # Moments needed by the closed-form least-squares solution.
    D = [(logY * X**i).sum() for i in range(3)]
    S = [(X**i).sum() for i in range(5)]
    num = (D[0] * (S[1] * S[4] - S[2] * S[3])
           + D[1] * (S[2]**2 - S[0] * S[4])
           + D[2] * (S[0] * S[3] - S[1] * S[2]))
    den = 2 * (D[0] * (S[1] * S[3] - S[2]**2)
               + D[1] * (S[1] * S[2] - S[0] * S[3])
               + D[2] * (S[0] * S[2] - S[1]**2))
    varnum = (-S[0] * S[2] * S[4] + S[0] * S[3]**2 + S[1]**2 * S[4]
              - 2 * S[1] * S[2] * S[3] + S[2]**3)
    # A vanishing denominator means the linear system is degenerate.
    if abs(den) < 0.00001:
        return np.nan, np.nan
    mean = num / den
    var = varnum / den
    # A negative variance means the data are not Gaussian-shaped.
    if var < 0:
        return np.nan, np.nan
    return mean, var
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def center_of_mass(X, Y): """Get center of mass Parameters X: 1d array X values Y: 1d array Y values Returns ------- res: number The position of the center of mass in X Notes ----- Uses least squares """
def center_of_mass(X, Y):
    """Return the Y-weighted mean position along X (the center of mass).

    Parameters
    ----------
    X: 1d array
        X values.
    Y: 1d array
        Y values (the weights).

    Returns
    -------
    number
        The position of the center of mass along X.
    """
    X = np.asarray(X)
    Y = np.asarray(Y)
    return np.sum(X * Y) / np.sum(Y)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_peak_pos(im, wrap=False): """Get the peak position with subpixel precision Parameters im: 2d array The image containing a peak wrap: boolean, defaults False True if the image represents a toroidal world Returns ------- [y,x]: 2 numbers The position of the highest peak with subpixel precision Notes ----- This is a bit hacky and could be improved """
def get_peak_pos(im, wrap=False):
    """Get the peak position with subpixel precision.

    Parameters
    ----------
    im: 1d array
        Data containing a single dominant peak.  (Despite the original
        docstring's mention of a 2d array, the indexing with
        ``np.arange(im.size)`` only works on a flat profile — TODO confirm
        with callers.)
    wrap: boolean, defaults False
        True if the data represent a periodic (wrapping) domain.

    Returns
    -------
    number
        The position of the highest peak with subpixel precision.

    Notes
    -----
    This is a bit hacky and could be improved.
    Non-finite input samples are zeroed in place (assuming im > 0).
    """
    im = np.asarray(im)
    # Remove invalid values (assuming im > 0).
    im[np.logical_not(np.isfinite(im))] = 0
    # Remove mean.
    im = im - im.mean()
    argmax = im.argmax()
    dsize = im.size
    # Cut at 30% of the peak height to isolate the peak.
    # TODO: choose less random value
    cut = .3 * im[argmax]
    peak = im > cut
    peak, __ = label(peak)
    # Merge the two halves of a peak straddling the border.
    if wrap and peak[0] != 0 and peak[-1] != 0 and peak[0] != peak[-1]:
        peak[peak == peak[-1]] = peak[0]
    # Keep only the connected component containing the maximum.
    peak = peak == peak[argmax]
    X = np.arange(dsize)[peak]
    Y = im[peak]
    # Fix: `offset` was only assigned inside the wrap branch, so
    # `return ret + offset` raised NameError whenever wrap was False.
    offset = 0
    if wrap:
        # Wrap X values around the border.
        X[X > dsize // 2] -= dsize
        # Recenter on the maximum so X stays small (X**4 appears in the fit).
        offset = X[Y == Y.max()][0]
        X -= offset
        # Fit only within a radius of 3 around the center.
        Y = Y[abs(X) < 3]
        X = X[abs(X) < 3]
    if peak.sum() > 2:
        # Enough points for the log-Gaussian fit.
        ret, __ = gauss_fit_log(X, Y)
        # Fix: `ret is np.nan` relied on object identity; use isnan.
        if np.isnan(ret):
            ret = center_of_mass(X, Y)
    elif peak.sum() > 1:
        # With only 2 pixels, a Gaussian fit is impossible.
        ret = center_of_mass(X, Y)
    else:
        # A single-pixel peak is trivial.
        ret = X[0]
    return ret + offset
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def centered_mag_sq_ccs(im): """return centered squared magnitude Parameters im: 2d array A CCS DFT image Returns ------- im: 2d array A centered image of the magnitude of the DFT Notes ----- Check doc Intel* Image Processing Library https://www.comp.nus.edu.sg/~cs4243/doc/ipl.pdf The center is at position (ys//2, 0) """
def centered_mag_sq_ccs(im):
    """Return the centered squared magnitude of a CCS DFT image.

    Parameters
    ----------
    im: 2d array
        A CCS-packed DFT image.

    Returns
    -------
    2d array
        A centered image of the squared magnitude of the DFT; the center
        ends up at position (ys // 2, 0).

    Notes
    -----
    See the Intel Image Processing Library documentation for the CCS
    packing layout: https://www.comp.nus.edu.sg/~cs4243/doc/ipl.pdf
    """
    im = np.asarray(im)
    # |DFT|^2 = DFT * conj(DFT), computed directly on the CCS packing.
    im = cv2.mulSpectrums(im, im, flags=0, conjB=True)
    ys = im.shape[0]
    xs = im.shape[1]
    # Output holds only the non-redundant half of the spectrum.
    ret = np.zeros((ys, xs // 2 + 1))
    # First column:
    # center
    ret[ys // 2, 0] = im[0, 0]
    # bottom
    ret[ys // 2 + 1:, 0] = im[1:ys - 1:2, 0]
    # top (inverted copy of bottom)
    ret[ys // 2 - 1::-1, 0] = im[1::2, 0]
    # Center columns.
    ret[ys // 2:, 1:] = im[:(ys - 1) // 2 + 1, 1::2]
    ret[:ys // 2, 1:] = im[(ys - 1) // 2 + 1:, 1::2]
    # Last column needs the same unpacking as the first when xs is even.
    # Fix: `xs % 2 is 0` tested object identity, not equality; use `==`.
    if xs % 2 == 0:
        ret[ys // 2 + 1:, xs // 2] = im[1:ys - 1:2, xs - 1]
        ret[:ys // 2, xs // 2] = 0
    return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_overexposed(ims): """Simple test to check if image is overexposed Parameters im: 2d array integer the image Returns ------- overexposed: Bool Is the image overexposed """
def is_overexposed(ims):
    """Heuristic check for overexposure (saturated top intensity bin).

    Parameters
    ----------
    ims: 2d integer array, or a stack (3d) of them
        The image(s).

    Returns
    -------
    bool (or list of bool for a stack)
        Whether each image is overexposed.
    """
    # Recurse over a stack of images.
    if len(np.shape(ims)) == 3:
        return [is_overexposed(frame) for frame in ims]
    ims = np.array(ims, int)
    histogram = np.bincount(np.ravel(ims))
    jumps = np.diff(histogram)
    # A saturated image shows an abnormal jump into the last bin.
    return jumps[-1] > np.std(jumps)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_taps_aff(self): """Returns True if taps aff for this location"""
def is_taps_aff(self):
    """Return True if it is "taps aff" for this location.

    Queries the taps-aff.co.uk API for ``self.location`` and interprets
    the reported status.

    Raises
    ------
    RuntimeError
        When the service returns unparsable JSON, a payload without the
        expected keys, or an unexpected status value.
    IOError
        When the HTTP request does not succeed.
    """
    request = requests.get('https://www.taps-aff.co.uk/api/%s' % self.location)
    if request.status_code != 200:
        raise IOError("Failure downloading from Api")
    try:
        taps = request.json()['taps']['status']
    except (ValueError, KeyError):
        # ValueError: body is not valid JSON.
        # Fix: KeyError (well-formed JSON missing the expected keys) was
        # previously uncaught and escaped as a bare KeyError.
        raise RuntimeError("Unexpected response from service")
    if taps == 'aff':
        return True
    if taps == 'oan':
        return False
    raise RuntimeError("Unexpected taps value: %s" % taps)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def equal(actual, expected):
    '''
    Compare actual and expected using ==

    >>> expect = Expector([])
    >>> expect(1).to_not(equal, 2)
    (True, 'equal: expect 1 == 2')
    >>> expect(1).to(equal, 1)
    (True, 'equal: expect 1 == 1')
    '''
    is_passing = actual == expected
    diffable_types = (str, dict, list, tuple)
    # On failure, show a unified diff for types that pretty-print well.
    if not is_passing and isinstance(expected, diffable_types) and isinstance(actual, diffable_types):
        diff_lines = difflib.unified_diff(pformat(expected).split('\n'),
                                          pformat(actual).split('\n'),
                                          n=99)
        description = '\n'.join(['equal:'] + list(diff_lines))
    else:
        description = "equal: expect {} == {}".format(actual, expected)
    return (is_passing, description)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def change(subject, evaluator, by=_NOT_SET, frm=_NOT_SET, to=_NOT_SET):
    '''
    Calls function `evaluator` before and after a call function `subject`.
    Output of `evaluator` should change.

    >>> expect = Expector([])
    >>> a = [1, 2, 3]
    >>> expect(a.clear).to(change, lambda: len(a))
    (True, 'expect change: actual from=3 to=0')
    >>> a = [1, 2, 3]
    >>> expect(a.clear).to(change, lambda: len(a), by=-3)
    (True, 'expect change by=-3: actual from=3 to=0')
    >>> a = [1, 2, 3]
    >>> expect(a.clear).to(change, lambda: len(a), frm=3, to=0)
    (True, 'expect change from=3 to=0: actual from=3 to=0')
    '''
    before = evaluator()
    subject()
    after = evaluator()
    clauses = []
    # Passes by default when the evaluated value changed at all.
    is_passing = before != after
    if by != _NOT_SET:
        clauses.append(' by={}'.format(repr(by)))
        # The observed delta must match the requested one exactly.
        if (after - before) != by:
            is_passing = False
    if frm != _NOT_SET:
        clauses.append(' from={}'.format(repr(frm)))
        if frm != before:
            is_passing = False
    if to != _NOT_SET:
        clauses.append(' to={}'.format(repr(to)))
        if to != after:
            is_passing = False
    message = 'expect change{}: actual from={} to={}'.format(
        ''.join(clauses), repr(before), repr(after))
    return (is_passing, message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def stitch_pdfs(pdf_list): ''' Merges a series of single page pdfs into one multi-page doc ''' pdf_merger = PdfFileMerger() for pdf in pdf_list: pdf_merger.append(pdf) with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj: dest_path = tempfileobj.name pdf_merger.write(dest_path) pdf_merger.close() return dest_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def split_pdf(pdf_path): ''' Splits a multi-page pdf into a list of single page pdfs ''' pdf = PdfFileReader(pdf_path) pdf_list = [] for page_num in range(pdf.numPages): page = pdf.getPage(page_num) pdf_writer = PdfFileWriter() pdf_writer.addPage(page) with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj: pdf_writer.write(tempfileobj) page_path = tempfileobj.name pdf_list.append(page_path) return pdf_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_head(self): """Builds response headers and in process renders templates, if any. This method overrides SimpleHTTPRequestHandler.send_head() """
def send_head(self):
    """Build response headers, rendering templates in the process if any.

    This method overrides SimpleHTTPRequestHandler.send_head().

    Returns a readable object for the response body (a rendered stream
    for templates, the open file otherwise), or None on error/redirect.
    """
    path = self.translate_path(self.path)
    f = None
    to_render = False
    if os.path.isdir(path):
        if not self.path.endswith('/'):
            # redirect browser - doing basically what apache does
            self.send_response(301)
            self.send_header("Location", self.path + "/")
            self.end_headers()
            return None
    else:
        # check if URL corresponds to a template to render
        index = path[:-1] if path.endswith("/") else path
        for ext in '.html', '.htm':
            if os.path.exists(index + ext):
                to_render = True
                realpath = index + ext
                break
    if os.path.isdir(path):
        # if dir, check for existence of index.htm*
        for index in "index.html", "index.htm":
            index = os.path.join(path, index)
            if os.path.exists(index):
                realpath = index
                to_render = True
                break
        else:
            return self.list_directory(path)
    # deny if URL directly requests for *.html file, allow if dir
    file_extension = os.path.splitext(path)[1]
    if file_extension in ('.html', '.htm') and not os.path.isdir(path):
        self.send_error(404, "File not found")
        return None
    if to_render:
        path = realpath
    ctype = self.guess_type(path)
    try:
        f = open(path, 'rb')
    except IOError:
        self.send_error(404, "File not found")
        return None
    fs = os.fstat(f.fileno())
    if to_render:
        # Templates report the rendered stream's length, not the file's.
        stream, length = self.gen_response_stream(f)
    else:
        length = fs[6]
    self.send_response(200)
    self.send_header("Content-type", ctype)
    self.send_header("Content-Length", str(length))
    self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
    self.end_headers()
    if to_render:
        return stream
    return f
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pct_decode(s): """ Return the percent-decoded version of string s. 'Coucou, je suis convivial' '' '%25' """
def pct_decode(s):
    """
    Return the percent-decoded version of string s.

    Accepts None (returned unchanged); unicode input is UTF-8 encoded
    first, anything else is coerced with str().
    """
    if s is None:
        return None
    if isinstance(s, unicode):
        s = s.encode('utf8')
    else:
        s = str(s)
    # Replace each %XX escape by the byte it encodes.
    return PERCENT_CODE_SUB(lambda mo: chr(int(mo.group(0)[1:], 16)), s)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pct_encode(s, encdct): """ Return a translated version of s where each character is mapped to a string thanks to the encdct dictionary. Use the encdct parameter to construct a string from parameter s where each character k from s is replaced by the value corresponding to key k in encdct. It happens that callers use dictionaries smartly constructed so that this function will perform percent-encoding quickly when called whith such a dictionary. """
def pct_encode(s, encdct):
    """
    Return a translated version of s where each character is mapped to a
    string via the encdct dictionary.

    Callers pass dictionaries built so that this performs percent-encoding
    quickly; None is returned unchanged, unicode is UTF-8 encoded first.
    """
    if s is None:
        return None
    if isinstance(s, unicode):
        s = s.encode('utf8')
    else:
        s = str(s)
    # Table-driven translation: each char is looked up in encdct.
    return ''.join(map(encdct.__getitem__, s))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def split_authority(authority): """ Split authority into component parts. This function supports IP-literal as defined in RFC 3986. ('user', 'passwd', 'host', 'port') ('user', None, 'host', 'port') ('user', None, 'host', 'port') Traceback (most recent call last): File "<stdin>", line 1, in ? File "<stdin>", line 26, in split_authority InvalidIPLiteralError: Highly invalid IP-literal detected in URI authority 'user@[host]:port' ('user', None, '[::dead:192.168.42.131]', 'port') (None, None, '[::dead:192.168.42.131]', 'port') (None, None, None, 'port') ('user', None, None, 'port') Very basic validation is done if the host part of the authority starts with an '[' as when this is the case, the splitting is done in a quite different manner than the one used by most URI parsers. As a result an InvalidIPLiteralError exception is raised if IP-literal is patently wrong, so the risk of major clashes between two deviant implementations is highly reduced. """
def split_authority(authority):
    """
    Split an URI authority into its (user, passwd, host, port) parts,
    supporting IP-literal as defined in RFC 3986.

    Empty components come back as None.  Basic validation is done on
    bracketed hosts; a patently wrong IP-literal raises
    InvalidIPLiteralError.
    """
    if '@' in authority:
        userinfo, hostport = authority.split('@', 1)
        if userinfo and ':' in userinfo:
            user, passwd = userinfo.split(':', 1)
        else:
            user, passwd = userinfo, None
    else:
        user, passwd, hostport = None, None, authority
    if hostport:
        if hostport[0] == '[':
            # IP-literal: match "[...]" followed by an optional ":port".
            m = re.match(r"\[([\da-fA-F:\.]+|" + IPV_FUTURE_RE
                         + r")\](\:.*|)$", hostport)
            if m:
                host = '[' + m.group(1) + ']'
                port = m.group(2)[1:]
            else:
                # Fix: use the call-style raise; the Python 2-only
                # `raise Exc, msg` form is a syntax error in Python 3.
                raise InvalidIPLiteralError(
                    "Highly invalid IP-literal detected in URI authority %r"
                    % (authority,))
        elif ':' in hostport:
            host, port = hostport.split(':', 1)
        else:
            host, port = hostport, None
    else:
        host, port = None, None
    return (user or None, passwd or None, host or None, port or None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def basic_urisplit(uri): """ Basic URI Parser according to RFC 3986 ('scheme', 'authority', '/path', 'query', 'fragment') """
def basic_urisplit(uri):
    """
    Basic URI parser according to RFC 3986.

    Returns the ('scheme', 'authority', '/path', 'query', 'fragment')
    tuple captured by the RFC 3986 reference regex.
    """
    groups = RFC3986_MATCHER(uri).groups()
    # The other captured groups are the optional delimiters; keep only
    # the five URI components.
    return (groups[1], groups[3], groups[4], groups[6], groups[8])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def uri_tree_normalize(uri_tree): """ Transform an URI tree so that adjacent all-empty fields are coalesced into a single None at parent level. The return value can be used for validation. As a result, no distinction is made between empty and absent fields. It is believed that this limitation is harmless because this is the behavior of most implementations, and even useful in the context of this Python module because empty strings are already not distinguished from None when converting to boolean, so we are only generalizing this concept in order to keep code small and minimize special cases. If the distinction is ever really needed, for example to support empty anchor special HTTP script related URI in a clean way, one will probably need to completely rewrite (or at least review and modify) this module, and special care would be needed to distinguish between '', (), None, and others everywhere implicit boolean conversion is performed. The behavior should then be checked in regards to its conformance with RFC 3986, especially (but this would probably not be sufficient) the classification switches of some URI parts according to the content of others. """
def uri_tree_normalize(uri_tree):
    """
    Transform an URI tree so that adjacent all-empty fields are coalesced
    into a single None at parent level.  The return value can be used for
    validation.

    As a result, no distinction is made between empty and absent fields
    (see the original module documentation for the rationale and
    limitations of this choice).

    @param uri_tree: a (scheme, authority, path, query, fragment) tuple.
    @return: the normalized tuple.
    """
    scheme, authority, path, query, fragment = uri_tree
    # An authority whose components are all empty counts as absent.
    # Fix: the original tested `filter(bool, authority) == ()`, which is
    # always False for list authorities in Python 2 and always False in
    # Python 3 (where filter returns an iterator, never equal to ()).
    if authority and not any(authority):
        authority = None
    if query:
        # Drop (name, value) pairs where both sides are empty.
        # Fix: rewritten without the Python 2-only tuple-unpacking lambda
        # `lambda (x, y): ...`, which is a syntax error in Python 3.
        query = [pair for pair in query if pair[0] or pair[1]]
    return (scheme or None, authority or None, path or None,
            query or None, fragment or None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def uri_tree_precode_check(uri_tree, type_host = HOST_REG_NAME): """ Call this function to validate a raw URI tree before trying to encode it. """
def uri_tree_precode_check(uri_tree, type_host=HOST_REG_NAME):
    """
    Validate a raw URI tree before trying to encode it.

    @param uri_tree: a (scheme, authority, path, query, fragment) tuple.
    @param type_host: the kind of host validation to apply.
    @return: the unchanged uri_tree when valid.
    @raise InvalidSchemeError, InvalidPortError, InvalidIPLiteralError,
        InvalidIPv4addressError, InvalidPathError: on the corresponding
        invalid component.
    """
    scheme, authority, path, query, fragment = uri_tree  # pylint: disable-msg=W0612
    # Fix throughout: call-style raises; the Python 2-only
    # `raise Exc, msg` form is a syntax error in Python 3.
    if scheme and not valid_scheme(scheme):
        raise InvalidSchemeError("Invalid scheme %r" % (scheme,))
    if authority:
        user, passwd, host, port = authority  # pylint: disable-msg=W0612
        if port and not __all_in(port, DIGIT):
            raise InvalidPortError("Invalid port %r" % (port,))
        if type_host == HOST_IP_LITERAL:
            if host and not __valid_IPLiteral(host):
                raise InvalidIPLiteralError("Invalid IP-literal %r" % (host,))
        elif type_host == HOST_IPV4_ADDRESS:
            if host and not __valid_IPv4address(host):
                raise InvalidIPv4addressError("Invalid IPv4address %r" % (host,))
    if path and authority and path[0] != '/':
        raise InvalidPathError("Invalid path %r - non-absolute path can't be used with an authority" % (path,))
    return uri_tree
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_configuration(ctx, base_key, needed_keys): """ Confirm a valid configuration. Args: ctx (invoke.context): base_key (str): the base configuration key everything is under. needed_keys (list): sub-keys of the base key that are checked to make sure they exist. """
def check_configuration(ctx, base_key, needed_keys):
    """Confirm a valid configuration.

    Args:
        ctx (invoke.context): the configuration context.
        base_key (str): the base configuration key everything is under.
        needed_keys (list): sub-keys of the base key that are checked to
            make sure they exist.
    """
    # check for valid configuration
    if base_key not in ctx.keys():
        exit("[{}ERROR{}] missing configuration for '{}'"
             .format(ERROR_COLOR, RESET_COLOR, base_key))
        # TODO: offer to create configuration file
    # Fix: the empty-configuration test hard-coded `ctx.releaser` even
    # though the section to check is given by the `base_key` parameter.
    if ctx[base_key] is None:
        exit("[{}ERROR{}] empty configuration for '{}' found"
             .format(ERROR_COLOR, RESET_COLOR, base_key))
        # TODO: offer to create configuration file
    # TODO: allow use of default values
    for my_key in needed_keys:
        if my_key not in ctx[base_key].keys():
            exit("[{}ERROR{}] missing configuration key '{}.{}'"
                 .format(ERROR_COLOR, RESET_COLOR, base_key, my_key))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_existence(to_check, name, config_key=None, relative_to=None, allow_undefined=False, allow_not_existing=False, base_key='releaser'): """Determine whether a file or folder actually exists."""
def check_existence(to_check, name, config_key=None, relative_to=None,
                    allow_undefined=False, allow_not_existing=False,
                    base_key='releaser'):
    """Determine whether a file or folder actually exists.

    Prints a status line on success (or when undefined/not-existing is
    allowed) and raises FileNotFoundError otherwise.
    """
    if allow_undefined and (to_check is None or to_check.lower() == 'none'):
        print("{: <14} -> {}UNDEFINED{}".format(name, WARNING_COLOR,
                                                RESET_COLOR))
        return
    if config_key is None:
        config_key = "{}.{}".format(base_key, name)
    target = Path(to_check).resolve()
    # Prefer a short, dot-relative display when a base directory is given.
    if target.exists() and relative_to is not None:
        shown = str(target.relative_to(relative_to))
        if shown != '.':
            shown = '.' + os.sep + shown
    else:
        shown = str(target)
    if target.exists() or allow_not_existing:
        print("{: <14} -> {}".format(name, shown))
        return
    raise FileNotFoundError("[{}ERROR{}] '{}', as given, doesn't "
                            "exist. For configuration key '{}', was "
                            "given: {}".format(ERROR_COLOR, RESET_COLOR,
                                               name, config_key, to_check))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _determine_dimensions(num_of_pixels): """ Given a number of pixels, determines the largest width and height that define a rectangle with such an area """
for x in xrange(int(math.sqrt(num_of_pixels)) + 1, 1, -1): if num_of_pixels % x == 0: return num_of_pixels // x, x return 1, num_of_pixels
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_image_to_file(img_path, file_path): """ Expects images created by from_from_file_to_image """
def from_image_to_file(img_path, file_path):
    """
    Decode a file that was packed into an image by from_from_file_to_image.

    The last byte of the flattened pixel data stores how many padding
    bytes must be stripped from the end before writing the output file.
    """
    img = Image.open(img_path)
    data = numpy.array(img)
    # Flatten the RGB triples into a 1-d byte stream.
    data = numpy.reshape(data, len(img.getdata()) * 3)
    to_remove = data[len(data) - 1]
    # Fix: `xrange` is Python 2 only; `range` behaves identically here.
    data = numpy.delete(data, range(len(data) - to_remove, len(data)))
    data.tofile(file_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def textFromHTML(html): """ Cleans and parses text from the given HTML. """
def textFromHTML(html):
    """
    Clean the given HTML (stripping scripts) and return its text content.
    """
    sanitized = lxml.html.clean.Cleaner(scripts=True).clean_html(html)
    return lxml.html.fromstring(sanitized).text_content()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def evaluate(self, context): """ Interpolates the HTML source with the context, then returns that HTML and the text extracted from that html. """
def evaluate(self, context):
    """
    Interpolate the HTML source with the context.

    Returns a (headers, parts) pair where parts maps MIME types to the
    rendered HTML and the plain text extracted from it.
    """
    rendered = self._source.format(**context)
    return {}, {"text/html": rendered,
                "text/plain": textFromHTML(rendered)}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_files(*bases): """ List all files in a data directory. """
def get_files(*bases):
    """
    Yield the relative paths of all files found under the given data
    directories, each expressed as a dotted package path.
    """
    for base in bases:
        basedir, _ = base.split(".", 1)
        root_path = os.path.join(os.path.dirname(__file__), *base.split("."))
        # Length of the absolute prefix to strip so yielded paths are
        # relative to the top-level package directory.
        prefix_len = len(os.path.dirname(root_path)) + len(basedir) + 2
        for dirpath, dirnames, filenames in os.walk(root_path):
            for filename in filenames:
                yield os.path.join(basedir, dirpath, filename)[prefix_len:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def equirectangular_distance(self, other): """ Return the approximate equirectangular when the location is close to the center of the cluster. For small distances, Pythagoras’ theorem can be used on an equirectangular projection. Equirectangular formula:: x = Δλ ⋅ cos φm y = Δφ d = R ⋅ √(x² + y)² It will always over-estimate compared to the real Haversine distance. For example it will add no more than 0.05382 % to the real distance if the delta latitude or longitude between your two points does not exceed 4 decimal degrees. The standard formula (Haversine) is the exact one (that is, it works for any couple of longitude/latitude on earth) but is much slower as it needs 7 trigonometric and 2 square roots. If your couple of points are not too far apart, and absolute precision is not paramount, you can use this approximate version (Equirectangular), which is much faster as it uses only one trigonometric and one square root:: Python 2.7.6rc1 (v2.7.6rc1:4913d0e9be30+, Oct 27 2013, 20:52:11) [GCC 4.2.1 (Apple Inc. build 5666) (dot 3)] on darwin Type "help", "copyright", "credits" or "license" for more information. 5.62202811241 529.424701041 2.78262710571 529.424701073 @param other: a ``GeoPoint`` instance. @return: the great-circle distance, in meters, between this geographic coordinates to the specified other point. """
def equirectangular_distance(self, other):
    """
    Return the approximate great-circle distance, in meters, between this
    point and ``other`` using the equirectangular projection.

    Much faster than the Haversine formula (one trigonometric call and one
    square root) and accurate for points that are close together; it
    always slightly over-estimates the true distance.

    @param other: a ``GeoPoint`` instance.
    @return: distance in meters from this point to ``other``.
    """
    mid_latitude = math.radians(other.latitude + self.latitude) / 2
    x = math.radians(other.longitude - self.longitude) * math.cos(mid_latitude)
    y = math.radians(other.latitude - self.latitude)
    return math.sqrt(x * x + y * y) * GeoPoint.EARTH_RADIUS_METERS
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_json(payload): """ Build a ``GeoPoint`` instance from the specified JSON object. @param payload: JSON representation of a geographic location:: { "accuracy": decimal, "altitude": decimal, "bearing": decimal, "longitude": decimal, "latitude": decimal, "provider": string, "speed": decimal } where: * ``accuracy``: accuracy of the position in meters. * ``altitude``: altitude in meters of the location. * ``bearing``: bearing in degrees. Bearing is the horizontal direction of travel of the device, and is not related to the device orientation. It is guaranteed to be in the range ``[0.0, 360.0]``, or ``null`` if this device does not have a bearing. * ``latitude``: latitude-angular distance, expressed in decimal degrees (WGS84 datum), measured from the center of the Earth, of a point north or south of the Equator. * ``longitude``: longitude-angular distance, expressed in decimal degrees (WGS84 datum), measured from the center of the Earth, of a point east or west of the Prime Meridian. * ``provider``: code name of the location provider that reported the geographical location: * ``gps``: indicate that the location has been provided by a Global Positioning System (GPS). * ``network``: indicate that the location has been provided by an hybrid positioning system, which uses different positioning technologies, such as Global Positioning System (GPS), Wi-Fi hotspots, cell tower signals. * ``speed``: speed in meters/second over the ground, or ``null`` if this location does not have a speed. @return: a ``GeoPoint`` instance or ``None`` if the JSON payload is nil. """
def from_json(payload):
    """
    Build a ``GeoPoint`` instance from its JSON representation.

    @param payload: dict with mandatory ``longitude`` and ``latitude``
        keys; ``accuracy``, ``altitude``, ``bearing``, ``fix_time``,
        ``provider`` and ``speed`` are optional.
    @return: a ``GeoPoint`` instance, or the payload itself when it is
        falsy (e.g. ``None``).
    """
    if not payload:
        return payload
    return GeoPoint(
        payload['longitude'],
        payload['latitude'],
        accuracy=payload.get('accuracy'),
        altitude=payload.get('altitude'),
        bearing=payload.get('bearing'),
        fix_time=payload.get('fix_time'),
        provider=payload.get('provider'),
        speed=payload.get('speed'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def change_keys(obj, convert): """ Recursively goes through the dictionary obj and replaces keys with the convert function. """
def change_keys(obj, convert):
    """
    Recursively rewrite every dict key in *obj* with convert(key).

    Strings, numbers and unrecognized types are returned unchanged;
    dicts, lists, sets and tuples are rebuilt with converted keys.
    """
    if isinstance(obj, (str, int, float)):
        return obj
    if isinstance(obj, dict):
        converted = obj.__class__()
        for key, value in obj.items():
            converted[convert(key)] = change_keys(value, convert)
        return converted
    if isinstance(obj, (list, set, tuple)):
        return obj.__class__(change_keys(item, convert) for item in obj)
    return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _expand_placeholder_value(value): """ Return the SQL string representation of the specified placeholder's value. @param value: the value of a placeholder such as a simple element, a list, or a tuple of one string. @note: by convention, a tuple of one string indicates that this string MUST not be quoted as it represents, for instance, a called to a stored procedure, and not a textual content to modify into a table. @return: a SQL string representation. """
def _expand_placeholder_value(value):
    """
    Return the SQL string representation of a placeholder's value.

    @param value: a simple element, a list, or a tuple of one string.

    @note: by convention, a tuple of one string marks a value that MUST
        NOT be quoted (e.g. a stored procedure call rather than textual
        content).

    @return: a SQL string representation.
    """
    is_expandable = isinstance(value, (list, set)) or \
        (isinstance(value, tuple) and len(value) != 1)
    if is_expandable:
        # Render each element, honoring the one-string-tuple convention.
        return ','.join(
            RdbmsConnection._to_sql_value(
                element if not isinstance(element, tuple) else element[0],
                noquote=isinstance(element, tuple))
            for element in value)
    if isinstance(value, tuple):
        # Single-element tuple: an unquoted SQL fragment.
        assert len(value) == 1
        value = value[0]
        assert value is None or isinstance(value, basestring), 'basestring expected instead of %s' % type(value)
        return RdbmsConnection._to_sql_value(value, True)
    return RdbmsConnection._to_sql_value(value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_placeholders(sql_statement, parameters): """ Retrieve the list of placeholders and their type defined in an SQL statement. @param sql_statement: a parameterized statement. @param parameters: the list of parameters used in the SQL statement. @return: a dictionary of placeholders where the key represents the name of a placeholder, the value corresponds to a tuple:: (``type:PlaceholderType``, ``value``) where : * ``type``: type of the placeholder * ``value``: value to replace the placeholder. """
def _get_placeholders(sql_statement, parameters):
    """
    Retrieve the placeholders and their types defined in an extended
    pyformat SQL statement.

    @param sql_statement: a parameterized statement.
    @param parameters: the parameters used in the SQL statement.
    @return: dict mapping each placeholder name to a tuple
        (``type:PlaceholderType``, ``value``).
    @raise ValueError: when a placeholder has no matching parameter, when
        a nested-list placeholder is not given a list, or when a
        parameter has no placeholder in the statement.
    """
    placeholders = {}
    try:
        for match in REGEX_PATTERN_SQL_PLACEHOLDERS.findall(sql_statement):
            # Each alternative of the pattern captures one placeholder
            # type; only one group per match is non-empty.
            for (i, placeholder_type) in enumerate(PlaceholderType._values):
                placeholder_name = match[i]
                if placeholder_name:
                    placeholder_value = parameters[placeholder_name]
                    if placeholder_type == PlaceholderType.nested_list \
                            and (isinstance(placeholder_value, tuple) and len(placeholder_value) == 1) \
                            and not isinstance(placeholder_value, (list, set, tuple)):
                        raise ValueError('The value to replace the placeholder "%s" is not a list as expected' % placeholder_name)
                    placeholders[placeholder_name] = (placeholder_type, placeholder_value)
                    break
    except KeyError:
        raise ValueError('The placeholder %s has no corresponding parameter' % placeholder_name)
    # Every given parameter must have its placeholder in the statement.
    undefined_placeholders = [
        parameter for parameter in parameters.iterkeys()
        if parameter not in placeholders
    ]
    if undefined_placeholders:
        raise ValueError('The placeholders %s are missing from the extended pyformat SQL statement\n%s'
                         % (', '.join(['"%s"' % _ for _ in undefined_placeholders]), sql_statement))
    return placeholders
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _prepare_statement(sql_statement, parameters): """ Prepare the specified SQL statement, replacing the placeholders by the value of the given parameters @param sql_statement: the string expression of a SQL statement. @param parameters: a dictionary of parameters where the key represents the name of a parameter and the value represents the value of this parameter to replace in each placeholder of this parameter in the SQL statement. @return: a string representation of the SQL statement where the placehodlers have been replaced by the value of the corresponding variables, depending on the type of these variables. """
def _prepare_statement(sql_statement, parameters):
    """
    Expand list-valued parameters directly into the SQL statement.

    Placeholders whose parameter is a list/set/tuple are replaced inline
    (and the parameter removed from ``parameters``); scalar parameters
    are left for the database driver to bind.

    @return: the rewritten SQL statement string.
    """
    placeholders = RdbmsConnection._get_placeholders(sql_statement, parameters)
    for (variable_name, (variable_type, variable_value)) in placeholders.iteritems():
        # Only expand parameters whose value corresponds to a list.
        if isinstance(variable_value, (list, set, tuple)):
            sql_statement = RdbmsConnection._replace_placeholder(
                sql_statement,
                (variable_name, variable_type, variable_value))
            # This parameter is now baked into the SQL expression.
            del parameters[variable_name]
    return sql_statement
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _replace_placeholder(sql_statement, variable): """ Return the string obtained by replacing the specified placeholders by its corresponding value. @param sql_statement: the string expression of a SQL statement to replace placeholders with their corresponding values. @param variable: the variable to use to replace the corresponding placeholder(s) in the SQL statement. * ``name``: name of the variable. * ``type``: an instance of ``PlaceholderType``. * ``value``: the value of this variable to replace the corresponding placeholder(s) of this variable in the SQL statement. @return: a string expression of the SQL statement where the paceholders of the specified variable have been replace by the value of this variable, depending on the type of this varialble. """
def _replace_placeholder(sql_statement, variable):
    """
    Substitute every occurrence of one placeholder by its SQL value.

    @param variable: a (name, PlaceholderType, value) tuple.
    @return: the rewritten SQL statement string.
    """
    (variable_name, variable_type, variable_value) = variable
    if variable_type == PlaceholderType.simple_list:
        sql_value = RdbmsConnection._expand_placeholder_value(variable_value)
    else:
        # Nested list: each element becomes a parenthesized tuple.
        sql_value = ','.join(
            '(%s)' % RdbmsConnection._expand_placeholder_value(element)
            for element in variable_value)
    return re.sub(PATTERN_SQL_PLACEHOLDER_EXPRESSIONS[variable_type] % variable_name,
                  sql_value,
                  sql_statement)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _to_sql_value(value, noquote=False): """ Return the SQL string representation of the specified value. @param value: a value to convert into its SQL string representation. @param noquote: indicate whether to quote or not the specified value. @return: a SQL string representation of the specified value. """
# Render a Python value as a literal SQL fragment: NULL for None, bare text
# for numbers/booleans, otherwise an escaped-string literal (e'...').
# NOTE(review): Python 2 only — uses types.NoneType, long, basestring and
# unicode, none of which exist on Python 3; `obj.stringify` is a project
# helper not visible here.
# Quotes, backslashes and percent signs are doubled so the result can be
# spliced into a statement that may later pass through %-formatting.
# Convert to string the values that the database adapter can't adapt # to a SQL type. # [http://initd.org/psycopg/docs/usage.html#query-parameters] if not isinstance(value, (types.NoneType, bool, int, long, float, basestring)): value = obj.stringify(value) if noquote: return value # @warning: do not use ``psycopg2.extensions.adapt(value).getquoted()`` # because it returns ``str`` object, which is expected as adaptation # is taking a Python object and converting it into a SQL # representation: this is always a bytes string, as it has to be # sent to the socket. However the caller might not use the quoted # value to immediately sent it to the database server, but it can # use it for rewriting an SQL statement, which will break the text # encoding. return 'NULL' if value is None \ else '%s' % str(value) if isinstance(value, (bool, int, long, float)) \ else "e'%s'" % unicode(value).replace("'", "''").replace('\\', '\\\\').replace('%', '%%')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def standalone_from_launchable(cls, launch): """ Given a launchable resource, create a definition of a standalone instance, which doesn't depend on or contain references to other elements. """
attrs = copy.copy(launch.el_attrs) # Remove attributes we overwrite / don't need del attrs["Type"] if attrs.has_key("DependsOn"): del attrs["DependsOn"] if attrs["Properties"].has_key("SpotPrice"): del attrs["Properties"]["SpotPrice"] if attrs["Properties"].has_key("InstanceMonitoring"): del attrs["Properties"]["InstanceMonitoring"] if attrs["Properties"].has_key("SecurityGroups"): del attrs["Properties"]["SecurityGroups"] if attrs["Properties"].has_key("InstanceId"): raise RuntimeError("Can't make instance from launchable containing InstanceId property") inst = EC2Instance(**attrs) # TODO: shallow copy? inst.iscm = launch.iscm return inst
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter_data(self, pattern=''): """ Filter available variables """
# Scan the catalog section of the file and collect every catalog entry whose
# text contains `pattern`, keyed by its zero-based variable index (file line
# number offset by the stored 'CATALOG' position). Scanning stops at the
# 'TIME SERIES' sentinel line.
# NOTE(review): original indentation was lost in this rendering — presumably
# both `if` statements sit inside the enumerate loop; confirm against VCS.
filtered_profiles = {} with open(self.abspath) as fobj: for idx, line in enumerate(fobj): if 'TIME SERIES' in line: break if pattern in line and (idx-self._attributes['CATALOG']-1) > 0: filtered_profiles[idx-self._attributes['CATALOG']-1] = line return filtered_profiles
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract(self, variable_idx): """ Extract a specific variable """
# Extract one profile variable: stride through the data section (one row per
# timestep, step = nvar+1), parse each row into floats, and store rows under
# self.data[variable_idx][1]. The x-axis (self.data[variable_idx][0]) is the
# branch's staggered grid when row length matches it, otherwise the midpoint
# (non-staggered) grid.
# NOTE(review): float(point) failures are silently skipped — malformed tokens
# are dropped rather than raising; presumably intentional for padding fields.
branch = self._define_branch(variable_idx) label = self.profiles[variable_idx].replace("\n", "") self.label[variable_idx] = label self.data[variable_idx] = [[], []] with open(self.abspath) as fobj: for line in fobj.readlines()[ variable_idx+1+self._attributes['data_idx']:: self._attributes['nvar']+1]: points = [] for point in line.split(' '): try: points.append(float(point)) except ValueError: pass self.data[variable_idx][1].append(np.array(points)) x_st = self.geometries[branch][0] x_no_st = [(x0+x1)/2 for x0, x1 in zip(x_st[:-1], x_st[1:])] if len(self.data[variable_idx][1][0]) == len(x_st): self.data[variable_idx][0] = np.array(x_st) else: self.data[variable_idx][0] = np.array(x_no_st)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def canonicalize(message): """ Function to convert an email Message to standard format string :param message: email.Message to be converted to standard string :return: the standard representation of the email message in bytes """
# Multipart or non-binary messages: serialise via mime_to_bytes and force
# CRLF line endings (normalise everything to LF first, then LF -> CRLF so
# no sequence is doubled). Binary single-part messages: rebuild manually,
# because re-serialising a binary payload through the text path would
# corrupt it — headers are joined with CRLF and the raw decoded payload is
# appended untouched.
if message.is_multipart() \ or message.get('Content-Transfer-Encoding') != 'binary': return mime_to_bytes(message, 0).replace( b'\r\n', b'\n').replace(b'\r', b'\n').replace(b'\n', b'\r\n') else: message_header = '' message_body = message.get_payload(decode=True) for k, v in message.items(): message_header += '{}: {}\r\n'.format(k, v) message_header += '\r\n' return message_header.encode('utf-8') + message_body
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_first_part(message, boundary): """ Function to extract the first part of a multipart message"""
first_message = message.split(boundary)[1].lstrip() if first_message.endswith(b'\r\n'): first_message = first_message[:-2] else: first_message = first_message[:-1] return first_message
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pem_to_der(cert, return_multiple=True): """ Converts a given PEM certificate or list of PEM certificates to DER format"""
# Convert PEM-armoured certificate data to a DER byte string (or a list of
# them). Already-DER input is passed through unchanged.
# initialize the certificate array cert_list = [] # If the input is PEM-armoured, un-armour each block down to DER bytes if pem.detect(cert): for _, _, der_bytes in pem.unarmor(cert, multiple=True): cert_list.append(der_bytes) else: cert_list.append(cert) # return multiple if return_multiple is set else first element if return_multiple: return cert_list else: return cert_list.pop()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def verify_certificate_chain(cert_str, trusted_certs, ignore_self_signed=True): """ Verify a given certificate against a trust store"""
# Verify `cert_str` (DER bytes) against the trusted_certs store; with
# ignore_self_signed the certificate itself is added to the store so a
# self-signed cert validates. Returns True on success, raises AS2Exception
# (with OpenSSL's failure reason) on any chain error.
# Load the certificate certificate = crypto.load_certificate(crypto.FILETYPE_ASN1, cert_str) # Create a certificate store and add your trusted certs try: store = crypto.X509Store() if ignore_self_signed: store.add_cert(certificate) # trusted_certs entries are DER bytes (FILETYPE_ASN1), despite the name for _cert in trusted_certs: store.add_cert( crypto.load_certificate(crypto.FILETYPE_ASN1, _cert)) # Create a certificate context using the store and the certificate store_ctx = crypto.X509StoreContext(store, certificate) # Verify the certificate, returns None if certificate is not valid store_ctx.verify_certificate() return True except crypto.X509StoreContextError as e: raise AS2Exception('Partner Certificate Invalid: %s' % e.args[-1][-1])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def csrf(request): """ Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if it has not been provided by either a view decorator or the middleware """
# The token is XOR-masked with a fresh random key and returned as
# "key$base64(token^key)" — masking defends against compression side-channel
# attacks (BREACH-style) that exploit a static token in the response body.
# The whole computation is wrapped with lazy() so the token is only minted
# if a template actually renders {{ csrf_token }}.
def _get_val(): token = get_token(request) if token is None: # In order to be able to provide debugging info in the # case of misconfiguration, we use a sentinel value # instead of returning an empty dict. return 'NOTPROVIDED' else: token = force_bytes(token, encoding='latin-1') key = force_bytes( get_random_string(len(token)), encoding='latin-1' ) value = b64_encode(xor(token, key)) return force_text(b'$'.join((key, value)), encoding='latin-1') _get_val = lazy(_get_val, text_type) return {'csrf_token': _get_val()}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def api(root_url, service, version, path): """Generate URL for path in a Taskcluster service."""
root_url = root_url.rstrip('/') path = path.lstrip('/') if root_url == OLD_ROOT_URL: return 'https://{}.taskcluster.net/{}/{}'.format(service, version, path) else: return '{}/api/{}/{}/{}'.format(root_url, service, version, path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def api_reference(root_url, service, version): """Generate URL for a Taskcluster api reference."""
root_url = root_url.rstrip('/') if root_url == OLD_ROOT_URL: return 'https://references.taskcluster.net/{}/{}/api.json'.format(service, version) else: return '{}/references/{}/{}/api.json'.format(root_url, service, version)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def docs(root_url, path): """Generate URL for path in the Taskcluster docs."""
root_url = root_url.rstrip('/') path = path.lstrip('/') if root_url == OLD_ROOT_URL: return 'https://docs.taskcluster.net/{}'.format(path) else: return '{}/docs/{}'.format(root_url, path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def exchange_reference(root_url, service, version): """Generate URL for a Taskcluster exchange reference."""
root_url = root_url.rstrip('/') if root_url == OLD_ROOT_URL: return 'https://references.taskcluster.net/{}/{}/exchanges.json'.format(service, version) else: return '{}/references/{}/{}/exchanges.json'.format(root_url, service, version)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def schema(root_url, service, name): """Generate URL for a schema in a Taskcluster service."""
root_url = root_url.rstrip('/') name = name.lstrip('/') if root_url == OLD_ROOT_URL: return 'https://schemas.taskcluster.net/{}/{}'.format(service, name) else: return '{}/schemas/{}/{}'.format(root_url, service, name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def regex_validation_sealer(fields, defaults, RegexType=type(re.compile(""))): """ Example sealer that just does regex-based validation. """
# Sealer factory: builds a base class whose __init__ regex-validates every
# argument against the patterns given as defaults. Only keyword-defaulted
# fields are supported (each default IS the pattern), so any required field
# is a configuration error. `klass` is declared before __init__ and rebound
# after type() so the closure's super(klass, self) resolves correctly.
# NOTE(review): `__base__` is not defined in this block — presumably injected
# into the sealer's namespace by the surrounding framework; confirm.
required = set(fields) - set(defaults) if required: raise TypeError( "regex_validation_sealer doesn't support required arguments. Fields that need fixing: %s" % required) klass = None kwarg_validators = dict( (key, val if isinstance(val, RegexType) else re.compile(val)) for key, val in defaults.items() ) arg_validators = list( kwarg_validators[key] for key in fields ) def __init__(self, *args, **kwargs): for pos, (value, validator) in enumerate(zip(args, arg_validators)): if not validator.match(value): raise ValidationError("Positional argument %s failed validation. %r doesn't match regex %r" % ( pos, value, validator.pattern )) for key, value in kwargs.items(): if key in kwarg_validators: validator = kwarg_validators[key] if not validator.match(value): raise ValidationError("Keyword argument %r failed validation. %r doesn't match regex %r" % ( key, value, validator.pattern )) super(klass, self).__init__(*args, **kwargs) klass = type("RegexValidateBase", (__base__,), dict( __init__=__init__, )) return klass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def PI_read(server, tag, start=None, end=None, frequency=None): """ Read function for PI server. It has to be executed by python 32 bit. """
# Read a PI tag over the COM PISDK interface (requires 32-bit Python for the
# COM dispatch). Three modes: start+end -> averaged series over the range;
# start only -> single averaged value over a 1-second window; neither ->
# current snapshot value. Returns (description, unit-of-measure, data array).
# NOTE(review): `!= None` / `== None` should be `is not None` / `is None`;
# left as-is here since this is a documentation-only pass.
pisdk = Dispatch('PISDK.PISDK') my_server = pisdk.Servers(server) # Not sure if/when the login is necessary #con = Dispatch('PISDKDlg.Connections') #con.Login(my_server, '', '', 1, 0) time_start = Dispatch('PITimeServer.PITimeFormat') time_end = Dispatch('PITimeServer.PITimeFormat') sample_point = my_server.PIPoints[tag] uom = sample_point.PointAttributes.Item("EngUnits").Value description = sample_point.PointAttributes.Item('Descriptor').Value if start != None and end != None: # returns a range of values (average) time_start.InputString = start.strftime('%m-%d-%Y %H:%M:%S') time_end.InputString = end.strftime('%m-%d-%Y %H:%M:%S') sample_values = sample_point.Data.Summaries2(time_start, time_end, frequency, 5, 0, None) values = sample_values('Average').Value data = np.array([x.Value for x in values]) elif start != None and end == None: # returns a single value at the start time end = start + timedelta(seconds=1) time_start.InputString = start.strftime('%m-%d-%Y %H:%M:%S') time_end.InputString = end.strftime('%m-%d-%Y %H:%M:%S') sample_values = sample_point.Data.Summaries2(time_start, time_end, frequency, 5, 0, None) values = sample_values('Average').Value data = [x.Value for x in values][0] else: # returns the actual value data = sample_point.data.Snapshot.Value return description, uom, np.array(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unisim_csv_formatting(path, fname): """ Remove some useless stuff from the head of a csv file generated by unisim and returns a pandas dataframe """
# Parse a UniSim-exported CSV: rows 0-8 are junk, row 9 holds column names,
# row 10 units. The header is read manually first so usecols can exclude the
# trailing empty column UniSim emits, then pandas re-reads from row 10 with
# its shutdown/bad-quality markers mapped to NaN.
# NOTE(review): `data.unit = ...` attaches an ad-hoc attribute to the
# DataFrame; pandas does not propagate such attributes through operations.
with open(path+fname, 'r') as fobj: data = fobj.readlines() header = data[9].split(",")[:-1] unit_of_measures = data[10].split(",")[:-1] data = pd.read_csv(path+fname, skiprows=10, index_col=0, usecols=(range(0, len(header))), na_values=('Shutdown', 'Bad', 'I/O Timeout', 'Scan Timeout')) data.columns = header[1:] data.unit = unit_of_measures[1:] return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_stripchart(self, stripchart='overall', expose_data=True): """ Extract a specific stripchart and exposes the data in the namespace """
# Drive UniSim to dump one stripchart to CSV: render the extraction script
# from a template, play it through the COM application (case made visible
# during playback, hidden again after), then delete the temporary script.
# The CSV is archived under <path>/trends/<stripchart>.csv and, when
# expose_data is True, also parsed into self.stripcharts[stripchart].
csv_fname = self.fname.split(os.sep)[-1].replace(".usc", ".csv") scp_fname = self.fname.split(os.sep)[-1].replace(".usc", ".SCP") case_details = {'case': self.fname.__repr__()[1:-1], 'stripchart': stripchart, 'target': self.path.__repr__()[1:-1] + csv_fname} script = STRIPCHART_EXTRACTION_TEMPLATE.substitute(case_details) with open(self.path + scp_fname, 'w') as fobj: fobj.write(script) self.case.visible = True self.case.application.playscript(self.path + scp_fname) self.case.visible = False os.remove(self.path + scp_fname) if expose_data is True: self.stripcharts[stripchart] = unisim_csv_formatting(self.path, csv_fname) if os.path.isdir(self.path+'trends') is not True: os.mkdir(self.path + 'trends') shutil.copy(self.path + csv_fname, self.path + 'trends\\{}.csv'.format(stripchart)) os.remove(self.path + csv_fname)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_profiles(self, pipeline_name, expose_data=True): """ Extract all the profiles of a specific pipeline and exposes the data in the namespace """
# Pull every profile variable (PROFILE_KEYS) for one pipeline from the COM
# object, frame each as timesteps-by-grid-points, and write one CSV per
# variable under <path>/profiles. Values that don't fit the staggered grid
# fall back to the non-staggered grid (ValueError branch).
# NOTE(review): `GEtUserVariable` is odd casing — COM dispatch is typically
# case-insensitive, which is presumably why this works; confirm.
compas_pipe = self.__profile_definition(pipeline_name) get_variable = compas_pipe.GEtUserVariable if os.path.isdir(self.path+'profiles') is not True: os.mkdir(self.path + 'profiles') target_dir = self.path + 'profiles' if expose_data is True: self.profiles[pipeline_name] = {} for key in PROFILE_KEYS: pipe = self.pipes[pipeline_name] pipe['data'][key] = get_variable(PROFILE_KEYS[key]).Variable() temp = {key: val for (key, val) in enumerate(pipe['data'][key])} try: data = pd.DataFrame(temp, index=pipe['grid']) except ValueError: data = pd.DataFrame(temp, index=pipe['non_st_grid']) data.columns = self.pipes[pipeline_name]['timesteps'] data.to_csv('{}/{}-{}.csv'.format(target_dir, pipeline_name, key)) if expose_data is True: self.profiles[pipeline_name][key] = data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __profile_definition(self, pipeline_name): """ Prepare the profiles extraction from a specific profile """
# Fetch the pipeline COM object and cache its staggered grid, non-staggered
# grid and timestep vector in self.pipes[pipeline_name], priming the 'data'
# dict that extract_profiles fills in. Returns the COM pipe object.
pipe = self.flsh.Operations[pipeline_name] x_st = pipe.GetUserVariable(PROFILE_LENGTH_ST).Variable() x_non_st = pipe.GetUserVariable(PROFILE_LENGTH_NON_ST).Variable() timesteps = pipe.GetUserVariable(PROFILE_TIME).Variable() self.pipes[pipeline_name] = {'grid': x_st, 'non_st_grid': x_non_st, 'timesteps': timesteps, 'data': {} } return pipe
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_until(self, endtime, timeunit='minutes', save=True): """ Run a case until the specified endtime """
# Advance the case's integrator to the requested end time, then optionally
# persist the case to disk.
self.case.solver.Integrator.rununtil(endtime, timeunit)
# Deliberately strict check: only the boolean True triggers a save.
if save is True:
    self.case.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, fname=''): """ Save the current case """
# Persist the case: overwrite in place when no file name is given, otherwise
# write a copy as <path><sep><fname>.
# Fix: the original tested `fname is ''` — an identity comparison with a
# string literal that only works through CPython's interning accident and
# raises a SyntaxWarning on modern interpreters; use a truthiness test.
if not fname:
    self.case.save()
else:
    self.case.SaveAs(self.path + os.sep + fname)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tuple_sealer(fields, defaults): """ This sealer returns an equivalent of a ``namedtuple``. """
# Sealer producing a namedtuple-like class: make_init_func (module helper)
# generates a __new__ that forwards field values into tuple.__new__;
# per-field read access is provided by itemgetter properties, __slots__
# keeps instances dict-free, and __getnewargs__ makes pickling work by
# replaying the positional values into __new__.
baseclass_name = 'FieldsBase_for__{0}'.format('__'.join(fields)) global_namespace, local_namespace = make_init_func( fields, defaults, baseclass_name, header_name='__new__', header_start='def {func_name}(cls', header_end='):\n', super_call_start='return tuple.__new__(cls, (', super_call_end='))\n', super_call_pass_kwargs=False, set_attributes=False, ) def __getnewargs__(self): return tuple(self) def __repr__(self): return "{0}({1})".format( self.__class__.__name__, ", ".join(a + "=" + repr(getattr(self, a)) for a in fields) ) return type(baseclass_name, (tuple,), dict( [(name, property(itemgetter(i))) for i, name in enumerate(fields)], __new__=local_namespace['__new__'], __getnewargs__=__getnewargs__, __repr__=__repr__, __slots__=(), ))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter_trends(self, pattern=''): """ Filter available trends """
# Same catalog scan as filter_data, but for trend entries: map each matching
# catalog line to its variable index (file line offset by the 'CATALOG'
# position), stopping at the 'TIME SERIES' sentinel.
# NOTE(review): original indentation was lost in this rendering — presumably
# the pattern test sits inside the enumerate loop; confirm against VCS.
filtered_trends = {} with open(self.abspath) as fobj: for idx, line in enumerate(fobj): variable_idx = idx-self._attributes['CATALOG']-1 if 'TIME SERIES' in line: break if pattern in line and variable_idx > 0: filtered_trends[variable_idx] = line return filtered_trends
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract(self, *args): """ Extract a specific variable """
# Extract trend variables: column 0 of the data section is the time axis;
# each requested variable_idx is loaded as its own column, truncated to the
# time vector's length, and its label recovered from the matching catalog
# line (quotes and newline stripped).
# NOTE(review): np.loadtxt re-reads the whole file once per variable — fine
# for small files, quadratic-ish for many columns; flagging, not changing.
self.time = np.loadtxt(self.abspath, skiprows=self._attributes['data_idx']+1, unpack=True, usecols=(0,)) for variable_idx in args: data = np.loadtxt(self.abspath, skiprows=self._attributes['data_idx']+1, unpack=True, usecols=(variable_idx,)) with open(self.abspath) as fobj: for idx, line in enumerate(fobj): if idx == 1 + variable_idx+self._attributes['CATALOG']: try: self.data[variable_idx] = data[:len(self.time)] except TypeError: self.data[variable_idx] = data.base self.label[variable_idx] = line.replace("\'", '').replace("\n", "") break
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def view_trends(self, pattern=''): """ Return a pandas df with the available trends """
# Present the filtered trend catalog as a DataFrame with columns Index /
# Variable / Position / Unit / Description. Each catalog line is parsed by
# regex: the quoted fields are (position parts..., unit, description); the
# position parts are joined and abbreviated ("BRANCH" -> "Br").
# The active filter pattern is recorded as the DataFrame's index name.
d = OrderedDict() d['Index'] = None d['Variable'] = [] d['Position'] = [] d['Unit'] = [] d['Description'] = [] raw_d = self.filter_trends(pattern) d['Index'] = [k for k in raw_d.keys()] for st in self.filter_trends(pattern).values(): st = st.replace('\n', '') d['Variable'].append(st.split(' ')[0]) temp = [x[1:-1] for x in re.findall("\'[\w\(\) \-\:\/]+\'", st)] d['Description'].append(temp[-1]) d['Unit'].append(temp[-2][1:-1]) pos = " - ".join(temp[: -2]).replace("BRANCH", "Br").replace(":", "") d['Position'].append(pos) df=pd.DataFrame(d) df.index.name = "Filter: {}".format(pattern) return df
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _tab_type(self): """ Private method to define the tab type """
# Classify the tab file: 'keyword' if any line contains 'COMPONENTS',
# otherwise 'fixed'.
# NOTE(review): the original indentation is lost here — this reads as a
# for/else (return 'fixed' only after scanning every line without a match).
# If the `else` were instead paired with the `if`, the first non-matching
# line would short-circuit to 'fixed', which would be a bug; confirm the
# real indentation against version control before restructuring.
with open(self.abspath) as fobj: contents = fobj.readlines() for line in contents: if 'COMPONENTS' in line: return 'keyword' else: return 'fixed'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _partial_extraction_fixed(self, idx, extra_idx=0): """ Private method for a single extraction on a fixed-type tab file """
# Read one numeric table from a fixed-format tab file: starting at line
# idx+extra_idx, parse whitespace-separated floats line by line and flatten
# them into a single 1-D array. The first line that fails float conversion
# marks the end of the table and stops the scan.
myarray = np.array([]) with open(self.abspath) as fobj: contents = fobj.readlines()[idx+extra_idx:] for line in contents: try: vals = re.findall(r' *[\w\-\+\.]*', line) temp = np.array([float(val) for val in vals if val not in ('', ' ')]) myarray = np.hstack((myarray, temp)) except ValueError: break return myarray
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _export_all_fixed(self): """ Exports all the properties for a fixed-type tab file """
# Export every property of a fixed-format tab file: build the flattened
# (T, P) grid via the cartesian product of the stored temperature and
# pressure axes (pressure converted Pa -> bar), replicate it for each
# property row, and parse each property's value table starting one line
# past its catalog index. Results land in the Temperature / Pressure /
# values columns of self.data.
array_ts = [] array_ps = [] for array_t, array_p in it.product(self.metadata["t_array"][0], self.metadata["p_array"][0]): array_ts.append(array_t) array_ps.append(array_p/1e5) array_ts_tot = [array_ts for t in self.data.index] array_ps_tot = [array_ps for t in self.data.index] values = [] for idx in self.data.index: values.append(self._partial_extraction_fixed(idx+1)) self.data["Temperature"] = array_ts_tot self.data["Pressure"] = array_ps_tot self.data["values"] = values
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _metadata_keyword(self): """ Define the most important tab parameters for a keyword-type tab file """
# Parse the header of a keyword-format tab file: collect fluid labels from
# PVTTABLE LABEL lines, the pressure and temperature axes from their
# parenthesised value lists, and the property names from COLUMNS. Derived
# counts (t_points, p_points, nfluids) are cached and self.data is seeded
# with the property list.
with open(self.abspath) as fobj: for line in fobj: if 'PVTTABLE LABEL' in line: label = re.findall(r"\=[\w\ \"]*\,", line)[0][1:-1] self.metadata["fluids"].append(label) if 'PRESSURE = (' in line: line = line.split('=')[1] vals = re.findall(r'[\d\-\.eE+]+', line) self.metadata['p_array'] = np.array([float(val) for val in vals]) if 'TEMPERATURE = (' in line: line = line.split('=')[1] vals = re.findall(r'[\d\-\.eE+]+', line) self.metadata['t_array'] = np.array([float(val) for val in vals]) if 'COLUMNS = (' in line: line = line.split('=')[1].replace(" ", "").\ replace('(', '').replace(')\n', '') self.metadata['properties'] = line.split(',') self.metadata["t_points"] = len(self.metadata["t_array"]) self.metadata["p_points"] = len(self.metadata["p_array"]) self.metadata["nfluids"] = len(self.metadata["fluids"]) self.data = pd.DataFrame(self.metadata["properties"])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _export_all_keyword(self): """ Export method for keyword tab files """
# Export a keyword-format tab file: for each fluid, slice out its section of
# the file text (preferring the "!Phase properties" delimiter, falling back
# to "COLUMNS"; trailing "LABEL" content trimmed when present), pull all
# numeric tokens, and de-interleave them per property (values are stored
# property-major with stride nprops). Result is a fluid-by-property frame.
data = {} for fluid_idx, fluid in enumerate(self.metadata["fluids"]): data[fluid] = {} with open(self.abspath) as fobj: text = fobj.read() try: text = text.split("!Phase properties")[1+fluid_idx] except IndexError: text = text.split("COLUMNS")[1+fluid_idx] try: text = text.split("LABEL")[0] except IndexError: pass values = re.findall(r"[\.\d\-]+[\.\deE\+\-]+", text) nprops = len(self.metadata["properties"]) for idx, prop in enumerate(self.metadata["properties"]): data[fluid][prop] = [float(x) for x in values[idx::nprops]] self.data = pd.DataFrame(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def export_all(self): """ It makes all the properties available as data attribute """
# Route to the exporter matching the detected tab layout; unknown layouts
# are silently ignored, matching the original behaviour.
exporters = {
    'fixed': self._export_all_fixed,
    'keyword': self._export_all_keyword,
}
exporter = exporters.get(self.tab_type)
if exporter is not None:
    exporter()