Dataset columns: text_prompt (string, 157 to 13.1k characters); code_prompt (string, 7 to 19.8k characters).
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse(template): """Parse a top-level template string Expression. Any extraneous text is considered literal text. """
parser = Parser(template)
parser.parse_expression()
parts = parser.parts
remainder = parser.string[parser.pos:]
if remainder:
    parts.append(remainder)
return Expression(parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def evaluate(self, env): """Evaluate the symbol in the environment, returning a Unicode string. """
if self.ident in env.values:
    # Substitute for a value.
    return env.values[self.ident]
else:
    # Keep original text.
    return self.original
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def translate(self): """Compile the variable lookup."""
ident = self.ident
expr = ex_rvalue(VARIABLE_PREFIX + ident)
return [expr], set([ident]), set()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def evaluate(self, env): """Evaluate the function call in the environment, returning a Unicode string. """
if self.ident in env.functions:
    arg_vals = [expr.evaluate(env) for expr in self.args]
    try:
        out = env.functions[self.ident](*arg_vals)
    except Exception as exc:
        # Function raised exception! Maybe inlining the name of
        # the exception will help debug.
        return u'<%s>' % str(exc)
    return str(out)
else:
    return self.original
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def translate(self): """Compile the function call."""
varnames = set()
ident = self.ident
funcnames = set([ident])
arg_exprs = []
for arg in self.args:
    subexprs, subvars, subfuncs = arg.translate()
    varnames.update(subvars)
    funcnames.update(subfuncs)
    # Create a subexpression that joins the result components of
    # the arguments.
    arg_exprs.append(ex_call(
        ast.Attribute(ex_literal(u''), 'join', ast.Load()),
        [ex_call(
            'map',
            [
                ex_rvalue(str.__name__),
                ast.List(subexprs, ast.Load()),
            ]
        )],
    ))
subexpr_call = ex_call(
    FUNCTION_PREFIX + ident,
    arg_exprs
)
return [subexpr_call], varnames, funcnames
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def evaluate(self, env): """Evaluate the entire expression in the environment, returning a Unicode string. """
out = []
for part in self.parts:
    if isinstance(part, str):
        out.append(part)
    else:
        out.append(part.evaluate(env))
return u''.join(map(str, out))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def translate(self): """Compile the expression to a list of Python AST expressions, a set of variable names used, and a set of function names. """
expressions = []
varnames = set()
funcnames = set()
for part in self.parts:
    if isinstance(part, str):
        expressions.append(ex_literal(part))
    else:
        e, v, f = part.translate()
        expressions.extend(e)
        varnames.update(v)
        funcnames.update(f)
return expressions, varnames, funcnames
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_argument_list(self): """Parse a list of arguments starting at ``pos``, returning a list of Expression objects. Does not modify ``parts``. Should leave ``pos`` pointing to a } character or the end of the string. """
# Try to parse a subexpression in a subparser.
expressions = []
while self.pos < len(self.string):
    subparser = Parser(self.string[self.pos:], in_argument=True)
    subparser.parse_expression()
    # Extract and advance past the parsed expression.
    expressions.append(Expression(subparser.parts))
    self.pos += subparser.pos
    if self.pos >= len(self.string) or \
            self.string[self.pos] == GROUP_CLOSE:
        # Argument list terminated by EOF or closing brace.
        break
    # Only other way to terminate an expression is with ,.
    # Continue to the next argument.
    assert self.string[self.pos] == ARG_SEP
    self.pos += 1
return expressions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def substitute(self, values={}, functions={}): """Evaluate the template given the values and functions. """
try:
    res = self.compiled(values, functions)
except Exception:
    # Handle any exceptions thrown by compiled version.
    res = self.interpret(values, functions)
return res
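A minimal, self-contained sketch of the compile-then-fall-back pattern used by substitute() above; FastRenderer and SlowRenderer are hypothetical stand-ins for the compiled and interpreted paths, not part of the original template code.

class FastRenderer:
    def render(self, values):
        # Simulates the compiled path, which may fail on unexpected input.
        return 'fast:' + values['title']

class SlowRenderer:
    def render(self, values):
        # Simulates the interpreted fallback, which tolerates missing keys.
        return 'slow:' + values.get('title', '')

def render(values):
    fast, slow = FastRenderer(), SlowRenderer()
    try:
        return fast.render(values)   # try the compiled version first
    except Exception:
        return slow.render(values)   # fall back to interpretation

print(render({'title': 'song'}))  # fast:song
print(render({}))                 # slow: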
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def translate(self): """Compile the template to a Python function."""
expressions, varnames, funcnames = self.expr.translate()
argnames = []
for varname in varnames:
    argnames.append(VARIABLE_PREFIX + varname)
for funcname in funcnames:
    argnames.append(FUNCTION_PREFIX + funcname)
func = compile_func(
    argnames,
    [ast.Return(ast.List(expressions, ast.Load()))],
)

def wrapper_func(values={}, functions={}):
    args = {}
    for varname in varnames:
        args[VARIABLE_PREFIX + varname] = values[varname]
    for funcname in funcnames:
        args[FUNCTION_PREFIX + funcname] = functions[funcname]
    parts = func(**args)
    return u''.join(parts)

return wrapper_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def uri_from_parts(parts):
    "simple function to merge three parts into an uri"
    uri = "%s://%s%s" % (parts[0], parts[1], parts[2])
    if parts[3]:
        extra = '?' + urlencode(parts[3])
        uri += extra
    return uri
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def as_list(self):
    "return some attributes as a list"
    netloc = ''
    if self.vpath_connector:
        netloc = '((' + self.vpath_connector + '))'
    elif self.authority:
        netloc = self.authority
    else:
        netloc = self.netloc
    return [
        self.scheme,
        netloc,
        self.path,
        self.query,
        '',
    ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_print(value, space = True, tab = False, crlf = False): """ Returns True if is print or False otherwise """
if not isinstance(value, basestring):
    return False
regex = r'\x00-\x08\x0B\x0C\x0E-\x1F\x7F'
if not space:
    regex += r'\x20'
if tab:
    regex += r'\x09'
if crlf:
    regex += r'\x0A\x0D'
return re.match(r'[' + regex + ']', value, re.U) is None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def file_writelines_flush_sync(path, lines): """ Fill file at @path with @lines then flush all buffers (Python and system buffers) """
fp = open(path, 'w')
try:
    fp.writelines(lines)
    flush_sync_file_object(fp)
finally:
    fp.close()
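flush_sync_file_object() is not shown in this row. A common implementation of that kind of helper, given here as an assumption rather than the project's actual code, flushes Python's buffers and then asks the operating system to sync the descriptor:

import os

def flush_sync_file_object(fp):
    # Flush Python-level buffers, then force the OS to write the data to disk.
    fp.flush()
    os.fsync(fp.fileno())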
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def file_w_create_directories(filepath): """ Recursively create some directories if needed so that the directory where @filepath must be written exists, then open it in "w" mode and return the file object. """
dirname = os.path.dirname(filepath)
if dirname and dirname != os.path.curdir and not os.path.isdir(dirname):
    os.makedirs(dirname)
return open(filepath, 'w')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_transaction_status(self, transaction_id): """ Get the transaction current status. :param transaction_id: :return: """
url = "%s%s%s/status" % (self.api_endpoint, constants.TRANSACTION_STATUS_ENDPOINT, transaction_id)
username = self.base.get_username()
password = self.base.get_password(username=username, request_url=url)
response = requests.get(url, auth=HTTPBasicAuth(username=username, password=password))
if response.status_code == 404:
    raise TransactionDoesNotExist('Wrong transaction ID!')
if not self.base.verify_response(response.json()):
    raise SignatureValidationException('Server signature verification has failed')
return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def capture_sale(self, transaction_id, capture_amount, message=None): """ Capture existing preauth. :param transaction_id: :param capture_amount: :param message: :return: status code """
request_data = {
    "amount": self.base.convert_decimal_to_hundreds(capture_amount),
    "currency": self.currency,
    "message": message
}
url = "%s%s%s/capture" % (self.api_endpoint, constants.TRANSACTION_STATUS_ENDPOINT, transaction_id)
username = self.base.get_username()
password = self.base.get_password(username=username, request_url=url)
response = requests.put(url, json=request_data, auth=HTTPBasicAuth(username=username, password=password))
if response.status_code == 404:
    raise TransactionDoesNotExist('Wrong transaction ID!')
if not self.base.verify_response(response.json()):
    raise SignatureValidationException('Server signature verification has failed')
response_json = response.json()
return response_json.get('status')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def caller_folder(): """ Returns the folder where the code of the caller's caller lives """
import inspect
caller_file = inspect.stack()[2][1]
if os.path.exists(caller_file):
    return os.path.abspath(os.path.dirname(caller_file))
else:
    return os.path.abspath(os.getcwd())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_dict(self): """ Response as dict :return: response :rtype: dict """
cache_info = None
if self.cache_info:
    cache_info = self.cache_info.to_dict()
return {
    'cache_info': cache_info,
    'html': self.html,
    'scraped': self.scraped,
    'raw': self.raw
}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_dict(d): """ Response from dict :param d: Dict to load :type d: dict :return: response :rtype: Response """
if d is None:
    return None
return Response(
    d.get('html'),
    CacheInfo.from_dict(d.get('cache_info')),
    d.get('scraped'),
    d.get('raw')
)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_board_image_cv(self, board=None): """Return a cv image of the board or empty board if not provided."""
board = board or base.Board()  # empty board by default
tile_h, tile_w = self._TILE_SHAPE[0:2]
board_shape = tile_h * 8, tile_w * 8, 3
board_image = numpy.zeros(board_shape, dtype=numpy.uint8)
# place each tile on the image
for (row, col), tile in board.positions_with_tile():
    tile_image = self._tile_images[tile._type]
    t, l = row * tile_h, col * tile_w
    b, r = t + tile_h, l + tile_w
    board_image[t:b, l:r] = tile_image
return board_image
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _draw_swap_cv(self, board_image, swap): """Add a white tile border to indicate the swap."""
tile_h, tile_w = self._TILE_SHAPE[0:2]
# get a single bounding box
(row_1, col_1), (row_2, col_2) = swap
t = tile_h * min(row_1, row_2)
b = tile_h * (1 + max(row_1, row_2))
l = tile_w * min(col_1, col_2)
r = tile_w * (1 + max(col_1, col_2))
top_left = (l, t)
bottom_right = (r, b)
data.cv2.rectangle(board_image, top_left, bottom_right,
                   color=(255, 255, 255), thickness=4)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _convert_cv_to_tk(self, image_cv): """Convert an OpenCV image to a tkinter PhotoImage"""
# convert BGR to RGB
image_cv_rgb = data.cv2.cvtColor(image_cv, data.cv2.COLOR_BGR2RGB)
# convert opencv to PIL
image_pil = PIL_Image.fromarray(image_cv_rgb)
# convert PIL to tkinter
return ImageTk.PhotoImage(image_pil)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _scheduled_check_for_summaries(self): """Present the results if they have become available or timed out."""
if self._analysis_process is None:
    return
# handle time out
timed_out = time.time() - self._analyze_start_time > self.time_limit
if timed_out:
    self._handle_results('Analysis timed out but managed\n'
                         ' to get lower turn results.',
                         'Analysis timed out with no results.')
    return
# handle standard completion
try:
    self._analysis_process.join(0.001)
except AssertionError:
    pass  # if some timing issue with closed process, just continue
if not self._analysis_process.is_alive():
    self._handle_results('Completed analysis.',
                         'Unable to find the game on screen.')
    return
# finally, if it's still alive, then come back later
self._base.after(self._POLL_PERIOD_MILLISECONDS,
                 self._scheduled_check_for_summaries)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _next(self): """Get the next summary and present it."""
self.summaries.rotate(-1)
current_summary = self.summaries[0]
self._update_summary(current_summary)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _previous(self): """Get the previous summary and present it."""
self.summaries.rotate()
current_summary = self.summaries[0]
self._update_summary(current_summary)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _update_notification(self, message=None): """Update the message area with blank or a message."""
if message is None:
    message = ''
message_label = self._parts['notification label']
message_label.config(text=message)
self._base.update()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _update_summary(self, summary=None): """Update all parts of the summary or clear when no summary."""
board_image_label = self._parts['board image label']
# get content for update or use blanks when no summary
if summary:
    # make a board image with the swap drawn on it
    # board, action, text = summary.board, summary.action, summary.text
    board_image_cv = self._create_board_image_cv(summary.board)
    self._draw_swap_cv(board_image_cv, summary.action)
    board_image_tk = self._convert_cv_to_tk(board_image_cv)
    text = ''
    if not summary.score is None:
        text += 'Score: {:3.1f}'.format(summary.score)
    if (not summary.mana_drain_leaves is None) and \
            (not summary.total_leaves is None):
        text += ' Mana Drains: {}/{}' \
                ''.format(summary.mana_drain_leaves, summary.total_leaves)
else:
    # clear any stored state image and use the blank
    board_image_tk = board_image_label._blank_image
    text = ''
# update the UI parts with the content
board_image_label._board_image = board_image_tk
board_image_label.config(image=board_image_tk)
# update the summary text
summary_label = self._parts['summary label']
summary_label.config(text=text)
# refresh the UI
self._base.update()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send(self): """Issue the request. Uses httplib2.Http support for handling redirects. Returns an httplib2.Response, which may be augmented by the proc_response() method. Note that the default implementation of proc_response() causes an appropriate exception to be raised if the response code is >= 400. """
# Pre-process the request
try:
    self.procstack.proc_request(self)
except exc.ShortCircuit, e:
    self._debug("Request pre-processing short-circuited")

    # Short-circuited; we have an (already processed) response
    return e.response

self._debug("Sending %r request to %r (body %r, headers %r)",
            self.method, self.url, self.body, self.headers)

# Issue the request
(resp, content) = self.client.request(self.url, self.method,
                                      self.body, self.headers,
                                      self.max_redirects)

# Save the body in the response
resp.body = content

# Do any processing on the response that's desired
try:
    self.proc_response(resp)
except:
    # Process the exception
    result = self.procstack.proc_exception(*sys.exc_info())
    if not result:
        # Not handled, re-raise it
        raise
    else:
        # Handled and we have a fully post-processed response
        return result

# Return the response, post-processing it
return self.procstack.proc_response(resp)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def proc_response(self, resp): """Process response hook. Process non-redirect responses received by the send() method. May augment the response. The default implementation causes an exception to be raised if the response status code is >= 400. """
# Raise exceptions for error responses
if resp.status >= 400:
    e = exc.exception_map.get(resp.status, exc.HTTPException)
    self._debug(" Response was a %d fault, raising %s",
                resp.status, e.__name__)
    raise e(resp)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_arguments(self): """ Add the label argument by default, no need to specify it in args. """
super(LabelCommand, self).add_arguments()
self.parser.add_argument('labels', metavar=self.label, nargs="+")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _comparator(func): """ Decorator for EnumValue rich comparison methods. """
def comparator_wrapper(self, other):
    try:
        # [PATCH] The code was originally the following:
        #
        #   assert self.enumtype == other.enumtype
        #   result = func(self.index, other.index)
        #
        # which first statement causes an issue when serializing/unserializing object
        # from/to memcached using pylibmc, which built a new instance of the
        # enumeration.  Therefore two items are stated different while semantically
        # the same.
        #
        # These two lines are replaced by the following, which relies on the fact that
        # developers are not likely naming two items of distinct enumerations the same
        # way, and less likely to compare two items of two distinct enumerations.
        #
        # (Daniel CAUNE; [email protected]; 2012-05-11)
        result = func(self.key, other.key)
    except (AssertionError, AttributeError):
        result = NotImplemented
    return result

comparator_wrapper.__name__ = func.__name__
comparator_wrapper.__doc__ = getattr(float, func.__name__).__doc__
return comparator_wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extend(self, *keys, **kwargs): """ Return a new enumeration object extended with the specified items. """
this = copy.deepcopy(self)
value_type = kwargs.get('value_type', EnumValue)
if not keys:
    raise EnumEmptyError()
keys = tuple(keys)
values = [None] * len(keys)
for i, key in enumerate(keys):
    value = value_type(this, i, key)
    values[i] = value
    try:
        super(Enum, this).__setattr__(key, value)
    except TypeError:
        raise EnumBadKeyError(key)
this.__dict__['_keys'] = this.__dict__['_keys'] + keys
this.__dict__['_values'] += values
return this
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def is_valid_preview(preview):
    ''' Verifies that the preview is a valid filetype '''
    if not preview:
        return False
    if mimetype(preview) not in [ExportMimeType.PNG, ExportMimeType.PDF]:
        return False
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def options(self, data): """Generate folders to best match metadata. The results will be a single, perfectly matched folder, or the two nearest neighbours of an imperfect match. :param dict data: metadata matching criteria. This method is a generator. It yields :py:class:`turberfield.dialogue.model.SceneScript.Folder` objects. """
if self.mapping_key(data) in self.keys:
    yield next(i for i in self.folders if i.metadata == data)
else:
    index = bisect.bisect_left(self.keys, self.mapping_key(data))
    posns = sorted(set([max(0, index - 1), index]))
    yield from (self.folders[i] for i in posns)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def widget_for(element): """Create a widget for a schema item """
view_type = _view_type_for_element(element)
if view_type is None:
    raise KeyError('No view type for %r' % element)
builder = view_widgets.get(view_type)
if builder is None:
    raise KeyError('No widget type for %r' % view_type)
return builder(element)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def projection(radius=5e-6, sphere_index=1.339, medium_index=1.333, wavelength=550e-9, pixel_size=1e-7, grid_size=(80, 80), center=(39.5, 39.5)): """Optical path difference projection of a dielectric sphere Parameters radius: float Radius of the sphere [m] sphere_index: float Refractive index of the sphere medium_index: float Refractive index of the surrounding medium wavelength: float Vacuum wavelength of the imaging light [m] pixel_size: float Pixel size [m] grid_size: tuple of floats Resulting image size in x and y [px] center: tuple of floats Center position in image coordinates [px] Returns ------- qpi: qpimage.QPImage Quantitative phase data set """
# grid
x = np.arange(grid_size[0]).reshape(-1, 1)
y = np.arange(grid_size[1]).reshape(1, -1)
cx, cy = center
# sphere location
rpx = radius / pixel_size
r = rpx**2 - (x - cx)**2 - (y - cy)**2
# distance
z = np.zeros_like(r)
rvalid = r > 0
z[rvalid] = 2 * np.sqrt(r[rvalid]) * pixel_size
# phase = delta_n * 2PI * z / wavelength
phase = (sphere_index - medium_index) * 2 * np.pi * z / wavelength
meta_data = {"pixel size": pixel_size,
             "wavelength": wavelength,
             "medium index": medium_index,
             "sim center": center,
             "sim radius": radius,
             "sim index": sphere_index,
             "sim model": "projection",
             }
qpi = qpimage.QPImage(data=phase, which_data="phase",
                      meta_data=meta_data)
return qpi
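The geometric core of the projection above can be exercised on its own; this sketch keeps only the thickness computation (2*sqrt(r^2 - dx^2 - dy^2) inside the sphere) and drops the qpimage packaging, so it runs with numpy alone.

import numpy as np

def sphere_thickness(grid_size=(80, 80), center=(39.5, 39.5), radius_px=30):
    # Projected thickness of a sphere of radius radius_px centred at `center`.
    x = np.arange(grid_size[0]).reshape(-1, 1)
    y = np.arange(grid_size[1]).reshape(1, -1)
    cx, cy = center
    r2 = radius_px**2 - (x - cx)**2 - (y - cy)**2
    z = np.zeros_like(r2)
    z[r2 > 0] = 2 * np.sqrt(r2[r2 > 0])
    return z

print(sphere_thickness().max())  # close to the sphere diameter in pixels (60)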
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_first_builder_window(builder): """Get the first toplevel widget in a gtk.Builder hierarchy. This is mostly used for guessing purposes, and an explicit naming is always going to be a better situation. """
for obj in builder.get_objects():
    if isinstance(obj, gtk.Window):
        # first window
        return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_builder_toplevel(self, builder): """Get the toplevel widget from a gtk.Builder file. The main view implementation first searches for the widget named as self.toplevel_name (which defaults to "main". If this is missing, or not a gtk.Window, the first toplevel window found in the gtk.Builder is used. """
toplevel = builder.get_object(self.toplevel_name)
if not gobject.type_is_a(toplevel, gtk.Window):
    toplevel = None
if toplevel is None:
    toplevel = get_first_builder_window(builder)
return toplevel
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_bytes(s, encoding=None, errors='strict'): """Convert string to bytes."""
encoding = encoding or 'utf-8'
if is_unicode(s):
    return s.encode(encoding, errors)
elif is_strlike(s):
    return s
else:
    if six.PY2:
        return str(s)
    else:
        return str(s).encode(encoding, errors)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unicode_left(s, width): """Cut unicode string from left to fit a given width."""
i = 0
j = 0
for ch in s:
    j += __unicode_width_mapping[east_asian_width(ch)]
    if width < j:
        break
    i += 1
return s[:i]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unicode_right(s, width): """Cut unicode string from right to fit a given width."""
i = len(s)
j = 0
for ch in reversed(s):
    j += __unicode_width_mapping[east_asian_width(ch)]
    if width < j:
        break
    i -= 1
return s[i:]
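Both width helpers rely on unicodedata.east_asian_width() and a width table; a self-contained sketch of that idea (the exact mapping used by __unicode_width_mapping is an assumption) makes the behaviour easy to check:

from unicodedata import east_asian_width

# Assumed width table: wide ('W') and fullwidth ('F') characters count as 2 cells.
_WIDTHS = {'Na': 1, 'N': 1, 'H': 1, 'A': 1, 'W': 2, 'F': 2}

def display_width(s):
    return sum(_WIDTHS[east_asian_width(ch)] for ch in s)

print(display_width('abc'))    # 3
print(display_width('あいう'))  # 6, each character is two cells wide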
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def schema_factory(schema_name, **schema_nodes): """Schema Validation class factory. Args: schema_name(str): The namespace of the schema. schema_nodes(dict): The attr_names / SchemaNodes mapping of schema. Returns: A Schema class. Raises: SchemaError, for bad attribute setting initialization. Examples: OrderedDict([('lat', 34.0), ('lng', 29.01)]) OrderedDict([('lat', 34.0), ('lng', 0.0)]) <RegionSchema instance, attributes:['country_code', 'keywords', 'location', 'name']> [] Traceback (most recent call last): schema_factory.errors.SchemaError: Missing Required Attributes: {'country_code'} Traceback (most recent call last): schema_factory.errors.SchemaError: Invalid Attributes RegionSchema for {'foo'}. """
schema_dict = dict()
schema_dict.update(schema_nodes)

def cls_repr(self):  # pragma: no cover
    return "<{} instance at: 0x{:x}>".format(self.__class__, id(self))

def cls_str(self):  # pragma: no cover
    return "<{} instance, attributes:{}>".format(
        self.__class__.__name__,
        self.schema_nodes
    )

def cls_init(self, **kwargs):
    kwargs_set = set(kwargs)
    if not self.required.issubset(kwargs_set):
        raise SchemaError('Missing Required Attributes: {}'.format(
            self.required.difference(kwargs_set)
        ))
    if not set(kwargs).issubset(set(self.schema_nodes)):
        raise SchemaError('Invalid Attributes {} for {}.'.format(
            self.__class__.__name__,
            set(kwargs).difference(set(self.schema_nodes))
        ))
    for attr_name in kwargs:
        setattr(self, attr_name, kwargs[attr_name])

def to_dict(self):
    return OrderedDict([(k, getattr(self, k)) for k in self.schema_nodes])

schema_dict['to_dict'] = property(to_dict)
schema_dict['__init__'] = cls_init
schema_dict['__repr__'] = cls_repr
schema_dict['__str__'] = cls_str

return SchemaType('{}Schema'.format(schema_name.title()), (), schema_dict)
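A stripped-down, runnable illustration of the same type()-based factory pattern; plain `type` stands in for SchemaType and all validation is omitted, so this is only a sketch of the mechanism, not the library's behaviour.

from collections import OrderedDict

def simple_factory(name, **fields):
    def cls_init(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def to_dict(self):
        return OrderedDict((k, getattr(self, k, None)) for k in fields)

    return type('{}Schema'.format(name.title()), (),
                {'__init__': cls_init, 'to_dict': property(to_dict)})

PointSchema = simple_factory('point', lat=None, lng=None)
print(PointSchema(lat=34.0, lng=29.01).to_dict)
# OrderedDict([('lat', 34.0), ('lng', 29.01)])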
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def serialize(self, *fields): """Serialize Nodes and attributes """
if fields:
    if not set(fields).issubset(self.data_nodes):
        raise SchemaError('Invalid field for serialization: {}'.format(
            set(fields).difference(self.data_nodes)))
    return OrderedDict([(k, getattr(self, k)) for k in fields])
return OrderedDict([(k, getattr(self, k)) for k in self.data_nodes])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_logger(debug, color): """Configure the logger."""
if debug:
    log_level = logging.DEBUG
else:
    log_level = logging.INFO
logger = logging.getLogger('exifread')
stream = Handler(log_level, debug, color)
logger.addHandler(stream)
logger.setLevel(log_level)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sanitize(configuration, error_fn): """ Run all availalbe sanitizers across a configuration. Arguments: configuration - a full project configuration error_fn - A function to call if a sanitizer check fails. The function takes a single argument: a description of the problem; provide specifics if possible, including the componnet, the part of the configuration that presents an issue, etc.. """
for name, sanitize_fn in _SANITIZERS.items():
    sanitize_fn(configuration, lambda warning, n=name: error_fn(n, warning))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def s2n(self, offset, length, signed=0): """ Convert slice to integer, based on sign and endian flags. Usually this offset is assumed to be relative to the beginning of the start of the EXIF information. For some cameras that use relative tags, this offset may be relative to some other starting point. """
self.file.seek(self.offset + offset)
sliced = self.file.read(length)
if self.endian == 'I':
    val = s2n_intel(sliced)
else:
    val = s2n_motorola(sliced)
# Sign extension?
if signed:
    msb = 1 << (8 * length - 1)
    if val & msb:
        val -= (msb << 1)
return val
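The sign-extension step at the end of s2n() can be checked in isolation; this standalone helper repeats just that arithmetic:

def sign_extend(val, length):
    # Interpret an unsigned `length`-byte value as signed two's complement.
    msb = 1 << (8 * length - 1)
    if val & msb:
        val -= (msb << 1)
    return val

print(sign_extend(0x7F, 1))    # 127
print(sign_extend(0xFF, 1))    # -1
print(sign_extend(0xFFFE, 2))  # -2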
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def n2s(self, offset, length): """Convert offset to string."""
s = ''
for dummy in range(length):
    if self.endian == 'I':
        s += chr(offset & 0xFF)
    else:
        s = chr(offset & 0xFF) + s
    offset = offset >> 8
return s
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _next_ifd(self, ifd): """Return the pointer to next IFD."""
entries = self.s2n(ifd, 2)
next_ifd = self.s2n(ifd + 2 + 12 * entries, 4)
if next_ifd == ifd:
    return 0
else:
    return next_ifd
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_ifd(self): """Return the list of IFDs in the header."""
i = self._first_ifd()
ifds = []
while i:
    ifds.append(i)
    i = self._next_ifd(i)
return ifds
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_tiff_thumbnail(self, thumb_ifd): """ Extract uncompressed TIFF thumbnail. Take advantage of the pre-existing layout in the thumbnail IFD as much as possible """
thumb = self.tags.get('Thumbnail Compression')
if not thumb or thumb.printable != 'Uncompressed TIFF':
    return

entries = self.s2n(thumb_ifd, 2)
# this is header plus offset to IFD ...
if self.endian == 'M':
    tiff = 'MM\x00*\x00\x00\x00\x08'
else:
    tiff = 'II*\x00\x08\x00\x00\x00'
# ... plus thumbnail IFD data plus a null "next IFD" pointer
self.file.seek(self.offset + thumb_ifd)
tiff += self.file.read(entries * 12 + 2) + '\x00\x00\x00\x00'

# fix up large value offset pointers into data area
for i in range(entries):
    entry = thumb_ifd + 2 + 12 * i
    tag = self.s2n(entry, 2)
    field_type = self.s2n(entry + 2, 2)
    type_length = FIELD_TYPES[field_type][0]
    count = self.s2n(entry + 4, 4)
    old_offset = self.s2n(entry + 8, 4)
    # start of the 4-byte pointer area in entry
    ptr = i * 12 + 18
    # remember strip offsets location
    if tag == 0x0111:
        strip_off = ptr
        strip_len = count * type_length
    # is it in the data area?
    if count * type_length > 4:
        # update offset pointer (nasty "strings are immutable" crap)
        # should be able to say "tiff[ptr:ptr+4]=newoff"
        newoff = len(tiff)
        tiff = tiff[:ptr] + self.n2s(newoff, 4) + tiff[ptr + 4:]
        # remember strip offsets location
        if tag == 0x0111:
            strip_off = newoff
            strip_len = 4
        # get original data and store it
        self.file.seek(self.offset + old_offset)
        tiff += self.file.read(count * type_length)

# add pixel strips and update strip offset info
old_offsets = self.tags['Thumbnail StripOffsets'].values
old_counts = self.tags['Thumbnail StripByteCounts'].values
for i in range(len(old_offsets)):
    # update offset pointer (more nasty "strings are immutable" crap)
    offset = self.n2s(len(tiff), strip_len)
    tiff = tiff[:strip_off] + offset + tiff[strip_off + strip_len:]
    strip_off += strip_len
    # add pixel strip to end
    self.file.seek(self.offset + old_offsets[i])
    tiff += self.file.read(old_counts[i])

self.tags['TIFFThumbnail'] = tiff
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_jpeg_thumbnail(self): """ Extract JPEG thumbnail. (Thankfully the JPEG data is stored as a unit.) """
thumb_offset = self.tags.get('Thumbnail JPEGInterchangeFormat')
if thumb_offset:
    self.file.seek(self.offset + thumb_offset.values[0])
    size = self.tags['Thumbnail JPEGInterchangeFormatLength'].values[0]
    self.tags['JPEGThumbnail'] = self.file.read(size)

# Sometimes in a TIFF file, a JPEG thumbnail is hidden in the MakerNote
# since it's not allowed in a uncompressed TIFF IFD
if 'JPEGThumbnail' not in self.tags:
    thumb_offset = self.tags.get('MakerNote JPEGThumbnail')
    if thumb_offset:
        self.file.seek(self.offset + thumb_offset.values[0])
        self.tags['JPEGThumbnail'] = self.file.read(thumb_offset.field_length)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _canon_decode_tag(self, value, mn_tags): """ Decode Canon MakerNote tag based on offset within tag. See http://www.burren.cx/david/canon.html by David Burren """
for i in range(1, len(value)):
    tag = mn_tags.get(i, ('Unknown', ))
    name = tag[0]
    if len(tag) > 1:
        val = tag[1].get(value[i], 'Unknown')
    else:
        val = value[i]
    try:
        logger.debug(" %s %s %s", i, name, hex(value[i]))
    except TypeError:
        logger.debug(" %s %s %s", i, name, value[i])
    # it's not a real IFD Tag but we fake one to make everybody
    # happy. this will have a "proprietary" type
    self.tags['MakerNote ' + name] = IfdTag(str(val), None, 0, None,
                                            None, None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _canon_decode_camera_info(self, camera_info_tag): """ Decode the variable length encoded camera info section. """
model = self.tags.get('Image Model', None)
if not model:
    return
model = str(model.values)

camera_info_tags = None
for (model_name_re, tag_desc) in makernote.canon.CAMERA_INFO_MODEL_MAP.items():
    if re.search(model_name_re, model):
        camera_info_tags = tag_desc
        break
else:
    return

# We are assuming here that these are all unsigned bytes (Byte or
# Unknown)
if camera_info_tag.field_type not in (1, 7):
    return
camera_info = struct.pack('<%dB' % len(camera_info_tag.values),
                          *camera_info_tag.values)

# Look for each data value and decode it appropriately.
for offset, tag in camera_info_tags.items():
    tag_format = tag[1]
    tag_size = struct.calcsize(tag_format)
    if len(camera_info) < offset + tag_size:
        continue
    packed_tag_value = camera_info[offset:offset + tag_size]
    tag_value = struct.unpack(tag_format, packed_tag_value)[0]

    tag_name = tag[0]
    if len(tag) > 2:
        if callable(tag[2]):
            tag_value = tag[2](tag_value)
        else:
            tag_value = tag[2].get(tag_value, tag_value)
    logger.debug(" %s %s", tag_name, tag_value)

    self.tags['MakerNote ' + tag_name] = IfdTag(str(tag_value), None,
                                                0, None, None, None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def on_widget__configure_event(self, widget, event):
    '''
    Called when size of drawing area changes.
    '''
    if event.x < 0 and event.y < 0:
        # Widget has not been allocated a size yet, so do nothing.
        return
    self.resize(event.width, event.height)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def render_label(self, cairo_context, shape_id, text=None, label_scale=.9):
    '''
    Draw label on specified shape.

    Parameters
    ----------
    cairo_context : cairo.Context
        Cairo context to draw text width.  Can be preconfigured, for example,
        to set font style, etc.
    shape_id : str
        Shape identifier.
    text : str, optional
        Label text.  If not specified, shape identifier is used.
    label_scale : float, optional
        Fraction of limiting dimension of shape bounding box to scale text to.
    '''
    text = shape_id if text is None else text
    shape = self.canvas.df_bounding_shapes.ix[shape_id]
    shape_center = self.canvas.df_shape_centers.ix[shape_id]
    font_size, text_shape = \
        aspect_fit_font_size(text, shape * label_scale,
                             cairo_context=cairo_context)
    cairo_context.set_font_size(font_size)
    cairo_context.move_to(shape_center[0] - .5 * text_shape.width,
                          shape_center[1] + .5 * text_shape.height)
    cairo_context.show_text(text)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_ui(self): """Create the user interface create_ui is a method called during the Delegate's initialisation process, to create, add to, or modify any UI created by GtkBuilder files. """
self.entry = gtk.Entry()
self.widget.add(self.entry)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_file_hash(file_path, block_size=1024, hasher=None): """ Generate hash for given file :param file_path: Path to file :type file_path: str :param block_size: Size of block to be read at once (default: 1024) :type block_size: int :param hasher: Use specific hasher, defaults to md5 (default: None) :type hasher: _hashlib.HASH :return: Hash of file :rtype: str """
if hasher is None:
    hasher = hashlib.md5()
with open(file_path, 'rb') as f:
    while True:
        buffer = f.read(block_size)
        if len(buffer) <= 0:
            break
        hasher.update(buffer)
return hasher.hexdigest()
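Possible usage of the helper above, assuming it is importable in the current scope; the temporary file keeps the example self-contained.

import hashlib
import os
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'hello world\n')
    path = tmp.name

print(get_file_hash(path))                           # md5 hex digest by default
print(get_file_hash(path, hasher=hashlib.sha256()))  # any hashlib hasher works
os.remove(path)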
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resource_get_list(self): """ Get list of this plugins resources and a hash to check for file changes (It is recommended to keep a in memory representation of this struct and not to generate it upon each request) :return: List of supported resources and hashes :rtype: list[(unicode, unicode)] """
if not self._resources:
    return self.resource_update_list()
res = []
with self._resource_lock:
    for key in self._resources:
        res.append((key, self._resources[key]['hash']))
return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resource_update_list(self, reset=False): """ Update internal struct of resource, hash list and get diff (Warning: Resource names have to be unique!!) :param reset: Should resources be rebuild from scratch (default: False) :type reset: bool :return: List of resources and hashes that changed :rtype: list[(unicode, unicode)] """
if not self._resource_path:
    raise PluginException("No resource path set")
if not os.path.isdir(self._resource_path):
    raise PluginException(
        u"Resource path directory '{}' not found".format(
            self._resource_path
        )
    )

res = []
with self._resource_lock:
    if reset:
        self._resources = {}
    old = dict(self._resources)

    for dirname, dirnames, filenames in os.walk(self._resource_path):
        for file_name in filenames:
            file_ext = os.path.splitext(file_name)[1].lower()[1:]
            if file_ext not in self._resource_file_types:
                self.debug(u"Skipping '{}'".format(file_name))
                continue
            file_path = os.path.join(dirname, file_name)
            try:
                file_hash = get_file_hash(file_path)
            except:
                self.exception(
                    u"Failed to hash '{}'".format(file_path)
                )
                continue
            self._resources[file_name] = {
                'name': file_name,
                'path': file_path,
                'hash': file_hash,
                'checked': datetime.datetime.utcnow()
            }

    # generate diff
    for key in self._resources:
        resource = self._resources[key]
        if key not in old or old[key]['hash'] != resource['hash']:
            # new file or hash changed
            res.append((key, resource['hash']))
return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resource_get(self, resource_name): """ Return resource info :param resource_name: Resource name as returned by resource_get_list() :type resource_name: str :return: Resource information (empty if not found) name: Resource name hash: Resource hash path: Path to resource checked: Last time information was updated :rtype: dict[str, str] """
try:
    with self._resource_lock:
        res = self._resources[resource_name]
except KeyError:
    return {}
return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def special_mode(v): """decode Olympus SpecialMode tag in MakerNote"""
mode1 = {
    0: 'Normal',
    1: 'Unknown',
    2: 'Fast',
    3: 'Panorama',
}
mode2 = {
    0: 'Non-panoramic',
    1: 'Left to right',
    2: 'Right to left',
    3: 'Bottom to top',
    4: 'Top to bottom',
}
if not v or (v[0] not in mode1 or v[2] not in mode2):
    return v
return '%s - sequence %d - %s' % (mode1[v[0]], v[1], mode2[v[2]])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _safe_call(obj, methname, *args, **kwargs): """ Safely calls the method with the given methname on the given object. Remaining positional and keyword arguments are passed to the method. The return value is None, if the method is not available, or the return value of the method. """
meth = getattr(obj, methname, None)
if meth is None or not callable(meth):
    return
return meth(*args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def proc_response(self, resp, startidx=None): """ Post-process a response through all processors in the stack, in reverse order. For convenience, returns the response passed to the method. The startidx argument is an internal interface only used by the proc_request() and proc_exception() methods to process a response through a subset of response processors. """
# If we're empty, bail out early
if not self:
    return resp

# Select appropriate starting index
if startidx is None:
    startidx = len(self)

for idx in range(startidx, -1, -1):
    _safe_call(self[idx], 'proc_response', resp)

# Return the response we were passed
return resp
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_config(): """Find a build cache somewhere in a parent directory."""
previous = ""
current = os.getcwd()
while previous != current:
    check_path = os.path.join(current, "build.cache")
    if os.path.isfile(check_path):
        return check_path
    else:
        previous = current
        current = os.path.dirname(current)
raise Exception("Can't find build cache")
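The same walk-up-the-directory-tree idea, parameterised on the file name instead of the hard-coded "build.cache"; this variant is a sketch, not part of the original project.

import os

def find_up(filename, start=None):
    previous, current = '', os.path.abspath(start or os.getcwd())
    while previous != current:
        candidate = os.path.join(current, filename)
        if os.path.isfile(candidate):
            return candidate
        previous, current = current, os.path.dirname(current)
    return None  # reached the filesystem root without finding the file

print(find_up('build.cache'))  # None unless some ancestor directory has one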
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_cache(force=False, cache_file=None): """ Load a build cache, updating it if necessary. A cache is considered outdated if any of its inputs have changed. Arguments force -- Consider a cache outdated regardless of whether its inputs have been modified. """
if not cache_file:
    cache_file = find_config()
cache_config = devpipeline_configure.parser.read_config(cache_file)
cache = devpipeline_configure.cache._CachedConfig(cache_config, cache_file)
if force or _is_outdated(cache_file, cache):
    cache = devpipeline_configure.config.process_config(
        cache_config.get("DEFAULT", "dp.build_config"),
        os.path.dirname(cache_file),
        "build.cache",
        profiles=cache_config.get("DEFAULT", "dp.profile_name", fallback=None),
        overrides=cache_config.get("DEFAULT", "dp.overrides", fallback=None),
    )
    devpipeline_core.sanitizer.sanitize(
        cache, lambda n, m: print("{} [{}]".format(m, n))
    )
return cache
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def app(environ, start_response): """Function called by the WSGI server."""
r = HttpRequestHandler(environ, start_response, Router).dispatch()
return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_job(job_name, **kwargs): """ Decorator to create a Job from a function. Give a job name and add extra fields to the job. @make_job("ExecuteDecJob", command=mongoengine.StringField(required=True), output=mongoengine.StringField(default=None)) def execute(job: Job): result = subprocess.run(job.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) job.output = result.stdout.decode('utf-8') + " " + result.stderr.decode('utf-8') """
def wraps(func):
    kwargs['process'] = func
    job = type(job_name, (Job,), kwargs)
    globals()[job_name] = job
    return job
return wraps
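A self-contained illustration of the same decorator pattern: build a class with type() from the decorated function and register it by name. PrintJob and the plain `object` base are stand-ins here, not the real mongoengine-backed Job model.

def make_task(task_name, **attrs):
    def wraps(func):
        attrs['process'] = func
        task = type(task_name, (object,), attrs)
        globals()[task_name] = task
        return task
    return wraps

@make_task('PrintJob', greeting='hello')
def run(job):
    print(job.greeting)

job = PrintJob()   # the generated class is now available by name
job.process()      # prints 'hello'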
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_safe(str_or_bytes, encoding='utf-8', errors='ignore', output=sys.stdout, newline='\n'): """ Print unicode or bytes universally. :param str_or_bytes: string :param encoding: encoding :param output: output file handler :param errors: error handling scheme. Refer to codecs.register_error. """
writer = output.buffer if hasattr(output, 'buffer') else output
# When the input type is bytes, verify it can be decoded with the specified encoding.
decoded = str_or_bytes if is_unicode(str_or_bytes) else to_unicode(str_or_bytes, encoding, errors)
encoded = to_bytes(decoded, encoding, errors)
writer.write(encoded + to_bytes(newline, encoding, errors))
output.flush()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_verified(self, msg_info): """ expects "msg_info" to have the field 'files_containers_id' This call already executes "update_last_checked_time" so it doesn't need to be called separately """
assert hasattr(msg_info, 'files_containers_id')
with self._session_resource as session:
    session.execute(
        update(FilesDestinations)
        .where(FilesDestinations.file_containers_id == msg_info.files_containers_id)
        .values(verification_info=msg_info.msg_id)
    )
self.update_last_checked_time(msg_info)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_uploaded_container(self, msg_info): """ returns 0 if it doesn't correspond to an uploaded container -1 if it corresponds to an uploaded container but it is corrupted 1 if it corresponds to an uploaded container and is OK """
results = {
    'BAD': -1,
    'NOT_FCB': 0,
    'OK': 1
}
for part in msg_info.msg_body.walk():
    if part.is_multipart():
        continue
    """
    if part.get('Content-Disposition') is None:
        print("no content dispo")
        continue
    """
    if part.get_content_type() == 'text/plain':
        if self._is_content_from_fcb(part.get_payload()):
            self._log.debug("Body detected as FCB: %s", part.get_payload())
        else:
            self._log.debug("Body doesn't match FCB: %s", part.get_payload())
        continue

    attachment_name = self._get_attachment_name(part)
    if not attachment_name:
        self._log.debug("Couldn't get attachment name. Will ignore the part.")
        continue

    files_container = self._get_files_container_by_name(attachment_name)
    if files_container:
        sha1_in_db = files_container.sha1
        msg_info.files_containers_id = files_container.id
        tmp_file = FileInfo(os.path.join(tempfile.gettempdir(), "downloaded.tmp"))
        fp = open(tmp_file.path, 'wb')
        fp.write(part.get_payload(decode=1))
        fp.flush()
        fp.close()
        if tmp_file.sha1 == sha1_in_db:
            self._log.info("File container '%s' verified!", attachment_name)
            result = results['OK']
        else:
            self._log.error("File container '%s' doesn't match the sha1 sum. "
                            "Expected '%s' but got '%s'",
                            attachment_name, sha1_in_db, tmp_file.sha1)
            result = results['BAD']
        os.remove(tmp_file.path)
        return result
    else:
        self._log.debug("Attached file '%s' not found in DB. Will ignore this mail.",
                        attachment_name)
return results['NOT_FCB']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def abort(self, count=2, timeout=60):
    '''
    Send an abort sequence using CAN bytes.
    '''
    for counter in xrange(0, count):
        self.putc(CAN, timeout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def send(self, stream, retry=16, timeout=60, quiet=0, callback=None):
    '''
    Send a stream via the XMODEM protocol.

        >>> stream = file('/etc/issue', 'rb')
        >>> print modem.send(stream)
        True

    Returns ``True`` upon succesful transmission or ``False`` in case of
    failure.

    :param stream: The stream object to send data from.
    :type stream: stream (file, etc.)
    :param retry: The maximum number of times to try to resend a failed
                  packet before failing.
    :type retry: int
    :param timeout: The number of seconds to wait for a response before
                    timing out.
    :type timeout: int
    :param quiet: If 0, it prints info to stderr.  If 1, it does not print
                  any info.
    :type quiet: int
    :param callback: Reference to a callback function that has the
                     following signature.  This is useful for getting
                     status updates while a xmodem transfer is underway.
                     Expected callback signature:
                     def callback(total_packets, success_count, error_count)
    :type callback: callable
    '''

    # initialize protocol
    try:
        packet_size = dict(
            xmodem=128,
            xmodem1k=1024,
        )[self.mode]
    except AttributeError:
        raise ValueError("An invalid mode was supplied")

    error_count = 0
    crc_mode = 0
    cancel = 0
    while True:
        char = self.getc(1)
        if char:
            if char == NAK:
                crc_mode = 0
                break
            elif char == CRC:
                crc_mode = 1
                break
            elif char == CAN:
                if not quiet:
                    print >> sys.stderr, 'received CAN'
                if cancel:
                    return False
                else:
                    cancel = 1
            else:
                log.error('send ERROR expected NAK/CRC, got %s' % \
                    (ord(char),))

        error_count += 1
        if error_count >= retry:
            self.abort(timeout=timeout)
            return False

    # send data
    error_count = 0
    success_count = 0
    total_packets = 0
    sequence = 1
    while True:
        data = stream.read(packet_size)
        if not data:
            log.info('sending EOT')
            # end of stream
            break
        total_packets += 1
        data = data.ljust(packet_size, self.pad)
        if crc_mode:
            crc = self.calc_crc(data)
        else:
            crc = self.calc_checksum(data)

        # emit packet
        while True:
            if packet_size == 128:
                self.putc(SOH)
            else:  # packet_size == 1024
                self.putc(STX)
            self.putc(chr(sequence))
            self.putc(chr(0xff - sequence))
            self.putc(data)
            if crc_mode:
                self.putc(chr(crc >> 8))
                self.putc(chr(crc & 0xff))
            else:
                self.putc(chr(crc))

            char = self.getc(1, timeout)
            if char == ACK:
                success_count += 1
                if callable(callback):
                    callback(total_packets, success_count, error_count)
                break
            if char == NAK:
                error_count += 1
                if callable(callback):
                    callback(total_packets, success_count, error_count)
                if error_count >= retry:
                    # excessive amounts of retransmissions requested,
                    # abort transfer
                    self.abort(timeout=timeout)
                    log.warning('excessive NAKs, transfer aborted')
                    return False
                # return to loop and resend
                continue

            # protocol error
            self.abort(timeout=timeout)
            log.error('protocol error')
            return False

        # keep track of sequence
        sequence = (sequence + 1) % 0x100

    while True:
        # end of transmission
        self.putc(EOT)

        # An ACK should be returned
        char = self.getc(1, timeout)
        if char == ACK:
            break
        else:
            error_count += 1
            if error_count >= retry:
                self.abort(timeout=timeout)
                log.warning('EOT was not ACKd, transfer aborted')
                return False

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def recv(self, stream, crc_mode=1, retry=16, timeout=60, delay=1, quiet=0):
    '''
    Receive a stream via the XMODEM protocol.

        >>> stream = file('/etc/issue', 'wb')
        >>> print modem.recv(stream)
        2342

    Returns the number of bytes received on success or ``None`` in case of
    failure.
    '''

    # initiate protocol
    error_count = 0
    char = 0
    cancel = 0
    while True:
        # first try CRC mode, if this fails,
        # fall back to checksum mode
        if error_count >= retry:
            self.abort(timeout=timeout)
            return None
        elif crc_mode and error_count < (retry / 2):
            if not self.putc(CRC):
                time.sleep(delay)
                error_count += 1
        else:
            crc_mode = 0
            if not self.putc(NAK):
                time.sleep(delay)
                error_count += 1

        char = self.getc(1, timeout)
        if not char:
            error_count += 1
            continue
        elif char == SOH:
            #crc_mode = 0
            break
        elif char == STX:
            break
        elif char == CAN:
            if cancel:
                return None
            else:
                cancel = 1
        else:
            error_count += 1

    # read data
    error_count = 0
    income_size = 0
    packet_size = 128
    sequence = 1
    cancel = 0
    while True:
        while True:
            if char == SOH:
                packet_size = 128
                break
            elif char == STX:
                packet_size = 1024
                break
            elif char == EOT:
                # We received an EOT, so send an ACK and return the received
                # data length
                self.putc(ACK)
                return income_size
            elif char == CAN:
                # cancel at two consecutive cancels
                if cancel:
                    return None
                else:
                    cancel = 1
            else:
                if not quiet:
                    print >> sys.stderr, \
                        'recv ERROR expected SOH/EOT, got', ord(char)
                error_count += 1
                if error_count >= retry:
                    self.abort()
                    return None

        # read sequence
        error_count = 0
        cancel = 0
        seq1 = ord(self.getc(1))
        seq2 = 0xff - ord(self.getc(1))
        if seq1 == sequence and seq2 == sequence:
            # sequence is ok, read packet
            # packet_size + checksum
            data = self.getc(packet_size + 1 + crc_mode, timeout)
            if crc_mode:
                csum = (ord(data[-2]) << 8) + ord(data[-1])
                data = data[:-2]
                log.debug('CRC (%04x <> %04x)' % \
                    (csum, self.calc_crc(data)))
                valid = csum == self.calc_crc(data)
            else:
                csum = data[-1]
                data = data[:-1]
                log.debug('checksum (checksum(%02x <> %02x)' % \
                    (ord(csum), self.calc_checksum(data)))
                valid = ord(csum) == self.calc_checksum(data)

            # valid data, append chunk
            if valid:
                income_size += len(data)
                stream.write(data)
                self.putc(ACK)
                sequence = (sequence + 1) % 0x100
                char = self.getc(1, timeout)
                continue
        else:
            # consume data
            self.getc(packet_size + 1 + crc_mode)
            self.debug('expecting sequence %d, got %d/%d' % \
                (sequence, seq1, seq2))

        # something went wrong, request retransmission
        self.putc(NAK)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def calc_crc(self, data, crc=0):
    '''
    Calculate the Cyclic Redundancy Check for a given block of data, can
    also be used to update a CRC.

        >>> crc = modem.calc_crc('hello')
        >>> crc = modem.calc_crc('world', crc)
        >>> hex(crc)
        '0xd5e3'
    '''
    for char in data:
        crc = (crc << 8) ^ self.crctable[((crc >> 8) ^ ord(char)) & 0xff]
    return crc & 0xffff
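For checking results without the precomputed self.crctable, a table-free CRC-16/XMODEM (polynomial 0x1021) should produce the same values; per the docstring above, chaining 'hello' and then 'world' is expected to yield 0xd5e3.

def crc16_xmodem(data, crc=0):
    # Bitwise equivalent of the table-driven update used in calc_crc().
    for byte in data:
        crc ^= byte << 8
        for _ in range(8):
            crc = ((crc << 1) ^ 0x1021) if crc & 0x8000 else (crc << 1)
        crc &= 0xFFFF
    return crc

print(hex(crc16_xmodem(b'world', crc16_xmodem(b'hello'))))  # expected: 0xd5e3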
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def combobox_set_model_from_list(cb, items): """Setup a ComboBox or ComboBoxEntry based on a list of strings."""
cb.clear()
model = gtk.ListStore(str)
for i in items:
    model.append([i])
cb.set_model(model)
if type(cb) == gtk.ComboBoxEntry:
    cb.set_text_column(0)
elif type(cb) == gtk.ComboBox:
    cell = gtk.CellRendererText()
    cb.pack_start(cell, True)
    cb.add_attribute(cell, 'text', 0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def LastOf(*subcons): """ Create an adapter which uses only the last construct. If first argument is a string it will be the name. """
name = "seq"
if isinstance(subcons[0], six.string_types):
    name = subcons[0]
    subcons = subcons[1:]
return IndexingAdapter(Sequence(name, *subcons), -1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_scm(current_target): """ Create an Scm for a component. Arguments component - The component being operated on. """
# pylint: disable=protected-access tool_key = devpipeline_core.toolsupport.choose_tool_key( current_target, devpipeline_scm._SCM_TOOL_KEYS ) return devpipeline_core.toolsupport.tool_builder( current_target.config, tool_key, devpipeline_scm.SCMS, current_target )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def checkout_task(current_target): """ Update or create a local checkout.

    Arguments
    current_target - The target to operate on.
    """
try: scm = _make_scm(current_target) src_dir = current_target.config.get("dp.src_dir") shared_dir = current_target.config.get("dp.src_dir_shared") scm.checkout(repo_dir=src_dir, shared_dir=shared_dir) scm.update(repo_dir=src_dir) except devpipeline_core.toolsupport.MissingToolKey as mtk: current_target.executor.warning(mtk)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def proc_response(self, resp): """Process JSON data found in the response."""
# Try to interpret any JSON try: resp.obj = json.loads(resp.body) self._debug(" Received entity: %r", resp.obj) except ValueError: resp.obj = None self._debug(" No received entity; body %r", resp.body) # Now, call superclass method for error handling super(JSONRequest, self).proc_response(resp)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _attach_obj(self, req, obj): """Helper method to attach obj to req as JSON data."""
# Attach the object to the request json.dump(obj, req) # Also set the content-type header req['content-type'] = self._content_type
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_module_egg_path(module_path): """ Find the path of the deployed egg package that may contain the specified module path. @param module_path: the path of a Python module. @return: the absolute path of the deployed egg package that contains the specified module path, or ``None`` if no deployed egg package contains this module path. """
if module_path.find('.egg') == -1:
    return None

_module_absolute_path = os.path.abspath(module_path)

# Collect the egg directories on ``sys.path`` that contain this module.
egg_paths = [egg_path for egg_path in sys.path
             if REGEX_EGG_PACKAGE_PATH.match(egg_path)
             and _module_absolute_path.startswith(egg_path)]

# Return the path of the containing egg package, as described above.
return egg_paths[0] if egg_paths else None
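The module-level constant `REGEX_EGG_PACKAGE_PATH` is referenced but not defined in this excerpt; a plausible definition (an assumption) matches `sys.path` entries ending in `.egg`:

import re

# Hypothetical definition of the constant used above.
REGEX_EGG_PACKAGE_PATH = re.compile(r'^.*\.egg$')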
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tmpl_shorten(text, max_size=32): """Shorten the given text to at most ``max_size`` characters. * synopsis: ``%shorten{text}`` or ``%shorten{text,max_size}`` * example: ``%shorten{$title,32}`` * description: Shorten “text” on word boundaries. """
import re
import textwrap

max_size = int(max_size)
if len(text) <= max_size:
    return text
# Keep only the first wrapped line, then strip any trailing punctuation.
text = textwrap.wrap(text, max_size)[0]
text = re.sub(r'\W+$', '', text)
return text.strip()
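For example:

print(tmpl_shorten('the quick brown fox jumps over the lazy dog', 20))
# -> 'the quick brown fox'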
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tmpl_time(text, fmt, cur_fmt): """Format a time value using `strftime`. * synopsis: ``%time{date_time,format,curformat}`` * description: Return the date and time in any format accepted by \ strftime. For example, to get the year some music was added to \ your library, use %time{$added,%Y}. """
return time.strftime(fmt, time.strptime(text, cur_fmt))
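For example, reducing a timestamp to its year:

print(tmpl_time('2019-03-01 12:30:00', '%Y', '%Y-%m-%d %H:%M:%S'))
# -> '2019'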
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def download_binaries(package_dir=False): """Download all binaries for the current platform

    Parameters
    ----------
    package_dir: bool
        If set to `True`, the binaries will be downloaded to the
        `resources` directory of the qpsphere package instead of to the
        user's application data directory. Note that this might require
        administrative rights if qpsphere is installed in a system
        directory.

    Returns
    -------
    paths: list of pathlib.Path
        List of paths to binaries. This will always return binaries in
        the `resources` directory of the qpsphere package (if binaries
        are present there), regardless of the parameter `package_dir`.
    """
# bhfield # make sure the binary is available on the system paths = _bhfield.fetch.get_binaries() if package_dir: # Copy the binaries to the `resources` directory # of qpsphere. pdir = RESCR_PATH outpaths = [] for pp in paths: target = pdir / pp.name if not target.exists(): shutil.copy(pp, target) outpaths.append(target) else: outpaths = paths return outpaths
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_binaries(package_dir=False): """Remove all binaries for the current platform

    Parameters
    ----------
    package_dir: bool
        If True, remove all binaries from the `resources` directory
        of the qpsphere package. If False, remove all binaries from
        the user's cache directory.
    """
paths = [] if package_dir: pdir = RESCR_PATH else: pdir = CACHE_PATH for pp in pdir.iterdir(): if pp.name != "shipped_resources_go_here": paths.append(pp) for pp in paths: pp.unlink()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def elapsed(func): """A decorator that measures and prints the time elapsed when executing a function"""
@functools.wraps(func) def wrapper(*args, **kw): start = time.time() print('Running `%s()` ...' % func.__name__) res = func(*args, **kw) end = time.time() print('Function `%s()` running elapsed %.2f s' % (func.__name__, end - start)) return res return wrapper
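Usage sketch (the decorated function below is illustrative):

import time

@elapsed
def busy_wait():
    time.sleep(1.0)

busy_wait()
# Running `busy_wait()` ...
# Function `busy_wait()` running elapsed 1.00 s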
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_logger(log_file, name='logger', cmd=True): """Define a logger for your program.

    parameters
        log_file: file name of the log
        name: name of the logger
        cmd: if True, also log to the console

    example
        logger = create_logger('example.log', name='logger')
        logger.info('This is an example!')
        logger.warning('This is a warning!')
    """
import logging logger = logging.getLogger(name) logger.setLevel(logging.DEBUG) # set format formatter = logging.Formatter('%(asctime)s | %(name)s | %(levelname)s | %(message)s', datefmt='%Y-%m-%d %H:%M:%S') # file handler fh = logging.FileHandler(log_file) fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logger.addHandler(fh) # cmd handler if cmd: ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) ch.setFormatter(formatter) logger.addHandler(ch) return logger
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def relative_time_to_text(l10n=locales.get(_default), **kwargs): """ Return an approximate textual representation of the provided duration of time.

    Examples:
    relative_time_to_text(hours=6, minutes=34) -> "six and a half hours"
    relative_time_to_text(years=5, months=8, days=5) -> "less than six years"

    Keyword arguments:
    l10n -- The locale of the language for the result. Default is en_US.
    seconds
    minutes
    hours
    days
    weeks
    months
    years
    """
kwargs = _normalize(**kwargs) cor = _Chain() cor.add(_LessThan1M(l10n, **kwargs)) cor.add(_LessThan1H(l10n, **kwargs)) cor.add(_LessThan23H(l10n, **kwargs)) cor.add(_LessThan6D1H(l10n, **kwargs)) cor.add(_LessThan25D10H(l10n, **kwargs)) cor.add(_LessThan11MM(l10n, **kwargs)) cor.add(_LessThan10Y(l10n, **kwargs)) cor.add(_MoreThan10Y(l10n, **kwargs)) return cor.run()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def size(self): """ size in bytes """
if not self._size: self._size = os.path.getsize(self._path) return self._size
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def do_heavy_work(self, block): """ Expects Block objects like those produced by the Compressor """
cipher_key = self.gen_key(32) in_file_path = block.latest_file_info.path dst_file_path = block.processed_data_file_info.path + self.get_extension() self.log.debug("Encrypting file '%s' with key '%s' to file '%s'", in_file_path, cipher_key, dst_file_path) self.encrypt_file(key=cipher_key, in_filename=in_file_path, out_filename=dst_file_path) block.cipher_key = cipher_key block.ciphered_file_info = FileInfo(dst_file_path) block.latest_file_info = block.ciphered_file_info return block
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def autocommit(f): "A decorator to commit to the storage if autocommit is set to True." @wraps(f) def wrapper(self, *args, **kwargs): result = f(self, *args, **kwargs) if self._meta.commit_ready(): self.commit() return result return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def set(self, key, value):
    "Set `value` for `key` in the key store."
    if not isinstance(value, dict):
        raise BadValueError(
            'The value {} is incorrect.'
            ' Values should be dicts'.format(value))
    _value = deepcopy(value)
    # if key already in data, drop its old index entries before updating
    if key in self.data:
        self.delete_from_index(key)
    self.data[key] = _value
    self.update_index(key, _value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def insert(self, value): "Insert value in the keystore. Return the UUID key." key = str(uuid4()) self.set(key, value) return key
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def delete(self, key): "Delete a `key` from the keystore." if key in self.data: self.delete_from_index(key) del self.data[key]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, key, value): """Update a `key` in the keystore. If the key does not exist, it is created. """
if not isinstance(value, dict):
    raise BadValueError(
        'The value {} is incorrect.'
        ' Values should be dicts'.format(value))
if key in self.data:
    v = self.get(key)
    v.update(value)
else:
    v = value
self.set(key, v)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def del_key(self, key, key_to_delete): "Delete the `key_to_delete` for the record found with `key`." v = self.get(key) if key_to_delete in v: del v[key_to_delete] self.set(key, v)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def commit(self): "Commit data to the storage." if self._meta.path: with open(self._meta.path, 'wb') as fd: raw = deepcopy(self.raw) # LAZY INDEX PROCESSING # Save indexes only if not lazy lazy_indexes = self.lazy_indexes # Keep this list safe if not self._meta.lazy_indexes: # Remove indexes if needed for idx_name in lazy_indexes: del raw['indexes'][idx_name] for index_name, values in raw['indexes'].items(): for value, keys in values.items(): raw['indexes'][index_name][value] = list(keys) # don't store indexes if not needed if not raw['indexes'] or self._meta.lazy_indexes: del raw['indexes'] try: fd.write(six.u(self.serialize(raw))) except TypeError: fd.write(six.b(self.serialize(raw)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def keys_to_values(self, keys): "Return the items in the keystore with keys in `keys`." return dict((k, v) for k, v in self.data.items() if k in keys)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def filter_keys(self, **kwargs): "Return a set of keys filtered according to the given arguments." self._used_index = False keys = set(self.data.keys()) for key_filter, v_filter in kwargs.items(): if key_filter in self.indexes: self._used_index = True if v_filter not in self.indexes[key_filter]: keys = set([]) else: keys = keys.intersection( self.indexes[key_filter][v_filter]) else: keys = keys.intersection( self.simple_filter(key_filter, v_filter)) return keys
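A rough end-to-end sketch for this key-value store, assuming it is instantiated as `store` and that the unshown `simple_filter` performs a plain equality match on non-indexed keys:

# `store` stands for an instance of the surrounding key-value store class
# (its constructor is not part of this excerpt), so this is illustrative only.
key = store.insert({'name': 'ada', 'role': 'admin'})
store.update(key, {'role': 'owner'})
store.del_key(key, 'name')
print(store.filter_keys(role='owner'))  # -> {key}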