text_prompt: string, length 157 to 13.1k
code_prompt: string, length 7 to 19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter(self): """Generate a filtered query from request parameters. :returns: Filtered SQLAlchemy query """
argmap = { filter.label or label: filter.field for label, filter in self.filters.items() } args = self.opts.parser.parse(argmap) query = self.query if self.query is not None else self.opts.query for label, filter in self.filters.items(): value = args.get(filter.label or label) if value is not None: query = filter.filter(query, self.opts.model, label, value) return query
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_min_density(self, min_density): """Validator to ensure proper usage."""
if min_density is None: self._min_density = -np.Inf elif (isinstance(min_density, float) and (0.0 <= min_density < 1.0)): self._min_density = min_density else: raise ValueError('min_density must be float and be >=0.0 and < 1.0')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _not_empty(self, view, slice_): """Checks if the density is too low. """
img2d = self._get_axis(self._image, view, slice_) return (np.count_nonzero(img2d) / img2d.size) > self._min_density
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _sample_slices_in_dim(self, view, num_slices, non_empty_slices): """Samples the slices in the given dimension according to the chosen strategy."""
if self._sampling_method == 'linear': return self._linear_selection(non_empty_slices=non_empty_slices, num_slices=num_slices) elif self._sampling_method == 'percentage': return self._percent_selection(non_empty_slices=non_empty_slices) elif self._sampling_method == 'callable': return self._selection_by_callable(view=view, non_empty_slices=non_empty_slices, num_slices=num_slices) else: raise NotImplementedError('Invalid state for the class!')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _linear_selection(self, non_empty_slices, num_slices): """Selects linearly spaced slices in the given dimension."""
num_non_empty = len(non_empty_slices) # # trying to skip 5% slices at the tails (bottom clipping at 0) # skip_count = max(0, np.around(num_non_empty * 0.05).astype('int16')) # # only when possible # if skip_count > 0 and (num_non_empty - 2 * skip_count > num_slices): # non_empty_slices = non_empty_slices[skip_count: -skip_count] # num_non_empty = len(non_empty_slices) sampled_indices = np.linspace(0, num_non_empty, num=min(num_non_empty, num_slices), endpoint=False) slices_in_dim = non_empty_slices[np.around(sampled_indices).astype('int64')] return slices_in_dim
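A minimal standalone sketch of the same spacing logic, using a hypothetical array of non-empty slice indices: np.linspace with endpoint=False spreads the requested number of picks evenly, and the rounded positions index back into the non-empty set.

import numpy as np

non_empty_slices = np.arange(30, 80)   # hypothetical indices of non-empty slices
num_slices = 6

# evenly spaced positions over the non-empty range, endpoint excluded
positions = np.linspace(0, len(non_empty_slices),
                        num=min(len(non_empty_slices), num_slices), endpoint=False)
picked = non_empty_slices[np.around(positions).astype('int64')]
print(picked)   # [30 38 47 55 63 72]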
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _percent_selection(self, non_empty_slices): """Chooses slices at a given percentage between the first and last non-empty slice."""
return np.around(self._sampler * len(non_empty_slices) / 100).astype('int64')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _selection_by_callable(self, view, num_slices, non_empty_slices): """Returns all the slices selected by the given callable."""
selected = [sl for sl in non_empty_slices if self._sampler(self._get_axis(self._image, view, sl))] return selected[:num_slices]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_slices(self, extended=False): """Generator over all the slices selected, each time returning a cross-section. Parameters extended : bool Flag to return just slice data (default, extended=False), or return a tuple of axis, slice_num, slice_data (extended=True) Returns ------- slice_data : an image (just slice data, default, with extended=False), or a tuple of axis, slice_num, slice_data (extended=True) """
for dim, slice_num in self._slices: yield self._get_axis(self._image, dim, slice_num, extended=extended)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_slices_multi(self, image_list, extended=False): """Returns the same cross-section from the multiple images supplied. All images must be of the same shape as the original image defining this object. Parameters image_list : Iterable containing at least 2 images extended : bool Flag to return just slice data (default, extended=False), or return a tuple of axis, slice_num, slice_data (extended=True) Returns ------- tuple_slice_data : tuple of one slice from each image in the input image list Let's denote it as TSL. if extended=True, returns tuple(axis, slice_num, TSL) """
# ensure all the images have the same shape for img in image_list: if img.shape != self._image.shape: raise ValueError('Supplied images are not compatible with this class. ' 'They must have the shape: {}'.format(self._image_shape)) for dim, slice_num in self._slices: multiple_slices = (self._get_axis(img, dim, slice_num) for img in image_list) if not extended: # return just the slice data yield multiple_slices else: # additionally include which dim and which slice num # not using extended option in get_axis, to avoid complicating unpacking yield dim, slice_num, multiple_slices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_grid_of_axes(self, bounding_rect=cfg.bounding_rect_default, num_rows=cfg.num_rows_per_view_default, num_cols=cfg.num_cols_grid_default, axis_pad=cfg.axis_pad_default, commn_annot=None, **axis_kwargs): """Creates a grid of axes bounded within a given rectangle."""
axes_in_grid = list() extents = self._compute_cell_extents_grid(bounding_rect=bounding_rect, num_cols=num_cols, num_rows=num_rows, axis_pad=axis_pad) for cell_ext in extents: ax_cell = self.fig.add_axes(cell_ext, frameon=False, visible=False, **axis_kwargs) if commn_annot is not None: ax_cell.set_title(commn_annot) ax_cell.set_axis_off() axes_in_grid.append(ax_cell) return axes_in_grid
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_imshow_objects(self): """Turns off all the x and y axes in each Axis"""
# uniform values for initial image can cause weird behaviour with normalization # as imshow.set_data() does not automatically update the normalization!! # using random data is a better choice random_image = np.random.rand(20, 20) self.images = [None] * len(self.flat_grid) for ix, ax in enumerate(self.flat_grid): self.images[ix] = ax.imshow(random_image, **self.display_params)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def attach(self, image_in, sampler=None, show=True): """Attaches the relevant cross-sections to each axis. Parameters image_in : ndarray The image to be attached to the collage, once it is created. Must be at least 3D. sampler : str or list or callable selection strategy: to identify the type of sampling done to select the slices to return. All sampling is done between the first and last non-empty slice in that view/dimension. - if 'linear' : linearly spaced slices - if list, it is treated as a set of percentages at which slices are to be sampled (must be in the range of [1-100], not [0-1]). This could be used to sample more/all slices in the middle e.g. range(40, 60, 5) or at the end e.g. [ 5, 10, 15, 85, 90, 95] - if callable, it must take a 2D image of arbitrary size, return True/False to indicate whether to select that slice or not. Only non-empty slices (at least one non-zero voxel) are provided as input. Simple examples for callable could be based on 1) percentage of non-zero voxels > x etc 2) presence of desired texture ? 3) certain properties of distribution (skew: dark/bright, energy etc) etc If the sampler returns more than requested `num_slices`, only the first num_slices will be selected. show : bool Flag to request immediate display of collage """
if len(image_in.shape) < 3: raise ValueError('Image must be atleast 3D') # allowing the choice of new sampling for different invocations. if sampler is None: temp_sampler = self.sampler else: temp_sampler = sampler slicer = SlicePicker(image_in=image_in, view_set=self.view_set, num_slices=self.num_slices, sampler=temp_sampler) try: for img_obj, slice_data in zip(self.images, slicer.get_slices()): img_obj.set_data(slice_data) except: self._data_attached = False raise ValueError('unable to attach the given image data to current collage') else: self._data_attached = True # show all the axes if show: self.show()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _set_visible(self, visibility, grid_index=None): """Sets the visibility property of all axes."""
if grid_index is None: for ax in self.flat_grid: ax.set_visible(visibility) else: if grid_index < 0 or grid_index >= len(self.grids): raise IndexError('Valid indices : 0 to {}'.format(len(self.grids) - 1)) for ax in self.grids[grid_index]: ax.set_visible(visibility)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, annot=None, output_path=None): """Saves the collage to disk as an image. Parameters annot : str text to annotate the figure with a super title output_path : str path to save the figure to. Note: any spaces in the filename will be replaced with ``_`` """
if annot is not None: self.fig.suptitle(annot, backgroundcolor='black', color='g') if output_path is not None: output_path = output_path.replace(' ', '_') # TODO improve bbox calculations to include ONLY the axes from collage # and nothing else self.fig.savefig(output_path + '.png', bbox_inches='tight', dpi=200, bbox_extra_artists=self.flat_grid)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clear(self): """Clears all the axes to start fresh."""
for ax in self.flat_grid: for im_h in ax.findobj(AxesImage): im_h.remove()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_image(self, image_nD): """Sanity checks on the image data"""
self.input_image = load_image_from_disk(image_nD) if len(self.input_image.shape) < 3: raise ValueError('Input image must be atleast 3D') if np.count_nonzero(self.input_image) == 0: raise ValueError('Input image is completely filled with zeros! ' 'Must be non-empty')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _add_fixed_dim(self, fixed_dim=-1): """Makes note of which dimension needs to be fixed, defaulting to last."""
if fixed_dim in [-1, None, 'last']: fixed_dim = len(self.input_image.shape) - 1 # last dimension if int(fixed_dim)!=fixed_dim or \ fixed_dim > len(self.input_image.shape) or \ fixed_dim < -1: raise ValueError('invalid value for the dimension to be fixed!' 'Must be an integer in range [0, {}] inclusive' ''.format(len(self.input_image.shape))) if self.input_image.shape[fixed_dim] < 2: raise ValueError('Input image must have atleast two samples ' 'in the fixed dimension. It has {}. ' 'Full image shape: {} ' ''.format(self.input_image.shape[fixed_dim], self.input_image.shape)) self.fixed_dim = int(fixed_dim)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_carpet(self, rescale_data): """ Constructs the carpet from the input image. Optional rescaling of the data. """
self.carpet = self._unroll_array(self.input_image, self.fixed_dim) if rescale_data: self.carpet = row_wise_rescale(self.carpet)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def show(self, clustered=False, ax_carpet=None, label_x_axis='time point', label_y_axis='voxels/ROI'): """ Displays the carpet in the given axis. Parameters clustered : bool, optional Flag to indicate whether to show the clustered/reduced carpet or the original. You must run .cluster_rows_in_roi() before trying to show clustered carpet. ax_carpet : Axis, optional handle to a valid matplotlib Axis label_x_axis : str String label for the x-axis of the carpet label_y_axis : str String label for the y-axis of the carpet Returns ------- ax_carpet : Axis handle to axis where carpet is shown """
if clustered is True and self._carpet_clustered is False: print('You must run .cluster_rows_in_roi() ' 'before being able to show clustered carpet!') return if ax_carpet is None: self.ax_carpet = plt.gca() else: if not isinstance(ax_carpet, Axes): raise ValueError('Input must be a valid matplotlib Axis!') self.ax_carpet = ax_carpet plt.sca(self.ax_carpet) self.fig = plt.gcf() # vmin/vmax are controlled, because we rescale all to [0, 1] self.imshow_params_carpet = dict(interpolation='none', cmap='gray', aspect='auto', origin='lower', zorder=1) # should we control vmin=0.0, vmax=1.0 ?? if not clustered: self.carpet_handle = self.ax_carpet.imshow(self.carpet, **self.imshow_params_carpet) else: self.carpet_handle = self.ax_carpet.imshow(self.clustered_carpet, **self.imshow_params_carpet) # TODO decorating axes with labels self.ax_carpet.set(xlabel=label_x_axis, ylabel=label_y_axis, frame_on=False) self.ax_carpet.set_ylim(auto=True) return self.ax_carpet
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, output_path=None, title=None): """Saves the current figure with carpet visualization to disk. Parameters output_path : str Path to where the figure needs to be saved to. title : str text to overlay and annotate the visualization (done via plt.suptitle()) """
try: save_figure(self.fig, output_path=output_path, annot=title) except: print('Unable to save the figure to disk! \nException: ') traceback.print_exc()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cluster_rows_in_roi(self, roi_mask=None, num_clusters_per_roi=5, metric='minkowski'): """Clusters the data within all the ROIs specified in a mask. Parameters roi_mask : ndarray or None volumetric mask defining the list of ROIs, with a label for each voxel. This must be the same size in all dimensions except the fixed_dim i.e. if you were making a Carpet from an fMRI image of size 125x125x90x400 fixing the 4th dimension (of size 400), then roi_mask must be of size 125x125x90. num_clusters_per_roi : int number of clusters (n) to form for each ROI specified in the roi_mask. If n (say 20) is less than the number of voxels in a given ROI (say 2000), then data from approx. 2000/20=100 voxels would be summarized (averaged by default) into a single cluster. So if the ROI mask had m ROIs (say 10), then the final clustered carpet would have m*n rows (200), regardless of the number of voxels in the 3D image. metric : str distance metric for the hierarchical clustering algorithm; default : 'minkowski' Options: anything accepted by `scipy.spatial.distance.pdist`, which can be: ‘braycurtis’, ‘canberra’, ‘chebyshev’, ‘cityblock’, ‘correlation’, ‘cosine’, ‘dice’, ‘euclidean’, ‘hamming’, ‘jaccard’, ‘kulsinski’, ‘mahalanobis’, ‘matching’, ‘minkowski’, ‘rogerstanimoto’, ‘russellrao’, ‘seuclidean’, ‘sokalmichener’, ‘sokalsneath’, ‘sqeuclidean’, ‘yule’. """
self._set_roi_mask(roi_mask) try: clusters = [self._summarize_in_roi(self.roi_mask == label, num_clusters_per_roi, metric=metric) for label in self.roi_list] self.clustered_carpet = np.vstack(clusters) except: print('unable to produce the clustered carpet - exception:') traceback.print_exc() self._carpet_clustered = False else: self._carpet_clustered = True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _set_roi_mask(self, roi_mask): """Sets a new ROI mask."""
if isinstance(roi_mask, np.ndarray): # not (roi_mask is None or roi_mask=='auto'): self._verify_shape_compatibility(roi_mask, 'ROI set') self.roi_mask = roi_mask self.roi_list = np.unique(roi_mask.flatten()) self.roi_list = np.setdiff1d(self.roi_list, cfg.background_value) else: self.roi_mask = np.ones(self.carpet.shape[:-1]) # last dim is self.fixed_dim already self.roi_list = [1, ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_clusters(self, matrix, num_clusters_per_roi, metric): """Clusters a given matrix into the specified number of clusters according to the given metric."""
from scipy.cluster.hierarchy import fclusterdata # maxclust needed to ensure t is interpreted as # clusters in heirarchical clustering group_ids = fclusterdata(matrix, metric=metric, t=num_clusters_per_roi, criterion='maxclust') group_set = np.unique(group_ids) clusters = [ self._summary_func(matrix[group_ids == group, :], axis=0, keepdims=True) for group in group_set] return np.vstack(clusters).squeeze()
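A quick standalone run of the same scipy call on synthetic data (euclidean metric assumed here for simplicity); criterion='maxclust' makes t an upper bound on the number of flat clusters rather than a distance threshold.

import numpy as np
from scipy.cluster.hierarchy import fclusterdata

matrix = np.random.rand(40, 10)   # 40 rows (e.g. voxels) to be grouped
group_ids = fclusterdata(matrix, t=5, criterion='maxclust', metric='euclidean')
print(np.unique(group_ids))        # at most 5 distinct flat-cluster labels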
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _apply_mask(self, roi_mask): """Removes voxels outside the given mask or ROI set."""
# TODO ensure compatible with input image # - must have < N dim and same size in moving dims. rows_to_delete = list() # to allow for additional masks to be applied in the future if isinstance(roi_mask, np.ndarray): # not (roi_mask is None or roi_mask=='auto'): self._set_roi_mask(roi_mask) rows_roi = np.where(self.roi_mask.flatten() == cfg.background_value) # TODO below would cause differences in size/shape across mask and carpet! self.carpet = np.delete(self.carpet, rows_roi, axis=0) else: self.roi_mask = np.ones(self.carpet.shape)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _verify_shape_compatibility(self, img, img_type): """Checks mask shape against input image shape."""
if self.input_image.shape[:-1] != img.shape: raise ValueError('Shape of the {} ({}) is not compatible ' 'with input image shape: {} ' ''.format(img_type, img.shape, self.input_image.shape[:-1]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def verify_email(request, code, redirect_to=None): """Verifies an account activation code a user received by e-mail. Requires Messages Django Contrib. :param Request request: :param str code: :param str redirect_to: :return: """
success = False valid_code = EmailConfirmation.is_valid(code) if valid_code: valid_code.activate() success = True if success: messages.success(request, SIGNUP_VERIFY_EMAIL_SUCCESS_TEXT, 'success') else: messages.error(request, SIGNUP_VERIFY_EMAIL_ERROR_TEXT, 'danger error') if redirect_to is None: redirect_to = '/' return redirect(redirect_to)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unpack_fixed8(src): """Get a FIXED8 value."""
dec_part = unpack_ui8(src) int_part = unpack_ui8(src) return int_part + dec_part / 256
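A worked example of the 8.8 fixed-point decoding, with a hypothetical unpack_ui8 helper over a BytesIO stream (assumed to mirror the helper used above, reading one unsigned byte at a time; the fractional byte comes first).

import io
import struct

def unpack_ui8(src):
    # one unsigned byte from the stream (assumed helper)
    return struct.unpack('<B', src.read(1))[0]

def unpack_fixed8(src):
    dec_part = unpack_ui8(src)   # fractional byte first
    int_part = unpack_ui8(src)
    return int_part + dec_part / 256

src = io.BytesIO(bytes([0x80, 0x07]))   # 0x80/256 = 0.5, integer part 7
print(unpack_fixed8(src))               # 7.5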
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unpack_float16(src): """Read and unpack a 16b float. The structure is: - 1 bit for the sign - 5 bits for the exponent, with an exponent bias of 16 - 10 bits for the mantissa """
bc = BitConsumer(src) sign = bc.u_get(1) exponent = bc.u_get(5) mantissa = bc.u_get(10) exponent -= 16 mantissa /= 2 ** 10 num = ((-1) ** sign) * mantissa * (2 ** exponent) return num
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def u_get(self, quant): """Return a number using the given quantity of unsigned bits."""
if not quant: return bits = [] while quant: if self._count == 0: byte = self.src.read(1) number = struct.unpack("<B", byte)[0] self._bits = bin(number)[2:].zfill(8) self._count = 8 if quant > self._count: self._count, quant, toget = 0, quant - self._count, self._count else: self._count, quant, toget = self._count - quant, 0, quant read, self._bits = self._bits[:toget], self._bits[toget:] bits.append(read) data = int("".join(bits), 2) return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def s_get(self, quant): """Return a number using the given quantity of signed bits."""
if quant < 2: # special case, just return that unsigned value # quant can also be 0 return self.u_get(quant) sign = self.u_get(1) raw_number = self.u_get(quant - 1) if sign == 0: # positive, simplest case number = raw_number else: # negative, complemento a 2 complement = 2 ** (quant - 1) - 1 number = -1 * ((raw_number ^ complement) + 1) return number
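The sign handling above is plain two's complement over quant bits; a standalone numeric check (independent of the BitConsumer class, operating on a hypothetical raw bit pattern) of the same complement-and-negate arithmetic:

def from_twos_complement(raw_bits, quant):
    # raw_bits: the full unsigned value of the quant-bit field
    sign = raw_bits >> (quant - 1)
    raw_number = raw_bits & ((1 << (quant - 1)) - 1)
    if sign == 0:
        return raw_number
    complement = 2 ** (quant - 1) - 1
    return -1 * ((raw_number ^ complement) + 1)

print(from_twos_complement(0b1011, 4))   # -5, i.e. 11 - 16
print(from_twos_complement(0b0101, 4))   # 5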
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fb_get(self, quant, fb=16): """Return a fixed bit number quant: number of bits to read fb: number of bits in the integer and decimal part of the output default is 16, resulting in a 16.16 fixed bit"""
raw_number = self.s_get(quant) if quant == 1: # special case, just return that unsigned value return raw_number return raw_number / (1 << fb)
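With the default fb=16 this is a 16.16 fixed-point read: the signed raw value is simply divided by 2**16. A quick numeric check, assuming a hypothetical raw value already extracted from the stream:

raw_number = 98304               # hypothetical signed value read from the bits
fb = 16
print(raw_number / (1 << fb))    # 1.5, since 98304 == 1.5 * 65536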
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_freesurfer_cmap(vis_type): """Provides different colormaps for different visualization types."""
if vis_type in ('cortical_volumetric', 'cortical_contour'): LUT = get_freesurfer_cortical_LUT() cmap = ListedColormap(LUT) elif vis_type in ('labels_volumetric', 'labels_contour'): black = np.array([0, 0, 0, 1]) cmap = plt.get_cmap('hsv') # TODO using more than 20 labels might be a problem? cmap = cmap(np.linspace(0, 1, 20)) # prepending black to paint background as black colors = np.vstack((black, cmap)) cmap = ListedColormap(colors, 'my_colormap') else: raise NotImplementedError('color map for the visualization type {} has not been implemented!'.format(vis_type)) return cmap
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_rsa_key(bits=2048, keyfile=None, format='PEM', passphrase=None): """ Generate a new RSA key with the specified key size. :param int bits: bit size of the key modulus :param str keyfile: file the key should be written to :param str format: format for the key file, either PEM or DER :param str passphrase: pass phrase for encrypting the key file. If pass phrase is a callable its return value will be used. :return: RSA private key instance """
if passphrase and format != 'PEM': raise Exception( "passphrase is only supported for PEM encoded private keys") rsakey = RSA.generate(bits) if passphrase and isinstance(passphrase, collections.Callable): passphrase = passphrase() output = rsakey.exportKey(format=format, passphrase=passphrase) if keyfile: with open(keyfile, 'w') as outputfile: outputfile.write(output) log.info("generated private key:\n\n%s", output) return rsakey
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_csr(key, dn, csrfilename=None, attributes=None): """ Generates a Certificate Signing Request for a given key. :param Crypto.PublicKey.RSA._RSAobj key: a key :param dn: a distinguished name as dictionary or string with key=value pairs separated by slashes like ``/CN=test.example.org/C=DE/O=Test organisation/`` :param str csrfilename: name of a file to write the CSR to :param tuple attributes: a tuple describing attributes to be included in the CSR :return: a certificate signing request """
certreqInfo = rfc2314.CertificationRequestInfo() certreqInfo.setComponentByName('version', rfc2314.Version(0)) certreqInfo.setComponentByName('subject', _build_dn(dn)) certreqInfo.setComponentByName('subjectPublicKeyInfo', _build_subject_publickey_info(key)) attrpos = certreqInfo.componentType.getPositionByName('attributes') attrtype = certreqInfo.componentType.getTypeByPosition(attrpos) certreqInfo.setComponentByName('attributes', _build_attributes( attributes, attrtype)) certreq = rfc2314.CertificationRequest() certreq.setComponentByName('certificationRequestInfo', certreqInfo) sigAlgIdentifier = rfc2314.SignatureAlgorithmIdentifier() sigAlgIdentifier.setComponentByName( 'algorithm', univ.ObjectIdentifier('1.2.840.113549.1.1.11')) certreq.setComponentByName( 'signatureAlgorithm', sigAlgIdentifier) certreq.setComponentByName( 'signature', _build_signature(key, certreqInfo)) output = _der_to_pem(encoder.encode(certreq), 'CERTIFICATE REQUEST') if csrfilename: with open(csrfilename, 'w') as csrfile: csrfile.write(output) log.info("generated certification request:\n\n%s", output) return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def respond_for(self, view_function, args, kwargs): """Returns a response for the given view & args."""
request = args[0] form = self.get_requested_form(request) if form.is_valid(): result = self.handle_form_valid(request, form) if result: return result self.update_request(request, form) return view_function(*args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_request(self, request, form): """Updates Request object with flows forms."""
forms_key = '%s_forms' % self.flow_type # Use ordered forms dict in case _formNode wants to fetch the first defined. flow_dict = OrderedDict() try: flow_dict = request.sitegate[forms_key] except AttributeError: request.sitegate = {} except KeyError: pass flow_dict[self.get_flow_name()] = form request.sitegate[forms_key] = flow_dict
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login_generic(request, username, password): """Helper method. Generic login with username and password."""
user = authenticate(username=username, password=password) if user is not None and user.is_active: login(request, user) return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_arg_or_attr(self, name, default=None): """Returns flow argument, as provided with sitegate decorators or attribute set as a flow class attribute or default."""
if name in self.flow_args: return self.flow_args[name] try: return getattr(self, name) except AttributeError: return default
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_requested_form(self, request): """Returns an instance of a form requested."""
flow_name = self.get_flow_name() flow_key = '%s_flow' % self.flow_type flow_enabled = self.enabled form_data = None if (flow_enabled and request.method == 'POST' and request.POST.get(flow_key, False) and request.POST[flow_key] == flow_name): form_data = request.POST form = self.init_form( form_data, widget_attrs=self.flow_args.get('widget_attrs', None), template=self.get_template_name(self.flow_args.get('template', None)) ) # Attach flow identifying field to differentiate among several possible forms. form.fields[flow_key] = forms.CharField(required=True, initial=flow_name, widget=forms.HiddenInput) form.flow_enabled = flow_enabled form.flow_disabled_text = self.disabled_text return form
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_form(self, form_data, widget_attrs=None, template=None): """Constructs, populates and returns a form."""
form = self.form(data=form_data) form.template = template # Attach flow attribute to have access from flow forms (usually to call get_arg_or_attr()) form.flow = self if widget_attrs is not None: set_form_widgets_attrs(form, widget_attrs) return form
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_path(scraper): """ Determine the file name for the JSON log. """
return os.path.join(scraper.config.data_path, '%s.jsonlog' % scraper.name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_logger(scraper): """ Create two log handlers, one to output info-level output to the console, the other to store all logging in a JSON file which will later be used to generate reports. """
logger = logging.getLogger('') logger.setLevel(logging.DEBUG) requests_log = logging.getLogger("requests") requests_log.setLevel(logging.WARNING) json_handler = logging.FileHandler(log_path(scraper)) json_handler.setLevel(logging.DEBUG) json_formatter = jsonlogger.JsonFormatter(make_json_format()) json_handler.setFormatter(json_formatter) logger.addHandler(json_handler) console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) fmt = '%(name)s [%(levelname)-8s]: %(message)s' formatter = logging.Formatter(fmt) console_handler.setFormatter(formatter) logger.addHandler(console_handler) logger = logging.getLogger(scraper.name) logger = TaskAdapter(logger, scraper) return logger
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def initiate(self, sa): """Initiate an SA. :param sa: the SA to initiate :type sa: dict :return: logs emitted by command, with `errmsg` given on failure :rtype: :py:class:`vici.session.CommandResult` """
response = self.handler.streamed_request("initiate", "control-log", sa) return self._result(*response)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def terminate(self, sa): """Terminate an SA. :param sa: the SA to terminate :type sa: dict :return: logs emitted by command, with `errmsg` given on failure :rtype: :py:class:`vici.session.CommandResult` """
response = self.handler.streamed_request("terminate", "control-log", sa) return self._result(*response)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_sas(self, filters=None): """Retrieve active IKE_SAs and associated CHILD_SAs. :param filters: retrieve only matching IKE_SAs (optional) :type filters: dict :return: list of active IKE_SAs and associated CHILD_SAs :rtype: list """
_, sa_list = self.handler.streamed_request("list-sas", "list-sa", filters) return sa_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_policies(self, filters=None): """Retrieve installed trap, drop and bypass policies. :param filters: retrieve only matching policies (optional) :type filters: dict :return: list of installed trap, drop and bypass policies :rtype: list """
_, policy_list = self.handler.streamed_request("list-policies", "list-policy", filters) return policy_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_conns(self, filters=None): """Retrieve loaded connections. :param filters: retrieve only matching configuration names (optional) :type filters: dict :return: list of connections :rtype: list """
_, connection_list = self.handler.streamed_request("list-conns", "list-conn", filters) return connection_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_certs(self, filters=None): """Retrieve loaded certificates. :param filters: retrieve only matching certificates (optional) :type filters: dict :return: list of loaded certificates :rtype: list """
_, cert_list = self.handler.streamed_request("list-certs", "list-cert", filters) return cert_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _result(self, command_response, log=None): """Create a CommandResult for a request response. :param command_response: command request response :type command_response: dict :param log: list of log messages (optional) :type log: list :return: a CommandResult containing any given log messages :rtype: :py:class:`vici.session.CommandResult` """
if command_response["success"] == "yes": return CommandResult(True, None, log) else: return CommandResult(False, command_response["errmsg"], log)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def request(self, command, message=None): """Send command request with an optional message. :param command: command to send :type command: str :param message: message (optional) :type message: str :return: command result :rtype: dict """
if message is not None: message = Message.serialize(message) packet = Packet.request(command, message) response = self._communicate(packet) if response.response_type != Packet.CMD_RESPONSE: raise SessionException( "Unexpected response type {type}, " "expected '{response}' (CMD_RESPONSE)".format( type=response.response_type, response=Packet.CMD_RESPONSE ) ) return Message.deserialize(response.payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def streamed_request(self, command, event_stream_type, message=None): """Send command request and collect and return all emitted events. :param command: command to send :type command: str :param event_stream_type: event type emitted on command execution :type event_stream_type: str :param message: message (optional) :type message: str :return: a pair of the command result and a list of emitted events :rtype: tuple """
result = [] if message is not None: message = Message.serialize(message) # subscribe to event stream packet = Packet.register_event(event_stream_type) response = self._communicate(packet) if response.response_type != Packet.EVENT_CONFIRM: raise SessionException( "Unexpected response type {type}, " "expected '{confirm}' (EVENT_CONFIRM)".format( type=response.response_type, confirm=Packet.EVENT_CONFIRM, ) ) # issue command, and read any event messages packet = Packet.request(command, message) self.transport.send(packet) response = self._read() while response.response_type == Packet.EVENT: result.append(Message.deserialize(response.payload)) response = self._read() if response.response_type == Packet.CMD_RESPONSE: response_message = Message.deserialize(response.payload) else: raise SessionException( "Unexpected response type {type}, " "expected '{response}' (CMD_RESPONSE)".format( type=response.response_type, response=Packet.CMD_RESPONSE ) ) # unsubscribe from event stream packet = Packet.unregister_event(event_stream_type) response = self._communicate(packet) if response.response_type != Packet.EVENT_CONFIRM: raise SessionException( "Unexpected response type {type}, " "expected '{confirm}' (EVENT_CONFIRM)".format( type=response.response_type, confirm=Packet.EVENT_CONFIRM, ) ) return (response_message, result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _read(self): """Get next packet from transport. :return: parsed packet in a tuple with message type and payload :rtype: :py:class:`collections.namedtuple` """
raw_response = self.transport.receive() response = Packet.parse(raw_response) # FIXME if response.response_type == Packet.EVENT and response.event_type == "log": # queue up any debug log messages, and get next self.log_events.append(response) return self._read() else: return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _diff_image(slice1, slice2, abs_value=True, cmap='gray', **kwargs): """Computes the difference image"""
diff = slice1 - slice2 if abs_value: diff = np.abs(diff) return diff, cmap
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def diff_colormap(): "Custom colormap to map low values to black or another color." # bottom = plt.cm.copper(np.linspace(0., 1, 6)) black = np.atleast_2d([0., 0., 0., 1.]) bottom = np.repeat(black, 6, axis=0) middle = plt.cm.copper(np.linspace(0, 1, 250)) # remain = plt.cm.Reds(np.linspace(0, 1, 240)) colors = np.vstack((bottom, middle)) diff_colormap = mpl.colors.LinearSegmentedColormap.from_list('diff_colormap', colors) return diff_colormap
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_bounding_rect(rect_pos): """Ensure the rect spec is valid."""
if not isinstance(rect_pos, Iterable): raise ValueError('rectangle spec must be a tuple of floats ' 'specifying (left, bottom, width, height)') left, bottom, width, height = rect_pos for val, name in zip((left, bottom, width, height), ('left', 'bottom', 'width', 'height')): if val < 0.0 or val > 1.0: raise ValueError("{}'s value must be >=0 and <= 1.0. " "It is now {}".format(name, val)) if left + width > 1.0: print('rect would extend beyond the width of figure/axis by {}'.format(left + width - 1.0)) if bottom + height > 1.0: print('rect would extend beyond the height of figure/axis by {}'.format( bottom + height - 1.0)) return rect_pos
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_num_slices(num_slices, img_shape=None, num_dims=3): """Ensures requested number of slices is valid. At least 1 and at most the image size, if available """
if not isinstance(num_slices, Iterable) or len(num_slices) == 1: num_slices = np.repeat(num_slices, num_dims) if img_shape is not None: if len(num_slices) != len(img_shape): raise ValueError('The number of dimensions requested is different from image.' ' Must be either 1 or equal to {}'.format(len(img_shape) + 1)) # upper bounding them to image shape num_slices = np.minimum(img_shape, num_slices) # lower bounding it to 1 return np.maximum(1, num_slices)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_int(num, num_descr='number', min_value=0, max_value=np.Inf): """Validation and typecasting."""
if not np.isfinite(num) or num < min_value or num > max_value: raise ValueError('{}={} is not finite or ' 'is not >= {} or ' 'is not < {}'.format(num_descr, num, min_value, max_value)) return int(num)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_image(img_spec, bkground_thresh, ensure_num_dim=3): """Image reader, with additional checks on size. Can optionally remove stray values close to zero (smaller than 5 %ile)."""
img = load_image_from_disk(img_spec) if not np.issubdtype(img.dtype, np.floating): img = img.astype('float32') if ensure_num_dim == 3: img = check_image_is_3d(img) elif ensure_num_dim == 4: img = check_image_is_4d(img) return threshold_image(img, bkground_thresh)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_image_from_disk(img_spec): """Vanilla image loader."""
if isinstance(img_spec, str): if pexists(realpath(img_spec)): hdr = nib.load(img_spec) # trying to stick to an orientation hdr = nib.as_closest_canonical(hdr) img = hdr.get_data() else: raise IOError('Given path to image does not exist!') elif isinstance(img_spec, np.ndarray): img = img_spec else: raise ValueError('Invalid input specified! ' 'Input either a path to image data, or provide 3d Matrix directly.') return img
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def threshold_image(img, bkground_thresh, bkground_value=0.0): """ Thresholds a given image at a value or percentile. Replacement value can be specified too. Parameters image_in : ndarray Input image bkground_thresh : float a threshold value to identify the background bkground_value : float a value to fill the background elements with. Default 0. Returns ------- thresholded_image : ndarray thresholded and/or filled image """
if bkground_thresh is None: return img if isinstance(bkground_thresh, str): try: thresh_perc = float(bkground_thresh.replace('%', '')) except: raise ValueError( 'percentile specified could not be parsed correctly ' ' - must be a string of the form "5%", "10%" etc') else: thresh_value = np.percentile(img, thresh_perc) elif isinstance(bkground_thresh, (float, int)): thresh_value = bkground_thresh else: raise ValueError('Invalid specification for background threshold.') img[img < thresh_value] = bkground_value return img
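A small sketch of the percentile branch on a synthetic image: the '5%' string is parsed to a percentile, mapped to an intensity cutoff, and everything below that cutoff is filled with the background value.

import numpy as np

img = np.random.rand(10, 10)                       # synthetic image
thresh_perc = float('5%'.replace('%', ''))         # parse the percentile spec
thresh_value = np.percentile(img, thresh_perc)     # intensity at the 5th percentile
img[img < thresh_value] = 0.0                      # dimmest ~5% of voxels become background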
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def row_wise_rescale(matrix): """ Row-wise rescale of a given matrix. For fMRI data (num_voxels x num_time_points), this would translate to voxel-wise normalization over time. Parameters matrix : ndarray Input rectangular matrix, typically a carpet of size num_voxels x num_4th_dim, 4th_dim could be time points or gradients or other appropriate Returns ------- normed : ndarray normalized matrix """
if matrix.shape[0] <= matrix.shape[1]: raise ValueError('Number of voxels is less than the number of time points!! ' 'Are you sure data is reshaped correctly?') min_ = matrix.min(axis=1) range_ = matrix.ptp(axis=1) # ptp : peak to peak, max-min min_tile = np.tile(min_, (matrix.shape[1], 1)).T range_tile = np.tile(range_, (matrix.shape[1], 1)).T # avoiding any numerical difficulties range_tile[range_tile < np.finfo(np.float).eps] = 1.0 normed = (matrix - min_tile) / range_tile del min_, range_, min_tile, range_tile return normed
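The rescaling above is a per-row min-max normalization, mapping each row r to (r - min(r)) / (max(r) - min(r)); a compact equivalent on a synthetic carpet, with the same guard against constant rows:

import numpy as np

carpet = np.random.rand(100, 20) * 50              # 100 voxels x 20 time points (synthetic)
min_ = carpet.min(axis=1, keepdims=True)
range_ = np.ptp(carpet, axis=1, keepdims=True)     # per-row max - min
range_[range_ < np.finfo(float).eps] = 1.0         # avoid dividing by zero on constant rows
normed = (carpet - min_) / range_
assert normed.min() >= 0.0 and normed.max() <= 1.0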
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def crop_to_extents(img1, img2, padding): """Crop the images to ensure both fit within the bounding box"""
beg_coords1, end_coords1 = crop_coords(img1, padding) beg_coords2, end_coords2 = crop_coords(img2, padding) beg_coords = np.fmin(beg_coords1, beg_coords2) end_coords = np.fmax(end_coords1, end_coords2) img1 = crop_3dimage(img1, beg_coords, end_coords) img2 = crop_3dimage(img2, beg_coords, end_coords) return img1, img2
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def crop_image(img, padding=5): "Crops an image or slice to its extents" if padding < 1: return img beg_coords, end_coords = crop_coords(img, padding) if len(img.shape) == 3: img = crop_3dimage(img, beg_coords, end_coords) elif len(img.shape) == 2: img = crop_2dimage(img, beg_coords, end_coords) else: raise ValueError('Can only crop 2D or 3D images!') return img
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def crop_coords(img, padding): """Find coordinates describing extent of non-zero portion of image, padded"""
coords = np.nonzero(img) empty_axis_exists = np.any([len(arr) == 0 for arr in coords]) if empty_axis_exists: end_coords = img.shape beg_coords = np.zeros((1, img.ndim)).astype(int).flatten() else: min_coords = np.array([arr.min() for arr in coords]) max_coords = np.array([arr.max() for arr in coords]) beg_coords = np.fmax(0, min_coords - padding) end_coords = np.fmin(img.shape, max_coords + padding) return beg_coords, end_coords
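The same bounding-box computation in isolation: np.nonzero returns one coordinate array per axis, and the padded per-axis min/max define the crop window (synthetic volume assumed).

import numpy as np

img = np.zeros((50, 50, 50))
img[10:20, 15:30, 5:25] = 1.0          # hypothetical non-empty block
padding = 3

coords = np.nonzero(img)
beg = np.fmax(0, np.array([c.min() for c in coords]) - padding)
end = np.fmin(img.shape, np.array([c.max() for c in coords]) + padding)
print(beg, end)                         # [ 7 12  2] [22 32 27]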
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def verify_sampler(sampler, image, image_shape, view_set, num_slices): """verifies the sampler requested is valid."""
if isinstance(sampler, str): sampler = sampler.lower() if sampler not in ['linear', ]: raise ValueError('Sampling strategy: {} not implemented.'.format(sampler)) out_sampler = sampler out_sampling_method = 'linear' elif isinstance(sampler, Iterable): if any([index < 0 or index > 100 for index in sampler]): raise ValueError('sampling percentages must be in [0-100]% range') if len(sampler) > min(num_slices): num_slices = np.maximum(num_slices, len(sampler)) out_sampler = np.array(sampler) out_sampling_method = 'percentage' elif callable(sampler): # checking if the callable returns a bool for view in view_set: middle_slice = int(image_shape[view] / 2) if not isinstance(sampler(get_axis(image, view, middle_slice)), bool): raise ValueError('sampler callable must return a boolean value (True/False)') out_sampler = sampler out_sampling_method = 'callable' else: raise NotImplementedError('Invalid choice for sampler! Choose one of: ' 'linear, percentage or callable') return out_sampler, out_sampling_method, num_slices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_username_max_len(): """Returns username maximum length as supported by Django. :rtype: int """
fields = [field for field in USER._meta.fields if field.name == 'username'] try: length = fields[0].max_length except IndexError: length = 30 return length
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_env(self, config): """ Read environment variables based on the settings defined in the defaults. These are expected to be upper-case versions of the actual setting names, prefixed by ``SCRAPEKIT_``. """
for option, value in config.items(): env_name = 'SCRAPEKIT_%s' % option.upper() value = os.environ.get(env_name, value) config[option] = value return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _spawn(self): """ Initialize the queue and the threads. """
self.queue = Queue(maxsize=self.num_threads * 10) for i in range(self.num_threads): t = Thread(target=self._consume) t.daemon = True t.start()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _consume(self): """ Main loop for each thread, handles picking a task off the queue, processing it and notifying the queue that it is done. """
while True: try: task, args, kwargs = self.queue.get(True) task(*args, **kwargs) finally: self.queue.task_done()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self, task, args, kwargs): """ Add a new item to the queue. An item is a task and the arguments needed to call it. Do not call this directly, use Task.queue/Task.run instead. """
if self.num_threads == 0: return task(*args, **kwargs) if self.queue is None: self._spawn() self.queue.put((task, args, kwargs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self, *args, **kwargs): """ Queue a first item to execute, then wait for the queue to be empty before returning. This should be the default way of starting any scraper. """
if self._source is not None: return self._source.run(*args, **kwargs) else: self.queue(*args, **kwargs) return self.wait()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def chain(self, other_task): """ Add a chain listener to the execution of this task. Whenever an item has been processed by the task, the registered listener task will be queued to be executed with the output of this task. Can also be written as:: pipeline = task1 > task2 """
other_task._source = self self._listeners.append(ChainListener(other_task)) return other_task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pipe(self, other_task): """ Add a pipe listener to the execution of this task. The output of this task is required to be an iterable. Each item in the iterable will be queued as the sole argument to an execution of the listener task. Can also be written as:: pipeline = task1 | task2 """
other_task._source = self self._listeners.append(PipeListener(other_task)) return other_task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def client_auth(self): """Generate an XML element with client auth data populated."""
if not self._client_auth: self._client_auth = E.Element('merchantAuthentication') E.SubElement(self._client_auth, 'name').text = self.config.login_id E.SubElement(self._client_auth, 'transactionKey').text = self.config.transaction_key return self._client_auth
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _base_request(self, method): """Factory method for generating the base XML requests."""
request = E.Element(method) request.set('xmlns', 'AnetApi/xml/v1/schema/AnetApiSchema.xsd') request.append(self.client_auth) return request
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_call(self, call): """Make a call to the Authorize.net server with the XML."""
try: request = urllib2.Request(self.config.environment, E.tostring(call)) request.add_header('Content-Type', 'text/xml') response = urllib2.urlopen(request).read() response = E.fromstring(response) response_json = parse_response(response) except urllib2.HTTPError: raise AuthorizeConnectionError('Error processing XML request.') # Exception handling for transaction response errors. try: error = response_json.transaction_response.errors[0] raise AuthorizeResponseError(error.error_code, error.error_text, response_json) except (KeyError, AttributeError): # Attempt to access transaction response errors pass # Throw an exception for invalid calls. This makes error handling easier. if response_json.messages[0].result_code != 'Ok': error = response_json.messages[0].message raise AuthorizeResponseError(error.code, error.text, response_json) return response_json
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tag_builder(parser, token, cls, flow_type): """Helper function handling flow form tags."""
tokens = token.split_contents() tokens_num = len(tokens) if tokens_num == 1 or (tokens_num == 3 and tokens[1] == 'for'): flow_name = None if tokens_num == 3: flow_name = tokens[2] return cls(flow_name) else: raise template.TemplateSyntaxError( '"sitegate_%(type)s_form" tag requires zero or two arguments. ' 'E.g. {%% sitegate_%(type)s_form %%} or ' '{%% sitegate_%(type)s_form for ClassicSignup %%}.' % {'type': flow_type})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sitegate_view(*args_dec, **kwargs_dec): """Decorator to mark views used both for signup & sign in."""
if len(args_dec): # simple decoration w/o parameters return signup_view(signin_view(redirect_signedin(*args_dec, **kwargs_dec))) signin = signin_view(**kwargs_dec) signup = signup_view(**kwargs_dec) return lambda *args, **kwargs: signup(signin(redirect_signedin(*args, **kwargs)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_ulid_timestamp(ulid): """ Get the time from an ULID as an UNIX timestamp. :param ulid: An ULID (either as UUID, base32 ULID or binary) :return: UNIX timestamp :rtype: float """
ts_bytes = ulid_to_binary(ulid)[:6] ts_bytes = b'\0\0' + ts_bytes assert len(ts_bytes) == 8 return (struct.unpack(b'!Q', ts_bytes)[0] / 1000.)
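The leading 6 bytes of a binary ULID hold a big-endian millisecond timestamp; left-padding them to 8 bytes lets struct read them as one unsigned 64-bit integer. A standalone check with a hand-built binary ULID (layout assumed: 6 timestamp bytes followed by 10 entropy bytes):

import struct

ms = 1_600_000_000_000                        # hypothetical millisecond timestamp
ulid = ms.to_bytes(6, 'big') + b'\0' * 10     # 16-byte binary ULID, zero entropy

recovered = struct.unpack('!Q', b'\0\0' + ulid[:6])[0] / 1000.
print(recovered)                               # 1600000000.0 seconds since the epoch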
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_binary_ulid(timestamp=None, monotonic=False): """ Generate the bytes for an ULID. :param timestamp: An optional timestamp override. If `None`, the current time is used. :type timestamp: int|float|datetime.datetime|None :param monotonic: Attempt to ensure ULIDs are monotonically increasing. Monotonic behavior is not guaranteed when used from multiple threads. :type monotonic: bool :return: Bytestring of length 16. :rtype: bytes """
global _last_entropy, _last_timestamp if timestamp is None: timestamp = time.time() elif isinstance(timestamp, datetime.datetime): timestamp = calendar.timegm(timestamp.utctimetuple()) ts = int(timestamp * 1000.0) ts_bytes = _to_binary( (ts >> shift) & 0xFF for shift in (40, 32, 24, 16, 8, 0) ) entropy = os.urandom(10) if monotonic and _last_timestamp == ts and _last_entropy is not None: while entropy < _last_entropy: entropy = os.urandom(10) _last_entropy = entropy _last_timestamp = ts return ts_bytes + entropy
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_ulid_as_uuid(timestamp=None, monotonic=False): """ Generate an ULID, but expressed as an UUID. :param timestamp: An optional timestamp override. If `None`, the current time is used. :type timestamp: int|float|datetime.datetime|None :param monotonic: Attempt to ensure ULIDs are monotonically increasing. Monotonic behavior is not guaranteed when used from multiple threads. :type monotonic: bool :return: UUID containing ULID data. :rtype: uuid.UUID """
return uuid.UUID(bytes=generate_binary_ulid(timestamp, monotonic=monotonic))
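A sketch combining the UUID-flavoured generator with the timestamp helper defined earlier; the module name `ulid2` is again an assumption.

import datetime
from ulid2 import generate_ulid_as_uuid, get_ulid_timestamp

u = generate_ulid_as_uuid()
ts = get_ulid_timestamp(u)  # the first 6 bytes encode milliseconds since the epoch
print(u, datetime.datetime.fromtimestamp(ts, tz=datetime.timezone.utc).isoformat())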
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ulid_to_binary(ulid): """ Convert a ULID to its binary representation. :param ulid: A ULID (either as UUID, base32 ULID or binary) :return: Bytestring of length 16 :rtype: bytes """
if isinstance(ulid, uuid.UUID): return ulid.bytes if isinstance(ulid, (text_type, bytes)) and len(ulid) == 26: return decode_ulid_base32(ulid) if isinstance(ulid, (bytes, bytearray)) and len(ulid) == 16: return ulid raise InvalidULID('can not convert ulid %r to binary' % ulid)
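All three accepted input forms should map back to the same 16 bytes; `encode_ulid_base32` is assumed to be the matching encoder living in the same module.

import uuid
from ulid2 import generate_binary_ulid, ulid_to_binary, encode_ulid_base32

raw = generate_binary_ulid()
assert ulid_to_binary(raw) == raw                      # already binary
assert ulid_to_binary(uuid.UUID(bytes=raw)) == raw     # UUID form
assert ulid_to_binary(encode_ulid_base32(raw)) == raw  # 26-character base32 form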
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_session(scraper): """ Instantiate a session with the desired configuration parameters, including the cache policy. """
cache_path = os.path.join(scraper.config.data_path, 'cache') cache_policy = scraper.config.cache_policy cache_policy = cache_policy.lower().strip() session = ScraperSession() session.scraper = scraper session.cache_policy = cache_policy adapter = CacheControlAdapter( FileCache(cache_path), cache_etags=True, controller_class=PolicyCacheController ) session.mount('http://', adapter) session.mount('https://', adapter) return session
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def json(self, **kwargs): """ Create a JSON object from the response. """
try: return super(ScraperResponse, self).json(**kwargs) except ValueError as ve: raise ParseException(ve)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def collapse_whitespace(text): """ Collapse all consecutive whitespace, newlines and tabs in a string into single spaces, and strip the outer whitespace. This will also accept an ``lxml`` element and extract all text. """
if text is None: return None if hasattr(text, 'xpath'): text = text.xpath('string()') text = re.sub(r'\s+', ' ', text) return text.strip()
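A quick behavioural sketch of the helper above; it assumes collapse_whitespace is in scope and, for the last branch, that lxml is installed.

print(collapse_whitespace("  Hello \n\t world  "))   # -> 'Hello world'
print(collapse_whitespace(None))                      # -> None

from lxml import html
el = html.fromstring("<p>Hello\n   <b>world</b></p>")
print(collapse_whitespace(el))                        # -> 'Hello world'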
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_regex(self): """Sets up the patterns and compiled regex objects for parsing types."""
#Regex for matching the entire body of the type and getting top-level modifiers. self._RX_TYPE = r"\n\s*type(?P<modifiers>,\s+(public|private))?(\s*::)?\s+(?P<name>[A-Za-z0-9_]+)" + \ r"(?P<contents>.+?)end\s*type(\s+(?P=name))?" self.RE_TYPE = re.compile(self._RX_TYPE, re.DOTALL | re.I) #This regex is the same as RE_TYPE, only the contents are removed from the definition. self._RX_SIG = r"type(?P<modifiers>,\s+(public|private))?(\s+::)?\s+(?P<name>[A-Za-z0-9_]+)" self.RE_SIG = re.compile(self._RX_SIG, re.I) #Regex for finding if the type is private self._RX_PRIV = "private.+?(contains)?" self.RE_PRIV = re.compile(self._RX_PRIV, re.DOTALL | re.I) #Regex for finding methods buried in a type declaration. self._RX_EXEC = r"^\s*(?P<modifiers>[^:]+)\s+::\s+(?P<name>[A-Za-z0-9_]+)" + \ r"(\s+=>\s+(?P<points>[A-Za-z0-9_]+))?$" self.RE_EXEC = re.compile(self._RX_EXEC, re.M | re.I) #Regex for getting text after contains statement self._RX_CONTAINS = r"\n\s*contains(?P<remainder>.+)" self.RE_CONTAINS = re.compile(self._RX_CONTAINS, re.DOTALL | re.I)
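To illustrate what RE_TYPE captures, here is a standalone sketch that compiles the same pattern against a toy Fortran type; it is an illustration only, not part of the parser class.

import re

rx_type = (r"\n\s*type(?P<modifiers>,\s+(public|private))?(\s*::)?\s+(?P<name>[A-Za-z0-9_]+)"
           r"(?P<contents>.+?)end\s*type(\s+(?P=name))?")
source = """
type, public :: particle
   real :: mass
   integer :: charge
end type particle
"""
match = re.compile(rx_type, re.DOTALL | re.I).search(source)
print(match.group("name"))       # -> particle
print(match.group("modifiers"))  # -> , public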
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_line(self, statement, element, mode): """As part of real-time update, parses the statement and adjusts the attributes of the specified CustomType instance to reflect the changes. :arg statement: the lines of code that were added/removed/changed on the element after it had already been parsed. The lines together form a single continuous code statement. :arg element: the CustomType instance to update. :arg mode: 'insert' or 'delete'. """
if element.incomplete: #We need to check for the end_token so we can close up the incomplete #status for the instance. if element.end_token in statement: element.incomplete = False return #This method deals with updating the *body* of the type declaration. The only #possible entries in the body are member variable declarations and type #executable definitions. self._process_execs_contents(statement, element.module.name, element, mode) self._rt_parse_members(statement, element, mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _rt_members_add(self, element, statement): """Finds all the member declarations in 'statement' and adds the corresponding instances to element.members."""
members = self.vparser.parse(statement, None) for member in members: single = members[member] single.parent = element element.members[member] = single
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _rt_members_delete(self, element, statement): """Finds all the member declarations in 'statement' and removes the corresponding instances from element.members."""
removals = self.vparser.parse(statement, None) for member in removals: if member in element.members: del element.members[member]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, module): """Extracts all the types from the specified module body."""
matches = self.RE_TYPE.finditer(module.contents) result = {} for match in matches: name = match.group("name") modifiers = match.group("modifiers") if modifiers is not None: cleanmods = re.split(r"[\s,]+", modifiers.strip()) else: cleanmods = [] contents = match.group("contents") result[name.lower()] = self._process_type(name, cleanmods, contents, module, match) if "public" in result[name.lower()].modifiers: module.publics[name.lower()] = 1 #Set the types we found in the module and then move the embedded #ones into their correct parent executables. module.types = result module.update_embedded("types")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _process_type(self, name, modifiers, contents, module, match): """Processes a regex match of a type's contents."""
#First, we need to see if the types children are private. if self.RE_PRIV.search(contents): modifiers.append("private contents") #Next, we need to parse out all the members of the type and their docstrings members = self.vparser.parse(contents, None) #Now we can create the type code element and handle the member docstrings t = CustomType(name, modifiers, members, module) #parse out all the executables including the finalizer execs = self._process_execs(contents, module.name, t) #Set the regex start and end char indices t.start, t.end = module.absolute_charindex(match.string, match.start(), match.end()) #Update the parent for embedded members and executables for key in list(t.members.keys()): t.members[key].parent = t for key in list(t.executables.keys()): t.executables[key].parent = t #Extract the docstrings from the type body and associate them with their members memdocs = self.docparser.parse_docs(contents, t) if name in memdocs: docs = self.docparser.to_doc(memdocs[name][0], name) self.docparser.process_memberdocs(docs, t) return t
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_docs(self, t, module): """Updates the documentation for the specified type using the module predocs."""
#We need to look in the parent module docstrings for this types decorating tags. key = "{}.{}".format(module.name, t.name) if key in module.predocs: t.docstring = self.docparser.to_doc(module.predocs[key][0], t.name) t.docstart, t.docend = (module.predocs[key][1], module.predocs[key][2])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _process_execs(self, contents, modulename, atype, mode="insert"): """Extracts all the executable methods that belong to the type."""
#We only want to look at text after the contains statement match = self.RE_CONTAINS.search(contents) #It is possible for the type to not have any executables if match is not None: exectext = match.group("remainder") self._process_execs_contents(exectext, modulename, atype, mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login(self): """Logs into MAL and sets cookies appropriately. :rtype: :class:`.Session` :return: The current session. """
# POSTS a login to mal. mal_headers = { 'Host': 'myanimelist.net', } mal_payload = { 'username': self.username, 'password': self.password, 'cookie': 1, 'sublogin': 'Login' } self.session.headers.update(mal_headers) r = self.session.post(u'http://myanimelist.net/login.php', data=mal_payload) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def increase_indent(func): """Decorator for making the debug output of the decorated function print one indentation level deeper."""
def wrapper(*args, **kwargs): global _debug_indent _debug_indent += 1 result = func(*args, **kwargs) _debug_indent -= 1 return result return wrapper
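A minimal sketch of the decorator in use; it assumes the calls live in the same debug module (where `_debug_indent` and `dbg` are defined) and that a debug function has been enabled.

@increase_indent
def resolve(name):
    dbg('resolving %s', name)

@increase_indent
def walk(names):
    dbg('walking %d names', len(names))
    for name in names:
        resolve(name)   # printed one indentation level deeper than 'walking'

walk(['foo', 'bar'])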
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dbg(message, *args): """ Looks at the stack to see if a debug message should be printed. """
if debug_function and enable_notice: frm = inspect.stack()[1] mod = inspect.getmodule(frm[0]) if not (mod.__name__ in ignored_modules): i = ' ' * _debug_indent debug_function(NOTICE, i + 'dbg: ' + message % args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_to_stdout(level, str_out): """ The default debug function """
if level == NOTICE: col = Fore.GREEN elif level == WARNING: col = Fore.RED else: col = Fore.YELLOW if not is_py3: str_out = str_out.encode(encoding, 'replace') print((col + str_out + Fore.RESET))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def count_dimensions(entry): """Counts the number of dimensions from a nested list of dimension assignments that may include function calls. """
result = 0 for e in entry: if isinstance(e, str): sliced = e.strip(",").split(",") result += 0 if len(sliced) == 1 and sliced[0] == "" else len(sliced) return result
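A few worked inputs for the helper above, assuming count_dimensions is in scope; the values follow directly from the strip/split logic shown.

print(count_dimensions(["3, 4"]))         # -> 2: two comma-separated extents
print(count_dimensions(["size(a), 8,"]))  # -> 2: the trailing comma is stripped first
print(count_dimensions([""]))             # -> 0: an empty spec contributes nothing
print(count_dimensions([None]))           # -> 0: non-string entries are ignored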
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, string, parent): """Parses all the value code elements from the specified string."""
result = {} for member in self.RE_MEMBERS.finditer(string): mems = self._process_member(member, parent, string) #The regex match could contain multiple members that were defined #on the same line in the code file. for onemem in mems: result[onemem.name.lower()] = onemem return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _process_member(self, member, parent, string): """Extracts all the member info from the regex match; returns a list of ValueElement instances."""
#The modifiers regex is very greedy so we have some cleaning up to do #to extract the mods. modifiers = member.group("modifiers") dimension = None if modifiers is not None: #Unfortunately, the dimension can also be specified as a modifier and #the dimensions can include variable names and functions. This introduces #the possibility of nested lists. modifiers = modifiers.lower() if "dimension" in modifiers: start, end = self._get_dim_modifier(modifiers) dimension = modifiers[start+1:end] dimtext = modifiers[modifiers.index("dimension"):end+1] modifiers = re.split(r",\s*", modifiers.replace(dimtext, "").strip()) #modifiers.append("dimension") else: modifiers = re.split(r"[,\s]+", modifiers.strip()) if "" in modifiers: modifiers.remove("") dtype = member.group("type") kind = member.group("kind") names = member.group("names") #If there are multiple vars defined on this line we need to return #a list of all of them. result = [] #They might have defined multiple vars on the same line refstring = string[member.start():member.end()].strip() if parent is not None: refline = parent.module.linenum(member.start()) else: refline = "?" ready = self._separate_multiple_def(re.sub(r",\s*", ", ", names.strip()), parent, refstring, refline) for name, ldimension, default, D in self._clean_multiple_def(ready): #Now construct the element and set all the values, then add it in the results list. udim = ldimension if ldimension is not None else dimension uD = D if ldimension is not None else count_dimensions([dimension]) result.append(ValueElement(name, modifiers, dtype, kind, default, udim, parent, uD)) return result