def formalize(self):
    source_class = self.source_link.to_metaclass
    target_class = self.target_link.to_metaclass
    source_class.referential_attributes |= set(self.source_keys)
    target_class.identifying_attributes |= set(self.target_keys)

    def fget(inst, ref_name, alt_prop):
        other_inst = self.target_link.navigate_one(inst)
        if other_inst is None and alt_prop:
            return alt_prop.fget(inst)
        return getattr(other_inst, ref_name, None)

    def fset(inst, value, name, ref_name, alt_prop):
        kind = get_metaclass(inst).kind
        raise MetaException('%s.%s is a referential attribute '
                            'and cannot be assigned directly' % (kind, name))

    for ref_key, primary_key in zip(self.source_keys, self.target_keys):
        prop = getattr(source_class.clazz, ref_key, None)
        prop = property(partial(fget, ref_name=primary_key, alt_prop=prop),
                        partial(fset, name=ref_key, ref_name=primary_key, alt_prop=prop))
        setattr(source_class.clazz, ref_key, prop)
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    local_stream = utils.BytearrayStream()

    if self._unique_identifier:
        self._unique_identifier.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._cryptographic_parameters:
        self._cryptographic_parameters.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._data:
        self._data.write(local_stream, kmip_version=kmip_version)
    if self._digested_data:
        self._digested_data.write(local_stream, kmip_version=kmip_version)
    if self._signature_data:
        self._signature_data.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._correlation_value:
        self._correlation_value.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._init_indicator:
        self._init_indicator.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._final_indicator:
        self._final_indicator.write(
            local_stream,
            kmip_version=kmip_version
        )

    self.length = local_stream.length()
    super(SignatureVerifyRequestPayload, self).write(
        output_stream,
        kmip_version=kmip_version
    )
    output_stream.write(local_stream.buffer)
def get_imap_capabilities(server):
    capabilities = list(map(str, list(server.capabilities())))
    for i in range(len(capabilities)):
        capabilities[i] = str(capabilities[i]).replace("b'", "").replace("'", "")
    logger.debug("IMAP server supports: {0}".format(capabilities))
    return capabilities
def get_prices(self, date: str, currency: str) -> List[PriceModel]:
    from .repositories import PriceRepository

    session = self.session
    repo = PriceRepository(session)
    query = repo.query

    if date:
        query = query.filter(dal.Price.date == date)
    if currency:
        query = query.filter(dal.Price.currency == currency)

    query = query.order_by(dal.Price.namespace, dal.Price.symbol)
    price_entities = query.all()

    mapper = mappers.PriceMapper()
    result = []
    for entity in price_entities:
        model = mapper.map_entity(entity)
        result.append(model)
    return result
def _handle_display_data(self, msg):
    self.log.debug("display: %s", msg.get('content', ''))
    if not self._hidden and self._is_from_this_session(msg):
        source = msg['content']['source']
        data = msg['content']['data']
        metadata = msg['content']['metadata']
        if data.has_key('text/html'):
            html = data['text/html']
            self._append_html(html, True)
        elif data.has_key('text/plain'):
            text = data['text/plain']
            self._append_plain_text(text, True)
        self._append_plain_text(u'\n', True)
def check_type(self, value):
    if self.__dict__['dtype'] is None:
        return
    elif value is None:
        return
    elif isinstance(value, self.__dict__['dtype']):
        return
    msg = "Value of type %s, when %s was expected." % (
        type(value), self.__dict__['dtype'])
    raise TypeError(msg)
def s3walk(self, basedir, show_dir=None):
    if not show_dir:
        show_dir = self.opt.show_dir
    if basedir[-1] == PATH_SEP:
        basedir = basedir[0:-1]

    s3url = S3URL(basedir)
    result = []

    pool = ThreadPool(ThreadUtil, self.opt)
    pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result)
    pool.join()

    if not show_dir and len(result) == 1 and result[0]['is_dir']:
        path = result[0]['name']
        s3url = S3URL(path)
        result = []
        pool = ThreadPool(ThreadUtil, self.opt)
        pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result)
        pool.join()

    def compare(x, y):
        result = -cmp(x['is_dir'], y['is_dir'])
        if result != 0:
            return result
        return cmp(x['name'], y['name'])

    return sorted(result, key=cmp_to_key(compare))
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    local_buffer = utils.BytearrayStream()

    if self._unique_identifier:
        self._unique_identifier.write(
            local_buffer,
            kmip_version=kmip_version
        )

    self.length = local_buffer.length()
    super(GetAttributeListRequestPayload, self).write(
        output_buffer,
        kmip_version=kmip_version
    )
    output_buffer.write(local_buffer.buffer)
def _construct_schema(elements, nsmap):
    schema = {
        'properties': {},
        'geometry': None
    }

    schema_key = None
    gml_key = None

    if nsmap:
        for key in nsmap:
            if nsmap[key] == XS_NAMESPACE:
                schema_key = key
            if nsmap[key] in GML_NAMESPACES:
                gml_key = key
    else:
        gml_key = 'gml'
        schema_key = 'xsd'

    mappings = {
        'PointPropertyType': 'Point',
        'PolygonPropertyType': 'Polygon',
        'LineStringPropertyType': 'LineString',
        'MultiPointPropertyType': 'MultiPoint',
        'MultiLineStringPropertyType': 'MultiLineString',
        'MultiPolygonPropertyType': 'MultiPolygon',
        'MultiGeometryPropertyType': 'MultiGeometry',
        'GeometryPropertyType': 'GeometryCollection',
        'SurfacePropertyType': '3D Polygon',
        'MultiSurfacePropertyType': '3D MultiPolygon'
    }

    for element in elements:
        data_type = element.attrib['type'].replace(gml_key + ':', '')
        name = element.attrib['name']

        if data_type in mappings:
            schema['geometry'] = mappings[data_type]
            schema['geometry_column'] = name
        else:
            schema['properties'][name] = data_type.replace(schema_key + ':', '')

    if schema['properties'] or schema['geometry']:
        return schema
    else:
        return None
def _exit_gracefully(self, signum, frame):
    self.log.info("Exiting gracefully upon receiving signal %s", signum)
    self.terminate()
    self.end()
    self.log.debug("Finished terminating DAG processors.")
    sys.exit(os.EX_OK)
def convertArgsToTokens(self, data):
    tdict = []
    tokens = []
    d = open(data, 'r')
    for line in d.readlines():
        tdict.append(line.rstrip())
        tokens += line.split()
    d.close()
    tokens = list(set(tokens))
    return tdict, tokens
def _nested_convert_to_tensor(struct, dtype=None, name=None):
    if dtype is not None or not tf.nest.is_nested(struct):
        return tf.convert_to_tensor(struct, dtype=dtype)

    if _maybe_convertible_to_tensor(struct):
        try:
            return tf.convert_to_tensor(value=struct, name=name)
        except (ValueError, TypeError):
            pass

    shallow_struct = _get_shallow_structure(struct)
    return nest.map_structure_up_to(
        shallow_struct, lambda s: _nested_convert_to_tensor(s, name=name), struct)
def __get_or_create(
    ns_cache: NamespaceMap,
    name: sym.Symbol,
    module: types.ModuleType = None,
    core_ns_name=CORE_NS,
) -> lmap.Map:
    ns = ns_cache.entry(name, None)
    if ns is not None:
        return ns_cache
    new_ns = Namespace(name, module=module)
    if name.name != core_ns_name:
        core_ns = ns_cache.entry(sym.symbol(core_ns_name), None)
        assert core_ns is not None, "Core namespace not loaded yet!"
        new_ns.refer_all(core_ns)
    return ns_cache.assoc(name, new_ns)
def gauss(x, *p):
    A, mu, sigma = p
    return A * np.exp(-0.5 * (-mu + x)**2 / sigma**2)
def tempfile_set(tempfile, target):
    if target:
        os.rename(tempfile, target)
    else:
        os.unlink(tempfile)

    if target in TEMP_FILES:
        TEMP_FILES.remove(tempfile)
def _get_properties(config):
    property_classes = {BUILTIN_PROPERTY}
    property_names = set()
    if config is not None:
        property_classes.update(config.property_classes)
        property_names.update(
            (prop.rsplit(".", 1)[-1] for prop in config.property_classes)
        )
    return property_classes, property_names
def duration(self):
    ecc = self.ecc if not np.isnan(self.ecc) else np.sqrt(self.ecw**2 + self.esw**2)
    esw = self.esw if not np.isnan(self.esw) else ecc * np.sin(self.w)
    aRs = ((G * self.rhos * (1. + self.MpMs) * (self.per * DAYSEC)**2.) /
           (3. * np.pi))**(1. / 3.)
    inc = np.arccos(self.bcirc / aRs)
    becc = self.bcirc * (1 - ecc**2) / (1 - esw)
    tdur = self.per / 2. / np.pi * np.arcsin(
        ((1. + self.RpRs)**2 - becc**2)**0.5 / (np.sin(inc) * aRs))
    tdur *= np.sqrt(1. - ecc**2.) / (1. - esw)
    return tdur
def __fetch_items(self, path, page=1):
    fetch_data = True
    parsed_crates = 0
    total_crates = 0

    while fetch_data:
        logger.debug("Fetching page: %i", page)

        try:
            payload = {'sort': 'alphabetical', 'page': page}
            raw_content = self.fetch(path, payload=payload)
            content = json.loads(raw_content)
            parsed_crates += len(content['crates'])

            if not total_crates:
                total_crates = content['meta']['total']
        except requests.exceptions.HTTPError as e:
            logger.error("HTTP exception raised - %s", e.response.text)
            raise e

        yield raw_content

        page += 1
        if parsed_crates >= total_crates:
            fetch_data = False
def _joint_mean(self):
    with tf.name_scope("mean_joint"):
        with tf.control_dependencies(self.runtime_assertions):
            initial_latent_mean = _broadcast_to_shape(
                self.initial_state_prior.mean()[..., tf.newaxis],
                tf.concat([self.batch_shape_tensor(),
                           [self.latent_size, 1]], axis=0))
            initial_observation_mean = _propagate_mean(
                initial_latent_mean,
                self.get_observation_matrix_for_timestep(self.initial_step),
                self.get_observation_noise_for_timestep(self.initial_step))

        mean_step = build_kalman_mean_step(
            self.get_transition_matrix_for_timestep,
            self.get_transition_noise_for_timestep,
            self.get_observation_matrix_for_timestep,
            self.get_observation_noise_for_timestep)

        (latent_means, observation_means) = tf.scan(
            mean_step,
            elems=tf.range(self.initial_step + 1, self.final_step),
            initializer=(initial_latent_mean, initial_observation_mean))

        latent_means = tf.concat([initial_latent_mean[tf.newaxis, ...],
                                  latent_means], axis=0)
        observation_means = tf.concat([initial_observation_mean[tf.newaxis, ...],
                                       observation_means], axis=0)

        latent_means = tf.squeeze(latent_means, -1)
        latent_means = distribution_util.move_dimension(latent_means, 0, -2)
        observation_means = tf.squeeze(observation_means, -1)
        observation_means = distribution_util.move_dimension(
            observation_means, 0, -2)

        return latent_means, observation_means
def resolve_outputs(self):
    input_shape = None
    for i, shape in enumerate(self._input_shapes.values()):
        if i == 0:
            input_shape = shape
        if len(input_shape) != len(shape) or any(
                a is not None and b is not None and a != b
                for a, b in zip(input_shape[:-1], shape[:-1])):
            raise util.ConfigurationError(
                'layer "{}" incompatible input shapes {}'
                .format(self.name, self._input_shapes))

    size = self.kwargs.get('size')
    shape = self.kwargs.get('shape')
    if shape is not None:
        pass
    elif size is not None:
        shape = tuple(input_shape[:-1]) + (size, )
    else:
        raise util.ConfigurationError(
            'layer "{}" does not specify a size'.format(self.name))
    self._output_shapes['out'] = shape
def read(self, filename):
    kwargs = {}
    if sys.version_info >= (3, 2):
        kwargs['encoding'] = "utf-8"
    return configparser.RawConfigParser.read(self, filename, **kwargs)
def normalize(self, dt, is_dst=False):
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
def close(self):
    if self._closed:
        return
    self._socket.close()
    self._closed = True
def add_patches(self, patches, after=None):
    if after is None:
        self.insert_patches(patches)
    else:
        self._check_patch(after)
        patchlines = self._patchlines_before(after)
        patchlines.append(self.patch2line[after])
        for patch in patches:
            patchline = PatchLine(patch)
            patchlines.append(patchline)
            self.patch2line[patchline.get_patch()] = patchline
        patchlines.extend(self._patchlines_after(after))
        self.patchlines = patchlines
def update_key(
        self, vault_base_url, key_name, key_version, key_ops=None,
        key_attributes=None, tags=None, custom_headers=None, raw=False,
        **operation_config):
    parameters = models.KeyUpdateParameters(
        key_ops=key_ops, key_attributes=key_attributes, tags=tags)

    url = self.update_key.metadata['url']
    path_format_arguments = {
        'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        'key-name': self._serialize.url("key_name", key_name, 'str'),
        'key-version': self._serialize.url("key_version", key_version, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    body_content = self._serialize.body(parameters, 'KeyUpdateParameters')

    request = self._client.patch(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.KeyVaultErrorException(self._deserialize, response)

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('KeyBundle', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def specific_gains(string):
    if not string:
        return {}

    gains = {}
    for gain in string.split(','):
        amp_name, value = gain.split('=')
        gains[amp_name.strip()] = float(value.strip())
    return gains
def _with_loc(f: W) -> W:
    @functools.wraps(f)
    def with_lineno_and_col(ctx):
        meta = lmap.map(
            {READER_LINE_KW: ctx.reader.line, READER_COL_KW: ctx.reader.col}
        )
        v = f(ctx)
        try:
            return v.with_meta(meta)
        except AttributeError:
            return v

    return cast(W, with_lineno_and_col)
def updates(self, **kwargs):
    regs = regularizers.from_kwargs(self, **kwargs)
    _, updates = self.build_graph(regs)
    return updates
def sqrt(wave):
    # A stray "r" (the remnant of a stripped raw docstring) was removed here.
    dep_units = "{0}**0.5".format(wave.dep_units)
    return _operation(wave, "sqrt", dep_units, np.sqrt)
def parse_args():
    usage = "Usage: create_concordance <infile> [<outfile>]"
    description = "Simple Concordance Generator"
    argparser = argparse.ArgumentParser(
        usage=usage, description=description)
    argparser.add_argument(
        'infile',
        type=argparse.FileType('r'),
        help="File read in to create concordance")
    argparser.add_argument(
        'outfile',
        nargs='?',
        type=argparse.FileType('w'),
        default=sys.stdout,
        help="File to write concordance to. Default is stdout")
    argparser.add_argument(
        '--word',
        nargs="?",
        const=str,
        help="Display a word in concordance")
    args = argparser.parse_args()
    return args
def _count_table_rows(self, table_name):
    cursor = self._db.cursor()
    select_stmt = "SELECT COUNT(*) FROM " + table_name

    try:
        cursor.execute(select_stmt)
        row = cursor.fetchone()
    except sqlite3.DatabaseError as e:
        msg = "invalid archive file; cause: %s" % str(e)
        raise ArchiveError(cause=msg)
    finally:
        cursor.close()

    return row[0]
def discount_status(request, form):
    discounts = form.cleaned_data["discount"]

    items = commerce.DiscountItem.objects.filter(
        Q(discount__in=discounts),
    ).select_related("cart", "product", "product__category")

    items = group_by_cart_status(
        items,
        ["discount"],
        ["discount", "discount__description"],
    )

    headings = [
        "Discount", "Paid", "Reserved", "Unreserved", "Refunded",
    ]
    data = []

    for item in items:
        data.append([
            item["discount__description"],
            item["total_paid"],
            item["total_reserved"],
            item["total_unreserved"],
            item["total_refunded"],
        ])

    return ListReport("Usage by item", headings, data)
def spin(self):
    if self._notification_socket:
        self._flush_notifications()
    if self._iopub_socket:
        self._flush_iopub(self._iopub_socket)
    if self._mux_socket:
        self._flush_results(self._mux_socket)
    if self._task_socket:
        self._flush_results(self._task_socket)
    if self._control_socket:
        self._flush_control(self._control_socket)
    if self._query_socket:
        self._flush_ignored_hub_replies()
def show(self, title=''):
    self.render(title=title)
    if self.fig:
        plt.show(self.fig)
def f_get_groups(self, copy=True):
    if copy:
        return self._groups.copy()
    else:
        return self._groups
def create_domain(self, domain_name, username=None,
                  alphabet=Domain.DEFAULT_ALPHABET,
                  length=Domain.DEFAULT_KEY_LENGTH):
    try:
        return self._create_domain(domain_name, username, alphabet, length)
    except Exception as ex:
        _logger.warn("Inserting new domain failed: %s", ex)
        raise DuplicateDomainException
def get_help(self):
    if self.help:
        return self.help
    elif self.__doc__ and self.__doc__.strip():
        return self.__doc__.strip()
    else:
        return ''
def _unique_constraint_name(table: str, field, keys):
    postfix = '_'.join(keys)
    return '{table}_{field}_unique_{postfix}'.format(
        table=table,
        field=field.column,
        postfix=postfix
    )
def get_result(self, indices_or_msg_ids=None, block=None):
    block = self.block if block is None else block
    if indices_or_msg_ids is None:
        indices_or_msg_ids = -1

    if not isinstance(indices_or_msg_ids, (list, tuple)):
        indices_or_msg_ids = [indices_or_msg_ids]

    theids = []
    for id in indices_or_msg_ids:
        if isinstance(id, int):
            id = self.history[id]
        if not isinstance(id, basestring):
            raise TypeError("indices must be str or int, not %r" % id)
        theids.append(id)

    local_ids = filter(lambda msg_id: msg_id in self.history or msg_id in self.results, theids)
    remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)

    if remote_ids:
        ar = AsyncHubResult(self, msg_ids=theids)
    else:
        ar = AsyncResult(self, msg_ids=theids)

    if block:
        ar.wait()

    return ar
def load_python_global(module, name):
    if module == '__builtin__' and six.PY3:
        module = 'builtins'
    module = importlib.import_module(module)
    return getattr(module, name)
def _req_rep_retry(self, request):
    retries_left = self.RETRIES
    while retries_left:
        self._logger.log(1, 'Sending REQ `%s`', request)
        self._send_request(request)
        socks = dict(self._poll.poll(self.TIMEOUT))
        if socks.get(self._socket) == zmq.POLLIN:
            response = self._receive_response()
            self._logger.log(1, 'Received REP `%s`', response)
            return response, self.RETRIES - retries_left
        else:
            self._logger.debug('No response from server (%d retries left)' % retries_left)
            self._close_socket(confused=True)
            retries_left -= 1
            if retries_left == 0:
                raise RuntimeError('Server seems to be offline!')
            time.sleep(self.SLEEP)
            self._start_socket()
def _check_inputs(self):
    try:
        _ = self._inputs[0]
    except TypeError:
        raise RuntimeError(
            "inputs should be iterable but found type='{0}', value="
            "'{1}'".format(type(self._inputs), str(self._inputs)))
    from melody.inputs import Input
    for check_input in self._inputs:
        if not isinstance(check_input, Input):
            raise RuntimeError(
                "input should be a subclass of the Input class but "
                "found type='{0}', value='{1}'".format(type(check_input),
                                                       str(check_input)))
def individuals(context, institute, causatives, case_id):
    LOG.info("Running scout view individuals")
    adapter = context.obj['adapter']

    individuals = []
    if case_id:
        case = adapter.case(case_id=case_id)
        if case:
            cases = [case]
        else:
            LOG.info("Could not find case %s", case_id)
            return
    else:
        cases = [case_obj for case_obj in adapter.cases(
            collaborator=institute, has_causatives=causatives)]
        if len(cases) == 0:
            LOG.info("Could not find cases that match criteria")
            return

    individuals = (ind_obj for case_obj in cases
                   for ind_obj in case_obj['individuals'])

    # The header string was truncated in the source; the column names below
    # are inferred from the fields echoed for each individual.
    click.echo("#case_id\tind_id\tdisplay_name\tsex\tphenotype\tmother\tfather")
    for case in cases:
        for ind_obj in case['individuals']:
            ind_info = [
                case['_id'],
                ind_obj['individual_id'],
                ind_obj['display_name'],
                SEX_MAP[int(ind_obj['sex'])],
                PHENOTYPE_MAP[ind_obj['phenotype']],
                ind_obj['mother'],
                ind_obj['father']
            ]
            click.echo('\t'.join(ind_info))
def show(self, *args, **kwargs):
    from webbrowser import open as webopen
    return webopen(str(self), *args, **kwargs)
def insert_child(self, object, index, child):
    if isinstance(child, Subgraph):
        object.subgraphs.insert(index, child)
    elif isinstance(child, Cluster):
        object.clusters.insert(index, child)
    elif isinstance(child, Node):
        object.nodes.insert(index, child)
    elif isinstance(child, Edge):
        object.edges.insert(index, child)
    else:
        pass
def get_private_keys(
        self,
        index=0,
        count=1,
        security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
    return commands.GetPrivateKeysCommand(self.adapter)(
        seed=self.seed,
        index=index,
        count=count,
        securityLevel=security_level,
    )
async def limited(until):
    duration = int(round(until - time.time()))
    mins = duration / 60
    fmt = 'We have exhausted a ratelimit quota. Retrying in %.2f seconds (%.3f minutes).'
    log.warn(fmt, duration, mins)
def get_last_activities(self, n):
    filenames = self.get_activity_list().iloc[-n:].filename.tolist()
    last_activities = [self.get_activity(f) for f in filenames]
    return last_activities
def fetch(self, category=CATEGORY_QUESTION, offset=DEFAULT_OFFSET):
    if not offset:
        offset = DEFAULT_OFFSET

    kwargs = {"offset": offset}
    items = super().fetch(category, **kwargs)

    return items
def get_public_tokens(self):
    r = self.remote_utils.get_url(self.url() + "public_tokens/")
    return r.json()
def validate_token(self, token, expected_data=None):
    try:
        data = self.load_token(token)
        if expected_data:
            for k in expected_data:
                if expected_data[k] != data["data"].get(k):
                    return None
        return data
    except BadData:
        return None
async def set_session_state(self, state):
    await self._can_run()
    state = state.encode(self.encoding) if isinstance(state, six.text_type) else state
    return await self._mgmt_request_response(
        REQUEST_RESPONSE_SET_SESSION_STATE_OPERATION,
        {'session-id': self.session_id, 'session-state': bytearray(state)},
        mgmt_handlers.default)
def error(self, relative_to='AME2003'):
    df = self.df - Table(relative_to).df
    return Table(df=df)
def update_event_hub(self, hub_name, hub=None):
    _validate_not_none('hub_name', hub_name)
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = '/' + _str(hub_name) + '?api-version=2014-01'
    request.body = _get_request_body(_convert_event_hub_to_xml(hub))
    request.path, request.query = self._httpclient._update_request_uri_query(request)
    request.headers.append(('If-Match', '*'))
    request.headers = self._update_service_bus_header(request)
    response = self._perform_request(request)

    return _convert_response_to_event_hub(response)
def _get_pipeline_processes(self):
    with open(self.log_file) as fh:
        for line in fh:
            if re.match(".*Creating operator.*", line):
                match = re.match(".*Creating operator > (.*) --", line)
                process = match.group(1)

                if any([process.startswith(x) for x in self._blacklist]):
                    continue

                if process not in self.skip_processes:
                    self.processes[match.group(1)] = {
                        "barrier": "W",
                        "submitted": set(),
                        "finished": set(),
                        "failed": set(),
                        "retry": set(),
                        "cpus": None,
                        "memory": None
                    }
                    self.process_tags[process] = {}

            if re.match(".*Launching `.*` \[.*\] ", line):
                tag_match = re.match(".*Launching `.*` \[(.*)\] ", line)
                self.pipeline_tag = tag_match.group(1) if tag_match else "?"
                name_match = re.match(".*Launching `(.*)` \[.*\] ", line)
                self.pipeline_name = name_match.group(1) if name_match else "?"

    self.content_lines = len(self.processes)
def allow_request(self, request, view):
    if request.method != 'POST':
        return True
    return super(PostRequestThrottleMixin, self).allow_request(request, view)
def mr_reader(job, input_stream, loads=core.loads):
    for line in input_stream:
        yield loads(line),
def until_traits_are_present(self, element_with_traits):
    end_time = time.time() + self._timeout
    count = 1
    missing_traits_descriptions = None
    while True:
        missing_traits_descriptions = []
        try:
            missing_traits_descriptions = element_with_traits.evaluate_traits()
            if len(missing_traits_descriptions) == 0:
                return True
            else:
                # Debug message string was truncated in the source; this is a
                # plausible reconstruction of the logged content.
                logger.debug("Missing traits: {0}".format(
                    ", ".join(missing_traits_descriptions)))
        except self._ignored_exceptions as ex:
            logger.debug("Captured {0}: {1}".format(
                str(ex.__class__).replace("<type '", "").replace("'>", ""), str(ex)))
            pass
        time.sleep(self._poll)
        count += 1
        if time.time() > end_time:
            break
    raise TimeoutException(
        msg="conditions " + '<' + '> <'.join(missing_traits_descriptions) + '>' +
            " not true after " + str(self._timeout) + " seconds.")
def find_max_rad_npnp(self):
    max_rad = 0
    max_npnp = 0
    for res, _ in self.items():
        if res != 'KEY':
            for _, ff_params in self[res].items():
                if max_rad < ff_params[1]:
                    max_rad = ff_params[1]
                if max_npnp < ff_params[4]:
                    max_npnp = ff_params[4]
    return max_rad, max_npnp
def crscode_to_string(codetype, code, format):
    link = 'http://spatialreference.org/ref/%s/%s/%s/' % (codetype, code, format)
    result = urllib2.urlopen(link).read()
    if not isinstance(result, str):
        result = result.decode()
    return result
def intern(self, sym: sym.Symbol, var: Var, force: bool = False) -> Var:
    m: lmap.Map = self._interns.swap(Namespace._intern, sym, var, force=force)
    return m.entry(sym)
def clone(url, path):
    adapter = None

    if url[:4] == "git@" or url[-4:] == ".git":
        adapter = Git(path)
    if url[:6] == "svn://":
        adapter = Svn(path)
    if url[:6] == "bzr://":
        adapter = Bzr(path)
    if url[:9] == "ssh://hg@":
        adapter = Hg(path)

    if adapter is None:
        raise RepositoryAdapterNotFound(
            "Can't find adapter for `%s` repository url" % url)

    return adapter.clone(url)
def _send_file(self, local, remote):
    remote = "%s:%s" % (self.location, remote)
    for i in range(10):
        if not os.path.exists(local):
            self.log.debug("waiting for %s" % local)
            time.sleep(1)
        else:
            break
    self.log.info("sending %s to %s", local, remote)
    check_output(self.scp_cmd + [local, remote])
def set_default_tlw(self, tlw, designer, inspector):
    "track default top level window for toolbox menu default action"
    self.designer = designer
    self.inspector = inspector
def _chunk_pars(freq_vector, data_matrix, pformat):
    pformat = pformat.upper()
    length = 4
    for freq, data in zip(freq_vector, data_matrix):
        data = data.flatten()
        for index in range(0, data.size, length):
            fpoint = [freq] if not index else [None]
            cdata = data[index:index + length]
            if pformat == "MA":
                vector1 = np.abs(cdata)
                vector2 = np.rad2deg(np.angle(cdata))
            elif pformat == "RI":
                vector1 = np.real(cdata)
                vector2 = np.imag(cdata)
            else:
                vector1 = 20.0 * np.log10(np.abs(cdata))
                vector2 = np.rad2deg(np.angle(cdata))
            sep_data = np.array([])
            for item1, item2 in zip(vector1, vector2):
                sep_data = np.concatenate((sep_data, np.array([item1, item2])))
            ret = np.concatenate((np.array(fpoint), sep_data))
            yield ret
def _determine_function_name_type(node, config=None):
    property_classes, property_names = _get_properties(config)
    if not node.is_method():
        return "function"
    if node.decorators:
        decorators = node.decorators.nodes
    else:
        decorators = []
    for decorator in decorators:
        if isinstance(decorator, astroid.Name) or (
            isinstance(decorator, astroid.Attribute)
            and decorator.attrname in property_names
        ):
            infered = utils.safe_infer(decorator)
            if infered and infered.qname() in property_classes:
                return "attr"
        elif isinstance(decorator, astroid.Attribute) and decorator.attrname in (
            "setter",
            "deleter",
        ):
            return "attr"
    return "method"
def get(self, name, factory, *factory_args, **factory_kwargs):
    update_thread_local = getattr(factory, 'update_thread_local', True)

    if (not update_thread_local) or (name not in self.__dict__):
        obj = factory(*factory_args, **factory_kwargs)
        if update_thread_local:
            setattr(self, name, obj)
        return obj

    return getattr(self, name)
def _build_point_formats_dtypes(point_format_dimensions, dimensions_dict):
    return {
        fmt_id: _point_format_to_dtype(point_fmt, dimensions_dict)
        for fmt_id, point_fmt in point_format_dimensions.items()
    }
def fetch_metric(self, metric, start, end, tags={}, aggregator="sum",
                 downsample=None, ms_resolution=True):
    query = "{aggregator}:{downsample}{metric}{{{tags}}}".format(
        aggregator=aggregator,
        downsample=downsample + "-avg:" if downsample else "",
        metric=metric,
        tags=','.join("%s=%s" % (k, v) for k, v in tags.items())
    )

    params = {
        'ms': ms_resolution,
        'start': '{0:.3f}'.format(start.timestamp()),
        'end': '{0:.3f}'.format(end.timestamp()),
        'm': query
    }

    response = self.__request("/query", params)
    if response.status_code == 200:
        try:
            return response.json()[0]['dps']
        except IndexError:
            return {}

    raise QueryError(response.json())
def read(self):
    self.__fileobj.seek(self.data_offset)
    self.data = self.__fileobj.read(self.data_size)
def gen_timeout_resend(attempts):
    timeout = 2 ** (attempts + 1) + random.uniform(-1, +1)
    logger.debug('next timeout resending will happen on %s',
                 future_dt_str(nowutc(), timeout))
    return timeout
def apply(self, method, args):
    try:
        params = args['params']
        if isinstance(params, dict):
            result = method(**params)
        else:
            result = method(*params)
    except Exception as error:
        server_error(args['id'], error)
    else:
        return result
def _add_group_from_storage(self, args, kwargs):
    return self._nn_interface._add_generic(self,
                                           type_name=GROUP,
                                           group_type_name=GROUP,
                                           args=args,
                                           kwargs=kwargs,
                                           add_prefix=False,
                                           check_naming=False)
def hflip(img):
    if not _is_pil_image(img):
        raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

    return img.transpose(Image.FLIP_LEFT_RIGHT)
def save_image(self, imagefile, save_path, file_ext, mime_type):
    file_to_save = InMemoryUploadedFile(
        imagefile,
        None,
        'foo.%s' % file_ext,
        mime_type,
        imagefile.tell(),
        None
    )
    file_to_save.seek(0)
    self.storage.save(save_path, file_to_save)
def draw(self):
    if not self.visible:
        return
    if not isinstance(self.submenu, Container):
        glEnable(GL_SCISSOR_TEST)
        glScissor(*(self.pos + self.size))
    SubMenu.draw(self)
    if not isinstance(self.submenu, Container):
        glDisable(GL_SCISSOR_TEST)
def cb_help_message(self, option, optname, value, parser):
    self.linter.msgs_store.help_message(utils._splitstrip(value))
    sys.exit(0)
def open(path, mode=gdalconst.GA_ReadOnly):
    path = getattr(path, 'name', path)
    try:
        return Raster(vsiprefix(path), mode)
    except AttributeError:
        try:
            imgdata = path.read()
        except AttributeError:
            raise TypeError('Not a file-like object providing read()')
        else:
            imgio = MemFileIO(delete=False)
            gdal.FileFromMemBuffer(imgio.name, imgdata)
            return Raster(imgio, mode)
    raise ValueError('Failed to open raster from "%r"' % path)
def from_connection_string(cls, conn_str, *, loop=None, **kwargs):
    address, policy, key, _ = parse_conn_str(conn_str)
    parsed_namespace = urlparse(address)
    namespace, _, base = parsed_namespace.hostname.partition('.')
    return cls(
        service_namespace=namespace,
        shared_access_key_name=policy,
        shared_access_key_value=key,
        host_base='.' + base,
        loop=loop,
        **kwargs)
def _read_config(self):
    self._config_loaded = True
    conf = []
    for f in self._candidate_log_files():
        if os.path.isfile(f):
            self._logger.info("Reading config file %s" % f)
            section_rx = re.compile(r"^\[(\w+)\]$")
            keyvalue_rx = re.compile(r"^(\w+:)?([\w.]+)\s*=(.*)$")
            with io.open(f, "rt", encoding="utf-8") as config_file:
                section_name = None
                for lineno, line in enumerate(config_file):
                    line = line.strip()
                    # The comparison string was truncated in the source; "#" is
                    # the usual comment prefix for this kind of config file.
                    if line == "" or line.startswith("#"):
                        continue
                    m1 = section_rx.match(line)
                    if m1:
                        section_name = m1.group(1)
                        continue
                    m2 = keyvalue_rx.match(line)
                    if m2:
                        lng = m2.group(1)
                        key = m2.group(2)
                        val = m2.group(3).strip()
                        if lng and lng.lower() != "py:":
                            continue
                        if section_name:
                            key = section_name + "." + key
                        if key in H2OConfigReader._allowed_config_keys:
                            conf.append((key, val))
                        else:
                            self._logger.error("Key %s is not a valid config key" % key)
                        continue
                    self._logger.error("Syntax error in config file line %d: %s" % (lineno, line))
    self._config = dict(conf)
    return
def clean_time_slots(self):
    ts = ((a[0], a[1]) for t in self.tiers.values() for a in t[0].values())
    for a in {a for b in ts for a in b} ^ set(self.timeslots):
        del(self.timeslots[a])
def __last_beat(cumscore):
    maxes = util.localmax(cumscore)
    med_score = np.median(cumscore[np.argwhere(maxes)])

    return np.argwhere((cumscore * maxes * 2 > med_score)).max()
def _basename_in_blacklist_re(base_name, black_list_re):
    for file_pattern in black_list_re:
        if file_pattern.match(base_name):
            return True
    return False
def _parse_header(line):
    parts = _parseparam(';' + line)
    key = parts.next()
    pdict = {}
    for p in parts:
        i = p.find('=')
        if i >= 0:
            name = p[:i].strip().lower()
            value = p[i + 1:].strip()
            if len(value) >= 2 and value[0] == value[-1] == '"':
                value = value[1:-1]
                value = value.replace('\\\\', '\\').replace('\\"', '"')
            pdict[name] = value
    return key, pdict
def plot_tree(T, res=None, title=None, cmap_id="Pastel2"):
    import matplotlib.pyplot as plt

    def round_time(t, res=0.1):
        v = int(t / float(res)) * res
        return v

    cmap = plt.get_cmap(cmap_id)

    level_bounds = []
    for level in T.levels:
        if level == "root":
            continue
        segments = T.get_segments_in_level(level)
        level_bounds.append(segments)

    B = float(len(level_bounds))
    for i, segments in enumerate(level_bounds):
        labels = utils.segment_labels_to_floats(segments)
        for segment, label in zip(segments, labels):
            if res is None:
                start = segment.start
                end = segment.end
                xlabel = "Time (seconds)"
            else:
                start = int(round_time(segment.start, res=res) / res)
                end = int(round_time(segment.end, res=res) / res)
                xlabel = "Time (frames)"
            plt.axvspan(start, end,
                        ymax=(len(level_bounds) - i) / B,
                        ymin=(len(level_bounds) - i - 1) / B,
                        facecolor=cmap(label))

    L = float(len(T.levels) - 1)
    plt.yticks(np.linspace(0, (L - 1) / L, num=L) + 1 / L / 2.,
               T.levels[1:][::-1])
    plt.xlabel(xlabel)
    if title is not None:
        plt.title(title)
    plt.gca().set_xlim([0, end])
def validate_zone(zone):
    if not has_valid_id(zone):
        raise InvalidZone("%s must contain a valid 'id' attribute" % zone.__name__)
    if not has_valid_name(zone):
        raise InvalidZone("%s must contain a valid 'name' attribute" % zone.__name__)
def _merge(self, old, new, use_equals=False):
    if old is None:
        return new
    if new is None:
        return old
    if (old == new) if use_equals else (old is new):
        return old
    raise ValueError("Incompatible values: %s != %s" % (old, new))
def list(self, resource=None, type=None, actorId=None, _from=None, to=None,
         max=None, **request_parameters):
    check_type(resource, basestring)
    check_type(type, basestring)
    check_type(actorId, basestring)
    check_type(_from, basestring)
    check_type(to, basestring)
    check_type(max, int)

    params = dict_from_items_with_values(
        request_parameters,
        resource=resource,
        type=type,
        actorId=actorId,
        _from=_from,
        to=to,
        max=max,
    )
    if _from:
        params["from"] = params.pop("_from")

    items = self._session.get_items(API_ENDPOINT, params=params)

    for item in items:
        yield self._object_factory(OBJECT_TYPE, item)
def _reformat_historical_formating_error(self):
    if PyFunceble.CONFIGURATION["inactive_database"]:
        historical_formating_error = (
            PyFunceble.CURRENT_DIRECTORY + "inactive-db.json"
        )

        if PyFunceble.path.isfile(historical_formating_error):
            data = Dict().from_json(File(historical_formating_error).read())

            data_to_parse = {}
            top_keys = data.keys()

            for top_key in top_keys:
                low_keys = data[top_key].keys()
                data_to_parse[top_key] = {}

                for low_key in low_keys:
                    if low_key.isdigit():
                        data_to_parse[top_key][
                            int(low_key) - (self.one_day_in_seconds * 30)
                        ] = data[top_key][low_key]
                    else:
                        data_to_parse[top_key][
                            int(PyFunceble.time()) - (self.one_day_in_seconds * 30)
                        ] = data[top_key][low_key]

            if "inactive_db" in PyFunceble.INTERN:
                PyFunceble.INTERN["inactive_db"].update(data_to_parse)
            else:
                PyFunceble.INTERN["inactive_db"] = data_to_parse

            File(historical_formating_error).delete()
def _copy_image(self, name):
    image = self._get_image(name)
    QtGui.QApplication.clipboard().setImage(image)
def list(self):
    url = "api/v0002/mgmt/custom/bundle"
    r = self._apiClient.get(url)
    if r.status_code == 200:
        return r.json()
    else:
        raise ApiException(r)
def log_attempt(self, key):
    with self.lock:
        if key not in self.attempts:
            self.attempts[key] = 1
        else:
            self.attempts[key] += 1
        if self.attempts[key] >= self.max_attempts:
            log.info('Account %s locked due to too many login attempts' % key)
            self.locks[key] = datetime.datetime.utcnow() + \
                datetime.timedelta(seconds=self.lock_duration)
def get_process_gids(self):
    real, effective, saved = _psutil_bsd.get_process_gids(self.pid)
    return nt_gids(real, effective, saved)
def get_service_certificate(self, service_name, thumbalgorithm, thumbprint):
    _validate_not_none('service_name', service_name)
    _validate_not_none('thumbalgorithm', thumbalgorithm)
    _validate_not_none('thumbprint', thumbprint)
    return self._perform_get(
        '/' + self.subscription_id + '/services/hostedservices/' +
        _str(service_name) + '/certificates/' +
        _str(thumbalgorithm) + '-' + _str(thumbprint) + '',
        Certificate)
def calc_n_ints_in_file(filename):
    h = read_header(filename)

    n_bytes = int(h[b'nbits'] / 8)
    n_chans = h[b'nchans']
    n_ifs = h[b'nifs']
    idx_data = len_header(filename)

    f = open(filename, 'rb')
    f.seek(idx_data)
    filesize = os.path.getsize(filename)
    n_bytes_data = filesize - idx_data

    if h[b'nbits'] == 2:
        n_ints = int(4 * n_bytes_data / (n_chans * n_ifs))
    else:
        n_ints = int(n_bytes_data / (n_bytes * n_chans * n_ifs))

    return n_ints
def collect_things_entry_points():
    things = dict()
    for entry_point in iter_entry_points(group='invenio_migrator.things'):
        things[entry_point.name] = entry_point.load()
    return things
def _check_token_present(self):
    try:
        self._get_value(CONFIGKEY_TOKEN)
        self._get_value(CONFIGKEY_REFRESH_TOKEN)
        self._get_value(CONFIGKEY_REFRESHABLE)
    except KeyError:
        self._log("Request new Token (CTP)")
        self._get_new_access_information()
def parse(self, *args):
    parsed_args = self.parser.parse_args(args)

    if parsed_args.category is None:
        delattr(parsed_args, 'category')

    if self._from_date:
        parsed_args.from_date = str_to_datetime(parsed_args.from_date)

    if self._to_date and parsed_args.to_date:
        parsed_args.to_date = str_to_datetime(parsed_args.to_date)

    if self._archive and parsed_args.archived_since:
        parsed_args.archived_since = str_to_datetime(parsed_args.archived_since)

    if self._archive and parsed_args.fetch_archive and parsed_args.no_archive:
        raise AttributeError("fetch-archive and no-archive arguments are not compatible")

    if self._archive and parsed_args.fetch_archive and not parsed_args.category:
        raise AttributeError("fetch-archive needs a category to work with")

    for alias, arg in self.aliases.items():
        if (alias not in parsed_args) and (arg in parsed_args):
            value = getattr(parsed_args, arg, None)
            setattr(parsed_args, alias, value)

    return parsed_args
def connection_lost(self, exc):
    logger.debug("worker connection lost")
    self._worker.close()
    self._workers.remove(self._worker)
def register_handler(self, name, handler, esc_strings):
    self._handlers[name] = handler
    for esc_str in esc_strings:
        self._esc_handlers[esc_str] = handler