def convert_to_this_nbformat(nb, orig_version=2, orig_minor=0):
    if orig_version == 1:
        nb = v2.convert_to_this_nbformat(nb)
        orig_version = 2
    if orig_version == 2:
        nb.nbformat = nbformat
        nb.nbformat_minor = nbformat_minor
        nb.orig_nbformat = 2
        return nb
    elif orig_version == 3:
        if orig_minor != nbformat_minor:
            nb.orig_nbformat_minor = orig_minor
        nb.nbformat_minor = nbformat_minor
        return nb
    else:
        raise ValueError('Cannot convert a notebook from v%s to v3' % orig_version)
def sync_folder(self, path, bucket):
    bucket = self.conn.get_bucket(bucket)
    local_files = self._get_local_files(path)
    s3_files = self._get_s3_files(bucket)
    for filename, hash in local_files.iteritems():
        s3_key = s3_files[filename]
        if s3_key is None:
            s3_key = Key(bucket)
            s3_key.key = filename
            s3_key.etag = '"!"'
        if s3_key.etag[1:-1] != hash[0]:
            s3_key.set_contents_from_filename(join(path, filename), md5=hash)
def wrap_class(cls, error_threshold=None):
    methods = inspect.getmembers(cls, inspect.ismethod) + inspect.getmembers(cls, inspect.isfunction)
    for method_name, method in methods:
        wrapped_method = flawless.client.client._wrap_function_with_error_decorator(
            method if not im_self(method) else im_func(method),
            save_current_stack_trace=False,
            error_threshold=error_threshold,
        )
        if im_self(method):
            wrapped_method = classmethod(wrapped_method)
        setattr(cls, method_name, wrapped_method)
    return cls
def check_type(self, value, attr, data):
    root_value = super(InstructionParameter, self).check_type(
        value, attr, data)
    if is_collection(value):
        _ = [super(InstructionParameter, self).check_type(item, attr, data)
             for item in value]
    return root_value
def _batch_gather_with_broadcast(params, indices, axis):
    leading_bcast_shape = tf.broadcast_dynamic_shape(
        tf.shape(input=params)[:axis], tf.shape(input=indices)[:-1])
    params += tf.zeros(
        tf.concat((leading_bcast_shape, tf.shape(input=params)[axis:]), axis=0),
        dtype=params.dtype)
    indices += tf.zeros(
        tf.concat((leading_bcast_shape, tf.shape(input=indices)[-1:]), axis=0),
        dtype=indices.dtype)
    return tf.compat.v1.batch_gather(params, indices)
def establish(self, call_id, timeout, limit=None, retry=None, max_retries=None):
    rejected = 0
    retried = 0
    results = []
    result_queue = self.result_queues[call_id]
    try:
        with Timeout(timeout, False):
            while True:
                result = result_queue.get()
                if result is None:
                    rejected += 1
                    if retry is not None:
                        if retried == max_retries:
                            break
                        retry()
                        retried += 1
                    continue
                results.append(result)
                if len(results) == limit:
                    break
    finally:
        del result_queue
        self.remove_result_queue(call_id)
    if not results:
        if rejected:
            raise Rejected('%d workers rejected' % rejected
                           if rejected != 1 else
                           'A worker rejected')
        else:
            raise WorkerNotFound('failed to find worker')
    return results
def slicify(slc, dim):
    if isinstance(slc, slice):
        start = 0 if slc.start is None else slc.start
        stop = dim if slc.stop is None else slc.stop
        step = 1 if slc.step is None else slc.step
        if start < 0:
            start += dim
        if stop < 0:
            stop += dim
        if step > 0:
            if start < 0:
                start = 0
            if stop > dim:
                stop = dim
        else:
            if stop < 0:
                stop = -1
            if start > dim:
                start = dim - 1
        return slice(start, stop, step)
    elif isinstance(slc, int):
        if slc < 0:
            slc += dim
        return slice(slc, slc + 1, 1)
    else:
        raise ValueError("Type for slice %s not recognized" % type(slc))
def enable_gtk(self, app=None):
    import gtk
    try:
        gtk.set_interactive(True)
        self._current_gui = GUI_GTK
    except AttributeError:
        from IPython.lib.inputhookgtk import inputhook_gtk
        self.set_inputhook(inputhook_gtk)
        self._current_gui = GUI_GTK
def overlapping(self, variant_obj):
    category = 'snv' if variant_obj['category'] == 'sv' else 'sv'
    query = {
        '$and': [
            {'case_id': variant_obj['case_id']},
            {'category': category},
            {'hgnc_ids': {'$in': variant_obj['hgnc_ids']}}
        ]
    }
    sort_key = [('rank_score', pymongo.DESCENDING)]
    variants = self.variant_collection.find(query).sort(sort_key).limit(30)
    return variants
def convert_camel_case(name):
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def incpos(self, length: int=1) -> int:
    if length < 0:
        raise ValueError("length must be positive")
    i = 0
    while (i < length):
        if self._cursor.index < self._len:
            if self.peek_char == '\n':
                self._cursor.step_next_line()
            self._cursor.step_next_char()
        i += 1
    return self._cursor.index
def cache(func):
    CACHE_DIR = appdirs.user_cache_dir('sportsref', getpass.getuser())
    if not os.path.isdir(CACHE_DIR):
        os.makedirs(CACHE_DIR)

    @funcutils.wraps(func)
    def wrapper(url):
        file_hash = hashlib.md5()
        encoded_url = url.encode(errors='replace')
        file_hash.update(encoded_url)
        file_hash = file_hash.hexdigest()
        filename = '{}/{}'.format(CACHE_DIR, file_hash)
        sport_id = None
        for a_base_url, a_sport_id in sportsref.SITE_ABBREV.items():
            if url.startswith(a_base_url):
                sport_id = a_sport_id
                break
        else:
            print('No sport ID found for {}, not able to check cache'.format(url))
        file_exists = os.path.isfile(filename)
        if sport_id and file_exists:
            cur_time = int(time.time())
            mod_time = int(os.path.getmtime(filename))
            days_since_mod = datetime.timedelta(seconds=(cur_time - mod_time)).days
            days_cache_valid = globals()['_days_valid_{}'.format(sport_id)](url)
            cache_is_valid = days_since_mod < days_cache_valid
        else:
            cache_is_valid = False
        allow_caching = sportsref.get_option('cache')
        if file_exists and cache_is_valid and allow_caching:
            with codecs.open(filename, 'r', encoding='utf-8', errors='replace') as f:
                text = f.read()
        else:
            text = func(url)
            with codecs.open(filename, 'w+', encoding='utf-8') as f:
                f.write(text)
        return text

    return wrapper
def onTWriteCallback__init(self, sim):
    yield from self.onTWriteCallback(sim)
    self.intf.t._sigInside.registerWriteCallback(
        self.onTWriteCallback, self.getEnable)
    self.intf.o._sigInside.registerWriteCallback(
        self.onTWriteCallback, self.getEnable)
def delims(self, delims):
    expr = '[' + ''.join('\\' + c for c in delims) + ']'
    self._delim_re = re.compile(expr)
    self._delims = delims
    self._delim_expr = expr
def ALL_mentions(target_mentions, chain_mentions):
    found_all = True
    for name in target_mentions:
        found_one = False
        for chain_ment in chain_mentions:
            if name in chain_ment:
                found_one = True
                break
        if not found_one:
            found_all = False
            break
    return found_all
def execute(self, obj):
    try:
        if self.config.stdin:
            self.spawn(self.config.command, stdin_content=obj, stdin=True, timeout=1)
        else:
            if "@@" not in self.config.command:
                raise PJFMissingArgument("Missing @@ filename indicator while using non-stdin fuzzing method")
            for x in self.config.command:
                if "@@" in x:
                    self.config.command[self.config.command.index(x)] = x.replace("@@", obj)
            self.spawn(self.config.command, timeout=2)
        self.logger.debug("[{0}] - PJFExternalFuzzer successfully completed".format(time.strftime("%H:%M:%S")))
        return self._out
    except KeyboardInterrupt:
        return ""
    except Exception as e:
        raise PJFBaseException(e.message if hasattr(e, "message") else str(e))
def can_cut(self):
    cursor = self._control.textCursor()
    return (cursor.hasSelection() and
            self._in_buffer(cursor.anchor()) and
            self._in_buffer(cursor.position()))
def get_importer(path_item):
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                pass
            else:
                break
        else:
            importer = None
        sys.path_importer_cache.setdefault(path_item, importer)
    if importer is None:
        try:
            importer = ImpWrapper(path_item)
        except ImportError:
            pass
    return importer
def create_resized_image(self, path_to_image, save_path_on_storage, width, height):
    image, file_ext, image_format, mime_type = self.retrieve_image(
        path_to_image
    )
    image, save_kwargs = self.preprocess(image, image_format)
    imagefile = self.process_image(
        image=image,
        image_format=image_format,
        save_kwargs=save_kwargs,
        width=width,
        height=height
    )
    self.save_image(imagefile, save_path_on_storage, file_ext, mime_type)
def predict_on_stream(config: Union[str, Path, dict], batch_size: int = 1, file_path: Optional[str] = None) -> None:
    if file_path is None or file_path == '-':
        if sys.stdin.isatty():
            raise RuntimeError('To process data from terminal please use interact mode')
        f = sys.stdin
    else:
        f = open(file_path, encoding='utf8')

    model: Chainer = build_model(config)

    args_count = len(model.in_x)
    while True:
        batch = list((l.strip() for l in islice(f, batch_size * args_count)))
        if not batch:
            break
        args = []
        for i in range(args_count):
            args.append(batch[i::args_count])
        res = model(*args)
        if len(model.out_params) == 1:
            res = [res]
        for res in zip(*res):
            res = json.dumps(res, ensure_ascii=False)
            print(res, flush=True)

    if f is not sys.stdin:
        f.close()
def paths(input_dir):
    'yield all file paths under input_dir'
    for root, dirs, fnames in os.walk(input_dir):
        for i_fname in fnames:
            i_path = os.path.join(root, i_fname)
            yield i_path
def evaluate_min_coverage(coverage_opt, assembly_coverage, assembly_size):
    if coverage_opt == "auto":
        min_coverage = (assembly_coverage / assembly_size) * .3
        logger.info("Minimum assembly coverage automatically set to: "
                    "{}".format(min_coverage))
        if min_coverage < 10:
            logger.info("Minimum assembly coverage cannot be set to lower"
                        " than 10. Setting to 10")
            min_coverage = 10
    else:
        min_coverage = int(coverage_opt)
        logger.info("Minimum assembly coverage manually set to: {}".format(
            min_coverage))
    return min_coverage
def dbgr(self, string):
    print('')
    self.proc.cmd_queue.append(string)
    self.proc.process_command()
    return
def register(self, contract):
    "registers NativeContract classes"
    assert issubclass(contract, NativeContractBase)
    assert len(contract.address) == 20
    assert contract.address.startswith(self.native_contract_address_prefix)
    if self.native_contracts.get(contract.address) == contract._on_msg:
        log.debug("already registered", contract=contract, address=contract.address)
        return
    assert contract.address not in self.native_contracts, 'address already taken'
    self.native_contracts[contract.address] = contract._on_msg
    log.debug("registered native contract", contract=contract, address=contract.address)
def assert_no_title(self, title, **kwargs):
    query = TitleQuery(title, **kwargs)

    @self.synchronize(wait=query.wait)
    def assert_no_title():
        if query.resolves_for(self):
            raise ExpectationNotMet(query.negative_failure_message)
        return True

    return assert_no_title()
def IsNotNone(*fields, default=None):
    when_clauses = [
        expressions.When(
            ~expressions.Q(**{field: None}),
            then=expressions.F(field)
        )
        for field in reversed(fields)
    ]
    return expressions.Case(
        *when_clauses,
        default=expressions.Value(default),
        output_field=CharField()
    )
def jenkins_request_with_headers(jenkins_server, req):
    try:
        response = jenkins_server.jenkins_request(req)
        response_body = response.content
        response_headers = response.headers
        if response_body is None:
            raise jenkins.EmptyResponseException(
                "Error communicating with server[%s]: "
                "empty response" % jenkins_server.server)
        return {'body': response_body.decode('utf-8'), 'headers': response_headers}
    except HTTPError as e:
        if e.code in [401, 403, 500]:
            raise JenkinsException(
                'Error in request. ' +
                'Possibly authentication failed [%s]: %s' % (e.code, e.msg)
            )
        elif e.code == 404:
            raise jenkins.NotFoundException('Requested item could not be found')
        else:
            raise
    except socket.timeout as e:
        raise jenkins.TimeoutException('Error in request: %s' % e)
    except URLError as e:
        if str(e.reason) == "timed out":
            raise jenkins.TimeoutException('Error in request: %s' % e.reason)
        raise JenkinsException('Error in request: %s' % e.reason)
def _left_doubling_increments(batch_shape, max_doublings, step_size, seed=None, name=None):
    with tf.compat.v1.name_scope(name, 'left_doubling_increments',
                                 [batch_shape, max_doublings, step_size]):
        step_size = tf.convert_to_tensor(value=step_size)
        dtype = step_size.dtype.base_dtype
        output_shape = tf.concat(([max_doublings + 1], batch_shape), axis=0)
        expand_left = distributions.Bernoulli(0.5, dtype=dtype).sample(
            sample_shape=output_shape, seed=seed)
        width_multipliers = tf.cast(2 ** tf.range(0, max_doublings + 1), dtype=dtype)
        widths_shape = tf.concat(([max_doublings + 1], tf.ones_like(batch_shape)),
                                 axis=0)
        width_multipliers = tf.reshape(width_multipliers, shape=widths_shape)
        widths = width_multipliers * step_size
        left_increments = tf.cumsum(widths * expand_left, exclusive=True, axis=0)
        return left_increments, widths
def _joint_sample_n(self, n, seed=None):
    with tf.name_scope("sample_n_joint"):
        stream = seed_stream.SeedStream(
            seed, salt="LinearGaussianStateSpaceModel_sample_n_joint")
        sample_and_batch_shape = distribution_util.prefer_static_value(
            tf.concat([[n], self.batch_shape_tensor()], axis=0))
        with tf.control_dependencies(self.runtime_assertions):
            initial_latent = self.initial_state_prior.sample(
                sample_shape=_augment_sample_shape(
                    self.initial_state_prior,
                    sample_and_batch_shape,
                    self.validate_args),
                seed=stream())
            initial_latent = initial_latent[..., tf.newaxis]
        initial_observation_matrix = (
            self.get_observation_matrix_for_timestep(self.initial_step))
        initial_observation_noise = (
            self.get_observation_noise_for_timestep(self.initial_step))
        initial_observation_pred = initial_observation_matrix.matmul(
            initial_latent)
        initial_observation = (initial_observation_pred +
                               initial_observation_noise.sample(
                                   sample_shape=_augment_sample_shape(
                                       initial_observation_noise,
                                       sample_and_batch_shape,
                                       self.validate_args),
                                   seed=stream())[..., tf.newaxis])
        sample_step = build_kalman_sample_step(
            self.get_transition_matrix_for_timestep,
            self.get_transition_noise_for_timestep,
            self.get_observation_matrix_for_timestep,
            self.get_observation_noise_for_timestep,
            full_sample_and_batch_shape=sample_and_batch_shape,
            stream=stream,
            validate_args=self.validate_args)
        (latents, observations) = tf.scan(
            sample_step,
            elems=tf.range(self.initial_step + 1, self.final_step),
            initializer=(initial_latent, initial_observation))
        latents = tf.concat([initial_latent[tf.newaxis, ...], latents], axis=0)
        observations = tf.concat([initial_observation[tf.newaxis, ...],
                                  observations], axis=0)
        latents = tf.squeeze(latents, -1)
        latents = distribution_util.move_dimension(latents, 0, -2)
        observations = tf.squeeze(observations, -1)
        observations = distribution_util.move_dimension(observations, 0, -2)
        return latents, observations
def simple_attention(memory, att_size, mask, keep_prob=1.0, scope="simple_attention"):
    with tf.variable_scope(scope):
        BS, ML, MH = tf.unstack(tf.shape(memory))
        memory_do = tf.nn.dropout(memory, keep_prob=keep_prob, noise_shape=[BS, 1, MH])
        logits = tf.layers.dense(
            tf.layers.dense(memory_do, att_size, activation=tf.nn.tanh),
            1, use_bias=False)
        logits = softmax_mask(tf.squeeze(logits, [2]), mask)
        att_weights = tf.expand_dims(tf.nn.softmax(logits), axis=2)
        res = tf.reduce_sum(att_weights * memory, axis=1)
        return res
def _dump_text(self):
    results = self._relay_output['result']
    for l in results:
        dt = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(int(l[1]['ts'])))
        print("{0} {1} {2} {3}".format(l[0], dt, l[1]['type'], l[1]['msg']))
def get_ip_address_info(ip_address, cache=None, nameservers=None, timeout=2.0, parallel=False):
    ip_address = ip_address.lower()
    if cache:
        info = cache.get(ip_address, None)
        if info:
            return info
    info = OrderedDict()
    info["ip_address"] = ip_address
    reverse_dns = get_reverse_dns(ip_address,
                                  nameservers=nameservers,
                                  timeout=timeout)
    country = get_ip_address_country(ip_address, parallel=parallel)
    info["country"] = country
    info["reverse_dns"] = reverse_dns
    info["base_domain"] = None
    if reverse_dns is not None:
        base_domain = get_base_domain(reverse_dns)
        info["base_domain"] = base_domain
    return info
def _explore(self, explore_iterable):
    if self.v_locked:
        raise pex.ParameterLockedException('Parameter `%s` is locked!' % self.v_full_name)
    if self.f_has_range():
        raise TypeError('Your parameter `%s` is already explored, '
                        'cannot _explore it further!' % self._name)
    if self._data is None:
        raise TypeError('Your parameter `%s` has no default value, please specify one '
                        'via `f_set` before exploration. ' % self.v_full_name)
    data_list = self._data_sanity_checks(explore_iterable)
    self._explored_range = data_list
    self._explored = True
    self.f_lock()
async def fetch(self) -> Response:
    if self.request_config.get('DELAY', 0) > 0:
        await asyncio.sleep(self.request_config['DELAY'])
    timeout = self.request_config.get('TIMEOUT', 10)
    try:
        async with async_timeout.timeout(timeout):
            resp = await self._make_request()
            try:
                resp_data = await resp.text(encoding=self.encoding)
            except UnicodeDecodeError:
                resp_data = await resp.read()
            response = Response(
                url=self.url,
                method=self.method,
                encoding=resp.get_encoding(),
                html=resp_data,
                metadata=self.metadata,
                cookies=resp.cookies,
                headers=resp.headers,
                history=resp.history,
                status=resp.status,
                aws_json=resp.json,
                aws_text=resp.text,
                aws_read=resp.read)
            aws_valid_response = self.request_config.get('VALID')
            if aws_valid_response and iscoroutinefunction(aws_valid_response):
                response = await aws_valid_response(response)
            if response.ok:
                return response
            else:
                return await self._retry(error_msg='request url failed!')
    except asyncio.TimeoutError:
        return await self._retry(error_msg='timeout')
    except Exception as e:
        return await self._retry(error_msg=e)
    finally:
        await self._close_request_session()
def _prm_read_dictionary(self, leaf, full_name):
    try:
        temp_table = self._prm_read_table(leaf, full_name)
        temp_dict = temp_table.to_dict('list')
        inner_dict = {}
        for innerkey, vallist in temp_dict.items():
            inner_dict[innerkey] = vallist[0]
        return inner_dict
    except:
        self._logger.error('Failed loading `%s` of `%s`.' % (leaf._v_name, full_name))
        raise
def _validate_value(key, value, expected_type):
    if not isinstance(value, expected_type):
        raise TypeError("{} argument must have a type {} not {}".format(
            key, expected_type, type(value)))
def unified_file(self):
    if (
        "file_to_test" in PyFunceble.INTERN
        and PyFunceble.INTERN["file_to_test"]
        and PyFunceble.CONFIGURATION["unified"]
    ):
        output = (
            self.output_parent_dir + PyFunceble.OUTPUTS["default_files"]["results"]
        )
        if PyFunceble.CONFIGURATION["less"]:
            if PyFunceble.HTTP_CODE["active"]:
                to_print = [
                    self.tested,
                    self.domain_status,
                    PyFunceble.INTERN["http_code"],
                ]
            else:
                to_print = [self.tested, self.domain_status, self.source]
            Prints(to_print, "Less", output, True).data()
        else:
            to_print = [
                self.tested,
                self.domain_status,
                self.expiration_date,
                self.source,
                PyFunceble.INTERN["http_code"],
                PyFunceble.CURRENT_TIME,
            ]
            Prints(to_print, "Generic_File", output, True).data()
def print_table(language):
    table = translation_table(language)
    for code, name in sorted(table.items(), key=operator.itemgetter(0)):
        print(u'{language:<8} {name:\u3000<20}'.format(
            name=name, language=code
        ))
    return None
def yzy_to_zyz(xi, theta1, theta2, eps=1e-9):
    quaternion_yzy = quaternion_from_euler([theta1, xi, theta2], 'yzy')
    euler = quaternion_yzy.to_zyz()
    quaternion_zyz = quaternion_from_euler(euler, 'zyz')
    out_angles = (euler[1], euler[0], euler[2])
    abs_inner = abs(quaternion_zyz.data.dot(quaternion_yzy.data))
    if not np.allclose(abs_inner, 1, eps):
        raise TranspilerError('YZY and ZYZ angles do not give same rotation matrix.')
    out_angles = tuple(0 if np.abs(angle) < _CHOP_THRESHOLD else angle
                       for angle in out_angles)
    return out_angles
def select_lasso(self, expression_x, expression_y, xsequence, ysequence, mode="replace", name="default", executor=None):
    def create(current):
        return selections.SelectionLasso(expression_x, expression_y, xsequence, ysequence, current, mode)
    self._selection(create, name, executor=executor)
def resize_to(self, width, height):
    self.driver.resize_window_to(self.handle, width, height)
def merge(self, new_dict):
    actions = new_dict.pop("actions")
    for action in actions:
        self.add_action(action)
    self.__dict__.update(new_dict)
def _run_writers(self, start_count, next_idx, sources, i_str, t_path):
    name_info = dict(
        first=start_count,
        source=sources.pop(),
    )
    all_o_paths = []
    for writer in self.writers:
        logger.debug('running %r on %r: %r', writer, i_str, name_info)
        o_paths = writer(t_path, name_info, i_str)
        logger.debug('loaded (%d, %d) of %r into %r',
                     start_count, next_idx - 1, i_str, o_paths)
        all_o_paths += o_paths
    return all_o_paths
def _height_is_big_enough(image, height):
    if height > image.size[1]:
        raise ImageSizeError(image.size[1], height)
def add_enrichr_parser(subparsers):
    argparser_enrichr = subparsers.add_parser("enrichr", help="Using Enrichr API to perform GO analysis.")
    enrichr_opt = argparser_enrichr.add_argument_group("Input arguments")
    enrichr_opt.add_argument("-i", "--input-list", action="store", dest="gene_list", type=str, required=True,
                             metavar='IDs', help="Enrichr uses a list of gene names as input.")
    enrichr_opt.add_argument("-g", "--gene-sets", action="store", dest="library", type=str, required=True,
                             metavar='GMT', help="Enrichr library name(s) required. Separate each name by comma.")
    enrichr_opt.add_argument("--org", "--organism", action="store", dest="organism", type=str, default='',
                             help="Enrichr supported organism name. Default: human. "
                                  "See here: https://amp.pharm.mssm.edu/modEnrichr.")
    enrichr_opt.add_argument("--ds", "--description", action="store", dest="descrip", type=str, default='enrichr',
                             metavar='STRING',
                             help="It is recommended to enter a short description for your list so that multiple "
                                  "lists can be differentiated from each other if you choose to save or share your list.")
    enrichr_opt.add_argument("--cut", "--cut-off", action="store", dest="thresh", metavar='float', type=float,
                             default=0.05, help="Adjust-Pval cutoff, used for generating plots. Default: 0.05.")
    enrichr_opt.add_argument("--bg", "--background", action="store", dest="bg", default='hsapiens_gene_ensembl',
                             metavar='BGNUM', help="BioMart Dataset name or Background total genes number. Default: None")
    enrichr_opt.add_argument("-t", "--top-term", dest="term", action="store", type=int, default=10, metavar='int',
                             help="Numbers of top terms shown in the plot. Default: 10")
    enrichr_output = argparser_enrichr.add_argument_group("Output figure arguments")
    add_output_option(enrichr_output)
    return
def strval(node, outermost=True):
    if not isinstance(node, element):
        return node.xml_value if outermost else [node.xml_value]
    accumulator = []
    for child in node.xml_children:
        if isinstance(child, text):
            accumulator.append(child.xml_value)
        elif isinstance(child, element):
            accumulator.extend(strval(child, outermost=False))
    if outermost:
        accumulator = ''.join(accumulator)
    return accumulator
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    local_stream = utils.BytearrayStream()
    if self._unique_identifier:
        self._unique_identifier.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._usage_limits_count:
        self._usage_limits_count.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._cryptographic_usage_mask:
        self._cryptographic_usage_mask.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._lease_time:
        self._lease_time.write(
            local_stream,
            kmip_version=kmip_version
        )
    self.length = local_stream.length()
    super(CheckResponsePayload, self).write(
        output_stream,
        kmip_version=kmip_version
    )
    output_stream.write(local_stream.buffer)
def apply_operation_back(self, op, qargs=None, cargs=None, condition=None):
    qargs = qargs or []
    cargs = cargs or []
    all_cbits = self._bits_in_condition(condition)
    all_cbits.extend(cargs)
    self._check_condition(op.name, condition)
    self._check_bits(qargs, self.output_map)
    self._check_bits(all_cbits, self.output_map)
    self._add_op_node(op, qargs, cargs, condition)
    al = [qargs, all_cbits]
    for q in itertools.chain(*al):
        ie = list(self._multi_graph.predecessors(self.output_map[q]))
        if len(ie) != 1:
            raise DAGCircuitError("output node has multiple in-edges")
        self._multi_graph.add_edge(ie[0], self._id_to_node[self._max_node_id],
                                   name="%s[%s]" % (q[0].name, q[1]), wire=q)
        self._multi_graph.remove_edge(ie[0], self.output_map[q])
        self._multi_graph.add_edge(self._id_to_node[self._max_node_id], self.output_map[q],
                                   name="%s[%s]" % (q[0].name, q[1]), wire=q)
    return self._id_to_node[self._max_node_id]
def configure(self, options, config):
    log.debug("Configuring plugins")
    self.config = config
    cfg = PluginProxy('configure', self._plugins)
    cfg(options, config)
    enabled = [plug for plug in self._plugins if plug.enabled]
    self.plugins = enabled
    self.sort()
    log.debug("Plugins enabled: %s", enabled)
def s2n(self):
    M_N = 8.0713171
    f = lambda parent, daughter: -parent + daughter + 2 * M_N
    return self.derived('s2n', (0, -2), f)
async def wait_changed(self):
    if not self.is_complete():
        waiter = self._loop.create_future()
        self._waiters.append(waiter)
        await waiter
def add_virtual_columns_proper_motion2vperpendicular(self, distance="distance", pm_long="pm_l", pm_lat="pm_b",
                                                     vl="vl", vb="vb",
                                                     propagate_uncertainties=False,
                                                     radians=False):
    k = 4.74057
    self.add_variable("k", k, overwrite=False)
    self.add_virtual_column(vl, "k*{pm_long}*{distance}".format(**locals()))
    self.add_virtual_column(vb, "k* {pm_lat}*{distance}".format(**locals()))
    if propagate_uncertainties:
        self.propagate_uncertainties([self[vl], self[vb]])
def _build_trainable_posterior(param, initial_loc_fn):
    loc = tf.compat.v1.get_variable(
        param.name + '_loc',
        initializer=lambda: initial_loc_fn(param),
        dtype=param.prior.dtype,
        use_resource=True)
    scale = tf.nn.softplus(
        tf.compat.v1.get_variable(
            param.name + '_scale',
            initializer=lambda: -4 * tf.ones_like(initial_loc_fn(param)),
            dtype=param.prior.dtype,
            use_resource=True))
    q = tfd.Normal(loc=loc, scale=scale)
    if (param.prior.event_shape.ndims is None
            or param.prior.event_shape.ndims > 0):
        q = tfd.Independent(
            q, reinterpreted_batch_ndims=param.prior.event_shape.ndims)
    return tfd.TransformedDistribution(q, param.bijector)
def poll(self):
    service = yield self.get_service()
    if not service:
        self.log.warn("Docker service not found")
        return 0
    task_filter = {'service': service['Spec']['Name']}
    tasks = yield self.docker(
        'tasks', task_filter
    )
    running_task = None
    for task in tasks:
        task_state = task['Status']['State']
        self.log.debug(
            "Task %s of Docker service %s status: %s",
            task['ID'][:7],
            self.service_id[:7],
            pformat(task_state),
        )
        if task_state == 'running':
            running_task = task
    if running_task is not None:
        return None
    else:
        return 1
def var(self, axis=None, keepdims=False):
    return self._stat(axis, name='variance', keepdims=keepdims)
def get_ids(self, features, threshold=0.0, func=np.sum, get_weights=False):
    if isinstance(features, str):
        features = [features]
    features = self.search_features(features)
    feature_weights = self.data.ix[:, features]
    weights = feature_weights.apply(func, 1)
    above_thresh = weights[weights >= threshold]
    return above_thresh if get_weights else list(above_thresh.index)
def is_error(node: astroid.node_classes.NodeNG) -> bool:
    for child_node in node.get_children():
        if isinstance(child_node, astroid.Raise):
            return True
    return False
def _sentences(self, clean_visible):
    'generate strings identified as sentences'
    previous_end = 0
    clean_visible = clean_visible.decode('utf8')
    for start, end in self.sentence_tokenizer.span_tokenize(clean_visible):
        if start < previous_end:
            start = previous_end
        if start > end:
            continue
        try:
            label = self.label_index.find_le(end)
        except ValueError:
            label = None
        if label:
            off = label.offsets[OffsetType.CHARS]
            end = max(off.first + off.length, end)
        previous_end = end
        sent_str = clean_visible[start:end]
        yield start, end, sent_str
def set_default_bg():
    term = environ.get('TERM', None)
    if term:
        if (term.startswith('xterm') or
                term.startswith('eterm') or
                term == 'dtterm'):
            return False
    return True
def run(self, data_loaders, workflow, max_epochs, **kwargs):
    assert isinstance(data_loaders, list)
    assert mmcv.is_list_of(workflow, tuple)
    assert len(data_loaders) == len(workflow)

    self._max_epochs = max_epochs
    work_dir = self.work_dir if self.work_dir is not None else 'NONE'
    self.logger.info('Start running, host: %s, work_dir: %s',
                     get_host_info(), work_dir)
    self.logger.info('workflow: %s, max: %d epochs', workflow, max_epochs)
    self.call_hook('before_run')

    while self.epoch < max_epochs:
        for i, flow in enumerate(workflow):
            mode, epochs = flow
            if isinstance(mode, str):
                if not hasattr(self, mode):
                    raise ValueError(
                        'runner has no method named "{}" to run an epoch'.
                        format(mode))
                epoch_runner = getattr(self, mode)
            elif callable(mode):
                epoch_runner = mode
            else:
                raise TypeError('mode in workflow must be a str or '
                                'callable function, not {}'.format(
                                    type(mode)))
            for _ in range(epochs):
                if mode == 'train' and self.epoch >= max_epochs:
                    return
                epoch_runner(data_loaders[i], **kwargs)

    time.sleep(1)
    self.call_hook('after_run')
def glance_process(body, message):
    event_type = body['event_type']
    process = glance_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in glance_customer_process_wildcard.keys():
            if pattern.match(event_type):
                process_wildcard = glance_customer_process_wildcard.get(pattern)
                matched = True
                break
        if matched:
            process_wildcard(body, message)
        else:
            default_process(body, message)
    message.ack()
def pre_build(self, traj, brian_list, network_dict):
    self._pre_build = not _explored_parameters_in_group(traj, traj.parameters.connections)
    self._pre_build = (self._pre_build and 'neurons_i' in network_dict
                       and 'neurons_e' in network_dict)
    if self._pre_build:
        self._build_connections(traj, brian_list, network_dict)
def _create_idx_from_stream(self, stream):
    stream_iter = iter(stream)
    dimension = self.properties.dimension
    darray = ctypes.c_double * dimension
    mins = darray()
    maxs = darray()
    no_data = ctypes.cast(ctypes.pointer(ctypes.c_ubyte(0)),
                          ctypes.POINTER(ctypes.c_ubyte))

    def py_next_item(p_id, p_mins, p_maxs, p_dimension, p_data, p_length):
        try:
            p_id[0], coordinates, obj = next(stream_iter)
        except StopIteration:
            return -1
        except Exception as exc:
            self._exception = exc
            return -1
        if self.interleaved:
            coordinates = Index.deinterleave(coordinates)
        for i in range(dimension):
            mins[i] = coordinates[i * 2]
            maxs[i] = coordinates[(i * 2) + 1]
        p_mins[0] = ctypes.cast(mins, ctypes.POINTER(ctypes.c_double))
        p_maxs[0] = ctypes.cast(maxs, ctypes.POINTER(ctypes.c_double))
        p_dimension[0] = dimension
        if obj is None:
            p_data[0] = no_data
            p_length[0] = 0
        else:
            p_length[0], data, _ = self._serialize(obj)
            p_data[0] = ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))
        return 0

    stream = core.NEXTFUNC(py_next_item)
    return IndexStreamHandle(self.properties.handle, stream)
def isAcquired(self, lockID):
    return self.__lockImpl.isAcquired(lockID, self.__selfID, time.time())
def fill_heatmap(self):
    for module_path, lineno, runtime in self.lines_without_stdlib:
        self._execution_count[module_path][lineno] += 1
        self._heatmap[module_path][lineno] += runtime
def parse_unstruct(unstruct):
    my_json = json.loads(unstruct)
    data = my_json['data']
    schema = data['schema']
    if 'data' in data:
        inner_data = data['data']
    else:
        raise SnowplowEventTransformationException(
            ["Could not extract inner data field from unstructured event"])
    fixed_schema = fix_schema("unstruct_event", schema)
    return [(fixed_schema, inner_data)]
def volume_percentage_used(self, volume):
    volume = self._get_volume(volume)
    if volume is not None:
        total = int(volume["size"]["total"])
        used = int(volume["size"]["used"])
        if used is not None and used > 0 and \
           total is not None and total > 0:
            return round((float(used) / float(total)) * 100.0, 1)
def _sample_3d(self, n, seed=None):
    seed = seed_stream.SeedStream(seed, salt='von_mises_fisher_3d')
    u_shape = tf.concat([[n], self._batch_shape_tensor()], axis=0)
    z = tf.random.uniform(u_shape, seed=seed(), dtype=self.dtype)
    safe_conc = tf.where(self.concentration > 0,
                         self.concentration,
                         tf.ones_like(self.concentration))
    safe_z = tf.where(z > 0, z, tf.ones_like(z))
    safe_u = 1 + tf.reduce_logsumexp(
        input_tensor=[
            tf.math.log(safe_z),
            tf.math.log1p(-safe_z) - 2 * safe_conc
        ],
        axis=0) / safe_conc
    u = tf.where(self.concentration > tf.zeros_like(safe_u), safe_u, 2 * z - 1)
    u = tf.where(tf.equal(z, 0), -tf.ones_like(u), u)
    if not self._allow_nan_stats:
        u = tf.debugging.check_numerics(u, 'u in _sample_3d')
    return u[..., tf.newaxis]
def find_on_path(importer, path_item, only=False):
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item, 'EGG-INFO')
                )
            )
        else:
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
def lists(self, pattern: str = None) -> List[WikiList]:
    return [
        lst for arg in self.arguments for lst in arg.lists(pattern) if lst]
def make_simple_step_size_update_policy(num_adaptation_steps,
                                        target_rate=0.75,
                                        decrement_multiplier=0.01,
                                        increment_multiplier=0.01,
                                        step_counter=None):
    if step_counter is None and num_adaptation_steps is not None:
        step_counter = tf.compat.v1.get_variable(
            name='step_size_adaptation_step_counter',
            initializer=np.array(-1, dtype=np.int32),
            dtype=tf.int32,
            trainable=False,
            use_resource=True)

    def step_size_simple_update_fn(step_size_var, kernel_results):
        if kernel_results is None:
            if mcmc_util.is_list_like(step_size_var):
                return [tf.identity(ss) for ss in step_size_var]
            return tf.identity(step_size_var)
        log_n = tf.math.log(
            tf.cast(
                tf.size(input=kernel_results.log_accept_ratio),
                kernel_results.log_accept_ratio.dtype))
        log_mean_accept_ratio = tf.reduce_logsumexp(
            input_tensor=tf.minimum(kernel_results.log_accept_ratio, 0.)) - log_n
        adjustment = tf.where(
            log_mean_accept_ratio < tf.cast(
                tf.math.log(target_rate), log_mean_accept_ratio.dtype),
            -decrement_multiplier / (1. + decrement_multiplier),
            increment_multiplier)

        def build_assign_op():
            if mcmc_util.is_list_like(step_size_var):
                return [
                    ss.assign_add(ss * tf.cast(adjustment, ss.dtype))
                    for ss in step_size_var
                ]
            return step_size_var.assign_add(
                step_size_var * tf.cast(adjustment, step_size_var.dtype))

        if num_adaptation_steps is None:
            return build_assign_op()
        else:
            with tf.control_dependencies([step_counter.assign_add(1)]):
                return tf.cond(
                    pred=step_counter < num_adaptation_steps,
                    true_fn=build_assign_op,
                    false_fn=lambda: step_size_var)

    return step_size_simple_update_fn
def _update_status(self):
    srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
    running, completed, dead = self._running, self._completed, self._dead
    for num, job in enumerate(running):
        stat = job.stat_code
        if stat == srun:
            continue
        elif stat == scomp:
            completed.append(job)
            self._comp_report.append(job)
            running[num] = False
        elif stat == sdead:
            dead.append(job)
            self._dead_report.append(job)
            running[num] = False
    running[:] = filter(None, running)
def headers_present(self, headers):
    headers = {name: re.compile('(.*)') for name in headers}
    self.add_matcher(matcher('HeadersMatcher', headers))
def download_csv(data, filename):
    assert_is_type(data, H2OFrame)
    assert_is_type(filename, str)
    url = h2oconn.make_url("DownloadDataset", 3) + "?frame_id={}&hex_string=false".format(data.frame_id)
    with open(filename, "wb") as f:
        f.write(urlopen()(url).read())
def _bind_parameter(self, parameter, value):
    for (instr, param_index) in self._parameter_table[parameter]:
        instr.params[param_index] = value
def exclude_downhole(filt, threshold=2):
    cfilt = filt.copy()
    inds = bool_2_indices(~filt)
    rem = (np.diff(inds) >= threshold)[:, 0]
    if any(rem):
        if inds[rem].shape[0] > 1:
            limit = inds[rem][1, 0]
            cfilt[limit:] = False
    return cfilt
def init_module(self, run_object):
    self.profile = self.profile_module
    self._run_object, _, self._run_args = run_object.partition(' ')
    self._object_name = '%s (module)' % self._run_object
    self._globs = {
        '__file__': self._run_object,
        '__name__': '__main__',
        '__package__': None,
    }
    program_path = os.path.dirname(self._run_object)
    if sys.path[0] != program_path:
        sys.path.insert(0, program_path)
    self._replace_sysargs()
def _main(self, fileobj, data, offset):
    fileobj.seek(offset)
    fileobj.write(data)
def encode(self, txt):
    return list(self._fwd_index.get(c, 0) for c in txt)
def decrement(self):
    with self._lock:
        if self._count == 0:
            raise RuntimeError(
                'Counter is at zero. It cannot dip below zero')
        self._count -= 1
        if self._is_finalized and self._count == 0:
            self._callback()
def _initialize_slots(self, seed, hashvalues):
    self.seed = seed
    self.hashvalues = self._parse_hashvalues(hashvalues)
def get(self, *args, **kwargs):
    if 'pk' in kwargs:
        kwargs['parent'] = kwargs['pk']
        kwargs['head'] = True
        del kwargs['pk']
    if 'request' in kwargs:
        request = kwargs['request']
        version = request.GET.get('version', None)
        preview_id = request.GET.get('preview_id', None)
        if (version is not None) and (preview_id is not None):
            kwargs['revision_id'] = version
            kwargs['preview_id'] = preview_id
            del kwargs['is_published']
        del kwargs['request']
    return super(PublishableManager, self).get(*args, **kwargs)
def withIndent(self, indent=1):
    ctx = copy(self)
    ctx.indent += indent
    return ctx
def fix_header(filename, keyword, new_value):
    hd = read_header(filename)
    hi = read_header(filename, return_idxs=True)
    idx = hi[keyword]
    dtype = header_keyword_types[keyword]
    dtype_to_type = {b'<l': np.int32,
                     b'str': bytes,
                     b'<d': np.float64,
                     b'angle': to_sigproc_angle}
    value_dtype = dtype_to_type[dtype]
    # Compare against the type itself; the original isinstance() check could never be true.
    if value_dtype is bytes:
        if len(hd[keyword]) == len(new_value):
            val_str = np.int32(len(new_value)).tostring() + new_value
        else:
            raise RuntimeError("String size mismatch. Cannot update without rewriting entire file.")
    else:
        val_str = value_dtype(new_value).tostring()
    with open(filename, 'rb+') as fh:
        fh.seek(idx)
        fh.write(val_str)
def _match_one(self, rec, tests):
    for key, test in tests.iteritems():
        if not test(rec.get(key, None)):
            return False
    return True
def as_action_description(self):
    description = {
        self.name: {
            'href': self.href_prefix + self.href,
            'timeRequested': self.time_requested,
            'status': self.status,
        },
    }
    if self.input is not None:
        description[self.name]['input'] = self.input
    if self.time_completed is not None:
        description[self.name]['timeCompleted'] = self.time_completed
    return description
def parse(url):
    config = {}
    if not isinstance(url, six.string_types):
        url = ''
    url = urlparse.urlparse(url)
    path = url.path[1:]
    path = path.split('?', 2)[0]
    config.update({
        'NAME': path,
        'USER': url.username,
        'PASSWORD': url.password,
        'HOST': url.hostname,
        'PORT': url.port,
    })
    if url.scheme in SCHEMES:
        config['ENGINE'] = SCHEMES[url.scheme]
    return config
def get_table_content(self, table):
    result = [[]]
    cols = table.cols
    for cell in self.compute_content(table):
        if cols == 0:
            result.append([])
            cols = table.cols
        cols -= 1
        result[-1].append(cell)
    while len(result[-1]) < cols:
        result[-1].append("")
    return result
def connect(self):
    SCOPES = 'https://www.googleapis.com/auth/drive'
    store = file.Storage('drive_credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        try:
            flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        except InvalidClientSecretsError:
            log.error('ERROR: Could not find client_secret.json in current directory, please obtain it from the API console.')
            return
        creds = tools.run_flow(flow, store)
    self.connection = build('drive', 'v3', http=creds.authorize(Http()))
    response = self.connection.files().list(
        q="name='Music' and mimeType='application/vnd.google-apps.folder' and trashed=false").execute()
    try:
        folder_id = response.get('files', [])[0]['id']
    except IndexError:
        log.warning('Music folder is missing. Creating it.')
        folder_metadata = {'name': 'Music', 'mimeType': 'application/vnd.google-apps.folder'}
        folder = self.connection.files().create(body=folder_metadata, fields='id').execute()
def get_type(self, type_name):
    type_name = self._canonicalize_type(type_name)
    if str(type_name) == 'int':
        type_name = 'integer'
    elif str(type_name) == 'str':
        type_name = 'string'
    elif str(type_name) == 'dict':
        type_name = 'basic_dict'
    if self.is_known_type(type_name):
        return self.known_types[type_name]
    base_type, is_complex, subtypes = self.split_type(type_name)
    if is_complex and base_type in self.type_factories:
        self.instantiate_type(type_name, base_type, subtypes)
        return self.known_types[type_name]
    i = 0
    for i, (source, name) in enumerate(self._lazy_type_sources):
        if isinstance(source, str):
            import pkg_resources
            for entry in pkg_resources.iter_entry_points(source):
                try:
                    mod = entry.load()
                    type_system.load_type_module(mod)
                except:
                    fail_info = ("Entry point group: %s, name: %s" % (source, entry.name), sys.exc_info)
                    logging.exception("Error loading external type source from entry point, group: %s, name: %s",
                                      source, entry.name)
                    self.failed_sources.append(fail_info)
        else:
            try:
                source(self)
            except:
                fail_info = ("source: %s" % name, sys.exc_info)
                logging.exception("Error loading external type source, source: %s", source)
                self.failed_sources.append(fail_info)
        if self.is_known_type(type_name) or (is_complex and base_type in self.type_factories):
            break
    self._lazy_type_sources = self._lazy_type_sources[i:]
    if not (self.is_known_type(type_name) or (is_complex and base_type in self.type_factories)):
        raise ArgumentError("get_type called on unknown type",
                            type=type_name,
                            failed_external_sources=[x[0] for x in self.failed_sources])
    return self.get_type(type_name)
def create(self):
    if self.dirname and not os.path.exists(self.dirname):
        os.makedirs(self.dirname)
def add_server(self, hostname, port, use_ssl, tls_ctx=None):
    if not use_ssl and tls_ctx:
        raise ValueError("Cannot specify a TLS context and not use SSL!")
    server = ldap3.Server(
        hostname,
        port=port,
        use_ssl=use_ssl,
        tls=tls_ctx
    )
    self._server_pool.add(server)
    return server
def get_ref_annotation_data_after_time(self, id_tier, time):
    befores = self.get_ref_annotation_data_between_times(
        id_tier, time, self.get_full_time_interval())
    if befores:
        return [min(befores, key=lambda x: x[0])]
    else:
        return []
def import_data(
        self, resource_group_name, name, files, format=None, custom_headers=None, raw=False, polling=True, **operation_config):
    raw_result = self._import_data_initial(
        resource_group_name=resource_group_name,
        name=name,
        files=files,
        format=format,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
def flowshow(flow, win_name='', wait_time=0):
    flow = flowread(flow)
    flow_img = flow2rgb(flow)
    imshow(rgb2bgr(flow_img), win_name, wait_time)
def duplicate(self, new_parent=None):
    "Create a new object exactly similar to self"
    kwargs = {}
    for spec_name, spec in self._meta.specs.items():
        value = getattr(self, spec_name)
        if isinstance(value, Color):
            print "COLOR", value, value.default
            if value.default:
                value = None
        if value is not None:
            kwargs[spec_name] = value
    del kwargs['parent']
    new_id = wx.NewId()
    kwargs['id'] = new_id
    kwargs['name'] = "%s_%s" % (kwargs['name'], new_id)
    new_obj = self.__class__(new_parent or self.get_parent(), **kwargs)
    for child in self:
        child.duplicate(new_obj)
    return new_obj
def option_attrname(self, opt, optdict=None):
    if optdict is None:
        optdict = self.get_option_def(opt)
    return optdict.get("dest", opt.replace("-", "_"))
def set_issuer(self, issuer):
    self._set_name(_lib.X509_set_issuer_name, issuer)
    self._issuer_invalidator.clear()
def search(self, query, verbose=0):
    if verbose > 0:
        print("searching " + query)
    query = query.lower()
    qgram = ng(query, self.slb)
    qocument = set()
    for q in qgram:
        if q in self.ngrams.keys():
            for i in self.ngrams[q]:
                qocument.add(i)
    self.qocument = qocument
    results = {}
    for i in qocument:
        for j in self.D[i].keys():
            if not j in results.keys():
                results[j] = 0
            results[j] = results[j] + self.D[i][j]
    sorted_results = sorted(results.items(), key=operator.itemgetter(1), reverse=True)
    return [self.elements[f[0]] for f in sorted_results]
def layers(self):
    graph_layers = self.multigraph_layers()
    try:
        next(graph_layers)
    except StopIteration:
        return

    def add_nodes_from(layer, nodes):
        layer._multi_graph.add_nodes_from(nodes)

    for graph_layer in graph_layers:
        op_nodes = [node for node in graph_layer if node.type == "op"]
        if not op_nodes:
            return
        new_layer = DAGCircuit()
        new_layer.name = self.name
        for creg in self.cregs.values():
            new_layer.add_creg(creg)
        for qreg in self.qregs.values():
            new_layer.add_qreg(qreg)
        add_nodes_from(new_layer, self.input_map.values())
        add_nodes_from(new_layer, self.output_map.values())
        add_nodes_from(new_layer, op_nodes)
        support_list = [
            op_node.qargs
            for op_node in op_nodes
            if op_node.name not in {"barrier", "snapshot", "save", "load", "noise"}
        ]
        wires = {self.input_map[wire]: self.output_map[wire]
                 for wire in self.wires}
        for op_node in op_nodes:
            args = self._bits_in_condition(op_node.condition) \
                   + op_node.cargs + op_node.qargs
            arg_ids = (self.input_map[(arg[0], arg[1])] for arg in args)
            for arg_id in arg_ids:
                wires[arg_id], wires[op_node] = op_node, wires[arg_id]
        new_layer._multi_graph.add_edges_from(wires.items())
        yield {"graph": new_layer, "partition": support_list}