Dataset Viewer (auto-converted to Parquet)

Column: code — string, lengths 81 to 3.79k characters

def apply_option(self, cmd, option, active=True):
    return re.sub(r'{{{}\:(?P<option>[^}}]*)}}'.format(option),
                  '\g<option>' if active else '', cmd)

def make_local_static_report_files(self):
    for static, pkgdir in self.STATIC_FILES:
        shutil.copyfile(
            data_filename(static, pkgdir),
            os.path.join(self.directory, static)
        )
    if self.extra_css:
        shutil.copyfile(
            self.config.extra_css,
            os.path.join(self.directory, self.extra_css)
        )

def format_filesize(size):
    for suffix in ("bytes", "KB", "MB", "GB", "TB"):
        if size < 1024.0:
            if suffix in ("GB", "TB"):
                return "{0:3.2f} {1}".format(size, suffix)
            else:
                return "{0:3.1f} {1}".format(size, suffix)
        size /= 1024.0

def add_lexicon_ref(self, lrid, name, lrtype, url, lexicon_id, lexicon_name,
                    datcat_id=None, datcat_name=None):
    self.lexicon_refs[lrid] = {
        'LEX_REF_ID': lrid,
        'NAME': name,
        'TYPE': lrtype,
        'URL': url,
        'LEXICON_ID': lexicon_id,
        'LEXICON_NAME': lexicon_name,
        'DATCAT_ID': datcat_id,
        'DATCAT_NAME': datcat_name
    }

def invert_hash(self, tok_hash):
    return [tok_encoded.decode('utf8')
            for (_, tok_encoded) in self.client.scan_keys(
                HASH_KEYWORD_INDEX_TABLE, ((tok_hash,), (tok_hash,)))]

def find_unique_points(explored_parameters):
    ranges = [param.f_get_range(copy=False) for param in explored_parameters]
    zipped_tuples = list(zip(*ranges))
    try:
        unique_elements = OrderedDict()
        for idx, val_tuple in enumerate(zipped_tuples):
            if val_tuple not in unique_elements:
                unique_elements[val_tuple] = []
            unique_elements[val_tuple].append(idx)
        return list(unique_elements.items())
    except TypeError:
        logger = logging.getLogger('pypet.find_unique')
        logger.error('Your parameter entries could not be hashed, '
                     'now I am sorting slowly in O(N**2).')
        unique_elements = []
        for idx, val_tuple in enumerate(zipped_tuples):
            matches = False
            for added_tuple, pos_list in unique_elements:
                matches = True
                for idx2, val in enumerate(added_tuple):
                    if not explored_parameters[idx2]._equal_values(val_tuple[idx2], val):
                        matches = False
                        break
                if matches:
                    pos_list.append(idx)
                    break
            if not matches:
                unique_elements.append((val_tuple, [idx]))
        return unique_elements

def unfinished(cls):
    return [
        cls.NONE,
        cls.SCHEDULED,
        cls.QUEUED,
        cls.RUNNING,
        cls.SHUTDOWN,
        cls.UP_FOR_RETRY,
        cls.UP_FOR_RESCHEDULE
    ]

def merge_left(field, local_task, remote_issue, hamming=False):
    local_field = local_task.get(field, [])
    remote_field = remote_issue.get(field, [])
    if field not in local_task:
        local_task[field] = []
    new_count = 0
    for remote in remote_field:
        for local in local_field:
            if (
                (hamming and get_annotation_hamming_distance(remote, local) == 0)
                or (remote == local)
            ):
                break
        else:
            log.debug("%s not found in %r" % (remote, local_field))
            local_task[field].append(remote)
            new_count += 1
    if new_count > 0:
        log.debug('Added %s new values to %s (total: %s)' % (
            new_count, field, len(local_task[field]),))

def images(self, query=None):
    from sregistry.database.models import Collection, Container
    rows = []
    if query is not None:
        like = "%" + query + "%"
        containers = Container.query.filter(or_(Container.name == query,
                                                Container.tag.like(like),
                                                Container.uri.like(like),
                                                Container.name.like(like))).all()
    else:
        containers = Container.query.all()
    if len(containers) > 0:
        message = " [date] [client]\t[uri]"
        bot.custom(prefix='Containers:', message=message, color="RED")
        for c in containers:
            uri = c.get_uri()
            created_at = c.created_at.strftime('%B %d, %Y')
            rows.append([created_at, " [%s]" % c.client, uri])
        bot.table(rows)
    return containers

def set_max_in_flight(self, max_in_flight):
    assert isinstance(max_in_flight, int)
    self.max_in_flight = max_in_flight
    if max_in_flight == 0:
        for conn in itervalues(self.conns):
            if conn.rdy > 0:
                logger.debug('[%s:%s] rdy: %d -> 0', conn.id, self.name, conn.rdy)
                self._send_rdy(conn, 0)
        self.total_rdy = 0
    else:
        self.need_rdy_redistributed = True
        self._redistribute_rdy_state()

def _store(self, lines, buffer=None, store='source'):
    if buffer is None:
        buffer = self._buffer
    if lines.endswith('\n'):
        buffer.append(lines)
    else:
        buffer.append(lines + '\n')
    setattr(self, store, self._set_source(buffer))

def _ast_option_group_to_code(self, option_group, **kwargs):
    lines = ["option("]
    lines.extend(self._indent(self._ast_to_code(option_group.expression)))
    lines.append(")")
    return lines

def add(symbol: str, date, value, currency: str):
    symbol = symbol.upper()
    currency = currency.upper()
    app = PriceDbApplication()
    price = PriceModel()
    price.symbol.parse(symbol)
    price.datum.from_iso_date_string(date)
    price.value = Decimal(value)
    price.currency = currency
    app.add_price(price)
    app.save()
    click.echo("Price added.")

def _close(self):
    if self.connection:
        with self.wrap_database_errors:
            self.connection.client.close()

def _get_byte_parser(self):
    if not self._byte_parser:
        self._byte_parser = \
            ByteParser(text=self.text, filename=self.filename)
    return self._byte_parser

def _configureShortcuts(self):
    self._upShortcut = QtGui.QShortcut(
        QtGui.QKeySequence('Backspace'), self
    )
    self._upShortcut.setAutoRepeat(False)
    self._upShortcut.activated.connect(self._onNavigateUpButtonClicked)

def update_message_dict(message_dict, action):
    global g_ok_java_messages
    allKeys = g_ok_java_messages.keys()
    for key in message_dict.keys():
        if key in allKeys:
            for message in message_dict[key]:
                if action == 1:
                    if message not in g_ok_java_messages[key]:
                        g_ok_java_messages[key].append(message)
                if action == 2:
                    if message in g_ok_java_messages[key]:
                        g_ok_java_messages[key].remove(message)
        else:
            if action == 1:
                g_ok_java_messages[key] = message_dict[key]

def _addHdlProcToRun(self, trigger: SimSignal, proc) -> None:
    if not self._applyValPlaned:
        self._scheduleApplyValues()
    if isEvDependentOn(trigger, proc):
        if self.now == 0:
            return
        self._seqProcsToRun.append(proc)
    else:
        self._combProcsToRun.append(proc)

def resolve_backend_name(name, backends, deprecated, aliased):
    available = [backend.name() for backend in backends]
    resolved_name = deprecated.get(name, aliased.get(name, name))
    if isinstance(resolved_name, list):
        resolved_name = next((b for b in resolved_name if b in available), "")
    if resolved_name not in available:
        raise LookupError("backend '{}' not found.".format(name))
    if name in deprecated:
        logger.warning("WARNING: '%s' is deprecated. Use '%s'.", name, resolved_name)
    return resolved_name

def _rewrite_insert_nothing(self, sql, params, returning):
    conflict_target = self._build_conflict_target()
    where_clause = ' AND '.join([
        '{0} = %s'.format(self._format_field_name(field_name))
        for field_name in self.query.conflict_target
    ])
    where_clause_params = [
        self._format_field_value(field_name)
        for field_name in self.query.conflict_target
    ]
    params = params + tuple(where_clause_params)
    return (
        (
            'WITH insdata AS ('
            '{insert} ON CONFLICT {conflict_target} DO UPDATE'
            ' SET {pk_column} = NULL WHERE FALSE RETURNING {returning})'
            ' SELECT * FROM insdata UNION ALL'
            ' SELECT {returning} FROM {table} WHERE {where_clause} LIMIT 1;'
        ).format(
            insert=sql,
            conflict_target=conflict_target,
            pk_column=self.qn(self.query.model._meta.pk.column),
            returning=returning,
            table=self.query.objs[0]._meta.db_table,
            where_clause=where_clause
        ),
        params
    )

def get_stores(self, search_term):
    params = {'SearchText': search_term}
    response = self.__get('/storefindermap/storesearch', params=params)
    return Stores(response.json())

def list_recommendations(
        self, keywords=None, max_domain_recommendations=None,
        custom_headers=None, raw=False, **operation_config):
    parameters = models.DomainRecommendationSearchParameters(
        keywords=keywords,
        max_domain_recommendations=max_domain_recommendations)

    def internal_paging(next_link=None, raw=False):
        if not next_link:
            url = self.list_recommendations.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        else:
            url = next_link
            query_parameters = {}
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        body_content = self._serialize.body(parameters, 'DomainRecommendationSearchParameters')
        request = self._client.post(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.DefaultErrorResponseException(self._deserialize, response)
        return response

    deserialized = models.NameIdentifierPaged(internal_paging, self._deserialize.dependencies)
    if raw:
        header_dict = {}
        client_raw_response = models.NameIdentifierPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response
    return deserialized

def query_string(self, **params):
    return SearchResult(self, self._api.get(self._href, **params))

def add_bias(self, name, size, mean=0, std=1):
    mean = self.kwargs.get('mean_{}'.format(name), mean)
    std = self.kwargs.get('std_{}'.format(name), std)
    self._params.append(theano.shared(
        util.random_vector(size, mean, std, rng=self.rng),
        name=self._fmt(name)))

def ancestors(self, lhs, rhs):
    def _search(node):
        if node in lhs:
            return True
        if not node.parent:
            return False
        return _search(node.parent)
    return [node for node in rhs if _search(node)]

def get_prices_on(self, on_date: str, namespace: str, symbol: str):
    repo = self.get_price_repository()
    query = (
        repo.query.filter(dal.Price.namespace == namespace)
        .filter(dal.Price.symbol == symbol)
        .filter(dal.Price.date == on_date)
        .order_by(dal.Price.time.desc())
    )
    result = query.first()
    return result

def getEvents(self):
    events = []
    for json in self.conn.endpoints["self"].getEvents():
        events.append(SkypeEvent.fromRaw(self, json))
    return events

def prepare(self):
    self.output_dim = 10
    self.encoder = Chain(self.input_dim).stack(Dense(self.internal_layer_size, 'tanh'))
    self.decoder = Chain(self.internal_layer_size).stack(Dense(self.input_dim))
    self.classifier = Chain(self.internal_layer_size).stack(Dense(50, 'tanh'),
                                                            Dense(self.output_dim),
                                                            Softmax())
    self.register_inner_layers(self.encoder, self.decoder, self.classifier)
    self.target_input = T.ivector('target')
    self.register_external_inputs(self.target_input)

def _set_configurations(self):
    logger.debug("======================")
    logger.debug("Setting configurations")
    logger.debug("======================")
    resources = ""
    containers = ""
    params = ""
    manifest = ""
    if self.merge_params:
        params += self._get_merged_params_string()
        help_list = self._get_merged_params_help()
    else:
        params += self._get_params_string()
        help_list = self._get_params_help()
    for p in self.processes:
        if not p.directives:
            continue
        logger.debug("[{}] Adding directives: {}".format(
            p.template, p.directives))
        resources += self._get_resources_string(p.directives, p.pid)
        containers += self._get_container_string(p.directives, p.pid)
    manifest = self._get_manifest_string()
    self.resources = self._render_config("resources.config", {
        "process_info": resources
    })
    self.containers = self._render_config("containers.config", {
        "container_info": containers
    })
    self.params = self._render_config("params.config", {
        "params_info": params
    })
    self.manifest = self._render_config("manifest.config", {
        "manifest_info": manifest
    })
    self.help = self._render_config("Helper.groovy", {
        "nf_file": basename(self.nf_file),
        "help_list": help_list,
        "version": __version__,
        "pipeline_name": " ".join([x.upper() for x in self.pipeline_name])
    })
    self.user_config = self._render_config("user.config", {})

def add_range(self, sequence, begin, end):
    sequence.parser_tree = parsing.Range(self.value(begin).strip("'"),
                                         self.value(end).strip("'"))
    return True

def streams(self):
    result = self.db.read(self.path, {"q": "ls"})
    if result is None or result.json() is None:
        return []
    streams = []
    for s in result.json():
        strm = self[s["name"]]
        strm.metadata = s
        streams.append(strm)
    return streams

def truncate_string(data, headers, max_field_width=None, **_):
    return (
        ([utils.truncate_string(v, max_field_width) for v in row] for row in data),
        [utils.truncate_string(h, max_field_width) for h in headers]
    )

def login(self):
    if self._session is None:
        self._session = requests.session()
        self._session.headers.update({'User-agent': str(UserAgent().random)})
    return self._post_login_page()

def file_read(filename):
    fobj = open(filename, 'r')
    source = fobj.read()
    fobj.close()
    return source

def einsum_vecmul_index(gate_indices, number_of_qubits):
    mat_l, mat_r, tens_lin, tens_lout = _einsum_matmul_index_helper(gate_indices,
                                                                    number_of_qubits)
    return "{mat_l}{mat_r}, ".format(mat_l=mat_l, mat_r=mat_r) + \
        "{tens_lin}->{tens_lout}".format(tens_lin=tens_lin, tens_lout=tens_lout)

def validate_widget(widget):
    if not has_valid_id(widget):
        raise InvalidWidget("%s must contain a valid 'id' attribute" % widget.__name__)
    if not has_valid_name(widget):
        raise InvalidWidget("%s must contain a valid 'name' attribute" % widget.__name__)
    if not has_valid_template(widget):
        raise InvalidWidget("%s must contain a valid 'template' attribute" % widget.__name__)
    if not hasattr(widget, 'zones') or not widget.zones:
        raise InvalidWidget("%s must be compatible with at least one zone" % widget.__name__)

def institute(context, institute_id, sanger_recipient, coverage_cutoff,
              frequency_cutoff, display_name, remove_sanger):
    adapter = context.obj['adapter']
    LOG.info("Running scout update institute")
    try:
        adapter.update_institute(
            internal_id=institute_id,
            sanger_recipient=sanger_recipient,
            coverage_cutoff=coverage_cutoff,
            frequency_cutoff=frequency_cutoff,
            display_name=display_name,
            remove_sanger=remove_sanger,
        )
    except Exception as err:
        LOG.warning(err)
        context.abort()

def get_agency_id(relation):
    op = relation.tags.get('operator')
    if op:
        return int(hashlib.sha256(op.encode('utf-8')).hexdigest(), 16) % 10**8
    return -1

def writes(nb, format, **kwargs):
    format = unicode(format)
    if format == u'json' or format == u'ipynb':
        return writes_json(nb, **kwargs)
    elif format == u'py':
        return writes_py(nb, **kwargs)
    else:
        raise NBFormatError('Unsupported format: %s' % format)

def f_add_config_group(self, *args, **kwargs):
    return self._nn_interface._add_generic(self, type_name=CONFIG_GROUP,
                                           group_type_name=CONFIG_GROUP,
                                           args=args, kwargs=kwargs)

def _get_authorization(self, request, httpclient):
    return 'WRAP access_token="' + \
        self._get_token(request.host, request.path, httpclient) + '"'

def evaluate(self, expression, i1=None, i2=None, out=None, selection=None, delay=False):
    expression = _ensure_strings_from_expressions(expression)
    result = self.server._call_dataset("evaluate", self, expression=expression,
                                       i1=i1, i2=i2, selection=selection, delay=delay)
    return result

def _get_value(self, key, func=None, split_val=None, as_boolean=False, exception_default=None):
    try:
        if as_boolean:
            return self.config.getboolean(key[0], key[1])
        value = self.config.get(key[0], key[1])
        if split_val is not None:
            value = value.split(split_val)
        if func is not None:
            return func(value)
        return value
    except (KeyError, configparser.NoSectionError, configparser.NoOptionError) as e:
        if exception_default is not None:
            return exception_default
        raise KeyError(e)

def get_system_cpu_times():
    user, system, idle = 0, 0, 0
    for cpu_time in _psutil_mswindows.get_system_cpu_times():
        user += cpu_time[0]
        system += cpu_time[1]
        idle += cpu_time[2]
    return _cputimes_ntuple(user, system, idle)

def _spark_fit(self, cls, Z, *args, **kwargs):
    mapper = lambda X_y: super(cls, self).fit(
        X_y[0], X_y[1], *args, **kwargs
    )
    models = Z.map(mapper)
    avg = models.reduce(operator.add) / models.count()
    self.__dict__.update(avg.__dict__)
    return self

def main(sample_id, assembly_file, coverage_file, coverage_bp_file, bam_file, opts, gsize):
    min_assembly_coverage, max_contigs = opts
    logger.info("Starting assembly mapping processing")
    logger.info("Parsing coverage table")
    coverage_info, a_cov = parse_coverage_table(coverage_file)
    a_size, contig_size = get_assembly_size(assembly_file)
    logger.info("Assembly processed with a total size of '{}' and coverage"
                " of '{}'".format(a_size, a_cov))
    logger.info("Parsing coverage per bp table")
    coverage_bp_data = get_coverage_from_file(coverage_bp_file)
    min_coverage = evaluate_min_coverage(min_assembly_coverage, a_cov, a_size)
    filtered_assembly = "{}_filt.fasta".format(
        os.path.splitext(assembly_file)[0])
    filtered_bam = "filtered.bam"
    logger.info("Checking filtered assembly")
    if check_filtered_assembly(coverage_info, coverage_bp_data, min_coverage,
                               gsize, contig_size, int(max_contigs), sample_id):
        logger.info("Filtered assembly passed minimum size threshold")
        logger.info("Writting filtered assembly")
        filter_assembly(assembly_file, min_coverage, coverage_info,
                        filtered_assembly)
        logger.info("Filtering BAM file according to saved contigs")
        filter_bam(coverage_info, bam_file, min_coverage, filtered_bam)
    else:
        shutil.copy(assembly_file, filtered_assembly)
        shutil.copy(bam_file, filtered_bam)
        shutil.copy(bam_file + ".bai", filtered_bam + ".bai")
    with open(".status", "w") as status_fh:
        status_fh.write("pass")

def fetchmany(self, size=None):
    self._check_executed()
    r = self._fetch_row(size or self.arraysize)
    self.rownumber = self.rownumber + len(r)
    if not r:
        self._warning_check()
    return r

def find_source(self, filename):
    source = None
    base, ext = os.path.splitext(filename)
    TRY_EXTS = {
        '.py': ['.py', '.pyw'],
        '.pyw': ['.pyw'],
    }
    try_exts = TRY_EXTS.get(ext)
    if not try_exts:
        return filename, None
    for try_ext in try_exts:
        try_filename = base + try_ext
        if os.path.exists(try_filename):
            return try_filename, None
        source = self.coverage.file_locator.get_zip_data(try_filename)
        if source:
            return try_filename, source
    raise NoSource("No source for code: '%s'" % filename)

def get_document(self, document_id, database_name=None, collection_name=None):
    if document_id is None:
        raise AirflowBadRequest("Cannot get a document without an id")
    try:
        return self.get_conn().ReadItem(
            get_document_link(
                self.__get_database_name(database_name),
                self.__get_collection_name(collection_name),
                document_id))
    except HTTPFailure:
        return None

def execute_actions(self, cwd):
    self._execute_globals(cwd)
    for action in self.actions:
        logger.info("executing {}".format(action))
        p = subprocess.Popen(action, shell=True, cwd=cwd)
        p.wait()

def __var_find_to_py_ast(
    var_name: str, ns_name: str, py_var_ctx: ast.AST
) -> GeneratedPyAST:
    return GeneratedPyAST(
        node=ast.Attribute(
            value=ast.Call(
                func=_FIND_VAR_FN_NAME,
                args=[
                    ast.Call(
                        func=_NEW_SYM_FN_NAME,
                        args=[ast.Str(var_name)],
                        keywords=[ast.keyword(arg="ns", value=ast.Str(ns_name))],
                    )
                ],
                keywords=[],
            ),
            attr="value",
            ctx=py_var_ctx,
        )
    )

def create_storage_account(self, service_name, description, label,
                           affinity_group=None, location=None,
                           geo_replication_enabled=None,
                           extended_properties=None,
                           account_type='Standard_GRS'):
    _validate_not_none('service_name', service_name)
    _validate_not_none('description', description)
    _validate_not_none('label', label)
    if affinity_group is None and location is None:
        raise ValueError(
            'location or affinity_group must be specified')
    if affinity_group is not None and location is not None:
        raise ValueError(
            'Only one of location or affinity_group needs to be specified')
    if geo_replication_enabled == False:
        account_type = 'Standard_LRS'
    return self._perform_post(
        self._get_storage_service_path(),
        _XmlSerializer.create_storage_service_input_to_xml(
            service_name,
            description,
            label,
            affinity_group,
            location,
            account_type,
            extended_properties),
        as_async=True)

def set_selection(self, selection, name="default", executor=None):
    def create(current):
        return selection
    self._selection(create, name, executor=executor, execute_fully=True)

def set_resolved_name(self, ref: dict, type_name2solve: TypeName, type_name_ref: TypeName):
    if self.resolution[type_name2solve.value] is None:
        self.resolution[type_name2solve.value] = ref[type_name_ref.value]

def format_data(self, data, scale=True):
    if len(self.analytes) == 1:
        d = nominal_values(data[self.analytes[0]])
        ds = np.array(list(zip(d, np.zeros(len(d)))))
    else:
        d = [nominal_values(data[a]) for a in self.analytes]
        ds = np.vstack(d).T
    finite = np.isfinite(ds).sum(1) == ds.shape[1]
    sampled = np.arange(data[self.analytes[0]].size)[finite]
    ds = ds[finite]
    if scale:
        ds = self.scaler.transform(ds)
    return ds, sampled

def _referer(self, extension):
    iana_record = self.lookup.whois(
        PyFunceble.CONFIGURATION["iana_whois_server"], "hello.%s" % extension
    )
    if iana_record and "refer" in iana_record:
        regex_referer = r"(?s)refer\:\s+([a-zA-Z0-9._-]+)\n"
        matched = Regex(
            iana_record, regex_referer, return_data=True, group=1
        ).match()
        if matched:
            return matched
    if extension in self.manual_server:
        return self.manual_server[extension]
    return None

def shape_rb_data(raw_rb):
    rb_data = []
    rb_data.append(np.mean(raw_rb, 0))
    rb_data.append(np.std(raw_rb, 0))
    return rb_data

def update_function(self, name, body, update_mask):
    response = self.get_conn().projects().locations().functions().patch(
        updateMask=",".join(update_mask),
        name=name,
        body=body
    ).execute(num_retries=self.num_retries)
    operation_name = response["name"]
    self._wait_for_operation_to_complete(operation_name=operation_name)

def boolean(ctx, obj):
    if hasattr(obj, 'compute'):
        obj = next(seq.compute(ctx), '')
    else:
        obj = seq
    yield next(to_boolean(obj), '')

def expects_none(options):
    if any(options.get(key) is not None for key in ["count", "maximum", "minimum", "between"]):
        return matches_count(0, options)
    else:
        return False

def __early_downsample(y, sr, hop_length, res_type, n_octaves,
                       nyquist, filter_cutoff, scale):
    downsample_count = __early_downsample_count(nyquist, filter_cutoff,
                                                hop_length, n_octaves)
    if downsample_count > 0 and res_type == 'kaiser_fast':
        downsample_factor = 2**(downsample_count)
        hop_length //= downsample_factor
        if len(y) < downsample_factor:
            raise ParameterError('Input signal length={:d} is too short for '
                                 '{:d}-octave CQT'.format(len(y), n_octaves))
        new_sr = sr / float(downsample_factor)
        y = audio.resample(y, sr, new_sr, res_type=res_type, scale=True)
        if not scale:
            y *= np.sqrt(downsample_factor)
        sr = new_sr
    return y, sr, hop_length

def log_parser(self):
    size_stamp = os.path.getsize(self.log_file)
    self.log_retry = 0
    if size_stamp and size_stamp == self.log_sizestamp:
        return
    else:
        logger.debug("Updating log size stamp to: {}".format(size_stamp))
        self.log_sizestamp = size_stamp
    r = ".* (.*) \[.*\].*\[(.*)\].*process > (.*) \((.*)\).*"
    with open(self.log_file) as fh:
        for line in fh:
            if "Submitted process >" in line or \
                    "Re-submitted process >" in line or \
                    "Cached process >" in line:
                m = re.match(r, line)
                if not m:
                    continue
                time_start = m.group(1)
                workdir = m.group(2)
                process = m.group(3)
                tag = m.group(4)
                if time_start + tag not in self.stored_log_ids:
                    self.stored_log_ids.append(time_start + tag)
                else:
                    continue
                if process not in self.processes:
                    continue
                p = self.processes[process]
                if tag in list(p["finished"]) + list(p["retry"]):
                    continue
                if tag in list(p["failed"]) and \
                        "Re-submitted process >" in line:
                    p["retry"].add(tag)
                    self.send = True
                    continue
                p["barrier"] = "R"
                if tag not in p["submitted"]:
                    p["submitted"].add(tag)
                if tag not in self.process_tags[process]:
                    self.process_tags[process][tag] = {
                        "workdir": self._expand_path(workdir),
                        "start": time_start
                    }
                    self.send = True
                elif not self.process_tags[process][tag]["start"]:
                    self.process_tags[process][tag]["start"] = time_start
                    self.send = True
    self._update_pipeline_status()

def _remove_exploration(self):
    for param in self._explored_parameters.values():
        if param._stored:
            try:
                self.f_delete_item(param)
            except Exception:
                self._logger.exception('Could not delete expanded parameter `%s` '
                                       'from disk.' % param.v_full_name)

def setup_platform(hass, config, add_entities, discovery_info=None):
    host = config.get(CONF_HOST)
    token = config.get(CONF_ACCESS_TOKEN)
    name = config.get(CONF_NAME)
    volume_step = config.get(CONF_VOLUME_STEP)
    device_type = config.get(CONF_DEVICE_CLASS)
    device = VizioDevice(host, token, name, volume_step, device_type)
    if device.validate_setup() is False:
        _LOGGER.error("Failed to set up Vizio platform, "
                      "please check if host and API key are correct")
        return
    elif (token is None or token == "") and device_type == "tv":
        _LOGGER.error("Failed to set up Vizio platform, "
                      "if device_class is 'tv' then an auth_token needs "
                      "to be provided, otherwise if device_class is "
                      "'soundbar' then add the right device_class to config")
        return
    if config.get(CONF_SUPPRESS_WARNING):
        from requests.packages import urllib3
        _LOGGER.warning("InsecureRequestWarning is disabled "
                        "because of Vizio platform configuration")
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    add_entities([device], True)

def handle_oauth2_response(self, args):
    client = self.make_client()
    remote_args = {
        'code': args.get('code'),
        'client_secret': self.consumer_secret,
        'redirect_uri': session.get('%s_oauthredir' % self.name)
    }
    log.debug('Prepare oauth2 remote args %r', remote_args)
    remote_args.update(self.access_token_params)
    headers = copy(self._access_token_headers)
    if self.access_token_method == 'POST':
        headers.update({'Content-Type': 'application/x-www-form-urlencoded'})
        body = client.prepare_request_body(**remote_args)
        resp, content = self.http_request(
            self.expand_url(self.access_token_url),
            headers=headers,
            data=to_bytes(body, self.encoding),
            method=self.access_token_method,
        )
    elif self.access_token_method == 'GET':
        qs = client.prepare_request_body(**remote_args)
        url = self.expand_url(self.access_token_url)
        url += ('?' in url and '&' or '?') + qs
        resp, content = self.http_request(
            url,
            headers=headers,
            method=self.access_token_method,
        )
    else:
        raise OAuthException(
            'Unsupported access_token_method: %s' % self.access_token_method
        )
    data = parse_response(resp, content, content_type=self.content_type)
    if resp.code not in (200, 201):
        raise OAuthException(
            'Invalid response from %s' % self.name,
            type='invalid_response', data=data
        )
    return data

def decode(self, val):
    new_val = self.decode_date(val)
    if val != new_val:
        return new_val
    return json.JSONDecoder.decode(self, val)

def extractPrintSaveIntermittens():
    global g_summary_dict_intermittents
    localtz = time.tzname[0]
    for ind in range(len(g_summary_dict_all["TestName"])):
        if g_summary_dict_all["TestInfo"][ind]["FailureCount"] >= g_threshold_failure:
            addFailedTests(g_summary_dict_intermittents, g_summary_dict_all, ind)
    if len(g_summary_dict_intermittents["TestName"]) > 0:
        json.dump(g_summary_dict_intermittents, open(g_summary_dict_name, 'w'))
        with open(g_summary_csv_filename, 'w') as summaryFile:
            for ind in range(len(g_summary_dict_intermittents["TestName"])):
                testName = g_summary_dict_intermittents["TestName"][ind]
                numberFailure = g_summary_dict_intermittents["TestInfo"][ind]["FailureCount"]
                firstFailedTS = parser.parse(
                    time.ctime(min(g_summary_dict_intermittents["TestInfo"][ind]["Timestamp"]))
                    + ' ' + localtz)
                firstFailedStr = firstFailedTS.strftime("%a %b %d %H:%M:%S %Y %Z")
                recentFail = parser.parse(
                    time.ctime(max(g_summary_dict_intermittents["TestInfo"][ind]["Timestamp"]))
                    + ' ' + localtz)
                recentFailStr = recentFail.strftime("%a %b %d %H:%M:%S %Y %Z")
                eachTest = "{0}, {1}, {2}, {3}\n".format(
                    testName, recentFailStr, numberFailure,
                    g_summary_dict_intermittents["TestInfo"][ind]["TestCategory"][0])
                summaryFile.write(eachTest)
                print("Intermittent: {0}, Last failed: {1}, Failed {2} times since "
                      "{3}".format(testName, recentFailStr, numberFailure, firstFailedStr))

def _get_rule_source(self, rule):
    p = len(self.input_source) + rule.position
    source = self.input_source[p:p + rule.consumed].rstrip()
    return self._indent(source, depth=self.indent + " ", skip_first_line=True)

def _resubscribe(self, soft=False):
    if self.bitfinex_config:
        self.send(**self.bitfinex_config)
    q_list = []
    while True:
        try:
            identifier, q = self.channel_configs.popitem(last=True if soft else False)
        except KeyError:
            break
        q_list.append((identifier, q.copy()))
        if identifier == 'auth':
            self.send(**q, auth=True)
            continue
        if soft:
            q['event'] = 'unsubscribe'
        self.send(**q)
    if soft:
        for identifier, q in reversed(q_list):
            self.channel_configs[identifier] = q
            self.send(**q)
    else:
        for identifier, q in q_list:
            self.channel_configs[identifier] = q

def _receive_data(self):
    while True:
        while len(self._buffer) < self.max_size and self.conn.poll():
            data = self._read_chunks()
            if data is not None:
                self._buffer.append(data)
        if len(self._buffer) > 0:
            return self._buffer.popleft()

def _build_purchase_item(course_id, course_url, cost_in_cents, mode, course_data, sku):
    item = {
        'id': "{}-{}".format(course_id, mode),
        'url': course_url,
        'price': cost_in_cents,
        'qty': 1,
    }
    if 'title' in course_data:
        item['title'] = course_data['title']
    else:
        item['title'] = 'Course {} mode: {}'.format(course_id, mode)
    if 'tags' in course_data:
        item['tags'] = course_data['tags']
    item['vars'] = dict(course_data.get('vars', {}), mode=mode, course_run_id=course_id)
    item['vars']['purchase_sku'] = sku
    return item

def _vector_matrix(vs, ms):
    return tf.reduce_sum(input_tensor=vs[..., tf.newaxis] * ms, axis=-2)

def mix_over_posterior_draws(means, variances):
    with tf.compat.v1.name_scope(
            'mix_over_posterior_draws', values=[means, variances]):
        num_posterior_draws = dist_util.prefer_static_value(
            tf.shape(input=means))[0]
        component_observations = tfd.Independent(
            distribution=tfd.Normal(
                loc=dist_util.move_dimension(means, 0, -2),
                scale=tf.sqrt(dist_util.move_dimension(variances, 0, -2))),
            reinterpreted_batch_ndims=1)
        return tfd.MixtureSameFamily(
            mixture_distribution=tfd.Categorical(
                logits=tf.zeros([num_posterior_draws],
                                dtype=component_observations.dtype)),
            components_distribution=component_observations)

def DeleteItem(self, item):
    "Remove the item from the list and unset the related data"
    wx_data = self.GetItemData(item)
    py_data = self._py_data_map[wx_data]
    del self._py_data_map[wx_data]
    del self._wx_data_map[py_data]
    wx.ListCtrl.DeleteItem(self, item)

def add_route(self, command, adapter):
    if not isinstance(adapter, BaseAdapter):
        try:
            adapter = self.adapter_aliases[adapter]
        except KeyError:
            self.adapter_aliases[adapter] = adapter = resolve_adapter(adapter)
    self.routes[command] = adapter
    return self

def progress(iterator, prefix):
    if terminal_width(prefix) > 25:
        prefix = (".." + get_cut_prefix(prefix, 23))
    speed_updated = start = time()
    speed_written = written = 0
    speed_history = deque(maxlen=5)
    for data in iterator:
        yield data
        now = time()
        elapsed = now - start
        written += len(data)
        speed_elapsed = now - speed_updated
        if speed_elapsed >= 0.5:
            speed_history.appendleft((
                written - speed_written,
                speed_updated,
            ))
            speed_updated = now
            speed_written = written
            speed_history_written = sum(h[0] for h in speed_history)
            speed_history_elapsed = now - speed_history[-1][1]
            speed = speed_history_written / speed_history_elapsed
            status = create_status_line(
                prefix=prefix,
                written=format_filesize(written),
                elapsed=format_time(elapsed),
                speed=format_filesize(speed)
            )
            print_inplace(status)
    sys.stderr.write("\n")
    sys.stderr.flush()

def get_params(brightness, contrast, saturation, hue):
    transforms = []
    if brightness is not None:
        brightness_factor = random.uniform(brightness[0], brightness[1])
        transforms.append(Lambda(lambda img: F.adjust_brightness(img, brightness_factor)))
    if contrast is not None:
        contrast_factor = random.uniform(contrast[0], contrast[1])
        transforms.append(Lambda(lambda img: F.adjust_contrast(img, contrast_factor)))
    if saturation is not None:
        saturation_factor = random.uniform(saturation[0], saturation[1])
        transforms.append(Lambda(lambda img: F.adjust_saturation(img, saturation_factor)))
    if hue is not None:
        hue_factor = random.uniform(hue[0], hue[1])
        transforms.append(Lambda(lambda img: F.adjust_hue(img, hue_factor)))
    random.shuffle(transforms)
    transform = Compose(transforms)
    return transform

def panel(context, panel, version, update_date, update_version):
    adapter = context.obj['adapter']
    panel_obj = adapter.gene_panel(panel, version=version)
    if not panel_obj:
        LOG.warning("Panel %s (version %s) could not be found" % (panel, version))
        context.abort()
    date_obj = None
    if update_date:
        try:
            date_obj = get_date(update_date)
        except Exception as err:
            LOG.warning(err)
            context.abort()
    update_panel(
        adapter, panel,
        panel_version=panel_obj['version'],
        new_version=update_version,
        new_date=date_obj
    )

def random_ports(port, n):
    for i in range(min(5, n)):
        yield port + i
    for i in range(n - 5):
        yield port + random.randint(-2 * n, 2 * n)

def PermissiveDict(fields=None):
    if fields:
        check_user_facing_fields_dict(fields, 'PermissiveDict')

    class _PermissiveDict(_ConfigComposite):
        def __init__(self):
            key = 'PermissiveDict.' + str(DictCounter.get_next_count())
            super(_PermissiveDict, self).__init__(
                name=None,
                key=key,
                fields=fields or dict(),
                description='A configuration dictionary with typed fields',
                type_attributes=ConfigTypeAttributes(is_builtin=True),
            )

        @property
        def is_permissive_composite(self):
            return True

    return _PermissiveDict

def gravatar_url(user_or_email, size=GRAVATAR_DEFAULT_SIZE):
    if hasattr(user_or_email, 'email'):
        email = user_or_email.email
    else:
        email = user_or_email
    try:
        return escape(get_gravatar_url(email=email, size=size))
    except:
        return ''

def run_as_cmd(cmd, user, shell='bash'):
    to_execute = get_shell(shell) + [EXECUTE_SHELL_PARAM, cmd]
    if user == 'root':
        return to_execute
    return ['sudo', '-s', '--set-home', '-u', user] + to_execute

def profile_function(self):
    with _CodeHeatmapCalculator() as prof:
        result = self._run_object(*self._run_args, **self._run_kwargs)
    code_lines, start_line = inspect.getsourcelines(self._run_object)
    source_lines = []
    for line in code_lines:
        source_lines.append(('line', start_line, line))
        start_line += 1
    filename = os.path.abspath(inspect.getsourcefile(self._run_object))
    heatmap = prof.heatmap[filename]
    run_time = sum(time for time in heatmap.values())
    return {
        'objectName': self._object_name,
        'runTime': run_time,
        'result': result,
        'timestamp': int(time.time()),
        'heatmaps': [{
            'name': self._object_name,
            'heatmap': heatmap,
            'executionCount': prof.execution_count[filename],
            'srcCode': source_lines,
            'runTime': run_time
        }]
    }

def dashboard(request):
    if not isinstance(mc_client, dict):
        cache_stats = _get_cache_stats()
    else:
        cache_stats = None
    if cache_stats:
        data = _context_data({
            'title': _('Memcache Dashboard'),
            'cache_stats': cache_stats,
            'can_get_slabs': hasattr(mc_client, 'get_slabs'),
            'REFRESH_RATE': SETTINGS['REFRESH_RATE'],
        }, request)
        template = 'memcache_admin/dashboard.html'
    else:
        data = _context_data({
            'title': _('Memcache Dashboard - Error'),
            'error_message': _('Unable to connect to a memcache server.'),
        }, request)
        template = 'memcache_admin/dashboard_error.html'
    return render_to_response(template, data, RequestContext(request))

def _validate_initial_statevector(self):
    if self._initial_statevector is None:
        return
    length = len(self._initial_statevector)
    required_dim = 2 ** self._number_of_qubits
    if length != required_dim:
        raise BasicAerError('initial statevector is incorrect length: ' +
                            '{} != {}'.format(length, required_dim))

def chatToId(url):
    match = re.search(r"conversations/([0-9]+:[^/]+)", url)
    return match.group(1) if match else None

def prepare_pids(self):
    self.pids = []
    for fetcher in self.pid_fetchers:
        val = fetcher(None, self.revisions[-1][1])
        if val:
            self.pids.append(val)

def use(network=False):
    global _engine
    __engine = _engine
    activated = __engine.active
    if activated:
        __engine.disable()
    _engine = Engine(network=network)
    _engine.activate()
    yield _engine
    _engine.disable()
    if network:
        _engine.disable_network()
    _engine = __engine
    if activated:
        _engine.activate()

def parse_yaml_linenumbers(data, filename):
    def compose_node(parent, index):
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        if ANSIBLE_VERSION < 2:
            mapping = Constructor.construct_mapping(loader, node, deep=deep)
        else:
            mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        if ANSIBLE_VERSION < 2:
            loader = yaml.Loader(data)
        else:
            import inspect
            kwargs = {}
            if 'vault_password' in inspect.getargspec(AnsibleLoader.__init__).args:
                kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
            loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data

def log_cdf_laplace(x, name="log_cdf_laplace"):
    with tf.name_scope(name):
        x = tf.convert_to_tensor(value=x, name="x")
        lower_solution = -np.log(2.) + x
        safe_exp_neg_x = tf.exp(-tf.abs(x))
        upper_solution = tf.math.log1p(-0.5 * safe_exp_neg_x)
        return tf.where(x < 0., lower_solution, upper_solution)

def samefile(path1, path2):
    info1 = fs.getfileinfo(path1)
    info2 = fs.getfileinfo(path2)
    return (info1.dwVolumeSerialNumber == info2.dwVolumeSerialNumber and
            info1.nFileIndexHigh == info2.nFileIndexHigh and
            info1.nFileIndexLow == info2.nFileIndexLow)

def add_netnode_plugin_name(plugin_name):
    current_names = set(get_netnode_plugin_names())
    if plugin_name in current_names:
        return
    current_names.add(plugin_name)
    get_meta_netnode()[PLUGIN_NAMES_KEY] = json.dumps(list(current_names))

def set_serial(self, hex_str):
    bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free)
    bignum_ptr = _ffi.new("BIGNUM**")
    bignum_ptr[0] = bignum_serial
    bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str)
    if not bn_result:
        raise ValueError("bad hex string")
    asn1_serial = _ffi.gc(
        _lib.BN_to_ASN1_INTEGER(bignum_serial, _ffi.NULL),
        _lib.ASN1_INTEGER_free)
    _lib.X509_REVOKED_set_serialNumber(self._revoked, asn1_serial)

def run_samblaster(job, sam):
    work_dir = job.fileStore.getLocalTempDir()
    job.fileStore.readGlobalFile(sam, os.path.join(work_dir, 'input.sam'))
    command = ['/usr/local/bin/samblaster',
               '-i', '/data/input.sam',
               '-o', '/data/output.sam',
               '--ignoreUnmated']
    start_time = time.time()
    dockerCall(job=job, workDir=work_dir, parameters=command,
               tool='quay.io/biocontainers/samblaster:0.1.24--0')
    end_time = time.time()
    _log_runtime(job, start_time, end_time, "SAMBLASTER")
    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'output.sam'))

def handle_stranded_tasks(self, engine):
    lost = self.pending[engine]
    for msg_id in lost.keys():
        if msg_id not in self.pending[engine]:
            continue
        raw_msg = lost[msg_id].raw_msg
        idents, msg = self.session.feed_identities(raw_msg, copy=False)
        parent = self.session.unpack(msg[1].bytes)
        idents = [engine, idents[0]]
        try:
            raise error.EngineError("Engine %r died while running task %r" % (engine, msg_id))
        except:
            content = error.wrap_exception()
        header = dict(
            status='error',
            engine=engine,
            date=datetime.now(),
        )
        msg = self.session.msg('apply_reply', content, parent=parent, subheader=header)
        raw_reply = map(zmq.Message, self.session.serialize(msg, ident=idents))
        self.dispatch_result(raw_reply)
    self.completed.pop(engine)
    self.failed.pop(engine)

def get_group_all(group, path=None):
    result = []
    for config, distro in iter_files_distros(path=path):
        if group in config:
            for name, epstr in config[group].items():
                with BadEntryPoint.err_to_warnings():
                    result.append(EntryPoint.from_string(epstr, name, distro))
    return result

def build_filters_and_sizers(self, ppoi_value, create_on_demand):
    name = self.name
    if not name and self.field.placeholder_image_name:
        name = self.field.placeholder_image_name
    self.filters = FilterLibrary(
        name,
        self.storage,
        versatileimagefield_registry,
        ppoi_value,
        create_on_demand
    )
    for (
        attr_name,
        sizedimage_cls
    ) in iteritems(versatileimagefield_registry._sizedimage_registry):
        setattr(
            self,
            attr_name,
            sizedimage_cls(
                path_to_image=name,
                storage=self.storage,
                create_on_demand=create_on_demand,
                ppoi=ppoi_value
            )
        )

def predict_logit(self, x, **kwargs):
    return self.feed_forward(x, **kwargs)[self.layers[-1].full_name('pre')]

def get_source_lane(fork_process, pipeline_list):
    fork_source = fork_process[-1]
    fork_sig = [x for x in fork_process if x != "__init__"]
    for position, p in enumerate(pipeline_list[::-1]):
        if p["output"]["process"] == fork_source:
            lane = p["output"]["lane"]
            logger.debug("Possible source match found in position {} in lane"
                         " {}".format(position, lane))
            lane_sequence = [x["output"]["process"] for x in pipeline_list
                             if x["output"]["lane"] == lane]
            logger.debug("Testing lane sequence '{}' against fork signature"
                         " '{}'".format(lane_sequence, fork_sig))
            if lane_sequence == fork_sig:
                return p["output"]["lane"]
    return 0

def article(self, msgid_article=None, decode=None):
    args = None
    if msgid_article is not None:
        args = utils.unparse_msgid_article(msgid_article)
    code, message = self.command("ARTICLE", args)
    if code != 220:
        raise NNTPReplyError(code, message)
    parts = message.split(None, 1)
    try:
        articleno = int(parts[0])
    except ValueError:
        raise NNTPProtocolError(message)
    headers = utils.parse_headers(self.info_gen(code, message))
    decode = "yEnc" in headers.get("subject", "")
    escape = 0
    crc32 = 0
    body = []
    for line in self.info_gen(code, message):
        if decode:
            if line.startswith("=y"):
                continue
            line, escape, crc32 = yenc.decode(line, escape, crc32)
        body.append(line)
    return articleno, headers, "".join(body)

End of preview.