def wrap_function(func=None, error_threshold=None, reraise_exception=True, save_current_stack_trace=True):
    if func:
        return flawless.client.client._wrap_function_with_error_decorator(
            func=func,
            error_threshold=error_threshold,
            reraise_exception=reraise_exception,
            save_current_stack_trace=save_current_stack_trace)
    else:
        return functools.partial(flawless.client.client._wrap_function_with_error_decorator,
                                 error_threshold=error_threshold,
                                 reraise_exception=reraise_exception,
                                 save_current_stack_trace=save_current_stack_trace)
def post(self, url, params=None, data=None, files=None, **kwargs):
    return self.call_api(
        "POST", url, params=params, data=data, files=files, **kwargs
    )
def is_dark_rgb(r, g, b):
    midpoint = None
    try:
        midpoint = int(environ.get('TERMINAL_COLOR_MIDPOINT', None))
    except (TypeError, ValueError):
        pass
    if not midpoint:
        # Fall back to a default midpoint based on the terminal type.
        term = environ.get('TERM', None)
        midpoint = 383 if term and term == 'xterm-256color' else 117963
    return (16 * r + 16 * g + 16 * b) < midpoint
def stop(self):
    with self.synclock:
        if self.syncthread is not None:
            self.syncthread.cancel()
            self.syncthread = None
def evaluate_marker(cls, text, extra=None):
    return cls.interpret(parser.expr(text).totuple(1)[1])
def _get_job(self, project_id, job_id):
    job_name = 'projects/{}/jobs/{}'.format(project_id, job_id)
    request = self._mlengine.projects().jobs().get(name=job_name)
    while True:
        try:
            return request.execute()
        except HttpError as e:
            if e.resp.status == 429:
                # Retry after back-off when the API rate limit is hit.
                time.sleep(30)
            else:
                self.log.error('Failed to get MLEngine job: {}'.format(e))
                raise
def pause(self):
    for tracer in self.tracers:
        tracer.stop()
        stats = tracer.get_stats()
        if stats:
            print("\nCoverage.py tracer stats:")
            for k in sorted(stats.keys()):
                print("%16s: %s" % (k, stats[k]))
    threading.settrace(None)
def to_py(o, keyword_fn: Callable[[kw.Keyword], Any] = _kw_name):
    if isinstance(o, ISeq):
        return _to_py_list(o, keyword_fn=keyword_fn)
    elif not isinstance(
        o, (IPersistentList, IPersistentMap, IPersistentSet, IPersistentVector)
    ):
        return o
    else:
        return _to_py_backup(o, keyword_fn=keyword_fn)
def indent(instr, nspaces=4, ntabs=0, flatten=False):
    if instr is None:
        return
    ind = '\t' * ntabs + ' ' * nspaces
    if flatten:
        pat = re.compile(r'^\s*', re.MULTILINE)
    else:
        pat = re.compile(r'^', re.MULTILINE)
    outstr = re.sub(pat, ind, instr)
    if outstr.endswith(os.linesep + ind):
        return outstr[:-len(ind)]
    else:
        return outstr
def handleCONNACK(self, response): state = self.__class__.__name__ log.error("Unexpected {packet:7} packet received in {log_source}", packet="CONNACK")
def filter_bam(coverage_info, bam_file, min_coverage, output_bam):
    contig_list = [x for x, vals in coverage_info.items() if vals["cov"] >= min_coverage]
    cli = ["samtools", "view", "-bh", "-F", "4", "-o", output_bam, "-@", "1", bam_file]
    cli += contig_list
    logger.debug("Running samtools view subprocess with command: {}".format(cli))
    p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE)
    stdout, stderr = p.communicate()
    try:
        stderr = stderr.decode("utf8")
        stdout = stdout.decode("utf8")
    except (UnicodeDecodeError, AttributeError):
        stderr = str(stderr)
        stdout = str(stdout)
    logger.info("Finished samtools view subprocess with STDOUT:\n"
                "======================================\n{}".format(stdout))
    logger.info("Finished samtools view subprocess with STDERR:\n"
                "======================================\n{}".format(stderr))
    logger.info("Finished samtools view with return code: {}".format(p.returncode))
    if not p.returncode:
        cli = ["samtools", "index", output_bam]
        logger.debug("Running samtools index subprocess with command: {}".format(cli))
        p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate()
        try:
            stderr = stderr.decode("utf8")
            stdout = stdout.decode("utf8")
        except (UnicodeDecodeError, AttributeError):
            stderr = str(stderr)
            stdout = str(stdout)
        logger.info("Finished samtools index subprocess with STDOUT:\n"
                    "======================================\n{}".format(stdout))
        logger.info("Finished samtools index subprocess with STDERR:\n"
                    "======================================\n{}".format(stderr))
        logger.info("Finished samtools index with return code: {}".format(p.returncode))
def create_tfs_tfvc_client(url, token=None):
    if token is None:
        token = os.environ.get('TFS_API_TOKEN', None)
    tfs_connection = create_tfs_connection(url, token)
    tfs_tfvc_client = tfs_connection.get_client('vsts.tfvc.v4_1.tfvc_client.TfvcClient')
    if tfs_tfvc_client is None:
        msg = 'Unable to create TFS TFVC Client, failed to connect to TFS Enterprise (%s) with provided token.'
        raise RuntimeError(msg, url)
    return tfs_tfvc_client
def visualize_qualitative_analysis(inputs, model, samples=1, batch_size=3, length=8): average = lambda dist: tf.reduce_mean( input_tensor=dist.mean(), axis=0) with tf.compat.v1.name_scope("val_reconstruction"): reconstruct = functools.partial(model.reconstruct, inputs=inputs, samples=samples) visualize_reconstruction(inputs, average(reconstruct())) visualize_reconstruction(inputs, average(reconstruct(sample_static=True)), name="static_prior") visualize_reconstruction(inputs, average(reconstruct(sample_dynamic=True)), name="dynamic_prior") visualize_reconstruction(inputs, average(reconstruct(swap_static=True)), name="swap_static") visualize_reconstruction(inputs, average(reconstruct(swap_dynamic=True)), name="swap_dynamic") with tf.compat.v1.name_scope("generation"): generate = functools.partial(model.generate, batch_size=batch_size, length=length, samples=samples) image_summary(average(generate(fix_static=True)), "fix_static") image_summary(average(generate(fix_dynamic=True)), "fix_dynamic")
def debug_script(src, pm=False, globs=None): "Debug a test script. `src` is the script, as a string." import pdb srcfilename = tempfile.mktemp(".py", "doctestdebug") f = open(srcfilename, 'w') f.write(src) f.close() try: if globs: globs = globs.copy() else: globs = {} if pm: try: execfile(srcfilename, globs, globs) except: print sys.exc_info()[1] pdb.post_mortem(sys.exc_info()[2]) else: pdb.run("execfile(%r)" % srcfilename, globs, globs) finally: os.remove(srcfilename)
def _dot_product(self, imgs_to_decode):
    return np.dot(imgs_to_decode.T, self.feature_images).T
def set_certificate_issuer( self, vault_base_url, issuer_name, provider, credentials=None, organization_details=None, attributes=None, custom_headers=None, raw=False, **operation_config): parameter = models.CertificateIssuerSetParameters(provider=provider, credentials=credentials, organization_details=organization_details, attributes=attributes) url = self.set_certificate_issuer.metadata['url'] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str') } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') body_content = self._serialize.body(parameter, 'CertificateIssuerSetParameters') request = self._client.put(url, query_parameters) response = self._client.send( request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: raise models.KeyVaultErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('IssuerBundle', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
def remove_interval(self, time):
    if self.tier_type != 'IntervalTier':
        raise Exception('Tiertype must be IntervalTier.')
    self.intervals = [i for i in self.intervals
                      if not (i[0] <= time and i[1] >= time)]
def _run_valid(self, epoch, valid_set, dry_run=False, save_path=None): costs = self.valid_step(valid_set) _, J = costs[0] new_best = False if self.best_cost - J > self.best_cost * self.min_improvement: self.best_params = self.copy_params() new_best = True if not dry_run: self.best_cost = J self.best_epoch = epoch self.save_checkpoint(save_path) self.report(dict(costs), type="valid", epoch=0 if dry_run else epoch, new_best=new_best) self.last_run_costs = costs return epoch - self.best_epoch < self.patience
def Bin(self): err = _Bin(self.transit, self.limbdark, self.settings, self.arrays) if err != _ERR_NONE: RaiseError(err)
def get_namespaces(self, prefix=None):
    params = {"prefix": prefix}
    return self.request(method="get", params=params).json()
def geckoboard_rag_widget(request):
    params = get_gecko_params(request)
    max_date = datetime.now() - timedelta(days=params['days_back'])
    metrics = Metric.objects.filter(uid__in=params['uids'])
    results = [(metric.latest_count(frequency=params['frequency'],
                                    count=not params['cumulative'],
                                    cumulative=params['cumulative'],
                                    max_date=max_date), metric.title)
               for metric in metrics]
    return tuple(results)
def _read_compressed_points_data(self, laszip_vlr, point_format): offset_to_chunk_table = struct.unpack("<q", self.stream.read(8))[0] size_of_point_data = offset_to_chunk_table - self.stream.tell() if offset_to_chunk_table <= 0: logger.warning( "Strange offset to chunk table: {}, ignoring it..".format( offset_to_chunk_table ) ) size_of_point_data = -1 points = record.PackedPointRecord.from_compressed_buffer( self.stream.read(size_of_point_data), point_format, self.header.point_count, laszip_vlr, ) return points
def get_system_per_cpu_times():
    ret = []
    for cpu_t in _psutil_mswindows.get_system_cpu_times():
        user, system, idle = cpu_t
        item = _cputimes_ntuple(user, system, idle)
        ret.append(item)
    return ret
def _init_from_bool(self, z, x):
    if z is None:
        raise QiskitError("z vector must not be None.")
    if x is None:
        raise QiskitError("x vector must not be None.")
    if len(z) != len(x):
        raise QiskitError("length of z and x vectors must be "
                          "the same. (z: {} vs x: {})".format(len(z), len(x)))
    z = _make_np_bool(z)
    x = _make_np_bool(x)
    self._z = z
    self._x = x
    return self
def _expand_default(self, option): if self.parser is None or not self.default_tag: return option.help optname = option._long_opts[0][2:] try: provider = self.parser.options_manager._all_options[optname] except KeyError: value = None else: optdict = provider.get_option_def(optname) optname = provider.option_attrname(optname, optdict) value = getattr(provider.config, optname, optdict) value = utils._format_option_value(optdict, value) if value is optparse.NO_DEFAULT or not value: value = self.NO_DEFAULT_VALUE return option.help.replace(self.default_tag, str(value))
def map(self, func, value_shape=None, dtype=None): if value_shape is None or dtype is None: try: mapped = func(random.randn(*self.plan).astype(self.dtype)) except Exception: first = self._rdd.first() if first: mapped = func(first[1]) if value_shape is None: value_shape = mapped.shape if dtype is None: dtype = mapped.dtype chunked_dims = where(self.plan != self.vshape)[0] unchunked_dims = where(self.plan == self.vshape)[0] if len(value_shape) != len(self.plan): raise NotImplementedError('map on ChunkedArray cannot drop dimensions') if any([value_shape[i] != self.plan[i] for i in chunked_dims]): raise ValueError('map cannot change the sizes of chunked dimensions') def check_and_apply(v): new = func(v) if len(unchunked_dims) > 0: if any([new.shape[i] != value_shape[i] for i in unchunked_dims]): raise Exception("Map operation did not produce values of uniform shape.") if len(chunked_dims) > 0: if any([v.shape[i] != new.shape[i] for i in chunked_dims]): raise Exception("Map operation changed the size of a chunked dimension") return new rdd = self._rdd.mapValues(check_and_apply) vshape = [value_shape[i] if i in unchunked_dims else self.vshape[i] for i in range(len(self.vshape))] newshape = r_[self.kshape, vshape].astype(int).tolist() return self._constructor(rdd, shape=tuple(newshape), dtype=dtype, plan=asarray(value_shape)).__finalize__(self)
def get_data(self, cache=True, as_text=False, parse_form_data=False): rv = getattr(self, '_cached_data', None) if rv is None: if parse_form_data: self._load_form_data() rv = self.stream.read() if cache: self._cached_data = rv if as_text: rv = rv.decode(self.charset, self.encoding_errors) return rv
def main(mash_output, sample_id):
    logger.info("Reading file : {}".format(mash_output))
    read_mash_output = open(mash_output)
    dic = {}
    median_list = []
    filtered_dic = {}
    logger.info("Generating dictionary and list to pre-process the final json")
    for line in read_mash_output:
        tab_split = line.split("\t")
        identity = tab_split[0]
        median_multiplicity = tab_split[2]
        query_id = tab_split[4]
        dic[query_id] = [identity, median_multiplicity]
        median_list.append(float(median_multiplicity))
    output_json = open(".".join(mash_output.split(".")[:-1]) + ".json", "w")
    if len(median_list) > 0:
        median_cutoff = median(median_list)
        logger.info("Generating final json to dump to a file")
        for k, v in dic.items():
            copy_number = int(float(v[1]) / median_cutoff)
            if float(v[1]) > median_cutoff:
                filtered_dic["_".join(k.split("_")[0:3])] = [round(float(v[0]), 2), copy_number]
        logger.info("Exported dictionary has {} entries".format(len(filtered_dic)))
    else:
        logger.error("No matches were found using mash screen for the queried reads")
    output_json.write(json.dumps(filtered_dic))
    output_json.close()
    json_dic = {
        "tableRow": [{
            "sample": sample_id,
            "data": [{
                "header": "Mash Screen",
                "table": "plasmids",
                "patlas_mashscreen": filtered_dic,
                "value": len(filtered_dic)
            }]
        }],
    }
    with open(".report.json", "w") as json_report:
        json_report.write(json.dumps(json_dic, separators=(",", ":")))
def add_import( self, sym: sym.Symbol, module: types.ModuleType, *aliases: sym.Symbol ) -> None: self._imports.swap(lambda m: m.assoc(sym, module)) if aliases: self._import_aliases.swap( lambda m: m.assoc( *itertools.chain.from_iterable([(alias, sym) for alias in aliases]) ) )
def delete_report(self, report):
    url = ACCOUNTS_API.format(report.account_id) + "/reports/{}/{}".format(
        report.type, report.report_id)
    response = self._delete_resource(url)
    return True
def delete(self, blocksize=100): from .columns import MODELS_REFERENCED if not self._model._no_fk or self._model._namespace in MODELS_REFERENCED: raise QueryError("Can't delete entities of models with foreign key relationships") de = [] i = 0 for result in self.iter_result(pagesize=blocksize): de.append(result) i += 1 if i >= blocksize: session.delete(de) del de[:] i = 0 if de: session.delete(de)
def _ep_need_close(self):
    LOG.debug("Session %s close requested - closing...", self._name)
    links = self._links.copy()
    for link in links:
        link._session_closed()
def users(store): user_objs = list(store.users()) total_events = store.user_events().count() for user_obj in user_objs: if user_obj.get('institutes'): user_obj['institutes'] = [store.institute(inst_id) for inst_id in user_obj.get('institutes')] else: user_obj['institutes'] = [] user_obj['events'] = store.user_events(user_obj).count() user_obj['events_rank'] = event_rank(user_obj['events']) return dict( users=sorted(user_objs, key=lambda user: -user['events']), total_events=total_events, )
def linear_connection(plist, lane): logger.debug( "Establishing linear connection with processes: {}".format(plist)) res = [] previous = None for p in plist: if not previous: previous = p continue res.append({ "input": { "process": previous, "lane": lane }, "output": { "process": p, "lane": lane } }) previous = p return res
def run_migrations_online(): connectable = settings.engine with connectable.connect() as connection: context.configure( connection=connection, transaction_per_migration=True, target_metadata=target_metadata, compare_type=COMPARE_TYPE, ) with context.begin_transaction(): context.run_migrations()
def map_generic(self, func): def process_record(val): newval = empty(1, dtype="object") newval[0] = func(val) return newval rdd = self._rdd.mapValues(process_record) nchunks = self.getnumber(self.plan, self.vshape) newshape = tuple([int(s) for s in r_[self.kshape, nchunks]]) newsplit = len(self.shape) return BoltArraySpark(rdd, shape=newshape, split=newsplit, ordered=self._ordered, dtype="object")
def get_conn(self):
    if not self._conn:
        http_authorized = self._authorize()
        self._conn = build('compute', self.api_version,
                           http=http_authorized, cache_discovery=False)
    return self._conn
def parse_args(argv):
    global g_new_messages_to_exclude
    global g_old_messages_to_remove
    global g_load_java_message_filename
    global g_save_java_message_filename
    global g_print_java_messages
    if len(argv) < 2:
        usage()
    i = 1
    while i < len(argv):
        s = argv[i]
        if s == "--inputfileadd":
            i += 1
            if i >= len(argv):
                usage()
            g_new_messages_to_exclude = argv[i]
        elif s == "--inputfilerm":
            i += 1
            if i >= len(argv):
                usage()
            g_old_messages_to_remove = argv[i]
        elif s == "--loadjavamessage":
            i += 1
            if i >= len(argv):
                usage()
            g_load_java_message_filename = argv[i]
        elif s == "--savejavamessage":
            i += 1
            if i >= len(argv):
                usage()
            g_save_java_message_filename = argv[i]
        elif s == '--printjavamessage':
            i += 1
            if i >= len(argv):
                usage()
            g_print_java_messages = True
            g_load_java_message_filename = argv[i]
        elif s == '--help':
            usage()
        else:
            unknown_arg(s)
        i += 1
def expand_files(self, modules): result, errors = utils.expand_modules( modules, self.config.black_list, self.config.black_list_re ) for error in errors: message = modname = error["mod"] key = error["key"] self.set_current_module(modname) if key == "fatal": message = str(error["ex"]).replace(os.getcwd() + os.sep, "") self.add_message(key, args=message) return result
def _m(self):
    assert not hasattr(self, "_interfaces") or not self._interfaces, \
        "Too late to change direction of interface"
    self._direction = DIRECTION.asIntfDirection(DIRECTION.opposite(self._masterDir))
    return self
def __nn_filter_helper(R_data, R_indices, R_ptr, S, aggregate): s_out = np.empty_like(S) for i in range(len(R_ptr)-1): targets = R_indices[R_ptr[i]:R_ptr[i+1]] if not len(targets): s_out[i] = S[i] continue neighbors = np.take(S, targets, axis=0) if aggregate is np.average: weights = R_data[R_ptr[i]:R_ptr[i+1]] s_out[i] = aggregate(neighbors, axis=0, weights=weights) else: s_out[i] = aggregate(neighbors, axis=0) return s_out
def fit(self, Z, **fit_params):
    Zt, fit_params = self._pre_transform(Z, **fit_params)
    self.steps[-1][-1].fit(Zt, **fit_params)
    Zt.unpersist()
    return self
def _make_content_item(node, mime_type=None, alternate_data=None):
    raw = node.data
    if getattr(node, 'encoding', None) == 'zlib':
        try:
            raw = zlib.decompress(node.data)
        except Exception as exc:
            if alternate_data is not None:
                try:
                    raw = zlib.decompress(alternate_data)
                except Exception:
                    raise exc
            else:
                raise
    if mime_type is None:
        mime_type = node.mime_type
    raw = raw.decode('utf8').encode('utf8')
    return streamcorpus.ContentItem(raw=raw, media_type=mime_type)
def simUnit(self, synthesisedUnit: Unit, until: float, extraProcesses=[]): beforeSim = self.config.beforeSim if beforeSim is not None: beforeSim(self, synthesisedUnit) add_proc = self.add_process for p in extraProcesses: add_proc(p(self)) self._initUnitSignals(synthesisedUnit) self.run(until)
def parse(self, hcl, canonicalize=False):
    return self.request("parse",
                        json={"JobHCL": hcl, "Canonicalize": canonicalize},
                        method="post",
                        allow_redirects=True).json()
def has_no_unchecked_field(self, locator, **kwargs):
    kwargs["checked"] = False
    return self.has_no_selector("field", locator, **kwargs)
def reconnect(self):
    self.log.debug("reconnect(): Initializing reconnect sequence..")
    self.connected.clear()
    self.reconnect_required.set()
    if self.socket:
        self.socket.close()
def lowpass_filter(data, cutoff, fs, order=5):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = signal.butter(order, normal_cutoff, btype='low', analog=False)
    y = signal.lfilter(b, a, data)
    return y
def _add(self, to_add): if PyFunceble.CONFIGURATION["mining"]: if PyFunceble.INTERN["file_to_test"] not in PyFunceble.INTERN["mined"]: PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]] = {} for element in to_add: if ( element in PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]] ): PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]][ element ].extend(to_add[element]) else: PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]][ element ] = to_add[element] PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]][ element ] = List( PyFunceble.INTERN["mined"][PyFunceble.INTERN["file_to_test"]][ element ] ).format() self._backup()
def square(duration: int, amp: complex, period: float = None,
           phase: float = 0, name: str = None) -> SamplePulse:
    if period is None:
        period = duration
    return _sampled_square_pulse(duration, amp, period, phase=phase, name=name)
def size(self, train=False, valid=False, xval=False): tm = ModelBase._get_metrics(self, train, valid, xval) m = {} for k, v in tm.items(): m[k] = None if v is None else [v[2] for v in v._metric_json["centroid_stats"].cell_values] return list(m.values())[0] if len(m) == 1 else m
def find_files(filenames, recursive, exclude): while filenames: name = filenames.pop(0) if recursive and os.path.isdir(name): for root, directories, children in os.walk(name): filenames += [os.path.join(root, f) for f in children if match_file(os.path.join(root, f), exclude)] directories[:] = [d for d in directories if match_file(os.path.join(root, d), exclude)] else: if not is_exclude_file(name, exclude): yield name
def is_literal_or_name(value): try: ast.literal_eval(value) return True except (SyntaxError, ValueError): pass if value.strip() in ['dict()', 'list()', 'set()']: return True return re.match(r'^\w+\s*$', value)
def complete_restore( self, location_name, operation_id, last_backup_name, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._complete_restore_initial( location_name=location_name, operation_id=operation_id, last_backup_name=last_backup_name, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
def post(self, headers={}, body=""): code, message = self.command("POST") if code != 340: raise NNTPReplyError(code, message) hdrs = utils.unparse_headers(headers) self.socket.sendall(hdrs) if isinstance(body, basestring): body = cStringIO.StringIO(body) illegal = False for line in body: if line.startswith("."): line = "." + line if line.endswith("\r\n"): line = line[:-2] elif line.endswith("\n"): line = line[:-1] if any(c in line for c in "\0\r"): illegal = True break self.socket.sendall(line + "\r\n") self.socket.sendall(".\r\n") code, message = self.status() if illegal: raise NNTPDataError("Illegal characters found") if code != 240: raise NNTPReplyError(code, message) message_id = message.split(None, 1)[0] if message_id.startswith("<") and message_id.endswith(">"): return message_id return True
def has_context_loop(state, incorrect_msg, exact_names): return _test( state, incorrect_msg or MSG_INCORRECT_LOOP, exact_names, tv_name="_target_vars", highlight_name="target", )
def imcrop(img, bboxes, scale=1.0, pad_fill=None):
    chn = 1 if img.ndim == 2 else img.shape[2]
    if pad_fill is not None:
        if isinstance(pad_fill, (int, float)):
            pad_fill = [pad_fill for _ in range(chn)]
        assert len(pad_fill) == chn
    _bboxes = bboxes[None, ...] if bboxes.ndim == 1 else bboxes
    scaled_bboxes = bbox_scaling(_bboxes, scale).astype(np.int32)
    clipped_bbox = bbox_clip(scaled_bboxes, img.shape)
    patches = []
    for i in range(clipped_bbox.shape[0]):
        x1, y1, x2, y2 = tuple(clipped_bbox[i, :])
        if pad_fill is None:
            patch = img[y1:y2 + 1, x1:x2 + 1, ...]
        else:
            # Pad the patch so it keeps the size implied by the scaled bbox.
            _x1, _y1, _x2, _y2 = tuple(scaled_bboxes[i, :])
            if chn == 1:
                patch_shape = (_y2 - _y1 + 1, _x2 - _x1 + 1)
            else:
                patch_shape = (_y2 - _y1 + 1, _x2 - _x1 + 1, chn)
            patch = np.array(pad_fill, dtype=img.dtype) * np.ones(
                patch_shape, dtype=img.dtype)
            x_start = 0 if _x1 >= 0 else -_x1
            y_start = 0 if _y1 >= 0 else -_y1
            w = x2 - x1 + 1
            h = y2 - y1 + 1
            patch[y_start:y_start + h, x_start:x_start + w, ...] = \
                img[y1:y1 + h, x1:x1 + w, ...]
        patches.append(patch)
    if bboxes.ndim == 1:
        return patches[0]
    else:
        return patches
def validate(self, obj, value):
    try:
        if issubclass(value, self.klass):
            return value
    except Exception:
        if (value is None) and (self._allow_none):
            return value
    self.error(obj, value)
def plot( self, data, bbox=None, plot_type='scatter', fig_kwargs=None, bmap_kwargs=None, plot_kwargs=None, cbar_kwargs=None): from mpl_toolkits.basemap import Basemap fig_kwargs = fig_kwargs or {} bmap_kwargs = bmap_kwargs or {} plot_kwargs = plot_kwargs or {} cbar_kwargs = cbar_kwargs or {} if not bbox: bbox = ( self.nodes_df.y.min(), self.nodes_df.x.min(), self.nodes_df.y.max(), self.nodes_df.x.max()) fig, ax = plt.subplots(**fig_kwargs) bmap = Basemap( bbox[1], bbox[0], bbox[3], bbox[2], ax=ax, **bmap_kwargs) bmap.drawcoastlines() bmap.drawmapboundary() x, y = bmap(self.nodes_df.x.values, self.nodes_df.y.values) if plot_type == 'scatter': plot = bmap.scatter( x, y, c=data.values, **plot_kwargs) elif plot_type == 'hexbin': plot = bmap.hexbin( x, y, C=data.values, **plot_kwargs) bmap.colorbar(plot, **cbar_kwargs) return bmap, fig, ax
def update_configuration(cfgfile=None): configobj.DEFAULT_INTERPOLATION = 'template' cfgfile = configuration_file(cfgfile) cfg = configobj.ConfigObj(cfgfile, configspec=cfgspec, encoding='utf-8') validator = Validator() val = cfg.validate(validator) if val is not True: raise ValueError('Invalid configuration: %s' % val) if len(cfg['capture']['files']) != len(cfg['capture']['flavors']): raise ValueError('List of files and flavors do not match') globals()['__config'] = cfg logger_init() if cfg['server'].get('url', '').endswith('/'): logger.warning('Base URL ends with /. This is most likely a ' 'configuration error. The URL should contain nothing ' 'of the service paths.') logger.info('Configuration loaded from %s' % cfgfile) check() return cfg
def times_csv(path, times, annotations=None, delimiter=',', fmt='%0.3f'):
    if annotations is not None and len(annotations) != len(times):
        raise ParameterError('len(annotations) != len(times)')
    with open(path, 'w') as output_file:
        writer = csv.writer(output_file, delimiter=delimiter)
        if annotations is None:
            for t in times:
                writer.writerow([fmt % t])
        else:
            for t, lab in zip(times, annotations):
                writer.writerow([(fmt % t), lab])
def add_subgraph(self, info): if not info.initialized: return graph = self._request_graph(info.ui.control) if graph is not None: subgraph = Subgraph() retval = subgraph.edit_traits(parent = info.ui.control, kind = "livemodal") if retval.result: graph.subgraphs.append(subgraph)
def _onDeviceStatus(self, client, userdata, pahoMessage): try: status = Status(pahoMessage) self.logger.debug("Received %s action from %s" % (status.action, status.clientId)) if self.deviceStatusCallback: self.deviceStatusCallback(status) except InvalidEventException as e: self.logger.critical(str(e))
def get_input(prompt, default=None, exit_msg='bye!'):
    try:
        response = six.moves.input(prompt)
    except (KeyboardInterrupt, EOFError):
        print()
        print(exit_msg)
        exit()
    try:
        return int(response)
    except ValueError:
        if response.strip() == "" and default is not None:
            return default
        else:
            return response
def encode(self): header = bytearray(1) varHeader = encode16Int(self.msgId) header[0] = 0xB0 header.extend(encodeLength(len(varHeader))) header.extend(varHeader) self.encoded = header return str(header) if PY2 else bytes(header)
def set_piece_at(self, square, piece, from_hand=False, into_hand=False): if from_hand: self.remove_piece_from_hand(piece.piece_type, self.turn) self.remove_piece_at(square, into_hand) self.pieces[square] = piece.piece_type mask = BB_SQUARES[square] piece_type = piece.piece_type self.piece_bb[piece_type] |= mask if piece_type == KING: self.king_squares[piece.color] = square self.occupied.ixor(mask, piece.color, square) if piece.color == BLACK: piece_index = (piece.piece_type - 1) * 2 else: piece_index = (piece.piece_type - 1) * 2 + 1 self.incremental_zobrist_hash ^= DEFAULT_RANDOM_ARRAY[81 * piece_index + 9 * rank_index(square) + file_index(square)]
def _check_relative_import( self, modnode, importnode, importedmodnode, importedasname ): if not self.linter.is_message_enabled("relative-import"): return None if importedmodnode.file is None: return False if modnode is importedmodnode: return False if modnode.absolute_import_activated() or getattr(importnode, "level", None): return False if importedmodnode.name != importedasname: self.add_message( "relative-import", args=(importedasname, importedmodnode.name), node=importnode, ) return None return None
def sold_out_and_unregistered(context):
    user = user_for_context(context)
    if hasattr(user, "attendee") and user.attendee.completed_registration:
        return None
    ticket_category = settings.TICKET_PRODUCT_CATEGORY
    categories = available_categories(context)
    return ticket_category not in [cat.id for cat in categories]
def is_token_from_emulator(auth_header: str) -> bool: if not auth_header: return False parts = auth_header.split(' ') if len(parts) != 2: return False auth_scheme = parts[0] bearer_token = parts[1] if auth_scheme != 'Bearer': return False token = jwt.decode(bearer_token, verify=False) if not token: return False issuer = token['iss'] if not issuer: return False issuer_list = EmulatorValidation.TO_BOT_FROM_EMULATOR_TOKEN_VALIDATION_PARAMETERS.issuer if issuer_list and not issuer in issuer_list: return False return True
def render_template(template_file, dst_file, **kwargs):
    with open(template_file) as f:
        template_text = f.read()
    dst_text = template_text
    for key, value in kwargs.items():
        dst_text = dst_text.replace("{{" + key + "}}", value)
    with open(dst_file, "wt") as f:
        f.write(dst_text)
def is_subdomain(self, domain=None):
    if domain:
        to_test = domain
    elif self.element:
        to_test = self.element
    else:
        to_test = PyFunceble.INTERN["to_test"]
    return self.is_domain_valid(to_test, subdomain_check=True)
def make_logging_handlers_and_tools(self, multiproc=False): log_stdout = self.log_stdout if sys.stdout is self._stdout_to_logger: log_stdout = False if self.log_config: if multiproc: proc_log_config = self._mp_config else: proc_log_config = self._sp_config if proc_log_config: if isinstance(proc_log_config, dict): new_dict = self._handle_dict_config(proc_log_config) dictConfig(new_dict) else: parser = self._handle_config_parsing(proc_log_config) memory_file = self._parser_to_string_io(parser) fileConfig(memory_file, disable_existing_loggers=False) if log_stdout: std_name, std_level = self.log_stdout stdout = StdoutToLogger(std_name, log_level=std_level) stdout.start() self._tools.append(stdout)
def _get_index_urls_locations(self, project_name): def mkurl_pypi_url(url): loc = posixpath.join(url, project_url_name) if not loc.endswith('/'): loc = loc + '/' return loc project_url_name = urllib_parse.quote(project_name.lower()) if self.index_urls: main_index_url = Link( mkurl_pypi_url(self.index_urls[0]), trusted=True, ) page = self._get_page(main_index_url) if page is None and PyPI.netloc not in str(main_index_url): warnings.warn( "Failed to find %r at %s. It is suggested to upgrade " "your index to support normalized names as the name in " "/simple/{name}." % (project_name, main_index_url), RemovedInPip8Warning, ) project_url_name = self._find_url_name( Link(self.index_urls[0], trusted=True), project_url_name, ) or project_url_name if project_url_name is not None: return [mkurl_pypi_url(url) for url in self.index_urls] return []
def _setup_logging(self, log_level: str): level = getattr(logging, log_level) names = ( 'aiohttp.access', 'aiohttp.internal', 'aiohttp.server', 'aiohttp.web', self.name) for name in names: setup_logger(name=name, stream=sys.stderr, level=level)
def parse(self, selector): log.debug(self.obj) tokens = lex(selector) if self.peek(tokens, 'operator') == '*': self.match(tokens, 'operator') results = list(object_iter(self.obj)) else: results = self.selector_production(tokens) results = [node.value for node in results] if len(results) == 1: return results[0] elif not len(results): return None return results
def bandpass_filter(data, low, high, fs, order=5):
    nyq = 0.5 * fs
    low = low / nyq
    high = high / nyq
    b, a = signal.butter(order, [low, high], btype='band')
    y = signal.lfilter(b, a, data)
    return y
def build_schema(m, c_c): schema = ET.Element('xs:schema') schema.set('xmlns:xs', 'http://www.w3.org/2001/XMLSchema') global_filter = lambda selected: ooaofooa.is_global(selected) for s_dt in m.select_many('S_DT', global_filter): datatype = build_type(s_dt) if datatype is not None: schema.append(datatype) scope_filter = lambda selected: ooaofooa.is_contained_in(selected, c_c) for s_dt in m.select_many('S_DT', scope_filter): datatype = build_type(s_dt) if datatype is not None: schema.append(datatype) component = build_component(m, c_c) schema.append(component) return schema
def execute(option): namelist_option = [] makefile_option = [] flags = "" for entry in option: key = entry.keys()[0] if key == "Problem Size": namelist_option.append({"SIZE": entry[key]}) elif key == "F90": makefile_option.append(entry) else: flags += entry[key] + " " makefile_option.append({"F90FLAGS": flags}) namelist = create_input(namelist_option, "namelist", template_location="templates") makefile_include = create_input(makefile_option, "Makefile.include", template_location="templates") benchmark_base = "shallow" location = benchmark_base + "/original/namelist" my_file = open(location, 'w') my_file.write(namelist) my_file.flush() location = benchmark_base + "/common/Makefile.include" my_file = open(location, 'w') my_file.write(makefile_include) my_file.flush() base_path = benchmark_base + "/original" import subprocess make_process = subprocess.Popen(["make", "clean"], cwd=base_path, stderr=subprocess.PIPE, stdout=subprocess.PIPE) if make_process.wait() != 0: return False, [] make_process = subprocess.Popen(["make"], cwd=base_path, stderr=subprocess.PIPE, stdout=subprocess.PIPE) if make_process.wait() != 0: return False, [] make_process = subprocess.Popen(["./shallow_base"], cwd=base_path, stderr=subprocess.PIPE, stdout=subprocess.PIPE) if make_process.wait() != 0: return False, [] stdout = make_process.stdout.read() for line in stdout.split("\n"): if "Time-stepping" in line: total_time = line.split()[2] return True, total_time
def img_from_vgg(x):
    x = x.transpose((1, 2, 0))
    x[:, :, 0] += 103.939
    x[:, :, 1] += 116.779
    x[:, :, 2] += 123.68
    x = x[:, :, ::-1]
    return x
def remove_unique_identifiers(identifiers_to_tags, pipeline_links):
    for index, val in enumerate(pipeline_links):
        if val["input"]["process"] != "__init__":
            val["input"]["process"] = identifiers_to_tags[val["input"]["process"]]
        if val["output"]["process"] != "__init__":
            val["output"]["process"] = identifiers_to_tags[val["output"]["process"]]
    return pipeline_links
def fetch_items(self, category, **kwargs): from_date = kwargs['from_date'] if self.client.version[0] == 2 and self.client.version[1] == 8: fetcher = self._fetch_gerrit28(from_date) else: fetcher = self._fetch_gerrit(from_date) for review in fetcher: yield review
def _imported_module(self, node, mod_path, relative): module = node.root() context_name = module.name if relative: mod_path = "%s.%s" % (".".join(context_name.split(".")[:-1]), mod_path) if self.compute_module(context_name, mod_path): if not hasattr(module, "depends"): module.depends = [] mod_paths = module.depends if mod_path not in mod_paths: mod_paths.append(mod_path)
def _get_existing_instance(self, query, value): if self.columns: result = query.filter_by( **{prop.key: value.get(prop.key) for prop in self.related_keys} ).one() else: result = query.get([value.get(prop.key) for prop in self.related_keys]) if result is None: raise NoResultFound return result
def deprecated(*args): def wrap(func): def wrapped_func(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning) return func(*args, **kwargs) return wrapped_func if len(args) == 1 and callable(args[0]): msg = "Function '%s' will be deprecated in future versions of " \ "Neurosynth." % args[0].__name__ return wrap(args[0]) else: msg = args[0] return wrap
def _get_required_args(fn): argspec = tf_inspect.getfullargspec(fn) args = argspec.args if tf_inspect.isclass(fn): args = args[1:] if argspec.defaults: args = args[:-len(argspec.defaults)] return tuple(args)
def compute_lst(self): if self.header[b'telescope_id'] == 6: self.coords = gbt_coords elif self.header[b'telescope_id'] == 4: self.coords = parkes_coords else: raise RuntimeError("Currently only Parkes and GBT supported") if HAS_SLALIB: dut1 = 0.0 mjd = self.header[b'tstart'] tellong = np.deg2rad(self.coords[1]) last = s.sla_gmst(mjd) - tellong + s.sla_eqeqx(mjd) + dut1 if last < 0.0 : last = last + 2.0*np.pi return last else: raise RuntimeError("This method requires pySLALIB")
def register_metric(metric_name: str) -> Callable[..., Any]: def decorate(fn): fn_name = fn.__module__ + ':' + fn.__name__ if metric_name in _REGISTRY and _REGISTRY[metric_name] != fn_name: log.warning('"{}" is already registered as a metric name, the old function will be ignored' .format(metric_name)) _REGISTRY[metric_name] = fn_name return fn return decorate
def _check_type(var, vtype):
    if vtype is None:
        return var is None
    if isinstance(vtype, _primitive_type):
        return var == vtype
    if vtype is str:
        return isinstance(var, _str_type)
    if vtype is int:
        return isinstance(var, _int_type)
    if vtype is numeric:
        return isinstance(var, _num_type)
    if isinstance(vtype, MagicType):
        return vtype.check(var)
    if isinstance(vtype, type):
        return isinstance(var, vtype)
    if isinstance(vtype, list):
        elem_type = U(*vtype)
        return isinstance(var, list) and all(_check_type(item, elem_type) for item in var)
    if isinstance(vtype, set):
        elem_type = U(*vtype)
        return isinstance(var, set) and all(_check_type(item, elem_type) for item in var)
    if isinstance(vtype, tuple):
        return (isinstance(var, tuple) and len(vtype) == len(var) and
                all(_check_type(var[i], vtype[i]) for i in range(len(vtype))))
    if isinstance(vtype, dict):
        ttkv = U(*viewitems(vtype))
        return isinstance(var, dict) and all(_check_type(kv, ttkv) for kv in viewitems(var))
    if isinstance(vtype, (FunctionType, BuiltinFunctionType)):
        return vtype(var)
    raise RuntimeError("Invalid type %r in _check_type()" % vtype)
def _basilisp_bytecode( mtime: int, source_size: int, code: List[types.CodeType] ) -> bytes: data = bytearray(MAGIC_NUMBER) data.extend(_w_long(mtime)) data.extend(_w_long(source_size)) data.extend(marshal.dumps(code)) return data
def select_name_pattern(source, pat): return filter(lambda x: pat.match(x.xml_name) is not None, select_elements(source))
def get_overrides_filename(variable):
    filename = os.environ.get(variable)
    if filename is None:
        msg = 'Please set the {} environment variable.'.format(variable)
        raise EnvironmentError(msg)
    return filename
def get_order(self, order_id):
    resp = self.get('/orders/{}'.format(order_id))
    return Order(resp)
def networkdays(from_date, to_date, locale='en-US'):
    holidays = locales[locale]
    return workdays.networkdays(from_date, to_date, holidays)
def start_proxy(self): self._download_sql_proxy_if_needed() if self.sql_proxy_process: raise AirflowException("The sql proxy is already running: {}".format( self.sql_proxy_process)) else: command_to_run = [self.sql_proxy_path] command_to_run.extend(self.command_line_parameters) try: self.log.info("Creating directory %s", self.cloud_sql_proxy_socket_directory) os.makedirs(self.cloud_sql_proxy_socket_directory) except OSError: pass command_to_run.extend(self._get_credential_parameters()) self.log.info("Running the command: `%s`", " ".join(command_to_run)) self.sql_proxy_process = Popen(command_to_run, stdin=PIPE, stdout=PIPE, stderr=PIPE) self.log.info("The pid of cloud_sql_proxy: %s", self.sql_proxy_process.pid) while True: line = self.sql_proxy_process.stderr.readline().decode('utf-8') return_code = self.sql_proxy_process.poll() if line == '' and return_code is not None: self.sql_proxy_process = None raise AirflowException( "The cloud_sql_proxy finished early with return code {}!".format( return_code)) if line != '': self.log.info(line) if "googleapi: Error" in line or "invalid instance name:" in line: self.stop_proxy() raise AirflowException( "Error when starting the cloud_sql_proxy {}!".format( line)) if "Ready for new connections" in line: return
def init_role(self, role_name, role_vms, role_perms): pvms = self.get_session.query(sqla_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] role = self.find_role(role_name) if not role: role = self.add_role(role_name) if len(role.permissions) == 0: self.log.info('Initializing permissions for role:%s in the database.', role_name) role_pvms = set() for pvm in pvms: if pvm.view_menu.name in role_vms and pvm.permission.name in role_perms: role_pvms.add(pvm) role.permissions = list(role_pvms) self.get_session.merge(role) self.get_session.commit() else: self.log.debug('Existing permissions for the role:%s ' 'within the database will persist.', role_name)
def glm(interactive=True, echo=True, testing=False): def demo_body(go): go() h2o.init() go() prostate = h2o.load_dataset("prostate") go() prostate.describe() go() train, test = prostate.split_frame(ratios=[0.70]) go() train["CAPSULE"] = train["CAPSULE"].asfactor() test["CAPSULE"] = test["CAPSULE"].asfactor() go() from h2o.estimators import H2OGeneralizedLinearEstimator prostate_glm = H2OGeneralizedLinearEstimator(family="binomial", alpha=[0.5]) prostate_glm.train(x=["AGE", "RACE", "PSA", "VOL", "GLEASON"], y="CAPSULE", training_frame=train) go() prostate_glm.show() go() predictions = prostate_glm.predict(test) predictions.show() go() performance = prostate_glm.model_performance(test) performance.show() _run_demo(demo_body, interactive, echo, testing)
def dsync_handler(self, args): self.opt.recursive = True self.opt.sync_check = True self.opt.force = True self.validate('cmd|s3,local|s3,local', args) source = args[1] target = args[2] self.s3handler().dsync_files(source, target)
def mkstemp(self, suffix, prefix, directory=None):
    if not directory:
        directory = self.artifacts_dir
    fd, fname = tempfile.mkstemp(suffix, prefix, directory)
    os.close(fd)
    os.chmod(fname, 0o644)
    return fname
def patch_protocol_for_agent(protocol): old_makeConnection = protocol.makeConnection old_connectionLost = protocol.connectionLost def new_makeConnection(transport): patch_transport_fake_push_producer(transport) patch_transport_abortConnection(transport, protocol) return old_makeConnection(transport) def new_connectionLost(reason): if protocol._fake_connection_aborted and reason.check(ConnectionDone): reason = Failure(ConnectionAborted()) return old_connectionLost(reason) protocol.makeConnection = new_makeConnection protocol.connectionLost = new_connectionLost protocol._fake_connection_aborted = False
def cinder(*arg): check_event_type(Openstack.Cinder, *arg) event_type = arg[0] def decorator(func): if event_type.find("*") != -1: event_type_pattern = pre_compile(event_type) cinder_customer_process_wildcard[event_type_pattern] = func else: cinder_customer_process[event_type] = func log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type)) @functools.wraps(func) def wrapper(*args, **kwargs): func(*args, **kwargs) return wrapper return decorator