def event(uid):
    db = get_session()
    event = db.query(RecordedEvent).filter(RecordedEvent.uid == uid).first() \
        or db.query(UpcomingEvent).filter(UpcomingEvent.uid == uid).first()
    if event:
        return make_data_response(event.serialize())
    return make_error_response('No event with specified uid', 404)
def parse_database_url(url):
    if url == "sqlite://:memory:":
        raise Exception(
            'Your url is "sqlite://:memory:", if you want '
            'an sqlite memory database, just use "sqlite://"'
        )
    url_parts = urlsplit(url)
    engine = get_engine(url_parts.scheme)
    database, schema = parse_path(url_parts.path)
    port = url_parts.port
    host = url_parts.hostname
    user = url_parts.username
    password = url_parts.password
    params = {key: val.pop() for key, val in parse_qs(url_parts.query).items()}
    return DatabaseInfo(
        engine=engine,
        name=database,
        schema=schema,
        user=user,
        password=password,
        host=host,
        port=port,
        params=params,
    )
def _matmul(a, b, transpose_a=False, transpose_b=False, adjoint_a=False,
            adjoint_b=False, a_is_sparse=False, b_is_sparse=False, name=None):
    if a_is_sparse or b_is_sparse:
        raise NotImplementedError('Numpy backend does not support sparse matmul.')
    if transpose_a or adjoint_a:
        a = _matrix_transpose(a, conjugate=adjoint_a)
    if transpose_b or adjoint_b:
        b = _matrix_transpose(b, conjugate=adjoint_b)
    return np.matmul(a, b)
def _broadcast_cat_event_and_params(event, params, base_dtype):
    if dtype_util.is_integer(event.dtype):
        pass
    elif dtype_util.is_floating(event.dtype):
        event = tf.cast(event, dtype=tf.int32)
    else:
        raise TypeError("`value` should have integer `dtype` or "
                        "`self.dtype` ({})".format(base_dtype))
    shape_known_statically = (
        tensorshape_util.rank(params.shape) is not None and
        tensorshape_util.is_fully_defined(params.shape[:-1]) and
        tensorshape_util.is_fully_defined(event.shape))
    if not shape_known_statically or params.shape[:-1] != event.shape:
        params *= tf.ones_like(event[..., tf.newaxis], dtype=params.dtype)
        params_shape = tf.shape(input=params)[:-1]
        event *= tf.ones(params_shape, dtype=event.dtype)
        if tensorshape_util.rank(params.shape) is not None:
            tensorshape_util.set_shape(event, params.shape[:-1])
    return event, params
def add_patch(self, patch):
    patchline = PatchLine(patch)
    patch = patchline.get_patch()
    if patch:
        self.patch2line[patch] = patchline
    self.patchlines.append(patchline)
def parse_rrset_record_values(e_resource_records):
    records = []
    for e_record in e_resource_records:
        for e_value in e_record:
            records.append(e_value.text)
    return records
def verify_profile_name(msg, cfg):
    if msg.profile not in cfg.data:
        raise UnknownProfileError(msg.profile)
def bots(self):
    json = self.skype.conn("GET", "{0}/agents".format(SkypeConnection.API_BOT),
                           auth=SkypeConnection.Auth.SkypeToken).json().get("agentDescriptions", [])
    return [self.merge(SkypeBotUser.fromRaw(self.skype, raw)) for raw in json]
def _makeApiCall(self, parameters=None):
    r = self._apiClient.get(self._url, parameters)
    if r.status_code == 200:
        return r.json()
    else:
        raise Exception("HTTP %s %s" % (r.status_code, r.text))
def volume_down(self):
    self._volume_level -= self._volume_step / self._max_volume
    self._device.vol_down(num=self._volume_step)
def bytesize(self, byteorder='@'):
    seed_size = struct.calcsize(byteorder + 'q')
    length_size = struct.calcsize(byteorder + 'i')
    hashvalue_size = struct.calcsize(byteorder + 'I')
    return seed_size + length_size + len(self) * hashvalue_size
def get_certificate_from_publish_settings(publish_settings_path, path_to_write_certificate, subscription_id=None):
    import base64
    try:
        from xml.etree import cElementTree as ET
    except ImportError:
        from xml.etree import ElementTree as ET
    try:
        import OpenSSL.crypto as crypto
    except:
        raise Exception("pyopenssl is required to use get_certificate_from_publish_settings")
    _validate_not_none('publish_settings_path', publish_settings_path)
    _validate_not_none('path_to_write_certificate', path_to_write_certificate)
    tree = ET.parse(publish_settings_path)
    subscriptions = tree.getroot().findall("./PublishProfile/Subscription")
    if subscription_id:
        subscription = next((s for s in subscriptions
                             if s.get('Id').lower() == subscription_id.lower()), None)
    else:
        subscription = subscriptions[0]
    if subscription is None:
        raise ValueError(
            "The provided subscription_id '{}' was not found in the publish "
            "settings file provided at '{}'".format(subscription_id, publish_settings_path))
    cert_string = _decode_base64_to_bytes(subscription.get('ManagementCertificate'))
    cert = crypto.load_pkcs12(cert_string, b'')
    with open(path_to_write_certificate, 'wb') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert.get_certificate()))
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, cert.get_privatekey()))
    return subscription.get('Id')
def wantClass(self, cls):
    declared = getattr(cls, '__test__', None)
    if declared is not None:
        wanted = declared
    else:
        wanted = (not cls.__name__.startswith('_')
                  and (issubclass(cls, unittest.TestCase)
                       or self.matches(cls.__name__)))
    plug_wants = self.plugins.wantClass(cls)
    if plug_wants is not None:
        log.debug("Plugin setting selection of %s to %s", cls, plug_wants)
        wanted = plug_wants
    log.debug("wantClass %s? %s", cls, wanted)
    return wanted
def t_NOTEQUAL(self, t):
    r"!\="
    t.endlexpos = t.lexpos + len(t.value)
    return t
def get_dict(self, timeout=-1):
    results = self.get(timeout)
    engine_ids = [md['engine_id'] for md in self._metadata]
    bycount = sorted(engine_ids, key=lambda k: engine_ids.count(k))
    maxcount = bycount.count(bycount[-1])
    if maxcount > 1:
        # error message reconstructed; the original string was truncated in the source
        raise ValueError("Cannot build dict, %i jobs ran on engine #%i" % (
            maxcount, bycount[-1]))
    return dict(zip(engine_ids, results))
def create_file(self, bucket, key, file_versions):
    objs = []
    for file_ver in file_versions:
        f = FileInstance.create().set_uri(
            file_ver['full_path'],
            file_ver['size'],
            'md5:{0}'.format(file_ver['checksum']),
        )
        obj = ObjectVersion.create(bucket, key).set_file(f)
        obj.created = arrow.get(
            file_ver['creation_date']).datetime.replace(tzinfo=None)
        objs.append(obj)
    db.session.commit()
    return objs[-1]
def cli_command_restart(self, msg):
    info = ''
    if self.state == State.RUNNING and self.sprocess and self.sprocess.proc:
        self.state = State.RESTARTING
        self.sprocess.set_exit_callback(self.proc_exit_cb_restart)
        self.sprocess.proc.kill()
        info = 'killed'
    return info
def _reconstruct_matrix(data_list):
    matrix_format = data_list[0]
    data = data_list[1]
    is_empty = isinstance(data, str) and data == '__empty__'
    if matrix_format == 'csc':
        if is_empty:
            return spsp.csc_matrix(data_list[4])
        else:
            return spsp.csc_matrix(tuple(data_list[1:4]), shape=data_list[4])
    elif matrix_format == 'csr':
        if is_empty:
            return spsp.csr_matrix(data_list[4])
        else:
            return spsp.csr_matrix(tuple(data_list[1:4]), shape=data_list[4])
    elif matrix_format == 'bsr':
        if is_empty:
            return spsp.bsr_matrix(data_list[4])
        else:
            return spsp.bsr_matrix(tuple(data_list[1:4]), shape=data_list[4])
    elif matrix_format == 'dia':
        if is_empty:
            return spsp.dia_matrix(data_list[3])
        else:
            return spsp.dia_matrix(tuple(data_list[1:3]), shape=data_list[3])
    else:
        raise RuntimeError('You shall not pass!')
def pull(dry_run, flavor, interactive, debug):
    try:
        main_section = _get_section_name(flavor)
        config = _try_load_config(main_section, interactive)
        lockfile_path = os.path.join(get_data_path(config, main_section),
                                     'bugwarrior.lockfile')
        lockfile = PIDLockFile(lockfile_path)
        lockfile.acquire(timeout=10)
        try:
            issue_generator = aggregate_issues(config, main_section, debug)
            synchronize(issue_generator, config, main_section, dry_run)
        finally:
            lockfile.release()
    except LockTimeout:
        log.critical(
            'Your taskrc repository is currently locked. '
            'Remove the file at %s if you are sure no other '
            'bugwarrior processes are currently running.' % (
                lockfile_path
            )
        )
    except RuntimeError as e:
        log.exception("Aborted (%s)" % e)
def label_search(self, key=None, value=None):
    if key is not None:
        key = key.lower()
    if value is not None:
        value = value.lower()
    show_details = True
    if key is None and value is None:
        url = '%s/labels/search' % (self.base)
        show_details = False
    elif key is not None and value is not None:
        url = '%s/labels/search/%s/key/%s/value' % (self.base, key, value)
    elif key is None:
        url = '%s/labels/search/%s/value' % (self.base, value)
    else:
        url = '%s/labels/search/%s/key' % (self.base, key)
    result = self._get(url)
    if len(result) == 0:
        bot.info("No labels found.")
        sys.exit(0)
    bot.info("Labels\n")
    rows = []
    for l in result:
        if show_details is True:
            entry = ["%s:%s" % (l['key'], l['value']),
                     "\n%s\n\n" % "\n".join(l['containers'])]
        else:
            entry = ["N=%s" % len(l['containers']),
                     "%s:%s" % (l['key'], l['value'])]
        rows.append(entry)
    bot.table(rows)
    return rows
def force_iterable(f):
    def wrapper(*args, **kwargs):
        r = f(*args, **kwargs)
        if hasattr(r, '__iter__'):
            return r
        else:
            return [r]
    return wrapper
def main():
    args = get_args()
    ret_code = args.target(args)
    _logger.debug('Exiting with code %d', ret_code)
    sys.exit(ret_code)
def lock(fileobj):
    try:
        import fcntl
    except ImportError:
        return False
    else:
        try:
            fcntl.lockf(fileobj, fcntl.LOCK_EX)
        except IOError:
            return False
        else:
            return True
def make_bintree(levels):
    G = nx.DiGraph()
    root = '0'
    G.add_node(root)
    add_children(G, root, levels, 2)
    return G
def set_option(self, optname, value, action=None, optdict=None):
    if optname in self._options_methods or optname in self._bw_options_methods:
        if value:
            try:
                meth = self._options_methods[optname]
            except KeyError:
                meth = self._bw_options_methods[optname]
                warnings.warn(
                    "%s is deprecated, replace it by %s"
                    % (optname, optname.split("-")[0]),
                    DeprecationWarning,
                )
            value = utils._check_csv(value)
            if isinstance(value, (list, tuple)):
                for _id in value:
                    meth(_id, ignore_unknown=True)
            else:
                meth(value)
            return
    elif optname == "output-format":
        self._reporter_name = value
        if self._reporters:
            self._load_reporter()
    try:
        checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict)
    except config.UnsupportedAction:
        print("option %s can't be read from config file" % optname, file=sys.stderr)
def _send_offer_assignment_notification_email(config, user_email, subject, email_body, site_code, task):
    try:
        sailthru_client = get_sailthru_client(site_code)
    except SailthruError:
        logger.exception(
            '[Offer Assignment] A client error occurred while attempting to send a offer assignment notification.'
            ' Message: {message}'.format(message=email_body)
        )
        return None
    email_vars = {
        'subject': subject,
        'email_body': email_body,
    }
    try:
        response = sailthru_client.send(
            template=config['templates']['assignment_email'],
            email=user_email,
            _vars=email_vars
        )
    except SailthruClientError:
        logger.exception(
            '[Offer Assignment] A client error occurred while attempting to send a offer assignment notification.'
            ' Message: {message}'.format(message=email_body)
        )
        return None
    if not response.is_ok():
        error = response.get_error()
        logger.error(
            '[Offer Assignment] A {token_error_code} - {token_error_message} error occurred'
            ' while attempting to send a offer assignment notification.'
            ' Message: {message}'.format(
                message=email_body,
                token_error_code=error.get_error_code(),
                token_error_message=error.get_message()
            )
        )
        if can_retry_sailthru_request(error):
            logger.info(
                '[Offer Assignment] An attempt will be made to resend the offer assignment notification.'
                ' Message: {message}'.format(message=email_body)
            )
            schedule_retry(task, config)
        else:
            logger.warning(
                '[Offer Assignment] No further attempts will be made to send the offer assignment notification.'
                ' Failed Message: {message}'.format(message=email_body)
            )
    return response
def migrate(uri: str, archive_uri: str, case_id: str, dry: bool, force: bool):
    scout_client = MongoClient(uri)
    scout_database = scout_client[uri.rsplit('/', 1)[-1]]
    scout_adapter = MongoAdapter(database=scout_database)
    scout_case = scout_adapter.case(case_id)
    if not force and scout_case.get('is_migrated'):
        print("case already migrated")
        return
    archive_client = MongoClient(archive_uri)
    archive_database = archive_client[archive_uri.rsplit('/', 1)[-1]]
    archive_case = archive_database.case.find_one({
        'owner': scout_case['owner'],
        'display_name': scout_case['display_name']
    })
    archive_data = archive_info(archive_database, archive_case)
    if dry:
        print(ruamel.yaml.safe_dump(archive_data))
    else:
        pass
def load_disease_term(self, disease_obj):
    LOG.debug("Loading disease term %s into database", disease_obj['_id'])
    try:
        self.disease_term_collection.insert_one(disease_obj)
    except DuplicateKeyError as err:
        # use str.format placeholders so the id is actually interpolated
        raise IntegrityError("Disease term {} already exists in database".format(disease_obj['_id']))
    LOG.debug("Disease term saved")
def get_elliptic_curve(name):
    for curve in get_elliptic_curves():
        if curve.name == name:
            return curve
    raise ValueError("unknown curve name", name)
def subscribe(self):
    self.stream.setsockopt(zmq.UNSUBSCRIBE, '')
    if '' in self.topics:
        self.log.debug("Subscribing to: everything")
        self.stream.setsockopt(zmq.SUBSCRIBE, '')
    else:
        for topic in self.topics:
            self.log.debug("Subscribing to: %r" % (topic))
            self.stream.setsockopt(zmq.SUBSCRIBE, topic)
def _sem_open(name, value=None):
    if value is None:
        handle = pthread.sem_open(ctypes.c_char_p(name), 0)
    else:
        handle = pthread.sem_open(ctypes.c_char_p(name), SEM_OFLAG, SEM_PERM,
                                  ctypes.c_int(value))
    if handle == SEM_FAILURE:
        e = ctypes.get_errno()
        if e == errno.EEXIST:
            raise FileExistsError("a semaphore named %s already exists" % name)
        elif e == errno.ENOENT:
            raise FileNotFoundError('cannot find semaphore named %s' % name)
        elif e == errno.ENOSYS:
            raise NotImplementedError('No semaphore implementation on this '
                                      'system')
        else:
            raiseFromErrno()
    return handle
def parallel(collection, method, processes=None, args=None, **kwargs):
    if processes is None:
        processes = min(mp.cpu_count(), 20)
    print "Running parallel process on " + str(processes) + " cores. :-)"
    pool = mp.Pool(processes=processes)
    PROC = []
    tic = time.time()
    for main_arg in collection:
        if args is None:
            ARGS = (main_arg,)
        else:
            if isinstance(args, tuple) == False:
                args = (args,)
            ARGS = (main_arg,) + args
        PROC.append(pool.apply_async(method, args=ARGS, kwds=kwargs))
    RES = []
    for p in PROC:
        try:
            RES.append(p.get())
        except Exception as e:
            print "shit happens..."
            print e
            RES.append(None)
    pool.close()
    pool.join()
    toc = time.time()
    elapsed = toc - tic
    print "Elapsed time: %s on %s processes :-)\n" % (str(elapsed), str(processes))
    return RES
def uncache_zipdir(path):
    from zipimport import _zip_directory_cache as zdc
    _uncache(path, zdc)
    _uncache(path, sys.path_importer_cache)
def task_create(asana_workspace_id, name, notes, assignee, projects, completed, **kwargs):
    put("task_create",
        asana_workspace_id=asana_workspace_id,
        name=name,
        notes=notes,
        assignee=assignee,
        projects=projects,
        completed=completed,
        **kwargs)
def str_to_num(str_value):
    str_value = str(str_value)
    try:
        return int(str_value)
    except ValueError:
        return float(str_value)
def connect(url='https://github.com', token=None):
    gh_session = None
    if url == 'https://github.com':
        gh_session = create_session(token)
    else:
        gh_session = create_enterprise_session(url, token)
    if gh_session is None:
        msg = 'Unable to connect to (%s) with provided token.'
        raise RuntimeError(msg, url)
    logger.info('Connected to: %s', url)
    return gh_session
def generate_project(args):
    src = os.path.join(dirname(abspath(__file__)), 'project')
    project_name = args.get('<project>')
    if not project_name:
        logger.warning('Project name cannot be empty.')
        return
    dst = os.path.join(os.getcwd(), project_name)
    if os.path.isdir(dst):
        logger.warning('Project directory already exists.')
        return
    logger.info('Start generating project files.')
    _mkdir_p(dst)
    for src_dir, sub_dirs, filenames in os.walk(src):
        relative_path = src_dir.split(src)[1].lstrip(os.path.sep)
        dst_dir = os.path.join(dst, relative_path)
        if src != src_dir:
            _mkdir_p(dst_dir)
        for filename in filenames:
            if filename in ['development.py', 'production.py']:
                continue
            src_file = os.path.join(src_dir, filename)
            dst_file = os.path.join(dst_dir, filename)
            if filename.endswith(REWRITE_FILE_EXTS):
                _rewrite_and_copy(src_file, dst_file, project_name)
            else:
                shutil.copy(src_file, dst_file)
            logger.info("New: %s" % dst_file)
            if filename in ['development_sample.py', 'production_sample.py']:
                dst_file = os.path.join(dst_dir, "%s.py" % filename.split('_')[0])
                _rewrite_and_copy(src_file, dst_file, project_name)
                logger.info("New: %s" % dst_file)
    logger.info('Finish generating project files.')
def with_ignored_exceptions(self, *ignored_exceptions):
    for exception in ignored_exceptions:
        self._ignored_exceptions = self._ignored_exceptions + (exception,)
    return self
def indented_short_title(self, item):
    r = ""
    if hasattr(item, 'get_absolute_url'):
        r = '<input type="hidden" class="medialibrary_file_path" value="%s" />' % item.get_absolute_url()
    editable_class = ''
    if not getattr(item, 'feincms_editable', True):
        editable_class = ' tree-item-not-editable'
    r += '<span id="page_marker-%d" class="page_marker%s" style="width: %dpx;">&nbsp;</span>&nbsp;' % (
        item.id, editable_class, 14 + item.level * 18)
    if hasattr(item, 'short_title'):
        r += item.short_title()
    else:
        r += unicode(item)
    return mark_safe(r)
def render_columns(columns, write_borders=True, column_colors=None):
    if column_colors is not None and len(column_colors) != len(columns):
        raise ValueError('Wrong number of column colors')
    widths = [max(len(cell) for cell in column) for column in columns]
    max_column_length = max(len(column) for column in columns)
    result = '\n'.join(render_row(i, columns, widths, column_colors)
                       for i in range(max_column_length))
    if write_borders:
        border = '+%s+' % '|'.join('-' * (w + 2) for w in widths)
        return '%s\n%s\n%s' % (border, result, border)
    else:
        return result
def create_database(self, server_name, name, service_objective_id,
                    edition=None, collation_name=None, max_size_bytes=None):
    _validate_not_none('server_name', server_name)
    _validate_not_none('name', name)
    _validate_not_none('service_objective_id', service_objective_id)
    return self._perform_post(
        self._get_databases_path(server_name),
        _SqlManagementXmlSerializer.create_database_to_xml(
            name, service_objective_id, edition, collation_name,
            max_size_bytes
        )
    )
def xcom_push(self, key, value, execution_date=None):
    if execution_date and execution_date < self.execution_date:
        raise ValueError(
            'execution_date can not be in the past (current '
            'execution_date is {}; received {})'.format(
                self.execution_date, execution_date))
    XCom.set(
        key=key,
        value=value,
        task_id=self.task_id,
        dag_id=self.dag_id,
        execution_date=execution_date or self.execution_date)
def merged(self, timeslots: 'TimeslotCollection') -> 'TimeslotCollection':
    slots = [Timeslot(slot.interval, slot.channel) for slot in self.timeslots]
    slots.extend([Timeslot(slot.interval, slot.channel) for slot in timeslots.timeslots])
    return TimeslotCollection(*slots)
def concat(*seqs) -> ISeq:
    allseqs = lseq.sequence(itertools.chain(*filter(None, map(to_seq, seqs))))
    if allseqs is None:
        return lseq.EMPTY
    return allseqs
def _set_logger(self, name=None):
    if name is None:
        cls = self.__class__
        name = '%s.%s' % (cls.__module__, cls.__name__)
    self._logger = logging.getLogger(name)
def as_event_description(self):
    description = {
        self.name: {
            'timestamp': self.time,
        },
    }
    if self.data is not None:
        description[self.name]['data'] = self.data
    return description
def add_element(self, element):
    if isinstance(element, BaseExpression):
        element.set_parent(self._working_fragment)
        self._working_fragment.elements.append(element)
        return self
    else:
        return self.add_operator(element)
def hub_history(self):
    self.session.send(self._query_socket, "history_request", content={})
    idents, msg = self.session.recv(self._query_socket, 0)
    if self.debug:
        pprint(msg)
    content = msg['content']
    if content['status'] != 'ok':
        raise self._unwrap_exception(content)
    else:
        return content['history']
def mk_privkeys(num):
    "make privkeys that support coloring, see utils.cstr"
    privkeys = []
    assert num <= num_colors
    for i in range(num):
        j = 0
        while True:
            k = sha3(str(j))
            a = privtoaddr(k)
            an = big_endian_to_int(a)
            if an % num_colors == i:
                break
            j += 1
        privkeys.append(k)
    return privkeys
def parse_list(value):
    segments = _QUOTED_SEGMENT_RE.findall(value)
    for segment in segments:
        left, match, right = value.partition(segment)
        value = ''.join([left, match.replace(',', '\000'), right])
    return [_dequote(x.strip()).replace('\000', ',') for x in value.split(',')]
def index(context):
    LOG.info("Running scout delete index")
    adapter = context.obj['adapter']
    for collection in adapter.db.collection_names():
        adapter.db[collection].drop_indexes()
    LOG.info("All indexes deleted")
def is_valid_filesys(path):
    if os.path.isabs(path) and os.path.isdir(path) and \
            not os.path.isfile(path):
        return True
    else:
        raise LocalPortValidationError(
            'Port value %s is not a valid filesystem location' % path
        )
def parse_coordinates(variant, category):
    ref = variant.REF
    if variant.ALT:
        alt = variant.ALT[0]
    if category == "str" and not variant.ALT:
        alt = '.'
    chrom_match = CHR_PATTERN.match(variant.CHROM)
    chrom = chrom_match.group(2)
    svtype = variant.INFO.get('SVTYPE')
    if svtype:
        svtype = svtype.lower()
    mate_id = variant.INFO.get('MATEID')
    svlen = variant.INFO.get('SVLEN')
    svend = variant.INFO.get('END')
    snvend = int(variant.end)
    position = int(variant.POS)
    ref_len = len(ref)
    alt_len = len(alt)
    sub_category = get_sub_category(alt_len, ref_len, category, svtype)
    end = get_end(position, alt, category, snvend, svend)
    length = get_length(alt_len, ref_len, category, position, end, svtype, svlen)
    end_chrom = chrom
    if sub_category == 'bnd':
        if ':' in alt:
            match = BND_ALT_PATTERN.match(alt)
            if match:
                other_chrom = match.group(1)
                match = CHR_PATTERN.match(other_chrom)
                end_chrom = match.group(2)
    cytoband_start = get_cytoband_coordinates(chrom, position)
    cytoband_end = get_cytoband_coordinates(end_chrom, end)
    coordinates = {
        'position': position,
        'end': end,
        'length': length,
        'sub_category': sub_category,
        'mate_id': mate_id,
        'cytoband_start': cytoband_start,
        'cytoband_end': cytoband_end,
        'end_chrom': end_chrom,
    }
    return coordinates
def get_metadata(path_or_module, metadata_version=None):
    if isinstance(path_or_module, ModuleType):
        try:
            return Installed(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
    try:
        __import__(path_or_module)
    except ImportError:
        pass
    else:
        try:
            return Installed(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
    if os.path.isfile(path_or_module):
        try:
            return SDist(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
        try:
            return BDist(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
        try:
            return Wheel(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
    if os.path.isdir(path_or_module):
        try:
            return Develop(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
def unsubscribe(self, event, callback):
    try:
        self._subscribers[event].remove(self._Subscription(event, callback))
    except KeyError:
        return False
    return True
def to_archive(self, writer):
    if 'b' not in writer.mode:
        raise GiraffeError("Archive writer must be in binary mode")
    writer.write(GIRAFFE_MAGIC)
    writer.write(self.columns.serialize())
    i = 0
    for n, chunk in enumerate(self._fetchall(ROW_ENCODING_RAW), 1):
        writer.write(chunk)
        yield TeradataEncoder.count(chunk)
def flatten(iterables, level=inf):
    if level >= 0 and isinstance(iterables, (list, tuple, GeneratorType, map, zip)):
        level -= 1
        for i in iterables:
            yield from flatten(i, level=level)
    else:
        yield iterables
def send(self):
    self._generate_email()
    if self.verbose:
        print(
            "Debugging info"
            "\n--------------"
            "\n{} Message created.".format(timestamp())
        )
    recipients = []
    for i in (self.to, self.cc, self.bcc):
        if i:
            if isinstance(i, MutableSequence):
                recipients += i
            else:
                recipients.append(i)
    session = self._get_session()
    if self.verbose:
        print(timestamp(), "Login successful.")
    session.sendmail(self.from_, recipients, self.message.as_string())
    session.quit()
    if self.verbose:
        print(timestamp(), "Logged out.")
    if self.verbose:
        print(
            timestamp(),
            type(self).__name__ + " info:",
            self.__str__(indentation="\n * "),
        )
    print("Message sent.")
def node(self, title, **args):
    self._stream.write('%snode: {title:"%s"' % (self._indent, title))
    self._write_attributes(NODE_ATTRS, **args)
    self._stream.write("}\n")
def _next_rdelim(items, pos):
    for num, item in enumerate(items):
        if item > pos:
            break
    else:
        raise RuntimeError("Mismatched delimiters")
    del items[num]
    return item
def _extract_base(self, element):
    if isinstance(element, list):
        return [self._extract_base(x) for x in element]
    base = self.checker.is_url_valid(url=element, return_base=True)
    if base:
        return base
    if "/" in element:
        return element.split("/")[0]
    return element
def recent(self):
    url = "{0}/users/ME/conversations".format(self.skype.conn.msgsHost)
    params = {"startTime": 0,
              "view": "msnp24Equivalent",
              "targetType": "Passport|Skype|Lync|Thread"}
    resp = self.skype.conn.syncStateCall("GET", url, params,
                                         auth=SkypeConnection.Auth.RegToken).json()
    chats = {}
    for json in resp.get("conversations", []):
        cls = SkypeSingleChat
        if "threadProperties" in json:
            info = self.skype.conn("GET",
                                   "{0}/threads/{1}".format(self.skype.conn.msgsHost, json.get("id")),
                                   auth=SkypeConnection.Auth.RegToken,
                                   params={"view": "msnp24Equivalent"}).json()
            json.update(info)
            cls = SkypeGroupChat
        chats[json.get("id")] = self.merge(cls.fromRaw(self.skype, json))
    return chats
def _html_checker(job_var, interval, status, header, _interval_set=False):
    job_status = job_var.status()
    job_status_name = job_status.name
    job_status_msg = job_status.value
    status.value = header % (job_status_msg)
    while job_status_name not in ['DONE', 'CANCELLED']:
        time.sleep(interval)
        job_status = job_var.status()
        job_status_name = job_status.name
        job_status_msg = job_status.value
        if job_status_name == 'ERROR':
            break
        else:
            if job_status_name == 'QUEUED':
                job_status_msg += ' (%s)' % job_var.queue_position()
                if not _interval_set:
                    interval = max(job_var.queue_position(), 2)
            else:
                if not _interval_set:
                    interval = 2
            status.value = header % (job_status_msg)
    status.value = header % (job_status_msg)
def display(self):
    if isinstance(self.name, six.string_types) and len(self.name) > 0:
        return '{0} ({1})'.format(self.name, self.public_ip)
    else:
        return self.public_ip
def parse_conservation(variant, info_key):
    raw_score = variant.INFO.get(info_key)
    conservations = []
    if raw_score:
        if isinstance(raw_score, numbers.Number):
            raw_score = (raw_score,)
        for score in raw_score:
            if score >= CONSERVATION[info_key]['conserved_min']:
                conservations.append('Conserved')
            else:
                conservations.append('NotConserved')
    return conservations
def _asa_task(q, masks, stft, sample_width, frame_rate, nsamples_for_each_fft):
    for mask in masks:
        mask = np.where(mask > 0, 1, 0)
    masks = [mask * stft for mask in masks]
    nparrs = []
    dtype_dict = {1: np.int8, 2: np.int16, 4: np.int32}
    dtype = dtype_dict[sample_width]
    for m in masks:
        _times, nparr = signal.istft(m, frame_rate, nperseg=nsamples_for_each_fft)
        nparr = nparr.astype(dtype)
        nparrs.append(nparr)
    for m in nparrs:
        q.put(m)
    q.put("DONE")
def client_authentication_required(self, request, *args, **kwargs):
    def is_confidential(client):
        if hasattr(client, 'is_confidential'):
            return client.is_confidential
        client_type = getattr(client, 'client_type', None)
        if client_type:
            return client_type == 'confidential'
        return True
    grant_types = ('password', 'authorization_code', 'refresh_token')
    client_id, _ = self._get_client_creds_from_request(request)
    if client_id and request.grant_type in grant_types:
        client = self._clientgetter(client_id)
        if client:
            return is_confidential(client)
    return False
def contains(self, k):
    if self._changed():
        self._read()
    return k in self.store.keys()
def assert_is_type(var, *types, **kwargs):
    assert types, "The list of expected types was not provided"
    expected_type = types[0] if len(types) == 1 else U(*types)
    if _check_type(var, expected_type):
        return
    assert set(kwargs).issubset({"message", "skip_frames"}), \
        "Unexpected keyword arguments: %r" % kwargs
    message = kwargs.get("message", None)
    skip_frames = kwargs.get("skip_frames", 1)
    args = _retrieve_assert_arguments()
    vname = args[0]
    etn = _get_type_name(expected_type, dump=", ".join(args[1:]))
    vtn = _get_type_name(type(var))
    raise H2OTypeError(var_name=vname, var_value=var, var_type_name=vtn,
                       exp_type_name=etn, message=message, skip_frames=skip_frames)
def success(self):
    any_success = False
    for step_event in itertools.chain(
        self.input_expectations, self.output_expectations, self.transforms
    ):
        if step_event.event_type == DagsterEventType.STEP_FAILURE:
            return False
        if step_event.event_type == DagsterEventType.STEP_SUCCESS:
            any_success = True
    return any_success
def session_new(self, **kwargs):
    path = self._get_path('session_new')
    response = self._GET(path, kwargs)
    self._set_attrs_to_values(response)
    return response
def filter_new(self, name, filt_str):
    filt = self.filt.grab_filt(filt=filt_str)
    self.filt.add(name, filt, info=filt_str)
    return
def _observe_mode(self, change):
    block = self.block
    if block and self.is_initialized and change['type'] == 'update':
        if change['oldvalue'] == 'replace':
            raise NotImplementedError
        for c in self.children:
            block.children.remove(c)
            c.set_parent(None)
        self.refresh_items()
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    super(GetAttributeListResponsePayload, self).read(
        input_buffer,
        kmip_version=kmip_version
    )
    local_buffer = utils.BytearrayStream(input_buffer.read(self.length))
    if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_buffer):
        self._unique_identifier = primitives.TextString(
            tag=enums.Tags.UNIQUE_IDENTIFIER
        )
        self._unique_identifier.read(
            local_buffer,
            kmip_version=kmip_version
        )
    else:
        raise exceptions.InvalidKmipEncoding(
            "The GetAttributeList response payload encoding is missing "
            "the unique identifier."
        )
    names = list()
    if kmip_version < enums.KMIPVersion.KMIP_2_0:
        while self.is_tag_next(enums.Tags.ATTRIBUTE_NAME, local_buffer):
            name = primitives.TextString(tag=enums.Tags.ATTRIBUTE_NAME)
            name.read(local_buffer, kmip_version=kmip_version)
            names.append(name)
        if len(names) == 0:
            raise exceptions.InvalidKmipEncoding(
                "The GetAttributeList response payload encoding is "
                "missing the attribute names."
            )
        self._attribute_names = names
    else:
        while self.is_tag_next(
            enums.Tags.ATTRIBUTE_REFERENCE, local_buffer
        ):
            if self.is_type_next(enums.Types.STRUCTURE, local_buffer):
                reference = objects.AttributeReference()
                reference.read(local_buffer, kmip_version=kmip_version)
                names.append(
                    primitives.TextString(
                        value=reference.attribute_name,
                        tag=enums.Tags.ATTRIBUTE_NAME
                    )
                )
            elif self.is_type_next(enums.Types.ENUMERATION, local_buffer):
                reference = primitives.Enumeration(
                    enums.Tags,
                    tag=enums.Tags.ATTRIBUTE_REFERENCE
                )
                reference.read(local_buffer, kmip_version=kmip_version)
                name = enums.convert_attribute_tag_to_name(reference.value)
                names.append(
                    primitives.TextString(
                        value=name,
                        tag=enums.Tags.ATTRIBUTE_NAME
                    )
                )
            else:
                raise exceptions.InvalidKmipEncoding(
                    "The GetAttributeList response payload encoding "
                    "contains an invalid AttributeReference type."
                )
        self._attribute_names = names
    self.is_oversized(local_buffer)
def registerAdminSite(appName, excludeModels=[]):
    for model in apps.get_app_config(appName).get_models():
        if model not in excludeModels:
            admin.site.register(model)
def _load_rels(self, source):
    self.relationships.load(source=self, data=source)
def protocol_version_to_kmip_version(value):
    if not isinstance(value, ProtocolVersion):
        return None
    if value.major == 1:
        if value.minor == 0:
            return enums.KMIPVersion.KMIP_1_0
        elif value.minor == 1:
            return enums.KMIPVersion.KMIP_1_1
        elif value.minor == 2:
            return enums.KMIPVersion.KMIP_1_2
        elif value.minor == 3:
            return enums.KMIPVersion.KMIP_1_3
        elif value.minor == 4:
            return enums.KMIPVersion.KMIP_1_4
        else:
            return None
    else:
        return None
def genes_by_alias(self, build='37', genes=None):
    LOG.info("Fetching all genes by alias")
    alias_genes = {}
    if not genes:
        genes = self.hgnc_collection.find({'build': build})
    for gene in genes:
        hgnc_id = gene['hgnc_id']
        hgnc_symbol = gene['hgnc_symbol']
        for alias in gene['aliases']:
            true_id = None
            if alias == hgnc_symbol:
                true_id = hgnc_id
            if alias in alias_genes:
                alias_genes[alias]['ids'].add(hgnc_id)
                if true_id:
                    alias_genes[alias]['true'] = hgnc_id
            else:
                alias_genes[alias] = {
                    'true': hgnc_id,
                    'ids': set([hgnc_id])
                }
    return alias_genes
def _expand_to_event_rank(self, x):
    expanded_x = x
    for _ in range(tensorshape_util.rank(self.event_shape)):
        expanded_x = tf.expand_dims(expanded_x, -1)
    return expanded_x
def start(self):
    if self._collectors:
        self._collectors[-1].pause()
    self._collectors.append(self)
    traces0 = []
    if hasattr(sys, "gettrace"):
        fn0 = sys.gettrace()
        if fn0:
            tracer0 = getattr(fn0, '__self__', None)
            if tracer0:
                traces0 = getattr(tracer0, 'traces', [])
    fn = self._start_tracer()
    for args in traces0:
        (frame, event, arg), lineno = args
        try:
            fn(frame, event, arg, lineno=lineno)
        except TypeError:
            raise Exception(
                "fullcoverage must be run with the C trace function."
            )
    threading.settrace(self._installation_trace)
def _has_connection(hostname, port):
    try:
        host = socket.gethostbyname(hostname)
        socket.create_connection((host, port), 2)
        return True
    except Exception:
        return False
def rps_at(self, t):
    if 0 <= t <= self.duration:
        return self.minrps + \
            float(self.maxrps - self.minrps) * t / self.duration
    else:
        return 0
def get_all_child_m2m_relations(model):
    return [
        field for field in model._meta.get_fields()
        if isinstance(field, ParentalManyToManyField)
    ]
def display_json(*objs, **kwargs):
    raw = kwargs.pop('raw', False)
    if raw:
        for obj in objs:
            publish_json(obj)
    else:
        display(*objs, include=['text/plain', 'application/json'])
def pick_scalar_condition(pred, true_value, false_value, name=None):
    with tf.name_scope(name or "pick_scalar_condition"):
        pred = tf.convert_to_tensor(
            value=pred, dtype_hint=tf.bool, name="pred")
        true_value = tf.convert_to_tensor(value=true_value, name="true_value")
        false_value = tf.convert_to_tensor(value=false_value, name="false_value")
        pred_ = tf.get_static_value(pred)
        if pred_ is None:
            return tf.where(pred, true_value, false_value)
        return true_value if pred_ else false_value
def adapter(data, headers, table_format=None, **kwargs):
    keys = ('title', )
    table = table_format_handler[table_format]
    t = table([headers] + list(data), **filter_dict_by_key(kwargs, keys))
    dimensions = terminaltables.width_and_alignment.max_dimensions(
        t.table_data,
        t.padding_left,
        t.padding_right)[:3]
    for r in t.gen_table(*dimensions):
        yield u''.join(r)
def print_hex(data):
    hex_msg = ""
    for c in data:
        hex_msg += "\\x" + format(c, "02x")
    _LOGGER.debug(hex_msg)
def fetch(self, category=CATEGORY_BUILD):
    kwargs = {}
    items = super().fetch(category, **kwargs)
    return items
def _status_new(self):
    self._update_status()
    new_comp = self._group_report(self._comp_report, 'Completed')
    new_dead = self._group_report(self._dead_report,
                                  'Dead, call jobs.traceback() for details')
    self._comp_report[:] = []
    self._dead_report[:] = []
    return new_comp or new_dead
def encode(self):
    header = bytearray(1)
    varHeader = bytearray()
    payload = bytearray()
    if self.qos:
        header[0] = 0x30 | self.retain | (self.qos << 1) | (self.dup << 3)
        varHeader.extend(encodeString(self.topic))
        varHeader.extend(encode16Int(self.msgId))
    else:
        header[0] = 0x30 | self.retain
        varHeader.extend(encodeString(self.topic))
    if isinstance(self.payload, bytearray):
        payload.extend(self.payload)
    elif isinstance(self.payload, str):
        payload.extend(bytearray(self.payload, encoding='utf-8'))
    else:
        raise PayloadTypeError(type(self.payload))
    totalLen = len(varHeader) + len(payload)
    if totalLen > 268435455:
        raise PayloadValueError(totalLen)
    header.extend(encodeLength(totalLen))
    header.extend(varHeader)
    header.extend(payload)
    self.encoded = header
    return str(header) if PY2 else bytes(header)
def symmetrized_csiszar_function(logu, csiszar_function, name=None):
    with tf.compat.v1.name_scope(name, "symmetrized_csiszar_function", [logu]):
        logu = tf.convert_to_tensor(value=logu, name="logu")
        return 0.5 * (csiszar_function(logu)
                      + dual_csiszar_function(logu, csiszar_function))
def get_engine(scheme):
    path = scheme.split("+")
    first, rest = path[0], path[1:]
    second = rest[0] if rest else None
    engine = resolve(ENGINE_MAPPING, first)
    if not isinstance(engine, list):
        if second:
            raise KeyError("%s has no sub-engines" % first)
        return engine
    try:
        engine, extra = engine
    except ValueError:
        raise ValueError(
            "django-bananas.url' engine "
            "configuration is invalid: %r" % ENGINE_MAPPING
        )
    if second is not None:
        engine = resolve(extra, second)
    assert not isinstance(
        engine, (list, dict)
    ), "Only two levels of engines are allowed"
    assert engine, "The returned engine is not truthy"
    return engine
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None):
    admin_conn = self.get_conn()
    output_uri_prefix = 'gs://' + '/'.join(filter(None, [bucket, namespace]))
    if not entity_filter:
        entity_filter = {}
    if not labels:
        labels = {}
    body = {
        'outputUrlPrefix': output_uri_prefix,
        'entityFilter': entity_filter,
        'labels': labels,
    }
    resp = (admin_conn
            .projects()
            .export(projectId=self.project_id, body=body)
            .execute(num_retries=self.num_retries))
    return resp
def build_package_from_pr_number(gh_token, sdk_id, pr_number, output_folder, *, with_comment=False):
    con = Github(gh_token)
    repo = con.get_repo(sdk_id)
    sdk_pr = repo.get_pull(pr_number)
    package_names = {f.filename.split('/')[0] for f in sdk_pr.get_files()
                     if f.filename.startswith("azure")}
    absolute_output_folder = Path(output_folder).resolve()
    with tempfile.TemporaryDirectory() as temp_dir, \
            manage_git_folder(gh_token, Path(temp_dir) / Path("sdk"), sdk_id,
                              pr_number=pr_number) as sdk_folder:
        for package_name in package_names:
            _LOGGER.debug("Build {}".format(package_name))
            execute_simple_command(
                ["python", "./build_package.py", "--dest", str(absolute_output_folder), package_name],
                cwd=sdk_folder
            )
            _LOGGER.debug("Build finished: {}".format(package_name))
    if with_comment:
        files = [f.name for f in absolute_output_folder.iterdir()]
        comment_message = None
        dashboard = DashboardCommentableObject(sdk_pr, "(message created by the CI based on PR content)")
        try:
            installation_message = build_installation_message(sdk_pr)
            download_message = build_download_message(sdk_pr, files)
            comment_message = installation_message + "\n\n" + download_message
            dashboard.create_comment(comment_message)
        except Exception:
            _LOGGER.critical("Unable to do PR comment:\n%s", comment_message)
def verbose(self, msg, *args, **kwargs):
    self.log(logging.VERBOSE, msg, *args, **kwargs)
def set_value(self, value):
    self.validate_value(value)
    self.value.set(value)
def remoteDataReceived(self, connection, data):
    proto = self.getLocalProtocol(connection)
    proto.transport.write(data)
    return {}
def create_validator(data_struct_dict, name=None):
    if name is None:
        name = 'FromDictValidator'
    attrs = {}
    for field_name, field_info in six.iteritems(data_struct_dict):
        field_type = field_info['type']
        if field_type == DictField.FIELD_TYPE_NAME and isinstance(field_info.get('validator'), dict):
            field_info['validator'] = create_validator(field_info['validator'])
        attrs[field_name] = create_field(field_info)
    name = force_str(name)
    return type(name, (Validator, ), attrs)
def get_branch_mutation_matrix(self, node, full_sequence=False):
    pp, pc = self.marginal_branch_profile(node)
    expQt = self.gtr.expQt(self._branch_length_to_gtr(node))
    if len(expQt.shape) == 3:
        mut_matrix_stack = np.einsum('ai,aj,ija->aij', pc, pp, expQt)
    else:
        mut_matrix_stack = np.einsum('ai,aj,ij->aij', pc, pp, expQt)
    normalizer = mut_matrix_stack.sum(axis=2).sum(axis=1)
    mut_matrix_stack = np.einsum('aij,a->aij', mut_matrix_stack, 1.0 / normalizer)
    if full_sequence:
        return mut_matrix_stack[self.full_to_reduced_sequence_map]
    else:
        return mut_matrix_stack
def get_window(window, Nx, fftbins=True):
    if six.callable(window):
        return window(Nx)
    elif (isinstance(window, (six.string_types, tuple)) or np.isscalar(window)):
        return scipy.signal.get_window(window, Nx, fftbins=fftbins)
    elif isinstance(window, (np.ndarray, list)):
        if len(window) == Nx:
            return np.asarray(window)
        raise ParameterError('Window size mismatch: '
                             '{:d} != {:d}'.format(len(window), Nx))
    else:
        raise ParameterError('Invalid window specification: {}'.format(window))