Dataset schema: function (string, 11 to 56k characters), repo_name (string, 5 to 60 characters), features (sequence)
def make_label(mets_meta: dict) -> str:
    """Generate a descriptive label for the given metadata set.

    Will take the form '{creator}: {label} ({pub_place}, {pub_date})'.

    :param mets_meta: Metadata to generate label from
    :returns: Generated label
    """
    label = mets_meta['title'][0]
    if mets_meta.get('creator'):
        label = "{creator}: {label}".format(
            creator="/".join(mets_meta['creator']), label=label)
    if mets_meta.get('pub_place') and mets_meta.get('pub_date'):
        label = "{label} ({pub_place}, {pub_date})".format(
            label=label, pub_place=mets_meta['pub_place'],
            pub_date=mets_meta['pub_date'])
    elif mets_meta.get('pub_date'):
        label = "{label} ({pub_date})".format(
            label=label, pub_date=mets_meta['pub_date'])
    elif mets_meta.get('pub_place'):
        label = "{label} ({pub_place})".format(
            label=label, pub_place=mets_meta['pub_place'])
    return label
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
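A minimal usage sketch for make_label above; the metadata dict is hypothetical and only illustrates the label shapes the function produces.

# Hypothetical METS metadata, for illustration only.
meta = {
    'title': ['Faust. Ein Fragment'],
    'creator': ['Goethe, Johann Wolfgang von'],
    'pub_place': 'Leipzig',
    'pub_date': '1790',
}
make_label(meta)
# -> 'Goethe, Johann Wolfgang von: Faust. Ein Fragment (Leipzig, 1790)'
make_label({'title': ['Faust. Ein Fragment'], 'pub_date': '1790'})
# -> 'Faust. Ein Fragment (1790)'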
def _get_canvases(toc_entry: TocEntry, manifest: Manifest) -> List[str]:
    """Obtain list of canvas identifiers for a given TOC entry.

    :param toc_entry: TOC entry to get canvases for
    :param manifest: Manifest with canvases
    :returns: All canvas ids for the given TOC entry
    """
    canvases = []
    for phys_id in toc_entry.physical_ids:
        canvas = next((c for c in manifest.sequences[0].canvases
                       if c.id.endswith(f'{phys_id}.json')), None)
        if canvas is None:
            logger.warning(f'Could not find a matching canvas for {phys_id}')
            continue
        canvases.append(canvas)
    if toc_entry.children:
        canvases.extend(chain.from_iterable(
            _get_canvases(child, manifest) for child in toc_entry.children))
    return canvases
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
def _make_empty_manifest(ident: str, label: str, base_url: str) -> Manifest:
    """Generate an empty IIIF manifest.

    :param ident: Identifier for the manifest, that is not a URL, but the
                  `<ident>` in `https://..../<ident>/manifest`
    :param label: Label for the manifest
    :param base_url: Root URL for the application, e.g. https://example.com
    :returns: The empty manifest
    """
    manifest_factory = ManifestFactory()
    manifest_ident = f'{base_url}/iiif/{ident}/manifest'
    manifest_factory.set_base_prezi_uri(f'{base_url}/iiif/{ident}')
    manifest_factory.set_base_image_uri(f'{base_url}/iiif/image')
    manifest_factory.set_iiif_image_info('2.0', 0)
    manifest = manifest_factory.manifest(ident=manifest_ident, label=label)
    return manifest
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
def make_image_info(itm: PhysicalItem, base_url: str) -> dict:
    """Create info.json data structures for all physical items."""
    sizes = [(f.width, f.height) for f in itm.files
             if f.width is not None and f.height is not None]
    max_width, max_height = max(sizes)
    return {
        '@context': 'http://iiif.io/api/image/2/context.json',
        '@id': f'{base_url}/iiif/image/{itm.image_ident}',
        'protocol': 'http://iiif.io/api/image',
        'profile': ['http://iiif.io/api/image/2/level0.json'],
        'width': max_width,
        'height': max_height,
        'sizes': [{'width': w, 'height': h} for w, h in sorted(sizes)]}
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
def make_manifest_collection(
        pagination: Pagination, label: str, collection_id: str,
        per_page: int, base_url: str, page_num: Optional[int] = None,
        coll_counts: Optional[Tuple[int, str, int]] = None) -> dict:
    """Generate a IIIF collection.

    :param pagination: Pagination query for all manifests of the collection
    :param label: Label for the collection
    :param collection_id: Identifier of the collection
    :param base_url: Root URL for the application, e.g. https://example.com
    :param page_num: Number of the collection page to display
    :returns: The generated IIIF collection
    """
    collection_url = f'{base_url}/iiif/collection/{collection_id}'
    if page_num is not None:
        page_id = 'p{}'.format(page_num)
    else:
        page_id = 'top'
    collection = {
        "@context": "http://iiif.io/api/presentation/2/context.json",
        "@id": f'{base_url}/iiif/collection/{collection_id}/{page_id}',
        "@type": "sc:Collection",
        "total": pagination.total,
        "label": label,
    }
    if page_id == 'top':
        collection.update({
            "first": f'{collection_url}/p1',
            "last": f'{collection_url}/p{pagination.pages}'
        })
    else:
        if collection_id != 'index':
            collection['within'] = f'{collection_url}/top'
        collection.update({
            'startIndex': (pagination.page - 1) * pagination.per_page,
            'manifests': [{
                '@id': f'{base_url}/iiif/{m.id}/manifest',
                '@type': 'sc:Manifest',
                'label': m.label,
                'attribution': m.manifest['attribution'],
                'logo': m.manifest['logo'],
                'thumbnail': m.manifest.get(
                    'thumbnail',
                    m.manifest['sequences'][0]['canvases'][0]['thumbnail'])
            } for m in pagination.items]
        })
    if page_num == 1:
        collection['collections'] = []
        for cid, label, num_manifs in coll_counts:
            if not num_manifs:
                continue
            # We create a mock pagination object that does not have
            # an underlying query, since we're only going to need
            # the manifest count when generating the top-level collection
            manifests_pagination = Pagination(
                None, 1, per_page, num_manifs, None)
            iiif_coll = make_manifest_collection(
                manifests_pagination, label, cid, None)
            collection['collections'].append(iiif_coll)
    if 'collections' in collection and not collection['collections']:
        del collection['collections']
    if pagination.has_next:
        collection['next'] = f'{collection_url}/p{pagination.next_num}'
    if pagination.has_prev:
        collection['prev'] = f'{collection_url}/p{pagination.prev_num}'
    return collection
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
def _make_link(page_no: int) -> str:
    params = urlencode({'p': page_no, **request_args})
    return f'{base_url}/iiif/annotation?{params}'
jbaiter/demetsiiify
[ 31, 6, 31, 16, 1482421024 ]
def raw_text(context):
    project = context['object']
    object_type = ContentType.objects.get_for_model(project)
    projectfiles = fileobject.objects.filter(
        content_type=object_type, object_id=project.id, filetype="text")
    textlist = ""
    for i in projectfiles:
        textlist = textlist + i.filename.read()
    return textlist
Rhombik/rhombik-object-repository
[ 21, 6, 21, 16, 1374607062 ]
def to_string(c):
    digit = f'{ord(c):x}'
    name = unicodedata.name(c, 'Name not found.')
    return f'`\\U{digit:>08}`\t: {name} - {c} \N{EM DASH} <http://www.fileformat.info/info/unicode/char/{digit}>'
jelliedpizza/RoleBot
[ 27, 10, 27, 3, 1502110601 ]
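A hedged usage sketch for to_string above, assuming unicodedata has been imported; the exact rendering depends on the character passed in.

import unicodedata  # required by to_string above

info = to_string('Ω')
# ord('Ω') is 0x3a9, so the escape shown is `\U000003a9` and the name
# resolved by unicodedata.name() is 'GREEK CAPITAL LETTER OMEGA'.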
def __init__(self, fitbit):
    self.fitbit = fitbit
    if self.fitbit.token is None:
        self.fitbit.get_token()
sociam/indx
[ 40, 7, 40, 81, 1329137438 ]
def get_day_time_series(self, resource_path, date=date.today(), format='json'):
    url = "/1/user/-/{0}/date/{1}/1d/1min.{2}".format(
        resource_path, date.isoformat(), format)
    data = self.fitbit.call_get_api(url)
    return json.loads(data)
sociam/indx
[ 40, 7, 40, 81, 1329137438 ]
def get_calories(self, from_datetime, to_datetime=None, format='json'): return self.get_intraday_time_series("activities/calories", from_datetime, to_datetime, format)
sociam/indx
[ 40, 7, 40, 81, 1329137438 ]
def get_distance(self, from_datetime, to_datetime=None, format='json'): return self.get_intraday_time_series("activities/distance", from_datetime, to_datetime, format)
sociam/indx
[ 40, 7, 40, 81, 1329137438 ]
def get_floors(self, from_datetime, to_datetime=None, format='json'): return self.get_intraday_time_series("activities/floors", from_datetime, to_datetime, format)
sociam/indx
[ 40, 7, 40, 81, 1329137438 ]
def can(self, request):
    has_user = hasattr(request, 'user')
    is_staff = False
    if has_user and request.user is not None and request.user.is_staff:
        is_staff = True
    return settings.DEBUG and 'prof' in request.GET and (not has_user or is_staff)
kansanmuisti/kamu
[ 11, 8, 11, 46, 1325598221 ]
def _compute_qty_on_voucher(self):
    # When computing per voucher we ignore the invoicing method, i.e. we
    # compute as if the method were "based on received quantities".
    voucher = self._context.get('voucher', False)
    if not voucher:
        self.update({'qty_on_voucher': 0.0})
        return
    lines = self.filtered(
        lambda x: x.order_id.state in ['purchase', 'done'])
    moves = self.env['stock.move'].search([
        ('id', 'in', lines.mapped('move_ids').ids),
        ('state', '=', 'done'),
        ('picking_id.vouchers', 'ilike', voucher[0]),
    ])
    for line in lines:
        line.qty_on_voucher = sum(moves.filtered(
            lambda x: x.id in line.move_ids.ids).mapped('product_uom_qty'))
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def _compute_vouchers(self):
    for rec in self:
        rec.vouchers = ', '.join(rec.mapped(
            'move_ids.picking_id.voucher_ids.display_name'))
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def _compute_delivery_status(self):
    precision = self.env['decimal.precision'].precision_get(
        'Product Unit of Measure')
    for line in self:
        if line.state not in ('purchase', 'done'):
            line.delivery_status = 'no'
            continue
        if line.order_id.force_delivered_status:
            line.delivery_status = line.order_id.force_delivered_status
            continue
        if float_compare(
                (line.qty_received + line.qty_returned), line.product_qty,
                precision_digits=precision) == -1:
            line.delivery_status = 'to receive'
        elif float_compare(
                (line.qty_received + line.qty_returned), line.product_qty,
                precision_digits=precision) >= 0:
            line.delivery_status = 'received'
        else:
            line.delivery_status = 'no'
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def _onchange_product_qty(self):
    if (
            self.state == 'purchase'
            and self.product_id.type in ['product', 'consu']
            and self.product_qty < self._origin.product_qty):
        warning_mess = {
            'title': _('Ordered quantity decreased!'),
            'message': (
                'You are reducing the ordered quantity! We recommend using'
                ' the button to cancel the remaining quantity and then'
                ' setting the desired quantity.'),
        }
        self.product_qty = self._origin.product_qty
        return {'warning': warning_mess}
    return {}
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def _compute_qty_returned(self):
    for line in self:
        qty = 0.0
        for move in line.move_ids.filtered(
                lambda m: m.state == 'done'
                and m.location_id.usage != 'supplier'
                and m.to_refund):
            qty += move.product_uom._compute_quantity(
                move.product_uom_qty, line.product_uom)
        line.qty_returned = qty
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def action_add_all_to_invoice(self):
    for rec in self:
        rec.invoice_qty = rec.qty_on_voucher or (
            rec.qty_to_invoice + rec.invoice_qty)
ingadhoc/purchase
[ 10, 28, 10, 7, 1453132294 ]
def render_html(self, inner_html, **kwargs):
    """
    Callback function for rendering HTML.
    :param inner_html: The inner HTML of this tree node.
    :param kwargs: Extra keyword arguments for rendering.
    :return The rendered HTML of this node.
    """
    return self.html_render_template.format(
        text_alignment=self.text_alignment, inner_html=inner_html)
TamiaLab/PySkCode
[ 1, 2, 1, 1, 1443186590 ]
def __init__(self, date_range, new_data):
    self.date_range = date_range
    self.new_data = new_data
manahl/arctic
[ 2924, 574, 2924, 89, 1432906650 ]
def __init__(self, version_store, symbol, user, log,
             modify_timeseries=None, audit=True, *args, **kwargs):
    """
    Parameters
    ----------
    version_store: `VersionStore`
        Arctic Library. Needs to support write, read, list_versions,
        _delete_version; this is the underlying store that we'll be
        securing for write.
    symbol: `str`
        symbol name for the item that's being modified
    user: `str`
        user making the change
    log: `str`
        Log message for the change
    modify_timeseries:
        if given, it will check the assumption that this is the latest
        data available for symbol in version_store. Should not this be the
        case, a ConcurrentModificationException will be raised. Use this
        if you're interacting with code that read in the data already and
        for some reason you cannot refactor the read-write operation to be
        contained within this context manager.
    audit: `bool`
        should we 'audit' the transaction. An audited write transaction is
        equivalent to a snapshot before and after the data change - i.e.
        we won't prune versions of the data involved in an audited
        transaction. This can be used to ensure that the history of
        certain data changes is preserved indefinitely.
    all other args:
        Will be passed into the initial read
    """
    self._version_store = version_store
    self._symbol = symbol
    self._user = user
    self._log = log
    self._audit = audit
    logger.info("MT: {}@{}: [{}] {}: {}".format(
        _get_host(version_store).get('l'),
        _get_host(version_store).get('mhost'),
        user, log, symbol))
    try:
        self.base_ts = self._version_store.read(self._symbol, *args, **kwargs)
    except NoDataFoundException:
        versions = [x['version'] for x in self._version_store.list_versions(
            self._symbol, latest_only=True)]
        versions.append(0)
        self.base_ts = VersionedItem(symbol=self._symbol, library=None,
                                     version=versions[0], metadata=None,
                                     data=None, host=None)
    except OperationFailure:
        # TODO: Current errors in mongo "Incorrect Number of Segments Returned"
        # This workaround should be removed once underlying problem is resolved.
        self.base_ts = self._version_store.read_metadata(symbol=self._symbol)

    if modify_timeseries is not None and not are_equals(modify_timeseries, self.base_ts.data):
        raise ConcurrentModificationException()
    self._do_write = False
manahl/arctic
[ 2924, 574, 2924, 89, 1432906650 ]
def write(self, symbol, data, prune_previous_version=True, metadata=None, **kwargs):
    """
    Records a write request to be actioned on context exit.

    Takes exactly the same parameters as the regular library write call.
    """
    if data is not None:
        # We only write data if existing data is None or the Timeseries
        # data has changed or metadata has changed
        if self.base_ts.data is None or not are_equals(data, self.base_ts.data) \
                or metadata != self.base_ts.metadata:
            self._do_write = True
    self._write = partial(self._version_store.write, symbol, data,
                          prune_previous_version=prune_previous_version,
                          metadata=metadata, **kwargs)
manahl/arctic
[ 2924, 574, 2924, 89, 1432906650 ]
def __init__(self):
    super(StylingExample, self).__init__('gui/styling.xml')

    self.styles = ['default'] + list(STYLES.keys())
    for name, style in list(STYLES.items()):
        pychan.manager.addStyle(name, style)

    pychan.loadFonts("fonts/samanata.xml")
fifengine/fifengine-demos
[ 5, 6, 5, 2, 1457868618 ]
def stop(self):
    super(StylingExample, self).stop()
    if self.styledCredits:
        self.styledCredits.hide()
        self.styledCredits = None
fifengine/fifengine-demos
[ 5, 6, 5, 2, 1457868618 ]
def process(*args, **kwargs):
    """Runs the decorated function in a concurrent process,
    taking care of the result and error management.

    Decorated functions will return a concurrent.futures.Future object
    once called.

    The timeout parameter will set a maximum execution time
    for the decorated function. If the execution exceeds the timeout,
    the process will be stopped and the Future will raise TimeoutError.

    The name parameter will set the process name.

    The daemon parameter controls the underlying process daemon flag.
    Default is True.

    The context parameter allows to provide the multiprocessing.context
    object used for starting the process.
    """
    timeout = kwargs.get('timeout')
    name = kwargs.get('name')
    daemon = kwargs.get('daemon', True)
    mp_context = kwargs.get('context')

    # decorator without parameters
    if not kwargs and len(args) == 1 and callable(args[0]):
        return _process_wrapper(args[0], timeout, name, daemon, multiprocessing)

    # decorator with parameters
    _validate_parameters(timeout, name, daemon, mp_context)
    mp_context = mp_context if mp_context is not None else multiprocessing

    # without @pie syntax
    if len(args) == 1 and callable(args[0]):
        return _process_wrapper(args[0], timeout, name, daemon, multiprocessing)

    # with @pie syntax
    def decorating_function(function):
        return _process_wrapper(function, timeout, name, daemon, mp_context)

    return decorating_function
noxdafox/pebble
[ 407, 47, 407, 3, 1381902977 ]
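A hedged usage sketch of the decorator above. The import path is an assumption (recent pebble releases expose it as pebble.concurrent.process); the decorated call returns a future whose result() raises TimeoutError if the subprocess exceeds the timeout, as described in the docstring.

from concurrent.futures import TimeoutError

from pebble import concurrent  # assumed import path for the decorator above


@concurrent.process(timeout=5)
def slow_square(x):
    # Runs in a separate process; the function must be importable by the child.
    return x * x


if __name__ == '__main__':
    future = slow_square(12)
    try:
        print(future.result())  # -> 144
    except TimeoutError:
        print('slow_square exceeded the 5 second timeout')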
def wrapper(*args, **kwargs):
    future = ProcessFuture()
    reader, writer = mp_context.Pipe(duplex=False)

    if isinstance(function, types.FunctionType) and start_method != 'fork':
        target = _trampoline
        args = [function.__qualname__, function.__module__] + list(args)
    else:
        target = function

    worker = launch_process(
        name, _function_handler, daemon, mp_context,
        target, args, kwargs, (reader, writer))

    writer.close()

    future.set_running_or_notify_cancel()

    launch_thread(name, _worker_handler, True, future, worker, reader, timeout)

    return future
noxdafox/pebble
[ 407, 47, 407, 3, 1381902977 ]
def _worker_handler(future, worker, pipe, timeout):
    """Worker lifecycle manager.

    Waits for the worker to perform its task, collects the result,
    runs the callback and cleans up the process.
    """
    result = _get_result(future, pipe, timeout)

    if isinstance(result, BaseException):
        if isinstance(result, ProcessExpired):
            result.exitcode = worker.exitcode
        if not isinstance(result, CancelledError):
            future.set_exception(result)
    else:
        future.set_result(result)

    if worker.is_alive():
        stop_process(worker)
noxdafox/pebble
[ 407, 47, 407, 3, 1381902977 ]
def _get_result(future, pipe, timeout):
    """Waits for result and handles communication errors."""
    counter = count(step=SLEEP_UNIT)

    try:
        while not pipe.poll(SLEEP_UNIT):
            if timeout is not None and next(counter) >= timeout:
                return TimeoutError('Task Timeout', timeout)
            elif future.cancelled():
                return CancelledError()

        return pipe.recv()
    except (EOFError, OSError):
        return ProcessExpired('Abnormal termination')
    except Exception as error:
        return error
noxdafox/pebble
[ 407, 47, 407, 3, 1381902977 ]
def _register_function(function):
    _registered_functions[function.__qualname__] = function

    return function
noxdafox/pebble
[ 407, 47, 407, 3, 1381902977 ]
def __init__(self) -> None:
    super().__init__()
    MimeTypeDatabase.addMimeType(
        MimeType(
            name = "application/x-cura-compressed-gcode-file",
            comment = "Cura Compressed G-code File",
            suffixes = ["gcode.gz"]
        )
    )

    self._supported_extensions = [".gcode.gz"]
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def debug(*args):
    try:
        st = inspect.stack()[1]
        funcName = st[3]
        funcCallStr = st[4]

        varnames = re.search('debug\((.*)\)', funcCallStr[0])
        varnames = varnames.groups()[0].split(',')

        for n, v in zip(varnames, args):
            v_str = str(v)
            v_str = "`%s`" % v_str if v_str.count('\n') == 0 else v_str
            print(config['fmt'] % (n.strip()) + config['sep'], v_str,
                  end=config['end'], file=config['file'])
    except Exception as err:
        raise err
NTMatBoiseState/FiberFit
[ 4, 7, 4, 2, 1466825204 ]
def __init__(self, parent=None):
    super(HPasteCollectionWidget.__HPasteCollectionWidget, self).__init__(
        parent, metadataExposedKeys=('raw_url', 'nettype'))
    for x in range(1, 5):
        self.ui.mainView.horizontalHeader().hideSection(x)
    self.__nepane = None
    self.__netType = ''

    self.__nettypeFilter = QSortFilterProxyModel(self)
    self.__nettypeFilter.setFilterKeyColumn(4)
    self.__nettypeFilter.setFilterRegExp(
        QRegExp("*", Qt.CaseInsensitive, QRegExp.Wildcard))
    self.appendFilter(self.__nettypeFilter)

    self.accepted.connect(self.doOnAccept)

    self.__insideAuthCallback = False

    # self.setProperty("houdiniStyle", True)
    ss = "QTableView{border : 0px solid; gridline-color: rgb(48,48,48)}"
    ss += "QHeaderView::section{border-style: none; border-bottom: 0px; border-right: 0px;}"
    self.setStyleSheet(ss)

    self.__savedNetworkViewPos = None
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def doOnAccept(self, item):
    if item is None:
        return
    try:
        try:  # >h16
            hou.clearAllSelected()
        except:  # <=h15.5
            hou.node("/obj").setSelected(False, clear_all_selected=True)

        hpaste.stringToNodes(item.content(), ne=self.__nepane,
                             override_network_position=self.__savedNetworkViewPos)
    except RuntimeWarning as e:
        log('Warnings encountered during load:\n%s' % str(e), 2)
    except Exception as e:
        hou.ui.displayMessage("could not paste: %s" % str(e),
                              severity=hou.severityType.Warning)
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _changeAccess(self, index):
    item = index.internalPointer()
    text, good = QInputDialog.getItem(
        None, 'modify item access', 'choose new access type:',
        ['private', 'public'],
        current=item.access() == CollectionItem.AccessType.public,
        editable=False)
    if not good:
        return
    newaccess = (CollectionItem.AccessType.public if text == 'public'
                 else CollectionItem.AccessType.private)
    if newaccess == item.access():
        return
    item.setAccess(newaccess)
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _itemInfo(self, index):
    item = index.internalPointer()
    accesstext = ('public' if item.access() == CollectionItem.AccessType.public
                  else 'private')
    readonlytext = 'readonly' if item.readonly() else 'editable'
    info = 'name: %s\n%s\naccess: %s\n%s\n\ncollection id: %s\n\nmetadata:\n' % (
        item.name(), item.description(), accesstext, readonlytext, item.id())
    info += '\n'.join(('%s : %s' % (key, item.metadata()[key])
                       for key in item.metadata()))
    QMessageBox.information(self, 'item information', info)
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _removeIcon(self, index):
    ok = QMessageBox.warning(
        self, 'sure?',
        'confirm removing Icon. This operation can not be undone.',
        QMessageBox.Ok | QMessageBox.Cancel) == QMessageBox.Ok
    if ok:
        super(HPasteCollectionWidget.__HPasteCollectionWidget, self)._removeIcon(index)
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _authCallback(self, callbackinfo):
    auth, public, action = callbackinfo
    if self.__insideAuthCallback:
        return  # prevent looping
    self.__insideAuthCallback = True
    try:
        if action == 0 or (action == 2 and not auth['enabled']):
            good = self.removeCollection(auth['user'])
            if not good:
                # means something went wrong during removal attempt -
                # probably async collection syncing problem. Try later
                if public:
                    GithubAuthorizator.setPublicCollsctionEnabled(auth['user'], True)
                else:
                    GithubAuthorizator.setAuthorizationEnabled(auth['user'], True)
        elif action == 1 or (action == 2 and auth['enabled']):
            if public:
                # TODO: reuse some token for public access
                self.addCollection(GithubCollection(auth['user'], public=True),
                                   do_async=True)
            else:
                self.addCollection(GithubCollection(auth['token']), do_async=True)
    except CollectionSyncError as e:
        QMessageBox.critical(self, 'something went wrong!',
                             'could not add/remove collection: %s' % str(e))
    finally:
        self.__insideAuthCallback = False
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def __init__(self, parent):
    if HPasteCollectionWidget.__instance is None:
        HPasteCollectionWidget.__instance = \
            HPasteCollectionWidget.__HPasteCollectionWidget(parent)
        try:
            auths = []
            if True:
                auths = list(GithubAuthorizator.listAuthorizations())
                ## test
                # todel = []
                # for auth in auths:
                #     if not GithubAuthorizator.testAuthorization(auth):
                #         if not GithubAuthorizator.newAuthorization(auth):
                #             todel.append(auth)
                # for d in todel:
                #     auths.remove(d)
            # For now don't force people to have their own collections
            while False and len(auths) == 0:
                auths = list(GithubAuthorizator.listAuthorizations())
                if len(auths) == 0:
                    if GithubAuthorizator.newAuthorization():
                        continue
                    else:
                        raise RuntimeError("No collections")
                # test
                todel = []
                for auth in auths:
                    if not GithubAuthorizator.testAuthorization(auth):
                        if not GithubAuthorizator.newAuthorization(auth):
                            todel.append(auth)
                for d in todel:
                    auths.remove(d)
        except Exception as e:
            hou.ui.displayMessage('Something went wrong.\n%s' % str(e))
            HPasteCollectionWidget.__instance = None
            raise

        for auth in auths:
            if auth['enabled']:
                HPasteCollectionWidget.__instance.addCollection(
                    GithubCollection(auth['token']), do_async=True)

        # now public collections
        cols = GithubAuthorizator.listPublicCollections()
        for col in cols:
            if not col['enabled']:
                continue
            try:
                # TODO: test if collection works
                ptkn = None
                if len(auths) > 0:
                    import random
                    ptkn = random.sample(auths, 1)[0]['token']
                HPasteCollectionWidget.__instance.addCollection(
                    GithubCollection(col['user'], public=True,
                                     token_for_public_access=ptkn),
                    do_async=True)
            except Exception as e:
                msg = ''
                if isinstance(e, error.HTTPError):
                    msg = 'code %d. %s' % (e.code, e.reason)
                elif isinstance(e, error.URLError):
                    msg = e.reason
                else:
                    msg = str(e)
                hou.ui.displayMessage('unable to load public collection %s: %s'
                                      % (col['user'], msg))

        # set callback
        GithubAuthorizator.registerCollectionChangedCallback(
            (HPasteCollectionWidget.__instance,
             HPasteCollectionWidget.__HPasteCollectionWidget._authCallback))
    elif parent is not HPasteCollectionWidget.__instance.parent():
        log("reparenting", 0)
        HPasteCollectionWidget.__instance.setParent(parent)
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _hasInstance(cls): return cls.__instance is not None
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def _killInstance(cls):
    # TODO: TO BE RETHOUGHT LATER !! THIS GUY SHOULD GO AWAY
    # remove callback, it holds a reference to us
    GithubAuthorizator.unregisterCollectionChangedCallback(
        (cls.__instance,
         HPasteCollectionWidget.__HPasteCollectionWidget._authCallback))
    # widget has parent, so it won't be deleted unless we explicitly tell it to
    cls.__instance.deleteLater()
    cls.__instance = None
pedohorse/hpaste
[ 79, 18, 79, 1, 1503556224 ]
def __init__(self, **kwargs):
    self.element = kwargs.get('element', None)
    self.families = kwargs.get('families', [])
    self.memories = kwargs.get('memories', [])
    self.algos = kwargs.get('algos', [])
    self.debugs = kwargs.get('debugs', [])
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def __init__(self, file_or_path): """! @brief Constructor.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def pdsc(self):
    """! @brief Accessor for the ElementTree instance for the pack's PDSC file."""
    return self._pdsc
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def devices(self):
    """! @brief A list of CmsisPackDevice objects for every part number defined in the pack."""
    return self._devices
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _parse_devices(self, parent):
    # Extract device description elements we care about.
    newState = _DeviceInfo(element=parent)
    children = []
    for elem in parent:
        if elem.tag == 'memory':
            newState.memories.append(elem)
        elif elem.tag == 'algorithm':
            newState.algos.append(elem)
        elif elem.tag == 'debug':
            newState.debugs.append(elem)
        # Save any elements that we will recurse into.
        elif elem.tag in ('subFamily', 'device', 'variant'):
            children.append(elem)

    # Push the new device description state onto the stack.
    self._state_stack.append(newState)
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _extract_families(self):
    families = []
    for state in self._state_stack:
        elem = state.element
        if elem.tag == 'family':
            families += [elem.attrib['Dvendor'], elem.attrib['Dfamily']]
        elif elem.tag == 'subFamily':
            families += [elem.attrib['DsubFamily']]
    return families
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _extract_memories(self):
    def filter(map, elem):
        # 'name' takes precedence over 'id'.
        if 'name' in elem.attrib:
            name = elem.attrib['name']
        elif 'id' in elem.attrib:
            name = elem.attrib['id']
        else:
            # Neither option for memory name was specified, so use the
            # address range.
            try:
                start = int(elem.attrib['start'], base=0)
                size = int(elem.attrib['size'], base=0)
            except (KeyError, ValueError):
                LOG.warning("memory region missing address")
                return
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _extract_algos(self):
    def filter(map, elem):
        # We only support Keil FLM style flash algorithms (for now).
        if ('style' in elem.attrib) and (elem.attrib['style'] != 'Keil'):
            LOG.debug("skipping non-Keil flash algorithm")
            return None, None
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _extract_debugs(self):
    def filter(map, elem):
        if 'Pname' in elem.attrib:
            name = elem.attrib['Pname']
            unit = elem.attrib.get('Punit', 0)
            name += str(unit)
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def get_file(self, filename): """! @brief Return file-like object for a file within the pack.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _get_bool_attribute(elem, name, default=False): """! @brief Extract an XML attribute with a boolean value.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def __init__(self, pack, device_info):
    """! @brief Constructor.
    @param self
    @param pack The CmsisPack object that contains this device.
    @param device_info A _DeviceInfo object with the XML elements that
        describe this device.
    """
    self._pack = pack
    self._info = device_info
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _build_memory_regions(self): """! @brief Creates memory region instances for the device.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _build_flash_regions(self): """! @brief Converts ROM memory regions to flash regions.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def _find_matching_algo(self, region):
    """! @brief Searches for a flash algo covering the region's address range."""
    for algo in self._info.algos:
        # Both start and size are required attributes.
        algoStart = int(algo.attrib['start'], base=0)
        algoSize = int(algo.attrib['size'], base=0)
        algoEnd = algoStart + algoSize - 1
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def pack(self):
    """! @brief The CmsisPack object that defines this device."""
    return self._pack
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def part_number(self): """! @brief Part number for this device.
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def vendor(self):
    """! @brief Vendor or manufacturer name."""
    return self._info.families[0].split(':')[0]
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def families(self):
    """! @brief List of families the device belongs to, ordered most generic to least."""
    return [f for f in self._info.families[1:]]
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def memory_map(self):
    """! @brief MemoryMap object."""
    # Lazily construct the memory map.
    if self._memory_map is None:
        self._build_memory_regions()
        self._build_flash_regions()
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def svd(self):
    """! @brief File-like object for the device's SVD file.
    @todo Support multiple cores.
    """
    try:
        svdPath = self._info.debugs[0].attrib['svd']
        return self._pack.get_file(svdPath)
    except (KeyError, IndexError):
        return None
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def default_reset_type(self):
    """! @brief One of the Target.ResetType enums.
    @todo Support multiple cores.
    """
    try:
        resetSequence = self._info.debugs[0].attrib['defaultResetSequence']
        if resetSequence == 'ResetHardware':
            return Target.ResetType.HW
        elif resetSequence == 'ResetSystem':
            return Target.ResetType.SW_SYSRESETREQ
        elif resetSequence == 'ResetProcessor':
            return Target.ResetType.SW_VECTRESET
        else:
            return Target.ResetType.SW
    except (KeyError, IndexError):
        return Target.ResetType.SW
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def __repr__(self): return "<%s@%x %s>" % (self.__class__.__name__, id(self), self.part_number)
mbedmicro/pyOCD
[ 883, 424, 883, 228, 1382710205 ]
def init(self): self.target = HTTP()
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    if sys.platform != 'darwin':
        text = json.dumps(self.status_update)

        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                      f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)

    yield Query(self.url(self.name + "/"))
    yield Query(self.url(f'need-normalization/../{self.name}/'))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def init(self): self.target = HTTP()
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    if sys.platform != 'darwin':
        text = json.dumps(self.status_update)

        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                      f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)

    yield Query(self.url(self.name + "/"))
    yield Query(self.url(f'need-normalization/../{self.name}/'))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def init(self): self.target = HTTP(namespace="alt-namespace")
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    if sys.platform != 'darwin':
        text = json.dumps(self.status_update)

        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                      f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)

    yield Query(self.url(self.name + "/"))
    yield Query(self.url(f'need-normalization/../{self.name}/'))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def init(self): self.target = HTTP()
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    text = json.dumps(self.status_update)

    update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                  f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
    subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
    # If you run these tests individually, the time between running kubestatus
    # and the ingress resource actually getting updated is longer than the
    # time spent waiting for resources to be ready, so this test will fail (most of the time)
    time.sleep(1)

    yield Query(self.url(self.name + "/"))
    yield Query(self.url(self.name + "-nested/"))
    yield Query(self.url(f'need-normalization/../{self.name}/'))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def init(self):
    self.target = HTTP()
    self.target1 = HTTP(name="target1", namespace="same-ingress-1")
    self.target2 = HTTP(name="target2", namespace="same-ingress-2")
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    if sys.platform != 'darwin':
        text = json.dumps(self.status_update)

        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                      f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)

    yield Query(self.url(self.name + "-target1/"))
    yield Query(self.url(self.name + "-target2/"))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def init(self):
    self.target = HTTP()

    if not is_ingress_class_compatible():
        self.xfail = 'IngressClass is not supported in this cluster'
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def queries(self):
    if sys.platform != 'darwin':
        text = json.dumps(self.status_update)

        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f',
                      f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)

    yield Query(self.url(self.name + "/"))
    yield Query(self.url(f'need-normalization/../{self.name}/'))
datawire/ambassador
[ 4023, 658, 4023, 431, 1490901772 ]
def __init__(self, base_path):
    super(SequentialRecipe, self).__init__(base_path)
    self.guid = uuid.uuid4().hex
    self.owned_paths = {}
wglass/zoonado
[ 11, 7, 11, 3, 1456766949 ]
def determine_znode_label(self, sibling): return sibling.rsplit("-", 2)[0]
wglass/zoonado
[ 11, 7, 11, 3, 1456766949 ]
def create_unique_znode(self, znode_label, data=None):
    path = self.sibling_path(znode_label + "-" + self.guid + "-")
    try:
        created_path = yield self.client.create(
            path, data=data, ephemeral=True, sequential=True
        )
    except exc.NoNode:
        yield self.ensure_path()
        created_path = yield self.client.create(
            path, data=data, ephemeral=True, sequential=True
        )

    self.owned_paths[znode_label] = created_path
wglass/zoonado
[ 11, 7, 11, 3, 1456766949 ]
def delete_unique_znode(self, znode_label):
    try:
        yield self.client.delete(self.owned_paths[znode_label])
    except exc.NoNode:
        pass
wglass/zoonado
[ 11, 7, 11, 3, 1456766949 ]
def analyze_siblings(self):
    siblings = yield self.client.get_children(self.base_path)
    siblings = [name for name in siblings if sequential_re.match(name)]
    siblings.sort(key=self.sequence_number)

    owned_positions = {}
    for index, path in enumerate(siblings):
        if self.guid in path:
            owned_positions[self.determine_znode_label(path)] = index

    raise gen.Return((owned_positions, siblings))
wglass/zoonado
[ 11, 7, 11, 3, 1456766949 ]
def __init__(self, mock_state='init'): self.mock_state = mock_state
GoogleCloudPlatform/gcpdiag
[ 195, 48, 195, 16, 1626709107 ]
def services(self): return AppEngineStandardApiStub('services')
GoogleCloudPlatform/gcpdiag
[ 195, 48, 195, 16, 1626709107 ]
def list(self, appsId='appsId', servicesId='servicesId'):
    self.json_dir = apis_stub.get_json_dir(appsId)
    return self
GoogleCloudPlatform/gcpdiag
[ 195, 48, 195, 16, 1626709107 ]
def from_name(cls, name):
    for operation in cls:
        if operation.value.name.lower() == name.lower():
            return operation
    else:
        raise ValueError(name)
globality-corp/microcosm-flask
[ 10, 10, 10, 2, 1457468635 ]
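A small sketch of how a from_name classmethod like the one above behaves; the Operation enum and its namedtuple value type here are hypothetical stand-ins for the richer operation definitions in microcosm-flask.

from collections import namedtuple
from enum import Enum, unique

OperationInfo = namedtuple("OperationInfo", ["name", "method"])  # hypothetical value type


@unique
class Operation(Enum):
    # Hypothetical members, for illustration only.
    Retrieve = OperationInfo("retrieve", "GET")
    Delete = OperationInfo("delete", "DELETE")

    @classmethod
    def from_name(cls, name):
        for operation in cls:
            if operation.value.name.lower() == name.lower():
                return operation
        else:
            raise ValueError(name)


Operation.from_name("RETRIEVE")   # -> Operation.Retrieve (lookup is case-insensitive)
# Operation.from_name("unknown") would raise ValueError("unknown")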
def setUp(self):
    super(GraphModuleTest, self).setUp()
    tf.set_random_seed(0)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def _get_shaped_input_graph(self):
    return graphs.GraphsTuple(
        nodes=tf.zeros([3, 4, 5, 11], dtype=tf.float32),
        edges=tf.zeros([5, 4, 5, 12], dtype=tf.float32),
        globals=tf.zeros([2, 4, 5, 13], dtype=tf.float32),
        receivers=tf.range(5, dtype=tf.int32) // 3,
        senders=tf.range(5, dtype=tf.int32) % 3,
        n_node=tf.constant([2, 1], dtype=tf.int32),
        n_edge=tf.constant([3, 2], dtype=tf.int32),
    )
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values(self, broadcaster, expected):
    """Test the broadcasted output value."""
    input_graph = utils_tf.data_dicts_to_graphs_tuple(
        [SMALL_GRAPH_1, SMALL_GRAPH_2])
    broadcasted = broadcaster(input_graph)
    with tf.Session() as sess:
        broadcasted_out = sess.run(broadcasted)
    self.assertNDArrayNear(
        np.array(expected, dtype=np.float32), broadcasted_out, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values_larger_rank(self, broadcaster, expected):
    """Test the broadcasted output value."""
    input_graph = utils_tf.data_dicts_to_graphs_tuple(
        [SMALL_GRAPH_1, SMALL_GRAPH_2])
    input_graph = input_graph.map(
        lambda v: tf.reshape(v, [v.get_shape().as_list()[0]] + [2, -1]))
    broadcasted = broadcaster(input_graph)
    with tf.Session() as sess:
        broadcasted_out = sess.run(broadcasted)
    self.assertNDArrayNear(
        np.reshape(np.array(expected, dtype=np.float32),
                   [len(expected)] + [2, -1]),
        broadcasted_out, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_missing_field_raises_exception(self, broadcaster, none_fields):
    """Test that an error is raised if a required field is `None`."""
    input_graph = self._get_input_graph(none_fields)
    with self.assertRaisesRegexp(
        ValueError, "field cannot be None when broadcasting"):
        broadcaster(input_graph)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values(self, reducer, expected_values):
    input_values_np = np.array([[0.1, -0.1], [0.2, -0.2], [0.3, -0.3],
                                [0.4, -0.4], [0.5, -0.5], [0.6, -0.6],
                                [0.7, -0.7], [0.8, -0.8], [0.9, -0.9],
                                [1., -1.]], dtype=np.float32)
    input_indices_np = np.array([1, 2, 2, 3, 3, 3, 4, 4, 5, 4], dtype=np.int32)
    num_groups_np = np.array(7, dtype=np.int32)

    input_indices = tf.constant(input_indices_np, dtype=tf.int32)
    input_values = tf.constant(input_values_np, dtype=tf.float32)
    num_groups = tf.constant(num_groups_np, dtype=tf.int32)

    reduced = reducer(input_values, input_indices, num_groups)

    with tf.Session() as sess:
        reduced_out = sess.run(reduced)

    self.assertNDArrayNear(
        np.array(expected_values, dtype=np.float32), reduced_out, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values(self, aggregator, expected):
    input_graph = self._get_input_graph()
    aggregated = aggregator(input_graph)
    with tf.Session() as sess:
        aggregated_out = sess.run(aggregated)
    self.assertNDArrayNear(
        np.array(expected, dtype=np.float32), aggregated_out, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values_larger_rank(self, aggregator, expected):
    input_graph = self._get_input_graph()
    input_graph = input_graph.map(
        lambda v: tf.reshape(v, [v.get_shape().as_list()[0]] + [2, -1]))
    aggregated = aggregator(input_graph)
    with tf.Session() as sess:
        aggregated_out = sess.run(aggregated)
    self.assertNDArrayNear(
        np.reshape(np.array(expected, dtype=np.float32),
                   [len(expected)] + [2, -1]),
        aggregated_out, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_missing_field_raises_exception(self, constructor, none_field):
    """Tests that the aggregator fails if a required field is missing."""
    input_graph = self._get_input_graph([none_field])
    with self.assertRaisesRegexp(ValueError, none_field):
        constructor(tf.unsorted_segment_sum)(input_graph)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_unused_field_can_be_none(self, constructor, none_fields):
    """Tests that the aggregator accepts graphs whose unused fields are `None`."""
    input_graph = self._get_input_graph(none_fields)
    constructor(tf.unsorted_segment_sum)(input_graph)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def setUp(self):
    super(EdgeBlockTest, self).setUp()
    self._scale = 10.
    self._edge_model_fn = lambda: lambda features: features * self._scale
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_output_values(
    self, use_edges, use_receiver_nodes, use_sender_nodes, use_globals):
    """Compares the output of an EdgeBlock to an explicit computation."""
    input_graph = self._get_input_graph()
    edge_block = blocks.EdgeBlock(
        edge_model_fn=self._edge_model_fn,
        use_edges=use_edges,
        use_receiver_nodes=use_receiver_nodes,
        use_sender_nodes=use_sender_nodes,
        use_globals=use_globals)
    output_graph = edge_block(input_graph)

    model_inputs = []
    if use_edges:
        model_inputs.append(input_graph.edges)
    if use_receiver_nodes:
        model_inputs.append(blocks.broadcast_receiver_nodes_to_edges(input_graph))
    if use_sender_nodes:
        model_inputs.append(blocks.broadcast_sender_nodes_to_edges(input_graph))
    if use_globals:
        model_inputs.append(blocks.broadcast_globals_to_edges(input_graph))

    model_inputs = tf.concat(model_inputs, axis=-1)
    self.assertEqual(input_graph.nodes, output_graph.nodes)
    self.assertEqual(input_graph.globals, output_graph.globals)

    with tf.Session() as sess:
        output_graph_out, model_inputs_out = sess.run(
            (output_graph, model_inputs))

    expected_output_edges = model_inputs_out * self._scale
    self.assertNDArrayNear(
        expected_output_edges, output_graph_out.edges, err=1e-4)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_created_variables(self, use_edges, use_receiver_nodes,
                           use_sender_nodes, use_globals, expected_first_dim_w):
    """Verifies the variable names and shapes created by an EdgeBlock."""
    output_size = 10
    expected_var_shapes_dict = {
        "edge_block/mlp/linear_0/b:0": [output_size],
        "edge_block/mlp/linear_0/w:0": [expected_first_dim_w, output_size]}
    input_graph = self._get_input_graph()
    edge_block = blocks.EdgeBlock(
        edge_model_fn=functools.partial(snt.nets.MLP,
                                        output_sizes=[output_size]),
        use_edges=use_edges,
        use_receiver_nodes=use_receiver_nodes,
        use_sender_nodes=use_sender_nodes,
        use_globals=use_globals)
    edge_block(input_graph)
    variables = edge_block.get_variables()
    var_shapes_dict = {var.name: var.get_shape().as_list()
                       for var in variables}
    self.assertDictEqual(expected_var_shapes_dict, var_shapes_dict)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]
def test_missing_field_raises_exception(
    self, use_edges, use_receiver_nodes, use_sender_nodes, use_globals,
    none_fields):
    """Checks that missing a required field raises an exception."""
    input_graph = self._get_input_graph(none_fields)
    edge_block = blocks.EdgeBlock(
        edge_model_fn=self._edge_model_fn,
        use_edges=use_edges,
        use_receiver_nodes=use_receiver_nodes,
        use_sender_nodes=use_sender_nodes,
        use_globals=use_globals)
    with self.assertRaisesRegexp(ValueError, "field cannot be None"):
        edge_block(input_graph)
deepmind/graph_nets
[ 5225, 778, 5225, 5, 1535703568 ]