def ltake(n: int, xs: Iterable[T]) -> List[T]:
    return list(take(n, xs))
A non-lazy version of take.
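A minimal usage sketch, assuming `take` is the usual lazy, islice-based helper (an assumption, since its definition is not shown here):

from itertools import islice
from typing import Iterable, List, TypeVar

T = TypeVar("T")

def take(n: int, xs: Iterable[T]):
    # Assumed lazy helper: yields at most n items without materialising xs.
    return islice(xs, n)

def ltake(n: int, xs: Iterable[T]) -> List[T]:
    # Eager variant: forces the lazy take into a list.
    return list(take(n, xs))

assert ltake(3, range(10)) == [0, 1, 2]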
def guess_currency_from_address(address):
    if is_py2:
        fixer = lambda x: int(x.encode('hex'), 16)
    else:
        fixer = lambda x: x
    first_byte = fixer(b58decode_check(address)[0])
    double_first_byte = fixer(b58decode_check(address)[:2])
    hits = []
    for currency, data in crypto_data.items():
        if hasattr(data, 'get'):
            version = data.get('address_version_byte', None)
            if version is not None and version in [double_first_byte, first_byte]:
                hits.append([currency, data['name']])
    if hits:
        return hits
    raise ValueError("Unknown Currency with first byte: %s" % first_byte)
Given a crypto address, find which currency it likely belongs to. Raises a ValueError if no match is found, and raises an exception if the address is invalid.
def get_sections(self):
    try:
        obj_list = self.__dict__['sections']
        return [Section(i) for i in obj_list]
    except KeyError:
        self._lazy_load()
        obj_list = self.__dict__['sections']
        return [Section(i) for i in obj_list]
Fetch the sections field, lazy-loading it if it does not exist yet.
def rstyle(self, name):
    try:
        del self.chart_style[name]
    except KeyError:
        self.warning("Style " + name + " is not set")
    except:
        self.err("Can not remove style " + name)
Remove one style
def get_base_url(html: str) -> str:
    forms = BeautifulSoup(html, 'html.parser').find_all('form')
    if not forms:
        raise VVKBaseUrlException('Form for login not found')
    elif len(forms) > 1:
        raise VVKBaseUrlException('More than one login form found')
    login_url = forms[0].get('action')
    if not login_url:
        raise VVKBaseUrlException('No action tag in form')
    return login_url
Search for login url from VK login page
def from_path(cls, path):
    urlparts = urlparse.urlsplit(path)
    site = 'nonlocal'
    if (urlparts.scheme == '' or urlparts.scheme == 'file'):
        if os.path.isfile(urlparts.path):
            path = os.path.abspath(urlparts.path)
            path = urlparse.urljoin('file:', urllib.pathname2url(path))
            site = 'local'
    fil = File(os.path.basename(path))
    fil.PFN(path, site)
    return fil
Takes a path and returns a File object with the path as the PFN.
def get_rlz(self, rlzstr):
    mo = re.match(r'rlz-(\d+)', rlzstr)
    if not mo:
        return
    return self.realizations[int(mo.group(1))]
r""" Get a Realization instance for a string of the form 'rlz-\d+'
def plot_roc_curve(self, on, bootstrap_samples=100, ax=None, **kwargs):
    plot_col, df = self.as_dataframe(on, return_cols=True, **kwargs)
    df = filter_not_null(df, "benefit")
    df = filter_not_null(df, plot_col)
    df.benefit = df.benefit.astype(bool)
    return roc_curve_plot(df, plot_col, "benefit", bootstrap_samples, ax=ax)
Plot an ROC curve for benefit and a given variable.

Parameters
----------
on : str or function or list or dict
    See `cohort.load.as_dataframe`
bootstrap_samples : int, optional
    Number of bootstrap samples to use to compute the AUC
ax : Axes, default None
    Axes to plot on

Returns
-------
(mean_auc_score, plot) : (float, matplotlib plot)
    Returns the average AUC for the given predictor over `bootstrap_samples`
    and the associated ROC curve
def delete(self, *keys):
    key_counter = 0
    for key in map(self._encode, keys):
        if key in self.redis:
            del self.redis[key]
            key_counter += 1
        if key in self.timeouts:
            del self.timeouts[key]
    return key_counter
Emulate delete.
def write(self, request):
    if FLAGS.sc2_verbose_protocol:
        self._log(" Writing request ".center(60, "-") + "\n")
        self._log_packet(request)
    self._write(request)
Write a Request.
def add(self, lineitem):
    if lineitem['ProductName']:
        self._lineitems.append(lineitem)
    if lineitem['BlendedCost']:
        self._blended_cost += lineitem['BlendedCost']
    if lineitem['UnBlendedCost']:
        self._unblended_cost += lineitem['UnBlendedCost']
Add a line item record to this Costs object.
def map_query(self, variables=None, evidence=None, elimination_order=None):
    final_distribution = self._variable_elimination(variables, 'marginalize',
                                                    evidence=evidence,
                                                    elimination_order=elimination_order)
    argmax = np.argmax(final_distribution.values)
    assignment = final_distribution.assignment([argmax])[0]
    map_query_results = {}
    for var_assignment in assignment:
        var, value = var_assignment
        map_query_results[var] = value
    if not variables:
        return map_query_results
    else:
        return_dict = {}
        for var in variables:
            return_dict[var] = map_query_results[var]
        return return_dict
Computes the MAP Query over the variables given the evidence.

Note: When multiple variables are passed, it returns the map_query for each
of them individually.

Parameters
----------
variables: list
    list of variables over which we want to compute the max-marginal.
evidence: dict
    a dict key, value pair as {var: state_of_var_observed}
    None if no evidence
elimination_order: list
    order of variable eliminations (if nothing is provided) order is
    computed automatically

Examples
--------
>>> from pgmpy.inference import VariableElimination
>>> from pgmpy.models import BayesianModel
>>> import numpy as np
>>> import pandas as pd
>>> values = pd.DataFrame(np.random.randint(low=0, high=2, size=(1000, 5)),
...                       columns=['A', 'B', 'C', 'D', 'E'])
>>> model = BayesianModel([('A', 'B'), ('C', 'B'), ('C', 'D'), ('B', 'E')])
>>> model.fit(values)
>>> inference = VariableElimination(model)
>>> phi_query = inference.map_query(['A', 'B'])
def _get_permission_description(permission_name):
    parts = permission_name.split('_')
    parts.pop(0)
    method = parts.pop()
    resource = ('_'.join(parts)).lower()
    return 'Can %s %s' % (method.upper(), resource)
Generate a descriptive string based on the permission name.

For example: 'resource_Order_get' -> 'Can GET order'

TODO: add support for the resource name to have underscores
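A quick sanity check of the naming convention described above, using the function as defined:

# 'resource_Order_get': drop the 'resource' prefix, take 'get' as the HTTP
# method, and lowercase what remains as the resource name.
assert _get_permission_description('resource_Order_get') == 'Can GET order'
assert _get_permission_description('resource_User_post') == 'Can POST user'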
def is_mainline(self) -> bool:
    node = self
    while node.parent:
        parent = node.parent
        if not parent.variations or parent.variations[0] != node:
            return False
        node = parent
    return True
Checks if the node is in the mainline of the game.
def build_on_start(self, runnable, regime, on_start):
    on_start_code = []
    for action in on_start.actions:
        code = self.build_action(runnable, regime, action)
        for line in code:
            on_start_code += [line]
    return on_start_code
Build OnStart start handler code.

@param on_start: OnStart start handler object
@type on_start: lems.model.dynamics.OnStart

@return: Generated OnStart code
@rtype: list(string)
def mark_dead(self, proxy, _time=None):
    if proxy not in self.proxies:
        logger.warn("Proxy <%s> was not found in proxies list" % proxy)
        return
    if proxy in self.good:
        logger.debug("GOOD proxy became DEAD: <%s>" % proxy)
    else:
        logger.debug("Proxy <%s> is DEAD" % proxy)
    self.unchecked.discard(proxy)
    self.good.discard(proxy)
    self.dead.add(proxy)
    now = _time or time.time()
    state = self.proxies[proxy]
    state.backoff_time = self.backoff(state.failed_attempts)
    state.next_check = now + state.backoff_time
    state.failed_attempts += 1
Mark a proxy as dead
def calcLorenzDistance(self):
    LorenzSim = np.mean(np.array(self.Lorenz_hist)[self.ignore_periods:, :], axis=0)
    dist = np.sqrt(np.sum((100 * (LorenzSim - self.LorenzTarget)) ** 2))
    self.LorenzDistance = dist
    return dist
Returns the distance between simulated and target Lorenz points.

Parameters
----------
None

Returns
-------
dist : float
    Square root of the sum of squared differences between simulated and
    target Lorenz points.
def update_release(id, **kwargs):
    data = update_release_raw(id, **kwargs)
    if data:
        return utils.format_json(data)
Update an existing ProductRelease with new information
def loop_input(self, on_quit=None):
    self._run_script(
        self.__session, self._context.get_property(PROP_INIT_FILE)
    )
    script_file = self._context.get_property(PROP_RUN_FILE)
    if script_file:
        self._run_script(self.__session, script_file)
    else:
        self._run_loop(self.__session)
    self._stop_event.set()
    sys.stdout.write("Bye !\n")
    sys.stdout.flush()
    if on_quit is not None:
        on_quit()
Reads the standard input until the shell session is stopped.

:param on_quit: A callback method, called without arguments when the shell
                session has ended
def _init_metadata(self):
    self._mdata.update(default_mdata.get_osid_form_mdata())
    update_display_text_defaults(self._mdata['journal_comment'], self._locale_map)
    for element_name in self._mdata:
        self._mdata[element_name].update(
            {'element_id': Id(self._authority, self._namespace, element_name)})
    self._journal_comment_default = self._mdata['journal_comment']['default_string_values'][0]
    self._validation_messages = {}
Initialize OsidObjectForm metadata.
def highlight_source_at_location(source: "Source", location: "SourceLocation") -> str:
    first_line_column_offset = source.location_offset.column - 1
    body = " " * first_line_column_offset + source.body
    line_index = location.line - 1
    line_offset = source.location_offset.line - 1
    line_num = location.line + line_offset
    column_offset = first_line_column_offset if location.line == 1 else 0
    column_num = location.column + column_offset
    lines = _re_newline.split(body)
    len_lines = len(lines)

    def get_line(index: int) -> Optional[str]:
        return lines[index] if 0 <= index < len_lines else None

    return f"{source.name} ({line_num}:{column_num})\n" + print_prefixed_lines(
        [
            (f"{line_num - 1}: ", get_line(line_index - 1)),
            (f"{line_num}: ", get_line(line_index)),
            ("", " " * (column_num - 1) + "^"),
            (f"{line_num + 1}: ", get_line(line_index + 1)),
        ]
    )
Highlight source at given location. This renders a helpful description of the location of the error in the GraphQL Source document.
def _ServerActions(action, alias, servers):
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    results = []
    for server in servers:
        r = clc.v1.API.Call('post', 'Server/%sServer' % (action),
                            {'AccountAlias': alias, 'Name': server})
        if int(r['StatusCode']) == 0:
            results.append(r)
    return results
Execute the given action against the specified servers.

:param action: the server action url to exec against
:param alias: short code for a particular account. If None, the account's default alias is used
:param servers: list of server names
def query_most(num=8, kind='1'):
    return TabPost.select().where(
        (TabPost.kind == kind) & (TabPost.valid == 1)
    ).order_by(
        TabPost.view_count.desc()
    ).limit(num)
Query most viewed.
def create_osd_keyring(conn, cluster, key):
    logger = conn.logger
    path = '/var/lib/ceph/bootstrap-osd/{cluster}.keyring'.format(
        cluster=cluster,
    )
    if not conn.remote_module.path_exists(path):
        logger.warning('osd keyring does not exist yet, creating one')
        conn.remote_module.write_keyring(path, key)
Run on osd node, writes the bootstrap key if not there yet.
def remove_file(config_map, file_key):
    if file_key[0] == '/':
        file_key = file_key[1:]
    client = boto3.client(
        's3',
        aws_access_key_id=config_map['put_public_key'],
        aws_secret_access_key=config_map['put_private_key']
    )
    client.delete_object(
        Bucket=config_map['s3_bucket'],
        Key=file_key
    )
Convenience function for removing objects from AWS S3.

Added by [email protected], Apr 28, 2015.
May 25, 2017: Switched to boto3.
def new_floating_ip(self, **kwargs):
    droplet_id = kwargs.get('droplet_id')
    region = kwargs.get('region')
    if self.api_version == 2:
        if droplet_id is not None and region is not None:
            raise DoError('Only one of droplet_id and region is required to create a Floating IP. '
                          'Set one of the variables and try again.')
        elif droplet_id is None and region is None:
            raise DoError('droplet_id or region is required to create a Floating IP. '
                          'Set one of the variables and try again.')
        else:
            if droplet_id is not None:
                params = {'droplet_id': droplet_id}
            else:
                params = {'region': region}
            json = self.request('/floating_ips', params=params, method='POST')
            return json['floating_ip']
    else:
        raise DoError(v2_api_required_str)
Creates a Floating IP and assigns it to a Droplet or reserves it to a region.
def email(self, value):
    email = self.wait.until(expected.visibility_of_element_located(
        self._email_input_locator))
    email.clear()
    email.send_keys(value)
Set the value of the email field.
def _indent(stream, indent, *msgs):
    for x in range(0, indent):
        stream.write(" ")
    for x in msgs:
        stream.write(x.encode("ascii", "backslashreplace").decode("ascii"))
    stream.write("\n")
Write a message to a text stream, with indentation. Also ensures that the output encoding of the messages is safe for writing.
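A small demonstration of the indentation and the ASCII-safe escaping, using sys.stdout as the stream:

import sys

# Two levels of indentation; the accented character is backslash-escaped
# rather than risking a UnicodeEncodeError on a non-UTF-8 stream.
_indent(sys.stdout, 2, "h\u00e9llo")  # prints "  h\xe9llo"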
def clean(self, *args, **kwargs):
    if self.status != LINK_STATUS.get('planned'):
        if self.interface_a is None or self.interface_b is None:
            raise ValidationError(_('fields "from interface" and "to interface" are mandatory in this case'))
        if (self.interface_a_id == self.interface_b_id) or (self.interface_a == self.interface_b):
            msg = _('link cannot have same "from interface" and "to interface: %s"') % self.interface_a
            raise ValidationError(msg)
    if self.status == LINK_STATUS.get('planned') and (self.node_a is None or self.node_b is None):
        raise ValidationError(_('fields "from node" and "to node" are mandatory for planned links'))
    if self.type != LINK_TYPES.get('radio') and (self.dbm is not None or self.noise is not None):
        raise ValidationError(_('Only links of type "radio" can contain "dbm" and "noise" information'))
Custom validation:
1. interface_a and interface_b mandatory except for planned links
2. planned links should have at least node_a and node_b filled in
3. dbm and noise fields can be filled only for radio links
4. interface_a and interface_b must differ
5. interface a and b type must match
def image_data(verbose=False):
    global _IMAGE_DATA
    if _IMAGE_DATA is None:
        if verbose:
            logger.info("--- Downloading image.")
        with contextlib.closing(urllib.request.urlopen(IMAGE_URL)) as infile:
            _IMAGE_DATA = infile.read()
    return _IMAGE_DATA
Get the raw encoded image data, downloading it if necessary.
def create_client_for_kernel(self):
    connect_output = KernelConnectionDialog.get_connection_parameters(self)
    (connection_file, hostname, sshkey, password, ok) = connect_output
    if not ok:
        return
    else:
        self._create_client_for_kernel(connection_file, hostname, sshkey, password)
Create a client connected to an existing kernel
def save(self, *args, **kwargs):
    if self.descriptor_schema:
        try:
            validate_schema(self.descriptor, self.descriptor_schema.schema)
            self.descriptor_dirty = False
        except DirtyError:
            self.descriptor_dirty = True
    elif self.descriptor and self.descriptor != {}:
        raise ValueError("`descriptor_schema` must be defined if `descriptor` is given")
    super().save()
Perform descriptor validation and save object.
def generate_new_address(coin_symbol='btc', api_key=None):
    assert api_key, 'api_key required'
    assert is_valid_coin_symbol(coin_symbol)
    if coin_symbol not in ('btc-testnet', 'bcy'):
        WARNING_MSG = [
            'Generating private key details server-side.',
            'You really should do this client-side.',
            'See https://github.com/sbuss/bitmerchant for an example.',
        ]
        print(' '.join(WARNING_MSG))
    url = make_url(coin_symbol, 'addrs')
    params = {'token': api_key}
    r = requests.post(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
    return get_valid_json(r)
Takes a coin_symbol and returns a new address with its public and private keys.

This method will create the address server side, which is inherently insecure
and should only be used for testing.

If you want to create a secure address client-side using python, please check
out bitmerchant:

    from bitmerchant.wallet import Wallet
    Wallet.new_random_wallet()

https://github.com/sbuss/bitmerchant
def H11(self):
    "Difference entropy."
    return -(self.p_xminusy * np.log(self.p_xminusy + self.eps)).sum(1)
Difference entropy.
def commit(message, add=False, quiet=False):
    if add:
        run('add .')
    try:
        stdout = run('commit -m %r' % str(message), quiet=quiet)
    except GitError as e:
        s = str(e)
        if 'nothing to commit' in s or 'no changes added to commit' in s:
            raise EmptyCommit(*e.inits())
        raise
    return re.split(r'[ \]]', stdout.splitlines()[0])[1]
Commit with that message and return the SHA1 of the commit.

If add is truthy then "$ git add ." is run first.
def __set_frame_shift_status(self):
    if 'fs' in self.hgvs_original:
        self.is_frame_shift = True
        self.is_non_silent = True
    elif re.search(r'[A-Z]\d+[A-Z]+\*', self.hgvs_original):
        self.is_frame_shift = True
        self.is_non_silent = True
    else:
        self.is_frame_shift = False
Check for frame shift and set the self.is_frame_shift flag.
def parse(self, text):
    if isinstance(text, bytes):
        text = text.decode("ascii")
    text = re.sub(r"\s+", " ", unidecode(text))
    return self.communicate(text + "\n")
Call the server and return the raw results.
def brighten(color, brightness):
    h, s, v = rgb_to_hsv(*map(down_scale, color))
    return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))
Adds or subtracts value to a color.
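brighten depends on two helpers that are not shown in this snippet; a plausible, self-contained sketch, assuming down_scale and up_scale map 8-bit channels to the unit interval and back:

from colorsys import rgb_to_hsv, hsv_to_rgb

def down_scale(x):
    # Assumed helper: 0-255 channel -> 0.0-1.0
    return x / 255.0

def up_scale(x):
    # Assumed helper: 0.0-1.0 -> 0-255 channel
    return int(round(x * 255))

def brighten(color, brightness):
    h, s, v = rgb_to_hsv(*map(down_scale, color))
    return tuple(map(up_scale, hsv_to_rgb(h, s, v + down_scale(brightness))))

print(brighten((100, 50, 50), 40))   # a lighter shade of the same hue
print(brighten((100, 50, 50), -40))  # a darker shade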
def _CamelCaseToSnakeCase(path_name):
    result = []
    for c in path_name:
        if c == '_':
            raise ParseError('Fail to parse FieldMask: Path name '
                             '{0} must not contain "_"s.'.format(path_name))
        if c.isupper():
            result += '_'
            result += c.lower()
        else:
            result += c
    return ''.join(result)
Converts a field name from camelCase to snake_case.
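A usage sketch; the ParseError class here is a stand-in for the one raised by the real module:

class ParseError(Exception):
    # Stand-in for the ParseError used by the original module.
    pass

assert _CamelCaseToSnakeCase('fooBarBaz') == 'foo_bar_baz'
try:
    _CamelCaseToSnakeCase('foo_bar')
except ParseError as e:
    print(e)  # path names that already contain underscores are rejected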
def add_ref(self, ref, attr=None):
    self.session.add_ref(self, ref, attr)
    return self.fetch()
Add reference to resource.

:param ref: reference to add
:type ref: Resource
:rtype: Resource
def _setup(self):
    if isinstance(self.module, torch.nn.RNNBase):
        self.module.flatten_parameters = noop
    for name_w in self.weights:
        w = getattr(self.module, name_w)
        del self.module._parameters[name_w]
        self.module.register_parameter(name_w + '_raw', nn.Parameter(w.data))
For each string defined in self.weights, the corresponding attribute in the
wrapped module is referenced, then deleted, and subsequently registered as a
new parameter with a slightly modified name.

Args:
    None

Returns:
    None
def _parse_datetime_string(val):
    lenval = len(val)
    fmt = {19: "%Y-%m-%d %H:%M:%S", 10: "%Y-%m-%d"}.get(lenval)
    if fmt is None:
        raise exc.InvalidDateTimeString("The supplied value '%s' does not "
                                        "match either of the formats 'YYYY-MM-DD HH:MM:SS' or "
                                        "'YYYY-MM-DD'." % val)
    return datetime.datetime.strptime(val, fmt)
Attempts to parse a string representation of a date or datetime value, and returns a datetime if successful. If not, a InvalidDateTimeString exception will be raised.
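A usage sketch; only the two fixed-width formats are accepted, and a value with the wrong length raises the InvalidDateTimeString exception from the (assumed) exc module:

print(_parse_datetime_string('2019-03-01 12:30:00'))  # 2019-03-01 12:30:00
print(_parse_datetime_string('2019-03-01'))           # 2019-03-01 00:00:00
# _parse_datetime_string('2019-3-1')  # length 8 -> raises InvalidDateTimeString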
def createWidgets(self):
    from gooey.gui.components import widgets
    return [getattr(widgets, item['type'])(self, item)
            for item in getin(self.widgetInfo, ['data', 'widgets'], [])]
Instantiate the Gooey Widgets that are used within the RadioGroup
def GetHandlers(self):
    handlers = []
    if self.ssl_context:
        handlers.append(urllib2.HTTPSHandler(context=self.ssl_context))
    if self.proxies:
        handlers.append(urllib2.ProxyHandler(self.proxies))
    return handlers
Retrieve the appropriate urllib2 handlers for the given configuration.

Returns:
    A list of urllib2.BaseHandler subclasses to be used when making calls
    with proxy.
def find_stacks(node, strict=False):
    fso = FindStackOps()
    fso.visit(node)
    AnnotateStacks(fso.push_pop_pairs, strict).visit(node)
    return node
Find pushes and pops to the stack and annotate them as such.

Args:
    node: An AST node that might contain stack pushes and pops.
    strict: A boolean indicating whether to stringently test whether each
        push and pop are matched. This is not always possible when taking
        higher-order derivatives of code generated in split-motion.

Returns:
    node: The node passed in, but with pushes and pops annotated in AST nodes.
def requires(self):
    value = self._schema.get("requires", {})
    if not isinstance(value, (basestring, dict)):
        raise SchemaError(
            "requires value {0!r} is neither a string nor an"
            " object".format(value))
    return value
Additional object or objects required by this object.
def pre_request(self, response, exc=None):
    if response.request.method == 'CONNECT':
        self.start_response(
            '200 Connection established',
            [('content-length', '0')]
        )
        self.future.set_result([b''])
        upstream = response.connection
        dostream = self.connection
        dostream.upgrade(partial(StreamTunnel.create, upstream))
        upstream.upgrade(partial(StreamTunnel.create, dostream))
        response.fire_event('post_request')
        raise AbortEvent
    else:
        response.event('data_processed').bind(self.data_processed)
        response.event('post_request').bind(self.post_request)
Start the tunnel. This is a callback fired once a connection with upstream server is established.
def get_config(path):
    if configparser is None:
        return None
    if os.path.exists(os.path.join(ROOT, 'config', NAME, path)):
        path = os.path.join(ROOT, 'config', NAME, path)
    else:
        path = os.path.join(ROOT, 'config', path)
    if not os.path.isfile(path):
        return None
    conf = open(path, 'rt').read()
    conf = os.path.expandvars(conf)
    config = configparser.SafeConfigParser()
    if sys.version_info[0] == 2:
        from io import StringIO
        config.readfp(StringIO(unicode(conf)))
    else:
        config.read_string(conf)
    return config
Load a config from disk.

:param path: target config
:type path: unicode
:return:
:rtype: configparser.Config
def insert_all(db, schema_name, table_name, columns, items):
    table = '{0}.{1}'.format(schema_name, table_name) if schema_name else table_name
    columns_list = ', '.join(columns)
    values_list = ', '.join(['?'] * len(columns))
    query = 'INSERT INTO {table} ({columns}) VALUES ({values})'.format(
        table=table, columns=columns_list, values=values_list)
    for item in items:
        values = [getattr(item, col) for col in columns]
        db.execute(query, values)
Insert all items in the given items list into the specified table, schema_name.table_name.
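The '?' placeholders suggest a DB-API driver such as sqlite3; a minimal sketch of how insert_all might be driven, assuming items expose the columns as attributes:

import sqlite3
from collections import namedtuple

Person = namedtuple('Person', ['name', 'age'])

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE people (name TEXT, age INTEGER)')
# schema_name=None -> plain table name; columns are read off each item via getattr.
insert_all(db, None, 'people', ['name', 'age'],
           [Person('ada', 36), Person('alan', 41)])
print(db.execute('SELECT * FROM people').fetchall())
# [('ada', 36), ('alan', 41)]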
def is_participle_clause_fragment(sentence):
    if not _begins_with_one_of(sentence, ['VBG', 'VBN', 'JJ']):
        return 0.0
    if _begins_with_one_of(sentence, ['JJ']):
        doc = nlp(sentence)
        fw = [w for w in doc][0]
        if fw.dep_ == 'amod':
            return 0.0
    if _begins_with_one_of(sentence, ['VBG']):
        doc = nlp(sentence)
        fw = [w for w in doc][0]
        if fw.dep_.endswith('subj'):
            return 0.0
        fc = [c for c in doc.noun_chunks]
        if str(fw) in str(fc):
            return 0.0
    positive_prob = models['participle'].predict([_text_to_vector(
        sentence, trigram2idx['participle'], trigram_count['participle'])])[0][1]
    return float(positive_prob)
Supply a sentence or fragment and receive a confidence score.
def sprand(m, n, density, format='csr'):
    m, n = int(m), int(n)
    A = _rand_sparse(m, n, density, format='csr')
    A.data = sp.rand(A.nnz)
    return A.asformat(format)
Return a random sparse matrix.

Parameters
----------
m, n : int
    shape of the result
density : float
    target a matrix with nnz(A) = m*n*density, 0<=density<=1
format : string
    sparse matrix format to return, e.g. 'csr', 'coo', etc.

Returns
-------
A : sparse matrix
    m x n sparse matrix

Examples
--------
>>> from pyamg.gallery import sprand
>>> A = sprand(5,5,3/5.0)
def get_app_config(app_id):
    try:
        req = requests.get(
            ("https://clients3.google.com/"
             "cast/chromecast/device/app?a={}").format(app_id))
        return json.loads(req.text[4:]) if req.status_code == 200 else {}
    except ValueError:
        return {}
Get specific configuration for 'app_id'.
def get_cgi_parameter_float(form: cgi.FieldStorage,
                            key: str) -> Optional[float]:
    return get_float_or_none(get_cgi_parameter_str(form, key))
Extracts a float parameter from a CGI form, or None if the key is absent or the string value is not convertible to ``float``.
def read_float(self):
    self.bitcount = self.bits = 0
    return unpack('>d', self.input.read(8))[0]
Read float value.
def is_valid_boundaries(self, boundaries):
    if boundaries is not None:
        min_ = boundaries[0]
        for value in boundaries:
            if value < min_:
                return False
            else:
                min_ = value
        return True
    return False
Checks if the boundaries are in ascending order.
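A few illustrative cases; since the method never touches self, a tiny stub class is enough to exercise it (note that equal neighbours pass, because the check is `value < min_`):

class _Stub:
    is_valid_boundaries = is_valid_boundaries  # method ignores instance state

obj = _Stub()
assert obj.is_valid_boundaries([1, 2, 5, 9]) is True
assert obj.is_valid_boundaries([1, 1, 2]) is True   # non-strict ascending order
assert obj.is_valid_boundaries([1, 3, 2]) is False
assert obj.is_valid_boundaries(None) is False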
def deserialize_header_auth(stream, algorithm, verifier=None):
    _LOGGER.debug("Starting header auth deserialization")
    format_string = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len,
                                                  tag_len=algorithm.tag_len)
    return MessageHeaderAuthentication(*unpack_values(format_string, stream, verifier))
Deserializes a MessageHeaderAuthentication object from a source stream.

:param stream: Source data stream
:type stream: io.BytesIO
:param algorithm: The AlgorithmSuite object type contained in the header
:type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite
:param verifier: Signature verifier object (optional)
:type verifier: aws_encryption_sdk.internal.crypto.Verifier
:returns: Deserialized MessageHeaderAuthentication object
:rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
def query(self, *args):
    if not args or len(args) > 2:
        raise TypeError('query() takes 2 or 3 arguments (a query or a key '
                        'and a query) (%d given)' % (len(args) + 1))
    elif len(args) == 1:
        query, = args
        return self.get('text').query(text_type(query))
    else:
        key, query = args
        index_key = self.get(key)
        if isinstance(query, string_types):
            return index_key.query(query)
        else:
            if query.fielded:
                raise ValueError('Queries with an included key should '
                                 'not include a field.')
            return index_key.query(text_type(query))
Query a fulltext index by key and query, or just a plain Lucene query:

    i1 = gdb.nodes.indexes.get('people', type='fulltext', provider='lucene')
    i1.query('name', 'do*')
    i1.query('name:do*')

In this example, the last two lines are equivalent.
def validate_work_spec(cls, work_spec):
    if 'name' not in work_spec:
        raise ProgrammerError('work_spec lacks "name"')
    if 'min_gb' not in work_spec or \
            not isinstance(work_spec['min_gb'], (float, int, long)):
        raise ProgrammerError('work_spec["min_gb"] must be a number')
Check that `work_spec` is valid.

It must at the very minimum contain a ``name`` and ``min_gb``.

:raise rejester.exceptions.ProgrammerError: if it isn't valid
def after_third_friday(day=None):
    day = day if day is not None else datetime.datetime.now()
    now = day.replace(day=1, hour=16, minute=0, second=0, microsecond=0)
    now += relativedelta.relativedelta(weeks=2, weekday=relativedelta.FR)
    return day > now
Check if day is after the month's third Friday.
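A usage sketch: the third Friday of March 2019 fell on the 15th, and the cut-off is 16:00 that day (options-expiration style):

import datetime
from dateutil import relativedelta

print(after_third_friday(datetime.datetime(2019, 3, 20)))  # True
print(after_third_friday(datetime.datetime(2019, 3, 10)))  # False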
def listen_now_items(self):
    response = self._call(
        mc_calls.ListenNowGetListenNowItems
    )
    listen_now_item_list = response.body.get('listennow_items', [])
    listen_now_items = defaultdict(list)
    for item in listen_now_item_list:
        type_ = f"{ListenNowItemType(item['type']).name}s"
        listen_now_items[type_].append(item)
    return dict(listen_now_items)
Get a listing of Listen Now items.

Note:
    This does not include situations; use the :meth:`situations` method instead.

Returns:
    dict: With ``albums`` and ``stations`` keys of listen now items.
def _create_sync_map(self, sync_root):
    sync_map = SyncMap(tree=sync_root, rconf=self.rconf, logger=self.logger)
    if self.rconf.safety_checks:
        self.log(u"Running sanity check on computed sync map...")
        if not sync_map.leaves_are_consistent:
            self._step_failure(ValueError(u"The computed sync map contains inconsistent fragments"))
        self.log(u"Running sanity check on computed sync map... passed")
    else:
        self.log(u"Not running sanity check on computed sync map")
    self.task.sync_map = sync_map
If requested, check that the computed sync map is consistent. Then, add it to the Task.
def update_ticket(self, ticket_id=None, body=None):
    return self.ticket.addUpdate({'entry': body}, id=ticket_id)
Update a ticket.

:param integer ticket_id: the id of the ticket to update
:param string body: entry to update in the ticket
def hasReaders(self, ulBuffer):
    fn = self.function_table.hasReaders
    result = fn(ulBuffer)
    return result
inexpensively checks for readers to allow writers to fast-fail potentially expensive copies and writes.
def get_instance(cls, encoded_key):
    login_str = base64.b64decode(encoded_key).decode('utf-8')
    usertoken, password = login_str.strip().split(':', 1)
    instance = cls(usertoken, password)
    return instance
Return an ApiAuth instance from an encoded key
def stop(self, nowait=False):
    self._stop.set()
    if nowait:
        self._stop_nowait.set()
    self.queue.put_nowait(self._sentinel_item)
    if (self._thread.isAlive() and
            self._thread is not threading.currentThread()):
        self._thread.join()
    self._thread = None
Stop the listener.

This asks the thread to terminate, and then waits for it to do so. Note that
if you don't call this before your application exits, there may be some
records still left on the queue, which won't be processed.

If nowait is False then the thread will handle remaining items in the queue
and stop. If nowait is True then the thread will be stopped even if the
queue still contains items.
def populate_audit_fields(self, event):
    event.updated = self._data
    event.original = self.get_original()._data
Populates the audit JSON fields with raw data from the model, so all changes can be tracked and diffed. Args: event (Event): The Event instance to attach the data to
def schemaValidCtxtGetParserCtxt(self):
    ret = libxml2mod.xmlSchemaValidCtxtGetParserCtxt(self._o)
    if ret is None:
        raise parserError('xmlSchemaValidCtxtGetParserCtxt() failed')
    __tmp = parserCtxt(_obj=ret)
    return __tmp
allow access to the parser context of the schema validation context
def iterate_similarity_datasets(args):
    for dataset_name in args.similarity_datasets:
        parameters = nlp.data.list_datasets(dataset_name)
        for key_values in itertools.product(*parameters.values()):
            kwargs = dict(zip(parameters.keys(), key_values))
            yield dataset_name, kwargs, nlp.data.create(dataset_name, **kwargs)
Generator over all similarity evaluation datasets. Iterates over dataset names, keyword arguments for their creation and the created dataset.
def bind(cls, origin, handler, *, name=None):
    name = cls.__name__ if name is None else name
    attrs = {
        "_origin": origin,
        "_handler": handler,
        "__module__": "origin",
    }
    return type(name, (cls,), attrs)
Bind this object to the given origin and handler.

:param origin: An instance of `Origin`.
:param handler: An instance of `bones.HandlerAPI`.
:return: A subclass of this class.
def get_time_position(self, time):
    if time < self._start or time > self._finish:
        raise ValueError("time argument out of bounds")
    return (time - self._start) / (self._resolution / self._zoom_factor)
Get x-coordinate for given time.

:param time: Time to determine x-coordinate on Canvas for
:type time: float
:return: X-coordinate for the given time
:rtype: int
:raises: ValueError
def open(self, mode):
    if mode == 'w':
        return self.format.pipe_writer(AtomicFtpFile(self._fs, self.path))
    elif mode == 'r':
        temp_dir = os.path.join(tempfile.gettempdir(), 'luigi-contrib-ftp')
        self.__tmp_path = temp_dir + '/' + self.path.lstrip('/') + '-luigi-tmp-%09d' % random.randrange(0, 1e10)
        self._fs.get(self.path, self.__tmp_path)
        return self.format.pipe_reader(
            FileWrapper(io.BufferedReader(io.FileIO(self.__tmp_path, 'r')))
        )
    else:
        raise Exception("mode must be 'r' or 'w' (got: %s)" % mode)
Open the FileSystem target.

This method returns a file-like object which can either be read from or
written to depending on the specified mode.

:param mode: the mode `r` opens the FileSystemTarget in read-only mode,
    whereas `w` will open the FileSystemTarget in write mode. Subclasses can
    implement additional options.
:type mode: str
def eol_distance_last(self, offset=0):
    distance = 0
    for char in reversed(self.string[:self.pos + offset]):
        if char == '\n':
            break
        else:
            distance += 1
    return distance
Return the amount of characters until the last newline.
def order_target_percent(self, asset, target,
                         limit_price=None, stop_price=None, style=None):
    if not self._can_order_asset(asset):
        return None
    amount = self._calculate_order_target_percent_amount(asset, target)
    return self.order(asset, amount, limit_price=limit_price,
                      stop_price=stop_price, style=style)
Place an order to adjust a position to a target percent of the current
portfolio value. If the position doesn't already exist, this is equivalent
to placing a new order. If the position does exist, this is equivalent to
placing an order for the difference between the target percent and the
current percent.

Parameters
----------
asset : Asset
    The asset that this order is for.
target : float
    The desired percentage of the portfolio value to allocate to ``asset``.
    This is specified as a decimal, for example: 0.50 means 50%.
limit_price : float, optional
    The limit price for the order.
stop_price : float, optional
    The stop price for the order.
style : ExecutionStyle
    The execution style for the order.

Returns
-------
order_id : str
    The unique identifier for this order.

Notes
-----
``order_target_value`` does not take into account any open orders. For
example:

.. code-block:: python

   order_target_percent(sid(0), 10)
   order_target_percent(sid(0), 10)

This code will result in 20% of the portfolio being allocated to sid(0)
because the first call to ``order_target_percent`` will not have been filled
when the second ``order_target_percent`` call is made.

See :func:`zipline.api.order` for more information about ``limit_price``,
``stop_price``, and ``style``

See Also
--------
:class:`zipline.finance.execution.ExecutionStyle`
:func:`zipline.api.order`
:func:`zipline.api.order_target`
:func:`zipline.api.order_target_value`
def grant_permission(username,
                     resource=None,
                     resource_type='keyspace',
                     permission=None,
                     contact_points=None,
                     port=None,
                     cql_user=None,
                     cql_pass=None):
    permission_cql = "grant {0}".format(permission) if permission else "grant all permissions"
    resource_cql = "on {0} {1}".format(resource_type, resource) if resource else "on all keyspaces"
    query = "{0} {1} to {2}".format(permission_cql, resource_cql, username)
    log.debug("Attempting to grant permissions with query '%s'", query)
    try:
        cql_query(query, contact_points, port, cql_user, cql_pass)
    except CommandExecutionError:
        log.critical('Could not grant permissions.')
        raise
    except BaseException as e:
        log.critical('Unexpected error while granting permissions: %s', e)
        raise
    return True
Grant permissions to a user.

:param username: The name of the user to grant permissions to.
:type username: str
:param resource: The resource (keyspace or table); if None, permissions for all resources are granted.
:type resource: str
:param resource_type: The resource_type (keyspace or table), defaults to 'keyspace'.
:type resource_type: str
:param permission: A permission name (e.g. select); if None, all permissions are granted.
:type permission: str
:param contact_points: The Cassandra cluster addresses, can either be a string or a list of IPs.
:type contact_points: str | list[str]
:param cql_user: The Cassandra user if authentication is turned on.
:type cql_user: str
:param cql_pass: The Cassandra user password if authentication is turned on.
:type cql_pass: str
:param port: The Cassandra cluster port, defaults to None.
:type port: int
:return:
:rtype:

CLI Example:

.. code-block:: bash

    salt 'minion1' cassandra_cql.grant_permission

    salt 'minion1' cassandra_cql.grant_permission username=joe resource=test_keyspace permission=select

    salt 'minion1' cassandra_cql.grant_permission username=joe resource=test_table resource_type=table \
        permission=select contact_points=minion1
def count(a, axis=None):
    axes = _normalise_axis(axis, a)
    if axes is None or len(axes) != 1:
        msg = "This operation is currently limited to a single axis"
        raise AxisSupportError(msg)
    return _Aggregation(a, axes[0],
                        _CountStreamsHandler, _CountMaskedStreamsHandler,
                        np.dtype('i'), {})
Count the non-masked elements of the array along the given axis.

.. note:: Currently limited to operating on a single axis.

:param axis: Axis or axes along which the operation is performed. The default
    (axis=None) is to perform the operation over all the dimensions of the
    input array. The axis may be negative, in which case it counts from the
    last to the first axis. If axis is a tuple of ints, the operation is
    performed over multiple axes.
:type axis: None, or int, or iterable of ints.
:return: The Array representing the requested count.
:rtype: Array
def lines(self):
    if self._lines is None:
        with io.open(self.path, 'r', encoding='utf-8') as fh:
            self._lines = fh.read().split('\n')
    return self._lines
List of file lines.
def get_version():
    if not INSTALLED:
        try:
            with open('version.txt', 'r') as v_fh:
                return v_fh.read()
        except Exception:
            warnings.warn(
                'Unable to resolve package version until installed',
                UserWarning
            )
        return '0.0.0'
    return p_version.get_version(HERE)
Find current version information.

Returns:
    (str): version information
def stringify(data):
    def serialize(k, v):
        if k == "candidates":
            return int(v)
        if isinstance(v, numbers.Number):
            if k == "zipcode":
                return str(v).zfill(5)
            return str(v)
        return v
    return [{k: serialize(k, v) for k, v in json_dict.items()}
            for json_dict in data]
Ensure all values in the dictionary are strings, except for the value for `candidates`, which should be an integer.

:param data: a list of addresses in dictionary format
:return: the same list with all values as strings, except for the `candidates` count, which is an integer
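A small example of the conversion rules (numbers become strings, zip codes are zero-padded to five digits, and the candidates count becomes an int):

data = [{'street': '100 Main St', 'zipcode': 544, 'candidates': '3'}]
print(stringify(data))
# [{'street': '100 Main St', 'zipcode': '00544', 'candidates': 3}]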
def rm(venv_name):
    inenv = InenvManager()
    venv = inenv.get_venv(venv_name)
    click.confirm("Delete dir {}".format(venv.path))
    shutil.rmtree(venv.path)
Removes the venv by name
def build_table(self, table, force=False):
    sources = self._resolve_sources(None, [table])
    for source in sources:
        self.build_source(None, source, force=force)
    self.unify_partitions()
Build all of the sources for a table
def format_sms_payload(self, message, to, sender='elkme', options=[]):
    self.validate_number(to)
    if not isinstance(message, str):
        message = " ".join(message)
    message = message.rstrip()
    sms = {
        'from': sender,
        'to': to,
        'message': message
    }
    for option in options:
        if option not in ['dontlog', 'dryrun', 'flashsms']:
            raise ElksException('Option %s not supported' % option)
        sms[option] = 'yes'
    return sms
Helper function to create an SMS payload with little effort
def create_server(loop, host, port=CONTROL_PORT, reconnect=False):
    server = Snapserver(loop, host, port, reconnect)
    yield from server.start()
    return server
Server factory.
def check_production_parameters_exist(self):
    for k, v in self.modelInstance.parameter_sets.items():
        for p_id in self.modelInstance.production_params.keys():
            if v.get(p_id):
                pass
            else:
                v[p_id] = 1.0
        for p_id in self.modelInstance.allocation_params.keys():
            if v.get(p_id):
                pass
            else:
                v[p_id] = 1.0
Old versions of models won't have production parameters, leading to ZeroDivision errors and breaking things.
def close_db(self, exception):
    if self.is_connected:
        if exception is None and not transaction.isDoomed():
            transaction.commit()
        else:
            transaction.abort()
        self.connection.close()
Added as a `~flask.Flask.teardown_request` to applications to commit the transaction and disconnect ZODB if it was used during the request.
def show(self, uuid=None, term=None):
    try:
        if uuid:
            uidentities = api.unique_identities(self.db, uuid)
        elif term:
            uidentities = api.search_unique_identities(self.db, term)
        else:
            uidentities = api.unique_identities(self.db)
        for uid in uidentities:
            enrollments = api.enrollments(self.db, uid.uuid)
            uid.roles = enrollments
        self.display('show.tmpl', uidentities=uidentities)
    except NotFoundError as e:
        self.error(str(e))
        return e.code
    return CMD_SUCCESS
Show the information related to unique identities.

This method prints information related to unique identities such as
identities or enrollments.

When <uuid> is given, it will only show information about the unique
identity related to <uuid>.

When <term> is set, it will only show information about those unique
identities that have any attribute (name, email, username, source) which
match with the given term. This parameter does not have any effect when
<uuid> is set.

:param uuid: unique identifier
:param term: term to match with unique identities data
def get_similar_commands(name):
    from difflib import get_close_matches
    name = name.lower()
    close_commands = get_close_matches(name, commands_dict.keys())
    if close_commands:
        return close_commands[0]
    else:
        return False
Command name auto-correct.
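A sketch of the auto-correct behaviour, with a hypothetical commands_dict (the real one is module state not shown here):

commands_dict = {'install': None, 'uninstall': None, 'freeze': None}

print(get_similar_commands('instal'))    # 'install'
print(get_similar_commands('UNINSTAL'))  # 'uninstall' (input is lowercased first)
print(get_similar_commands('qwerty'))    # False (no close match)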
def readString(self):
    length, is_reference = self._readLength()
    if is_reference:
        result = self.context.getString(length)
        return self.context.getStringForBytes(result)
    if length == 0:
        return ''
    result = self.stream.read(length)
    self.context.addString(result)
    return self.context.getStringForBytes(result)
Reads and returns a string from the stream.
def _dataframe_to_edge_list(df):
    cols = df.columns
    if len(cols):
        assert _SRC_VID_COLUMN in cols, "Vertex DataFrame must contain column %s" % _SRC_VID_COLUMN
        assert _DST_VID_COLUMN in cols, "Vertex DataFrame must contain column %s" % _DST_VID_COLUMN
        df = df[cols].T
        ret = [Edge(None, None, _series=df[col]) for col in df]
        return ret
    else:
        return []
Convert dataframe into list of edges, assuming that source and target ids are stored in _SRC_VID_COLUMN and _DST_VID_COLUMN, respectively.
def after(f, chain=False):
    def decorator(g):
        @wraps(g)
        def h(*args, **kargs):
            if chain:
                return f(g(*args, **kargs))
            else:
                r = g(*args, **kargs)
                f(*args, **kargs)
                return r
        return h
    return decorator
Runs f after the decorated function. With chain=True, f receives the decorated function's result and its return value replaces it; otherwise f is called with the original arguments and the decorated function's result is returned.
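Both modes of the decorator in action (assuming `from functools import wraps` is in scope, as the @wraps usage implies):

@after(print)
def add(a, b):
    return a + b

add(2, 3)  # returns 5, then print(2, 3) runs with the original arguments

@after(str.upper, chain=True)
def greet(name):
    return 'hello ' + name

print(greet('world'))  # HELLO WORLD -- f receives g's result and replaces it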
def _from_dict(cls, _dict):
    args = {}
    if 'input' in _dict:
        args['input'] = MessageInput._from_dict(_dict.get('input'))
    if 'intents' in _dict:
        args['intents'] = [
            RuntimeIntent._from_dict(x) for x in (_dict.get('intents'))
        ]
    if 'entities' in _dict:
        args['entities'] = [
            RuntimeEntity._from_dict(x) for x in (_dict.get('entities'))
        ]
    return cls(**args)
Initialize a DialogSuggestionValue object from a json dictionary.
def getExtensionArgs(self):
    aliases = NamespaceMap()
    required = []
    if_available = []
    ax_args = self._newArgs()
    for type_uri, attribute in self.requested_attributes.items():
        if attribute.alias is None:
            alias = aliases.add(type_uri)
        else:
            alias = aliases.addAlias(type_uri, attribute.alias)
        if attribute.required:
            required.append(alias)
        else:
            if_available.append(alias)
        if attribute.count != 1:
            ax_args['count.' + alias] = str(attribute.count)
        ax_args['type.' + alias] = type_uri
    if required:
        ax_args['required'] = ','.join(required)
    if if_available:
        ax_args['if_available'] = ','.join(if_available)
    return ax_args
Get the serialized form of this attribute fetch request.

@returns: The fetch request message parameters
@rtype: {unicode:unicode}
def determine_end_point(http_request, url):
    if url.endswith('aggregates') or url.endswith('aggregates/'):
        return 'aggregates'
    else:
        return 'detail' if is_detail_url(http_request, url) else 'list'
returns detail, list or aggregates
def types(self):
    out = []
    if self._transform_bytes:
        out.append(bytes)
    if self._transform_str:
        out.append(str)
    return tuple(out)
Tuple containing types transformed by this transformer.
def entity_types(args):
    r = fapi.list_entity_types(args.project, args.workspace)
    fapi._check_response_code(r, 200)
    return r.json().keys()
List entity types in a workspace
def shutdown(self):
    self._must_shutdown = True
    self._is_shutdown.wait()
    self._meta_runner.stop()
Shutdown the accept loop and stop running payloads
def insert(self, space, t, *, replace=False, timeout=-1) -> _MethodRet:
    return self._db.insert(space, t, replace=replace, timeout=timeout)
Insert request coroutine.

Examples:

.. code-block:: pycon

    # Basic usage
    >>> await conn.insert('tester', [0, 'hello'])
    <Response sync=3 rowcount=1 data=[
        <TarantoolTuple id=0 name='hello'>
    ]>

    # Using dict as an argument tuple
    >>> await conn.insert('tester', {
    ...     'id': 0,
    ...     'text': 'hell0'
    ... })
    <Response sync=3 rowcount=1 data=[
        <TarantoolTuple id=0 name='hello'>
    ]>

:param space: space id or space name.
:param t: tuple to insert (list object)
:param replace: performs replace request instead of insert
:param timeout: Request timeout

:returns: :class:`asynctnt.Response` instance
def serialize_to_string(self, name, datas):
    value = datas.get('value', None)
    if value is None:
        msg = ("String reference '{}' lacks of required 'value' variable "
               "or is empty")
        raise SerializerError(msg.format(name))
    return value
Serialize given datas to a string.

Simply return the value from required variable ``value``.

Arguments:
    name (string): Name only used inside possible exception message.
    datas (dict): Datas to serialize.

Returns:
    string: Value.
def SensorShare(self, sensor_id, parameters):
    if not parameters['user']['id']:
        parameters['user'].pop('id')
    if not parameters['user']['username']:
        parameters['user'].pop('username')
    if self.__SenseApiCall__("/sensors/{0}/users".format(sensor_id), "POST", parameters=parameters):
        return True
    else:
        self.__error__ = "api call unsuccessful"
        return False
Share a sensor with a user.

@param sensor_id (int) - Id of sensor to be shared
@param parameters (dictionary) - Additional parameters for the call

@return (bool) - Boolean indicating whether the ShareSensor call was successful
def get_queryset(self, request):
    if request.GET.get(ShowHistoryFilter.parameter_name) == '1':
        queryset = self.model.objects.with_active_flag()
    else:
        queryset = self.model.objects.current_set()
    ordering = self.get_ordering(request)
    if ordering:
        return queryset.order_by(*ordering)
    return queryset
Annotate the queryset with an 'is_active' property that's true iff that row is the most recently added row for that particular set of KEY_FIELDS values.

Filter the queryset to show only is_active rows by default.
def salt_syndic():
    import salt.utils.process
    salt.utils.process.notify_systemd()
    import salt.cli.daemons
    pid = os.getpid()
    try:
        syndic = salt.cli.daemons.Syndic()
        syndic.start()
    except KeyboardInterrupt:
        os.kill(pid, 15)
Start the salt syndic.