code | docstring
---|---
def subgraph(self, nodes):
adj_matrix = self.csgraph[np.ix_(nodes, nodes)]
weighted = True
if self.node_labels is not None:
node_labels = self.node_labels[nodes]
else:
node_labels = None
return DiGraph(adj_matrix, weighted=weighted, node_labels=node_labels)
|
Return the subgraph consisting of the given nodes and the edges
between these nodes.
Parameters
----------
nodes : array_like(int, ndim=1)
Array of node indices.
Returns
-------
DiGraph
A DiGraph representing the subgraph.
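A minimal usage sketch (hypothetical adjacency matrix and labels; assumes the
enclosing DiGraph constructor accepts them as shown):
>>> import numpy as np
>>> adj = np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]])
>>> g = DiGraph(adj, node_labels=np.array(['a', 'b', 'c']))  # doctest: +SKIP
>>> sub = g.subgraph([0, 1])  # doctest: +SKIP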
|
def _index_key_for(self, att, value=None):
if value is None:
value = getattr(self, att)
if callable(value):
value = value()
if value is None:
return None
if att not in self.lists:
return self._get_index_key_for_non_list_attr(att, value)
else:
return self._tuple_for_index_key_attr_list(att, value)
|
Returns a key based on the attribute and its value.
The key is used for indexing.
|
def cli(env):
mgr = SoftLayer.EventLogManager(env.client)
event_log_types = mgr.get_event_log_types()
table = formatting.Table(COLUMNS)
for event_log_type in event_log_types:
table.add_row([event_log_type])
env.fout(table)
|
Get Event Log Types
|
def check_roundoff(self, rtol=0.25, atol=1e-6):
psdev = _gvar.sdev(self.p.flat)
paltsdev = _gvar.sdev(self.palt.flat)
if not numpy.allclose(psdev, paltsdev, rtol=rtol, atol=atol):
warnings.warn("Possible roundoff errors in fit.p; try svd cut.")
|
Check for roundoff errors in fit.p.
Compares standard deviations from fit.p and fit.palt to see if they
agree to within relative tolerance ``rtol`` and absolute tolerance
``atol``. Generates a warning if they do not (in which
case an SVD cut might be advisable).
|
def _apply_to_data(data, func, unpack_dict=False):
apply_ = partial(_apply_to_data, func=func, unpack_dict=unpack_dict)
if isinstance(data, dict):
if unpack_dict:
return [apply_(v) for v in data.values()]
return {k: apply_(v) for k, v in data.items()}
if isinstance(data, (list, tuple)):
try:
return [apply_(x) for x in data]
except TypeError:
return func(data)
return func(data)
|
Apply a function to data, trying to unpack different data
types.
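An illustrative sketch with hypothetical inputs:
>>> _apply_to_data({'a': [1, 2], 'b': 3}, func=lambda x: x * 2)
{'a': [2, 4], 'b': 6}
>>> _apply_to_data({'a': [1, 2], 'b': 3}, func=lambda x: x * 2, unpack_dict=True)
[[2, 4], 6]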
|
def publish_server_closed(self, server_address, topology_id):
event = ServerClosedEvent(server_address, topology_id)
for subscriber in self.__server_listeners:
try:
subscriber.closed(event)
except Exception:
_handle_exception()
|
Publish a ServerClosedEvent to all server listeners.
:Parameters:
- `server_address`: The address (host/port pair) of the server.
- `topology_id`: A unique identifier for the topology this server
is a part of.
|
def fromDictionary(value):
if isinstance(value, dict):
pp = PortalParameters()
for k,v in value.items():
setattr(pp, "_%s" % k, v)
return pp
else:
raise AttributeError("Invalid input.")
|
Creates the portal properties object from a dictionary.
|
def request_uplink_info(self, context, agent):
LOG.debug('request_uplink_info from %(agent)s', {'agent': agent})
event_type = 'agent.request.uplink'
payload = {'agent': agent}
timestamp = time.ctime()
data = (event_type, payload)
pri = self.obj.PRI_LOW_START + 1
self.obj.pqueue.put((pri, timestamp, data))
LOG.debug('Added request uplink info into queue.')
return 0
|
Process uplink message from an agent.
|
def command(cmd):
status, out = commands.getstatusoutput(cmd)
    if status != 0:
logger.error("Something went wrong:")
logger.error(out)
raise SdistCreationError()
return out
|
Execute command and raise an exception upon an error.
>>> 'README' in command('ls')
True
>>> command('nonexistingcommand') #doctest: +ELLIPSIS
Traceback (most recent call last):
...
SdistCreationError
|
def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
return _utils.wrap_future_call(
self._inner.future(
_utils.WrappedAsyncIterator(request_iterator, self._loop),
timeout,
metadata,
credentials
),
self._loop,
self._executor)
|
Asynchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
|
def unset(ctx, key):
file = ctx.obj['FILE']
quote = ctx.obj['QUOTE']
success, key = unset_key(file, key, quote)
if success:
click.echo("Successfully removed %s" % key)
else:
exit(1)
|
Removes the given key.
|
def score_kmer(self, kmer):
if len(kmer) != len(self.pwm):
raise Exception("incorrect k-mer length")
score = 0.0
d = {"A":0, "C":1, "G":2, "T":3}
for nuc, row in zip(kmer.upper(), self.pwm):
score += log(row[d[nuc]] / 0.25 + 0.01)
return score
|
Calculate the log-odds score for a specific k-mer.
Parameters
----------
kmer : str
String representing a kmer. Should be the same length as the motif.
Returns
-------
score : float
Log-odds score.
|
def decode_vlqs(s):
ints = []
i = 0
shift = 0
for c in s:
raw = B64_INT[c]
cont = VLQ_CONT & raw
i = ((VLQ_BASE_MASK & raw) << shift) | i
shift += VLQ_SHIFT
if not cont:
sign = -1 if 1 & i else 1
ints.append((i >> 1) * sign)
i = 0
shift = 0
return tuple(ints)
|
Decode str `s` into a list of integers.
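A small sketch, assuming the module constants implement the standard source-map
base64 VLQ scheme (5 value bits per character plus a continuation bit):
>>> decode_vlqs('AACA')
(0, 0, 1, 0)
>>> decode_vlqs('D')
(-1,)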
|
def _tweak_lane(lane_details, dname):
tweak_config_file = os.path.join(dname, "lane_config.yaml")
if os.path.exists(tweak_config_file):
with open(tweak_config_file) as in_handle:
tweak_config = yaml.safe_load(in_handle)
if tweak_config.get("uniquify_lanes"):
out = []
for ld in lane_details:
ld["name"] = "%s-%s" % (ld["name"], ld["lane"])
out.append(ld)
return out
return lane_details
|
Potentially tweak lane information to handle custom processing, reading a lane_config.yaml file.
|
def _recurse_find_trace(self, structure, item, trace=[]):
try:
i = structure.index(item)
except ValueError:
for j,substructure in enumerate(structure):
if isinstance(substructure, list):
return self._recurse_find_trace(substructure, item, trace+[j])
else:
return trace+[i]
|
Given a nested structure from _parse_repr, find the trace route to get to the item.
|
def _previewFile(self):
dataFrame = self._loadCSVDataFrame()
dataFrameModel = DataFrameModel(dataFrame, filePath=self._filename)
dataFrameModel.enableEditing(True)
self._previewTableView.setModel(dataFrameModel)
columnModel = dataFrameModel.columnDtypeModel()
columnModel.changeFailed.connect(self.updateStatusBar)
self._datatypeTableView.setModel(columnModel)
|
Updates the preview widgets with new models for both tab panes.
|
def check_vtech(text):
err = "institution.vtech"
msg = "Incorrect name. Use '{}' instead of '{}'."
institution = [
["Virginia Polytechnic Institute and State University",
["Virginia Polytechnic and State University"]],
]
return preferred_forms_check(text, institution, err, msg)
|
Suggest the correct name.
source: Virginia Tech Division of Student Affairs
source_url: http://bit.ly/2en1zbv
|
def update_collisions(self):
if not self.mode['items'] or len(self.mode['items']) == 0: return
self.collman.clear()
for z, node in self.children:
if hasattr(node, 'cshape') and type(node.cshape) == cm.CircleShape:
self.collman.add(node)
for other in self.collman.iter_colliding(self.player):
typeball = other.btype
self.logger.debug('collision', typeball)
if other.removable:
self.to_remove.append(other)
self.reward_item(typeball)
self.remove_items()
|
Test player for collisions with items
|
def list_nodes_full(**kwargs):
nodes = _query('server/list')
ret = {}
for node in nodes:
name = nodes[node]['label']
ret[name] = nodes[node].copy()
ret[name]['id'] = node
ret[name]['image'] = nodes[node]['os']
ret[name]['size'] = nodes[node]['VPSPLANID']
ret[name]['state'] = nodes[node]['status']
ret[name]['private_ips'] = nodes[node]['internal_ip']
ret[name]['public_ips'] = nodes[node]['main_ip']
return ret
|
Return all data on nodes
|
def unfreeze(name, path=None, use_vt=None):
_ensure_exists(name, path=path)
if state(name, path=path) == 'stopped':
raise CommandExecutionError(
'Container \'{0}\' is stopped'.format(name)
)
cmd = 'lxc-unfreeze'
if path:
cmd += ' -P {0}'.format(pipes.quote(path))
return _change_state(cmd, name, 'running', path=path, use_vt=use_vt)
|
Unfreeze the named container.
path
path to the container parent directory
default: /var/lib/lxc (system)
.. versionadded:: 2015.8.0
use_vt
run the command through VT
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt '*' lxc.unfreeze name
|
def find_method(self, decl):
name = decl.name
method = None
try:
method = getattr(self, u'do_{}'.format(
(name).replace('-', '_')))
except AttributeError:
if name.startswith('data-'):
method = getattr(self, 'do_data_any')
elif name.startswith('attr-'):
method = getattr(self, 'do_attr_any')
else:
log(WARN, u'Missing method {}'.format(
(name).replace('-', '_')).encode('utf-8'))
if method:
self.record_coverage_line(decl.source_line)
return method
else:
return lambda x, y, z: None
|
Find class method to call for declaration based on name.
|
def aggregated_records(all_records, key_fields=KEY_FIELDS):
flow_table = defaultdict(_FlowStats)
for flow_record in all_records:
key = tuple(getattr(flow_record, attr) for attr in key_fields)
if any(x is None for x in key):
continue
flow_table[key].update(flow_record)
for key in flow_table:
item = {k: v for k, v in zip(key_fields, key)}
item.update(flow_table[key].to_dict())
yield item
|
Yield dicts that correspond to aggregates of the flow records given by
the sequence of FlowRecords in `all_records`. Skips incomplete records.
This will consume the `all_records` iterator, and requires enough memory to
be able to read it entirely.
`key_fields` optionally contains the fields over which to aggregate. By
default it's the typical flow 5-tuple.
|
def clone(self, source_id, backup_id, size,
volume_id=None, source_host=None):
volume_id = volume_id or str(uuid.uuid4())
return self.http_put('/volumes/%s' % volume_id,
params=self.unused({
'source_host': source_host,
'source_volume_id': source_id,
'backup_id': backup_id,
'size': size
}))
|
Create a volume, then clone the contents of
the backup into the new volume.
|
def set_rule_name(self, rule_name):
if not self.aws.get('xray', None):
self.aws['xray'] = {}
self.aws['xray']['sampling_rule_name'] = rule_name
|
Add the matched centralized sampling rule name
if a segment is sampled because of that rule.
This method should be only used by the recorder.
|
def i32(self, name, value=None, align=None):
self.int(4, name, value, align)
|
Add a 32-bit integer field to the template.
This is a convenience method that simply calls the `Int` keyword with a predefined length.
|
def limit_mem(limit=(4 * 1024**3)):
"Set soft memory limit"
rsrc = resource.RLIMIT_DATA
soft, hard = resource.getrlimit(rsrc)
resource.setrlimit(rsrc, (limit, hard))
softnew, _ = resource.getrlimit(rsrc)
assert softnew == limit
_log = logging.getLogger(__name__)
_log.debug('Set soft memory limit: %s => %s', soft, softnew)
|
Set soft memory limit
|
def read_config(ip, mac):
click.echo("Read configuration from %s" % ip)
request = requests.get(
'http://{}/{}/{}/'.format(ip, URI, mac), timeout=TIMEOUT)
print(request.json())
|
Read the current configuration of a myStrom device.
|
def prepare_function_symbol(self, symbol_name, basic_addr=None):
if basic_addr is None:
basic_addr = self.project.loader.extern_object.get_pseudo_addr(symbol_name)
return basic_addr, basic_addr
|
Prepare the address space with the data necessary to perform relocations pointing to the given symbol
Returns a 2-tuple. The first item is the address of the function code, the second is the address of the
relocation target.
|
def main():
parser = build_parser()
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
inputs = {'config': args.config,
'config_fastq': args.config_fastq,
'input': args.input,
'unc.bed': args.unc,
'hg19.transcripts.fa': args.fasta,
'composite_exons.bed': args.composite_exons,
'normalize.pl': args.normalize,
'output_dir': args.output_dir,
'rsem_ref.zip': args.rsem_ref,
'chromosomes.zip': args.chromosomes,
'ebwt.zip': args.ebwt,
'ssec': args.ssec,
's3_dir': args.s3_dir,
'sudo': args.sudo,
'single_end_reads': args.single_end_reads,
'upload_bam_to_s3': args.upload_bam_to_s3,
'uuid': None,
'sample.tar': None,
'cpu_count': None}
Job.Runner.startToil(Job.wrapJobFn(download_shared_files, inputs), args)
|
This is a Toil pipeline for the UNC best practice RNA-Seq analysis.
RNA-seq fastqs are combined, aligned, sorted, filtered, and quantified.
Please read the README.md located in the same directory.
|
def dir2zip(in_dir, zip_fname):
z = zipfile.ZipFile(zip_fname, 'w',
compression=zipfile.ZIP_DEFLATED)
for root, dirs, files in os.walk(in_dir):
for file in files:
in_fname = pjoin(root, file)
in_stat = os.stat(in_fname)
info = zipfile.ZipInfo(in_fname)
info.filename = relpath(in_fname, in_dir)
if os.path.sep == '\\':
info.filename = relpath(in_fname, in_dir).replace('\\', '/')
info.date_time = time.localtime(in_stat.st_mtime)
perms = stat.S_IMODE(in_stat.st_mode) | stat.S_IFREG
info.external_attr = perms << 16
with open_readable(in_fname, 'rb') as fobj:
contents = fobj.read()
z.writestr(info, contents, zipfile.ZIP_DEFLATED)
z.close()
|
Make a zip file `zip_fname` with contents of directory `in_dir`
The recorded filenames are relative to `in_dir`, so doing a standard zip
unpack of the resulting `zip_fname` in an empty directory will result in
the original directory contents.
Parameters
----------
in_dir : str
Directory path containing files to go in the zip archive
zip_fname : str
Filename of zip archive to write
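A hedged usage sketch (hypothetical paths and contents):
>>> dir2zip('build/payload', 'payload.zip')  # doctest: +SKIP
>>> zipfile.ZipFile('payload.zip').namelist()  # doctest: +SKIP
['setup.py', 'pkg/__init__.py']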
|
def __get_wbfmt_usrfld(self, data_nt):
if self.ntfld_wbfmt is not None:
if isinstance(self.ntfld_wbfmt, str):
ntval = getattr(data_nt, self.ntfld_wbfmt, None)
if ntval is not None:
return self.fmtname2wbfmtobj.get(ntval, None)
|
Return format for text cell from namedtuple field specified by 'ntfld_wbfmt'.
|
def activate(self, ideSettings, ideGlobalData):
WizardInterface.activate(self, ideSettings, ideGlobalData)
self.__where = self.__getConfiguredWhere()
self.ide.editorsManager.sigTabClosed.connect(self.__collectGarbage)
self.ide.project.sigProjectChanged.connect(self.__collectGarbage)
|
Activates the plugin.
The plugin may override the method to do specific
plugin activation handling.
ideSettings - reference to the IDE Settings singleton
see codimension/src/utils/settings.py
ideGlobalData - reference to the IDE global settings
see codimension/src/utils/globals.py
Note: if overridden do not forget to call the
base class activate()
|
def orphans_single(default_exec=False):
if not default_exec and executable.endswith('uwsgi'):
_executable = executable[:-5] + 'python'
else:
_executable = executable
p = subprocess.Popen([_executable, '-m', 'nikola', 'orphans'],
stdout=subprocess.PIPE)
p.wait()
files = [l.strip().decode('utf-8') for l in p.stdout.readlines()]
for f in files:
if f:
os.unlink(f)
out = '\n'.join(files)
return p.returncode, out
|
Remove all orphans in the site, in the single user-mode.
|
def _CreateImage(media_service, opener, url):
image_data = opener.open(url).read().decode('utf-8')
image = {
'type': 'IMAGE',
'data': image_data,
'xsi_type': 'Image'
}
return media_service.upload(image)[0]
|
Creates an image and uploads it to the server.
Args:
media_service: a SudsServiceProxy instance for AdWords's MediaService.
opener: an OpenerDirector instance.
url: a str URL used to load image data.
Returns:
The image that was successfully uploaded.
|
def get_apps_tools():
tools_paths = {}
for app_config in apps.get_app_configs():
proc_path = os.path.join(app_config.path, 'tools')
if os.path.isdir(proc_path):
tools_paths[app_config.name] = proc_path
custom_tools_paths = getattr(settings, 'RESOLWE_CUSTOM_TOOLS_PATHS', [])
if not isinstance(custom_tools_paths, list):
raise KeyError("`RESOLWE_CUSTOM_TOOLS_PATHS` setting must be a list.")
for seq, custom_path in enumerate(custom_tools_paths):
custom_key = '_custom_{}'.format(seq)
tools_paths[custom_key] = custom_path
return tools_paths
|
Get applications' tools and their paths.
Return a dict with application names as keys and paths to tools'
directories as values. Applications without tools are omitted.
|
def _postcheck(self, network, feedin):
curtailment = network.timeseries.curtailment
gen_repr = [repr(_) for _ in curtailment.columns]
feedin_repr = feedin.loc[:, gen_repr]
curtailment_repr = curtailment
curtailment_repr.columns = gen_repr
if not ((feedin_repr - curtailment_repr) > -1e-1).all().all():
message = 'Curtailment exceeds feed-in.'
logging.error(message)
raise TypeError(message)
|
Raises an error if the curtailment of a generator exceeds the
feed-in of that generator at any time step.
Parameters
-----------
network : :class:`~.grid.network.Network`
feedin : :pandas:`pandas.DataFrame<dataframe>`
DataFrame with feed-in time series in kW. Columns of the dataframe
are :class:`~.grid.components.GeneratorFluctuating`, index is
time index.
|
def accuracy(current, predicted):
acc = 0
if np.count_nonzero(predicted) > 0:
acc = float(np.dot(current, predicted))/float(np.count_nonzero(predicted))
return acc
|
Computes the accuracy of the TM at time-step t based on the prediction
at time-step t-1 and the current active columns at time-step t.
@param current (array) binary vector containing current active columns
@param predicted (array) binary vector containing predicted active columns
@return acc (float) prediction accuracy of the TM at time-step t
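A quick worked example with hypothetical column vectors:
>>> import numpy as np
>>> accuracy(np.array([1, 0, 1, 1]), np.array([1, 0, 0, 1]))
1.0
>>> accuracy(np.array([1, 1, 0, 0]), np.array([1, 0, 1, 0]))
0.5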
|
def preflightInfo(info):
missingRequired = set()
missingRecommended = set()
for attr in requiredAttributes:
if not hasattr(info, attr) or getattr(info, attr) is None:
missingRequired.add(attr)
for attr in recommendedAttributes:
if not hasattr(info, attr) or getattr(info, attr) is None:
missingRecommended.add(attr)
return dict(missingRequired=missingRequired, missingRecommended=missingRecommended)
|
Returns a dict containing two items. The value for each
item will be a set of info attribute names.
==================  ===============================
missingRequired     Required data that is missing.
missingRecommended  Recommended data that is missing.
==================  ===============================
|
def cli(env, volume_id, reason, immediate):
file_storage_manager = SoftLayer.FileStorageManager(env.client)
if not (env.skip_confirmations or formatting.no_going_back(volume_id)):
raise exceptions.CLIAbort('Aborted')
cancelled = file_storage_manager.cancel_snapshot_space(
volume_id, reason, immediate)
if cancelled:
if immediate:
click.echo('File volume with id %s has been marked'
' for immediate snapshot cancellation' % volume_id)
else:
click.echo('File volume with id %s has been marked'
' for snapshot cancellation' % volume_id)
else:
click.echo('Unable to cancel snapshot space for file volume %s'
% volume_id)
|
Cancel existing snapshot space for a given volume.
|
def wheel(self, direction, steps):
self._lock.acquire()
if direction == 1:
wheel_moved = steps
elif direction == 0:
wheel_moved = -1*steps
else:
raise ValueError("Expected direction to be 1 or 0")
self._lock.release()
return mouse.wheel(wheel_moved)
|
Clicks the wheel the specified number of steps in the given direction.
Use Mouse.WHEEL_DOWN, Mouse.WHEEL_UP
|
def update_user_info(self, **kwargs):
if kwargs:
self.config.update(kwargs)
method, url = get_URL('user_update')
res = getattr(self.session, method)(url, params=self.config)
if res.status_code == 200:
return True
hellraiser(res)
|
Update user info and settings.
:param \*\*kwargs: settings to be merged with
:func:`User.get_configfile` settings and sent to Filemail.
:rtype: ``bool``
|
def broadcast(self):
log.debug("Broadcasting M-SEARCH to %s:%s", self.mcast_ip, self.mcast_port)
request = '\r\n'.join(("M-SEARCH * HTTP/1.1",
"HOST:{mcast_ip}:{mcast_port}",
"ST:upnp:rootdevice",
"MX:2",
'MAN:"ssdp:discover"',
"", "")).format(**self.__dict__)
self.server.sendto(request.encode(), (self.mcast_ip, self.mcast_port))
|
Send a multicast M-SEARCH request asking for devices to report in.
|
def get_last_nonce(app, key, nonce):
uk = ses.query(um.UserKey).filter(um.UserKey.key==key)\
.filter(um.UserKey.last_nonce<nonce * 1000).first()
if not uk:
return None
lastnonce = copy.copy(uk.last_nonce)
uk.last_nonce = nonce * 1000
try:
ses.commit()
except Exception as e:
current_app.logger.exception(e)
ses.rollback()
ses.flush()
return lastnonce
|
Get the last_nonce used by the given key from the SQLAlchemy database.
Update the last_nonce to nonce at the same time.
:param str key: the public key the nonce belongs to
:param int nonce: the last nonce used by this key
|
def add_item(self, alias, item):
if not isinstance(alias, six.string_types):
raise TypeError('Item name must be a string, got a {!r}'.format(type(alias)))
item = copy.deepcopy(item)
if item.name is not_set:
item.name = alias
if self.settings.str_path_separator in item.name:
raise ValueError(
'Item name must not contain str_path_separator which is configured for this Config -- {!r} -- '
'but {!r} does.'.format(self.settings.str_path_separator, item)
)
self._tree[item.name] = item
if item.name != alias:
if self.settings.str_path_separator in alias:
raise ValueError(
'Item alias must not contain str_path_separator which is configured for this Config -- {!r} --'
'but {!r} used for {!r} does.'.format(self.settings.str_path_separator, alias, item)
)
self._tree[alias] = item
item._section = self
self.dispatch_event(self.hooks.item_added_to_section, alias=alias, section=self, subject=item)
|
Add a config item to this section.
|
def lexeme(self, verb, parse=True):
a = []
b = self.lemma(verb, parse=parse)
if b in self:
a = [x for x in self[b] if x != ""]
elif parse is True:
a = self.find_lexeme(b)
u = []; [u.append(x) for x in a if x not in u]
return u
|
Returns a list of all possible inflections of the given verb.
|
def unwrap(self, value, session=None):
self.validate_unwrap(value)
return set([self.item_type.unwrap(v, session=session) for v in value])
|
Unwraps the elements of ``value`` using ``SetField.item_type`` and
returns them in a set
|
def get_subscriber_queue(self, event_types=None):
try:
self.started_queue.get(timeout=1)
raise RuntimeError('Cannot create a new subscriber queue while Exchange is running.')
except Empty:
pass
if event_types is None:
event_types = EventTypes.ALL
queue = Queue()
self.queues[event_types].append(queue)
return queue
|
Create a new queue for a specific combination of event types
and return it.
Returns:
a :class:`multiprocessing.Queue`.
Raises:
RuntimeError if called after `run`
|
def is_valid_schedule(schedule, events, slots):
if len(schedule) == 0:
return False
array = converter.schedule_to_array(schedule, events, slots)
return is_valid_array(array, events, slots)
|
Take a schedule and return whether it is a valid solution for the
given constraints
Parameters
----------
schedule : list or tuple
a schedule in schedule form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
bool
True if schedule is a valid solution
|
def upload_file(self, dataset_key, name, file_metadata={}, **kwargs):
owner_id, dataset_id = parse_dataset_key(dataset_key)
try:
self._uploads_api.upload_file(owner_id, dataset_id, name, **kwargs)
if file_metadata:
self.update_dataset(dataset_key, files=file_metadata)
except _swagger.rest.ApiException as e:
raise RestApiError(cause=e)
|
Upload one file to a dataset
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param name: Name/path for files stored in the local filesystem
:type name: str
:param expand_archives: Boolean value to indicate files should be
expanded upon upload
:type expand_archives: bool, optional
:param file_metadata: Dict containing the name of files and metadata
Uses file name as a dict containing File description, labels and
source URLs to add or update
:type file_metadata: dict, optional
:raises RestApiException: If a server error occurs
Examples
--------
>>> import datadotworld as dw
>>> api_client = dw.api_client()
>>> api_client.upload_file(
... 'username/test-dataset',
... 'example.csv') # doctest: +SKIP
|
def render_description_meta_tag(context, is_og=False):
request = context['request']
content = ''
if context.get('object'):
try:
content = context['object'].get_meta_description()
except AttributeError:
pass
elif context.get('meta_tagger'):
content = context['meta_tagger'].get('description')
if not content:
try:
content = request.current_page.get_meta_description()
except (AttributeError, NoReverseMatch):
pass
if content:
return mark_safe('<meta {attr_name}="{tag_name}" content="{content}">'.format(
attr_name='name' if not is_og else 'property',
tag_name='description' if not is_og else 'og:description',
content=content
))
else:
return ''
|
Returns the description as meta or open graph tag.
|
def getOntology(self, id_):
if id_ not in self._ontologyIdMap:
raise exceptions.OntologyNotFoundException(id_)
return self._ontologyIdMap[id_]
|
Returns the ontology with the specified ID.
|
def repl_update(self, config):
cfg = config.copy()
cfg['version'] += 1
try:
result = self.run_command("replSetReconfig", cfg)
if int(result.get('ok', 0)) != 1:
return False
except pymongo.errors.AutoReconnect:
self.update_server_map(cfg)
self.waiting_member_state()
self.waiting_config_state()
return self.connection() and True
|
Reconfigure the replica set with a new config.
|
def reload(self, callback=None, errback=None):
return self.load(reload=True, callback=callback, errback=errback)
|
Reload record data from the API.
|
def HandleAccounts(self, result):
self.logger.debug('Checking for changes to user accounts.')
configured_users = self.utils.GetConfiguredUsers()
enable_oslogin = self._GetEnableOsLoginValue(result)
enable_two_factor = self._GetEnableTwoFactorValue(result)
if enable_oslogin:
desired_users = {}
self.oslogin.UpdateOsLogin(True, two_factor_desired=enable_two_factor)
else:
desired_users = self._GetAccountsData(result)
self.oslogin.UpdateOsLogin(False)
remove_users = sorted(set(configured_users) - set(desired_users.keys()))
self._UpdateUsers(desired_users)
self._RemoveUsers(remove_users)
self.utils.SetConfiguredUsers(desired_users.keys())
|
Called when there are changes to the contents of the metadata server.
Args:
result: json, the deserialized contents of the metadata server.
|
def default(self, meth):
if self._default is not NOTHING:
raise DefaultAlreadySetError()
self._default = Factory(meth, takes_self=True)
return meth
|
Decorator that allows to set the default for an attribute.
Returns *meth* unchanged.
:raises DefaultAlreadySetError: If default has been set before.
.. versionadded:: 17.1.0
|
def put(self, content_bytes):
derived_path = self.context.request.url
over_max, content_size = self.content_size_exceeded_max(content_bytes)
logger.debug('[{log_prefix}] content size in bytes: {size}'
' | is over max? {over_max} | skip storage? {skip}'.format(
log_prefix=LOG_PREFIX, size=content_size, over_max=over_max,
skip=self.skip_storage()))
if (over_max and self.skip_storage()):
logger.debug('[{log_prefix}] skipping storage: {content_size} '
'exceeds item_size_max of {max_size}'.format(
log_prefix=LOG_PREFIX, content_size=content_size,
max_size=self.item_size_max()))
return None
self.storage.set(
self.timestamp_key_for(derived_path), datetime.utcnow(),
time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS
)
self.storage.set(
self.result_key_for(derived_path), content_bytes,
time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS
)
return derived_path
|
Save the `bytes` under a key derived from `path` in Memcache.
:return: A string representing the content path if it is stored.
:rtype: string or None
|
def angle_to_name(angle, segments=8, abbr=False):
if segments == 4:
        string = COMPASS_NAMES[int((angle + 45) / 90) % 4 * 4]
elif segments == 8:
string = COMPASS_NAMES[int((angle + 22.5) / 45) % 8 * 2]
elif segments == 16:
string = COMPASS_NAMES[int((angle + 11.25) / 22.5) % 16]
else:
raise ValueError('Segments parameter must be 4, 8 or 16 not %r'
% segments)
if abbr:
return ''.join(i[0].capitalize() for i in string.split('-'))
else:
return string
|
Convert angle in to direction name.
Args:
angle (float): Angle in degrees to convert to direction name
segments (int): Number of segments to split compass in to
abbr (bool): Whether to return abbreviated direction string
Returns:
str: Direction name for ``angle``
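A small sketch, assuming COMPASS_NAMES lists the usual 16 compass points
clockwise starting from 'North':
>>> angle_to_name(0)
'North'
>>> angle_to_name(90)
'East'
>>> angle_to_name(180)
'South'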
|
def _eratosthenes():
d = {}
for q in count(2):
p = d.pop(q, None)
if p is None:
yield q
d[q * q] = q
else:
x = p + q
while x in d:
x += p
d[x] = p
|
Yields the sequence of prime numbers via the Sieve of Eratosthenes.
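A quick check of the generator (it yields primes indefinitely, so slice it):
>>> from itertools import islice
>>> list(islice(_eratosthenes(), 10))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]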
|
def RetrievePluginAsset(self, plugin_name, asset_name):
return plugin_asset_util.RetrieveAsset(self.path, plugin_name, asset_name)
|
Return the contents of a given plugin asset.
Args:
plugin_name: The string name of a plugin.
asset_name: The string name of an asset.
Returns:
The string contents of the plugin asset.
Raises:
KeyError: If the asset is not available.
|
def normalize(self):
for i, (_, amplitude, phase) in enumerate(self.model):
if amplitude < 0:
self.model['amplitude'][i] = -amplitude
self.model['phase'][i] = phase + 180.0
self.model['phase'][i] = np.mod(self.model['phase'][i], 360.0)
|
Adapt self.model so that amplitudes are positive and phases are in [0,360) as per convention
|
def ack(self, msg):
message_id = msg['headers']['message-id']
subscription = msg['headers']['subscription']
transaction_id = None
if 'transaction-id' in msg['headers']:
transaction_id = msg['headers']['transaction-id']
return ack(message_id, subscription, transaction_id)
|
Called when a MESSAGE has been received.
Override this method to handle received messages.
This function will generate an acknowledge message
for the given message and transaction (if present).
|
def build_dependencies(self):
for m in self.modules:
m.build_dependencies()
for p in self.packages:
p.build_dependencies()
|
Recursively build the dependencies for sub-modules and sub-packages.
Iterate on node's modules then packages and call their
build_dependencies methods.
|
def submit(self, spec):
spec = ApplicationSpec._from_any(spec)
resp = self._call('submit', spec.to_protobuf())
return resp.id
|
Submit a new skein application.
Parameters
----------
spec : ApplicationSpec, str, or dict
A description of the application to run. Can be an
``ApplicationSpec`` object, a path to a yaml/json file, or a
dictionary description of an application specification.
Returns
-------
app_id : str
The id of the submitted application.
|
def snapshot_list(self):
NO_SNAPSHOTS_TAKEN = 'No snapshots have been taken yet!'
output = self._run_vagrant_command(['snapshot', 'list'])
if NO_SNAPSHOTS_TAKEN in output:
return []
else:
return output.splitlines()
|
This command will list all the snapshots taken.
|
def create_app_from_yml(path):
try:
with open(path, "rt", encoding="UTF-8") as f:
try:
interpolated = io.StringIO(f.read() % {
"here": os.path.abspath(os.path.dirname(path))})
interpolated.name = f.name
conf = yaml.safe_load(interpolated)
except yaml.YAMLError as exc:
raise RuntimeError(
"Cannot parse a configuration file. Context: " + str(exc))
except FileNotFoundError:
conf = {"metadata": None, "pipes": {}}
return core.create_app(conf["metadata"], pipes=conf["pipes"])
|
Return an application instance created from YAML.
|
def request_access_token(self, params):
return self.request(self.access_token_url, method="GET", params=params)
|
Foursquare does not accept POST requests to retrieve an access token,
so we'll be doing a GET request instead.
|
def from_wif_or_ewif_hex(wif_hex: str, password: Optional[str] = None) -> SigningKeyType:
wif_bytes = Base58Encoder.decode(wif_hex)
fi = wif_bytes[0:1]
if fi == b"\x01":
return SigningKey.from_wif_hex(wif_hex)
elif fi == b"\x02" and password is not None:
return SigningKey.from_ewif_hex(wif_hex, password)
else:
raise Exception("Error: Bad format: not WIF nor EWIF")
|
Return SigningKey instance from Duniter WIF or EWIF in hexadecimal format
:param wif_hex: WIF or EWIF string in hexadecimal format
:param password: Password of EWIF encrypted seed
|
def tar_open(f):
if isinstance(f, six.string_types):
return tarfile.open(name=f)
else:
return tarfile.open(fileobj=f)
|
Open either a filename or a file-like object as a TarFile.
Parameters
----------
f : str or file-like object
The filename or file-like object from which to read.
Returns
-------
TarFile
A `TarFile` instance.
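A hedged usage sketch (hypothetical archive path):
>>> with tar_open('bundle.tar.gz') as archive:  # doctest: +SKIP
...     members = archive.getnames()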
|
def slice_target(self,chr,start,end):
trng = Bed(chr,start,end)
nrngs = []
for r in self._rngs:
i = r.intersect(trng)
if not i: continue
nrngs.append(i)
if len(nrngs) == 0: return None
return MappingGeneric(nrngs,self._options)
|
Slice the mapping by the target coordinate
First coordinate is 0-indexed start
Second coordinate is 1-indexed finish
|
def status_charge():
data = status()
if 'BCHARGE' in data:
charge = data['BCHARGE'].split()
if charge[1].lower() == 'percent':
return float(charge[0])
return {'Error': 'Load not available.'}
|
Return battery charge
CLI Example:
.. code-block:: bash
salt '*' apcups.status_charge
|
def cmd_posvel(self, args):
ignoremask = 511
latlon = None
try:
latlon = self.module('map').click_position
except Exception:
pass
if latlon is None:
print ("set latlon to zeros")
latlon = [0, 0]
else:
ignoremask = ignoremask & 504
print ("found latlon", ignoremask)
vN = 0
vE = 0
vD = 0
if (len(args) == 3):
vN = float(args[0])
vE = float(args[1])
vD = float(args[2])
ignoremask = ignoremask & 455
print ("ignoremask",ignoremask)
print (latlon)
self.master.mav.set_position_target_global_int_send(
0,
1,
0,
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
ignoremask,
int(latlon[0] * 1e7),
int(latlon[1] * 1e7),
10,
vN, vE, vD,
0, 0, 0,
0, 0)
|
posvel mapclick vN vE vD
|
def scale_joint_sfs(s):
i = np.arange(s.shape[0])[:, None]
j = np.arange(s.shape[1])[None, :]
out = (s * i) * j
return out
|
Scale a joint site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n1, n2)
Joint site frequency spectrum.
Returns
-------
joint_sfs_scaled : ndarray, int, shape (n1, n2)
Scaled joint site frequency spectrum.
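A small worked example:
>>> import numpy as np
>>> scale_joint_sfs(np.ones((2, 3), dtype=int))
array([[0, 0, 0],
       [0, 1, 2]])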
|
def register_on_machine_registered(self, callback):
event_type = library.VBoxEventType.on_machine_registered
return self.event_source.register_callback(callback, event_type)
|
Set the callback function to consume on machine registered events.
Callback receives a IMachineRegisteredEvent object.
Returns the callback_id
|
def get_all_launch_configurations(region=None, key=None, keyid=None,
profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
retries = 30
while True:
try:
return conn.get_all_launch_configurations()
except boto.exception.BotoServerError as e:
if retries and e.code == 'Throttling':
log.debug('Throttled by AWS API, retrying in 5 seconds...')
time.sleep(5)
retries -= 1
continue
log.error(e)
return []
|
Fetch and return all Launch Configuration with details.
CLI example::
salt myminion boto_asg.get_all_launch_configurations
|
def get_escalation_policies(profile='pagerduty', subdomain=None, api_key=None):
return _list_items(
'escalation_policies',
'id',
profile=profile,
subdomain=subdomain,
api_key=api_key,
)
|
List escalation_policies belonging to this account
CLI Example:
salt myminion pagerduty.get_escalation_policies
|
def cache(self):
if not self._cache:
use_cache = getattr(settings, 'USE_DRF_INSTANCE_CACHE', True)
if use_cache:
from django.core.cache import cache
self._cache = cache
return self._cache
|
Get the Django cache interface.
This allows disabling the cache with
settings.USE_DRF_INSTANCE_CACHE=False. It also delays import so that
Django Debug Toolbar will record cache requests.
|
def get_all_args(fn) -> list:
sig = inspect.signature(fn)
return list(sig.parameters)
|
Returns a list of all arguments for the function fn.
>>> def foo(x, y, z=100): return x + y + z
>>> get_all_args(foo)
['x', 'y', 'z']
|
def qs_for_ip(cls, ip_str):
ip = int(netaddr.IPAddress(ip_str))
if ip > 4294967295:
return cls.objects.none()
ip_range_query = {
'start__lte': ip,
'stop__gte': ip
}
return cls.objects.filter(**ip_range_query)
|
Returns a queryset with matching IPNetwork objects for the given IP.
|
def UpdateClientsFromFleetspeak(clients):
if not fleetspeak_connector.CONN or not fleetspeak_connector.CONN.outgoing:
return
id_map = {}
for client in clients:
if client.fleetspeak_enabled:
id_map[fleetspeak_utils.GRRIDToFleetspeakID(client.client_id)] = client
if not id_map:
return
res = fleetspeak_connector.CONN.outgoing.ListClients(
admin_pb2.ListClientsRequest(client_ids=list(iterkeys(id_map))))
for read in res.clients:
api_client = id_map[read.client_id]
api_client.last_seen_at = fleetspeak_utils.TSToRDFDatetime(
read.last_contact_time)
api_client.last_clock = fleetspeak_utils.TSToRDFDatetime(read.last_clock)
|
Updates ApiClient records to include info from Fleetspeak.
|
def dtypes(self):
return [(str(f.name), f.dataType.simpleString()) for f in self.schema.fields]
|
Returns all column names and their data types as a list.
>>> df.dtypes
[('age', 'int'), ('name', 'string')]
|
def add_row(self, row_data, resize_x=True):
if not resize_x:
self._check_row_size(row_data)
self.body(Tr()(Td()(cell) for cell in row_data))
return self
|
Adds a row at the end of the table
|
def createManager(self, services, yadis_url=None):
key = self.getSessionKey()
if self.getManager():
raise KeyError('There is already a %r manager for %r' %
(key, self.url))
if not services:
return None
manager = YadisServiceManager(self.url, yadis_url, services, key)
manager.store(self.session)
return manager
|
Create a new YadisService Manager for this starting URL and
suffix, and store it in the session.
@raises KeyError: When I already have a manager.
@return: A new YadisServiceManager or None
|
def reind_proc(self, inputstring, **kwargs):
out = []
level = 0
for line in inputstring.splitlines():
line, comment = split_comment(line.strip())
indent, line = split_leading_indent(line)
level += ind_change(indent)
if line:
line = " " * self.tabideal * level + line
line, indent = split_trailing_indent(line)
level += ind_change(indent)
line = (line + comment).rstrip()
out.append(line)
if level != 0:
complain(CoconutInternalException("non-zero final indentation level", level))
return "\n".join(out)
|
Add back indentation.
|
def validate_row_lengths(fields,
data
):
for i, row in enumerate(data):
if len(fields) != len(row):
msg = 'Row {} has {} entries when {} are expected.'.format(
i, len(row), len(fields))
raise FormatError(msg)
|
Validate the `data` row lengths according to the specification
in `fields`.
:param fields: The `FieldSpec` objects forming the
specification.
:param data: The rows to check.
:raises FormatError: When the number of entries in a row does
not match expectation.
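An illustrative sketch; plain strings stand in for `FieldSpec` objects here,
since only the length of `fields` is used:
>>> validate_row_lengths(['name', 'dob'], [['Alice', '1970-01-01'], ['Bob']])  # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
FormatError: Row 1 has 1 entries when 2 are expected.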
|
def get_resource_path(name, raise_exception=False):
if not RuntimeGlobals.resources_directories:
RuntimeGlobals.resources_directories.append(
os.path.normpath(os.path.join(umbra.__path__[0], Constants.resources_directory)))
for path in RuntimeGlobals.resources_directories:
path = os.path.join(path, name)
if foundations.common.path_exists(path):
LOGGER.debug("> '{0}' resource path: '{1}'.".format(name, path))
return path
if raise_exception:
raise umbra.exceptions.ResourceExistsError(
"{0} | No resource file path found for '{1}' name!".format(__name__, name))
|
Returns the resource file path matching the given name.
:param name: Resource name.
:type name: unicode
:param raise_exception: Raise the exception.
:type raise_exception: bool
:return: Resource path.
:rtype: unicode
|
def b58check_encode(bin_s, version_byte=0):
bin_s = chr(int(version_byte)) + bin_s
num_leading_zeros = len(re.match(r'^\x00*', bin_s).group(0))
bin_s = bin_s + bin_checksum(bin_s)
hex_s = hexlify(bin_s)
b58_s = change_charset(hex_s, HEX_KEYSPACE, B58_KEYSPACE)
return B58_KEYSPACE[0] * num_leading_zeros + b58_s
|
Takes in a binary string and converts it to a base 58 check string.
|
def is_sub_plate(self, other):
if all(v in set(other.values) for v in self.values):
return True
if all(any(all(spv in m for spv in v) for m in map(set, other.values)) for v in self.values):
return True
if other in self.ancestor_plates:
return True
return False
|
Determines if this plate is a sub-plate of another plate -
i.e. has the same meta data but a restricted set of values
:param other: The other plate
:return: True if this plate is a sub-plate of the other plate
|
def _bsecurate_cli_view_graph(args):
curate.view_graph(args.basis, args.version, args.data_dir)
return ''
|
Handles the view-graph subcommand
|
def get(self, request, hook_id):
try:
bot = caching.get_or_set(MessengerBot, hook_id)
except MessengerBot.DoesNotExist:
logger.warning("Hook id %s not associated to a bot" % hook_id)
return Response(status=status.HTTP_404_NOT_FOUND)
if request.query_params.get('hub.verify_token') == str(bot.id):
return Response(int(request.query_params.get('hub.challenge')))
return Response('Error, wrong validation token')
|
Verify token when configuring webhook from facebook dev.
MessengerBot.id is used for verification
|
def path_is_known_executable(path):
return (
path_is_executable(path)
or os.access(str(path), os.R_OK)
and path.suffix in KNOWN_EXTS
)
|
Returns whether a given path is a known executable from known executable extensions
or has the executable bit toggled.
:param path: The path to the target executable.
:type path: :class:`~vistir.compat.Path`
:return: True if the path has chmod +x, or is a readable, known executable extension.
:rtype: bool
|
def get_readable_forums(self, forums, user):
if user.is_superuser:
return forums
readable_forums = self._get_forums_for_user(
user, ['can_read_forum', ], use_tree_hierarchy=True)
return forums.filter(id__in=[f.id for f in readable_forums]) \
if isinstance(forums, (models.Manager, models.QuerySet)) \
else list(filter(lambda f: f in readable_forums, forums))
|
Returns a queryset of forums that can be read by the considered user.
|
def get_index(binstr, end_index=160):
res = -1
try:
res = binstr.index('1') + 1
except ValueError:
res = end_index
return res
|
Return the position of the first 1 bit
from the left in the word until end_index
:param binstr:
:param end_index:
:return:
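For example:
>>> get_index('00010110')
4
>>> get_index('0000')
160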
|
def get_trust(self):
if not bool(self._my_map['trustId']):
raise errors.IllegalState('this Authorization has no trust')
mgr = self._get_provider_manager('AUTHENTICATION.PROCESS')
if not mgr.supports_trust_lookup():
raise errors.OperationFailed('Authentication.Process does not support Trust lookup')
lookup_session = mgr.get_trust_lookup_session(proxy=getattr(self, "_proxy", None))
lookup_session.use_federated_agency_view()
osid_object = lookup_session.get_trust(self.get_trust_id())
return osid_object
|
Gets the ``Trust`` for this authorization.
return: (osid.authentication.process.Trust) - the ``Trust``
raise: IllegalState - ``has_trust()`` is ``false``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
|
def get_acl_on(request):
acl_on = settings.ACL_ON
if settings.LOCKDOWN and hasattr(request, 'user'):
if request.user.is_superuser:
acl_on = False
return acl_on
|
Returns `True` if the ACL should be honored, otherwise returns `False`.
|
def _precompile_substitution(self, kind, pattern):
if pattern not in self._regexc[kind]:
qm = re.escape(pattern)
self._regexc[kind][pattern] = {
"qm": qm,
"sub1": re.compile(r'^' + qm + r'$'),
"sub2": re.compile(r'^' + qm + r'(\W+)'),
"sub3": re.compile(r'(\W+)' + qm + r'(\W+)'),
"sub4": re.compile(r'(\W+)' + qm + r'$'),
}
|
Pre-compile the regexp for a substitution pattern.
This will speed up the substitutions that happen at the beginning of
the reply fetching process. With the default brain, this took the
time for _substitute down from 0.08s to 0.02s
:param str kind: One of ``sub``, ``person``.
:param str pattern: The substitution pattern.
|
def And(*args: Union[Bool, bool]) -> Bool:
union = []
args_list = [arg if isinstance(arg, Bool) else Bool(arg) for arg in args]
for arg in args_list:
union.append(arg.annotations)
return Bool(z3.And([a.raw for a in args_list]), union)
|
Create an And expression.
|
def weight_decay(decay_rate, var_list, skip_biases=True):
if not decay_rate:
return 0.
tf.logging.info("Applying weight decay, decay_rate: %0.5f", decay_rate)
weight_decays = []
for v in var_list:
is_bias = len(v.shape.as_list()) == 1 and v.name.endswith("bias:0")
if not (skip_biases and is_bias):
with tf.device(v.device):
v_loss = tf.nn.l2_loss(v)
weight_decays.append(v_loss)
return tf.add_n(weight_decays) * decay_rate
|
Apply weight decay to vars in var_list.
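A hedged sketch of wiring this into a total loss (TF1-style graph code;
`task_loss` is a hypothetical tensor):
>>> total_loss = task_loss + weight_decay(1e-4, tf.trainable_variables())  # doctest: +SKIP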
|
def get_option_choices(opt_name, opt_value, default_value, all_choices):
choices = []
if isinstance(opt_value, six.string_types):
choices = [opt_value]
elif isinstance(opt_value, (list, tuple)):
choices = list(opt_value)
elif opt_value is None:
choices = default_value
else:
raise InvalidOption('Option %s has invalid'
' value: %s' % (opt_name, opt_value))
if 'all' in choices:
choices = all_choices
for item in choices:
if item not in all_choices:
raise InvalidOption('Choices of option %s contains invalid'
' item: %s' % (opt_name, item))
return choices
|
Generate possible choices for the option `opt_name`
limited to `opt_value` value with default value
as `default_value`
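A small sketch of the intended behaviour (hypothetical option names and values):
>>> get_option_choices('parsers', None, ['lxml'], ['lxml', 'html5lib'])
['lxml']
>>> get_option_choices('parsers', 'all', ['lxml'], ['lxml', 'html5lib'])
['lxml', 'html5lib']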
|
def _parse_version(self, line):
version_string = line.split(' ')[1]
version_list = version_string.split('.')
major_version = ''.join([version_list[0], version_list[1]])
release_num = ''.join([version_list[2].rstrip(), "-03"])
return (major_version, release_num)
|
There's a magic suffix to the release version, currently it's -03, but
it increments seemingly randomly.
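A hedged sketch (hypothetical banner line and instance; the method only inspects
the second whitespace-separated token):
>>> parser._parse_version('Firmware 5.40.07')  # doctest: +SKIP
('540', '07-03')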
|
async def get_checkpoint_async(self, partition_id):
lease = await self.get_lease_async(partition_id)
checkpoint = None
if lease:
if lease.offset:
checkpoint = Checkpoint(partition_id, lease.offset,
lease.sequence_number)
return checkpoint
|
Get the checkpoint data associated with the given partition.
Could return None if no checkpoint has been created for that partition.
:param partition_id: The partition ID.
:type partition_id: str
:return: Given partition checkpoint info, or `None` if none has been previously stored.
:rtype: ~azure.eventprocessorhost.checkpoint.Checkpoint
|