def get_plugin_option(self, plugin, key):
if plugin in self.plugins:
plugin = self.plugins[plugin]
return plugin.get_option(key)
|
def release(self):
if self.table is None:
raise GiraffeError("Cannot release. Target table has not been set.")
log.info("BulkLoad", "Attempting release for table {}".format(self.table))
self.mload.release(self.table)
|
def get_enumerations_from_bit_mask(enumeration, mask):
return [x for x in enumeration if (x.value & mask) == x.value]
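# Hedged usage sketch: Permission below is a hypothetical IntFlag, not from the source.
# It shows how the helper keeps only members whose bits are fully covered by the mask.
from enum import IntFlag

class Permission(IntFlag):
    READ = 1
    WRITE = 2
    EXECUTE = 4

selected = get_enumerations_from_bit_mask(Permission, 0b011)
# selected -> [Permission.READ, Permission.WRITE]; EXECUTE (bit 4) lies outside the mask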
|
def update_w(self):
def update_single_w(i):
FB = base.matrix(np.float64(np.dot(-self.data.T, W_hat[:,i])))
be = solvers.qp(HB, FB, INQa, INQb, EQa, EQb)
self.beta[i,:] = np.array(be['x']).reshape((1, self._num_samples))
HB = base.matrix(np.float64(np.dot(self.data[:,:].T, self.data[:,:])))
EQb = base.matrix(1.0, (1, 1))
W_hat = np.dot(self.data, pinv(self.H))
INQa = base.matrix(-np.eye(self._num_samples))
INQb = base.matrix(0.0, (self._num_samples, 1))
EQa = base.matrix(1.0, (1, self._num_samples))
for i in range(self._num_bases):
update_single_w(i)
self.W = np.dot(self.beta, self.data.T).T
|
def inverse(self):
return Snapshot(self.num_qubits, self.num_clbits, self.params[0],
self.params[1])
|
def prune_all(self) -> int:
from .repositories import PriceRepository
repo = PriceRepository()
items = repo.query.distinct(dal.Price.namespace, dal.Price.symbol).all()
count = 0
for item in items:
symbol = SecuritySymbol(item.namespace, item.symbol)
deleted = self.prune(symbol)
if deleted:
count += 1
return count
|
def _merge_single_runs(self, other_trajectory, used_runs):
count = len(self)
run_indices = range(len(other_trajectory))
run_name_dict = OrderedDict()
to_store_groups_with_annotations = []
for idx in run_indices:
if idx in used_runs:
other_info_dict = other_trajectory.f_get_run_information(idx)
time_ = other_info_dict['time']
timestamp = other_info_dict['timestamp']
completed = other_info_dict['completed']
short_environment_hexsha = other_info_dict['short_environment_hexsha']
finish_timestamp = other_info_dict['finish_timestamp']
runtime = other_info_dict['runtime']
new_idx = used_runs[idx]
new_runname = self.f_wildcard('$', new_idx)
run_name_dict[idx] = new_runname
info_dict = dict(
idx=new_idx,
time=time_,
timestamp=timestamp,
completed=completed,
short_environment_hexsha=short_environment_hexsha,
finish_timestamp=finish_timestamp,
runtime=runtime)
self._add_run_info(**info_dict)
|
def _get_resource_url(self, url, auto_page, data_key):
headers = {'Accept': 'application/json',
'Connection': 'keep-alive'}
response = DAO.getURL(url, headers)
if response.status != 200:
raise DataFailureException(url, response.status, response.data)
data = json.loads(response.data)
self.next_page_url = self._next_page(response)
if auto_page and self.next_page_url:
if isinstance(data, list):
data.extend(self._get_resource_url(self.next_page_url, True,
data_key))
elif isinstance(data, dict) and data_key is not None:
data[data_key].extend(self._get_resource_url(
self.next_page_url, True, data_key)[data_key])
return data
|
def start_workers(self, workers_per_task=1):
if not self.workers:
for _ in range(workers_per_task):
self.workers.append(Worker(self._download, self.queues['download'], self.queues['convert'], self.stopper))
self.workers.append(Worker(self._convert, self.queues['convert'], self.queues['upload'], self.stopper))
self.workers.append(Worker(self._upload, self.queues['upload'], self.queues['delete'], self.stopper))
self.workers.append(Worker(self._delete, self.queues['delete'], self.queues['done'], self.stopper))
self.signal_handler = SignalHandler(self.workers, self.stopper)
signal.signal(signal.SIGINT, self.signal_handler)
for worker in self.workers:
worker.start()
|
def bind_parameters(self, value_dict):
new_circuit = self.copy()
if value_dict.keys() > self.parameters:
raise QiskitError('Cannot bind parameters ({}) not present in the circuit.'.format(
[str(p) for p in value_dict.keys() - self.parameters]))
for parameter, value in value_dict.items():
new_circuit._bind_parameter(parameter, value)
for parameter in value_dict:
del new_circuit._parameter_table[parameter]
return new_circuit
|
def _delete_resource(self, url):
params = {}
self._set_as_user(params)
headers = {'Accept': 'application/json',
'Connection': 'keep-alive'}
url = url + self._params(params)
response = DAO.deleteURL(url, headers)
if not (response.status == 200 or response.status == 204):
raise DataFailureException(url, response.status, response.data)
return response
|
def process_module(self, node):
if self.config.file_header:
if sys.version_info[0] < 3:
pattern = re.compile(
r'\A' + self.config.file_header, re.LOCALE | re.MULTILINE)
else:
pattern = re.compile(
r'\A' + self.config.file_header, re.MULTILINE)
content = None
with node.stream() as stream:
content = stream.read().decode('utf-8')
matches = pattern.findall(content)
if len(matches) != 1:
self.add_message('invalid-file-header', 1,
args=self.config.file_header)
|
def tree_climber(self, tree_alias, base_item):
if base_item is not None:
base_item.in_current_branch = True
if hasattr(base_item, 'parent') and base_item.parent is not None:
self.tree_climber(tree_alias, self.get_item_by_id(tree_alias, base_item.parent.id))
|
def convert_acquire(self, shift, instruction):
meas_level = self._run_config.get('meas_level', 2)
command_dict = {
'name': 'acquire',
't0': shift+instruction.start_time,
'duration': instruction.duration,
'qubits': [q.index for q in instruction.acquires],
'memory_slot': [m.index for m in instruction.mem_slots]
}
if meas_level == 2:
if instruction.command.discriminator:
command_dict.update({
'discriminators': [
QobjMeasurementOption(
name=instruction.command.discriminator.name,
params=instruction.command.discriminator.params)
]
})
command_dict.update({
'register_slot': [regs.index for regs in instruction.reg_slots]
})
if meas_level >= 1:
if instruction.command.kernel:
command_dict.update({
'kernels': [
QobjMeasurementOption(
name=instruction.command.kernel.name,
params=instruction.command.kernel.params)
]
})
return self._qobj_model(**command_dict)
|
def getUserId(self):
self.userId = self("GET", "{0}/users/self/profile".format(self.API_USER),
auth=self.Auth.SkypeToken).json().get("username")
|
def fields(self, *fields):
if len(fields) == 0:
return [el.split() for el in self]
res = SList()
for el in [f.split() for f in self]:
lineparts = []
for fd in fields:
try:
lineparts.append(el[fd])
except IndexError:
pass
if lineparts:
res.append(" ".join(lineparts))
return res
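# Hedged usage sketch: SList here is IPython's string-list helper defined elsewhere;
# fields() picks whitespace-separated columns by index, silently skipping missing ones.
lines = SList(['alice 42 admin', 'bob 7 user'])
print(lines.fields(0, 2))  # ['alice admin', 'bob user']
print(lines.fields())      # [['alice', '42', 'admin'], ['bob', '7', 'user']]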
|
def run_picard_sort(job, bam, sort_by_name=False):
work_dir = job.fileStore.getLocalTempDir()
job.fileStore.readGlobalFile(bam, os.path.join(work_dir, 'input.bam'))
command = ['SortSam',
'O=/data/output.bam',
'I=/data/input.bam']
docker_parameters = ['--rm',
'--log-driver', 'none',
'-e', 'JAVA_OPTIONS=-Djava.io.tmpdir=/data/ -Xmx{}'.format(job.memory),
'-v', '{}:/data'.format(work_dir)]
if sort_by_name:
command.append('SO=queryname')
else:
command.append('SO=coordinate')
start_time = time.time()
dockerCall(job=job, workDir=work_dir,
parameters=command,
tool='quay.io/ucsc_cgl/picardtools:1.95--dd5ac549b95eb3e5d166a5e310417ef13651994e',
dockerParameters=docker_parameters)
end_time = time.time()
_log_runtime(job, start_time, end_time, "Picard SortSam")
return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'output.bam'))
|
def crash_handler_lite(etype, evalue, tb):
traceback.print_exception(etype, evalue, tb)
from IPython.core.interactiveshell import InteractiveShell
if InteractiveShell.initialized():
config = "%config "
else:
config = "c."
print >> sys.stderr, _lite_message_template.format(email=author_email, config=config)
|
def rule(cls, rulename=None, erase=False):
if not hasattr(cls, '_rules'):
raise TypeError(
"%s didn't seems to be a BasicParser subsclasse" % cls.__name__)
class_hook_list = cls._hooks
class_rule_list = cls._rules
def wrapper(f):
nonlocal rulename
add_method(cls)(f)
if rulename is None:
rulename = f.__name__
if not erase and (rulename in class_hook_list or rulename in class_rule_list):
raise TypeError("%s is already define has rule or hook" % rulename)
if '.' not in rulename:
rulename = cls.__module__ + '.' + cls.__name__ + '.' + rulename
set_one(class_rule_list, rulename, f)
return f
return wrapper
|
def set_request_header(self, name, value):
_name = BSTR(name)
_value = BSTR(value)
_WinHttpRequest._SetRequestHeader(self, _name, _value)
|
def source_expand(self, source):
result = []
if not isinstance(source, list):
source = [source]
for src in source:
tmp = self.opt.recursive
self.opt.recursive = False
result += [f['name'] for f in self.s3walk(src, True)]
self.opt.recursive = tmp
if (len(result) == 0) and (not self.opt.ignore_empty_source):
fail("[Runtime Failure] Source doesn't exist.")
return result
|
def new_frontend_master(self):
ip = self.ip if self.ip in LOCAL_IPS else LOCALHOST
kernel_manager = self.kernel_manager_class(
ip=ip,
connection_file=self._new_connection_file(),
config=self.config,
)
kwargs = dict()
kwargs['extra_arguments'] = self.kernel_argv
kernel_manager.start_kernel(**kwargs)
kernel_manager.start_channels()
widget = self.widget_factory(config=self.config,
local_kernel=True)
self.init_colors(widget)
widget.kernel_manager = kernel_manager
widget._existing = False
widget._may_close = True
widget._confirm_exit = self.confirm_exit
return widget
|
def valid(self):
if self.expiration_time:
return self.expiration_time > int(time.time())
else:
return True
|
def add_new_heart_handler(self, handler):
self.log.debug("heartbeat::new_heart_handler: %s", handler)
self._new_handlers.add(handler)
|
def get_ordered_list_type(meta_data, numId, ilvl):
numbering_dict = meta_data.numbering_dict
if numId not in numbering_dict:
return DEFAULT_LIST_NUMBERING_STYLE
if ilvl not in numbering_dict[numId]:
return DEFAULT_LIST_NUMBERING_STYLE
return meta_data.numbering_dict[numId][ilvl]
|
def configure_inline_support(shell, backend, user_ns=None):
try:
from IPython.zmq.pylab.backend_inline import InlineBackend
except ImportError:
return
user_ns = shell.user_ns if user_ns is None else user_ns
cfg = InlineBackend.instance(config=shell.config)
cfg.shell = shell
if cfg not in shell.configurables:
shell.configurables.append(cfg)
if backend == backends['inline']:
from IPython.zmq.pylab.backend_inline import flush_figures
from matplotlib import pyplot
shell.register_post_execute(flush_figures)
pyplot.rcParams.update(cfg.rc)
user_ns['figsize'] = pyplot.figsize = figsize
fmt = cfg.figure_format
select_figure_format(shell, fmt)
from IPython.core.display import display
user_ns['display'] = display
user_ns['getfigs'] = getfigs
|
def decode_bytecode(bytecode):
bytecode_wnd = memoryview(bytecode)
while bytecode_wnd:
opcode_id = byte2int(bytecode_wnd[0])
opcode = OPCODE_MAP[opcode_id]
if opcode.imm_struct is not None:
offs, imm, _ = opcode.imm_struct.from_raw(None, bytecode_wnd[1:])
else:
imm = None
offs = 0
insn_len = 1 + offs
yield Instruction(opcode, imm, insn_len)
bytecode_wnd = bytecode_wnd[insn_len:]
|
def __complete_interns(
self, value: str, include_private_vars: bool = True
) -> Iterable[str]:
if include_private_vars:
is_match = Namespace.__completion_matcher(value)
else:
_is_match = Namespace.__completion_matcher(value)
def is_match(entry: Tuple[sym.Symbol, Var]) -> bool:
return _is_match(entry) and not entry[1].is_private
return map(
lambda entry: f"{entry[0].name}",
filter(is_match, [(s, v) for s, v in self.interns]),
)
|
def configure(self, options, config):
Plugin.configure(self, options, config)
self.config = config
if self.enabled:
self.stats = {'errors': 0,
'failures': 0,
'passes': 0,
'skipped': 0
}
self.errorlist = []
self.error_report_file = codecs.open(options.xunit_file, 'w',
self.encoding, 'replace')
|
def renew_lock(self):
if self._queue_name:
self.service_bus_service.renew_lock_queue_message(
self._queue_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
elif self._topic_name and self._subscription_name:
self.service_bus_service.renew_lock_subscription_message(
self._topic_name,
self._subscription_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
else:
raise AzureServiceBusPeekLockError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_RENEW_LOCK)
|
def call(self, inputs):
net = self.encoder_net(tf.cast(inputs, tf.float32))
return ed.MultivariateNormalDiag(
loc=net[..., :self.latent_size],
scale_diag=tf.nn.softplus(net[..., self.latent_size:]),
name="latent_code_posterior")
|
def kvlayer_key_to_stream_id(k):
abs_url_hash, epoch_ticks = k
return '{0}-{1}'.format(epoch_ticks,
base64.b16encode(abs_url_hash).lower())
|
def update_binary_annotations(self, extra_annotations):
if not self.logging_context:
self.binary_annotations.update(extra_annotations)
else:
self.logging_context.tags.update(extra_annotations)
|
def status(self,verbose=0):
self._update_status()
self._group_report(self.running,'Running')
self._group_report(self.completed,'Completed')
self._group_report(self.dead,'Dead')
self._comp_report[:] = []
self._dead_report[:] = []
|
def visit_table(self, layout):
table_content = self.get_table_content(layout)
cols_width = [0] * len(table_content[0])
for row in table_content:
for index, col in enumerate(row):
cols_width[index] = max(cols_width[index], len(col))
self.default_table(layout, table_content, cols_width)
self.writeln()
|
def no_exp(number):
mant, exp = to_scientific_tuple(number)
if not exp:
return str(number)
floating_mant = "." in mant
mant = mant.replace(".", "")
if exp < 0:
return "0." + "0" * (-exp - 1) + mant
if not floating_mant:
return mant + "0" * exp + (".0" if isinstance(number, float) else "")
lfpart = len(mant) - 1
if lfpart < exp:
return (mant + "0" * (exp - lfpart)).rstrip(".")
return mant
|
def url_query_params(url):
return dict(urlparse.parse_qsl(urlparse.urlparse(url).query, True))
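# Hedged usage sketch (the urlparse module above is Python 2's; on Python 3 it would be urllib.parse):
params = url_query_params('https://example.com/search?q=books&page=2&empty=')
# params -> {'q': 'books', 'page': '2', 'empty': ''}  (blank values kept because keep_blank_values=True)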
|
def path_dispatch_old_new(mname, returns_model):
def _wrapper(self, old_path, new_path, *args, **kwargs):
old_prefix, old_mgr, old_mgr_path = _resolve_path(
old_path, self.managers
)
new_prefix, new_mgr, new_mgr_path = _resolve_path(
new_path, self.managers,
)
if old_mgr is not new_mgr:
raise HTTPError(
400,
"Can't move files between backends ({old} -> {new})".format(
old=old_path,
new=new_path,
)
)
assert new_prefix == old_prefix
result = getattr(new_mgr, mname)(
old_mgr_path,
new_mgr_path,
*args,
**kwargs
)
if returns_model and new_prefix:
return _apply_prefix(new_prefix, result)
else:
return result
return _wrapper
|
def get_admins(self, account_id, params={}):
url = ADMINS_API.format(account_id)
admins = []
for data in self._get_paged_resource(url, params=params):
admins.append(CanvasAdmin(data=data))
return admins
|
def set_feature_transform(self, mode='polynomial', degree=1):
if self.status != 'load_train_data':
print("Please load train data first.")
return self.train_X
self.feature_transform_mode = mode
self.feature_transform_degree = degree
self.train_X = self.train_X[:, 1:]
self.train_X = utility.DatasetLoader.feature_transform(
self.train_X,
self.feature_transform_mode,
self.feature_transform_degree
)
return self.train_X
|
def _wanmen_get_title_by_json_topic_part(json_content, tIndex, pIndex):
return '_'.join([json_content[0]['name'],
json_content[0]['Topics'][tIndex]['name'],
json_content[0]['Topics'][tIndex]['Parts'][pIndex]['name']])
|
def fallback_to_default_project_id(func):
@functools.wraps(func)
def inner_wrapper(self, *args, **kwargs):
if len(args) > 0:
raise AirflowException(
"You must use keyword arguments in this methods rather than"
" positional")
if 'project_id' in kwargs:
kwargs['project_id'] = self._get_project_id(kwargs['project_id'])
else:
kwargs['project_id'] = self._get_project_id(None)
if not kwargs['project_id']:
raise AirflowException("The project id must be passed either as "
"keyword project_id parameter or as project_id extra "
"in GCP connection definition. Both are not set!")
return func(self, *args, **kwargs)
return inner_wrapper
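# Hedged usage sketch: GcpHook is a hypothetical class, not from the source; it only
# illustrates how the decorator forbids positional arguments and fills in project_id.
class GcpHook:
    def _get_project_id(self, project_id):
        # fall back to a default project when none was passed explicitly
        return project_id or "my-default-project"

    @fallback_to_default_project_id
    def list_buckets(self, project_id=None):
        return "buckets in {}".format(project_id)

hook = GcpHook()
hook.list_buckets()                          # runs against "my-default-project"
hook.list_buckets(project_id="explicit-id")  # runs against "explicit-id"
# hook.list_buckets("positional-id")         # would raise AirflowException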
|
def get_model_class(settings_entry_name):
app_name, model_name = get_app_n_model(settings_entry_name)
try:
model = apps_get_model(app_name, model_name)
except (LookupError, ValueError):
model = None
if model is None:
raise ImproperlyConfigured(
'`SITETREE_%s` refers to model `%s` that has not been installed.' % (settings_entry_name, model_name))
return model
|
def draw(self):
self.screen.border(0)
if self.title is not None:
self.screen.addstr(2, 2, self.title, curses.A_STANDOUT)
if self.subtitle is not None:
self.screen.addstr(4, 2, self.subtitle, curses.A_BOLD)
for index, item in enumerate(self.items):
if self.current_option == index:
text_style = self.highlight
else:
text_style = self.normal
self.screen.addstr(5 + index, 4, item.show(index), text_style)
screen_rows, screen_cols = CursesMenu.stdscr.getmaxyx()
top_row = 0
if 6 + len(self.items) > screen_rows:
if screen_rows + self.current_option < 6 + len(self.items):
top_row = self.current_option
else:
top_row = 6 + len(self.items) - screen_rows
self.screen.refresh(top_row, 0, 0, 0, screen_rows - 1, screen_cols - 1)
|
def svg_to_image(string, size=None):
if isinstance(string, unicode):
string = string.encode('utf-8')
renderer = QtSvg.QSvgRenderer(QtCore.QByteArray(string))
if not renderer.isValid():
raise ValueError('Invalid SVG data.')
if size is None:
size = renderer.defaultSize()
image = QtGui.QImage(size, QtGui.QImage.Format_ARGB32)
painter = QtGui.QPainter(image)
renderer.render(painter)
return image
|
def token_indent(self, idx):
line_indent = self.line_indent(idx)
return line_indent + " " * (self.start_col(idx) - len(line_indent))
|
def close(self):
self.flush()
setattr(sys, self.channel, self.ostream)
self.file.close()
self._closed = True
|
def info(self):
for key, val in self.header.items():
if key == b'src_raj':
val = val.to_string(unit=u.hour, sep=':')
if key == b'src_dej':
val = val.to_string(unit=u.deg, sep=':')
if key == b'tsamp':
val *= u.second
if key in (b'foff', b'fch1'):
val *= u.MHz
if key == b'tstart':
print("%16s : %32s" % ("tstart (ISOT)", Time(val, format='mjd').isot))
key = "tstart (MJD)"
print("%16s : %32s" % (key, val))
print("\n%16s : %32s" % ("Num ints in file", self.n_ints_in_file))
print("%16s : %32s" % ("Data shape", self.data.shape))
print("%16s : %32s" % ("Start freq (MHz)", self.freqs[0]))
print("%16s : %32s" % ("Stop freq (MHz)", self.freqs[-1]))
|
def fromRaw(cls, skype=None, raw={}):
return cls(skype, raw, **cls.rawToFields(raw))
|
def inherit_from_std_ex(node: astroid.node_classes.NodeNG) -> bool:
ancestors = node.ancestors() if hasattr(node, "ancestors") else []
for ancestor in itertools.chain([node], ancestors):
if (
ancestor.name in ("Exception", "BaseException")
and ancestor.root().name == EXCEPTIONS_MODULE
):
return True
return False
|
def findStationCodesByCity(city_name, token):
req = requests.get(
API_ENDPOINT_SEARCH,
params={
'token': token,
'keyword': city_name
})
if req.status_code == 200 and req.json()["status"] == "ok":
return [result["uid"] for result in req.json()["data"]]
else:
return []
|
def v2_playbook_on_task_start(self, task, **kwargs):
self.last_task_name = task.get_name()
self.printed_last_task = False
|
def _register_stements(self, statements: List["HdlStatement"],
target: List["HdlStatement"]):
for stm in flatten(statements):
assert stm.parentStm is None, stm
stm._set_parent_stm(self)
target.append(stm)
|
def _query_cassandra(self):
self.hook = CassandraHook(cassandra_conn_id=self.cassandra_conn_id)
session = self.hook.get_conn()
cursor = session.execute(self.cql)
return cursor
|
def request_tokens(self):
url = 'https://api.ecobee.com/token'
params = {'grant_type': 'ecobeePin', 'code': self.authorization_code,
'client_id': self.api_key}
try:
request = requests.post(url, params=params)
except RequestException:
logger.warn("Error connecting to Ecobee. Possible connectivity outage."
"Could not request token.")
return
if request.status_code == requests.codes.ok:
self.access_token = request.json()['access_token']
self.refresh_token = request.json()['refresh_token']
self.write_tokens_to_file()
self.pin = None
else:
logger.warn('Error while requesting tokens from ecobee.com.'
' Status code: ' + str(request.status_code))
return
|
def cancel_task(self, task_id):
self.registry.remove(task_id)
self._scheduler.cancel_job_task(task_id)
logger.info("Task %s canceled", task_id)
|
def _superop_to_choi(data, input_dim, output_dim):
shape = (output_dim, output_dim, input_dim, input_dim)
return _reshuffle(data, shape)
|
def status(institute_id, case_name):
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
user_obj = store.user(current_user.email)
status = request.form.get('status', case_obj['status'])
link = url_for('.case', institute_id=institute_id, case_name=case_name)
if status == 'archive':
store.archive_case(institute_obj, case_obj, user_obj, status, link)
else:
store.update_status(institute_obj, case_obj, user_obj, status, link)
return redirect(request.referrer)
|
def update_database(self, instance_id, database_id, ddl_statements,
project_id=None,
operation_id=None):
instance = self._get_client(project_id=project_id).instance(
instance_id=instance_id)
if not instance.exists():
raise AirflowException("The instance {} does not exist in project {} !".
format(instance_id, project_id))
database = instance.database(database_id=database_id)
try:
operation = database.update_ddl(
ddl_statements=ddl_statements, operation_id=operation_id)
if operation:
result = operation.result()
self.log.info(result)
return
except AlreadyExists as e:
if e.code == 409 and operation_id in e.message:
self.log.info("Replayed update_ddl message - the operation id %s "
"was already done before.", operation_id)
return
except GoogleAPICallError as e:
self.log.error('An error occurred: %s. Exiting.', e.message)
raise e
|
def name(self):
name = self._platform_impl.get_process_name()
if os.name == 'posix':
try:
cmdline = self.cmdline
except AccessDenied:
pass
else:
if cmdline:
extended_name = os.path.basename(cmdline[0])
if extended_name.startswith(name):
name = extended_name
self._platform_impl._process_name = name
return name
|
def create_group(self, group):
self._valid_group_id(group.id)
body = {"data": group.json_data()}
url = "{}/group/{}".format(self.API, group.name)
data = self._put_resource(url, headers={}, body=body)
return self._group_from_json(data.get("data"))
|
def _minimal_export_traces(self, outdir=None, analytes=None,
samples=None, subset='All_Analyses'):
if analytes is None:
analytes = self.analytes
elif isinstance(analytes, str):
analytes = [analytes]
if samples is not None:
subset = self.make_subset(samples)
samples = self._get_samples(subset)
focus_stage = 'rawdata'
if not os.path.isdir(outdir):
os.mkdir(outdir)
for s in samples:
d = self.data[s].data[focus_stage]
out = Bunch()
for a in analytes:
out[a] = d[a]
out = pd.DataFrame(out, index=self.data[s].Time)
out.index.name = 'Time'
d = dateutil.parser.parse(self.data[s].meta['date'])
# placeholder header; the original '#'-prefixed header strings are not recoverable here,
# so only the recoverable timestamp fields are kept
header = ['# minimal export of rawdata traces',
'# exported: %s' % time.strftime('%Y:%m:%d %H:%M:%S'),
'# analysis date: %s' % d.strftime('%Y:%m:%d %H:%M:%S')]
header = '\n'.join(header) + '\n'
csv = out.to_csv()
with open('%s/%s.csv' % (outdir, s), 'w') as f:
f.write(header)
f.write(csv)
return
|
def as_recarray(self):
dtype = [(k,v.dtype) for k,v in self.__dict__.iteritems()]
R = numpy.recarray(len(self.__dict__[k]),dtype=dtype)
for key in self.__dict__:
R[key] = self.__dict__[key]
return R
|
def unregister(self, mimetype, processor):
if mimetype in self and processor in self[mimetype]:
self[mimetype].remove(processor)
|
def _write_iop_to_file(self, iop, file_name):
lg.info('Writing :: ' + file_name)
f = open(file_name, 'w')
for i in scipy.nditer(iop):
f.write(str(i) + '\n')
|
async def get_tracks(self, *, limit: Optional[int] = 20, offset: Optional[int] = 0) -> List[Track]:
data = await self.__client.http.album_tracks(self.id, limit=limit, offset=offset)
return list(Track(self.__client, item) for item in data['items'])
|
def get_ammo_generator(self):
af_readers = {
'phantom': missile.AmmoFileReader,
'slowlog': missile.SlowLogReader,
'line': missile.LineReader,
'uri': missile.UriReader,
'uripost': missile.UriPostReader,
'access': missile.AccessLogReader,
'caseline': missile.CaseLineReader,
}
if self.uris and self.ammo_file:
raise StepperConfigurationError(
'Both uris and ammo file specified. You must specify only one of them'
)
elif self.uris:
ammo_gen = missile.UriStyleGenerator(
self.uris, self.headers, http_ver=self.http_ver)
elif self.ammo_file:
if self.ammo_type in af_readers:
if self.ammo_type == 'phantom':
opener = resource.get_opener(self.ammo_file)
with opener(self.use_cache) as ammo:
try:
if not ammo.next()[0].isdigit():
self.ammo_type = 'uri'
self.log.info(
"Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format"
)
else:
self.log.info(
"Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it"
)
except StopIteration:
self.log.exception(
"Couldn't read first line of ammo file")
raise AmmoFileError(
"Couldn't read first line of ammo file")
else:
raise NotImplementedError(
'No such ammo type implemented: "%s"' % self.ammo_type)
ammo_gen = af_readers[self.ammo_type](
self.ammo_file, headers=self.headers, http_ver=self.http_ver, use_cache=self.use_cache)
else:
raise StepperConfigurationError(
'Ammo not found. Specify uris or ammo file')
self.log.info("Using %s ammo reader" % type(ammo_gen).__name__)
return ammo_gen
|
def check_rdd_dtype(rdd, expected_dtype):
if not isinstance(rdd, BlockRDD):
raise TypeError("Expected {0} for parameter rdd, got {1}."
.format(BlockRDD, type(rdd)))
if isinstance(rdd, DictRDD):
if not isinstance(expected_dtype, dict):
raise TypeError('Expected {0} for parameter '
'expected_dtype, got {1}.'
.format(dict, type(expected_dtype)))
accept = True
types = dict(list(zip(rdd.columns, rdd.dtype)))
for key, values in expected_dtype.items():
if not isinstance(values, (tuple, list)):
values = [values]
accept = accept and types[key] in values
return accept
if not isinstance(expected_dtype, (tuple, list)):
expected_dtype = [expected_dtype]
return rdd.dtype in expected_dtype
|
def entropy(state):
rho = np.array(state)
if rho.ndim == 1:
return 0
evals = np.maximum(np.linalg.eigvalsh(state), 0.)
return shannon_entropy(evals, base=np.e)
|
def python_matches(self,text):
if "." in text:
try:
matches = self.attr_matches(text)
if text.endswith('.') and self.omit__names:
if self.omit__names == 1:
no__name = (lambda txt:
re.match(r'.*\.__.*?__',txt) is None)
else:
no__name = (lambda txt:
re.match(r'.*\._.*?',txt) is None)
matches = filter(no__name, matches)
except NameError:
matches = []
else:
matches = self.global_matches(text)
return matches
|
def subwave(wave, dep_name=None, indep_min=None, indep_max=None, indep_step=None):
ret = copy.copy(wave)
if dep_name is not None:
ret.dep_name = dep_name
_bound_waveform(ret, indep_min, indep_max)
pexdoc.addai("indep_step", bool((indep_step is not None) and (indep_step <= 0)))
exmsg = "Argument `indep_step` is greater than independent vector range"
cond = bool(
(indep_step is not None)
and (indep_step > ret._indep_vector[-1] - ret._indep_vector[0])
)
pexdoc.addex(RuntimeError, exmsg, cond)
if indep_step:
indep_vector = _barange(indep_min, indep_max, indep_step)
dep_vector = _interp_dep_vector(ret, indep_vector)
ret._set_indep_vector(indep_vector, check=False)
ret._set_dep_vector(dep_vector, check=False)
return ret
|
def init_transformers(self):
self._transformers = []
for transformer_cls in _default_transformers:
transformer_cls(
shell=self.shell, prefilter_manager=self, config=self.config
)
|
def _mode(self):
return (self.mean_direction +
tf.zeros_like(self.concentration)[..., tf.newaxis])
|
def _prepare_args_with_initial_vertex(objective_function,
initial_vertex,
step_sizes,
objective_at_initial_vertex,
batch_evaluate_objective):
dim = tf.size(input=initial_vertex)
num_vertices = dim + 1
unit_vectors_along_axes = tf.reshape(
tf.eye(dim, dim, dtype=initial_vertex.dtype.base_dtype),
tf.concat([[dim], tf.shape(input=initial_vertex)], axis=0))
simplex_face = initial_vertex + step_sizes * unit_vectors_along_axes
simplex = tf.concat([tf.expand_dims(initial_vertex, axis=0),
simplex_face], axis=0)
num_evaluations = 0
if objective_at_initial_vertex is None:
objective_at_initial_vertex = objective_function(initial_vertex)
num_evaluations += 1
objective_at_simplex_face, num_evals = _evaluate_objective_multiple(
objective_function, simplex_face, batch_evaluate_objective)
num_evaluations += num_evals
objective_at_simplex = tf.concat(
[
tf.expand_dims(objective_at_initial_vertex, axis=0),
objective_at_simplex_face
], axis=0)
return (dim,
num_vertices,
simplex,
objective_at_simplex,
num_evaluations)
|
def convertBits(self, sigOrVal, toType):
if isinstance(sigOrVal, Value):
return convertBits__val(self, sigOrVal, toType)
elif isinstance(toType, HBool):
if self.bit_length() == 1:
v = 0 if sigOrVal._dtype.negated else 1
return sigOrVal._eq(self.getValueCls().fromPy(v, self))
elif isinstance(toType, Bits):
if self.bit_length() == toType.bit_length():
return sigOrVal._convSign(toType.signed)
elif toType == INT:
return Operator.withRes(AllOps.BitsToInt, [sigOrVal], toType)
return default_auto_cast_fn(self, sigOrVal, toType)
|
def from_symbol(cls, symbol):
if symbol.lower() == symbol:
return cls(PIECE_SYMBOLS.index(symbol), WHITE)
else:
return cls(PIECE_SYMBOLS.index(symbol.lower()), BLACK)
|
def pauli_group(number_of_qubits, case='weight'):
if number_of_qubits < 5:
temp_set = []
if case == 'weight':
tmp = pauli_group(number_of_qubits, case='tensor')
return sorted(tmp, key=lambda x: -np.count_nonzero(
np.array(x.to_label(), 'c') == b'I'))
elif case == 'tensor':
for k in range(4 ** number_of_qubits):
z = np.zeros(number_of_qubits, dtype=np.bool)
x = np.zeros(number_of_qubits, dtype=np.bool)
for j in range(number_of_qubits):
element = (k // (4 ** j)) % 4
if element == 1:
x[j] = True
elif element == 2:
z[j] = True
x[j] = True
elif element == 3:
z[j] = True
temp_set.append(Pauli(z, x))
return temp_set
else:
raise QiskitError("Only support 'weight' or 'tensor' cases "
"but you have {}.".format(case))
raise QiskitError("Only support number of qubits is less than 5")
|
def _validate(self, qobj):
n_qubits = qobj.config.n_qubits
max_qubits = self.configuration().n_qubits
if n_qubits > max_qubits:
raise BasicAerError('Number of qubits {} '.format(n_qubits) +
'is greater than maximum ({}) '.format(max_qubits) +
'for "{}".'.format(self.name()))
for experiment in qobj.experiments:
name = experiment.header.name
if experiment.config.memory_slots == 0:
logger.warning('No classical registers in circuit "%s", '
'counts will be empty.', name)
elif 'measure' not in [op.name for op in experiment.instructions]:
logger.warning('No measurements in circuit "%s", '
'classical register will remain all zeros.', name)
|
def classification(self, classification):
allowed_values = ["Public Limited Indian Non-Government Company", "Private Limited Indian Non-Government Company", "One Person Company", "Private Limited Foreign Company Incorporated in India", "Public Limited Foreign Company Incorporated in India", "Union Government Company", "State Government Company", "Guarantee & Association Public", "Guarantee & Association Private", "Not For Profit Company", "Unlimited Liabilities Public", "Unlimited Liabilities Private", "Undefined"]
if classification not in allowed_values:
raise ValueError(
"Invalid value for `classification`, must be one of {0}"
.format(allowed_values)
)
self._classification = classification
|
def val_where(cond, tval, fval):
if isinstance(tval, tf.Tensor):
return tf.where(cond, tval, fval)
elif isinstance(tval, tuple):
cls = type(tval)
return cls(*(val_where(cond, t, f) for t, f in zip(tval, fval)))
else:
raise TypeError("Expected tf.Tensor or tuple for tval, got {}".format(type(tval)))
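# Hedged usage sketch (requires TensorFlow; State is a hypothetical namedtuple):
# val_where applies tf.where element-wise across every field of a (named)tuple of tensors.
import collections
import tensorflow as tf

State = collections.namedtuple('State', ['position', 'velocity'])
cond = tf.constant([True, False])
a = State(position=tf.constant([1., 2.]), velocity=tf.constant([10., 20.]))
b = State(position=tf.constant([3., 4.]), velocity=tf.constant([30., 40.]))
picked = val_where(cond, a, b)
# picked.position -> [1., 4.], picked.velocity -> [10., 40.]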
|
def get_embedding_levels(text, storage, upper_is_rtl=False, debug=False):
prev_surrogate = False
base_level = storage['base_level']
for _ch in text:
if _IS_UCS2 and (_SURROGATE_MIN <= ord(_ch) <= _SURROGATE_MAX):
prev_surrogate = _ch
continue
elif prev_surrogate:
_ch = prev_surrogate + _ch
prev_surrogate = False
if upper_is_rtl and _ch.isupper():
bidi_type = 'R'
else:
bidi_type = bidirectional(_ch)
storage['chars'].append({
'ch': _ch,
'level': base_level,
'type': bidi_type,
'orig': bidi_type
})
if debug:
debug_storage(storage, base_info=True)
|
def load(self):
self._check_open()
try:
data = json.load(self.file, **self.load_args)
except ValueError:
data = {}
if not isinstance(data, dict):
raise ValueError('Root JSON type must be dictionary')
self.clear()
self.update(data)
|
def calc(pvalues, lamb):
m = len(pvalues)
pi0 = (pvalues > lamb).sum() / ((1 - lamb)*m)
pFDR = np.ones(m)
print("pFDR y Pr fastPow")
for i in range(m):
y = pvalues[i]
Pr = max(1, m - i) / float(m)
pFDR[i] = (pi0 * y) / (Pr * (1 - math.pow(1-y, m)))
print(i, pFDR[i], y, Pr, 1.0 - math.pow(1-y, m))
num_null = pi0*m
num_alt = m - num_null
num_negs = np.array(range(m))
num_pos = m - num_negs
pp = num_pos / float(m)
qvalues = np.ones(m)
qvalues[0] = pFDR[0]
for i in range(m-1):
qvalues[i+1] = min(qvalues[i], pFDR[i+1])
sens = ((1.0 - qvalues) * num_pos) / num_alt
sens[sens > 1.0] = 1.0
df = pd.DataFrame(dict(
pvalue=pvalues,
qvalue=qvalues,
FDR=pFDR,
percentile_positive=pp,
sens=sens
))
df["svalue"] = df.sens[::-1].cummax()[::-1]
return df, num_null, m
|
def series(collection, method, prints = 15, *args, **kwargs):
if 'verbose' in kwargs.keys():
verbose = kwargs['verbose']
else:
verbose = True
results = []
timer = turntable.utils.Timer(nLoops=len(collection), numPrints=prints, verbose=verbose)
for subject in collection:
results.append(method(subject, *args, **kwargs))
timer.loop()
timer.fin()
return results
|
def getlist(self, section, option):
value_list = self.get(section, option)
values = []
for value_line in value_list.split('\n'):
for value in value_line.split(','):
value = value.strip()
if value:
values.append(value)
return values
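# Hedged sketch: the same splitting logic as getlist() above, attached to a standard
# configparser.ConfigParser subclass (an assumption; the original parent class is not shown).
import configparser

class ListConfigParser(configparser.ConfigParser):
    def getlist(self, section, option):
        # split on newlines first, then commas, dropping blank entries
        return [v.strip()
                for line in self.get(section, option).split('\n')
                for v in line.split(',') if v.strip()]

cp = ListConfigParser()
cp.read_string("[paths]\ninclude = src, tests\n    docs\n")
print(cp.getlist('paths', 'include'))  # ['src', 'tests', 'docs']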
|
def glob(self, pattern):
cls = self.__class__
return [cls(s) for s in glob.glob(unicode(self / pattern))]
|
def export_html(html, filename, image_tag = None, inline = True):
if image_tag is None:
image_tag = default_image_tag
else:
image_tag = ensure_utf8(image_tag)
if inline:
path = None
else:
root,ext = os.path.splitext(filename)
path = root + "_files"
if os.path.isfile(path):
raise OSError("%s exists, but is not a directory." % path)
with open(filename, 'w') as f:
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = path, format = "png"),
html))
|
def export(self, cert, key, type=FILETYPE_PEM, days=100,
digest=_UNSPECIFIED):
if not isinstance(cert, X509):
raise TypeError("cert must be an X509 instance")
if not isinstance(key, PKey):
raise TypeError("key must be a PKey instance")
if not isinstance(type, int):
raise TypeError("type must be an integer")
if digest is _UNSPECIFIED:
raise TypeError("digest must be provided")
digest_obj = _lib.EVP_get_digestbyname(digest)
if digest_obj == _ffi.NULL:
raise ValueError("No such digest method")
bio = _lib.BIO_new(_lib.BIO_s_mem())
_openssl_assert(bio != _ffi.NULL)
sometime = _lib.ASN1_TIME_new()
_openssl_assert(sometime != _ffi.NULL)
_lib.X509_gmtime_adj(sometime, 0)
_lib.X509_CRL_set_lastUpdate(self._crl, sometime)
_lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60)
_lib.X509_CRL_set_nextUpdate(self._crl, sometime)
_lib.X509_CRL_set_issuer_name(
self._crl, _lib.X509_get_subject_name(cert._x509)
)
sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj)
if not sign_result:
_raise_current_error()
return dump_crl(type, self)
|
def parse_text(text):
assert isinstance(text, _str_type), "`text` parameter should be a string, got %r" % type(text)
gen = iter(text.splitlines(True))
readline = gen.next if hasattr(gen, "next") else gen.__next__
return Code(_tokenize(readline))
|
def parsed_forensic_reports_to_csv(reports):
fields = ["feedback_type", "user_agent", "version", "original_envelope_id",
"original_mail_from", "original_rcpt_to", "arrival_date",
"arrival_date_utc", "subject", "message_id",
"authentication_results", "dkim_domain", "source_ip_address",
"source_country", "source_reverse_dns", "source_base_domain",
"delivery_result", "auth_failure", "reported_domain",
"authentication_mechanisms", "sample_headers_only"]
if type(reports) == OrderedDict:
reports = [reports]
csv_file = StringIO()
csv_writer = DictWriter(csv_file, fieldnames=fields)
csv_writer.writeheader()
for report in reports:
row = report.copy()
row["source_ip_address"] = report["source"]["ip_address"]
row["source_reverse_dns"] = report["source"]["reverse_dns"]
row["source_base_domain"] = report["source"]["base_domain"]
row["source_country"] = report["source"]["country"]
del row["source"]
row["subject"] = report["parsed_sample"]["subject"]
row["auth_failure"] = ",".join(report["auth_failure"])
authentication_mechanisms = report["authentication_mechanisms"]
row["authentication_mechanisms"] = ",".join(
authentication_mechanisms)
del row["sample"]
del row["parsed_sample"]
csv_writer.writerow(row)
return csv_file.getvalue()
|
def print_processor_inputs(self, processor_name):
p = self.processors(processor_name=processor_name)
if len(p) == 1:
p = p[0]
else:
raise Exception('Invalid processor name')
for field_schema, _, _ in iterate_schema({}, p['input_schema'], 'input'):
name = field_schema['name']
typ = field_schema['type']
print("{} -> {}".format(name, typ))
|
def add_price_entity(self, price: dal.Price):
from decimal import Decimal
repo = self.get_price_repository()
existing = (
repo.query
.filter(dal.Price.namespace == price.namespace)
.filter(dal.Price.symbol == price.symbol)
.filter(dal.Price.date == price.date)
.filter(dal.Price.time == price.time)
.first()
)
if existing:
new_value = Decimal(price.value) / Decimal(price.denom)
self.logger.info(f"Exists: {price}")
if price.currency != existing.currency:
raise ValueError(
f"The currency is different for price {price}!")
if existing.value != price.value:
existing.value = price.value
self.logger.info(f"Updating to {new_value}.")
if existing.denom != price.denom:
existing.denom = price.denom
else:
self.session.add(price)
self.logger.info(f"Added {price}")
|
def _load_features_from_images(self, images, names=None):
if names is not None and len(names) != len(images):
raise Exception(
"Lists of feature names and images must be of same length!")
self.feature_names = names if names is not None else images
self.feature_images = imageutils.load_imgs(images, self.masker)
|
def remove_chain(self, name):
if name in self.chains:
delattr(self.chains, name)
else:
raise ValueError("Chain with this name not found")
|
def _use_rev_b_archive(self, records, offset):
if type(self._ARCHIVE_REV_B) is bool:
return self._ARCHIVE_REV_B
data = ArchiveBStruct.unpack_from(records, offset)
if data['RecType'] == 0:
log.info('detected archive rev. B')
self._ARCHIVE_REV_B = True
else:
log.info('detected archive rev. A')
self._ARCHIVE_REV_B = False
return self._ARCHIVE_REV_B
|
def do_help(self, options, args, parser):
if options.help:
if self.classic:
self.help_fn(topic='help')
else:
self.help_fn(parser=parser)
return True
if "help" in options.actions:
if args:
for a in args:
parser = CMDS.get(a)
if parser:
self.help_fn(parser=parser)
else:
self.help_fn(topic=a)
else:
self.help_fn(topic='help')
return True
if options.version:
self.help_fn(topic='version')
return True
return False
|
def register_routes(app):
from . import controllers
from flask.blueprints import Blueprint
for module in _import_submodules_from_package(controllers):
bp = getattr(module, 'bp')
if bp and isinstance(bp, Blueprint):
app.register_blueprint(bp)
|
def flush(self, timeout=60):
if timeout <= 0:
raise ErrBadTimeout
if self.is_closed:
raise ErrConnectionClosed
future = asyncio.Future(loop=self._loop)
try:
yield from self._send_ping(future)
yield from asyncio.wait_for(future, timeout, loop=self._loop)
except asyncio.TimeoutError:
future.cancel()
raise ErrTimeout
|
def match(self):
result = []
to_match = comp(self.regex)
if self.rematch:
pre_result = to_match.findall(self.data)
else:
pre_result = to_match.search(self.data)
if self.return_data and pre_result:
if self.rematch:
for data in pre_result:
if isinstance(data, tuple):
result.extend(list(data))
else:
result.append(data)
if self.group != 0:
return result[self.group]
else:
result = pre_result.group(
self.group
).strip()
return result
if not self.return_data and pre_result:
return True
return False
|
def reset(self, new_session=True):
self.history_manager.reset(new_session)
if new_session:
self.execution_count = 1
if self.displayhook.do_full_cache:
self.displayhook.flush()
if self.user_ns is not self.user_global_ns:
self.user_ns.clear()
ns = self.user_global_ns
drop_keys = set(ns.keys())
drop_keys.discard('__builtin__')
drop_keys.discard('__builtins__')
drop_keys.discard('__name__')
for k in drop_keys:
del ns[k]
self.user_ns_hidden.clear()
self.init_user_ns()
self.alias_manager.clear_aliases()
self.alias_manager.init_aliases()
self.clear_main_mod_cache()
self.new_main_mod()
|