Unnamed: 0 (int64, 0–389k) | code (string, 26–79.6k chars) | docstring (string, 1–46.9k chars)
---|---|---
13,200 | def attach_mock(self, mock, attribute):
mock._mock_parent = None
mock._mock_new_parent = None
    mock._mock_name = ''
mock._mock_new_name = None
setattr(self, attribute, mock) | Attach a mock as an attribute of this one, replacing its name and
parent. Calls to the attached mock will be recorded in the
`method_calls` and `mock_calls` attributes of this one. |
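A minimal usage sketch of `attach_mock` (stdlib `unittest.mock`), showing how calls on the attached child are recorded on the parent:

```python
from unittest.mock import Mock

parent = Mock()
child = Mock()
parent.attach_mock(child, 'child')
child.method(1)
print(parent.mock_calls)  # [call.child.method(1)]
```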
13,201 | def _jars_to_directories(self, target):
files = set()
jar_import_products = self.context.products.get_data(JarImportProducts)
imports = jar_import_products.imports(target)
for coordinate, jar in imports:
files.add(self._extract_jar(coordinate, jar))
return files | Extracts and maps jars to directories containing their contents.
:returns: a set of filepaths to directories containing the contents of the jars. |
13,202 | def update_factor(self, name, body):
    url = self._url('factors/{}'.format(name))
return self.client.put(url, data=body) | Update Guardian factor
Useful to enable / disable factor
Args:
name (str): Either push-notification or sms
body (dict): Attributes to modify.
See: https://auth0.com/docs/api/management/v2#!/Guardian/put_factors_by_name |
13,203 | def connect(self):
    if ('SSH_AUTH_SOCK' in os.environ) and (sys.platform != 'win32'):
        conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            retry_on_signal(lambda: conn.connect(os.environ['SSH_AUTH_SOCK']))
        except:
            # probably a dangling env var: the ssh agent is gone
            return
    elif sys.platform == 'win32':
import win_pageant
if win_pageant.can_talk_to_agent():
conn = win_pageant.PageantConnection()
else:
return
else:
return
self._conn = conn | Method automatically called by the run() method of the AgentProxyThread |
13,204 | def verify_high(self, high):
    errors = []
    if not isinstance(high, dict):
        errors.append('High data is not a dictionary and is invalid')
    reqs = OrderedDict()
    for name, body in six.iteritems(high):
        if name.startswith('__'):
            continue
        if not isinstance(name, six.string_types):
            errors.append(
                'ID \'{0}\' in SLS \'{1}\' is not formed as a string, but is a {2}'
                .format(
                    name,
                    body['__sls__'],
                    type(name).__name__
                )
            )
        if not isinstance(body, dict):
            err = ('The type {0} in {1} is not formatted as a dictionary'
                   .format(name, body))
            errors.append(err)
            continue
        for state in body:
            if state.startswith('__'):
                continue
            if not isinstance(body[state], list):
                errors.append(
                    'State \'{0}\' in SLS \'{1}\' is not formed as a list'
                    .format(name, body['__sls__'])
                )
            else:
                fun = 0
                if '.' in state:
                    fun += 1
                for arg in body[state]:
                    if isinstance(arg, six.string_types):
                        fun += 1
                        if ' ' in arg.strip():
                            errors.append((
                                'The function "{0}" in state "{1}" in SLS "{2}" '
                                'has whitespace, a function with whitespace is not '
                                'supported, perhaps this is an argument that is '
                                'missing a ":"').format(
                                    arg,
                                    name,
                                    body['__sls__']))
                    elif isinstance(arg, dict):
                        argfirst = next(iter(arg))
                        if argfirst in ('require', 'watch', 'prereq', 'onchanges'):
                            if not isinstance(arg[argfirst], list):
                                errors.append((
                                    'The {0} statement in state \'{1}\' in SLS \'{2}\' '
                                    'needs to be formed as a list').format(
                                        argfirst,
                                        name,
                                        body['__sls__']
                                    ))
                            else:
                                reqs[name] = {'state': state}
                                for req in arg[argfirst]:
                                    if isinstance(req, six.string_types):
                                        req = {'id': req}
                                    if not isinstance(req, dict):
                                        err = ('Requisite declaration {0} in SLS {1} '
                                               'is not formed as a single key '
                                               'dictionary').format(
                                                   req,
                                                   body['__sls__'])
                                        errors.append(err)
                                        continue
                                    req_key = next(iter(req))
                                    req_val = req[req_key]
                                    if '.' in req_key:
                                        errors.append((
                                            'Invalid requisite type \'{0}\' '
                                            'in state \'{1}\', in SLS '
                                            '\'{2}\'. Requisite types must '
                                            'not contain dots, did you '
                                            'mean \'{3}\'?'.format(
                                                req_key,
                                                name,
                                                body['__sls__'],
                                                req_key[:req_key.find('.')]
                                            )
                                        ))
                                    if not ishashable(req_val):
                                        errors.append((
                                            'Illegal requisite "{0}" in SLS "{1}", '
                                            'please check your syntax'
                                        ).format(
                                            six.text_type(req_val),
                                            body['__sls__']))
                                        continue
                                    reqs[name][req_val] = req_key
                                    # Check for recursive requisites
                                    if req_val in reqs:
                                        if name in reqs[req_val]:
                                            if reqs[req_val][name] == state:
                                                if reqs[req_val]['state'] == reqs[name][req_val]:
                                                    err = ('A recursive requisite was '
                                                           'found, SLS "{0}" ID "{1}" '
                                                           'ID "{2}"').format(
                                                               body['__sls__'],
                                                               name,
                                                               req_val
                                                           )
                                                    errors.append(err)
                        if len(list(arg)) != 1:
                            errors.append((
                                'Multiple dictionaries defined in argument of '
                                'state \'{0}\' in SLS \'{1}\'').format(
                                    name,
                                    body['__sls__']))
                if not fun:
                    if state == 'require' or state == 'watch':
                        continue
                    errors.append((
                        'No function declared in state \'{0}\' in SLS \'{1}\'')
                        .format(state, body['__sls__']))
                elif fun > 1:
                    errors.append(
                        'Too many functions declared in state \'{0}\' in '
                        'SLS \'{1}\''.format(state, body['__sls__'])
                    )
return errors | Verify that the high data is viable and follows the data structure |
13,205 | def register_activity_type(domain=None, name=None, version=None, description=None, defaultTaskStartToCloseTimeout=None, defaultTaskHeartbeatTimeout=None, defaultTaskList=None, defaultTaskPriority=None, defaultTaskScheduleToStartTimeout=None, defaultTaskScheduleToCloseTimeout=None):
pass | Registers a new activity type along with its configuration settings in the specified domain.
Access Control
You can use IAM policies to control this action's access to Amazon SWF resources as follows:
If the caller does not have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's cause parameter will be set to OPERATION_NOT_PERMITTED. For details and example IAM policies, see Using IAM to Manage Access to Amazon SWF Workflows .
See also: AWS API Documentation
:example: response = client.register_activity_type(
domain='string',
name='string',
version='string',
description='string',
defaultTaskStartToCloseTimeout='string',
defaultTaskHeartbeatTimeout='string',
defaultTaskList={
'name': 'string'
},
defaultTaskPriority='string',
defaultTaskScheduleToStartTimeout='string',
defaultTaskScheduleToCloseTimeout='string'
)
:type domain: string
:param domain: [REQUIRED]
The name of the domain in which this activity is to be registered.
:type name: string
:param name: [REQUIRED]
The name of the activity type within the domain.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
:type version: string
:param version: [REQUIRED]
The version of the activity type.
Note
The activity type consists of the name and version, the combination of which must be unique within the domain.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
:type description: string
:param description: A textual description of the activity type.
:type defaultTaskStartToCloseTimeout: string
:param defaultTaskStartToCloseTimeout: If set, specifies the default maximum duration that a worker can take to process tasks of this activity type. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
:type defaultTaskHeartbeatTimeout: string
:param defaultTaskHeartbeatTimeout: If set, specifies the default maximum time before which a worker processing a task of this type must report progress by calling RecordActivityTaskHeartbeat . If the timeout is exceeded, the activity task is automatically timed out. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision. If the activity worker subsequently attempts to record a heartbeat or returns a result, the activity worker receives an UnknownResource fault. In this case, Amazon SWF no longer considers the activity task to be valid; the activity worker should clean up the activity task.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
:type defaultTaskList: dict
:param defaultTaskList: If set, specifies the default task list to use for scheduling tasks of this activity type. This default task list is used if a task list is not provided when a task is scheduled through the ScheduleActivityTask decision.
name (string) -- [REQUIRED]The name of the task list.
:type defaultTaskPriority: string
:param defaultTaskPriority: The default task priority to assign to the activity type. If not assigned, then '0' will be used. Valid values are integers that range from Java's Integer.MIN_VALUE (-2147483648) to Integer.MAX_VALUE (2147483647). Higher numbers indicate higher priority.
For more information about setting task priority, see Setting Task Priority in the Amazon Simple Workflow Developer Guide .
:type defaultTaskScheduleToStartTimeout: string
:param defaultTaskScheduleToStartTimeout: If set, specifies the default maximum duration that a task of this activity type can wait before being assigned to a worker. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
:type defaultTaskScheduleToCloseTimeout: string
:param defaultTaskScheduleToCloseTimeout: If set, specifies the default maximum duration for a task of this activity type. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
:returns:
domain (string) -- [REQUIRED]
The name of the domain in which this activity is to be registered.
name (string) -- [REQUIRED]
The name of the activity type within the domain.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
version (string) -- [REQUIRED]
The version of the activity type.
Note
The activity type consists of the name and version, the combination of which must be unique within the domain.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
description (string) -- A textual description of the activity type.
defaultTaskStartToCloseTimeout (string) -- If set, specifies the default maximum duration that a worker can take to process tasks of this activity type. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value "NONE" can be used to specify unlimited duration.
defaultTaskHeartbeatTimeout (string) -- If set, specifies the default maximum time before which a worker processing a task of this type must report progress by calling RecordActivityTaskHeartbeat . If the timeout is exceeded, the activity task is automatically timed out. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision. If the activity worker subsequently attempts to record a heartbeat or returns a result, the activity worker receives an UnknownResource fault. In this case, Amazon SWF no longer considers the activity task to be valid; the activity worker should clean up the activity task.
The duration is specified in seconds; an integer greater than or equal to 0. The value "NONE" can be used to specify unlimited duration.
defaultTaskList (dict) -- If set, specifies the default task list to use for scheduling tasks of this activity type. This default task list is used if a task list is not provided when a task is scheduled through the ScheduleActivityTask decision.
name (string) -- [REQUIRED]The name of the task list.
defaultTaskPriority (string) -- The default task priority to assign to the activity type. If not assigned, then "0" will be used. Valid values are integers that range from Java's Integer.MIN_VALUE (-2147483648) to Integer.MAX_VALUE (2147483647). Higher numbers indicate higher priority.
For more information about setting task priority, see Setting Task Priority in the Amazon Simple Workflow Developer Guide .
defaultTaskScheduleToStartTimeout (string) -- If set, specifies the default maximum duration that a task of this activity type can wait before being assigned to a worker. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value "NONE" can be used to specify unlimited duration.
defaultTaskScheduleToCloseTimeout (string) -- If set, specifies the default maximum duration for a task of this activity type. This default can be overridden when scheduling an activity task using the ScheduleActivityTask decision.
The duration is specified in seconds; an integer greater than or equal to 0. The value "NONE" can be used to specify unlimited duration. |
13,206 | def pdf_from_post(self):
html = self.request.form.get("html")
style = self.request.form.get("style")
reporthtml = "<html><head>{0}</head><body>{1}</body></html>"
reporthtml = reporthtml.format(style, html)
reporthtml = safe_unicode(reporthtml).encode("utf-8")
pdf_fn = tempfile.mktemp(suffix=".pdf")
pdf_file = createPdf(htmlreport=reporthtml, outfile=pdf_fn)
return pdf_file | Returns a pdf stream with the stickers |
13,207 | def _launch_editor(starting_text=''):
    "Launch editor, let user write text, then return that text."
    editor = os.environ.get('EDITOR', 'vi')  # fallback editor is an assumption; original literal was stripped
    with tempfile.TemporaryDirectory() as dirname:
        filename = pathlib.Path(dirname) / 'message'  # temp file name is an assumption
        with filename.open(mode='w') as handle:
            handle.write(starting_text)
        subprocess.call([editor, filename])
        with filename.open(mode='r') as handle:
text = handle.read()
return text | Launch editor, let user write text, then return that text. |
13,208 | def to_python(self, value):
if value in self.empty_values:
try:
return self.empty_value
except AttributeError:
            return u''
return bleach.clean(value, **self.bleach_options) | Strips any dodgy HTML tags from the input |
13,209 | def compute_return(self, start_date, end_date, rate="MID"):
if rate not in ["MID", "ASK", "BID"]:
raise ValueError("Unknown rate type (%s)- must be , or " % str(rate))
if end_date <= start_date:
raise ValueError("End date must be on or after start date")
df = self.generate_dataframe(start_date=start_date, end_date=end_date)
    start_price = df.loc[start_date][rate]  # .loc replaces the deprecated .ix indexer
    end_price = df.loc[end_date][rate]
currency_return = (end_price / start_price) - 1.0
return currency_return | Compute the return of the currency between two dates |
13,210 | def merge_pot1_files(self, delete_source=True):
natom = len(self[0].input.structure)
max_pertcase = 3 * natom
pot1_files = []
for task in self:
if not isinstance(task, DfptTask): continue
paths = task.outdir.list_filepaths(wildcard="*_POT*")
for path in paths:
i = path.rindex("_POT")
pertcase = int(path[i+4:].replace(".nc", ""))
if pertcase <= max_pertcase:
pot1_files.append(path)
if not pot1_files: return None
self.history.info("Will call mrgdvdb to merge %s files:" % len(pot1_files))
out_dvdb = self.outdir.path_in("out_DVDB")
if len(pot1_files) == 1:
shutil.copy(pot1_files[0], out_dvdb)
else:
mrgdvdb = wrappers.Mrgdvdb(manager=self[0].manager, verbose=0)
mrgdvdb.merge(self.outdir.path, pot1_files, out_dvdb, delete_source=delete_source)
return out_dvdb | This method is called when all the q-points have been computed.
It runs `mrgdvdb` in sequential on the local machine to produce
the final DVDB file in the outdir of the `Work`.
Args:
delete_source: True if POT1 files should be removed after (successful) merge.
Returns:
path to the output DVDB file. None if no DFPT POT file is found. |
13,211 | def writeB1logfile(filename, data):
allkeys = list(data.keys())
    f = open(filename, 'wt', encoding='utf-8')
for ld in _logfile_data:
linebegin = ld[0]
fieldnames = ld[1]
if len(ld) < 3:
formatter = str
elif ld[2] is None:
formatter = str
else:
formatter = ld[2]
        formatted = ''
if isinstance(fieldnames, str):
if fieldnames not in allkeys:
continue
try:
formatted = formatter(data[fieldnames])
except KeyError:
continue
elif isinstance(fieldnames, tuple):
if all([(fn not in allkeys) for fn in fieldnames]):
continue
if isinstance(formatter, tuple) and len(formatter) == len(fieldnames):
                formatted = ' '.join([ft(data[fn])
                                      for ft, fn in zip(formatter, fieldnames)])
elif not isinstance(formatter, tuple):
formatted = formatter([data[fn] for fn in fieldnames])
else:
                raise SyntaxError('Formatter should be a callable or a tuple of callables matching the field names')
else:
            raise SyntaxError(
                'Invalid field name specification: ' + repr(fieldnames))
        linetowrite = linebegin + ':\t' + formatted + '\n'  # 'key:<tab>value' line format is an assumption
f.write(linetowrite)
if isinstance(fieldnames, tuple):
for fn in fieldnames:
if fn in allkeys:
allkeys.remove(fn)
else:
if fieldnames in allkeys:
allkeys.remove(fieldnames)
for k in allkeys:
        linetowrite = k + ':\t' + str(data[k]) + '\n'
f.write(linetowrite)
f.close() | Write a header structure into a B1 logfile.
Inputs:
filename: name of the file.
data: header dictionary
Notes:
exceptions pass through to the caller. |
13,212 | def transform_api_header_authorization(param, value):
try:
username, password = value.split(":", 1)
except ValueError:
raise click.BadParameter(
"Authorization header needs to be Authorization=username:password",
param=param,
)
value = "%s:%s" % (username.strip(), password)
value = base64.b64encode(bytes(value.encode()))
return "Basic %s" % value.decode("utf-8") | Transform a username:password value into a base64 string. |
13,213 | def add_mip_obj(model):
if len(model.variables) > 1e4:
LOGGER.warning("the MIP version of minimal media is extremely slow for"
" models that large :(")
exchange_rxns = find_boundary_types(model, "exchange")
big_m = max(abs(b) for r in exchange_rxns for b in r.bounds)
prob = model.problem
coefs = {}
to_add = []
for rxn in exchange_rxns:
export = len(rxn.reactants) == 1
indicator = prob.Variable("ind_" + rxn.id, lb=0, ub=1, type="binary")
if export:
vrv = rxn.reverse_variable
indicator_const = prob.Constraint(
vrv - indicator * big_m, ub=0, name="ind_constraint_" + rxn.id)
else:
vfw = rxn.forward_variable
indicator_const = prob.Constraint(
vfw - indicator * big_m, ub=0, name="ind_constraint_" + rxn.id)
to_add.extend([indicator, indicator_const])
coefs[indicator] = 1
model.add_cons_vars(to_add)
model.solver.update()
model.objective.set_linear_coefficients(coefs)
model.objective.direction = "min" | Add a mixed-integer version of a minimal medium to the model.
Changes the optimization objective to finding the medium with the least
components::
minimize size(R) where R part of import_reactions
Arguments
---------
model : cobra.model
The model to modify. |
13,214 | def file_type(self, file):
try:
magic_text = magic.from_file(file)
if (isinstance(magic_text, bytes)):
magic_text = magic_text.decode()
except (TypeError, IOError):
return
    if (re.search('PNG image', magic_text)):  # match patterns assumed from typical python-magic output
        return('png')
    elif (re.search('JPEG image', magic_text)):
        return('jpg')
return | Use python-magic to determine file type.
Returns 'png' or 'jpg' on success, nothing on failure. |
13,215 | def get_limits(self, coord='data'):
    limits = self.t_['limits']
if limits is None:
image = self.get_image()
if image is not None:
wd, ht = image.get_size()
limits = ((self.data_off, self.data_off),
(float(wd - 1 + self.data_off),
float(ht - 1 + self.data_off)))
else:
canvas = self.get_canvas()
pts = canvas.get_points()
if len(pts) > 0:
limits = trcalc.get_bounds(pts)
else:
limits = ((0.0, 0.0), (0.0, 0.0))
crdmap = self.get_coordmap(coord)
limits = crdmap.data_to(limits)
return limits | Get the bounding box of the viewer extents.
Returns
-------
limits : tuple
Bounding box in coordinates of type `coord` in the form of
``(ll_pt, ur_pt)``. |
13,216 | def zoom_in(self):
index = self._zoom_factors.index(self._zoom_factor)
if index + 1 == len(self._zoom_factors):
return
self._zoom_factor = self._zoom_factors[index + 1]
if self._zoom_factors.index(self.zoom_factor) + 1 == len(self._zoom_factors):
self._button_zoom_in.config(state=tk.DISABLED)
self._button_zoom_out.config(state=tk.NORMAL)
self.draw_timeline() | Increase zoom factor and redraw TimeLine |
13,217 | def load_image(self, idx):
    im = Image.open('{}/{}.jpg'.format(self.siftflow_dir, idx))  # image path pattern is an assumption; original literal stripped
in_ = np.array(im, dtype=np.float32)
in_ = in_[:,:,::-1]
in_ -= self.mean
in_ = in_.transpose((2,0,1))
return in_ | Load input image and preprocess for Caffe:
- cast to float
- switch channels RGB -> BGR
- subtract mean
- transpose to channel x height x width order |
13,218 | def get_activity_lookup_session(self, proxy, *args, **kwargs):
if not self.supports_activity_lookup():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.ActivityLookupSession(proxy=proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed()
return session | Gets the ``OsidSession`` associated with the activity lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ActivityLookupSession``
:rtype: ``osid.learning.ActivityLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_activity_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if ``supports_activity_lookup()`` is ``true``.* |
13,219 | def create_call(self, raw_request, **kwargs):
req = self.create_request(raw_request, **kwargs)
res = self.create_response(**kwargs)
rou = self.create_router(**kwargs)
c = self.call_class(req, res, rou)
    return c | Create a call object with request and response instances that the endpoints can understand. |
13,220 | def _define(self):
definition = []
q = QuantumRegister(2, "q")
rule = [
(U1Gate((self.params[2] - self.params[1]) / 2), [q[1]], []),
(CnotGate(), [q[0], q[1]], []),
(U3Gate(-self.params[0] / 2, 0, -(self.params[1] + self.params[2]) / 2), [q[1]], []),
(CnotGate(), [q[0], q[1]], []),
(U3Gate(self.params[0] / 2, self.params[1], 0), [q[1]], [])
]
for inst in rule:
definition.append(inst)
self.definition = definition | gate cu3(theta,phi,lambda) c, t
{ u1((lambda-phi)/2) t; cx c,t;
u3(-theta/2,0,-(phi+lambda)/2) t; cx c,t;
u3(theta/2,phi,0) t;
} |
13,221 | def _set_axis_ticks(self, axis, ticks, log=False, rotation=0):
if isinstance(ticks, (list, tuple)) and all(isinstance(l, list) for l in ticks):
axis.set_ticks(ticks[0])
axis.set_ticklabels(ticks[1])
elif isinstance(ticks, ticker.Locator):
axis.set_major_locator(ticks)
elif not ticks and ticks is not None:
axis.set_ticks([])
elif isinstance(ticks, int):
if log:
locator = ticker.LogLocator(numticks=ticks,
subs=range(1,10))
else:
locator = ticker.MaxNLocator(ticks)
axis.set_major_locator(locator)
elif isinstance(ticks, (list, tuple)):
labels = None
if all(isinstance(t, tuple) for t in ticks):
ticks, labels = zip(*ticks)
axis.set_ticks(ticks)
if labels:
axis.set_ticklabels(labels)
for tick in axis.get_ticklabels():
tick.set_rotation(rotation) | Allows setting the ticks for a particular axis either with
a tuple of ticks, a tick locator object, an integer number
of ticks, a list of tuples containing positions and labels
or a list of positions. Also supports enabling log ticking
if an integer number of ticks is supplied and setting a
rotation for the ticks. |
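A sketch of what the list-of-(position, label) and integer branches do in plain matplotlib terms; the figure and tick values are assumptions:

```python
import matplotlib.pyplot as plt
from matplotlib import ticker

fig, ax = plt.subplots()
ticks = [(0, 'zero'), (1, 'one'), (2, 'two')]  # positions paired with labels
positions, labels = zip(*ticks)
ax.xaxis.set_ticks(positions)
ax.xaxis.set_ticklabels(labels)
for tick in ax.xaxis.get_ticklabels():         # apply a rotation, as the helper does
    tick.set_rotation(45)
ax.yaxis.set_major_locator(ticker.MaxNLocator(5))  # the integer-count branch
```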
13,222 | def _add_var(var, value):
makeconf = _get_makeconf()
    layman = 'source /var/lib/layman/make.conf'
    fullvar = '{0}="{1}"'.format(var, value)
    if __salt__['file.contains'](makeconf, layman):
        cmd = ['sed', '-i', r'/{0}/i\{1}'.format(
            layman.replace('/', '\\/'),
            fullvar),
            makeconf]
        __salt__['cmd.run'](cmd)
    else:
        __salt__['file.append'](makeconf, fullvar) | Add a new var to the make.conf. If using layman, the source line
for the layman make.conf needs to be at the very end of the
config. This ensures that the new var will be above the source
line. |
13,223 | def full_text(self, level: int = 1) -> str:
res = ""
if self.wiki.extract_format == ExtractFormat.WIKI:
res += self.title
elif self.wiki.extract_format == ExtractFormat.HTML:
res += "<h{}>{}</h{}>".format(level, self.title, level)
else:
raise NotImplementedError("Unknown ExtractFormat type")
res += "\n"
res += self._text
if len(self._text) > 0:
res += "\n\n"
for sec in self.sections:
res += sec.full_text(level + 1)
return res | Returns text of the current section as well as all its subsections.
:param level: indentation level
:return: text of the current section as well as all its subsections |
13,224 | def QA_fetch_index_list_adv(collections=DATABASE.index_list):
    '''Get the stock list'''
index_list_items = QA_fetch_index_list(collections)
if len(index_list_items) == 0:
print("QA Error QA_fetch_index_list_adv call item for item in collections.find() return 0 item, maybe the DATABASE.index_list is empty!")
return None
    return index_list_items | 'Get the stock list'
:param collections: mongodb 数据库
:return: DataFrame |
13,225 | def is_active(self, timeout=2):
try:
        result = Result(*self.perform_request('GET', '/', params={'timeout': timeout}))  # method, path and param name are assumptions
except ConnectionError:
return False
except TransportError:
return False
if result.response.status_code == 200:
return True
return False | :param timeout: int
:return: boolean |
13,226 | def step(self, observations):
log_histogram = self(observations)
actions = self.q_head.sample(log_histogram)
        return {
            'actions': actions,  # key names assumed; original literals were stripped
            'log_histogram': log_histogram
        } | Sample action from an action space for given state |
13,227 | def print_square(row_queue, t):
occupied_rows = {y: row for _, y, row in row_queue}
    empty_row = ''.join('.' for _ in range(t))  # '.' as the empty-cell marker is an assumption
    for y in range(t):
        print('|', end='')
        if y not in occupied_rows:
            print(empty_row, end='')
        else:
            row = dict(occupied_rows[y])
            all_cols = ('%s' % row[x] if x in row else '.'
                        for x in range(t))
            print(''.join(all_cols), end='')
print("|") | Prints a row queue as its conceptual square array. |
13,228 | def match_regexp(self, value, q, strict=False):
value = stringify(value)
mr = re.compile(q)
if value is not None:
if mr.match(value):
return
self.shout(, strict, value, q) | if value matches a regexp q |
13,229 | def populate(self, priority, address, rtr, data):
assert isinstance(data, bytes)
self.needs_low_priority(priority)
self.needs_no_rtr(rtr)
self.set_attributes(priority, address, rtr)
self.module_type = data[0]
self.sub_address_1 = data[3]
self.sub_address_2 = data[4]
self.sub_address_3 = data[5]
self.sub_address_4 = data[6] | :return: None |
13,230 | def rec_edit(self, zone, record_type, record_id, name, content, ttl=1, service_mode=None, priority=None,
service=None, service_name=None, protocol=None, weight=None, port=None, target=None):
        # param keys follow the CloudFlare client API v1 rec_edit call
        params = {
            'a': 'rec_edit',
            'z': zone,
            'type': record_type,
            'id': record_id,
            'name': name,
            'content': content,
            'ttl': ttl
        }
        if service_mode is not None:
            params['service_mode'] = service_mode
        if priority is not None:
            params['prio'] = priority
        if service is not None:
            params['service'] = service
        if service_name is not None:
            params['srvname'] = service_name
        if protocol is not None:
            params['protocol'] = protocol
        if weight is not None:
            params['weight'] = weight
        if port is not None:
            params['port'] = port
        if target is not None:
            params['target'] = target
return self._request(params) | Edit a DNS record for the given zone.
:param zone: domain name
:type zone: str
:param record_type: Type of DNS record. Valid values are [A/CNAME/MX/TXT/SPF/AAAA/NS/SRV/LOC]
:type record_type: str
:param record_id: DNS Record ID. Available by using the rec_load_all call.
:type record_id: int
:param name: Name of the DNS record
:type name: str
:param content: The content of the DNS record, will depend on the the type of record being added
:type content: str
:param ttl: TTL of record in seconds. 1 = Automatic, otherwise, value must in between 120 and 4,294,967,295
seconds.
:type ttl: int
:param service_mode: [applies to A/AAAA/CNAME] Status of CloudFlare Proxy, 1 = orange cloud, 0 = grey cloud.
:type service_mode: int
:param priority: [applies to MX/SRV] MX record priority.
:type priority: int
:param service: Service for SRV record
:type service: str
:param service_name: Service Name for SRV record
:type service_name: str
:param protocol: Protocol for SRV record. Values are [_tcp/_udp/_tls].
:type protocol: str
:param weight: Weight for SRV record.
:type weight: int
:param port: Port for SRV record
:type port: int
:param target: Target for SRV record
:type target: str
:return:
:rtype: dict |
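A hypothetical call against the client above; the zone, record ID, and IP are placeholders:

```python
response = cf.rec_edit(
    zone='example.com',
    record_type='A',
    record_id=12345,
    name='www',
    content='203.0.113.10',
    ttl=1,           # 1 = automatic
    service_mode=1,  # 1 = proxied (orange cloud)
)
```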
13,231 | def V_horiz_spherical(D, L, a, h, headonly=False):
R = D/2.
r = (a**2 + R**2)/2./abs(a)
w = R - h
y = (2*R*h - h**2)**0.5
z = (r**2 - R**2)**0.5
Af = R**2*acos((R-h)/R) - (R-h)*(2*R*h - h**2)**0.5
if h == R and abs(a) <= R:
Vf = pi*a/6*(3*R**2 + a**2)
elif h == D and abs(a) <= R:
Vf = pi*a/3*(3*R**2 + a**2)
elif h == 0 or a == 0 or a == R or a == -R:
Vf = pi*a*h**2*(1 - h/3./R)
elif abs(a) >= 0.01*D:
Vf = a/abs(a)*(
2*r**3/3.*(acos((R**2 - r*w)/(R*(w-r))) + acos((R**2+r*w)/(R*(w+r)))
- z/r*(2+(R/r)**2)*acos(w/R))
- 2*(w*r**2 - w**3/3)*atan(y/z) + 4*w*y*z/3)
else:
def V_horiz_spherical_toint(x):
return (r**2 - x**2)*atan(((R**2 - x**2)/(r**2 - R**2))**0.5)
from scipy.integrate import quad
integrated = quad(V_horiz_spherical_toint, w, R)[0]
Vf = a/abs(a)*(2*integrated - Af*z)
if headonly:
Vf = Vf/2.
else:
Vf += Af*L
return Vf | r'''Calculates volume of a tank with spherical heads, according to [1]_.
.. math::
V_f = A_fL + \frac{\pi a}{6}(3R^2 + a^2),\;\; h = R, |a|\le R
.. math::
V_f = A_fL + \frac{\pi a}{3}(3R^2 + a^2),\;\; h = D, |a|\le R
.. math::
V_f = A_fL + \pi a h^2\left(1 - \frac{h}{3R}\right),\;\; h = 0,
\text{ or } |a| = 0, R, -R
.. math::
V_f = A_fL + \frac{a}{|a|}\left\{\frac{2r^3}{3}\left[\cos^{-1}
\frac{R^2 - rw}{R(w-r)} + \cos^{-1}\frac{R^2 + rw}{R(w+r)}
- \frac{z}{r}\left(2 + \left(\frac{R}{r}\right)^2\right)
\cos^{-1}\frac{w}{R}\right] - 2\left(wr^2 - \frac{w^3}{3}\right)
\tan^{-1}\frac{y}{z} + \frac{4wyz}{3}\right\}
,\;\; h \ne R, D; a \ne 0, R, -R, |a| \ge 0.01D
.. math::
V_f = A_fL + \frac{a}{|a|}\left[2\int_w^R(r^2 - x^2)\tan^{-1}
\sqrt{\frac{R^2-x^2}{r^2-R^2}}dx - A_f z\right]
,\;\; h \ne R, D; a \ne 0, R, -R, |a| < 0.01D
.. math::
Af = R^2\cos^{-1}\frac{R-h}{R} - (R-h)\sqrt{2Rh - h^2}
.. math::
r = \frac{a^2 + R^2}{2|a|}
.. math::
w = R - h
.. math::
y = \sqrt{2Rh-h^2}
.. math::
z = \sqrt{r^2 - R^2}
Parameters
----------
D : float
Diameter of the main cylindrical section, [m]
L : float
Length of the main cylindrical section, [m]
a : float
Distance the spherical head extends on one side, [m]
h : float
Height, as measured up to where the fluid ends, [m]
headonly : bool, optional
Function returns only the volume of a single head side if True
Returns
-------
V : float
Volume [m^3]
Examples
--------
Matching example from [1]_, with inputs in inches and volume in gallons.
>>> V_horiz_spherical(D=108., L=156., a=42., h=36)/231.
2303.9615116986183
References
----------
.. [1] Jones, D. "Calculating Tank Volume." Text. Accessed December 22, 2015.
http://www.webcalc.com.br/blog/Tank_Volume.PDF |
13,232 | def simple_ins_from_obs(obsnames, insfilename='model.output.ins'):
    with open(insfilename, 'w') as ofp:
        ofp.write('pif ~\n')  # PEST instruction-file header
        [ofp.write('l1 !{0}!\n'.format(cob)) for cob in obsnames] | Writes an instruction file that reads the values named in obsnames, in order,
one per line, from a model output file
Args:
obsnames: list of obsnames to read in
insfilename: filename for INS file (default: model.output.ins)
Returns:
writes a file <insfilename> with each observation read off a line |
13,233 | def build_rank_score_dict(rank_scores):
logger = getLogger(__name__)
logger.debug("Checking rank scores: {0}".format(rank_scores))
scores = {}
for family in rank_scores:
        entry = family.split(':')
try:
family_id = entry[0]
logger.debug("Extracting rank score for family:{0}".format(family_id))
score = entry[1]
logger.debug("Score:{0}".format(score))
except Exception:
raise SyntaxError("Malformed rank score input")
scores[family_id] = score
return scores | Take a list with annotated rank scores for each family and returns a
dictionary with family_id as key and a list of genetic models as value.
Args:
rank_scores : A list on the form ['1:12','2:20']
Returns:
scores : A dictionary with family id:s as key and scores as value
{
'1':'12',
'2':'20'
} |
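The docstring's example as a doctest:

```python
>>> build_rank_score_dict(['1:12', '2:20'])
{'1': '12', '2': '20'}
```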
13,234 | def _remove(self, removeList, selfValue):
for removeValue in removeList:
print(removeValue, removeList)
removeEverything(removeValue, selfValue) | Remove elements from a list by matching the elements in the other list.
This method only looks inside current instance's value, not recursive.
There is no need for a recursive one anyway.
Match by == operation.
Args:
removeList (list): The list of matching elements.
selfValue (list): The list you remove value from. Usually ``self.value`` |
13,235 | def write_compounds(self, stream, compounds, properties=None):
self._write_entries(
stream, compounds, self.convert_compound_entry, properties) | Write iterable of compounds as YAML object to stream.
Args:
stream: File-like object.
compounds: Iterable of compound entries.
properties: Set of compound properties to output (or None to output
all). |
13,236 | def parse_bind(bind):
if isinstance(bind, Connection):
engine = bind.engine
else:
engine = bind
m = re.match(r"Engine\((.*?)\)", str(engine))
if m is not None:
u = urlparse(m.group(1))
uses_netloc.append(u.scheme)
safe_url = ""
if u.password is None:
safe_url = u.geturl()
else:
            host_info = u.netloc.rpartition('@')[-1]
            parts = u._replace(netloc='{}@{}'.format(u.username, host_info))
            safe_url = parts.geturl()
        sql = {}
        sql['database_type'] = u.scheme
        sql['url'] = safe_url
        if u.username is not None:
            sql['user'] = "{}".format(u.username)
return sql | Parses a connection string and creates SQL trace metadata |
13,237 | def draw(self):
if self.__conditional_flag is True:
return np.concatenate((self.__create_samples(), self.__create_samples()), axis=1)
else:
return self.__create_samples() | Draws samples from the `true` distribution.
Returns:
`np.ndarray` of samples. |
13,238 | def replace_parameter(self, name, value=None):
spec = self.__specs[name] if name in self.__specs else None
if self.extra_parameters() is False and spec is None:
            raise ValueError('Extra parameters are not allowed')  # message text assumed
if spec is not None and spec.nullable() is False and value is None:
            raise ValueError('Parameter "%s" must not have a null value' % name)
if spec is not None and value is not None:
re_obj = spec.re_obj()
if re_obj is not None and re_obj.match(value) is None:
                raise ValueError('Parameter value does not match the specification')
WURIQuery.replace_parameter(self, name, value=value) | Replace a query parameter values with a new value. If a new value does not match current
specifications, then exception is raised
:param name: parameter name to replace
:param value: new parameter value. None is for empty (null) value
:return: None |
13,239 | def run_analysis(self, argv):
args = self._parser.parse_args(argv)
        ccube_dirty = HpxMap.create_from_fits(args.ccube_dirty, hdu='SKYMAP')  # HDU name is an assumption
        bexpcube_dirty = HpxMap.create_from_fits(args.bexpcube_dirty, hdu='SKYMAP')
        ccube_clean = HpxMap.create_from_fits(args.ccube_clean, hdu='SKYMAP')
        bexpcube_clean = HpxMap.create_from_fits(args.bexpcube_clean, hdu='SKYMAP')
if args.hpx_order:
hpx_order = args.hpx_order
else:
hpx_order = ccube_dirty.hpx.order
cube_dict = ResidualCR._match_cubes(ccube_clean, ccube_dirty,
bexpcube_clean, bexpcube_dirty, hpx_order)
        intensity_clean = ResidualCR._compute_intensity(cube_dict['ccube_clean'],
                                                        cube_dict['bexpcube_clean'])
        intensity_dirty = ResidualCR._compute_intensity(cube_dict['ccube_dirty'],
                                                        cube_dict['bexpcube_dirty'])
intensity_mean = ResidualCR._compute_mean(intensity_dirty,
intensity_clean)
intensity_ratio = ResidualCR._compute_ratio(intensity_dirty,
intensity_clean)
bright_pixel_select = ResidualCR._make_bright_pixel_mask(intensity_mean,
args.select_factor)
bright_pixel_mask = ResidualCR._make_bright_pixel_mask(intensity_mean,
args.mask_factor)
aeff_corrections = ResidualCR._get_aeff_corrections(intensity_ratio,
bright_pixel_select)
corrected_dirty = ResidualCR._apply_aeff_corrections(intensity_dirty,
aeff_corrections)
corrected_ratio = ResidualCR._compute_ratio(corrected_dirty,
intensity_clean)
intensity_resid = ResidualCR._compute_diff(corrected_dirty,
intensity_clean)
filled_resid = ResidualCR._fill_masked_intensity_resid(intensity_resid,
bright_pixel_mask)
smooth_resid = ResidualCR._smooth_hpx_map(filled_resid,
args.sigma)
out_model = ResidualCR._intergral_to_differential(smooth_resid)
out_energies = ccube_dirty.hpx.make_energies_hdu()
cubes = dict(SKYMAP=out_model)
fits_utils.write_maps(None, cubes,
args.outfile, energy_hdu=out_energies)
if args.full_output:
check = ResidualCR._differential_to_integral(out_model)
check_resid = ResidualCR._compute_diff(smooth_resid, check)
            counts_resid =\
                ResidualCR._compute_counts_from_intensity(intensity_resid,
                                                          cube_dict['bexpcube_dirty'])
            pred_counts\
                = ResidualCR._compute_counts_from_model(out_model,
                                                        cube_dict['bexpcube_dirty'])
pred_resid = ResidualCR._compute_diff(pred_counts, counts_resid)
out_ebounds = ccube_dirty.hpx.make_energy_bounds_hdu()
cubes = dict(INTENSITY_CLEAN=intensity_clean,
INTENSITY_DIRTY=intensity_dirty,
INTENSITY_RATIO=intensity_ratio,
CORRECTED_DIRTY=corrected_dirty,
CORRECTED_RATIO=corrected_ratio,
INTENSITY_RESID=intensity_resid,
PIXEL_SELECT=bright_pixel_select,
PIXEL_MASK=bright_pixel_mask,
FILLED_RESID=filled_resid,
SMOOTH_RESID=smooth_resid,
CHECK=check,
CHECK_RESID=check_resid,
COUNTS_RESID=counts_resid,
PRED_COUNTS=pred_counts,
PRED_RESID=pred_resid)
fits_utils.write_maps(None, cubes,
                                  args.outfile.replace('.fits', '_full.fits'),  # output suffix is an assumption
energy_hdu=out_ebounds) | Run this analysis |
13,240 | def import_certificate(
ctx, slot, management_key, pin, cert, password, verify):
    controller = ctx.obj['controller']
_ensure_authenticated(ctx, controller, pin, management_key)
data = cert.read()
while True:
if password is not None:
password = password.encode()
try:
certs = parse_certificates(data, password)
except (ValueError, TypeError):
if password is None:
                password = click.prompt(
                    'Enter password to decrypt certificate',
                    default='', hide_input=True,
                    show_default=False,
                    err=True)
continue
else:
password = None
                click.echo('Wrong password.')
continue
break
if len(certs) > 1:
leafs = get_leaf_certificates(certs)
cert_to_import = leafs[0]
else:
cert_to_import = certs[0]
def do_import(retry=True):
try:
controller.import_certificate(
slot, cert_to_import, verify=verify,
touch_callback=prompt_for_touch)
except KeypairMismatch:
            ctx.fail('This certificate is not tied to the private key in the '
                     '{} slot.'.format(slot.name))
except APDUError as e:
if e.sw == SW.SECURITY_CONDITION_NOT_SATISFIED and retry:
_verify_pin(ctx, controller, pin)
do_import(retry=False)
else:
raise
do_import() | Import a X.509 certificate.
Write a certificate to one of the slots on the YubiKey.
\b
SLOT PIV slot to import the certificate to.
CERTIFICATE File containing the certificate. Use '-' to use stdin. |
13,241 | def get_num_confirmations(tx_hash, coin_symbol=, api_key=None):
return get_transaction_details(tx_hash=tx_hash, coin_symbol=coin_symbol,
                                   limit=1, api_key=api_key).get('confirmations') | Given a tx_hash, return the number of confirmations that transaction has.
Answer is going to be from 0 - current_block_height. |
13,242 | def _processMsg(self, type, msg):
now = datetime.datetime.now()
        if self.LOG_FILE_PATH == '':
            self.LOG_FILE_PATH = os.path.dirname(os.path.abspath(__file__)) + '/'
        log_file = self.LOG_FILE_PATH + now.strftime(self.LOG_FILE_FORMAT) + '.log'  # '.log' suffix is an assumption
msg = self.LOG_MESSAGE_FORMAT.format(
TYPE=type.upper(),
DATE=now.strftime(self.DATES_FORMAT),
DATETIME=now.strftime(self.DATETIME_FORMAT),
MESSAGE=msg,
)
if self.PLATFORM_DATA:
msg = msg.format(
PL_TYPE=platform.machine(),
PL_NAME=platform.node(),
PL_PROCESSOR=platform.processor(),
PL_PY_BUILD_DATE=platform.python_build()[1],
PL_PY_COMPILER=platform.python_compiler(),
PL_PY_RELEASE=platform.release(),
PL_OS=platform.system(),
PL_TIMEZONE=strftime("%z", gmtime())
)
self._STORAGE = Storage(log_file)
return self._STORAGE.write(msg) | Process Debug Messages |
13,243 | def create_cursor(self, name=None):
cursor = self.connection.cursor()
cursor.tzinfo_factory = self.tzinfo_factory
return cursor | Creates a cursor. Assumes that a connection is established. |
13,244 | def on_drag_data_received(self, widget, context, x, y, data, info, time):
state_id_insert = data.get_text()
parent_m = self.model.selection.get_selected_state()
if not isinstance(parent_m, ContainerStateModel):
return
state_v = self.canvas.get_view_for_model(parent_m.states[state_id_insert])
        pos_start = state_v.model.get_meta_data_editor()['rel_pos']  # meta-data key assumed
motion = InMotion(state_v, self.view.editor)
motion.start_move(self.view.editor.get_matrix_i2v(state_v).transform_point(pos_start[0], pos_start[1]))
motion.move((x, y))
motion.stop_move()
        state_v.model.set_meta_data_editor('rel_pos', motion.item.position)
self.canvas.wait_for_update(trigger_update=True)
        self._meta_data_changed(None, state_v.model, 'position', True) | Receives state_id from LibraryTree and moves the state to the position of the mouse
:param widget:
:param context:
:param x: Integer: x-position of mouse
:param y: Integer: y-position of mouse
:param data: SelectionData: contains state_id
:param info:
:param time: |
13,245 | def close(self):
if not (yield from super().close()):
return False
self.acpi_shutdown = False
yield from self.stop()
for adapter in self._ethernet_adapters:
if adapter is not None:
for nio in adapter.ports.values():
if nio and isinstance(nio, NIOUDP):
self.manager.port_manager.release_udp_port(nio.lport, self._project)
for udp_tunnel in self._local_udp_tunnels.values():
self.manager.port_manager.release_udp_port(udp_tunnel[0].lport, self._project)
self.manager.port_manager.release_udp_port(udp_tunnel[1].lport, self._project)
self._local_udp_tunnels = {} | Closes this QEMU VM. |
13,246 | def select_newest_project(dx_project_ids):
if len(dx_project_ids) == 1:
return dx_project_ids[0]
projects = [dxpy.DXProject(x) for x in dx_project_ids]
created_times = [x.describe()["created"] for x in projects]
    paired = list(zip(created_times, projects))  # list() for Python 3 compatibility
    paired.sort(key=lambda pair: pair[0], reverse=True)
    return paired[0][1].get_id()  # ID of the most recently created project | Given a list of DNAnexus project IDs, returns the one that is newest as determined by creation date.
Args:
dx_project_ids: `list` of DNAnexus project IDs.
Returns:
`str`. |
13,247 | def activateRandomLocation(self):
self.activePhases = np.array([np.random.random(2)])
if self.anchoringMethod == "discrete":
self.activePhases = np.floor(
self.activePhases * self.cellDimensions)/self.cellDimensions
self._computeActiveCells() | Set the location to a random point. |
13,248 | def save(self, output_file, overwrite=False):
if os.path.exists(output_file) and overwrite is False:
            raise ModelFileExists("The file %s exists already. If you want to overwrite it, use the "
                                  "'overwrite=True' option." % output_file)
else:
data = self.to_dict_with_types()
try:
representation = my_yaml.dump(data, default_flow_style=False)
with open(output_file, "w+") as f:
f.write(representation.replace("\n", "\n\n"))
except IOError:
raise CannotWriteModel(os.path.dirname(os.path.abspath(output_file)),
"Could not write model file %s. Check your permissions to write or the "
"report on the free space which follows: " % output_file) | Save the model to disk |
13,249 | def urlunparse(data):
scheme, netloc, url, params, query, fragment = data
if params:
url = "%s;%s" % (url, params)
return urlunsplit((scheme, netloc, url, query, fragment)) | Put a parsed URL back together again. This may result in a
slightly different, but equivalent URL, if the URL that was parsed
originally had redundant delimiters, e.g. a ? with an empty query
(the draft states that these are equivalent). |
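A quick check of the params handling, round-tripping a six-tuple into a URL:

```python
parts = ('http', 'example.com', '/path', 'params', 'q=1', 'frag')
print(urlunparse(parts))  # http://example.com/path;params?q=1#frag
```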
13,250 | def get_exchange_rates(self, **params):
    response = self._get('v2', 'exchange-rates', params=params)
return self._make_api_object(response, APIObject) | https://developers.coinbase.com/api/v2#exchange-rates |
13,251 | def edit_custom_examples(program, config):
if (not config.custom_dir) or (not os.path.exists(config.custom_dir)):
_inform_cannot_edit_no_custom_dir()
return
resolved_program = get_resolved_program(program, config)
custom_file_paths = get_file_paths_for_program(
resolved_program,
config.custom_dir
)
if (len(custom_file_paths) > 0):
path_to_edit = custom_file_paths[0]
else:
        path_to_edit = os.path.join(config.custom_dir, resolved_program + '.md')  # '.md' extension assumed
subprocess.call([config.editor_cmd, path_to_edit]) | Edit custom examples for the given program, creating the file if it does
not exist. |
13,252 | def _match_net(self, net):
if self.network:
return match_list(self.network, net)
else:
return True | Match a query for a specific network/list of networks |
13,253 | def _make_scaled_srcmap(self):
self.logger.info()
bexp0 = fits.open(self.files[])
bexp1 = fits.open(self.config[][])
srcmap = fits.open(self.config[][])
if bexp0[0].data.shape != bexp1[0].data.shape:
raise Exception()
bexp_ratio = bexp0[0].data / bexp1[0].data
self.logger.info(
% (np.min(bexp_ratio),
np.median(
bexp_ratio),
np.max(bexp_ratio)))
for hdu in srcmap[1:]:
        if hdu.name == 'GTI':
            continue
        if hdu.name == 'EBOUNDS':
            continue
        hdu.data *= bexp_ratio
    srcmap.writeto(self.files['srcmap'], overwrite=True) | Make an exposure cube with the same binning as the counts map. |
13,254 | def removeCallback(cls, eventType, func, record=None):
callbacks = cls.callbacks()
callbacks.setdefault(eventType, [])
for i in xrange(len(callbacks[eventType])):
my_func, my_record, _ = callbacks[eventType][i]
if func == my_func and record == my_record:
del callbacks[eventType][i]
break | Removes a callback from the model's event callbacks.
:param eventType: <str>
:param func: <callable> |
13,255 | def fetch(dbconn, tablename, n=1, uuid=None, end=True):
cur = dbconn.cursor()
    order = 'DESC' if end else 'ASC'
    try:
        if uuid:
            cur.execute("SELECT * FROM {} WHERE uuid = '{}' ORDER BY ROWID {} LIMIT {};".format(
                tablename, uuid, order, n))
        else:
            cur.execute("SELECT * FROM {} ORDER BY ROWID {} LIMIT {};".format(tablename, order, n))
    except sqlite3.OperationalError as e:
        if 'no such table' not in getattr(e, 'message', str(e)):
logger.error(e)
return []
rows = cur.fetchall()
return rows | Returns `n` rows from the table's start or end
:param dbconn: database connection
:param tablename: name of the table
:param n: number of rows to return from the end of the table
:param uuid: Optional UUID to select from
:return: If n > 1, a list of rows. If n=1, a single row |
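A minimal usage sketch with an in-memory SQLite database; the table layout is an assumption:

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE logs (uuid TEXT, value INTEGER)')
conn.executemany('INSERT INTO logs VALUES (?, ?)',
                 [('a', 1), ('a', 2), ('b', 3)])
print(fetch(conn, 'logs', n=2))        # last two rows
print(fetch(conn, 'logs', uuid='a'))   # last row with uuid 'a'
```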
13,256 | def _get_fields(self, event, pull, message=None):
result = pull.fields_general(event)
if message is not None:
result["__message__"] = message
return result | Constructs a dictionary of fields and replacement values based on the
specified event and the status of the pull request.
:arg event: one of ["start", "error", "finish"].
:arg pull: an instance of PullRequest that has details about the current
status of the pull request testing etc.
:arg message: an additional contextual message to add in the __message__ field. |
13,257 | def find_ent_endurance_tier_price(package, tier_level):
    for item in package['items']:
        for attribute in item.get('attributes', []):
            if int(attribute['value']) == ENDURANCE_TIERS.get(tier_level):
                break
        else:
            continue
        price_id = _find_price_id(item['prices'], 'storage_tier_level')
if price_id:
return price_id
raise ValueError("Could not find price for endurance tier level") | Find the price in the given package with the specified tier level
:param package: The Enterprise (Endurance) product package
:param tier_level: The endurance tier for which a price is desired
:return: Returns the price for the given tier, or an error if not found |
13,258 | def repr2(x):
s = repr(x)
    if len(s) >= 2 and s[0] == "u" and (s[1] == "'" or s[1] == '"'):
s = s[1:]
return s | Analogous to repr(), but will suppress 'u' prefix when repr-ing a unicode string. |
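Behaviour sketch: on Python 2, repr(u'abc') is "u'abc'" and the prefix is stripped; on Python 3 the repr already has no prefix, so the string passes through unchanged:

```python
>>> repr2(u'abc')
"'abc'"
```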
13,259 | def send_to_redshift(
instance,
data,
replace=True,
batch_size=1000,
types=None,
primary_key=(),
create_boolean=False):
connection_kwargs = redshift_credentials.credential(instance)
print("Initiate send_to_redshift...")
print("Test to know if the table exists...")
if (not create.existing_test(instance, data["table_name"])) or (types is not None) or (primary_key != ()):
create_boolean = True
print("Test to know if the table exists...OK")
if create_boolean:
create.create_table(instance, data, primary_key, types)
ssh_host = os.environ.get("SSH_%s_HOST" % instance)
ssh_user = os.environ.get("SSH_%s_USER" % instance)
ssh_path_private_key = os.environ.get("SSH_%s_PATH_PRIVATE_KEY" % instance)
if ssh_host:
tunnel = SSHTunnelForwarder(
(ssh_host, 22),
ssh_username=ssh_user,
ssh_private_key=ssh_path_private_key,
remote_bind_address=(
os.environ.get("RED_%s_HOST" % instance), int(os.environ.get("RED_%s_PORT" % instance))),
            local_bind_address=('localhost', 6543),
)
try:
tunnel.start()
print("Tunnel opened!")
except sshtunnel.HandlerSSHTunnelForwarderError:
pass
connection_kwargs["host"] = "localhost"
connection_kwargs["port"] = 6543
con = psycopg2.connect(**connection_kwargs)
cursor = con.cursor()
if replace:
        cleaning_request = 'DELETE FROM ' + data["table_name"] + ';'
print("Cleaning")
cursor.execute(cleaning_request)
print("Cleaning Done")
boolean = True
index = 0
total_nb_batchs = len(data["rows"]) // batch_size + 1
while boolean:
temp_row = []
for i in range(batch_size):
if not data["rows"]:
boolean = False
continue
temp_row.append(data["rows"].pop())
final_data = []
for x in temp_row:
for y in x:
final_data.append(y)
        temp_string = ','.join(map(lambda a: '(' + ','.join(map(lambda b: '%s', a)) + ')', tuple(temp_row)))
        inserting_request = 'INSERT INTO ' + data["table_name"] + ' (' + ", ".join(
            data["columns_name"]) + ') VALUES ' + temp_string + ';'
if final_data:
cursor.execute(inserting_request, final_data)
index = index + 1
percent = round(index * 100 / total_nb_batchs, 2)
if percent < 100:
print("\r %s / %s (%s %%)" % (str(index), total_nb_batchs, str(percent)), end=)
else:
print("\r %s / %s (%s %%)" % (str(index), total_nb_batchs, str(percent)))
con.commit()
cursor.close()
con.close()
if ssh_host:
tunnel.close()
print("Tunnel closed!")
print("data sent to redshift")
return 0 | data = {
"table_name" : 'name_of_the_redshift_schema' + '.' + 'name_of_the_redshift_table' #Must already exist,
"columns_name" : [first_column_name,second_column_name,...,last_column_name],
"rows" : [[first_raw_value,second_raw_value,...,last_raw_value],...]
} |
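A hedged usage sketch following the docstring's data layout; the instance name, schema, and rows are placeholders:

```python
data = {
    "table_name": "analytics.events",            # schema.table, as the docstring requires
    "columns_name": ["event_id", "event_name"],
    "rows": [[1, "signup"], [2, "login"]],
}
send_to_redshift("PROD", data, replace=True, batch_size=1000)
```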
13,260 | def embed_snippet(views,
drop_defaults=True,
state=None,
indent=2,
embed_url=None,
requirejs=True,
cors=True
):
data = embed_data(views, drop_defaults=drop_defaults, state=state)
    widget_views = u'\n'.join(
        widget_view_template.format(view_spec=escape_script(json.dumps(view_spec)))
        for view_spec in data['view_specs']
)
if embed_url is None:
embed_url = DEFAULT_EMBED_REQUIREJS_URL if requirejs else DEFAULT_EMBED_SCRIPT_URL
load = load_requirejs_template if requirejs else load_template
    use_cors = ' crossorigin="anonymous"' if cors else ''
values = {
        'load': load.format(embed_url=embed_url, use_cors=use_cors),
        'json_data': escape_script(json.dumps(data['manager_state'], indent=indent)),
        'widget_views': widget_views,
}
return snippet_template.format(**values) | Return a snippet that can be embedded in an HTML file.
Parameters
----------
{views_attribute}
{embed_kwargs}
Returns
-------
A unicode string with an HTML snippet containing several `<script>` tags. |
13,261 | def private_vlan_mode(self, **kwargs):
        int_type = kwargs.pop('int_type').lower()
        name = kwargs.pop('name')
        mode = kwargs.pop('mode').lower()
        callback = kwargs.pop('callback', self._callback)
        int_types = ['gigabitethernet', 'tengigabitethernet',
                     'fortygigabitethernet', 'hundredgigabitethernet',
                     'port_channel']
        valid_modes = ['host', 'promiscuous', 'trunk_host',
                       'trunk_basic', 'trunk_promiscuous']
        if int_type not in int_types:
            raise ValueError("Incorrect int_type value.")
        if mode not in valid_modes:
            raise ValueError('%s must be one of: %s' % (mode, valid_modes))
        if not pynos.utilities.valid_interface(int_type, name):
            raise ValueError('`name` must be in the format of x/y/z for '
                             'physical interfaces or x for port channel.')
        pvlan_args = dict(name=name)
        if 'trunk' in mode:
            # getattr name patterns are assumptions; the original literals were stripped
            pvlan_mode = getattr(self._interface,
                                 'interface_%s_switchport_mode_private_vlan_trunk_%s' %
                                 (int_type, mode))
        else:
            pvlan_mode = getattr(self._interface,
                                 'interface_%s_switchport_mode_private_vlan_%s' % (int_type, mode))
config = pvlan_mode(**pvlan_args)
return callback(config) | Set PVLAN mode (promiscuous, host, trunk).
Args:
int_type (str): Type of interface. (gigabitethernet,
tengigabitethernet, etc)
name (str): Name of interface. (1/0/5, 1/0/10, etc)
mode (str): The switchport PVLAN mode.
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `int_type`, `name`, or `mode` is not specified.
ValueError: if `int_type`, `name`, or `mode` is invalid.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.211', '10.24.39.203']
>>> auth = ('admin', 'password')
>>> int_type = 'tengigabitethernet'
>>> name = '225/0/38'
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.enable_switchport(int_type,
... name)
... output = dev.interface.private_vlan_mode(
... int_type=int_type, name=name, mode='trunk_host')
... dev.interface.private_vlan_mode()
... # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
KeyError |
13,262 | def load(self):
projects = {}
path = os.path.expanduser(self.path)
if not os.path.isdir(path):
return projects
logger.debug("Load project configs from %s", path)
for filename in os.listdir(path):
filename_parts = os.path.splitext(filename)
if filename_parts[1][1:] != PROJECT_CONFIG_EXTENSION:
continue
name = filename_parts[0]
try:
project_file_path = os.path.join(path, filename)
with open(project_file_path) as f:
data = yaml.load(f)
projects[name] = data
except ValueError:
continue
logger.debug("Project config readed from {}".format(name, project_file_path))
return projects | Load the projects config data from local path
Returns:
Dict: project_name -> project_data |
13,263 | def allow_network_access_grading(self):
        vals = self._hook_manager.call_hook('task_network_grading', course=self.get_course(), task=self, default=self._network_grading)  # hook name assumed
return vals[0] if len(vals) else self._network_grading | Return True if the grading container should have access to the network |
13,264 | def create_package(self, output=None):
current_file = os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe())))
        handler_file = os.sep.join(current_file.split(os.sep)[0:]) + os.sep + 'handler.py'
        if self.stage_config.get('slim_handler', False):
            self.zip_path = self.zappa.create_lambda_zip(
                prefix=self.lambda_name,
                use_precompiled_packages=self.stage_config.get('use_precompiled_packages', True),
                exclude=self.stage_config.get('exclude', []),
                disable_progress=self.disable_progress,
                archive_format='tarball'
            )
        if self.django_settings:
            base = __file__.rsplit(os.sep, 1)[0]
            django_py = ''.join(os.path.join(base, 'ext', 'django_zappa.py'))
            # lambda_zip and settings_s come from surrounding code elided in this excerpt
            lambda_zip.write(django_py, 'django_zappa.py')
            async_response_table = self.stage_config.get('async_response_table', '')
            settings_s += "ASYNC_RESPONSE_TABLE='{0!s}'\n".format(async_response_table)
            temp_settings = tempfile.NamedTemporaryFile(delete=False)
            os.chmod(temp_settings.name, 0o644)
            temp_settings.write(bytes(settings_s, "utf-8"))
            temp_settings.close()
            lambda_zip.write(temp_settings.name, 'zappa_settings.py')
            os.unlink(temp_settings.name) | Ensure that the package can be properly configured,
and then create it. |
13,265 | def export(self, nidm_version, export_dir):
atts = (
            (PROV['type'], self.type),
            (PROV['label'], self.label),
(NIDM_HAS_ALTERNATIVE_HYPOTHESIS, self.tail))
if self.partial_degree is not None:
atts += (
(SPM_PARTIAL_CONJUNCTION_DEGREE, self.partial_degree),)
self.add_attributes(atts) | Create prov entities and activities. |
13,266 | def visit_ClassDef(self, node):
for base in node.bases:
if isinstance(base, ast.Name) and isinstance(base.ctx, ast.Load):
base = getattr(runtime, base.id, None)
elif isinstance(base, ast.Attribute) and isinstance(base.ctx, ast.Load):
base = getattr(runtime, base.attr, None)
else:
continue
if issubclass(base, runtime.Process):
break
else:
return
descriptor = ProcessDescriptor(source=self.source)
embedded_class_fields = {
runtime.PROCESS_INPUTS_NAME: descriptor.inputs,
runtime.PROCESS_OUTPUTS_NAME: descriptor.outputs,
}
for item in node.body:
if isinstance(item, ast.Assign):
if (len(item.targets) == 1 and isinstance(item.targets[0], ast.Name)
and isinstance(item.targets[0].ctx, ast.Store)
and item.targets[0].id in PROCESS_METADATA):
value = PROCESS_METADATA[item.targets[0].id].get_value(item.value)
setattr(descriptor.metadata, item.targets[0].id, value)
elif (isinstance(item, ast.Expr) and isinstance(item.value, ast.Str)
and descriptor.metadata.description is None):
descriptor.metadata.description = item.value.s
elif isinstance(item, ast.ClassDef) and item.name in embedded_class_fields.keys():
self.visit_field_class(item, descriptor, embedded_class_fields[item.name])
descriptor.validate()
self.processes.append(descriptor) | Visit top-level classes. |
13,267 | def get_users_in_project(self, projectname):
ds_project = self.get_project(projectname)
if ds_project is None:
logger.error(
"Project does not exist in MAM" % projectname)
raise RuntimeError(
"Project does not exist in MAM" % projectname)
user_list = []
if ds_project["Users"] != "":
user_list = ds_project["Users"].lower().split(",")
return user_list | Get list of users in project from MAM. |
13,268 | def get_dhcp_options(dhcp_options_name=None, dhcp_options_id=None,
region=None, key=None, keyid=None, profile=None):
if not any((dhcp_options_name, dhcp_options_id)):
        raise SaltInvocationError('At least one of the following must be specified: '
                                  'dhcp_options_name, dhcp_options_id.')
if not dhcp_options_id and dhcp_options_name:
        dhcp_options_id = _get_resource_id('dhcp_options', dhcp_options_name,
region=region, key=key,
keyid=keyid, profile=profile)
if not dhcp_options_id:
            return {'dhcp_options': {}}
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
r = conn.get_all_dhcp_options(dhcp_options_ids=[dhcp_options_id])
except BotoServerError as e:
        return {'error': __utils__['boto.get_error'](e)}
if not r:
        return {'dhcp_options': None}
    keys = ('domain_name', 'domain_name_servers', 'ntp_servers',
            'netbios_name_servers', 'netbios_node_type')
    return {'dhcp_options': dict((k, r[0].options.get(k)) for k in keys)} | Return a dict with the current values of the requested DHCP options set
CLI Example:
.. code-block:: bash
salt myminion boto_vpc.get_dhcp_options 'myfunnydhcpoptionsname'
.. versionadded:: 2016.3.0 |
13,269 | def present_active_subjunctive(self):
if self.sng == "vera":
forms = ["sé", "sér", "sé", "sém", "séð", "sé"]
return forms
elif self.sng == "sjá":
forms = ["sjá", "sér", "sé", "sém", "séð", "sé"]
return forms
else:
subjunctive_root = self.sng[:-1] if self.sng[-1] == "a" else self.sng
forms = [subjunctive_root + "a"]
subjunctive_root = subjunctive_root[:-1] if subjunctive_root[-1] == "j" else subjunctive_root
forms.append(subjunctive_root + "ir")
forms.append(subjunctive_root + "i")
forms.append(subjunctive_root + "im")
forms.append(subjunctive_root + "ið")
forms.append(subjunctive_root + "i")
return forms | Strong verbs
I
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["líta", "lítr", "leit", "litu", "litinn"])
>>> verb.present_active_subjunctive()
['líta', 'lítir', 'líti', 'lítim', 'lítið', 'líti']
II
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["bjóða", "býðr", "bauð", "buðu", "boðinn"])
>>> verb.present_active_subjunctive()
['bjóða', 'bjóðir', 'bjóði', 'bjóðim', 'bjóðið', 'bjóði']
III
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["verða", "verðr", "varð", "urðu", "orðinn"])
>>> verb.present_active_subjunctive()
['verða', 'verðir', 'verði', 'verðim', 'verðið', 'verði']
IV
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["bera", "berr", "bar", "báru", "borinn"])
>>> verb.present_active_subjunctive()
['bera', 'berir', 'beri', 'berim', 'berið', 'beri']
V
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["gefa", "gefr", "gaf", "gáfu", "gefinn"])
>>> verb.present_active_subjunctive()
['gefa', 'gefir', 'gefi', 'gefim', 'gefið', 'gefi']
VI
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["fara", "ferr", "fór", "fóru", "farinn"])
>>> verb.present_active_subjunctive()
['fara', 'farir', 'fari', 'farim', 'farið', 'fari']
VII
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["ráða", "ræðr", "réð", "réðu", "ráðinn"])
>>> verb.present_active_subjunctive()
['ráða', 'ráðir', 'ráði', 'ráðim', 'ráðið', 'ráði']
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["vera", "a", "a", "a", "a"])
>>> verb.present_active_subjunctive()
['sé', 'sér', 'sé', 'sém', 'séð', 'sé']
>>> verb = StrongOldNorseVerb()
>>> verb.set_canonic_forms(["sjá", "a", "a", "a", "a"])
>>> verb.present_active_subjunctive()
['sjá', 'sér', 'sé', 'sém', 'séð', 'sé']
:return: list of the six present active subjunctive forms |
13,270 | def get_spaces(self, space_key=None, expand=None, start=None, limit=None, callback=None):
params = {}
if space_key:
params["spaceKey"] = space_key
if expand:
params["expand"] = expand
if start is not None:
params["start"] = int(start)
if limit is not None:
params["limit"] = int(limit)
return self._service_get_request("rest/api/space", params=params, callback=callback) | Returns information about the spaces present in the Confluence instance.
:param space_key (string): OPTIONAL: A list of space keys to filter on. Default: None.
:param expand (string): OPTIONAL: A comma separated list of properties to expand on the spaces.
Default: Empty
:param start (int): OPTIONAL: The start point of the collection to return. Default: 0.
:param limit (int): OPTIONAL: A limit of the number of spaces to return, this could be restricted by fixed
system limits. Default: 25.
:param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns.
Default: None (no callback, raw data returned).
:return: The JSON data returned from the space endpoint,
or the results of the callback. Will raise requests.HTTPError on bad input, potentially. |
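A hedged usage sketch; `client` is a hypothetical instance of this Confluence wrapper, and the 'results' key reflects the usual Confluence REST response shape rather than anything stated in the record:

# fetch up to 10 spaces, expanding their descriptions
spaces = client.get_spaces(space_key='DEV', expand='description', limit=10)
for space in spaces.get('results', []):
    print(space.get('key'), space.get('name'))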
13,271 | def ext_pillar(hyper_id, pillar, name, key):
vk = salt.utils.virt.VirtKey(hyper_id, name, __opts__)
ok = vk.accept(key)
    pillar['virtkey'] = {name: ok}
return {} | Accept the key for the VM on the hyper, if authorized. |
13,272 | def default_logging(grab_log=None,
network_log=None,
                    level=logging.DEBUG, mode='a',
propagate_network_logger=False):
logging.basicConfig(level=level)
    network_logger = logging.getLogger('grab.network')
network_logger.propagate = propagate_network_logger
if network_log:
hdl = logging.FileHandler(network_log, mode)
network_logger.addHandler(hdl)
network_logger.setLevel(level)
    grab_logger = logging.getLogger('grab')
if grab_log:
hdl = logging.FileHandler(grab_log, mode)
grab_logger.addHandler(hdl)
grab_logger.setLevel(level) | Customize logging output to display all log messages
except grab network logs.
Redirect grab network logs into file. |
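A minimal usage sketch; the file paths are placeholders:

import logging
# console gets INFO and above for everything except Grab's network chatter,
# which is appended to its own file
default_logging(grab_log='/tmp/grab.log',
                network_log='/tmp/grab.network.log',
                level=logging.INFO)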
13,273 | def output_xml(self, text):
    # NOTE: the element tag names and payload keys below were stripped in the
    # source; they are reconstructed from the variable names and are not
    # guaranteed to match the original literals.
    document = Element('response')
    comment = Comment('Aggregated measurements')
    document.append(comment)
    aggregates = SubElement(document, 'aggregates')
    aggregate = SubElement(aggregates, 'aggregate')
    measurements = SubElement(aggregate, 'measurements')
    payload = json.loads(text)
    metric_name = self._metric_name
    for r in payload['measurements']['series']['values']:
        timestamp = self._format_timestamp(r[0][0])
        for s in r[1]:
            measure_node = SubElement(measurements, 'measurement')
            source = s[0]
            value = str(s[1])
            ts_node = SubElement(measure_node, 'timestamp')
            ts_node.text = str(timestamp)
            metric_node = SubElement(measure_node, 'metric')
            metric_node.text = metric_name
            metric_node = SubElement(measure_node, 'aggregate')
            metric_node.text = self.aggregate
            source_node = SubElement(measure_node, 'source')
            source_node.text = source
            value_node = SubElement(measure_node, 'value')
            value_node.text = value
    rough_string = ElementTree.tostring(document, 'utf-8')
    reparse = minidom.parseString(rough_string)
    output = reparse.toprettyxml(indent=" ")
    print(self.colorize_xml(output)) | Output results in XML format |
13,274 | def make_generic_c_patterns(keywords, builtins,
instance=None, define=None, comment=None):
"Strongly inspired from idlelib.ColorDelegator.make_pat"
kw = r"\b" + any("keyword", keywords.split()) + r"\b"
builtin = r"\b" + any("builtin", builtins.split()+C_TYPES.split()) + r"\b"
if comment is None:
comment = any("comment", [r"//[^\n]*", r"\/\*(.*?)\*\/"])
comment_start = any("comment_start", [r"\/\*"])
comment_end = any("comment_end", [r"\*\/"])
if instance is None:
instance = any("instance", [r"\bthis\b"])
number = any("number",
[r"\b[+-]?[0-9]+[lL]?\b",
r"\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\b",
r"\b[+-]?[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\b"])
sqstring = r"(\b[rRuU])?\\\n]*(\\.[^?"
dqstring = r
string = any("string", [sqstring, dqstring])
    if define is None:
        define = any("define", [r"#[^\n]*"])
return "|".join([instance, kw, comment, string, number,
comment_start, comment_end, builtin,
define, any("SYNC", [r"\n"])]) | Strongly inspired from idlelib.ColorDelegator.make_pat |
13,275 | def memberness(context):
    if context:
        # NOTE: the xpath expression and the keyword literals below were
        # stripped in the source; the values used here are placeholders,
        # not the original strings.
        texts = context.xpath('.//text()').extract()
        text = str(texts).lower()
        if len(texts) > 1:
            return 2
        elif 'members' in text:
            return 2
        elif 'member' not in text:
            return 0
        elif 'membership' in text:
            return 2
return 3 | The likelihood that the context is a "member". |
13,276 | def _load_model(self):
super()._load_model()
self.mujoco_robot.set_base_xpos([0, 0, 0])
self.model = MujocoWorldBase()
self.arena = EmptyArena()
if self.use_indicator_object:
self.arena.add_pos_indicator()
self.model.merge(self.arena)
self.model.merge(self.mujoco_robot)
self.hole_obj = self.hole.get_collision(name="hole", site=True)
self.hole_obj.set("quat", "0 0 0.707 0.707")
self.hole_obj.set("pos", "0.11 0 0.18")
self.model.merge_asset(self.hole)
self.model.worldbody.find(".//body[@name=]").append(self.hole_obj)
self.cyl_obj = self.cylinder.get_collision(name="cylinder", site=True)
self.cyl_obj.set("pos", "0 0 0.15")
self.model.merge_asset(self.cylinder)
self.model.worldbody.find(".//body[@name=]").append(self.cyl_obj)
self.model.worldbody.find(".//geom[@name=]").set("rgba", "0 1 0 1") | Loads the peg and the hole models. |
13,277 | def set_(device, **kwargs):
    empty = {'block-soft-limit': 0, 'block-hard-limit': 0,
             'file-soft-limit': 0, 'file-hard-limit': 0}
    current = None
    cmd = 'setquota'
    if 'user' in kwargs:
        cmd += ' -u {0} '.format(kwargs['user'])
        parsed = _parse_quota(device, '-u')
        if kwargs['user'] in parsed:
            current = parsed['Users'][kwargs['user']]
        else:
            current = empty
        ret = 'User: {0}'.format(kwargs['user'])
    if 'group' in kwargs:
        if 'user' in kwargs:
            raise SaltInvocationError(
                'Please specify a user or group, not both.')
        cmd += ' -g {0} '.format(kwargs['group'])
        parsed = _parse_quota(device, '-g')
        if kwargs['group'] in parsed:
            current = parsed['Groups'][kwargs['group']]
        else:
            current = empty
        ret = 'Group: {0}'.format(kwargs['group'])
    if not current:
        raise CommandExecutionError('A valid user or group was not found')
    for limit in ('block-soft-limit', 'block-hard-limit',
                  'file-soft-limit', 'file-hard-limit'):
        if limit in kwargs:
            current[limit] = kwargs[limit]
    cmd += '{0} {1} {2} {3} {4}'.format(current['block-soft-limit'],
                                        current['block-hard-limit'],
                                        current['file-soft-limit'],
                                        current['file-hard-limit'],
                                        device)
    result = __salt__['cmd.run_all'](cmd, python_shell=False)
    if result['retcode'] != 0:
        raise CommandExecutionError(
            'Unable to set desired quota. Error follows: \n{0}'.format(
                result['stderr']))
    return {ret: current} | Calls out to setquota, for a specific user or group
CLI Example:
.. code-block:: bash
salt '*' quota.set /media/data user=larry block-soft-limit=1048576
salt '*' quota.set /media/data group=painters file-hard-limit=1000 |
13,278 | def _decode_argv(self, argv, enc=None):
uargv = []
if enc is None:
enc = DEFAULT_ENCODING
for arg in argv:
if not isinstance(arg, unicode):
arg = arg.decode(enc)
uargv.append(arg)
        return uargv | decode argv if bytes, using stdin.encoding, falling back on default enc |
13,279 | def run(self):
self.OnStartup()
try:
while True:
message = self._in_queue.get()
if message is None:
break
try:
self.HandleMessage(message)
except Exception as e:
logging.warning("%s", e)
self.SendReply(
rdf_flows.GrrStatus(
status=rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR,
error_message=utils.SmartUnicode(e)),
request_id=message.request_id,
response_id=1,
session_id=message.session_id,
task_id=message.task_id,
message_type=rdf_flows.GrrMessage.Type.STATUS)
if flags.FLAGS.pdb_post_mortem:
pdb.post_mortem()
except Exception as e:
logging.error("Exception outside of the processing loop: %r", e)
finally:
logging.fatal("The client has broken out of its processing loop.")
os.kill(os.getpid(), signal.SIGKILL) | Main thread for processing messages. |
13,280 | def ExecuteCmd(cmd, quiet=False):
result = None
if quiet:
with open(os.devnull, "w") as fnull:
result = subprocess.call(cmd, shell=True, stdout=fnull, stderr=fnull)
else:
result = subprocess.call(cmd, shell=True)
return result | Run a command in a shell. |
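Usage is straightforward; a small sketch with a placeholder command:

status = ExecuteCmd("ls /tmp", quiet=True)
if status != 0:
    print("command failed with exit code", status)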
13,281 | def tar_add_bytes(tf, filename, bytestring):
    if not isinstance(bytestring, bytes):
        bytestring = bytestring.encode('ascii')
    buff = io.BytesIO(bytestring)
tarinfo = tarfile.TarInfo(filename)
tarinfo.size = len(bytestring)
tf.addfile(tarinfo, buff) | Add a file to a tar archive
Args:
tf (tarfile.TarFile): tarfile to add the file to
filename (str): path within the tar file
bytestring (bytes or str): file contents. Must be :class:`bytes` or
ascii-encodable :class:`str` |
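A usage sketch for writing in-memory content straight into an archive; the archive and member names are placeholders:

import tarfile
with tarfile.open('bundle.tar', 'w') as tf:
    tar_add_bytes(tf, 'config/settings.json', b'{"debug": false}')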
13,282 | def perm_by_group_and_perm_name(
cls, resource_id, group_id, perm_name, db_session=None
):
db_session = get_db_session(db_session)
query = db_session.query(cls.models_proxy.GroupResourcePermission)
query = query.filter(
cls.models_proxy.GroupResourcePermission.group_id == group_id
)
query = query.filter(
cls.models_proxy.GroupResourcePermission.perm_name == perm_name
)
query = query.filter(
cls.models_proxy.GroupResourcePermission.resource_id == resource_id
)
return query.first() | fetch permissions by group and permission name
:param resource_id:
:param group_id:
:param perm_name:
:param db_session:
:return: |
13,283 | def add_user_to_user_groups(self, id, **kwargs):
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
return self.add_user_to_user_groups_with_http_info(id, **kwargs)
else:
(data) = self.add_user_to_user_groups_with_http_info(id, **kwargs)
return data | Adds specific user groups to the user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_user_to_user_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of user groups that should be added to the user
:return: UserModel
If the method is called asynchronously,
returns the request thread. |
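A hedged usage sketch mirroring the docstring; `api` is a hypothetical instance of the generated client and the ids are placeholders:

# synchronous call
user = api.add_user_to_user_groups('user-123', body=['group-a', 'group-b'])
# asynchronous variant: returns a thread whose .get() yields the UserModel
thread = api.add_user_to_user_groups('user-123', body=['group-a'], async_req=True)
user = thread.get()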
13,284 | def getfile2(url, auth=None, outdir=None):
import requests
print("Retrieving: %s" % url)
fn = os.path.split(url)[-1]
if outdir is not None:
fn = os.path.join(outdir, fn)
if auth is not None:
r = requests.get(url, stream=True, auth=auth)
else:
r = requests.get(url, stream=True)
chunk_size = 1000000
    with open(fn, 'wb') as fd:
for chunk in r.iter_content(chunk_size):
fd.write(chunk) | Function to fetch files using requests
Works with https authentication |
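A minimal usage sketch; the URL, credentials, and output directory are placeholders:

import os
os.makedirs('downloads', exist_ok=True)
getfile2('https://example.com/data/dem.tif',
         auth=('user', 'secret'), outdir='downloads')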
13,285 | def get_resource_url(cls, resource, base_url):
if resource.Meta.resource_name:
            url = '{}/{}'.format(base_url, resource.Meta.resource_name)
else:
p = inflect.engine()
plural_name = p.plural(resource.Meta.name.lower())
            url = '{}/{}'.format(base_url, plural_name)
return cls._parse_url_and_validate(url) | Construct the URL for talking to this resource.
i.e.:
http://myapi.com/api/resource
Note that this is NOT the method for calling individual instances i.e.
http://myapi.com/api/resource/1
Args:
resource: The resource class instance
base_url: The Base URL of this API service.
returns:
resource_url: The URL for this resource |
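A hedged sketch of the two branches; `Client` and `BookResource` are hypothetical names, not from the record:

# with Meta.resource_name set to 'books'  -> http://myapi.com/api/books
# without it, Meta.name 'Book' is lower-cased and pluralized -> .../books
url = Client.get_resource_url(BookResource, 'http://myapi.com/api')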
13,286 | def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf() | Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance. |
13,287 | def _fire_event(self, event_name, *event_args, **event_kwargs):
if event_name in self._allowed_events:
self._logger.debug("firing handlers for event %s ", event_name)
for func, args, kwargs in self._event_handlers[event_name]:
kwargs.update(event_kwargs)
func(self, *(event_args + args), **kwargs) | Execute all the handlers associated with given event.
This method executes all handlers associated with the event
`event_name`. Optional positional and keyword arguments can be used to
pass arguments to **all** handlers added with this event. These
arguments update the arguments passed using :meth:`~ignite.engine.Engine.add_event_handler`.
Args:
event_name: event for which the handlers should be executed. Valid
events are from :class:`~ignite.engine.Events` or any `event_name` added by
:meth:`~ignite.engine.Engine.register_events`.
*event_args: optional args to be passed to all handlers.
**event_kwargs: optional keyword args to be passed to all handlers. |
13,288 | def read(self):
data = bytearray()
while True:
incoming_bytes = self.comport.inWaiting()
if incoming_bytes == 0:
break
else:
content = self.comport.read(size=incoming_bytes)
data.extend(bytearray(content))
return data | Read data from serial port and returns a ``bytearray``. |
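A usage sketch; `dev` is a hypothetical instance of the wrapper class holding an open pyserial port in self.comport:

payload = dev.read()  # drains whatever is currently buffered
print(len(payload), "bytes received")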
13,289 | def wrap(self, starter_cls):
if isinstance(starter_cls, type) and issubclass(starter_cls, ProcessStarter):
return starter_cls
        depr_msg = 'Pass a ProcessStarter for preparefunc'
warnings.warn(depr_msg, DeprecationWarning, stacklevel=3)
return functools.partial(CompatStarter, starter_cls) | If starter_cls is not a ProcessStarter, assume it's the legacy
preparefunc and return it bound to a CompatStarter. |
13,290 | def data_to_binary(self):
return bytes([
COMMAND_CODE,
self.channels_to_byte(self.led_on),
self.channels_to_byte(self.led_slow_blinking),
self.channels_to_byte(self.led_fast_blinking)
]) | :return: bytes |
13,291 | def list(self):
return [x["_id"] for x in self._db.system.js.find(projection=["_id"])] | Get a list of the names of the functions stored in this database. |
13,292 | def get_translations_sorted(self, priority, codes=None):
    # NOTE: reconstructed signature; the stripped source referenced `self`
    # and `priority` without declaring them.
    codes = codes or self.codes
    return self._get_priority_translations(priority, codes) | Returns a sorted list of (code, translation) tuples for codes |
13,293 | def labels(self):
all_labels = set(self.token_patterns.keys())
all_labels.update(self.phrase_patterns.keys())
return tuple(all_labels) | All labels present in the match patterns.
RETURNS (set): The string labels.
DOCS: https://spacy.io/api/entityruler#labels |
13,294 | def send(self, node_id, request, wakeup=True):
conn = self._conns.get(node_id)
if not conn or not self._can_send_request(node_id):
self.maybe_connect(node_id, wakeup=wakeup)
return Future().failure(Errors.NodeNotReadyError(node_id))
future = conn.send(request, blocking=False)
if wakeup:
self.wakeup()
return future | Send a request to a specific node. Bytes are placed on an
internal per-connection send-queue. Actual network I/O will be
triggered in a subsequent call to .poll()
Arguments:
node_id (int): destination node
request (Struct): request object (not-encoded)
wakeup (bool): optional flag to disable thread-wakeup
Raises:
AssertionError: if node_id is not in current cluster metadata
Returns:
Future: resolves to Response struct or Error |
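A hedged sketch of the send/poll pattern this docstring describes; `client` and `request` are placeholders, and poll() is assumed to drive the network I/O as in kafka-python:

future = client.send(0, request)
client.poll(future=future)  # drive I/O until the future resolves
if future.succeeded():
    response = future.value
else:
    raise future.exception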
13,295 | def restoreSettings(self, settings):
value = unwrapVariant(settings.value())
if value:
self.setFilenames(value.split(os.path.pathsep)) | Restores the files for this menu from the settings.
:param settings | <QSettings> |
13,296 | def run(self):
        executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=self._config['num_workers'])  # NOTE: config key elided in source; 'num_workers' is a placeholder
        now = int(datetime.datetime.utcnow().timestamp())
        start_time = ((now + 29) // 30) * 30
        self._log.info('Waiting until {} to start'.format(  # NOTE: message text elided in source
            datetime.datetime.fromtimestamp(start_time)))
while int(datetime.datetime.utcnow().timestamp()) < start_time:
time.sleep(0.1)
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(self._run_loop(executor))
except KeyboardInterrupt:
pass
finally:
            self._log.info('Shutting down')  # NOTE: message elided in source
tasks = []
for stream, item_group in self._streams:
tasks.append(stream.async_send_heap(item_group.get_end()))
loop.run_until_complete(asyncio.gather(*tasks))
        self._log.info('Done')  # NOTE: message elided in source
executor.shutdown() | Starts the sender. |
13,297 | def plot(self, channel_names, kind=,
gates=None, gate_colors=None, gate_lw=1, **kwargs):
        ax = kwargs.get('ax')
channel_names = to_list(channel_names)
gates = to_list(gates)
plot_output = graph.plotFCM(self.data, channel_names, kind=kind, **kwargs)
if gates is not None:
if gate_colors is None:
                gate_colors = cycle(('b', 'g', 'r', 'm', 'c', 'y'))
if not isinstance(gate_lw, collections.Iterable):
gate_lw = [gate_lw]
gate_lw = cycle(gate_lw)
for (g, c, lw) in zip(gates, gate_colors, gate_lw):
g.plot(ax=ax, ax_channels=channel_names, color=c, lw=lw)
return plot_output | Plot the flow cytometry data associated with the sample on the current axis.
To produce the plot, follow up with a call to matplotlib's show() function.
Parameters
----------
{graph_plotFCM_pars}
{FCMeasurement_plot_pars}
{common_plot_ax}
gates : [None, Gate, list of Gate]
Gate must be of type {_gate_available_classes}.
gate_lw: float | iterable
line width to use when drawing gates
if float, uses the same line width for all gates
if iterable, then cycles between the values
kwargs : dict
Additional keyword arguments to be passed to graph.plotFCM
Returns
-------
None : if no data is present
plot_output : output of plot command used to draw (e.g., output of hist)
Examples
--------
>>> sample.plot('Y2-A', bins=100, alpha=0.7, color='green', normed=1) # 1d histogram
>>> sample.plot(['B1-A', 'Y2-A'], cmap=cm.Oranges, colorbar=False) # 2d histogram |
13,298 | def get_cursor(cls, cursor_type=_CursorType.PLAIN) -> Cursor:
_cur = None
if cls._use_pool:
_connection_source = yield from cls.get_pool()
else:
_connection_source = yield from aiopg.connect(echo=False, **cls._connection_params)
if cursor_type == _CursorType.PLAIN:
_cur = yield from _connection_source.cursor()
if cursor_type == _CursorType.NAMEDTUPLE:
_cur = yield from _connection_source.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
if cursor_type == _CursorType.DICT:
_cur = yield from _connection_source.cursor(cursor_factory=psycopg2.extras.DictCursor)
if not cls._use_pool:
_cur = cursor_context_manager(_connection_source, _cur)
return _cur | Yields:
new client-side cursor from existing db connection pool |
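A usage sketch in the same pre-async/await coroutine style the method uses; the store class name is hypothetical:

import asyncio

@asyncio.coroutine
def fetch_one():
    cur = yield from PostgresStore.get_cursor(_CursorType.DICT)
    yield from cur.execute('SELECT 1 AS one')
    row = yield from cur.fetchone()
    return row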
13,299 | def apply_T5(word):
    T5 = ''
    WORD = word.split('.')
    for i, v in enumerate(WORD):
        if contains_VVV(v) and any(i for i in i_DIPHTHONGS if i in v):
            I = v.rfind('i') - 1 or 2
            I = I + 2 if is_consonant(v[I - 1]) else I
            WORD[i] = v[:I] + '.' + v[I:]
            T5 = ' T5'
    word = '.'.join(WORD)
return word, T5 | If a (V)VVV-sequence contains a VV-sequence that could be an /i/-final
diphthong, there is a syllable boundary between it and the third vowel,
e.g., [raa.ois.sa], [huo.uim.me], [la.eis.sa], [sel.vi.äi.si], [tai.an],
[säi.e], [oi.om.me]. |