Unnamed: 0 (int64, 0 to 389k) | code (string, lengths 26 to 79.6k) | docstring (string, lengths 1 to 46.9k) |
---|---|---|
19,000 | def enable_backups(self, table_name, model):
self._tables.pop(table_name, None)
request = {
"TableName": table_name,
"PointInTimeRecoverySpecification": {"PointInTimeRecoveryEnabled": True}
}
try:
self.dynamodb_client.update_continuous_backups(**request)
except botocore.exceptions.ClientError as error:
raise BloopException("Unexpected error while setting Continuous Backups.") from error | Calls UpdateContinuousBackups on the table according to model.Meta["continuous_backups"]
:param table_name: The name of the table to enable Continuous Backups on
:param model: The model to get Continuous Backups settings from |
19,001 | def _producer_wrapper(f, port, addr='tcp://127.0.0.1'):
try:
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(':'.join([addr, str(port)]))
f(socket)
finally:
context.destroy() | A shim that sets up a socket and starts the producer callable.
Parameters
----------
f : callable
Callable that takes a single argument, a handle
for a ZeroMQ PUSH socket. Must be picklable.
port : int
The port on which the socket should connect.
addr : str, optional
Address to which the socket should connect. Defaults
to localhost ('tcp://127.0.0.1'). |
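A minimal usage sketch for the wrapper above, assuming pyzmq is available; the producer callable, port number, and the multiprocessing hint are illustrative, not part of the original code:

def produce(socket):
    # Receives only the connected PUSH socket; must be picklable.
    for i in range(10):
        socket.send_pyobj(i)

# Usually launched in a worker process, e.g.:
# multiprocessing.Process(target=_producer_wrapper, args=(produce, 5555)).start()
_producer_wrapper(produce, 5555)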
19,002 | def rigid_transform_from_ros(from_frame, to_frame, service_name='rigid_transform_listener', namespace=None):
if namespace is None:
service_name = rospy.get_namespace() + service_name
else:
service_name = namespace + service_name
rospy.wait_for_service(service_name, timeout = 10)
listener = rospy.ServiceProxy(service_name, RigidTransformListener)
ret = listener(from_frame, to_frame)
quat = np.asarray([ret.w_rot, ret.x_rot, ret.y_rot, ret.z_rot])
trans = np.asarray([ret.x_trans, ret.y_trans, ret.z_trans])
rot = RigidTransform.rotation_from_quaternion(quat)
return RigidTransform(rotation=rot, translation=trans, from_frame=from_frame, to_frame=to_frame) | Gets transform from ROS as a rigid transform
Requires ROS rigid_transform_publisher service to be running. Assuming autolab_core is installed as a catkin package,
this can be done with: roslaunch autolab_core rigid_transforms.launch
Parameters
----------
from_frame : :obj:`str`
to_frame : :obj:`str`
service_name : string, optional
RigidTransformListener service to interface with. If the RigidTransformListener services are started through
rigid_transforms.launch it will be called rigid_transform_listener
namespace : string, optional
Namespace to prepend to transform_listener_service. If None, current namespace is prepended.
Raises
------
rospy.ServiceException
If service call to rigid_transform_listener fails |
19,003 | def copy(self, **replacements):
cls = type(self)
kwargs = {'org': self.org, 'name': self.name, 'ext': self.ext, 'classifier': self.classifier, 'rev': self.rev}
for key, val in replacements.items():
kwargs[key] = val
return cls(**kwargs) | Returns a clone of this M2Coordinate with the given replacements kwargs overlaid. |
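Usage is a one-liner; a hedged sketch (the constructor arguments mirror the kwargs built above, but the exact signature is an assumption):

coord = M2Coordinate(org='com.example', name='mylib', rev='1.0.0', classifier=None, ext='jar')
bumped = coord.copy(rev='1.1.0')   # identical coordinate with only the revision replaced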
19,004 | def _SignedVarintEncoder():
def EncodeSignedVarint(write, value):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(six.int2byte(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(six.int2byte(bits))
return EncodeSignedVarint | Return an encoder for a basic signed varint value (does not include
tag). |
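A short worked example of the returned encoder: negative values are first mapped into the unsigned 64-bit range, so -1 becomes 2**64 - 1 and packs into ten varint bytes (nine 0xFF followed by 0x01). The snippet assumes the surrounding module's `six` import, as in the code above:

encode = _SignedVarintEncoder()

out = bytearray()
encode(out.extend, 300)               # write callback receives one byte at a time
assert bytes(out) == b'\xac\x02'      # 300 -> 0xAC 0x02, same as an unsigned varint

out = bytearray()
encode(out.extend, -1)                # -1 -> 2**64 - 1
assert bytes(out) == b'\xff' * 9 + b'\x01'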
19,005 | def generate_graphs(result, topic, aspect, for_doc=False):
result = result
topic = topic
aspect = aspect
for_doc = for_doc
with open(result, 'r') as f:
lines = f.readlines()
platform = if in lines[2] else
idx = [i for i, line in enumerate(lines) if line.startswith()]
table_spans = {: [idx[1] + 1, idx[2]],
: [idx[4] + 1, idx[5]],
: [idx[7] + 1, idx[8]],
: [idx[10] + 1, idx[11]],
: [idx[13] + 1, idx[14]]}
start, stop = table_spans[topic.lower()]
items = [l[:50].strip() for l in lines[start: stop]]
time = np.array([int(l[50:61].strip()) for l in lines[start: stop]])
ram = np.array([int(l[61:].strip()) for l in lines[start: stop]])
if aspect == 'ram':
array = ram
else:
array = time
y_pos = list(range(len(items)))
fig, ax = plt.subplots()
fig.set_size_inches(15, 3.8 / 12 * len(items) + 1.2)
asam_pos = [i for i, c in enumerate(items) if c.startswith()]
mdfreader_pos = [i for i, c in enumerate(items) if c.startswith()]
ax.barh(asam_pos, array[asam_pos], color=, ecolor=)
ax.barh(mdfreader_pos, array[mdfreader_pos], color=, ecolor=)
ax.set_yticks(y_pos)
ax.set_yticklabels(items)
ax.invert_yaxis()
ax.set_xlabel( if aspect == else )
if topic == :
ax.set_title(
.format( if aspect == else ))
else:
ax.set_title(
.format(topic, if aspect == else ))
ax.xaxis.grid()
fig.subplots_adjust(bottom=0.72/fig.get_figheight(),
top=1-0.48/fig.get_figheight(),
left=0.4,
right=0.9)
if aspect == 'ram':
if topic == :
name = .format(platform)
else:
name = .format(platform, topic.lower())
else:
if topic == :
name = .format(platform)
else:
name = .format(platform, topic.lower())
if for_doc:
plt.show()
else:
plt.savefig(name, dpi=300) | Generate graphs from the result file
Parameters
----------
result : str
path to result file
topic : str
benchmark topic; for example "Open file" or "Save file"
aspect : str
performance indicator; can be "ram" (RAM memory usage) or "time" (elapsed time)
for_doc : bool
whether the source code is used inside the documentation |
19,006 | def parse_item(self, response):
if URLObject(response.url).path == LOGIN_HTML_PATH:
reqs = self.handle_unexpected_redirect_to_login_page(response)
for req in reqs:
yield req
title = response.xpath("//title/text()").extract_first()
if title:
title = title.strip()
request_headers = {key: value
for key, value
in response.request.headers.items()}
item = A11yItem(
url=response.url,
request_headers=request_headers,
accessed_at=datetime.utcnow(),
page_title=title,
)
yield item | Get basic information about a page, so that it can be passed to the
`pa11y` tool for further testing.
@url https://www.google.com/
@returns items 1 1
@returns requests 0 0
@scrapes url request_headers accessed_at page_title |
19,007 | def draw_interface(objects, callback, callback_text):
screen = curses.initscr()
height, width = screen.getmaxyx()
curses.noecho()
curses.cbreak()
curses.start_color()
screen.keypad( 1 )
curses.init_pair(1,curses.COLOR_BLACK, curses.COLOR_CYAN)
highlightText = curses.color_pair( 1 )
normalText = curses.A_NORMAL
screen.border( 0 )
curses.curs_set( 0 )
max_row = height - 15
box = curses.newwin( max_row + 2, int(width - 2), 1, 1 )
box.box()
fmt = PartialFormatter()
row_num = len( objects )
pages = int( ceil( row_num / max_row ) )
position = 1
page = 1
for i in range( 1, max_row + 1 ):
if row_num == 0:
box.addstr( 1, 1, "There arenstringstringt strings", highlightText )
else:
if ( i + ( max_row * ( page - 1 ) ) == position + ( max_row * ( page - 1 ) ) ):
box.addstr( i - ( max_row * ( page - 1 ) ), 2, str( i ) + " - " + objects[ i - 1 ][ "string" ], highlightText )
else:
box.addstr( i - ( max_row * ( page - 1 ) ), 2, str( i ) + " - " + objects[ i - 1 ][ "string" ], normalText )
if i == row_num:
break
screen.refresh()
box.refresh()
x = screen.getch()
curses.endwin()
exit() | Draws an ncurses interface based on the given object list. Every object should have a "string" key, which is what is displayed on the screen; the callback is called with the selected object.
Rest of the code is modified from:
https://stackoverflow.com/a/30834868 |
19,008 | def _ensure_batch_is_sufficiently_small(
self,
batch_instances: Iterable[Instance],
excess: Deque[Instance]) -> List[List[Instance]]:
if self._maximum_samples_per_batch is None:
assert not excess
return [list(batch_instances)]
key, limit = self._maximum_samples_per_batch
batches: List[List[Instance]] = []
batch: List[Instance] = []
padding_length = -1
excess.extend(batch_instances)
while excess:
instance = excess.popleft()
if self.vocab is not None:
instance.index_fields(self.vocab)
field_lengths = instance.get_padding_lengths()
for _, lengths in field_lengths.items():
try:
padding_length = max(padding_length,
lengths[key])
except KeyError:
pass
proposed_batch_size = len(batch) + 1
if proposed_batch_size >= self._batch_size or padding_length * proposed_batch_size > limit:
batches.append(batch)
excess.appendleft(instance)
batch = []
padding_length = -1
else:
batch.append(instance)
excess.extend(batch)
return batches | If self._maximum_samples_per_batch is specified, then split the batch
into smaller sub-batches if it exceeds the maximum size.
Parameters
----------
batch_instances : ``Iterable[Instance]``
A candidate batch.
excess : ``Deque[Instance]``
Instances that were not sufficient to form an entire batch
previously. They will be used as part of the first sub-batch. This
will be populated with instances from the end of batch_instances
that do not consist of more than self._maximum_samples_per_batch
samples or self._batch_size instances. It is the caller's
responsibility to place these in a batch too, which may, of course,
be done in part with subsequent calls to this method.
WARNING: Mutated in place! |
19,009 | def runlist_list(**kwargs):
ctx = Context(**kwargs)
ctx.execute_action(, **{
: ctx.repo.create_secure_service(),
}) | Show uploaded runlists. |
19,010 | def CheckVersion(problems, latest_version=None):
if not latest_version:
timeout = 20
socket.setdefaulttimeout(timeout)
request = urllib2.Request(LATEST_RELEASE_VERSION_URL)
try:
response = urllib2.urlopen(request)
content = response.read()
m = re.search(r, content)
if m:
latest_version = m.group(1)
except urllib2.HTTPError as e:
description = (
%
(e.reason, e.code))
problems.OtherProblem(
description=description, type=errors.TYPE_NOTICE)
return
except urllib2.URLError as e:
description = (
% e.reason)
problems.OtherProblem(
description=description, type=errors.TYPE_NOTICE)
return
if not latest_version:
description = (
% LATEST_RELEASE_VERSION_URL)
problems.OtherProblem(
description=description, type=errors.TYPE_NOTICE)
return
newest_version = _MaxVersion([latest_version, __version__])
if __version__ != newest_version:
problems.NewVersionAvailable(newest_version) | Check if there is a newer version of transitfeed available.
Args:
problems: if a new version is available, a NewVersionAvailable problem will
be added
latest_version: if specified, override the latest version read from the
project page |
19,011 | def normalize_extension(extension):
extension = decode_path(extension)
if extension is None:
return
if extension.startswith('.'):
extension = extension[1:]
if '.' in extension:
_, extension = os.path.splitext(extension)
extension = slugify(extension, sep=)
if extension is None:
return
if len(extension):
return extension | Normalise a file name extension. |
19,012 | def ok(self, text=u"OK", err=False):
self._text = None
_text = to_text(text) if text else u"OK"
err = err or not self.write_to_stdout
self._freeze(_text, err=err) | Set Ok (success) finalizer to a spinner. |
19,013 | def proxy_alias(alias_name, node_type):
proxy = type(
alias_name,
(lazy_object_proxy.Proxy,),
{
"__class__": object.__dict__["__class__"],
"__instancecheck__": _instancecheck,
},
)
return proxy(lambda: node_type) | Get a Proxy from the given name to the given node type. |
19,014 | def is_vert_aligned(c):
return all(
[
_to_span(c[i]).sentence.is_visual()
and bbox_vert_aligned(
bbox_from_span(_to_span(c[i])), bbox_from_span(_to_span(c[0]))
)
for i in range(len(c))
]
) | Return true if all the components of c are vertically aligned.
Vertical alignment means that the bounding boxes of each Mention of c
shares a similar x-axis value in the visual rendering of the document.
:param c: The candidate to evaluate
:rtype: boolean |
19,015 | def get_distance(
self, l_motor: float, r_motor: float, tm_diff: float
) -> typing.Tuple[float, float]:
x = 0
y = 0
angle = 0
total_time = int(tm_diff * 100000)
steps = total_time // self._timestep
remainder = total_time % self._timestep
step = self._timestep / 100000.0
if remainder:
last_step = remainder / 100000.0
steps += 1
else:
last_step = step
while steps != 0:
if steps == 1:
tm_diff = last_step
else:
tm_diff = step
steps -= 1
l = self._lmotor.compute(-l_motor, tm_diff)
r = self._rmotor.compute(r_motor, tm_diff)
velocity = (l + r) * 0.5
rotation = self._bm * (l - r) / self._inertia
distance = velocity * tm_diff
turn = rotation * tm_diff
x += distance * math.cos(angle)
y += distance * math.sin(angle)
angle += turn
return x, y, angle | Given motor values and the amount of time elapsed since this was last
called, retrieves the x,y,angle that the robot has moved. Pass these
values to :meth:`PhysicsInterface.distance_drive`.
To update your encoders, use the ``l_position`` and ``r_position``
attributes of this object.
:param l_motor: Left motor value (-1 to 1); -1 is forward
:param r_motor: Right motor value (-1 to 1); 1 is forward
:param tm_diff: Elapsed time since last call to this function
:returns: x travel, y travel, angle turned (radians)
.. note:: If you are using more than 2 motors, it is assumed that
all motors on each side are set to the same speed. Only
pass in one of the values from each side |
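A hedged sketch of how this is typically wired into a physics engine's update loop; the attribute names (`self.drivetrain`, `self.physics_controller`) and the PWM keys are illustrative assumptions:

def update_sim(self, hal_data, now, tm_diff):
    # Motor outputs in the -1..1 range, read from the simulated hardware.
    l_motor = hal_data['pwm'][1]['value']
    r_motor = hal_data['pwm'][2]['value']

    x, y, angle = self.drivetrain.get_distance(l_motor, r_motor, tm_diff)
    self.physics_controller.distance_drive(x, y, angle)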
19,016 | def transfer(self, receiver_address, amount, from_account):
tx_hash = self.send_transaction(
,
(receiver_address,
amount),
transact={: from_account.address,
: from_account.password}
)
return self.get_tx_receipt(tx_hash).status == 1 | Transfer tokens from one account to the receiver address.
:param receiver_address: Address of the transfer receiver, str
:param amount: Amount of tokens, int
:param from_account: Sender account, Account
:return: bool |
19,017 | def load_core_file(core_fp):
core = {}
with open(core_fp) as in_f:
for line in in_f.read().splitlines():
if not line.startswith("
otu_id, tax = line.split("\t")
core[otu_id] = oc.otu_name(ast.literal_eval(tax))
return core | For core OTU data file, returns Genus-species identifier for each data
entry.
:type core_fp: str
:param core_fp: A file containing core OTU data.
:rtype: str
:return: Returns genus-species identifier based on identified taxonomical
level. |
19,018 | def get_random_proxy(self):
idx = randint(0, len(self._list) - 1)
return self._list[idx] | Return random proxy |
19,019 | def close(self):
with self._close_lock:
epfd = self._epfd
if epfd >= 0:
self._epfd = -1
close(epfd) | Close the internal epoll file descriptor if it isn't closed
:raises OSError:
If the underlying ``close(2)`` fails. The error message matches
those found in the manual page. |
19,020 | def write_bus_data(self, file):
writer = self._get_writer(file)
writer.writerow(BUS_ATTRS)
for bus in self.case.buses:
writer.writerow([getattr(bus, attr) for attr in BUS_ATTRS]) | Writes bus data as CSV. |
19,021 | def register_blueprint(self, blueprint):
if blueprint not in self._blueprint_known:
self.app.register_blueprint(blueprint)
self._blueprint_known.add(blueprint) | Register given blueprint on curren app.
This method is provided for using inside plugin's module-level
:func:`register_plugin` functions.
:param blueprint: blueprint object with plugin endpoints
:type blueprint: flask.Blueprint |
19,022 | def register(self, app, options):
url_prefix = options.get('url_prefix', self.url_prefix)
for future in self.routes:
future.handler.__blueprintname__ = self.name
uri = url_prefix + future.uri if url_prefix else future.uri
app.route(
uri=uri[1:] if uri.startswith('//') else uri,
methods=future.methods,
host=future.host or self.host,
strict_slashes=future.strict_slashes,
stream=future.stream
)(future.handler)
for future in self.middlewares:
if future.args or future.kwargs:
app.middleware(*future.args,
**future.kwargs)(future.middleware)
else:
app.middleware(future.middleware)
for future in self.exceptions:
app.exception(*future.args, **future.kwargs)(future.handler)
for future in self.statics:
uri = url_prefix + future.uri if url_prefix else future.uri
app.static(uri, future.file_or_directory,
*future.args, **future.kwargs)
for event, listeners in self.listeners.items():
for listener in listeners:
app.listener(event)(listener) | Register the blueprint to the mach9 app. |
19,023 | def update_identity(self, identity, identity_id):
route_values = {}
if identity_id is not None:
route_values[] = self._serialize.url(, identity_id, )
content = self._serialize.body(identity, )
self._send(http_method=,
location_id=,
version=,
route_values=route_values,
content=content) | UpdateIdentity.
:param :class:`<Identity> <azure.devops.v5_0.identity.models.Identity>` identity:
:param str identity_id: |
19,024 | def formatdate(timeval=None, localtime=False, usegmt=False):
if timeval is None:
timeval = time.time()
if localtime:
now = time.localtime(timeval)
if time.daylight and now[-1]:
offset = time.altzone
else:
offset = time.timezone
hours, minutes = divmod(abs(offset), 3600)
if offset > 0:
sign = '-'
else:
sign = '+'
zone = '%s%02d%02d' % (sign, hours, minutes // 60)
else:
now = time.gmtime(timeval)
if usegmt:
zone = 'GMT'
else:
zone = '-0000'
return _format_timetuple_and_zone(now, zone) | Returns a date string as specified by RFC 2822, e.g.:
Fri, 09 Nov 2001 01:08:47 -0000
Optional timeval if given is a floating point time value as accepted by
gmtime() and localtime(), otherwise the current time is used.
Optional localtime is a flag that when True, interprets timeval, and
returns a date relative to the local timezone instead of UTC, properly
taking daylight savings time into account.
Optional argument usegmt means that the timezone is written out as
an ascii string, not numeric one (so "GMT" instead of "+0000"). This
is needed for HTTP, and is only used when localtime==False. |
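For example, with the fixed timestamp behind the docstring's sample date (output strings shown as comments; the exact rendering relies on the module's `_format_timetuple_and_zone` helper):

formatdate(1005268127.0)                 # 'Fri, 09 Nov 2001 01:08:47 -0000'
formatdate(1005268127.0, usegmt=True)    # 'Fri, 09 Nov 2001 01:08:47 GMT'
formatdate()                             # current time in the same RFC 2822 form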
19,025 | def p_DefaultValue_string(p):
p[0] = model.Value(type=model.Value.STRING, value=p[1]) | DefaultValue : STRING |
19,026 | def get_error(self, block=False, timeout=None):
try:
error = self._errors.get(block=block, timeout=timeout)
return error
except Exception:
return None | Removes and returns an error from self._errors
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
error if inbox is not empty, else None |
19,027 | def _find_set_members(set):
cmd = '{0} list {1}'.format(_ipset_cmd(), set)
out = __salt__['cmd.run_all'](cmd, python_shell=False)
if out['retcode'] > 0:
startMembers = True
return members | Return list of members for a set |
19,028 | def rps_at(self, t):
if 0 <= t <= self.duration:
return self.minrps + \
float(self.maxrps - self.minrps) * t / self.duration
else:
return 0 | Return rps for second t |
19,029 | def put_archive(self, path, data):
return self.client.api.put_archive(self.id, path, data) | Insert a file or folder in this container using a tar archive as
source.
Args:
path (str): Path inside the container where the file(s) will be
extracted. Must exist.
data (bytes): tar data to be extracted
Returns:
(bool): True if the call succeeds.
Raises:
:py:class:`~docker.errors.APIError` If an error occurs. |
19,030 | def nvrtcGetPTX(self, prog):
size = c_size_t()
code = self._lib.nvrtcGetPTXSize(prog, byref(size))
self._throw_on_error(code)
buf = create_string_buffer(size.value)
code = self._lib.nvrtcGetPTX(prog, buf)
self._throw_on_error(code)
return buf.value.decode() | Returns the compiled PTX for the NVRTC program object. |
19,031 | def outdict(self, ndigits=3):
output = self.__dict__.copy()
for item in output:
output[item] = round(output[item], ndigits)
return output | Return dictionary structure rounded to a given precision. |
19,032 | def AutorizarAnticipo(self):
"Autorizar Anticipo de una Liquidación Primaria Electrónica de Granos"
anticipo = {"liquidacion": self.liquidacion}
liq = anticipo["liquidacion"]
liq["campaniaPpal"] = self.liquidacion["campaniaPPal"]
liq["codLocProcedencia"] = self.liquidacion["codLocalidadProcedencia"]
liq["descPuertoLocalidad"] = self.liquidacion["desPuertoLocalidad"]
if self.opcionales:
liq['opcionales'] = self.opcionales
if self.retenciones:
anticipo['retenciones'] = self.retenciones
if self.deducciones:
anticipo['deducciones'] = self.deducciones
ret = self.client.lpgAutorizarAnticipo(
auth={
: self.Token, : self.Sign,
: self.Cuit, },
anticipo=anticipo,
)
ret = ret[]
self.__analizar_errores(ret)
self.AnalizarLiquidacion(ret.get(), self.liquidacion)
return True | Authorize an advance (Anticipo) for an Electronic Primary Grain Settlement (Liquidación Primaria Electrónica de Granos) |
19,033 | def logtrace(logger, msg, *args, **kwargs):
if esgfpid.defaults.LOG_TRACE_TO_DEBUG:
logdebug(logger, '[trace] %s' % msg, *args, **kwargs)
else:
pass | If esgfpid.defaults.LOG_TRACE_TO_DEBUG, messages are treated
like debug messages (with an added [trace]).
Otherwise, they are ignored. |
19,034 | def _sendDDEcommand(self, cmd, timeout=None):
reply = self.conversation.Request(cmd, timeout)
if self.pyver > 2:
reply = reply.decode().rstrip()
return reply | Send command to DDE client |
19,035 | def listIterators(self, login, tableName):
self.send_listIterators(login, tableName)
return self.recv_listIterators() | Parameters:
- login
- tableName |
19,036 | def write(self, nb, fp, **kwargs):
return fp.write(self.writes(nb,**kwargs)) | Write a notebook to a file like object |
19,037 | def unpack_fixed8(src):
dec_part = unpack_ui8(src)
int_part = unpack_ui8(src)
return int_part + dec_part / 256 | Get a FIXED8 value. |
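A brief worked example: the fractional byte is read first, so the two bytes 0x80 0x07 decode to 7 + 128/256 = 7.5. The BytesIO source is an assumption about what `unpack_ui8` accepts (a file-like stream):

import io

src = io.BytesIO(b'\x80\x07')   # fractional byte 0x80, then integer byte 0x07
assert unpack_fixed8(src) == 7.5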
19,038 | def _start_of_century(self):
year = self.year - 1 - (self.year - 1) % YEARS_PER_CENTURY + 1
return self.set(year, 1, 1) | Reset the date to the first day of the century.
:rtype: Date |
19,039 | def just(*args):
assert len(args) >= 1
if len(args) == 1:
try:
return cycle(set(args))
except:
return cycle(args)
else:
return cycle({args}) | this works as an infinite loop that yields
the given argument(s) over and over |
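Usage is plain iteration over the returned cycle; note that with several arguments the whole tuple is yielded each time (a sketch):

ones = just(1)
assert [next(ones) for _ in range(4)] == [1, 1, 1, 1]

pair = just('a', 'b')
assert next(pair) == ('a', 'b')   # the tuple itself repeats forever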
19,040 | def create_key(file_):
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 2048)
file_.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
file_.flush() | Create a key and save it into ``file_``.
Note that ``file`` must be opened in binary mode. |
19,041 | def decode_source(source_bytes):
newline_decoder = io.IncrementalNewlineDecoder(None, True)
return newline_decoder.decode(source_to_unicode(source_bytes)) | Decode bytes representing source code and return the string.
Universal newline support is used in the decoding. |
19,042 | def unhook_wnd_proc(self):
if not self.__local_wnd_proc_wrapped:
return
SetWindowLong(self.__local_win_handle,
GWL_WNDPROC,
self.__old_wnd_proc)
self.__local_wnd_proc_wrapped = None | Restore previous Window message handler |
19,043 | def percentile(self, p):
if not (0 <= p <= 100):
raise ValueError("p must be between 0 and 100, inclusive.")
p = float(p)/100.
p *= self.n
c_i = None
t = 0
if p == 0:
return self.C.min_item()[1].mean
for i, key in enumerate(self.C.keys()):
c_i_plus_one = self.C[key]
if i == 0:
k = c_i_plus_one.count / 2
else:
k = (c_i_plus_one.count + c_i.count) / 2.
if p < t + k:
z1 = p - t
z2 = t + k - p
return (c_i.mean * z2 + c_i_plus_one.mean * z1) / (z1 + z2)
c_i = c_i_plus_one
t += k
return self.C.max_item()[1].mean | Computes the percentile of a specific value in [0,100]. |
19,044 | def _unpack(c, tmp, package, version, git_url=None):
real_version = version[:]
source = None
if git_url:
pass
else:
cwd = os.getcwd()
print("Moving into temp dir %s" % tmp)
os.chdir(tmp)
try:
flags = "--download=. --build=build --no-use-wheel"
cmd = "pip install %s %s==%s" % (flags, package, version)
c.run(cmd)
globs = []
globexpr = ""
for extension, opener in (
("zip", "unzip"),
("tgz", "tar xzvf"),
("tar.gz", "tar xzvf"),
):
globexpr = "*.{0}".format(extension)
globs = glob(globexpr)
if globs:
break
archive = os.path.basename(globs[0])
source, _, _ = archive.rpartition(".{0}".format(extension))
c.run("{0} {1}".format(opener, globexpr))
finally:
os.chdir(cwd)
return real_version, source | Download + unpack given package into temp dir ``tmp``.
Return ``(real_version, source)`` where ``real_version`` is the "actual"
version downloaded (e.g. if a Git master was indicated, it will be the SHA
of master HEAD) and ``source`` is the source directory (relative to
unpacked source) to import into ``<project>/vendor``. |
19,045 | def get_dataframe_from_variable(nc, data_var):
time_var = nc.get_variables_by_attributes(standard_name='time')[0]
depth_vars = nc.get_variables_by_attributes(axis=lambda v: v is not None and v.lower() == 'z')
depth_vars += nc.get_variables_by_attributes(standard_name=lambda v: v in [, ], positive=lambda x: x is not None)
depth_var = None
for d in depth_vars:
try:
if d._name in data_var.coordinates.split(" ") or d._name in data_var.dimensions:
depth_var = d
break
except AttributeError:
continue
times = netCDF4.num2date(time_var[:], units=time_var.units, calendar=getattr(time_var, 'calendar', 'standard'))
original_times_size = times.size
if depth_var is None and hasattr(data_var, 'sensor_depth'):
depth_type = get_type(data_var.sensor_depth)
depths = np.asarray([data_var.sensor_depth] * len(times)).flatten()
values = data_var[:].flatten()
elif depth_var is None:
depths = np.asarray([np.nan] * len(times)).flatten()
depth_type = get_type(depths)
values = data_var[:].flatten()
else:
depths = depth_var[:]
depth_type = get_type(depths)
if len(data_var.shape) > 1:
times = np.repeat(times, depths.size)
depths = np.tile(depths, original_times_size)
values = data_var[:, :].flatten()
else:
values = data_var[:].flatten()
if getattr(depth_var, 'positive', '').lower() == 'up':
logger.warning("Converting depths to positive down before returning the DataFrame")
depths = depths * -1
return df | Returns a Pandas DataFrame of the data.
This always returns positive down depths |
19,046 | def copy(self):
n = SegmentList()
n._list = [ a.copy() for a in self._list ]
n._bytes_occupied = self._bytes_occupied
return n | Make a copy of the SegmentList.
:return: A copy of the SegmentList instance.
:rtype: angr.analyses.cfg_fast.SegmentList |
19,047 | def update(self, slug):
post_data = self.get_post_data()
post_data['user_name'] = self.userinfo.user_name
pageinfo = MWiki.get_by_uid(slug)
cnt_old = tornado.escape.xhtml_unescape(pageinfo.cnt_md).strip()
cnt_new = post_data['cnt_md'].strip()
if cnt_old == cnt_new:
pass
else:
MWikiHist.create_wiki_history(MWiki.get_by_uid(slug))
MWiki.update(slug, post_data)
tornado.ioloop.IOLoop.instance().add_callback(self.cele_gen_whoosh)
self.redirect(.format(post_data[])) | Update the page. |
19,048 | def handle_response (response):
response = json.loads(response.read())
if 'error' in response:
raise ValueError(response['error'])
else:
try:
return json.loads(response[])
except (TypeError, json.decoder.JSONDecodeError):
if response[] == :
return float()
else:
return response[] | Handle a response from the newton API |
19,049 | def enable_directory_service(self, check_peer=False):
if check_peer:
return self.set_directory_service(check_peer=True)
return self.set_directory_service(enabled=True) | Enable the directory service.
:param check_peer: If True, enables server authenticity
enforcement. If False, enables directory
service integration.
:type check_peer: bool, optional
:returns: A dictionary describing the status of the directory service.
:rtype: ResponseDict |
19,050 | def _get_on_name(self, func):
r = re.match("^on_(.+)$", func.__name__)
if r:
event = r.group(1)
else:
raise ValueError("The function name should be "
"`on_<eventname>`().")
return event | Return `eventname` when the function name is `on_<eventname>()`. |
19,051 | def update_thesis_information(self):
fields_501 = record_get_field_instances(self.record, )
for field in fields_501:
new_subs = []
for key, value in field[0]:
if key == :
new_subs.append((, value))
elif key == :
new_subs.append((, value))
elif key == :
new_subs.append((, value))
else:
new_subs.append((key, value))
record_delete_field(self.record, tag="502",
field_position_global=field[4])
record_add_field(self.record, "502", subfields=new_subs) | 501 degree info - move subfields. |
19,052 | def get(self, name):
if not self.loaded:
raise RegistryNotLoaded(self)
if not self._registry.get(name):
raise NotificationNotRegistered(
f"Notification not registered. Got ."
)
return self._registry.get(name) | Returns a Notification by name. |
19,053 | def ungrist (value):
assert is_iterable_typed(value, basestring) or isinstance(value, basestring)
def ungrist_one (value):
stripped = __re_grist_content.match (value)
if not stripped:
raise BaseException ("in ungrist: is not of the form <.*>" % value)
return stripped.group (1)
if isinstance (value, str):
return ungrist_one (value)
else:
return [ ungrist_one (v) for v in value ] | Returns the value without grist.
If value is a sequence, does it for every value and returns the result as a sequence. |
19,054 | def _relative_attention_inner(x, y, z, transpose):
batch_size = tf.shape(x)[0]
heads = x.get_shape().as_list()[1]
length = tf.shape(x)[2]
xy_matmul = tf.matmul(x, y, transpose_b=transpose)
x_t = tf.transpose(x, [2, 0, 1, 3])
x_t_r = tf.reshape(x_t, [length, heads * batch_size, -1])
x_tz_matmul = tf.matmul(x_t_r, z, transpose_b=transpose)
x_tz_matmul_r = tf.reshape(x_tz_matmul, [length, batch_size, heads, -1])
x_tz_matmul_r_t = tf.transpose(x_tz_matmul_r, [1, 2, 0, 3])
return xy_matmul + x_tz_matmul_r_t | Relative position-aware dot-product attention inner calculation.
This batches matrix multiply calculations to avoid unnecessary broadcasting.
Args:
x: Tensor with shape [batch_size, heads, length or 1, length or depth].
y: Tensor with shape [batch_size, heads, length or 1, depth].
z: Tensor with shape [length or 1, length, depth].
transpose: Whether to transpose inner matrices of y and z. Should be true if
last dimension of x is depth, not length.
Returns:
A Tensor with shape [batch_size, heads, length, length or depth]. |
19,055 | def _download_initial_config(self):
_initial_config = self._download_running_config()
self._last_working_config = _initial_config
self._config_history.append(_initial_config)
self._config_history.append(_initial_config) | Loads the initial config. |
19,056 | def receive_request(self, transaction):
with transaction:
transaction.separate_timer = self._start_separate_timer(transaction)
self._blockLayer.receive_request(transaction)
if transaction.block_transfer:
self._stop_separate_timer(transaction.separate_timer)
self._messageLayer.send_response(transaction)
self.send_datagram(transaction.response)
return
self._observeLayer.receive_request(transaction)
self._requestLayer.receive_request(transaction)
if transaction.resource is not None and transaction.resource.changed:
self.notify(transaction.resource)
transaction.resource.changed = False
elif transaction.resource is not None and transaction.resource.deleted:
self.notify(transaction.resource)
transaction.resource.deleted = False
self._observeLayer.send_response(transaction)
self._blockLayer.send_response(transaction)
self._stop_separate_timer(transaction.separate_timer)
self._messageLayer.send_response(transaction)
if transaction.response is not None:
if transaction.response.type == defines.Types["CON"]:
self._start_retransmission(transaction, transaction.response)
self.send_datagram(transaction.response) | Handle requests coming from the udp socket.
:param transaction: the transaction created to manage the request |
19,057 | def get_or_create_namespace(self, url: str) -> Union[Namespace, Dict]:
result = self.get_namespace_by_url(url)
if result is not None:
return result
t = time.time()
bel_resource = get_bel_resource(url)
_clean_bel_namespace_values(bel_resource)
values = bel_resource[]
if not_resource_cachable(bel_resource):
log.debug(, url, len(values), time.time() - t)
log.debug(, url, len(values))
return values
namespace_insert_values = _get_namespace_insert_values(bel_resource)
namespace = Namespace(
url=url,
**namespace_insert_values
)
namespace.entries = [
NamespaceEntry(name=name, encoding=encoding)
for name, encoding in values.items()
]
log.info(, url, len(values), time.time() - t)
self.session.add(namespace)
self.session.commit()
return namespace | Insert the namespace file at the given location to the cache.
If not cachable, returns the dict of the values of this namespace.
:raises: pybel.resources.exc.ResourceError |
19,058 | def quotation_markers(self, value):
if value is not None:
assert type(value) in (tuple, list), " attribute: type is not or !".format(
"quotation_markers", value)
for element in value:
assert type(element) is unicode, " attribute: type is not !".format(
"quotation_markers", element)
assert len(element) == 1, " attribute: has multiples characters!".format("quotation_markers",
element)
assert not re.search(r"\w", element), " attribute: is an alphanumeric character!".format(
"quotation_markers", element)
self.__quotation_markers = value | Setter for **self.__quotation_markers** attribute.
:param value: Attribute value.
:type value: tuple or list |
19,059 | def set_meta_rdf(self, rdf, fmt=):
evt = self._client._request_entity_meta_set(self.__lid, rdf, fmt=fmt)
self._client._wait_and_except_if_failed(evt) | Set the metadata for this Thing in RDF fmt
Advanced users who want to manipulate the RDF for this Thing directly without the
[ThingMeta](ThingMeta.m.html#IoticAgent.IOT.ThingMeta.ThingMeta) helper object
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`fmt` (optional) (string) The format of RDF you have sent.
Valid formats are: "xml", "n3", "turtle" |
19,060 | def object_ref(self):
return ImmutableDict(type=self.type, category_id=self.category_id, event_id=self.event_id,
session_id=self.session_id, contrib_id=self.contrib_id, subcontrib_id=self.subcontrib_id) | Return the reference of the changed object. |
19,061 | def indices2one_hot(indices, nb_classes):
if nb_classes < 1:
raise ValueError(
.format(nb_classes))
one_hot = []
for index in indices:
one_hot.append([0] * nb_classes)
one_hot[-1][index] = 1
return one_hot | Convert an iterable of indices to one-hot encoded list.
You might also be interested in sklearn.preprocessing.OneHotEncoder
Parameters
----------
indices : iterable
iterable of indices
nb_classes : int
Number of classes
dtype : type
Returns
-------
one_hot : list
Examples
--------
>>> indices2one_hot([0, 1, 1], 3)
[[1, 0, 0], [0, 1, 0], [0, 1, 0]]
>>> indices2one_hot([0, 1, 1], 2)
[[1, 0], [0, 1], [0, 1]] |
19,062 | def _on_apply_button_clicked(self, *args):
refresh_required = self.core_config_model.apply_preliminary_config()
refresh_required |= self.gui_config_model.apply_preliminary_config()
if not self.gui_config_model.config.get_config_value("SESSION_RESTORE_ENABLED"):
import rafcon.gui.backup.session as backup_session
logger.info("Removing current session")
backup_session.reset_session()
if refresh_required:
from rafcon.gui.singleton import main_window_controller
main_window_controller.get_controller().on_refresh_all_activate(None, None)
self._popup_message() | Apply button clicked: Apply the configuration |
19,063 | def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
d3lik_dlink3 = -special.polygamma(2, self.beta*link_f)*(self.beta**3)
return d3lik_dlink3 | Third order derivative log-likelihood function at y given link(f) w.r.t link(f)
.. math::
\\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\beta^{3}\\frac{d^{2}\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\
\\alpha_{i} = \\beta y_{i}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata which is not used in gamma distribution
:returns: third derivative of likelihood evaluated at points f
:rtype: Nx1 array |
19,064 | def fit(self, choosers, alternatives, current_choice):
logger.debug(.format(self.name))
if not isinstance(current_choice, pd.Series):
current_choice = choosers[current_choice]
choosers, alternatives = self.apply_fit_filters(choosers, alternatives)
if self.estimation_sample_size:
choosers = choosers.loc[np.random.choice(
choosers.index,
min(self.estimation_sample_size, len(choosers)),
replace=False)]
current_choice = current_choice.loc[choosers.index]
_, merged, chosen = interaction.mnl_interaction_dataset(
choosers, alternatives, self.sample_size, current_choice)
model_design = dmatrix(
self.str_model_expression, data=merged, return_type='dataframe')
if len(merged) != model_design.as_matrix().shape[0]:
raise ModelEvaluationError(
)
self.log_likelihoods, self.fit_parameters = mnl.mnl_estimate(
model_design.as_matrix(), chosen, self.sample_size)
self.fit_parameters.index = model_design.columns
logger.debug(.format(self.name))
return self.log_likelihoods | Fit and save model parameters based on given data.
Parameters
----------
choosers : pandas.DataFrame
Table describing the agents making choices, e.g. households.
alternatives : pandas.DataFrame
Table describing the things from which agents are choosing,
e.g. buildings.
current_choice : pandas.Series or any
A Series describing the `alternatives` currently chosen
by the `choosers`. Should have an index matching `choosers`
and values matching the index of `alternatives`.
If a non-Series is given it should be a column in `choosers`.
Returns
-------
log_likelihoods : dict
Dict of log-liklihood values describing the quality of the
model fit. Will have keys 'null', 'convergence', and 'ratio'. |
19,065 | def validate_json(data, validator):
errors = []
for err in sorted(validator.iter_errors(data), key=lambda e: e.path):
if err.path:
err_path = "[{}]".format(" -> ".join([str(p) for p in err.path]))
else:
err_path = ""
msg = err.message + " " + err_path
if err.context:
suberrs = [" - {}".format(suberr.message) for suberr in err.context]
msg += ":\n{}".format("".join(suberrs))
errors.append(msg)
return errors | Validate data against a given JSON schema (see https://json-schema.org).
data: JSON-serializable data to validate.
validator (jsonschema.DraftXValidator): The validator.
RETURNS (list): A list of error messages, if available. |
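A hedged usage sketch with the jsonschema package; the schema and data are illustrative:

from jsonschema import Draft7Validator

schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}
validator = Draft7Validator(schema)

errors = validate_json({"name": 123}, validator)
# e.g. ["123 is not of type 'string' [name]"]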
19,066 | def match(self, s):
if self.re.match(s):
self.list.append(s)
return True
else: return False | Matching the pattern to the input string, returns True/False and
saves the matched string in the internal list |
19,067 | def minimize(bed_file):
if not bed_file:
return bed_file
else:
sorted_bed = bt.BedTool(bed_file).cut(range(3)).sort()
if not sorted_bed.fn.endswith(".bed"):
return sorted_bed.moveto(sorted_bed.fn + ".bed")
else:
return sorted_bed | strip a BED file down to its three necessary columns: chrom start end |
19,068 | def _GetAccessToken(self):
d = {
'assertion': self._GenerateAssertion(),
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
}
try:
body = parse.urlencode(d)
except AttributeError:
body = urllib.urlencode(d)
req = urllib_request.Request(RpcHelper.TOKEN_ENDPOINT)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
binary_body = body.encode()
raw_response = urllib_request.urlopen(req, binary_body)
return simplejson.loads(raw_response.read())['access_token'] | Gets oauth2 access token for Gitkit API using service account.
Returns:
string, oauth2 access token. |
19,069 | def available_state(self, state: State) -> Tuple[State, ...]:
result = []
for gene in self.genes:
result.extend(self.available_state_for_gene(gene, state))
if len(result) > 1 and state in result:
result.remove(state)
return tuple(result) | Return the state reachable from a given state. |
19,070 | def validate(self, document, schema=None, update=False, normalize=True):
self.update = update
self._unrequired_by_excludes = set()
self.__init_processing(document, schema)
if normalize:
self.__normalize_mapping(self.document, self.schema)
for field in self.document:
if self.ignore_none_values and self.document[field] is None:
continue
definitions = self.schema.get(field)
if definitions is not None:
self.__validate_definitions(definitions, field)
else:
self.__validate_unknown_fields(field)
if not self.update:
self.__validate_required_fields(self.document)
self.error_handler.end(self)
return not bool(self._errors) | Normalizes and validates a mapping against a validation-schema of
defined rules.
:param document: The document to normalize.
:type document: any :term:`mapping`
:param schema: The validation schema. Defaults to :obj:`None`. If not
provided here, the schema must have been provided at
class instantiation.
:type schema: any :term:`mapping`
:param update: If ``True``, required fields won't be checked.
:type update: :class:`bool`
:param normalize: If ``True``, normalize the document before validation.
:type normalize: :class:`bool`
:return: ``True`` if validation succeeds, otherwise ``False``. Check
the :func:`errors` property for a list of processing errors.
:rtype: :class:`bool` |
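For the public API this method backs, a typical call looks like the following sketch (assuming the standard Cerberus `Validator` class; schema and documents are illustrative):

from cerberus import Validator

v = Validator({'name': {'type': 'string'}, 'age': {'type': 'integer', 'min': 0}})

assert v.validate({'name': 'Ada', 'age': 36}) is True
assert v.validate({'age': -1}) is False
print(v.errors)   # {'age': ['min value is 0']}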
19,071 | def transition_matrix_reversible_pisym(C, return_statdist=False, **kwargs):
T_nonrev = transition_matrix_non_reversible(C)
from msmtools.analysis import stationary_distribution
pi = stationary_distribution(T_nonrev)
X = scipy.sparse.diags(pi).dot(T_nonrev)
X = X.T + X
pi_rev = np.array(X.sum(axis=1)).squeeze()
T_rev = scipy.sparse.diags(1.0/pi_rev).dot(X)
if return_statdist:
return T_rev, pi
return T_rev |
Estimates reversible transition matrix as follows:
..:math:
p_{ij} = c_{ij} / c_i where c_i = sum_j c_{ij}
\pi_j = \sum_j \pi_i p_{ij}
x_{ij} = \pi_i p_{ij} + \pi_j p_{ji}
p^{rev}_{ij} = x_{ij} / x_i where x_i = sum_j x_{ij}
In words: takes the nonreversible transition matrix estimate, uses its
stationary distribution to compute an equilibrium correlation matrix,
symmetrizes that correlation matrix and then normalizes to the reversible
transition matrix estimate.
Parameters
----------
C: ndarray, shape (n,n)
count matrix
Returns
-------
T: Estimated transition matrix |
19,072 | def slice_begin(self, tensor_shape, pnum):
tensor_layout = self.tensor_layout(tensor_shape)
coordinates = pnum_to_processor_coordinates(self.shape, pnum)
ret = []
for dim_size, mesh_axis in zip(
tensor_shape.to_integer_list, tensor_layout.tensor_axis_to_mesh_axis):
if mesh_axis is None:
ret.append(0)
else:
ret.append(
dim_size // self.shape[mesh_axis].size * coordinates[mesh_axis])
return ret | Begin position for the tensor slice for the given processor.
Args:
tensor_shape: Shape.
pnum: int <= self.size.
Returns:
list of integers with length tensor_shape.ndims. |
19,073 | def get_sorted_hdrgo2usrgos(self, hdrgos, flat_list=None, hdrgo_prt=True, hdrgo_sort=True):
sorted_hdrgos_usrgos = []
h2u_get = self.grprobj.hdrgo2usrgos.get
hdr_go2nt = self._get_go2nt(hdrgos)
if hdrgo_sort is True:
hdr_go2nt = sorted(hdr_go2nt.items(), key=lambda t: self.hdrgo_sortby(t[1]))
for hdrgo_id, hdrgo_nt in hdr_go2nt:
if flat_list is not None:
if hdrgo_prt or hdrgo_id in self.grprobj.usrgos:
flat_list.append(hdrgo_nt)
usrgos_unsorted = h2u_get(hdrgo_id)
if usrgos_unsorted:
usrgo2nt = self._get_go2nt(usrgos_unsorted)
usrgont_sorted = sorted(usrgo2nt.items(), key=lambda t: self.usrgo_sortby(t[1]))
usrgos_sorted, usrnts_sorted = zip(*usrgont_sorted)
if flat_list is not None:
flat_list.extend(usrnts_sorted)
sorted_hdrgos_usrgos.append((hdrgo_id, usrgos_sorted))
else:
sorted_hdrgos_usrgos.append((hdrgo_id, []))
return cx.OrderedDict(sorted_hdrgos_usrgos) | Return GO IDs sorting using go2nt's namedtuple. |
19,074 | def cmd_ping(ip, interface, count, timeout, wait, verbose):
if interface:
conf.iface = interface
conf.verb = False
conf.L3socket=L3RawSocket
layer3 = IP()
layer3.dst = ip
layer3.tos = 0
layer3.id = 1
layer3.flags = 0
layer3.frag = 0
layer3.ttl = 64
layer3.proto = 1
layer4 = ICMP()
layer4.type = 8
layer4.code = 0
layer4.id = 0
layer4.seq = 0
pkt = layer3 / layer4
counter = 0
while True:
ans = sr1(pkt, timeout=timeout)
if ans:
if verbose:
ans.show()
else:
print(ans.summary())
del(ans)
else:
print()
counter += 1
if count != 0 and counter == count:
break
sleep(wait)
return True | The classic ping tool that sends ICMP echo requests.
\b
# habu.ping 8.8.8.8
IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding |
19,075 | def _add_explicit_includes(lines, dependencies=None, extralinks=None):
from fortpy import config
import sys
from os import path
includes = sys.modules["config"].includes
linklibs = False
if extralinks is not None and len(extralinks) > 0:
for i, link in enumerate(extralinks):
lines.append("LBD{0:d} = {1}".format(i, link))
lines.append("")
if len(includes) > 0:
lines.append("LIBS\t\t= \\")
for library in includes:
addlib = False
if "modules" in library:
for libmod in library["modules"]:
if dependencies is None or libmod.lower() in dependencies:
addlib = True
break
else:
addlib = True
if addlib:
linklibs = True
lines.append("\t\t{} \\".format(library["path"]))
if extralinks is not None:
for i in range(len(extralinks)):
if path.isfile(extralinks[i]):
lines.append("\t\t$(LBD{0:d}) \\".format(i))
return linklibs or (extralinks is not None and len(extralinks) > 0) | Adds any relevant libraries that need to be explicitly included according
to the fortpy configuration file. Libraries are appended to the specified
collection of lines. Returns true if relevant libraries were added. |
19,076 | def parse_error(self, tup_tree):
self.check_node(tup_tree, 'ERROR', ('CODE',), ('DESCRIPTION',),
('INSTANCE',))
instance_list = self.list_of_various(tup_tree, ('INSTANCE',))
return (name(tup_tree), attrs(tup_tree), instance_list) | Parse the tuple for an ERROR element:
::
<!ELEMENT ERROR (INSTANCE*)>
<!ATTLIST ERROR
CODE CDATA #REQUIRED
DESCRIPTION CDATA #IMPLIED> |
19,077 | def extrude(
self,
input_entity,
translation_axis=None,
rotation_axis=None,
point_on_axis=None,
angle=None,
num_layers=None,
recombine=False,
):
self._EXTRUDE_ID += 1
if _is_string(input_entity):
entity = Dummy(input_entity)
elif isinstance(input_entity, PointBase):
entity = Dummy("Point{{{}}}".format(input_entity.id))
elif isinstance(input_entity, SurfaceBase):
entity = Dummy("Surface{{{}}}".format(input_entity.id))
elif hasattr(input_entity, "surface"):
entity = Dummy("Surface{{{}}}".format(input_entity.surface.id))
else:
assert isinstance(input_entity, LineBase), "Illegal extrude entity."
entity = Dummy("Line{{{}}}".format(input_entity.id))
extrusion_string = ""
name = "ex{}".format(self._EXTRUDE_ID)
if translation_axis is not None:
if rotation_axis is not None:
extrusion_string += "{}[] = Extrude{{{{{}}}, {{{}}}, {{{}}}, {}}}{{{};".format(
name,
",".join(repr(x) for x in translation_axis),
",".join(repr(x) for x in rotation_axis),
",".join(repr(x) for x in point_on_axis),
angle,
entity.id,
)
else:
extrusion_string += "{}[] = Extrude {{{}}} {{{};".format(
name, ",".join(repr(x) for x in translation_axis), entity.id
)
else:
assert (
rotation_axis is not None
), "Specify at least translation or rotation."
extrusion_string += "{}[] = Extrude{{{{{}}}, {{{}}}, {}}}{{{};".format(
name,
",".join(repr(x) for x in rotation_axis),
",".join(repr(x) for x in point_on_axis),
angle,
entity.id,
)
if num_layers is not None:
extrusion_string += " Layers{{{}}}; {}".format(
num_layers, "Recombine;" if recombine else ""
)
extrusion_string += "};"
self._GMSH_CODE.append(extrusion_string)
top = "{}[0]".format(name)
extruded = "{}[1]".format(name)
if isinstance(input_entity, LineBase):
top = LineBase(top)
extruded = SurfaceBase(extruded, 4)
elif isinstance(input_entity, SurfaceBase):
top = SurfaceBase(top, input_entity.num_edges)
extruded = VolumeBase(extruded)
elif isinstance(input_entity, PointBase):
top = PointBase(top)
extruded = LineBase(extruded)
else:
top = Dummy(top)
extruded = Dummy(extruded)
lat = []
if isinstance(input_entity, SurfaceBase):
lat = [
SurfaceBase("{}[{}]".format(name, i + 2), 4)
for i in range(input_entity.num_edges)
]
return top, extruded, lat | Extrusion (translation + rotation) of any entity along a given
translation_axis, around a given rotation_axis, about a given angle. If
one of the entities is not provided, this method will produce only
translation or rotation. |
19,078 | def setup_address(self, name, address=default, transact={}):
owner = self.setup_owner(name, transact=transact)
self._assert_control(owner, name)
if is_none_or_zero_address(address):
address = None
elif address is default:
address = owner
elif is_binary_address(address):
address = to_checksum_address(address)
elif not is_checksum_address(address):
raise ValueError("You must supply the address in checksum format")
if self.address(name) == address:
return None
if address is None:
address = EMPTY_ADDR_HEX
transact['from'] = owner
resolver = self._set_resolver(name, transact=transact)
return resolver.functions.setAddr(raw_name_to_hash(name), address).transact(transact) | Set up the name to point to the supplied address.
The sender of the transaction must own the name, or
its parent name.
Example: If the caller owns ``parentname.eth`` with no subdomains
and calls this method with ``sub.parentname.eth``,
then ``sub`` will be created as part of this call.
:param str name: ENS name to set up
:param str address: name will point to this address, in checksum format. If ``None``,
erase the record. If not specified, name will point to the owner's address.
:param dict transact: the transaction configuration, like in
:meth:`~web3.eth.Eth.sendTransaction`
:raises InvalidName: if ``name`` has invalid syntax
:raises UnauthorizedError: if ``'from'`` in `transact` does not own `name` |
19,079 | def create_model(
model_name: str,
*,
__config__: Type[BaseConfig] = None,
__base__: Type[BaseModel] = None,
__module__: Optional[str] = None,
__validators__: Dict[str, classmethod] = None,
**field_definitions: Any,
) -> BaseModel:
if __base__:
if __config__ is not None:
raise ConfigError()
else:
__base__ = BaseModel
fields = {}
annotations = {}
for f_name, f_def in field_definitions.items():
if f_name.startswith('_'):
warnings.warn(f, RuntimeWarning)
if isinstance(f_def, tuple):
try:
f_annotation, f_value = f_def
except ValueError as e:
raise ConfigError(
f
f
f
) from e
else:
f_annotation, f_value = None, f_def
if f_annotation:
annotations[f_name] = f_annotation
fields[f_name] = f_value
namespace = {'__annotations__': annotations, '__module__': __module__}
if __validators__:
namespace.update(__validators__)
namespace.update(fields)
if __config__:
namespace[] = inherit_config(__config__, BaseConfig)
return type(model_name, (__base__,), namespace) | Dynamically create a model.
:param model_name: name of the created model
:param __config__: config class to use for the new model
:param __base__: base class for the new model to inherit from
:param __validators__: a dict of method names and @validator class methods
:param **field_definitions: fields of the model (or extra fields if a base is supplied) in the format
`<name>=(<type>, <default default>)` or `<name>=<default value> eg. `foobar=(str, ...)` or `foobar=123` |
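A short sketch of the field-definition format described above (mirroring the pydantic-style usage; names are illustrative):

class FooModel(BaseModel):
    foo: str

# 'apple' uses the (type, default) tuple form, 'banana' the bare-default form.
BarModel = create_model('BarModel', apple=(str, 'russet'), banana='yellow', __base__=FooModel)

bar = BarModel(foo='hello')
print(bar.dict())   # {'foo': 'hello', 'apple': 'russet', 'banana': 'yellow'}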
19,080 | def send_response(self, transaction):
host, port = transaction.request.source
key_token = hash(str(host) + str(port) + str(transaction.request.token))
if key_token in self._relations:
if transaction.response.code == defines.Codes.CONTENT.number:
if transaction.resource is not None and transaction.resource.observable:
transaction.response.observe = transaction.resource.observe_count
self._relations[key_token].allowed = True
self._relations[key_token].transaction = transaction
self._relations[key_token].timestamp = time.time()
else:
del self._relations[key_token]
elif transaction.response.code >= defines.Codes.ERROR_LOWER_BOUND:
del self._relations[key_token]
return transaction | Finalize to add the client to the list of observer.
:type transaction: Transaction
:param transaction: the transaction that owns the response
:return: the transaction unmodified |
19,081 | def get(self, alias: str):
try:
return self._caches[alias]
except KeyError:
pass
config = self.get_alias_config(alias)
cache = _create_cache(**deepcopy(config))
self._caches[alias] = cache
return cache | Retrieve cache identified by alias. Will return always the same instance
If the cache was not instantiated yet, it will do it lazily the first time
this is called.
:param alias: str cache alias
:return: cache instance |
19,082 | def get_field_def(
schema: GraphQLSchema, parent_type: GraphQLObjectType, field_name: str
) -> GraphQLField:
if field_name == "__schema" and schema.query_type == parent_type:
return SchemaMetaFieldDef
elif field_name == "__type" and schema.query_type == parent_type:
return TypeMetaFieldDef
elif field_name == "__typename":
return TypeNameMetaFieldDef
return parent_type.fields.get(field_name) | Get field definition.
This method looks up the field on the given type definition. It has special casing
for the two introspection fields, `__schema` and `__typename`. `__typename` is
special because it can always be queried as a field, even in situations where no
other fields are allowed, like on a Union. `__schema` could get automatically
added to the query type, but that would require mutating type definitions, which
would cause issues. |
19,083 | def intersect_3d(p1, p2):
v = p2 - p1
normed_v = unit_vector(v)
nx = normed_v[:, 0]
ny = normed_v[:, 1]
nz = normed_v[:, 2]
xx = np.sum(nx**2 - 1)
yy = np.sum(ny**2 - 1)
zz = np.sum(nz**2 - 1)
xy = np.sum(nx * ny)
xz = np.sum(nx * nz)
yz = np.sum(ny * nz)
M = np.array([(xx, xy, xz), (xy, yy, yz), (xz, yz, zz)])
x = np.sum(
p1[:, 0] * (nx**2 - 1) + p1[:, 1] * (nx * ny) + p1[:, 2] * (nx * nz)
)
y = np.sum(
p1[:, 0] * (nx * ny) + p1[:, 1] * (ny * ny - 1) + p1[:, 2] * (ny * nz)
)
z = np.sum(
p1[:, 0] * (nx * nz) + p1[:, 1] * (ny * nz) + p1[:, 2] * (nz**2 - 1)
)
return np.linalg.lstsq(M, np.array((x, y, z)), rcond=None)[0] | Find the closes point for a given set of lines in 3D.
Parameters
----------
p1 : (M, N) array_like
Starting points
p2 : (M, N) array_like
End points.
Returns
-------
x : (N,) ndarray
Least-squares solution - the closest point of the intersections.
Raises
------
numpy.linalg.LinAlgError
If computation does not converge. |
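A small worked example: two lines that genuinely cross at (1, 1, 1) recover that point exactly (this assumes the module's `unit_vector` helper normalizes each row, as the math above implies):

import numpy as np

p1 = np.array([[0.0, 0.0, 0.0],   # line A start
               [2.0, 0.0, 0.0]])  # line B start
p2 = np.array([[2.0, 2.0, 2.0],   # line A end
               [0.0, 2.0, 2.0]])  # line B end

closest = intersect_3d(p1, p2)
np.testing.assert_allclose(closest, [1.0, 1.0, 1.0], atol=1e-9)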
19,084 | def pred_eq(self, n, val):
v1 = n.value
v2 = val
if hasattr(val, 'value'):
v2 = val.value
if isinstance(v1, int) and not isinstance(v2, int):
return v1 == int(v2)
return v1 == v2 | Test if a node set with setint or setstr equal a certain value
example::
R = [
__scope__:n
['a' #setint(n, 12) | 'b' #setint(n, 14)]
C
[#eq(n, 12) D]
] |
19,085 | def from_body(cls, body):
request = cls()
request.channel = body[1]
request.seq = body[2]
request.cemi = body[4:]
return request | Create a tunnelling request from a given body of a KNX/IP frame. |
19,086 | def insert_code(filename, code, save=True, marker=):
content = ''
found = False
for line in open(filename, 'r'):
if not found and line.strip() == marker:
found = True
cnt = line.find(marker)
content += line[:cnt] + code
content += line
if not found:
if not content.endswith('\n'):
content += '\n'
content += code + marker + '\n'
if save:
with open(filename, 'w') as f:
f.write(content)
return content | Auto append code |
19,087 | def validate(identifier):
source = actions.validate_source(identifier)
log.info(, source.slug, str(source.id)) | Validate a source given its identifier |
19,088 | def get_repo(self, repo: str, branch: str, *,
depth: Optional[int]=1,
reference: Optional[Path]=None
) -> Repo:
git_repo, _ = self.get_files(repo, branch, depth=depth, reference=reference)
return git_repo | Returns a :class:`Repo <git.repo.base.Repo>` instance for the branch.
See :meth:`run` for arguments descriptions. |
19,089 | def sleep(seconds=0):
loop = evergreen.current.loop
current = Fiber.current()
assert loop.task is not current
timer = loop.call_later(seconds, current.switch)
try:
loop.switch()
finally:
timer.cancel() | Yield control to another eligible coroutine until at least *seconds* have
elapsed.
*seconds* may be specified as an integer, or a float if fractional seconds
are desired. |
19,090 | def stopped(name=None,
containers=None,
shutdown_timeout=None,
unpause=False,
error_on_absent=True,
**kwargs):
namechangesresultcommentcommentOne of \ and \ must be providedcommentcontainers must be a listdocker.stateabsentabsentThe following container(s) are absent: {0}, absentpausedresultThe following container(s) are paused: {0}, pausedresultcomment. runningpausedresultcommentContainer \ is commentAll specified containers are absentcommentabsent or commentnot runningtestresultcommentThe following container(s) will be stopped: {0}, unpausetimeoutdocker.stopresultchangescommentcommentFailed to stop container \comment; resultcommentThe following container(s) were stopped: {0}, '.join(to_stop))
)
return ret | Ensure that a container (or containers) is stopped
name
Name or ID of the container
containers
Run this state on more than one container at a time. The following two
examples accomplish the same thing:
.. code-block:: yaml
stopped_containers:
docker_container.stopped:
- names:
- foo
- bar
- baz
.. code-block:: yaml
stopped_containers:
docker_container.stopped:
- containers:
- foo
- bar
- baz
However, the second example will be a bit quicker since Salt will stop
all specified containers in a single run, rather than executing the
state separately on each image (as it would in the first example).
shutdown_timeout
Timeout for graceful shutdown of the container. If this timeout is
exceeded, the container will be killed. If this value is not passed,
then the container's configured ``stop_timeout`` will be observed. If
``stop_timeout`` was also unset on the container, then a timeout of 10
seconds will be used.
unpause : False
Set to ``True`` to unpause any paused containers before stopping. If
unset, then an error will be raised for any container that was paused.
error_on_absent : True
By default, this state will return an error if any of the specified
containers are absent. Set this to ``False`` to suppress that error. |
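A further example in the same style as the docstring's own YAML snippets, combining the optional arguments described above (values are illustrative):

    my_service:
      docker_container.stopped:
        - name: my_service
        - shutdown_timeout: 30
        - unpause: True
        - error_on_absent: False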
19,091 | def from_start_and_end(cls, start, end, sequence, phos_3_prime=False):
strand1 = NucleicAcidStrand.from_start_and_end(
start, end, sequence, phos_3_prime=phos_3_prime)
duplex = cls(strand1)
return duplex | Creates a DNA duplex from a start and end point.
Parameters
----------
start: [float, float, float]
Start of the build axis.
end: [float, float, float]
End of build axis.
sequence: str
Nucleotide sequence.
phos_3_prime: bool, optional
If false the 5' and the 3' phosphor will be omitted. |
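A hedged usage sketch; DNADuplex is an assumed name for this class, and the axis coordinates are arbitrary (roughly one helical turn for the 10-mer if the units are ångströms):

    duplex = DNADuplex.from_start_and_end([0.0, 0.0, 0.0], [0.0, 0.0, 34.0],
                                          'GCATGCATGC', phos_3_prime=False)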
19,092 | def deserialize(self, buffer=bytes(), index=Index(), **options):
self.index = index
self._value = self.unpack(buffer, index, **options)
return self.index_field(index) | De-serializes the `Field` from the byte *buffer* starting at
the begin of the *buffer* or with the given *index* by unpacking the
bytes to the :attr:`value` of the `Field` in accordance with the
decoding *byte order* for the de-serialization and the decoding
:attr:`byte_order` of the `Field`.
The specific decoding :attr:`byte_order` of the `Field` overrules the
decoding *byte order* for the de-serialization.
Returns the :class:`Index` of the *buffer* after the `Field`.
Optionally, the de-serialization of the referenced :attr:`~Pointer.data`
object of a :class:`Pointer` field can be enabled.
:param bytes buffer: byte stream.
:param Index index: current read :class:`Index` within the *buffer*.
:keyword byte_order: decoding byte order for the de-serialization.
:type byte_order: :class:`Byteorder`, :class:`str`
:keyword bool nested: if ``True`` a :class:`Pointer` field de-serialize
its referenced :attr:`~Pointer.data` object as well
(chained method call).
Each :class:`Pointer` field uses for the de-serialization of its
referenced :attr:`~Pointer.data` object its own
:attr:`~Pointer.bytestream`. |
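A hedged sketch of driving this against a little-endian byte stream; Decimal32 is an assumed concrete Field subclass from the surrounding library:

    field = Decimal32()
    buffer = bytes.fromhex('2a000000ff')              # 42 encoded in 4 bytes, plus trailing data
    next_index = field.deserialize(buffer, byte_order='little')
    print(field.value)    # expected: 42
    print(next_index)     # Index advanced past the consumed bytes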
19,093 | def get_encoded_word(value):
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    _3to2list1 = list(value[2:].split('?=', 1))
    tok, remainder, = _3to2list1[:1] + [_3to2list1[1:]]
    if tok == value[2:]:
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if remstr[:2].isdigit():
        # the '?=' ended an encoded-word escape, rejoin it to the token
        _3to2list3 = list(remstr.split('?=', 1))
        rest, remainder, = _3to2list3[:1] + [_3to2list3[1:]]
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        _3to2list5 = list(_wsp_splitter(text, 1))
        chars, remainder, = _3to2list5[:1] + [_3to2list5[1:]]
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
return ew, value | encoded-word = "=?" charset "?" encoding "?" encoded-text "?=" |
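A hedged usage sketch against an RFC 2047 encoded word; the exact token representation depends on the surrounding parser module:

    ew, rest = get_encoded_word('=?utf-8?q?caf=C3=A9?= tail')
    print(str(ew))   # decoded text, here 'café'
    print(rest)      # ' tail' - everything after the encoded word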
19,094 | def _find_errors_param(self):
if hasattr(self.estimator, 'mse_path_'):
return self.estimator.mse_path_.mean(1)
if hasattr(self.estimator, 'cv_values_'):
return self.estimator.cv_values_.mean(0)
raise YellowbrickValueError(
"could not find errors param on {} estimator".format(
self.estimator.__class__.__name__
)
) | Searches for the parameter on the estimator that contains the array of
errors that was used to determine the optimal alpha. If it cannot find
the parameter then a YellowbrickValueError is raised. |
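A hedged sketch of the two estimator shapes this helper handles, via Yellowbrick's AlphaSelection visualizer (X and y are assumed to be an existing regression dataset):

    from sklearn.linear_model import LassoCV
    from yellowbrick.regressor import AlphaSelection

    # LassoCV exposes mse_path_; RidgeCV(store_cv_values=True) would expose cv_values_ instead
    viz = AlphaSelection(LassoCV())
    viz.fit(X, y)
    viz.show()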
19,095 | def filter_on_wire_representation(ava, acs, required=None, optional=None):
acsdic = dict([(ac.name_format, ac) for ac in acs])
if required is None:
required = []
if optional is None:
optional = []
res = {}
for attr, val in ava.items():
done = False
for req in required:
try:
_name = acsdic[req.name_format]._to[attr]
if _name == req.name:
res[attr] = val
done = True
except KeyError:
pass
if done:
continue
for opt in optional:
try:
_name = acsdic[opt.name_format]._to[attr]
if _name == opt.name:
res[attr] = val
break
except KeyError:
pass
return res | :param ava: A dictionary with attributes and values
:param acs: List of tuples (Attribute Converter name,
Attribute Converter instance)
:param required: A list of saml.Attributes
:param optional: A list of saml.Attributes
:return: Dictionary of expected/wanted attributes and values |
19,096 | def load_backend(build_configuration, backend_package):
    backend_module = backend_package + '.register'
    try:
        module = importlib.import_module(backend_module)
    except ImportError as e:
        traceback.print_exc()
        raise BackendConfigurationError(
            'Failed to load the {backend} backend: {error}'.format(
                backend=backend_module, error=e))
    def invoke_entrypoint(name):
        entrypoint = getattr(module, name, lambda: None)
        try:
            return entrypoint()
        except TypeError as e:
            traceback.print_exc()
            raise BackendConfigurationError(
                'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}'.format(
                    entrypoint=name, backend=backend_module, error=e))
    build_file_aliases = invoke_entrypoint('build_file_aliases')
    if build_file_aliases:
        build_configuration.register_aliases(build_file_aliases)
    subsystems = invoke_entrypoint('global_subsystems')
    if subsystems:
        build_configuration.register_optionables(subsystems)
    rules = invoke_entrypoint('rules')
    if rules:
        build_configuration.register_rules(rules)
    invoke_entrypoint('register_goals') | Installs the given backend package into the build configuration.
:param build_configuration: the :class:``pants.build_graph.build_configuration.BuildConfiguration`` to
install the backend plugin into.
:param string backend_package: the package name containing the backend plugin register module that
provides the plugin entrypoints.
:raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading
the build configuration. |
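A hedged sketch of the register module this loader expects at `<backend_package>.register`; the entrypoint names and the BuildFileAliases import path are assumptions about the Pants v1 plugin API:

    # my_plugin/register.py
    from pants.build_graph.build_file_aliases import BuildFileAliases

    def build_file_aliases():
        return BuildFileAliases()   # would normally map BUILD-file aliases to target types

    def global_subsystems():
        return set()

    def rules():
        return []

    def register_goals():
        pass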
19,097 | def _new_convolution(self, use_bias):
def clean_dict(input_dict):
if input_dict and not use_bias:
cleaned_dict = input_dict.copy()
cleaned_dict.pop("b", None)
return cleaned_dict
return input_dict
return self._conv_class(
output_channels=4*self._output_channels,
kernel_shape=self._kernel_shape,
stride=self._stride,
rate=self._rate,
padding=self._padding,
use_bias=use_bias,
initializers=clean_dict(self._initializers),
partitioners=clean_dict(self._partitioners),
regularizers=clean_dict(self._regularizers),
name="conv") | Returns new convolution.
Args:
use_bias: Use bias in convolutions. If False, clean_dict removes bias
entries from initializers, partitioners and regularizers passed to
the constructor of the convolution. |
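The 4x factor packs all gate pre-activations into a single convolution; a hedged sketch of how a caller would typically split that output (conv and inputs are placeholders, and the gate ordering is defined by the cell, not shown here):

    import tensorflow as tf

    gates = conv(inputs)                   # shape [..., 4 * output_channels]
    g0, g1, g2, g3 = tf.split(gates, num_or_size_splits=4, axis=-1)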
19,098 | def _generate_union_tag_vars_funcs(self, union):
for field in union.all_fields:
if not is_void_type(field.data_type):
enum_field_name = fmt_enum_name(field.name, union)
with self.block_func(
func=fmt_camel(field.name),
args=[],
return_type=fmt_type(field.data_type)):
with self.block(
.format(
fmt_camel_upper(field.name)),
delim=(, )):
error_msg = .format(
enum_field_name)
throw_exc = (
)
self.emit(throw_exc.format(error_msg))
self.emit(.format(fmt_var(field.name)))
self.emit() | Emits the getter methods for retrieving tag-specific state. Setters throw
an error in the event an associated tag state variable is accessed without
the correct tag state. |
19,099 | def get_url(request, application, roles, label=None):
args = []
if label is not None:
args.append(label)
args=[application.secret_token, application.state] + args)
return url | Retrieve a link that will work for the current user. |