code | docstring |
---|---|
def val_to_edge(edges, x):
"""Convert axis coordinate to bin index."""
edges = np.array(edges)
w = edges[1:] - edges[:-1]
w = np.insert(w, 0, w[0])
ibin = np.digitize(np.array(x, ndmin=1), edges - 0.5 * w) - 1
ibin[ibin < 0] = 0
return ibin | Convert axis coordinate to bin index. |
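A minimal usage sketch (assuming `numpy` is imported as `np`, as in the function body); the edge array and sample values below are made up for illustration:

```python
import numpy as np

# Five edges describing four uniform bins: [0,1), [1,2), [2,3), [3,4).
edges = [0.0, 1.0, 2.0, 3.0, 4.0]

# Each coordinate maps to the index of its nearest edge; values below the
# first edge are clamped to 0.
print(val_to_edge(edges, [-0.5, 0.2, 1.7, 3.9]))  # -> [0 0 2 4]
```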
def propose_live(self):
"""Return a live point/axes to be used by other sampling methods."""
# Copy a random live point.
i = self.rstate.randint(self.nlive)
u = self.live_u[i, :]
# Check for ellipsoid overlap.
ell_idxs = self.mell.within(u)
nidx = len(ell_idxs)
# Automatically trigger an update if we're not in any ellipsoid.
if nidx == 0:
try:
# Expected prior volume at a given iteration.
expected_vol = math.exp(self.saved_logvol[-1] - self.dlv)
except IndexError:
# Expected prior volume at the first iteration.
expected_vol = math.exp(-self.dlv)
pointvol = expected_vol / self.nlive # minimum point volume
# Update the bounding ellipsoids.
bound = self.update(pointvol)
if self.save_bounds:
self.bound.append(bound)
self.nbound += 1
self.since_update = 0
# Check for ellipsoid overlap (again).
ell_idxs = self.mell.within(u)
nidx = len(ell_idxs)
# Pick a random ellipsoid that encompasses `u`.
ell_idx = ell_idxs[self.rstate.randint(nidx)]
# Choose axes.
if self.sampling in ['rwalk', 'rstagger', 'rslice']:
ax = self.mell.ells[ell_idx].axes
elif self.sampling == 'slice':
ax = self.mell.ells[ell_idx].paxes
else:
ax = np.identity(self.npdim)
return u, ax | Return a live point/axes to be used by other sampling methods. |
def createNoiseExperimentArgs():
"""Run the probability of false negatives with noise experiment."""
experimentArguments = []
n = 6000
for a in [128]:
noisePct = 0.75
while noisePct <= 0.85:
noise = int(round(noisePct*a,0))
# Some parameter combinations are just not worth running!
experimentArguments.append(
("./sdr_calculations2",
"results_noise_10m/temp_"+str(n)+"_"+str(a)+"_"+str(noise)+"_30.csv",
"200000", str(n), str(a), str(noise))
)
noisePct += 0.05
return experimentArguments | Run the probability of false negatives with noise experiment. |
def grey_erosion(image, radius=None, mask=None, footprint=None):
'''Perform a grey erosion with masking'''
if footprint is None:
if radius is None:
footprint = np.ones((3,3),bool)
radius = 1
else:
footprint = strel_disk(radius)==1
else:
radius = max(1, np.max(np.array(footprint.shape) // 2))
iradius = int(np.ceil(radius))
#
# Do a grey_erosion with masked pixels = 1 so they don't participate
#
big_image = np.ones(np.array(image.shape)+iradius*2)
big_image[iradius:-iradius,iradius:-iradius] = image
if mask is not None:
not_mask = np.logical_not(mask)
big_image[iradius:-iradius,iradius:-iradius][not_mask] = 1
processed_image = scind.grey_erosion(big_image, footprint=footprint)
final_image = processed_image[iradius:-iradius,iradius:-iradius]
if mask is not None:
final_image[not_mask] = image[not_mask]
return final_image | Perform a grey erosion with masking |
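A small sketch of the masking behaviour (assuming `scipy.ndimage` is the module bound to `scind` above); the toy image and mask are illustrative only:

```python
import numpy as np
import scipy.ndimage as scind  # module referenced as `scind` in the function

image = np.full((5, 5), 0.5)
image[2, 2] = 0.9
mask = np.ones((5, 5), bool)
mask[0, 0] = False  # this pixel is excluded from the erosion

eroded = grey_erosion(image, mask=mask)
# Masked-out pixels are set to 1 (the padding value) before the erosion so
# they never lower a neighbour's minimum, and they are restored afterwards.
assert eroded[0, 0] == image[0, 0]
```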
def _dispatch_call_args(cls=None, bound_call=None, unbound_call=None,
attr='_call'):
"""Check the arguments of ``_call()`` or similar for conformity.
The ``_call()`` method of `Operator` is allowed to have the
following signatures:
Python 2 and 3:
- ``_call(self, x)``
- ``_call(self, vec, out)``
- ``_call(self, x, out=None)``
Python 3 only:
- ``_call(self, x, *, out=None)`` (``out`` as keyword-only
argument)
For disambiguation, the instance name (the first argument) **must**
be 'self'.
The name of the ``out`` argument **must** be 'out', the second
argument may have any name.
Additional variable ``**kwargs`` and keyword-only arguments
(Python 3 only) are also allowed.
Not allowed:
- ``_call(self)`` -- No arguments except instance:
- ``_call(x)`` -- 'self' missing, i.e. ``@staticmethod``
- ``_call(cls, x)`` -- 'self' missing, i.e. ``@classmethod``
- ``_call(self, out, x)`` -- ``out`` as second argument
- ``_call(self, *x)`` -- Variable arguments
- ``_call(self, x, y, out=None)`` -- more positional arguments
- ``_call(self, x, out=False)`` -- default other than None for
``out``
In particular, static or class methods are not allowed.
Parameters
----------
cls : `class`, optional
The ``_call()`` method of this class is checked. If omitted,
provide ``unbound_call`` instead to check directly.
bound_call : callable, optional
Check this bound method instead of ``cls``
unbound_call : callable, optional
Check this unbound function instead of ``cls``
attr : string, optional
Check this attribute instead of ``_call``, e.g. ``__call__``
Returns
-------
has_out : bool
Whether the call has an ``out`` argument
out_is_optional : bool
Whether the ``out`` argument is optional
spec : `inspect.ArgSpec` or `inspect.FullArgSpec`
Argument specification of the checked call function
Raises
------
ValueError
if the signature of the function is malformed
"""
py3 = (sys.version_info.major > 2)
specs = ['_call(self, x[, **kwargs])',
'_call(self, x, out[, **kwargs])',
'_call(self, x, out=None[, **kwargs])']
if py3:
specs += ['_call(self, x, *, out=None[, **kwargs])']
spec_msg = "\nPossible signatures are ('[, **kwargs]' means optional):\n\n"
spec_msg += '\n'.join(specs)
spec_msg += '\n\nStatic or class methods are not allowed.'
if sum(arg is not None for arg in (cls, bound_call, unbound_call)) != 1:
raise ValueError('exactly one object to check must be given')
if cls is not None:
# Get the actual implementation, including ancestors
for parent in cls.mro():
call = parent.__dict__.get(attr, None)
if call is not None:
break
# Static and class methods are not allowed
if isinstance(call, staticmethod):
raise TypeError("'{}.{}' is a static method. "
"".format(cls.__name__, attr) + spec_msg)
elif isinstance(call, classmethod):
raise TypeError("'{}.{}' is a class method. "
"".format(cls.__name__, attr) + spec_msg)
elif bound_call is not None:
call = bound_call
if not inspect.ismethod(call):
raise TypeError('{} is not a bound method'.format(call))
else:
call = unbound_call
if py3:
# support kw-only args and annotations
spec = inspect.getfullargspec(call)
kw_only = spec.kwonlyargs
kw_only_defaults = spec.kwonlydefaults
else:
spec = inspect.getargspec(call)
kw_only = ()
kw_only_defaults = {}
signature = _function_signature(call)
pos_args = spec.args
if unbound_call is not None:
# Add 'self' to positional arg list to satisfy the checker
pos_args.insert(0, 'self')
pos_defaults = spec.defaults
varargs = spec.varargs
# Variable args are not allowed
if varargs is not None:
raise ValueError("bad signature '{}': variable arguments not allowed"
"".format(signature) + spec_msg)
if len(pos_args) not in (2, 3):
raise ValueError("bad signature '{}'".format(signature) + spec_msg)
true_pos_args = pos_args[1:]
if len(true_pos_args) == 1: # 'out' kw-only
if 'out' in true_pos_args: # 'out' positional and 'x' kw-only -> no
raise ValueError("bad signature '{}': `out` cannot be the only "
"positional argument"
"".format(signature) + spec_msg)
else:
if 'out' not in kw_only:
has_out = out_optional = False
elif kw_only_defaults['out'] is not None:
raise ValueError(
"bad signature '{}': `out` can only default to "
"`None`, got '{}'"
"".format(signature, kw_only_defaults['out']) +
spec_msg)
else:
has_out = True
out_optional = True
elif len(true_pos_args) == 2: # Both args positional
if true_pos_args[0] == 'out': # out must come second
py3_txt = ' or keyword-only. ' if py3 else '. '
raise ValueError("bad signature '{}': `out` can only be the "
"second positional argument".format(signature) +
py3_txt + spec_msg)
elif true_pos_args[1] != 'out': # 'out' must be 'out'
raise ValueError("bad signature '{}': output parameter must "
"be called 'out', got '{}'"
"".format(signature, true_pos_args[1]) +
spec_msg)
else:
has_out = True
out_optional = bool(pos_defaults)
if pos_defaults and pos_defaults[-1] is not None:
raise ValueError("bad signature '{}': `out` can only "
"default to `None`, got '{}'"
"".format(signature, pos_defaults[-1]) +
spec_msg)
else: # Too many positional args
raise ValueError("bad signature '{}': too many positional arguments"
" ".format(signature) + spec_msg)
return has_out, out_optional, spec | Check the arguments of ``_call()`` or similar for conformity.
The ``_call()`` method of `Operator` is allowed to have the
following signatures:
Python 2 and 3:
- ``_call(self, x)``
- ``_call(self, vec, out)``
- ``_call(self, x, out=None)``
Python 3 only:
- ``_call(self, x, *, out=None)`` (``out`` as keyword-only
argument)
For disambiguation, the instance name (the first argument) **must**
be 'self'.
The name of the ``out`` argument **must** be 'out', the second
argument may have any name.
Additional variable ``**kwargs`` and keyword-only arguments
(Python 3 only) are also allowed.
Not allowed:
- ``_call(self)`` -- No arguments except instance:
- ``_call(x)`` -- 'self' missing, i.e. ``@staticmethod``
- ``_call(cls, x)`` -- 'self' missing, i.e. ``@classmethod``
- ``_call(self, out, x)`` -- ``out`` as second argument
- ``_call(self, *x)`` -- Variable arguments
- ``_call(self, x, y, out=None)`` -- more positional arguments
- ``_call(self, x, out=False)`` -- default other than None for
``out``
In particular, static or class methods are not allowed.
Parameters
----------
cls : `class`, optional
The ``_call()`` method of this class is checked. If omitted,
provide ``unbound_call`` instead to check directly.
bound_call : callable, optional
Check this bound method instead of ``cls``
unbound_call : callable, optional
Check this unbound function instead of ``cls``
attr : string, optional
Check this attribute instead of ``_call``, e.g. ``__call__``
Returns
-------
has_out : bool
Whether the call has an ``out`` argument
out_is_optional : bool
Whether the ``out`` argument is optional
spec : `inspect.ArgSpec` or `inspect.FullArgSpec`
Argument specification of the checked call function
Raises
------
ValueError
if the signature of the function is malformed |
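A hypothetical sketch of how the checker reacts to a conforming and a non-conforming `_call` signature; the two classes below are made up for illustration:

```python
class GoodOp:
    def _call(self, x, out=None):      # allowed: optional 'out' defaulting to None
        return x

class BadOp:
    def _call(self, x, y, out=None):   # rejected: too many positional arguments
        return x

has_out, out_optional, spec = _dispatch_call_args(cls=GoodOp)
print(has_out, out_optional)           # True True

try:
    _dispatch_call_args(cls=BadOp)
except ValueError as err:
    print('rejected:', err)
```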
def is_value_type_valid_for_exact_conditions(self, value):
""" Method to validate if the value is valid for exact match type evaluation.
Args:
value: Value to validate.
Returns:
Boolean: True if value is a string, boolean, or number. Otherwise False.
"""
# No need to check for bool since bool is a subclass of int
if isinstance(value, string_types) or isinstance(value, (numbers.Integral, float)):
return True
return False | Method to validate if the value is valid for exact match type evaluation.
Args:
value: Value to validate.
Returns:
Boolean: True if value is a string, boolean, or number. Otherwise False. |
def unhandled(self, key):
"""Handle other keyboard actions not handled by the ListBox widget.
"""
self.key = key
self.size = self.tui.get_cols_rows()
if self.search is True:
if self.enter is False and self.no_matches is False:
if len(key) == 1 and key.isprintable():
self.search_string += key
self._search()
elif self.enter is True and not self.search_string:
self.search = False
self.enter = False
return
if not self.urls and key not in "Qq":
return # No other actions are useful with no URLs
if self.help_menu is False:
try:
self.keys[key]()
except KeyError:
pass | Handle other keyboard actions not handled by the ListBox widget. |
def setInstrumentParameters(self, instrpars):
""" This method overrides the superclass to set default values into
the parameter dictionary, in case empty entries are provided.
"""
pri_header = self._image[0].header
self.proc_unit = instrpars['proc_unit']
if self._isNotValid (instrpars['gain'], instrpars['gnkeyword']):
instrpars['gnkeyword'] = 'ADCGAIN' #gain has been hardcoded below
if self._isNotValid (instrpars['rdnoise'], instrpars['rnkeyword']):
instrpars['rnkeyword'] = None
if self._isNotValid (instrpars['exptime'], instrpars['expkeyword']):
instrpars['expkeyword'] = 'EXPTIME'
for chip in self.returnAllChips(extname=self.scienceExt):
chip._gain= 5.4 #measured gain
chip._rdnoise = self.getInstrParameter(
instrpars['rdnoise'], pri_header, instrpars['rnkeyword']
)
chip._exptime = self.getInstrParameter(
instrpars['exptime'], pri_header, instrpars['expkeyword']
)
if chip._gain is None or chip._exptime is None:
print('ERROR: invalid instrument task parameter')
raise ValueError
# We need to treat Read Noise as a special case since it is
# not populated in the NICMOS primary header
if chip._rdnoise is None:
chip._rdnoise = self._getDefaultReadnoise()
chip._darkrate=self._getDarkRate()
chip.darkcurrent = self.getdarkcurrent()
chip._effGain = chip._gain
# this is used in the static mask, static mask name also defined
# here, must be done after outputNames
self._assignSignature(chip._chip)
# Convert the science data to electrons if specified by the user.
self.doUnitConversions() | This method overrides the superclass to set default values into
the parameter dictionary, in case empty entries are provided. |
def matrix2lha(M):
"""Inverse function to lha2matrix: return a LHA-like list given a tensor."""
l = []
ind = np.indices(M.shape).reshape(M.ndim, M.size).T
for i in ind:
l.append([j+1 for j in i] + [M[tuple(i)]])
return l | Inverse function to lha2matrix: return an LHA-like list given a tensor. |
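For illustration, a small 2x2 tensor (assuming `numpy` is imported as `np`); every output row carries the 1-based indices followed by the value, matching the LHA block convention:

```python
import numpy as np

M = np.array([[0.1, 0.2],
              [0.3, 0.4]])
print(matrix2lha(M))
# -> [[1, 1, 0.1], [1, 2, 0.2], [2, 1, 0.3], [2, 2, 0.4]]
```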
def enable_mfa_device(self, user_name, serial_number,
auth_code_1, auth_code_2):
"""
Enables the specified MFA device and associates it with the
specified user.
:type user_name: string
:param user_name: The username of the user
:type serial_number: string
:param serial_number: The serial number which uniquely identifies
the MFA device.
:type auth_code_1: string
:param auth_code_1: An authentication code emitted by the device.
:type auth_code_2: string
:param auth_code_2: A subsequent authentication code emitted
by the device.
"""
params = {'UserName' : user_name,
'SerialNumber' : serial_number,
'AuthenticationCode1' : auth_code_1,
'AuthenticationCode2' : auth_code_2}
return self.get_response('EnableMFADevice', params) | Enables the specified MFA device and associates it with the
specified user.
:type user_name: string
:param user_name: The username of the user
:type serial_number: string
:param serial_number: The serial number which uniquely identifies
the MFA device.
:type auth_code_1: string
:param auth_code_1: An authentication code emitted by the device.
:type auth_code_2: string
:param auth_code_2: A subsequent authentication code emitted
by the device. |
def receipt(df):
"""
Return a dataframe to verify if an item has a receipt.
"""
mutated_df = df[['IdPRONAC', 'idPlanilhaItem']].astype(str)
mutated_df['pronac_planilha_itens'] = (
mutated_df['IdPRONAC'] + '/' + mutated_df['idPlanilhaItem']
)
return (
mutated_df
.set_index(['pronac_planilha_itens'])
) | Return a dataframe to verify if an item has a receipt. |
def control_surface_encode(self, target, idSurface, mControl, bControl):
'''
Control for surface; pending and order to origin.
target : The system setting the commands (uint8_t)
idSurface : ID control surface send 0: throttle 1: aileron 2: elevator 3: rudder (uint8_t)
mControl : Pending (float)
bControl : Order to origin (float)
'''
return MAVLink_control_surface_message(target, idSurface, mControl, bControl) | Control for surface; pending and order to origin.
target : The system setting the commands (uint8_t)
idSurface : ID control surface send 0: throttle 1: aileron 2: elevator 3: rudder (uint8_t)
mControl : Pending (float)
bControl : Order to origin (float) |
def lock(self, timeout=10):
"""
Advisory lock.
Use to ensure that only one LocalSyncClient is working on the Target at the same time.
"""
logger.debug("Locking %s", self.lock_file)
if not os.path.exists(self.lock_file):
self.ensure_path(self.lock_file)
with open(self.lock_file, "w"):
os.utime(self.lock_file)
self._lock.acquire(timeout=timeout) | Advisory lock.
Use to ensure that only one LocalSyncClient is working on the Target at the same time. |
def start_with(self, x):
"""Returns all arguments beginning with given string (or list thereof)"""
_args = []
for arg in self.all:
if is_collection(x):
for _x in x:
if arg.startswith(_x):
_args.append(arg)
break
else:
if arg.startswith(x):
_args.append(arg)
return Args(_args, no_argv=True) | Returns all arguments beginning with given string (or list thereof) |
def start_transports(self):
"""start thread transports."""
self.transport = Transport(
self.queue, self.batch_size, self.batch_interval,
self.session_factory)
thread = threading.Thread(target=self.transport.loop)
self.threads.append(thread)
thread.daemon = True
thread.start() | start thread transports. |
def scopus_url(self):
"""URL to the abstract page on Scopus."""
scopus_url = self.coredata.find('link[@rel="scopus"]', ns)
try:
return scopus_url.get('href')
except AttributeError: # scopus_url is None
return None | URL to the abstract page on Scopus. |
def known(self, words: List[str]) -> List[str]:
"""
Return a list of the given words that are found in the spelling dictionary
:param List[str] words: A list of words to check if they are in the spelling dictionary
"""
return list(w for w in words if w in self.__WORDS) | Return a list of the given words that are found in the spelling dictionary
:param List[str] words: A list of words to check if they are in the spelling dictionary |
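A minimal, hypothetical spelling checker replicating the same lookup logic with a two-word dictionary, just to show the filtering behaviour:

```python
class TinySpell:
    def __init__(self):
        self.__WORDS = {"cat", "dog"}   # hypothetical spelling dictionary

    def known(self, words):
        # Same membership filter as above, preserving input order.
        return list(w for w in words if w in self.__WORDS)

print(TinySpell().known(["cat", "cow", "dog"]))  # -> ['cat', 'dog']
```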
def makeCloneMap(columnsShape, outputCloningWidth, outputCloningHeight=-1):
"""Make a two-dimensional clone map mapping columns to clone master.
This makes a map that is (numColumnsHigh, numColumnsWide) big that can
be used to figure out which clone master to use for each column. Here are
a few sample calls
>>> makeCloneMap(columnsShape=(10, 6), outputCloningWidth=4)
(array([[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5],
[ 8, 9, 10, 11, 8, 9],
[12, 13, 14, 15, 12, 13],
[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5],
[ 8, 9, 10, 11, 8, 9],
[12, 13, 14, 15, 12, 13],
[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5]], dtype=uint32), 16)
>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3)
(array([[0, 1, 2, 0, 1, 2, 0, 1],
[3, 4, 5, 3, 4, 5, 3, 4],
[6, 7, 8, 6, 7, 8, 6, 7],
[0, 1, 2, 0, 1, 2, 0, 1],
[3, 4, 5, 3, 4, 5, 3, 4],
[6, 7, 8, 6, 7, 8, 6, 7],
[0, 1, 2, 0, 1, 2, 0, 1]], dtype=uint32), 9)
>>> makeCloneMap(columnsShape=(7, 11), outputCloningWidth=5)
(array([[ 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 0],
[ 5, 6, 7, 8, 9, 5, 6, 7, 8, 9, 5],
[10, 11, 12, 13, 14, 10, 11, 12, 13, 14, 10],
[15, 16, 17, 18, 19, 15, 16, 17, 18, 19, 15],
[20, 21, 22, 23, 24, 20, 21, 22, 23, 24, 20],
[ 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 0],
[ 5, 6, 7, 8, 9, 5, 6, 7, 8, 9, 5]], dtype=uint32), 25)
>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3, outputCloningHeight=4)
(array([[ 0, 1, 2, 0, 1, 2, 0, 1],
[ 3, 4, 5, 3, 4, 5, 3, 4],
[ 6, 7, 8, 6, 7, 8, 6, 7],
[ 9, 10, 11, 9, 10, 11, 9, 10],
[ 0, 1, 2, 0, 1, 2, 0, 1],
[ 3, 4, 5, 3, 4, 5, 3, 4],
[ 6, 7, 8, 6, 7, 8, 6, 7]], dtype=uint32), 12)
The basic idea with this map is that, if you imagine things stretching off
to infinity, every instance of a given clone master is seeing the exact
same thing in all directions. That includes:
- All neighbors must be the same
- The "meaning" of the input to each of the instances of the same clone
master must be the same. If input is pixels and we have translation
invariance--this is easy. At higher levels where input is the output
of lower levels, this can be much harder.
- The "meaning" of the inputs to neighbors of a clone master must be the
same for each instance of the same clone master.
The best way to think of this might be in terms of 'inputCloningWidth' and
'outputCloningWidth'.
- The 'outputCloningWidth' is the number of columns you'd have to move
horizontally (or vertically) before you get back to the same
clone that you started with. MUST BE INTEGRAL!
- The 'inputCloningWidth' is the 'outputCloningWidth' of the node below us.
If we're getting input from a sensor where every element just represents
a shift of every other element, this is 1.
At a conceptual level, it means that if two different inputs are shown
to the node and the only difference between them is that one is shifted
horizontally (or vertically) by this many pixels, it means we are looking
at the exact same real world input, but shifted by some number of pixels
(doesn't have to be 1). MUST BE INTEGRAL!
At level 1, I think you could have this:
* inputCloningWidth = 1
* sqrt(coincToInputRatio^2) = 2.5
* outputCloningWidth = 5
...in this case, you'd end up with 25 masters.
Let's think about this case:
input: - - - 0 1 2 3 4 5 - - - - -
columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4
...in other words, input 0 is fed to both column 0 and column 1. Input 1
is fed to columns 2, 3, and 4, etc. Hopefully, you can see that you'll
get the exact same output (except shifted) with:
input: - - - - - 0 1 2 3 4 5 - - -
columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4
...in other words, we've shifted the input 2 spaces and the output shifted
5 spaces.
*** The outputCloningWidth MUST ALWAYS be an integral multiple of the ***
*** inputCloningWidth in order for all of our rules to apply. ***
*** NOTE: inputCloningWidth isn't passed here, so it's the caller's ***
*** responsibility to ensure that this is true. ***
*** The outputCloningWidth MUST ALWAYS be an integral multiple of ***
*** sqrt(coincToInputRatio^2), too. ***
@param columnsShape The shape (height, width) of the columns.
@param outputCloningWidth See docstring above.
@param outputCloningHeight If non-negative, can be used to make
rectangular (instead of square) cloning fields.
@return cloneMap An array (numColumnsHigh, numColumnsWide) that
contains the clone index to use for each
column.
@return numDistinctClones The number of distinct clones in the map. This
is just outputCloningWidth*outputCloningHeight.
"""
if outputCloningHeight < 0:
outputCloningHeight = outputCloningWidth
columnsHeight, columnsWidth = columnsShape
numDistinctMasters = outputCloningWidth * outputCloningHeight
a = numpy.empty((columnsHeight, columnsWidth), 'uint32')
for row in xrange(columnsHeight):
for col in xrange(columnsWidth):
a[row, col] = (col % outputCloningWidth) + \
(row % outputCloningHeight) * outputCloningWidth
return a, numDistinctMasters | Make a two-dimensional clone map mapping columns to clone master.
This makes a map that is (numColumnsHigh, numColumnsWide) big that can
be used to figure out which clone master to use for each column. Here are
a few sample calls
>>> makeCloneMap(columnsShape=(10, 6), outputCloningWidth=4)
(array([[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5],
[ 8, 9, 10, 11, 8, 9],
[12, 13, 14, 15, 12, 13],
[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5],
[ 8, 9, 10, 11, 8, 9],
[12, 13, 14, 15, 12, 13],
[ 0, 1, 2, 3, 0, 1],
[ 4, 5, 6, 7, 4, 5]], dtype=uint32), 16)
>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3)
(array([[0, 1, 2, 0, 1, 2, 0, 1],
[3, 4, 5, 3, 4, 5, 3, 4],
[6, 7, 8, 6, 7, 8, 6, 7],
[0, 1, 2, 0, 1, 2, 0, 1],
[3, 4, 5, 3, 4, 5, 3, 4],
[6, 7, 8, 6, 7, 8, 6, 7],
[0, 1, 2, 0, 1, 2, 0, 1]], dtype=uint32), 9)
>>> makeCloneMap(columnsShape=(7, 11), outputCloningWidth=5)
(array([[ 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 0],
[ 5, 6, 7, 8, 9, 5, 6, 7, 8, 9, 5],
[10, 11, 12, 13, 14, 10, 11, 12, 13, 14, 10],
[15, 16, 17, 18, 19, 15, 16, 17, 18, 19, 15],
[20, 21, 22, 23, 24, 20, 21, 22, 23, 24, 20],
[ 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 0],
[ 5, 6, 7, 8, 9, 5, 6, 7, 8, 9, 5]], dtype=uint32), 25)
>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3, outputCloningHeight=4)
(array([[ 0, 1, 2, 0, 1, 2, 0, 1],
[ 3, 4, 5, 3, 4, 5, 3, 4],
[ 6, 7, 8, 6, 7, 8, 6, 7],
[ 9, 10, 11, 9, 10, 11, 9, 10],
[ 0, 1, 2, 0, 1, 2, 0, 1],
[ 3, 4, 5, 3, 4, 5, 3, 4],
[ 6, 7, 8, 6, 7, 8, 6, 7]], dtype=uint32), 12)
The basic idea with this map is that, if you imagine things stretching off
to infinity, every instance of a given clone master is seeing the exact
same thing in all directions. That includes:
- All neighbors must be the same
- The "meaning" of the input to each of the instances of the same clone
master must be the same. If input is pixels and we have translation
invariance--this is easy. At higher levels where input is the output
of lower levels, this can be much harder.
- The "meaning" of the inputs to neighbors of a clone master must be the
same for each instance of the same clone master.
The best way to think of this might be in terms of 'inputCloningWidth' and
'outputCloningWidth'.
- The 'outputCloningWidth' is the number of columns you'd have to move
horizontally (or vertically) before you get back to the same
clone that you started with. MUST BE INTEGRAL!
- The 'inputCloningWidth' is the 'outputCloningWidth' of the node below us.
If we're getting input from a sensor where every element just represents
a shift of every other element, this is 1.
At a conceptual level, it means that if two different inputs are shown
to the node and the only difference between them is that one is shifted
horizontally (or vertically) by this many pixels, it means we are looking
at the exact same real world input, but shifted by some number of pixels
(doesn't have to be 1). MUST BE INTEGRAL!
At level 1, I think you could have this:
* inputCloningWidth = 1
* sqrt(coincToInputRatio^2) = 2.5
* outputCloningWidth = 5
...in this case, you'd end up with 25 masters.
Let's think about this case:
input: - - - 0 1 2 3 4 5 - - - - -
columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4
...in other words, input 0 is fed to both column 0 and column 1. Input 1
is fed to columns 2, 3, and 4, etc. Hopefully, you can see that you'll
get the exact same output (except shifted) with:
input: - - - - - 0 1 2 3 4 5 - - -
columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4
...in other words, we've shifted the input 2 spaces and the output shifted
5 spaces.
*** The outputCloningWidth MUST ALWAYS be an integral multiple of the ***
*** inputCloningWidth in order for all of our rules to apply. ***
*** NOTE: inputCloningWidth isn't passed here, so it's the caller's ***
*** responsibility to ensure that this is true. ***
*** The outputCloningWidth MUST ALWAYS be an integral multiple of ***
*** sqrt(coincToInputRatio^2), too. ***
@param columnsShape The shape (height, width) of the columns.
@param outputCloningWidth See docstring above.
@param outputCloningHeight If non-negative, can be used to make
rectangular (instead of square) cloning fields.
@return cloneMap An array (numColumnsHigh, numColumnsWide) that
contains the clone index to use for each
column.
@return numDistinctClones The number of distinct clones in the map. This
is just outputCloningWidth*outputCloningHeight. |
def deserialize_header_auth(stream, algorithm, verifier=None):
"""Deserializes a MessageHeaderAuthentication object from a source stream.
:param stream: Source data stream
:type stream: io.BytesIO
:param algorithm: The AlgorithmSuite object type contained in the header
:type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite
:param verifier: Signature verifier object (optional)
:type verifier: aws_encryption_sdk.internal.crypto.Verifier
:returns: Deserialized MessageHeaderAuthentication object
:rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
"""
_LOGGER.debug("Starting header auth deserialization")
format_string = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len)
return MessageHeaderAuthentication(*unpack_values(format_string, stream, verifier)) | Deserializes a MessageHeaderAuthentication object from a source stream.
:param stream: Source data stream
:type stream: io.BytesIO
:param algorithm: The AlgorithmSuite object type contained in the header
:type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite
:param verifier: Signature verifier object (optional)
:type verifier: aws_encryption_sdk.internal.crypto.Verifier
:returns: Deserialized MessageHeaderAuthentication object
:rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication |
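For intuition, a sketch of the struct format string this builds (the 12-byte IV and 16-byte tag lengths below are assumptions, typical of AES-GCM suites):

```python
# Hypothetical suite attributes; the real values come from the
# AlgorithmSuite object passed in as `algorithm`.
iv_len, tag_len = 12, 16
format_string = ">{iv_len}s{tag_len}s".format(iv_len=iv_len, tag_len=tag_len)
print(format_string)  # '>12s16s': read 12 IV bytes, then 16 auth-tag bytes
```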
def stats(request, server_name):
"""
Show server statistics.
"""
server_name = server_name.strip('/')
data = _context_data({
'title': _('Memcache Statistics for %s') % server_name,
'cache_stats': _get_cache_stats(server_name),
},
request)
return render_to_response('memcache_admin/stats.html', data, RequestContext(request)) | Show server statistics. |
def _short_string_handler_factory():
"""Generates the short string (double quoted) handler."""
def before(c, ctx, is_field_name, is_clob):
assert not (is_clob and is_field_name)
is_string = not is_clob and not is_field_name
if is_string:
ctx.set_ion_type(IonType.STRING)
val = ctx.value
if is_field_name:
assert not val
ctx.set_pending_symbol()
val = ctx.pending_symbol
return val, is_string
def on_close(ctx):
ctx.set_self_delimiting(True)
return ctx.event_transition(IonEvent, IonEventType.SCALAR, ctx.ion_type, ctx.value.as_text())
def after(c, ctx, is_field_name):
ctx.set_quoted_text(False).set_self_delimiting(True)
return ctx.immediate_transition(
ctx.whence if is_field_name else _clob_end_handler(c, ctx),
)
return _quoted_text_handler_factory(_DOUBLE_QUOTE, lambda c: c == _DOUBLE_QUOTE, before, after, append_first=False,
on_close=on_close) | Generates the short string (double quoted) handler. |
def populate(self,
size,
names_library=None,
reuse_names=False,
random_branches=False,
branch_range=(0, 1),
support_range=(0, 1)):
"""
Generates a random topology by populating current node.
:argument None names_library: If provided, names library
(list, set, dict, etc.) will be used to name nodes.
:argument False reuse_names: If True, node names will not be
necessarily unique, which makes the process a bit more
efficient.
:argument False random_branches: If True, branch distances and support
values will be randomized.
:argument (0,1) branch_range: If random_branches is True, this
range of values will be used to generate random distances.
:argument (0,1) support_range: If random_branches is True,
this range of values will be used to generate random branch
support values.
"""
NewNode = self.__class__
if len(self.children) > 1:
connector = NewNode()
for ch in self.get_children():
ch.detach()
connector.add_child(child = ch)
root = NewNode()
self.add_child(child = connector)
self.add_child(child = root)
else:
root = self
next_deq = deque([root])
for i in range(size-1):
if random.randint(0, 1):
p = next_deq.pop()
else:
p = next_deq.popleft()
c1 = p.add_child()
c2 = p.add_child()
next_deq.extend([c1, c2])
if random_branches:
c1.dist = random.uniform(*branch_range)
c2.dist = random.uniform(*branch_range)
c1.support = random.uniform(*support_range)
c2.support = random.uniform(*support_range)
else:
c1.dist = 1.0
c2.dist = 1.0
c1.support = 1.0
c2.support = 1.0
# next contains leaf nodes
charset = "abcdefghijklmnopqrstuvwxyz"
if names_library:
names_library = deque(names_library)
else:
avail_names = itertools.combinations_with_replacement(charset, 10)
for n in next_deq:
if names_library:
if reuse_names:
tname = random.sample(names_library, 1)[0]
else:
tname = names_library.pop()
else:
tname = ''.join(next(avail_names))
n.name = tname | Generates a random topology by populating current node.
:argument None names_library: If provided, names library
(list, set, dict, etc.) will be used to name nodes.
:argument False reuse_names: If True, node names will not be
necessarily unique, which makes the process a bit more
efficient.
:argument False random_branches: If True, branch distances and support
values will be randomized.
:argument (0,1) branch_range: If random_branches is True, this
range of values will be used to generate random distances.
:argument (0,1) support_range: If random_branches is True,
this range of values will be used to generate random branch
support values. |
def add_translation(self, rna: Rna, protein: Protein) -> str:
"""Add a translation relation from a RNA to a protein.
:param rna: An RNA node
:param protein: A protein node
"""
return self.add_unqualified_edge(rna, protein, TRANSLATED_TO) | Add a translation relation from an RNA to a protein.
:param rna: An RNA node
:param protein: A protein node |
def _create_autostart_entry(autostart_data: AutostartSettings, autostart_file: Path):
"""Create an autostart .desktop file in the autostart directory, if possible."""
try:
source_desktop_file = get_source_desktop_file(autostart_data.desktop_file_name)
except FileNotFoundError:
_logger.exception("Failed to find a usable .desktop file! Unable to find: {}".format(
autostart_data.desktop_file_name))
else:
_logger.debug("Found source desktop file that will be placed into the autostart directory: {}".format(
source_desktop_file))
with open(str(source_desktop_file), "r") as opened_source_desktop_file:
desktop_file_content = opened_source_desktop_file.read()
desktop_file_content = "\n".join(_manage_autostart_desktop_file_launch_flags(
desktop_file_content, autostart_data.switch_show_configure
)) + "\n"
with open(str(autostart_file), "w", encoding="UTF-8") as opened_autostart_file:
opened_autostart_file.write(desktop_file_content)
_logger.debug("Written desktop file: {}".format(autostart_file)) | Create an autostart .desktop file in the autostart directory, if possible. |
def post(self, value, addend, unit):
"""A date adder endpoint."""
value = value or dt.datetime.utcnow()
if unit == "minutes":
delta = dt.timedelta(minutes=addend)
else:
delta = dt.timedelta(days=addend)
result = value + delta
return {"result": result.isoformat()} | A date adder endpoint. |
def delete_publisher_asset(self, publisher_name, asset_type=None):
"""DeletePublisherAsset.
[Preview API] Delete publisher asset like logo
:param str publisher_name: Internal name of the publisher
:param str asset_type: Type of asset. Default value is 'logo'.
"""
route_values = {}
if publisher_name is not None:
route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str')
query_parameters = {}
if asset_type is not None:
query_parameters['assetType'] = self._serialize.query('asset_type', asset_type, 'str')
self._send(http_method='DELETE',
location_id='21143299-34f9-4c62-8ca8-53da691192f9',
version='5.1-preview.1',
route_values=route_values,
query_parameters=query_parameters) | DeletePublisherAsset.
[Preview API] Delete publisher asset like logo
:param str publisher_name: Internal name of the publisher
:param str asset_type: Type of asset. Default value is 'logo'. |
def does_external_program_run(prog, verbose):
"""Test to see if the external programs can be run."""
try:
with open('/dev/null') as null:
subprocess.call([prog, '-h'], stdout=null, stderr=null)
result = True
except OSError:
if verbose > 1:
print("couldn't run {}".format(prog))
result = False
return result | Test to see if the external programs can be run. |
def merge_data(*data_frames, **kwargs):
"""
Merge DataFrames by column. Number of rows in tables must be the same.
This method can be called both as a standalone function and as a DataFrame method.
:param list[DataFrame] data_frames: DataFrames to be merged.
:param bool auto_rename: if True, fields in source DataFrames will be renamed in the output.
:return: merged data frame.
:rtype: DataFrame
:Example:
>>> merged1 = merge_data(df1, df2)
>>> merged2 = df1.merge_with(df2, auto_rename_col=True)
"""
from .specialized import build_merge_expr
from ..utils import ML_ARG_PREFIX
if len(data_frames) <= 1:
raise ValueError('Count of DataFrames should be at least 2.')
norm_data_pairs = []
df_tuple = collections.namedtuple('MergeTuple', 'df cols exclude')
for pair in data_frames:
if isinstance(pair, tuple):
if len(pair) == 2:
df, cols = pair
exclude = False
else:
df, cols, exclude = pair
if isinstance(cols, six.string_types):
cols = cols.split(',')
else:
df, cols, exclude = pair, None, False
norm_data_pairs.append(df_tuple(df, cols, exclude))
auto_rename = kwargs.get('auto_rename', False)
sel_cols_dict = dict((idx, tp.cols) for idx, tp in enumerate(norm_data_pairs) if tp.cols and not tp.exclude)
ex_cols_dict = dict((idx, tp.cols) for idx, tp in enumerate(norm_data_pairs) if tp.cols and tp.exclude)
merge_expr = build_merge_expr(len(norm_data_pairs))
arg_dict = dict(_params={'autoRenameCol': str(auto_rename)},
selected_cols=sel_cols_dict, excluded_cols=ex_cols_dict)
for idx, dp in enumerate(norm_data_pairs):
arg_dict[ML_ARG_PREFIX + 'input%d' % (1 + idx)] = dp.df
out_df = merge_expr(register_expr=True, _exec_id=uuid.uuid4(), _output_name='output', **arg_dict)
out_df._ml_uplink = [dp.df for dp in norm_data_pairs]
out_df._perform_operation(op.MergeFieldsOperation(auto_rename, sel_cols_dict, ex_cols_dict))
out_df._rebuild_df_schema()
return out_df | Merge DataFrames by column. Number of rows in tables must be the same.
This method can be called both as a standalone function and as a DataFrame method.
:param list[DataFrame] data_frames: DataFrames to be merged.
:param bool auto_rename: if True, fields in source DataFrames will be renamed in the output.
:return: merged data frame.
:rtype: DataFrame
:Example:
>>> merged1 = merge_data(df1, df2)
>>> merged2 = df1.merge_with(df2, auto_rename_col=True) |
def assign_rates(self, mu=1.0, pi=None, W=None):
"""
Overwrite the GTR model given the provided data
Parameters
----------
mu : float
Substitution rate
W : nxn matrix
Substitution matrix
pi : n vector
Equilibrium frequencies
"""
n = len(self.alphabet)
self.mu = np.copy(mu)
if pi is not None and pi.shape[0]==n:
self.seq_len = pi.shape[-1]
Pi = np.copy(pi)
else:
if pi is not None and len(pi)!=n:
self.logger("length of equilibrium frequency vector does not match alphabet length", 4, warn=True)
self.logger("Ignoring input equilibrium frequencies", 4, warn=True)
Pi = np.ones(shape=(n,self.seq_len))
self.Pi = Pi/np.sum(Pi, axis=0)
if W is None or W.shape!=(n,n):
if (W is not None) and W.shape!=(n,n):
self.logger("Substitution matrix size does not match alphabet size", 4, warn=True)
self.logger("Ignoring input substitution matrix", 4, warn=True)
# flow matrix
W = np.ones((n,n))
else:
W=0.5*(np.copy(W)+np.copy(W).T)
np.fill_diagonal(W,0)
avg_pi = self.Pi.mean(axis=-1)
average_rate = W.dot(avg_pi).dot(avg_pi)
self.W = W/average_rate
self.mu *=average_rate
self._eig() | Overwrite the GTR model given the provided data
Parameters
----------
mu : float
Substitution rate
W : nxn matrix
Substitution matrix
pi : n vector
Equilibrium frequencies |
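For intuition, a hypothetical 2-state illustration of the final normalisation step: `W` is rescaled so the average substitution rate is 1, and `mu` absorbs the scale factor:

```python
import numpy as np

W = np.array([[0.0, 4.0],
              [4.0, 0.0]])
avg_pi = np.array([0.5, 0.5])             # equal equilibrium frequencies

average_rate = W.dot(avg_pi).dot(avg_pi)  # 2.0
W = W / average_rate                      # [[0., 2.], [2., 0.]]
mu = 1.0 * average_rate                   # 2.0
```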
def _gwf_channel(path, series_class=TimeSeries, verbose=False):
"""Find the right channel name for a LOSC GWF file
"""
channels = list(io_gwf.iter_channel_names(file_path(path)))
if issubclass(series_class, StateVector):
regex = DQMASK_CHANNEL_REGEX
else:
regex = STRAIN_CHANNEL_REGEX
found, = list(filter(regex.match, channels))
if verbose:
print("Using channel {0!r}".format(found))
return found | Find the right channel name for a LOSC GWF file |
def add_marccountry_tag(dom):
"""
Add ``<mods:placeTerm>`` tag with proper content.
"""
marccountry = dom.find("mods:placeTerm", {"authority": "marccountry"})
# don't add again if already defined
if marccountry:
return
marccountry_tag = dhtmlparser.HTMLElement(
"mods:place",
[
dhtmlparser.HTMLElement(
"mods:placeTerm",
{"type": "code", "authority": "marccountry"},
[dhtmlparser.HTMLElement("xr-")]
)
]
)
insert_tag(
marccountry_tag,
dom.match("mods:mods", "mods:originInfo", "mods:place"),
first(dom.find("mods:originInfo"))
) | Add ``<mods:placeTerm>`` tag with proper content. |
def setup_new_conf(self):
# pylint: disable=too-many-branches, too-many-locals
"""Broker custom setup_new_conf method
This function calls the base satellite treatment and manages the configuration needed
for a broker daemon:
- get and configure its pollers, reactionners and receivers relation
- configure the modules
:return: None
"""
# Execute the base class treatment...
super(Broker, self).setup_new_conf()
# ...then our own specific treatment!
with self.conf_lock:
# # self_conf is our own configuration from the alignak environment
# self_conf = self.cur_conf['self_conf']
self.got_initial_broks = False
# Now we create our pollers, reactionners and receivers
for link_type in ['pollers', 'reactionners', 'receivers']:
if link_type not in self.cur_conf['satellites']:
logger.error("No %s in the configuration!", link_type)
continue
my_satellites = getattr(self, link_type, {})
received_satellites = self.cur_conf['satellites'][link_type]
for link_uuid in received_satellites:
rs_conf = received_satellites[link_uuid]
logger.debug("- received %s - %s: %s", rs_conf['instance_id'],
rs_conf['type'], rs_conf['name'])
# Must look if we already had a configuration and save our broks
already_got = rs_conf['instance_id'] in my_satellites
broks = []
actions = {}
wait_homerun = {}
external_commands = {}
running_id = 0
if already_got:
logger.warning("I already got: %s", rs_conf['instance_id'])
# Save some information
running_id = my_satellites[link_uuid].running_id
(broks, actions,
wait_homerun, external_commands) = \
my_satellites[link_uuid].get_and_clear_context()
# Delete the former link
del my_satellites[link_uuid]
# My new satellite link...
new_link = SatelliteLink.get_a_satellite_link(link_type[:-1],
rs_conf)
my_satellites[new_link.uuid] = new_link
logger.info("I got a new %s satellite: %s", link_type[:-1], new_link)
new_link.running_id = running_id
new_link.external_commands = external_commands
new_link.broks = broks
new_link.wait_homerun = wait_homerun
new_link.actions = actions
# Replace satellite address and port by those defined in satellite_map
# todo: check if it is really necessary! Add a unit test for this
# Not sure about this because of the daemons/satellites configuration
# if new_link.name in self_conf.get('satellite_map', {}):
# new_link = dict(new_link) # make a copy
# new_link.update(self_conf.get('satellite_map', {})[new_link.name])
if not self.have_modules:
try:
self.modules = unserialize(self.cur_conf['modules'], no_load=True)
except AlignakClassLookupException as exp: # pragma: no cover, simple protection
logger.error('Cannot un-serialize modules configuration '
'received from arbiter: %s', exp)
if self.modules:
logger.info("I received some modules configuration: %s", self.modules)
self.have_modules = True
# Ok now start, or restart them!
# Set modules, init them and start external ones
self.do_load_modules(self.modules)
# and start external modules too
self.modules_manager.start_external_instances()
else:
logger.info("I do not have modules")
# Initialize connection with my schedulers first
logger.info("Initializing connection with my schedulers:")
my_satellites = self.get_links_of_type(s_type='scheduler')
for satellite in list(my_satellites.values()):
logger.info("- %s/%s", satellite.type, satellite.name)
if not self.daemon_connection_init(satellite):
logger.error("Satellite connection failed: %s", satellite)
# Initialize connection with all our satellites
logger.info("Initializing connection with my satellites:")
for sat_type in ['arbiter', 'reactionner', 'poller', 'receiver']:
my_satellites = self.get_links_of_type(s_type=sat_type)
for satellite in list(my_satellites.values()):
logger.info("- %s/%s", satellite.type, satellite.name)
if not self.daemon_connection_init(satellite):
logger.error("Satellite connection failed: %s", satellite)
# Now I have a configuration!
self.have_conf = True | Broker custom setup_new_conf method
This function calls the base satellite treatment and manages the configuration needed
for a broker daemon:
- get and configure its pollers, reactionners and receivers relation
- configure the modules
:return: None |
def _cleanup_closed(self) -> None:
"""Double confirmation for transport close.
Some broken ssl servers may leave socket open without proper close.
"""
if self._cleanup_closed_handle:
self._cleanup_closed_handle.cancel()
for transport in self._cleanup_closed_transports:
if transport is not None:
transport.abort()
self._cleanup_closed_transports = []
if not self._cleanup_closed_disabled:
self._cleanup_closed_handle = helpers.weakref_handle(
self, '_cleanup_closed',
self._cleanup_closed_period, self._loop) | Double confirmation for transport close.
Some broken ssl servers may leave socket open without proper close. |
def lwp_cookie_str(cookie):
"""Return string representation of Cookie in an the LWP cookie file format.
Actually, the format is extended a bit -- see module docstring.
"""
h = [(cookie.name, cookie.value),
("path", cookie.path),
("domain", cookie.domain)]
if cookie.port is not None: h.append(("port", cookie.port))
if cookie.path_specified: h.append(("path_spec", None))
if cookie.port_specified: h.append(("port_spec", None))
if cookie.domain_initial_dot: h.append(("domain_dot", None))
if cookie.secure: h.append(("secure", None))
if cookie.expires: h.append(("expires",
time2isoz(float(cookie.expires))))
if cookie.discard: h.append(("discard", None))
if cookie.comment: h.append(("comment", cookie.comment))
if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
keys = sorted(cookie._rest.keys())
for k in keys:
h.append((k, str(cookie._rest[k])))
h.append(("version", str(cookie.version)))
return join_header_words([h]) | Return string representation of Cookie in the LWP cookie file format.
Actually, the format is extended a bit -- see module docstring. |
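A sketch of the kind of line this produces for a simple cookie (using `http.cookiejar.Cookie` here purely for illustration; exact quoting is handled by `join_header_words`):

```python
from http.cookiejar import Cookie

c = Cookie(version=0, name="session", value="abc123", port=None,
           port_specified=False, domain="example.com", domain_specified=True,
           domain_initial_dot=False, path="/", path_specified=True,
           secure=False, expires=None, discard=True, comment=None,
           comment_url=None, rest={})
print(lwp_cookie_str(c))
# roughly: session=abc123; path="/"; domain="example.com"; path_spec; discard; version=0
```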
def get_draft_secret_key():
"""
Return the secret key used to generate draft mode HMACs. It will be
randomly generated on first access. Existing draft URLs can be invalidated
by deleting or updating the ``DRAFT_SECRET_KEY`` setting.
"""
# TODO: Per URL secret keys, so we can invalidate draft URLs for individual
# pages. For example, on publish.
draft_secret_key, created = Text.objects.get_or_create(
name='DRAFT_SECRET_KEY',
defaults=dict(
value=get_random_string(50),
))
return draft_secret_key.value | Return the secret key used to generate draft mode HMACs. It will be
randomly generated on first access. Existing draft URLs can be invalidated
by deleting or updating the ``DRAFT_SECRET_KEY`` setting. |
def on_patch(self, req, resp, handler=None, **kwargs):
"""Respond on POST HTTP request assuming resource creation flow.
This request handler assumes that POST requests are associated with
resource creation. Thus default flow for such requests is:
* Create new resource instances and prepare their representation by
calling its bulk creation method handler.
* Set response status code to ``201 Created``.
**Note:** this handler does not set ``Location`` header by default as
it would be valid only for single resource creation.
Args:
req (falcon.Request): request object instance.
resp (falcon.Response): response object instance to be modified
handler (method): creation method handler to be called. Defaults
to ``self.create``.
**kwargs: additional keyword arguments retrieved from url template.
"""
self.handle(
handler or self.create_bulk, req, resp, **kwargs
)
resp.status = falcon.HTTP_CREATED | Respond on PATCH HTTP request assuming bulk resource creation flow.
This request handler assumes that PATCH requests are associated with
bulk resource creation. Thus the default flow for such requests is:
* Create new resource instances and prepare their representation by
calling its bulk creation method handler.
* Set response status code to ``201 Created``.
**Note:** this handler does not set ``Location`` header by default as
it would be valid only for single resource creation.
Args:
req (falcon.Request): request object instance.
resp (falcon.Response): response object instance to be modified
handler (method): creation method handler to be called. Defaults
to ``self.create``.
**kwargs: additional keyword arguments retrieved from url template. |
def decrypt(source, dest=None, passphrase=None):
"""Attempts to decrypt a file"""
if not os.path.exists(source):
raise CryptoritoError("Encrypted file %s not found" % source)
cmd = [gnupg_bin(), gnupg_verbose(), "--decrypt", gnupg_home(),
passphrase_file(passphrase)]
if dest:
cmd.append(["--output", dest])
cmd.append([source])
stderr_output(flatten(cmd))
return True | Attempts to decrypt a file |
def _notify_fn(self):
"""The notify thread function."""
self._notifyrunning = True
while self._notifyrunning:
try:
with IHCController._mutex:
# Are there are any new ids to be added?
if self._newnotifyids:
self.client.enable_runtime_notifications(
self._newnotifyids)
self._newnotifyids = []
changes = self.client.wait_for_resource_value_changes()
if changes is False:
self.re_authenticate(True)
continue
for ihcid in changes:
value = changes[ihcid]
if ihcid in self._ihcevents:
for callback in self._ihcevents[ihcid]:
callback(ihcid, value)
except Exception as exp:
self.re_authenticate(True) | The notify thread function. |
def _setsetting(setting, default):
"""Dynamically sets the variable named in `setting`
This method uses `_getsetting()` to either fetch the setting from Django's
settings module, or else fallback to the default value; it then sets a
variable in this module with the returned value.
"""
value = _getsetting(setting, default)
setattr(_self, setting, value) | Dynamically sets the variable named in `setting`
This method uses `_getsetting()` to either fetch the setting from Django's
settings module, or else fallback to the default value; it then sets a
variable in this module with the returned value. |
def get_child_values(parent, names):
""" return a list of values for the specified child fields. If field not in Element then replace with nan. """
vals = []
for name in names:
if parent.hasElement(name):
vals.append(XmlHelper.as_value(parent.getElement(name)))
else:
vals.append(np.nan)
return vals | Return a list of values for the specified child fields. If a field is not in the Element, replace it with NaN. |
def keep(self, diff):
""" Mark this diff (or volume) to be kept in path. """
self._keepVol(diff.toVol)
self._keepVol(diff.fromVol) | Mark this diff (or volume) to be kept in path. |
def authenticate_credentials(self, payload):
"""
Returns an active user that matches the payload's user id and email.
"""
User = get_user_model() # noqa
username = jwt_get_username_from_payload_handler(payload)
if not username:
msg = _('Invalid payload.')
raise exceptions.AuthenticationFailed(msg)
try:
user = User.objects.get(email=username)
except User.DoesNotExist:
msg = _('Invalid signature.')
raise exceptions.AuthenticationFailed(msg)
return user | Returns an active user that matches the payload's user id and email. |
def __load_child_classes(self, ac: AssetClass):
""" Loads child classes/stocks """
# load child classes for ac
db = self.__get_session()
entities = (
db.query(dal.AssetClass)
.filter(dal.AssetClass.parentid == ac.id)
.order_by(dal.AssetClass.sortorder)
.all()
)
# map
for entity in entities:
child_ac = self.__map_entity(entity)
# depth
child_ac.depth = ac.depth + 1
ac.classes.append(child_ac)
# Add to index
self.model.asset_classes.append(child_ac)
self.__load_child_classes(child_ac) | Loads child classes/stocks |
def dump(self, force=False):
"""
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
"""
self._contents = self.chosen.dump(force=force)
if self._header is None or force:
self._header = b''
if self.explicit is not None:
for class_, tag in self.explicit:
self._header = _dump_header(class_, 1, tag, self._header + self._contents) + self._header
return self._header + self._contents | Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value |
def main():
"""Run the bot."""
args = parser.parse_args()
initialize_logging(args)
# Allow expansion of paths even if the shell doesn't do it
config_path = os.path.abspath(os.path.expanduser(args.config))
client = kitnirc.client.Client()
controller = kitnirc.modular.Controller(client, config_path)
# Make sure the configuration file is loaded so we can check for
# connection information.
controller.load_config()
def config_or_none(section, value, integer=False, boolean=False):
"""Helper function to get values that might not be set."""
if controller.config.has_option(section, value):
if integer:
return controller.config.getint(section, value)
elif boolean:
return controller.config.getboolean(section, value)
return controller.config.get(section, value)
return None
# If host isn't specified on the command line, try from config file
host = args.host or config_or_none("server", "host")
if not host:
parser.error(
"IRC host must be specified if not in config file.")
# If nick isn't specified on the command line, try from config file
nick = args.nick or config_or_none("server", "nick")
if not nick:
parser.error(
"Nick must be specified if not in config file.")
# KitnIRC's default client will use port 6667 if nothing else is specified,
# but since we want to potentially specify something else, we add that
# fallback here ourselves.
port = args.port or config_or_none("server", "port", integer=True) or 6667
ssl = args.ssl or config_or_none("server", "ssl", boolean=True)
password = args.password or config_or_none("server", "password")
username = args.username or config_or_none("server", "username") or nick
realname = args.realname or config_or_none("server", "realname") or username
controller.start()
client.connect(
nick,
host=host,
port=port,
username=username,
realname=realname,
password=password,
ssl=ssl,
)
try:
client.run()
except KeyboardInterrupt:
client.disconnect() | Run the bot. |
def throttle( self, wait=True ):
"""
If the wait parameter is True, this method returns True after suspending the current
thread as necessary to ensure that no less than the configured minimum interval passed
since the most recent time an invocation of this method returned True in any thread.
If the wait parameter is False, this method immediately returns True if at least the
configured minimum interval has passed since the most recent time this method returned
True in any thread, or False otherwise.
"""
# I think there is a race in Thread.start(), hence the lock
with self.thread_start_lock:
if not self.thread_started:
self.thread.start( )
self.thread_started = True
return self.semaphore.acquire( blocking=wait ) | If the wait parameter is True, this method returns True after suspending the current
thread as necessary to ensure that no less than the configured minimum interval passed
since the most recent time an invocation of this method returned True in any thread.
If the wait parameter is False, this method immediately returns True if at least the
configured minimum interval has passed since the most recent time this method returned
True in any thread, or False otherwise. |
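A hypothetical usage sketch of both modes (`throttler` stands in for an instance of the class that defines `throttle`):

```python
def fetch_all(urls, throttler, fetch):
    for url in urls:
        throttler.throttle(wait=True)    # block until the interval has elapsed
        fetch(url)

def try_fetch(url, throttler, fetch):
    if throttler.throttle(wait=False):   # never blocks
        fetch(url)
    # otherwise skip this attempt instead of waiting
```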
def confirm_user_avatar(self, user, cropping_properties):
"""Confirm the temporary avatar image previously uploaded with the specified cropping.
After a successful registration with :py:meth:`create_temp_user_avatar`, use this method to confirm the avatar for
use. The final avatar can be a subarea of the uploaded image, which is customized with the
``cropping_properties``: the return value of :py:meth:`create_temp_user_avatar` should be used for this
argument.
:param user: the user to confirm the avatar for
:type user: str
:param cropping_properties: a dict of cropping properties from :py:meth:`create_temp_user_avatar`
:type cropping_properties: Dict[str,Any]
"""
data = cropping_properties
url = self._get_url('user/avatar')
r = self._session.post(url, params={'username': user},
data=json.dumps(data))
return json_loads(r) | Confirm the temporary avatar image previously uploaded with the specified cropping.
After a successful registration with :py:meth:`create_temp_user_avatar`, use this method to confirm the avatar for
use. The final avatar can be a subarea of the uploaded image, which is customized with the
``cropping_properties``: the return value of :py:meth:`create_temp_user_avatar` should be used for this
argument.
:param user: the user to confirm the avatar for
:type user: str
:param cropping_properties: a dict of cropping properties from :py:meth:`create_temp_user_avatar`
:type cropping_properties: Dict[str,Any] |
def binormalize(A, tol=1e-5, maxiter=10):
"""Binormalize matrix A. Attempt to create unit l_1 norm rows.
Parameters
----------
A : csr_matrix
sparse matrix (n x n)
tol : float
tolerance
x : array
guess at the diagonal
maxiter : int
maximum number of iterations to try
Returns
-------
C : csr_matrix
diagonally scaled A, C=DAD
Notes
-----
- Goal: Scale A so that l_1 norm of the rows are equal to 1:
- B = DAD
- want row sum of B = 1
- easily done with tol=0 if B=DA, but this is not symmetric
- algorithm is O(N log (1.0/tol))
Examples
--------
>>> from pyamg.gallery import poisson
>>> from pyamg.classical import binormalize
>>> A = poisson((10,),format='csr')
>>> C = binormalize(A)
References
----------
.. [1] Livne, Golub, "Scaling by Binormalization"
Tech Report SCCM-03-12, SCCM, Stanford, 2003
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.3.1679
"""
if not isspmatrix(A):
raise TypeError('expecting sparse matrix A')
if A.dtype == complex:
raise NotImplementedError('complex A not implemented')
n = A.shape[0]
it = 0
x = np.ones((n, 1)).ravel()
# 1.
B = A.multiply(A).tocsc() # power(A,2) inconsistent in numpy, scipy.sparse
d = B.diagonal().ravel()
# 2.
beta = B * x
betabar = (1.0/n) * np.dot(x, beta)
stdev = rowsum_stdev(x, beta)
# 3
while stdev > tol and it < maxiter:
for i in range(0, n):
# solve equation x_i, keeping x_j's fixed
# see equation (12)
c2 = (n-1)*d[i]
c1 = (n-2)*(beta[i] - d[i]*x[i])
c0 = -d[i]*x[i]*x[i] + 2*beta[i]*x[i] - n*betabar
if (-c0 < 1e-14):
print('warning: A nearly un-binormalizable...')
return A
else:
# see equation (12)
xnew = (2*c0)/(-c1 - np.sqrt(c1*c1 - 4*c0*c2))
dx = xnew - x[i]
# here we assume input matrix is symmetric since we grab a row of B
# instead of a column
ii = B.indptr[i]
iii = B.indptr[i+1]
dot_Bcol = np.dot(x[B.indices[ii:iii]], B.data[ii:iii])
betabar = betabar + (1.0/n)*dx*(dot_Bcol + beta[i] + d[i]*dx)
beta[B.indices[ii:iii]] += dx*B.data[ii:iii]
x[i] = xnew
stdev = rowsum_stdev(x, beta)
it += 1
# rescale for unit 2-norm
d = np.sqrt(x)
D = spdiags(d.ravel(), [0], n, n)
C = D * A * D
C = C.tocsr()
beta = C.multiply(C).sum(axis=1)
scale = np.sqrt((1.0/n) * np.sum(beta))
return (1/scale)*C | Binormalize matrix A. Attempt to create unit l_1 norm rows.
Parameters
----------
A : csr_matrix
sparse matrix (n x n)
tol : float
tolerance
x : array
guess at the diagonal
maxiter : int
maximum number of iterations to try
Returns
-------
C : csr_matrix
diagonally scaled A, C=DAD
Notes
-----
- Goal: Scale A so that l_1 norm of the rows are equal to 1:
- B = DAD
- want row sum of B = 1
- easily done with tol=0 if B=DA, but this is not symmetric
- algorithm is O(N log (1.0/tol))
Examples
--------
>>> from pyamg.gallery import poisson
>>> from pyamg.classical import binormalize
>>> A = poisson((10,),format='csr')
>>> C = binormalize(A)
References
----------
.. [1] Livne, Golub, "Scaling by Binormalization"
Tech Report SCCM-03-12, SCCM, Stanford, 2003
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.3.1679 |
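`binormalize` relies on a helper `rowsum_stdev(x, beta)` that is not shown in this entry; based on the scaling criterion in the Livne-Golub reference, it plausibly computes the relative standard deviation of the scaled row sums, roughly as sketched here.

import numpy as np

def rowsum_stdev(x, beta):
    # Sketch (assumption): relative std dev of x_k * beta_k around their mean betabar,
    # which is the convergence measure tested in the while-loop above.
    n = x.size
    betabar = (1.0 / n) * np.dot(x, beta)
    stdev = np.sqrt((1.0 / n) * np.sum((x * beta - betabar) ** 2))
    return stdev / betabar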
def update(self):
"""Cache the list into the data section of the record"""
from ambry.orm.exc import NotFoundError
from requests.exceptions import ConnectionError, HTTPError
from boto.exception import S3ResponseError
d = {}
try:
for k, v in self.list(full=True):
if not v:
continue
d[v['vid']] = {
'vid': v['vid'],
'vname': v.get('vname'),
'id': v.get('id'),
'name': v.get('name')
}
self.data['list'] = d
except (NotFoundError, ConnectionError, S3ResponseError, HTTPError) as e:
raise RemoteAccessError("Failed to update {}: {}".format(self.short_name, e)) | Cache the list into the data section of the record |
def vatm(model,
x,
logits,
eps,
num_iterations=1,
xi=1e-6,
clip_min=None,
clip_max=None,
scope=None):
"""
Tensorflow implementation of the perturbation method used for virtual
adversarial training: https://arxiv.org/abs/1507.00677
:param model: the model which returns the network unnormalized logits
:param x: the input placeholder
:param logits: the model's unnormalized output tensor (the input to
the softmax layer)
:param eps: the epsilon (input variation parameter)
:param num_iterations: the number of iterations
:param xi: the finite difference parameter
:param clip_min: optional parameter that can be used to set a minimum
value for components of the example returned
:param clip_max: optional parameter that can be used to set a maximum
value for components of the example returned
:param scope: optional name scope for the TensorFlow operations
:return: a tensor for the adversarial example
"""
with tf.name_scope(scope, "virtual_adversarial_perturbation"):
d = tf.random_normal(tf.shape(x), dtype=tf_dtype)
for _ in range(num_iterations):
d = xi * utils_tf.l2_batch_normalize(d)
logits_d = model.get_logits(x + d)
kl = utils_tf.kl_with_logits(logits, logits_d)
Hd = tf.gradients(kl, d)[0]
d = tf.stop_gradient(Hd)
d = eps * utils_tf.l2_batch_normalize(d)
adv_x = x + d
if (clip_min is not None) and (clip_max is not None):
adv_x = tf.clip_by_value(adv_x, clip_min, clip_max)
return adv_x | Tensorflow implementation of the perturbation method used for virtual
adversarial training: https://arxiv.org/abs/1507.00677
:param model: the model which returns the network unnormalized logits
:param x: the input placeholder
:param logits: the model's unnormalized output tensor (the input to
the softmax layer)
:param eps: the epsilon (input variation parameter)
:param num_iterations: the number of iterations
:param xi: the finite difference parameter
:param clip_min: optional parameter that can be used to set a minimum
value for components of the example returned
:param clip_max: optional parameter that can be used to set a maximum
value for components of the example returned
:param scope: optional name scope for the TensorFlow operations
:return: a tensor for the adversarial example |
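A hypothetical usage sketch in TensorFlow 1.x graph mode, matching the style of the code above; `model` is assumed to expose `get_logits` as the function requires, and the input shape is illustrative.

# Hypothetical usage: build VAT adversarial examples for an MNIST-shaped placeholder.
x = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))
logits = model.get_logits(x)
adv_x = vatm(model, x, logits, eps=2.0, num_iterations=1, clip_min=0.0, clip_max=1.0)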
def register_provider(self, provider):
'''
Register a :class:`skosprovider.providers.VocabularyProvider`.
:param skosprovider.providers.VocabularyProvider provider: The provider
to register.
:raises RegistryException: A provider with this id or uri has already
been registered.
'''
if provider.get_vocabulary_id() in self.providers:
raise RegistryException(
'A provider with this id has already been registered.'
)
self.providers[provider.get_vocabulary_id()] = provider
if provider.concept_scheme.uri in self.concept_scheme_uri_map:
raise RegistryException(
'A provider with URI %s has already been registered.' % provider.concept_scheme.uri
)
self.concept_scheme_uri_map[provider.concept_scheme.uri] = provider.get_vocabulary_id() | Register a :class:`skosprovider.providers.VocabularyProvider`.
:param skosprovider.providers.VocabularyProvider provider: The provider
to register.
:raises RegistryException: A provider with this id or uri has already
been registered. |
def _fill_schemas_from_definitions(self, obj):
"""At first create schemas without 'AllOf'
:param obj:
:return: None
"""
if obj.get('definitions'):
self.schemas.clear()
all_of_stack = []
for name, definition in obj['definitions'].items():
if 'allOf' in definition:
all_of_stack.append((name, definition))
else:
self.schemas.create_schema(
definition, name, SchemaTypes.DEFINITION, root=self)
while all_of_stack:
name, definition = all_of_stack.pop(0)
self.schemas.create_schema(
definition, name, SchemaTypes.DEFINITION, root=self) | At first create schemas without 'AllOf'
:param obj:
:return: None |
def fa(arr, t, dist='norm', mode='high'):
"""Return the value corresponding to the given return period.
Parameters
----------
arr : xarray.DataArray
Maximized/minimized input data with a `time` dimension.
t : int or sequence
Return period. The period depends on the resolution of the input data. If the input array's resolution is
yearly, then the return period is in years.
dist : str
Name of the univariate distribution, such as beta, expon, genextreme, gamma, gumbel_r, lognorm, norm
(see scipy.stats).
mode : {'min', 'max'}
Whether we are looking for a probability of exceedance (max) or a probability of non-exceedance (min).
Returns
-------
xarray.DataArray
An array of values with a 1/t probability of exceedance (if mode=='max').
"""
t = np.atleast_1d(t)
# Get the distribution
dc = get_dist(dist)
# Fit the parameters of the distribution
p = fit(arr, dist)
# Create a lambda function to facilitate passing arguments to dask. There is probably a better way to do this.
if mode in ['max', 'high']:
def func(x):
return dc.isf(1./t, *x)
elif mode in ['min', 'low']:
def func(x):
return dc.ppf(1./t, *x)
else:
raise ValueError("mode `{}` should be either 'max' or 'min'".format(mode))
data = dask.array.apply_along_axis(func, p.get_axis_num('dparams'), p)
# Create coordinate for the return periods
coords = dict(p.coords.items())
coords.pop('dparams')
coords['return_period'] = t
# Create dimensions
dims = list(p.dims)
dims.remove('dparams')
dims.insert(0, u'return_period')
# TODO: add time and time_bnds coordinates (Low will work on this)
# time.attrs['climatology'] = 'climatology_bounds'
# coords['time'] =
# coords['climatology_bounds'] =
out = xr.DataArray(data=data, coords=coords, dims=dims)
out.attrs = p.attrs
out.attrs['standard_name'] = '{0} quantiles'.format(dist)
out.attrs['long_name'] = '{0} return period values for {1}'.format(dist, getattr(arr, 'standard_name', ''))
out.attrs['cell_methods'] = (out.attrs.get('cell_methods', '') + ' dparams: ppf').strip()
out.attrs['units'] = arr.attrs.get('units', '')
out.attrs['mode'] = mode
out.attrs['history'] = out.attrs.get('history', '') + "Compute values corresponding to return periods."
return out | Return the value corresponding to the given return period.
Parameters
----------
arr : xarray.DataArray
Maximized/minimized input data with a `time` dimension.
t : int or sequence
Return period. The period depends on the resolution of the input data. If the input array's resolution is
yearly, then the return period is in years.
dist : str
Name of the univariate distribution, such as beta, expon, genextreme, gamma, gumbel_r, lognorm, norm
(see scipy.stats).
mode : {'min', 'max'}
Whether we are looking for a probability of exceedance (max) or a probability of non-exceedance (min).
Returns
-------
xarray.DataArray
An array of values with a 1/t probability of exceedance (if mode=='max'). |
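A hypothetical usage sketch; `annual_max` stands for an `xarray.DataArray` of yearly maxima with a `time` dimension and is not defined in this entry.

# Hypothetical usage: 2-, 20- and 100-year return values fitted with a GEV distribution.
ret = fa(annual_max, t=[2, 20, 100], dist='genextreme', mode='max')
print(ret.sel(return_period=100))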
def _post(self, url, data=None, json=None, params=None, headers=None):
"""Wraps a POST request with a url check"""
url = self.clean_url(url)
response = requests.post(url, data=data, json=json, params=params,
headers=headers, timeout=self.timeout,
verify=self.verify)
return response | Wraps a POST request with a url check |
def list_relations(self):
''' list every relation in the database as (src, relation, dst) '''
for node in self.iter_nodes():
for relation, target in self.relations_of(node.obj, True):
yield node.obj, relation, target | list every relation in the database as (src, relation, dst) |
def get_attribute(self, attribute, value=None, features=False):
"""This returns a list of GFF objects (or GFF Features) with the given attribute and if supplied, those
attributes with the specified value
:param attribute: The 'info' field attribute we are querying
:param value: Optional keyword, only return attributes equal to this value
:param features: Optional keyword, return GFF Features instead of GFF Objects
:return: A list of GFF objects (or GFF features if requested)
"""
if attribute in self.filters:
valid_gff_objects = self.fast_attributes[attribute] if not value else\
[i for i in self.fast_attributes[attribute] if i.attributes.get(attribute, False) == value]
if features:
valid_ids = [gff_object.attributes.get(self.id_tag, None) for gff_object in valid_gff_objects]
return [self.feature_map[gff_id] for gff_id in valid_ids if gff_id]
else:
return valid_gff_objects
else:
valid_gff_objects = [gff_object for gff_feature in self.feature_map.values()
for gff_object in gff_feature.features
if gff_object.attributes.get(attribute, False)]
valid_gff_objects = valid_gff_objects if not value else [gff_object for gff_object in valid_gff_objects
if gff_object.attributes[attribute] == value]
if features:
valid_ids = [gff_object.attributes.get(self.id_tag, None) for gff_object in valid_gff_objects]
return [self.feature_map[gff_id] for gff_id in valid_ids if gff_id]
else:
return valid_gff_objects | This returns a list of GFF objects (or GFF Features) with the given attribute and if supplied, those
attributes with the specified value
:param attribute: The 'info' field attribute we are querying
:param value: Optional keyword, only return attributes equal to this value
:param features: Optional keyword, return GFF Features instead of GFF Objects
:return: A list of GFF objects (or GFF features if requested) |
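A hypothetical usage sketch; `gff` stands for an instance of the parser class this method belongs to, and the attribute name is illustrative.

# Hypothetical usage: fetch GFF objects whose 'gene_biotype' attribute equals 'protein_coding',
# then the same selection returned as GFF Features.
objects = gff.get_attribute('gene_biotype', value='protein_coding')
features = gff.get_attribute('gene_biotype', value='protein_coding', features=True)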
def tcp_ping(
task: Task, ports: List[int], timeout: int = 2, host: Optional[str] = None
) -> Result:
"""
Tests connection to a tcp port and tries to establish a three way
handshake. To be used for network discovery or testing.
Arguments:
ports (list of int): tcp ports to ping
timeout (int, optional): defaults to 2
host (string, optional): defaults to ``hostname``
Returns:
Result object with the following attributes set:
* result (``dict``): Contains port numbers as keys with True/False as values
"""
if isinstance(ports, int):
ports = [ports]
if isinstance(ports, list):
if not all(isinstance(port, int) for port in ports):
raise ValueError("Invalid value for 'ports'")
else:
raise ValueError("Invalid value for 'ports'")
host = host or task.host.hostname
result = {}
for port in ports:
s = socket.socket()
s.settimeout(timeout)
try:
status = s.connect_ex((host, port))
if status == 0:
connection = True
else:
connection = False
except (socket.gaierror, socket.timeout, socket.error):
connection = False
finally:
s.close()
result[port] = connection
return Result(host=task.host, result=result) | Tests connection to a tcp port and tries to establish a three way
handshake. To be used for network discovery or testing.
Arguments:
ports (list of int): tcp ports to ping
timeout (int, optional): defaults to 2
host (string, optional): defaults to ``hostname``
Returns:
Result object with the following attributes set:
* result (``dict``): Contains port numbers as keys with True/False as values |
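The core reachability check used above can also be sketched as a small standalone function, independent of Nornir; the function name and the probed host/port are illustrative.

import socket

def port_is_open(host: str, port: int, timeout: float = 2.0) -> bool:
    # connect_ex returns 0 when the three-way handshake succeeds.
    s = socket.socket()
    s.settimeout(timeout)
    try:
        return s.connect_ex((host, port)) == 0
    except (socket.gaierror, socket.timeout, socket.error):
        return False
    finally:
        s.close()

print(port_is_open('127.0.0.1', 22))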
def getAccounts(self):
""" Return all accounts installed in the wallet database
"""
pubkeys = self.getPublicKeys()
accounts = []
for pubkey in pubkeys:
# Filter those keys not for our network
if pubkey[: len(self.prefix)] == self.prefix:
accounts.extend(self.getAccountsFromPublicKey(pubkey))
return accounts | Return all accounts installed in the wallet database |
def visit_EnumeratorList(self, node):
"""Replace enumerator expressions with '...' stubs."""
for type, enum in node.children():
if enum.value is None:
pass
elif isinstance(enum.value, (c_ast.BinaryOp, c_ast.UnaryOp)):
enum.value = c_ast.Constant("int", "...")
elif hasattr(enum.value, "type"):
enum.value = c_ast.Constant(enum.value.type, "...") | Replace enumerator expressions with '...' stubs. |
def pencil3():
'''Install or update latest Pencil version 3, a GUI prototyping tool.
While it is the newer one and the GUI is more fancy, it is the "more beta"
version of pencil. For example, displaying an SVG export may fail from
within a reveal.js presentation.
More info:
Homepage: http://pencil.evolus.vn/Next.html
github repo: https://github.com/evolus/pencil
'''
repo_name = 'pencil3'
repo_dir = flo('~/repos/{repo_name}')
print_msg('## fetch latest pencil\n')
checkup_git_repo_legacy(url='https://github.com/evolus/pencil.git',
name=repo_name)
run(flo('cd {repo_dir} && npm install'), msg='\n## install npms\n')
install_user_command_legacy('pencil3', pencil3_repodir=repo_dir)
print_msg('\nNow You can start pencil version 3 with this command:\n\n'
' pencil3') | Install or update latest Pencil version 3, a GUI prototyping tool.
While it is the newer one and the GUI is more fancy, it is the "more beta"
version of pencil. For example, displaying an SVG export may fail from
within a reveal.js presentation.
More info:
Homepage: http://pencil.evolus.vn/Next.html
github repo: https://github.com/evolus/pencil |
def open_with_external_spyder(self, text):
"""Load file in external Spyder's editor, if available
This method is used only for embedded consoles
(could also be useful if we ever implement the magic %edit command)"""
match = get_error_match(to_text_string(text))
if match:
fname, lnb = match.groups()
builtins.open_in_spyder(fname, int(lnb)) | Load file in external Spyder's editor, if available
This method is used only for embedded consoles
(could also be useful if we ever implement the magic %edit command) |
def value(self):
"""
Return the current evaluation of a condition statement
"""
return ''.join(map(str, self.evaluate(self.trigger.user))) | Return the current evaluation of a condition statement |
def sync(self):
"""Retrieve lights from ElkM1"""
for i in range(4):
self.elk.send(ps_encode(i))
self.get_descriptions(TextDescriptions.LIGHT.value) | Retrieve lights from ElkM1 |
def new_table(self, name, add_id=True, **kwargs):
"""
Create a new table, if it does not exist, or update an existing table if it does
:param name: Table name
:param add_id: If True, add an id field ( default is True )
:param kwargs: Other options passed to table object
:return:
"""
return self.dataset.new_table(name=name, add_id=add_id, **kwargs) | Create a new table, if it does not exist, or update an existing table if it does
:param name: Table name
:param add_id: If True, add an id field ( default is True )
:param kwargs: Other options passed to table object
:return: |
def bsp_father(node: tcod.bsp.BSP) -> Optional[tcod.bsp.BSP]:
"""
.. deprecated:: 2.0
Use :any:`BSP.parent` instead.
"""
return node.parent | .. deprecated:: 2.0
Use :any:`BSP.parent` instead. |
def _repr_pretty_(self, p, cycle):
"""method that defines ``Struct``'s pretty printing rules for iPython
Args:
p (IPython.lib.pretty.RepresentationPrinter): pretty printer object
cycle (bool): is ``True`` if pretty detected a cycle
"""
if cycle:
p.text('Struct(...)')
else:
with p.group(7, 'Struct(', ')'):
p.pretty(self._asdict()) | method that defines ``Struct``'s pretty printing rules for iPython
Args:
p (IPython.lib.pretty.RepresentationPrinter): pretty printer object
cycle (bool): is ``True`` if pretty detected a cycle |
def retry_on_integrity_error(self):
"""Re-raise :class:`~sqlalchemy.exc.IntegrityError` as `DBSerializationError`.
This is mainly useful to handle race conditions in atomic
blocks. For example, even if prior to a database INSERT we
have verified that there is no existing row with the given
primary key, we still may get an
:class:`~sqlalchemy.exc.IntegrityError` if another transaction
inserted a row with this primary key in the meantime. But if
we do (within an atomic block)::
with db.retry_on_integrity_error():
db.session.add(instance)
then if the before-mentioned race condition occurs,
`DBSerializationError` will be raised instead of
:class:`~sqlalchemy.exc.IntegrityError`, so that the
transaction will be retried (by the atomic block), and the
second time our prior-to-INSERT check will correctly detect a
primary key collision.
Note: :meth:`retry_on_integrity_error` triggers a session
flush.
"""
session = self.session
assert session.info.get(_ATOMIC_FLAG_SESSION_INFO_KEY), \
'Calls to "retry_on_integrity_error" must be wrapped in atomic block.'
session.flush()
try:
yield
session.flush()
except IntegrityError:
raise DBSerializationError | Re-raise :class:`~sqlalchemy.exc.IntegrityError` as `DBSerializationError`.
This is mainly useful to handle race conditions in atomic
blocks. For example, even if prior to a database INSERT we
have verified that there is no existing row with the given
primary key, we still may get an
:class:`~sqlalchemy.exc.IntegrityError` if another transaction
inserted a row with this primary key in the meantime. But if
we do (within an atomic block)::
with db.retry_on_integrity_error():
db.session.add(instance)
then if the before-mentioned race condition occurs,
`DBSerializationError` will be raised instead of
:class:`~sqlalchemy.exc.IntegrityError`, so that the
transaction will be retried (by the atomic block), and the
second time our prior-to-INSERT check will correctly detect a
primary key collision.
Note: :meth:`retry_on_integrity_error` triggers a session
flush. |
def get_selection(self):
"""
Read text from the X selection
Usage: C{clipboard.get_selection()}
@return: text contents of the mouse selection
@rtype: C{str}
@raise Exception: if no text was found in the selection
"""
Gdk.threads_enter()
text = self.selection.wait_for_text()
Gdk.threads_leave()
if text is not None:
return text
else:
raise Exception("No text found in X selection") | Read text from the X selection
Usage: C{clipboard.get_selection()}
@return: text contents of the mouse selection
@rtype: C{str}
@raise Exception: if no text was found in the selection |
def _add_item(self, dim_vals, data, sort=True, update=True):
"""
Adds item to the data, applying dimension types and ensuring
key conforms to Dimension type and values.
"""
sort = sort and self.sort
if not isinstance(dim_vals, tuple):
dim_vals = (dim_vals,)
self._item_check(dim_vals, data)
# Apply dimension types
dim_types = zip([kd.type for kd in self.kdims], dim_vals)
dim_vals = tuple(v if None in [t, v] else t(v) for t, v in dim_types)
valid_vals = zip(self.kdims, dim_vals)
for dim, val in valid_vals:
if dim.values and val is not None and val not in dim.values:
raise KeyError('%s dimension value %s not in'
' specified dimension values.' % (dim, repr(val)))
# Updates nested data structures rather than simply overriding them.
if (update and (dim_vals in self.data)
and isinstance(self.data[dim_vals], (MultiDimensionalMapping, OrderedDict))):
self.data[dim_vals].update(data)
else:
self.data[dim_vals] = data
if sort:
self._resort() | Adds item to the data, applying dimension types and ensuring
key conforms to Dimension type and values. |
def _validate(self, msg):
"""Validate an Enum value.
Raises:
TypeError if the value is not an instance of self._message_type.
"""
if not isinstance(msg, self._message_type):
raise TypeError('Expected a %s instance for %s property' %
(self._message_type.__name__,
self._code_name or self._name))
Raises:
TypeError if the value is not an instance of self._message_type. |
def _makeColorableInstance(self, clazz, args, kwargs):
"""
Create an object, if fill, stroke or strokewidth
is not specified, get them from the _canvas
:param clazz:
:param args:
:param kwargs:
:return:
"""
kwargs = dict(kwargs)
fill = kwargs.get('fill', self._canvas.fillcolor)
if not isinstance(fill, Color):
fill = Color(fill, mode='rgb', color_range=1)
kwargs['fill'] = fill
stroke = kwargs.get('stroke', self._canvas.strokecolor)
if not isinstance(stroke, Color):
stroke = Color(stroke, mode='rgb', color_range=1)
kwargs['stroke'] = stroke
kwargs['strokewidth'] = kwargs.get('strokewidth', self._canvas.strokewidth)
inst = clazz(self, *args, **kwargs)
return inst | Create an object, if fill, stroke or strokewidth
is not specified, get them from the _canvas
:param clazz:
:param args:
:param kwargs:
:return: |
def GpuUsage(**kargs):
""" Get the current GPU usage of available GPUs """
usage = (False, None)
gpu_status = {'vent_usage': {'dedicated': [], 'mem_mb': {}}}
path_dirs = PathDirs(**kargs)
path_dirs.host_config()
template = Template(template=path_dirs.cfg_file)
# get running jobs using gpus
try:
d_client = docker.from_env()
c = d_client.containers.list(all=False,
filters={'label': 'vent-plugin'})
for container in c:
if ('vent.gpu' in container.attrs['Config']['Labels'] and
container.attrs['Config']['Labels']['vent.gpu'] == 'yes'):
device = container.attrs['Config']['Labels']['vent.gpu.device']
if ('vent.gpu.dedicated' in container.attrs['Config']['Labels'] and
container.attrs['Config']['Labels']['vent.gpu.dedicated'] == 'yes'):
gpu_status['vent_usage']['dedicated'].append(device)
elif 'vent.gpu.mem_mb' in container.attrs['Config']['Labels']:
if device not in gpu_status['vent_usage']['mem_mb']:
gpu_status['vent_usage']['mem_mb'][device] = 0
gpu_status['vent_usage']['mem_mb'][device] += int(
container.attrs['Config']['Labels']['vent.gpu.mem_mb'])
except Exception as e: # pragma: no cover
logger.error('Could not get running jobs ' + str(e))
port = '3476'
# default docker gateway
host = '172.17.0.1'
result = template.option('nvidia-docker-plugin', 'port')
if result[0]:
port = result[1]
result = template.option('nvidia-docker-plugin', 'host')
if result[0]:
host = result[1]
else:
try:
# now just requires ip, ifconfig
route = check_output(('ip', 'route')).decode('utf-8').split('\n')
default = ''
# grab the default network device.
for device in route:
if 'default' in device:
default = device.split()[4]
break
# grab the IP address for the default device
ip_addr = check_output(('ifconfig', default)).decode('utf-8')
ip_addr = ip_addr.split('\n')[1].split()[1]
host = ip_addr
except Exception as e: # pragma: no cover
logger.error('Something with the ip addresses'
'went wrong ' + str(e))
# have to get the info separately to determine how much memory is availabe
nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/info/json'
try:
r = requests.get(nd_url)
if r.status_code == 200:
status = r.json()
for i, device in enumerate(status['Devices']):
gm = int(round(math.log(int(device['Memory']['Global']), 2)))
gpu_status[i] = {'global_memory': 2**gm,
'cores': device['Cores']}
else:
usage = (False, 'Unable to get GPU usage request error code: ' +
str(r.status_code))
except Exception as e: # pragma: no cover
usage = (False, 'Error: ' + str(e))
# get actual status of each gpu
nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/status/json'
try:
r = requests.get(nd_url)
if r.status_code == 200:
status = r.json()
for i, device in enumerate(status['Devices']):
if i not in gpu_status:
gpu_status[i] = {}
gpu_status[i]['utilization'] = device['Utilization']
gpu_status[i]['memory'] = device['Memory']
gpu_status[i]['processes'] = device['Processes']
usage = (True, gpu_status)
else:
usage = (False, 'Unable to get GPU usage request error code: ' +
str(r.status_code))
except Exception as e: # pragma: no cover
usage = (False, 'Error: ' + str(e))
return usage | Get the current GPU usage of available GPUs |
def set(self, id, translation, domain='messages'):
"""
Sets a message translation.
"""
assert isinstance(id, (str, unicode))
assert isinstance(translation, (str, unicode))
assert isinstance(domain, (str, unicode))
self.add({id: translation}, domain) | Sets a message translation. |
def workers(self, pattern=None, negate=False, stats=True):
"""Filters known workers and prints their current status.
Args:
Filter args:
pattern (Optional[str]): a pattern to filter workers
ex.: '^dispatch|^email' to filter names starting with that
or 'dispatch.*123456' to filter that exact name and number
or even '123456' to filter that exact number anywhere.
negate (bool): if True, finds tasks that do not match criteria
Display args:
stats (bool): if True shows worker stats
"""
request = clearly_pb2.FilterWorkersRequest(
workers_filter=clearly_pb2.PatternFilter(pattern=pattern or '.',
negate=negate),
)
for worker in about_time(ClearlyClient._fetched_callback, self._stub.filter_workers(request)):
ClearlyClient._display_worker(worker, stats) | Filters known workers and prints their current status.
Args:
Filter args:
pattern (Optional[str]): a pattern to filter workers
ex.: '^dispatch|^email' to filter names starting with that
or 'dispatch.*123456' to filter that exact name and number
or even '123456' to filter that exact number anywhere.
negate (bool): if True, finds tasks that do not match criteria
Display args:
stats (bool): if True shows worker stats |
def frames(self):
"""
Returns the length of a video stream in frames. Returns 0 if not a video stream.
"""
f=0
if self.isVideo() or self.isAudio():
if self.__dict__['nb_frames']:
try:
f=int(self.__dict__['nb_frames'])
except Exception as e:
print "None integer frame count"
return f | Returns the length of a video stream in frames. Returns 0 if not a video stream. |
def get_required_status_checks(self):
"""
:calls: `GET /repos/:owner/:repo/branches/:branch/protection/required_status_checks <https://developer.github.com/v3/repos/branches>`_
:rtype: :class:`github.RequiredStatusChecks.RequiredStatusChecks`
"""
headers, data = self._requester.requestJsonAndCheck(
"GET",
self.protection_url + "/required_status_checks"
)
return github.RequiredStatusChecks.RequiredStatusChecks(self._requester, headers, data, completed=True) | :calls: `GET /repos/:owner/:repo/branches/:branch/protection/required_status_checks <https://developer.github.com/v3/repos/branches>`_
:rtype: :class:`github.RequiredStatusChecks.RequiredStatusChecks` |
def create_legacy_graph_tasks():
"""Create tasks to recursively parse the legacy graph."""
return [
transitive_hydrated_targets,
transitive_hydrated_target,
hydrated_targets,
hydrate_target,
find_owners,
hydrate_sources,
hydrate_bundles,
RootRule(OwnersRequest),
] | Create tasks to recursively parse the legacy graph. |
def _recomputeRecordFromKNN(self, record):
"""
returns the classified labeling of record
"""
inputs = {
"categoryIn": [None],
"bottomUpIn": self._getStateAnomalyVector(record),
}
outputs = {"categoriesOut": numpy.zeros((1,)),
"bestPrototypeIndices":numpy.zeros((1,)),
"categoryProbabilitiesOut":numpy.zeros((1,))}
# Only use points before record to classify and after the wait period.
classifier_indexes = numpy.array(
self._knnclassifier.getParameter('categoryRecencyList'))
valid_idx = numpy.where(
(classifier_indexes >= self.getParameter('trainRecords')) &
(classifier_indexes < record.ROWID)
)[0].tolist()
if len(valid_idx) == 0:
return None
self._knnclassifier.setParameter('inferenceMode', None, True)
self._knnclassifier.setParameter('learningMode', None, False)
self._knnclassifier.compute(inputs, outputs)
self._knnclassifier.setParameter('learningMode', None, True)
classifier_distances = self._knnclassifier.getLatestDistances()
valid_distances = classifier_distances[valid_idx]
if valid_distances.min() <= self._classificationMaxDist:
classifier_indexes_prev = classifier_indexes[valid_idx]
rowID = classifier_indexes_prev[valid_distances.argmin()]
indexID = numpy.where(classifier_indexes == rowID)[0][0]
category = self._knnclassifier.getCategoryList()[indexID]
return category
return None | returns the classified labeling of record |
def _GetArgsDescription(self, args_type):
"""Get a simplified description of the args_type for a flow."""
args = {}
if args_type:
for type_descriptor in args_type.type_infos:
if not type_descriptor.hidden:
args[type_descriptor.name] = {
"description": type_descriptor.description,
"default": type_descriptor.default,
"type": "",
}
if type_descriptor.type:
args[type_descriptor.name]["type"] = type_descriptor.type.__name__
return args | Get a simplified description of the args_type for a flow. |
def eventFilter(self, widget, event):
"""A filter to control the zooming and panning of the figure canvas."""
# ---- Zooming
if event.type() == QEvent.Wheel:
modifiers = QApplication.keyboardModifiers()
if modifiers == Qt.ControlModifier:
if event.angleDelta().y() > 0:
self.zoom_in()
else:
self.zoom_out()
return True
else:
return False
# ---- Panning
# Set ClosedHandCursor:
elif event.type() == QEvent.MouseButtonPress:
if event.button() == Qt.LeftButton:
QApplication.setOverrideCursor(Qt.ClosedHandCursor)
self._ispanning = True
self.xclick = event.globalX()
self.yclick = event.globalY()
# Reset Cursor:
elif event.type() == QEvent.MouseButtonRelease:
QApplication.restoreOverrideCursor()
self._ispanning = False
# Move ScrollBar:
elif event.type() == QEvent.MouseMove:
if self._ispanning:
dx = self.xclick - event.globalX()
self.xclick = event.globalX()
dy = self.yclick - event.globalY()
self.yclick = event.globalY()
scrollBarH = self.horizontalScrollBar()
scrollBarH.setValue(scrollBarH.value() + dx)
scrollBarV = self.verticalScrollBar()
scrollBarV.setValue(scrollBarV.value() + dy)
return QWidget.eventFilter(self, widget, event) | A filter to control the zooming and panning of the figure canvas. |
def build_schema(m, c_c):
'''
Build an xsd schema from a bridgepoint component.
'''
schema = ET.Element('xs:schema')
schema.set('xmlns:xs', 'http://www.w3.org/2001/XMLSchema')
global_filter = lambda selected: ooaofooa.is_global(selected)
for s_dt in m.select_many('S_DT', global_filter):
datatype = build_type(s_dt)
if datatype is not None:
schema.append(datatype)
scope_filter = lambda selected: ooaofooa.is_contained_in(selected, c_c)
for s_dt in m.select_many('S_DT', scope_filter):
datatype = build_type(s_dt)
if datatype is not None:
schema.append(datatype)
component = build_component(m, c_c)
schema.append(component)
return schema | Build an xsd schema from a bridgepoint component. |
def metadata_path(self):
"""Determine the metadata path."""
xml_name = _granule_identifier_to_xml_name(self.granule_identifier)
metadata_path = os.path.join(self.granule_path, xml_name)
try:
assert os.path.isfile(metadata_path) or \
metadata_path in self.dataset._zipfile.namelist()
except AssertionError:
raise S2ReaderIOError(
"Granule metadata XML does not exist:", metadata_path)
return metadata_path | Determine the metadata path. |
def grok_template_file(src):
"""Determine the real deal template file"""
if not src.startswith('builtin:'):
return abspath(src)
builtin = src.split(':')[1]
builtin = "templates/%s.j2" % builtin
return resource_filename(__name__, builtin) | Determine the real deal template file |
def add_options(self, parser, env=None):
"""Non-camel-case version of func name for backwards compatibility.
.. warning ::
DEPRECATED: Do not use this method,
use :meth:`options <nose.plugins.base.IPluginInterface.options>`
instead.
"""
# FIXME raise deprecation warning if wasn't called by wrapper
if env is None:
env = os.environ
try:
self.options(parser, env)
self.can_configure = True
except OptionConflictError as e:
warn("Plugin %s has conflicting option string: %s and will "
"be disabled" % (self, e), RuntimeWarning)
self.enabled = False
self.can_configure = False | Non-camel-case version of func name for backwards compatibility.
.. warning ::
DEPRECATED: Do not use this method,
use :meth:`options <nose.plugins.base.IPluginInterface.options>`
instead. |
def set_token(self):
"""Get token using Client ID/Secret credentials.
:raises: AuthenticationError if credentials invalid, or call fails.
"""
super(ServicePrincipalCredentials, self).set_token()
try:
token = self._context.acquire_token_with_client_credentials(
self.resource,
self.id,
self.secret
)
self.token = self._convert_token(token)
except adal.AdalError as err:
raise_with_traceback(AuthenticationError, "", err) | Get token using Client ID/Secret credentials.
:raises: AuthenticationError if credentials invalid, or call fails. |
def customize_form_field(self, name, field):
"""
Allows views to customize their form fields. By default, Smartmin replaces the plain textbox
date input with it's own DatePicker implementation.
"""
if isinstance(field, forms.fields.DateField) and isinstance(field.widget, forms.widgets.DateInput):
field.widget = widgets.DatePickerWidget()
field.input_formats = [field.widget.input_format[1]] + list(field.input_formats)
if isinstance(field, forms.fields.ImageField) and isinstance(field.widget, forms.widgets.ClearableFileInput):
field.widget = widgets.ImageThumbnailWidget()
return field | Allows views to customize their form fields. By default, Smartmin replaces the plain textbox
date input with it's own DatePicker implementation. |
def rename_state_fluent(name: str) -> str:
'''Returns current state fluent canonical name.
Args:
name (str): The next state fluent name.
Returns:
str: The current state fluent name.
'''
i = name.index('/')
functor = name[:i]
arity = name[i+1:]
return "{}'/{}".format(functor, arity) | Returns current state fluent canonical name.
Args:
name (str): The next state fluent name.
Returns:
str: The current state fluent name. |
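A quick check of the transformation performed above, using an illustrative fluent name:

# 'location/1' is rewritten as "location'/1": the prime is inserted after the functor,
# before the arity suffix.
print(rename_state_fluent('location/1'))   # -> location'/1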
def convert(self, request, response, data):
"""
Performs the desired formatting.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The data dictionary list returned by the
prepare() method.
:returns: A string, the results of which are the desired
conversion.
"""
result = []
for conv, datum in zip(self.conversions, data):
# Only include conversion if it's allowed
if conv.modifier.accept(response.status_code):
result.append(conv.convert(request, response, datum))
else:
result.append('-')
return ''.join(result) | Performs the desired formatting.
:param request: The webob Request object describing the
request.
:param response: The webob Response object describing the
response.
:param data: The data dictionary list returned by the
prepare() method.
:returns: A string, the results of which are the desired
conversion. |
def update_detail(self, request):
"""
:param request: an apiv2 request object
:return: request if successful with entities set on request
"""
entity = request.context_params[self.detail_property_name]
updated_entity = self.update_entity(
request,
entity, **request.context_params['data'])
request.context_params[self.updated_property_name] = updated_entity
return request | :param request: an apiv2 request object
:return: request if successful with entities set on request |
def alias(cls, typemap, base, *names):
""" Declare an alternate (humane) name for a measurement protocol parameter """
cls.parameter_alias[base] = (typemap, base)
for i in names:
cls.parameter_alias[i] = (typemap, base) | Declare an alternate (humane) name for a measurement protocol parameter |
def send(x, inter=0, loop=0, count=None, verbose=None, realtime=None, *args, **kargs):
"""Send packets at layer 3
send(packets, [inter=0], [loop=0], [verbose=conf.verb]) -> None"""
__gen_send(conf.L3socket(*args, **kargs), x, inter=inter, loop=loop, count=count,verbose=verbose, realtime=realtime) | Send packets at layer 3
send(packets, [inter=0], [loop=0], [verbose=conf.verb]) -> None |
def get_queues(self, service_desk_id, include_count=False, start=0, limit=50):
"""
Returns a page of queues defined inside a service desk, for a given service desk ID.
The returned queues will include an issue count for each queue (represented in issueCount field)
if the query param includeCount is set to true (defaults to false).
Permissions: The calling user must be an agent of the given service desk.
:param service_desk_id: str
:param include_count: bool
:param start: int
:param limit: int
:return: a page of queues
"""
url = 'rest/servicedeskapi/servicedesk/{}/queue'.format(service_desk_id)
params = {}
if include_count is not None:
params['includeCount'] = bool(include_count)
if start is not None:
params['start'] = int(start)
if limit is not None:
params['limit'] = int(limit)
return self.get(url, headers=self.experimental_headers, params=params) | Returns a page of queues defined inside a service desk, for a given service desk ID.
The returned queues will include an issue count for each queue (represented in issueCount field)
if the query param includeCount is set to true (defaults to false).
Permissions: The calling user must be an agent of the given service desk.
:param service_desk_id: str
:param include_count: bool
:param start: int
:param limit: int
:return: a page of queues |
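A hypothetical usage sketch; `sd` is a client instance, and the paged-response shape (a `values` list) is an assumption about the Service Desk API response, not taken from this entry.

# Hypothetical usage: list the first 25 queues of service desk '10001', with issue counts.
queues = sd.get_queues('10001', include_count=True, limit=25)
for q in queues.get('values', []):
    print(q.get('name'), q.get('issueCount'))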
def scoreatpercentile(data,per,axis=0):
'like the function in scipy.stats but with an axis argument and works on arrays'
a = np.sort(data,axis=axis)
idx = per/100. * (data.shape[axis]-1)
if (idx % 1 == 0):
return a[[slice(None) if ii != axis else idx for ii in range(a.ndim)]]
else:
lowerweight = 1-(idx % 1)
upperweight = (idx % 1)
idx = int(np.floor(idx))
return lowerweight * a[[slice(None) if ii != axis else idx for ii in range(a.ndim)]] \
+ upperweight * a[[slice(None) if ii != axis else idx+1 for ii in range(a.ndim)]] | like the function in scipy.stats but with an axis argument and works on arrays |
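A sanity-check sketch: for a percentile that falls on an exact index, the result should agree with `np.percentile`; note that the list-based fancy indexing above relies on older NumPy behaviour, so this is only expected to run there.

import numpy as np

data = np.random.rand(11, 4)
print(scoreatpercentile(data, 50, axis=0))   # column-wise median
print(np.percentile(data, 50, axis=0))       # should match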
def extend( self, itemseq ):
"""
Add sequence of elements to end of ParseResults list of elements.
Example::
patt = OneOrMore(Word(alphas))
# use a parse action to append the reverse of the matched strings, to make a palindrome
def make_palindrome(tokens):
tokens.extend(reversed([t[::-1] for t in tokens]))
return ''.join(tokens)
print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
"""
if isinstance(itemseq, ParseResults):
self += itemseq
else:
self.__toklist.extend(itemseq) | Add sequence of elements to end of ParseResults list of elements.
Example::
patt = OneOrMore(Word(alphas))
# use a parse action to append the reverse of the matched strings, to make a palindrome
def make_palindrome(tokens):
tokens.extend(reversed([t[::-1] for t in tokens]))
return ''.join(tokens)
print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' |
def expected_number_of_transactions_in_first_n_periods(self, n):
r"""
Return expected number of transactions in the first n periods.
Expected number of transactions occurring across first n transaction
opportunities.
Used by Fader and Hardie to assess in-sample fit.
.. math:: Pr(X(n) = x| \alpha, \beta, \gamma, \delta)
See (7) in Fader & Hardie 2010.
Parameters
----------
n: float
number of transaction opportunities
Returns
-------
DataFrame:
Predicted values, indexed by x
"""
params = self._unload_params("alpha", "beta", "gamma", "delta")
alpha, beta, gamma, delta = params
x_counts = self.data.groupby("frequency")["weights"].sum()
x = np.asarray(x_counts.index)
p1 = binom(n, x) * exp(
betaln(alpha + x, beta + n - x) - betaln(alpha, beta) + betaln(gamma, delta + n) - betaln(gamma, delta)
)
I = np.arange(x.min(), n)
@np.vectorize
def p2(j, x):
i = I[int(j) :]
return np.sum(
binom(i, x)
* exp(
betaln(alpha + x, beta + i - x)
- betaln(alpha, beta)
+ betaln(gamma + 1, delta + i)
- betaln(gamma, delta)
)
)
p1 += np.fromfunction(p2, (x.shape[0],), x=x)
idx = pd.Index(x, name="frequency")
return DataFrame(p1 * x_counts.sum(), index=idx, columns=["model"]) | r"""
Return expected number of transactions in the first n periods.
Expected number of transactions occurring across first n transaction
opportunities.
Used by Fader and Hardie to assess in-sample fit.
.. math:: Pr(X(n) = x| \alpha, \beta, \gamma, \delta)
See (7) in Fader & Hardie 2010.
Parameters
----------
n: float
number of transaction opportunities
Returns
-------
DataFrame:
Predicted values, indexed by x |
def run(self, visitor):
"""
:param visitor: visitor to call with every node in the domain tree.
:type visitor: subclass of
:class:`everest.entities.traversal.DomainVisitor`
"""
if __debug__:
self.__log_run(visitor)
visitor.prepare()
if self.__root_is_sequence:
if not self._tgt_prx is None:
tgts = iter(self._tgt_prx)
else:
tgts = None
if not self._src_prx is None:
srcs = iter(self._src_prx)
else:
srcs = None
self.traverse_many(None, srcs, tgts, visitor)
else:
self.traverse_one(None, self._src_prx, self._tgt_prx, visitor)
visitor.finalize() | :param visitor: visitor to call with every node in the domain tree.
:type visitor: subclass of
:class:`everest.entities.traversal.DomainVisitor` |
def detach_all_classes(self):
"""
Detach from all tracked classes.
"""
classes = list(self._observers.keys())
for cls in classes:
self.detach_class(cls) | Detach from all tracked classes. |
def list_directories(dir_pathname,
recursive=True,
topdown=True,
followlinks=False):
"""
Enlists all the directories using their absolute paths within the specified
directory, optionally recursively.
:param dir_pathname:
The directory to traverse.
:param recursive:
``True`` for walking recursively through the directory tree;
``False`` otherwise.
:param topdown:
Please see the documentation for :func:`os.walk`
:param followlinks:
Please see the documentation for :func:`os.walk`
"""
for root, dirnames, filenames\
in walk(dir_pathname, recursive, topdown, followlinks):
for dirname in dirnames:
yield absolute_path(os.path.join(root, dirname)) | Enlists all the directories using their absolute paths within the specified
directory, optionally recursively.
:param dir_pathname:
The directory to traverse.
:param recursive:
``True`` for walking recursively through the directory tree;
``False`` otherwise.
:param topdown:
Please see the documentation for :func:`os.walk`
:param followlinks:
Please see the documentation for :func:`os.walk` |
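A hypothetical usage sketch (the helpers `walk` and `absolute_path` come from the same module as the function above and are not shown here):

# Hypothetical usage: print every sub-directory under /tmp, without recursing.
for dirname in list_directories('/tmp', recursive=False):
    print(dirname)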
def on_click_dispatcher(self, module_name, event, command):
"""
Dispatch on_click config parameters to either:
- Our own methods for special py3status commands (listed below)
- The i3-msg program which is part of i3wm
"""
if command is None:
return
elif command == "refresh_all":
self.py3_wrapper.refresh_modules()
elif command == "refresh":
self.py3_wrapper.refresh_modules(module_name)
else:
# In commands we are able to use substitutions for the text output
# of a module
if "$OUTPUT" in command or "$OUTPUT_PART" in command:
full_text, partial_text = self.get_module_text(module_name, event)
command = command.replace("$OUTPUT_PART", shell_quote(partial_text))
command = command.replace("$OUTPUT", shell_quote(full_text))
# this is a i3 message
self.wm_msg(module_name, command)
# to make the bar more responsive to users we ask for a refresh
# of the module or of i3status if the module is an i3status one
self.py3_wrapper.refresh_modules(module_name) | Dispatch on_click config parameters to either:
- Our own methods for special py3status commands (listed below)
- The i3-msg program which is part of i3wm |
def lazy_approximate_personalized_pagerank(s,
r,
w_i,
a_i,
out_degree,
in_degree,
seed_node,
rho=0.2,
epsilon=0.00001,
laziness_factor=0.5):
"""
Calculates the approximate personalized PageRank starting from a seed node with self-loops.
Introduced in: Andersen, R., Chung, F., & Lang, K. (2006, October).
Local graph partitioning using pagerank vectors.
In Foundations of Computer Science, 2006. FOCS'06. 47th Annual IEEE Symposium on (pp. 475-486). IEEE.
"""
# Initialize approximate PageRank and residual distributions
# s = np.zeros(number_of_nodes, dtype=np.float64)
# r = np.zeros(number_of_nodes, dtype=np.float64)
r[seed_node] = 1.0
# Initialize queue of nodes to be pushed
pushable = deque()
pushable.append(seed_node)
# Do one push anyway
push_node = pushable.popleft()
pagerank_lazy_push(s,
r,
w_i[push_node],
a_i[push_node],
push_node,
rho,
laziness_factor)
number_of_push_operations = 1
i = np.where(np.divide(r[a_i[push_node]], in_degree[a_i[push_node]]) >= epsilon)[0]
if i.size > 0:
pushable.extend(a_i[push_node][i])
while r[push_node]/in_degree[push_node] >= epsilon:
pagerank_lazy_push(s,
r,
w_i[push_node],
a_i[push_node],
push_node,
rho,
laziness_factor)
number_of_push_operations += 1
# While there are nodes with large residual probabilities, push
while len(pushable) > 0:
push_node = pushable.popleft()
if r[push_node]/in_degree[push_node] >= epsilon:
pagerank_lazy_push(s,
r,
w_i[push_node],
a_i[push_node],
push_node,
rho,
laziness_factor)
number_of_push_operations += 1
i = np.where(np.divide(r[a_i[push_node]], in_degree[a_i[push_node]]) >= epsilon)[0]
if i.size > 0:
pushable.extend(a_i[push_node][i])
while r[push_node]/in_degree[push_node] >= epsilon:
pagerank_lazy_push(s,
r,
w_i[push_node],
a_i[push_node],
push_node,
rho,
laziness_factor)
number_of_push_operations += 1
return number_of_push_operations | Calculates the approximate personalized PageRank starting from a seed node with self-loops.
Introduced in: Andersen, R., Chung, F., & Lang, K. (2006, October).
Local graph partitioning using pagerank vectors.
In Foundations of Computer Science, 2006. FOCS'06. 47th Annual IEEE Symposium on (pp. 475-486). IEEE. |
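The push operation `pagerank_lazy_push` is not shown in this entry; a rough sketch consistent with the lazy push rule of Andersen, Chung & Lang (2006) might look like the following (the exact split used by the source may differ).

import numpy as np

def pagerank_lazy_push(s, r, w_u, a_u, u, rho, laziness_factor):
    # Sketch (assumption): absorb a rho-fraction of the residual at u into the estimate,
    # keep a lazy share on u itself, and spread the remainder to the neighbours a_u
    # proportionally to the transition weights w_u.
    mass = r[u]
    s[u] += rho * mass
    r[u] = (1.0 - rho) * laziness_factor * mass
    r[a_u] += (1.0 - rho) * (1.0 - laziness_factor) * mass * w_u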