language | func_code_string
---|---
java | @Pure
GP createPath(AStarNode<ST, PT> startPoint, PT endPoint, List<AStarNode<ST, PT>> closeList) {
int idx;
ST segment;
PT point;
AStarNode<ST, PT> node;
GP path = null;
final CloseComparator<ST, PT> cComparator = new CloseComparator<>();
node = newAStarNode(endPoint, Double.NaN, Double.NaN, null);
idx = ListUtil.indexOf(closeList, cComparator, node);
if (idx >= 0) {
node = closeList.remove(idx);
point = node.getGraphPoint();
segment = node.getArrivalConnection();
if (point != null && segment != null) {
final LinkedList<ST> pathSegments = new LinkedList<>();
pathSegments.add(segment);
do {
point = segment.getOtherSidePoint(point);
node = newAStarNode(point, Double.NaN, Double.NaN, null);
idx = ListUtil.indexOf(closeList, cComparator, node);
if (idx >= 0) {
node = closeList.remove(idx);
segment = node.getArrivalConnection();
if (segment != null) {
pathSegments.add(segment);
}
} else {
point = null;
segment = null;
}
}
while (point != null && segment != null);
// Building the path is done in a second step because the
// oriented graph does not allow building it during the
// previous loop: isConnectedSegment returns false when
// tested with an arrival segment, so it is not possible
// to walk backward along the path.
final Iterator<ST> iterator = pathSegments.descendingIterator();
if (iterator.hasNext()) {
segment = iterator.next();
if (startPoint.getGraphPoint().isConnectedSegment(segment)) {
path = newPath(startPoint.getGraphPoint(), segment);
while (iterator.hasNext()) {
addToPath(path, iterator.next());
}
}
}
}
}
return path;
} |
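The Java above rebuilds the path by repeatedly looking up each node's arrival connection in the close list. A minimal Python sketch of the same backtracking idea, using a hypothetical came_from map rather than the library's close-list types:

```python
# Illustrative only: these names are not part of the Java API above.
def reconstruct_path(came_from, start, end):
    """Walk arrival links backwards from end to start, then reverse."""
    path = [end]
    node = end
    while node != start:
        node = came_from.get(node)
        if node is None:
            return None  # the end point was never reached from start
        path.append(node)
    path.reverse()
    return path

came_from = {'B': 'A', 'C': 'B', 'D': 'C'}
print(reconstruct_path(came_from, 'A', 'D'))  # ['A', 'B', 'C', 'D']
```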
java | protected void applyDrawerWithHeader() {
setType(SideNavType.DRAWER_WITH_HEADER);
applyBodyScroll();
if (isShowOnAttach()) {
Scheduler.get().scheduleDeferred(() -> {
pushElement(getHeader(), 0);
pushElement(getMain(), 0);
});
}
} |
python | def _decrypt_ciphertext(cipher):
'''
Given a block of ciphertext as a string, try to decrypt the cipher and
return the decrypted string. If the cipher cannot be decrypted, log the
error and return the ciphertext unchanged.
'''
try:
cipher = salt.utils.stringutils.to_unicode(cipher).replace(r'\n', '\n')
except UnicodeDecodeError:
# ciphertext is binary
pass
cipher = salt.utils.stringutils.to_bytes(cipher)
cmd = [_get_gpg_exec(), '--homedir', _get_key_dir(), '--status-fd', '2',
'--no-tty', '-d']
proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=False)
decrypted_data, decrypt_error = proc.communicate(input=cipher)
if not decrypted_data:
try:
cipher = salt.utils.stringutils.to_unicode(cipher)
except UnicodeDecodeError:
# ciphertext contains undecodable binary data
pass
log.warning(
'Could not decrypt cipher %s, received: %s',
cipher,
decrypt_error
)
return cipher
else:
try:
decrypted_data = salt.utils.stringutils.to_unicode(decrypted_data)
except UnicodeDecodeError:
# decrypted data contains undecodable binary data
pass
return decrypted_data |
python | def snow(im, voxel_size=1,
boundary_faces=['top', 'bottom', 'left', 'right', 'front', 'back'],
marching_cubes_area=False):
r"""
Analyzes an image that has been partitioned into void and solid regions
and extracts the void and solid phase geometry as well as network
connectivity.
Parameters
----------
im : ND-array
Binary image in Boolean form, with ``True`` indicating the void phase
and ``False`` the solid phase.
voxel_size : scalar
The resolution of the image, expressed as the length of one side of a
voxel, so the volume of a voxel would be **voxel_size**-cubed. The
default is 1, which is useful when overlaying the PNM on the original
image since the scale of the image is always 1 unit length per voxel.
boundary_faces : list of strings
Boundary faces labels are provided to assign hypothetical boundary
nodes having zero resistance to transport process. For cubical
geometry, the user can choose 'left', 'right', 'top', 'bottom',
'front' and 'back' face labels to assign boundary nodes. If no label is
assigned then all six faces will be selected as boundary nodes
automatically which can be trimmed later on based on user requirements.
marching_cubes_area : bool
If ``True`` then the surface area and interfacial area between regions
will be calculated using the marching cubes algorithm. This is a more accurate
representation of area in extracted network, but is quite slow, so
it is ``False`` by default. The default method simply counts voxels
so does not correctly account for the voxelated nature of the images.
Returns
-------
An object containing the extracted data, with the following attributes:
* ``net``: A dictionary containing all the void and solid phase size data,
as well as the network topological information. The dictionary names
use the OpenPNM convention (i.e. 'pore.coords', 'throat.conns') so it
may be converted directly to an OpenPNM network object using the
``update`` command.
* ``im``: The binary image of the void space
* ``dt``: The combined distance transform of the image
* ``regions``: The void and solid space partitioned into pores and solids
phases using a marker based watershed with the peaks found by the
SNOW Algorithm.
"""
# -------------------------------------------------------------------------
# SNOW void phase
regions = snow_partitioning(im=im, return_all=True)
im = regions.im
dt = regions.dt
regions = regions.regions
b_num = sp.amax(regions)
# -------------------------------------------------------------------------
# Boundary Conditions
regions = add_boundary_regions(regions=regions, faces=boundary_faces)
# -------------------------------------------------------------------------
# Padding distance transform and image to extract geometrical properties
dt = pad_faces(im=dt, faces=boundary_faces)
im = pad_faces(im=im, faces=boundary_faces)
regions = regions*im
regions = make_contiguous(regions)
# -------------------------------------------------------------------------
# Extract void and throat information from image
net = regions_to_network(im=regions, dt=dt, voxel_size=voxel_size)
# -------------------------------------------------------------------------
# Extract marching cube surface area and interfacial area of regions
if marching_cubes_area:
areas = region_surface_areas(regions=regions)
interface_area = region_interface_areas(regions=regions, areas=areas,
voxel_size=voxel_size)
net['pore.surface_area'] = areas * voxel_size**2
net['throat.area'] = interface_area.area
# -------------------------------------------------------------------------
# Find void to void connections of boundary and internal voids
boundary_labels = net['pore.label'] > b_num
loc1 = net['throat.conns'][:, 0] < b_num
loc2 = net['throat.conns'][:, 1] >= b_num
pore_labels = net['pore.label'] <= b_num
loc3 = net['throat.conns'][:, 0] < b_num
loc4 = net['throat.conns'][:, 1] < b_num
net['pore.boundary'] = boundary_labels
net['throat.boundary'] = loc1 * loc2
net['pore.internal'] = pore_labels
net['throat.internal'] = loc3 * loc4
# -------------------------------------------------------------------------
# label boundary cells
net = label_boundary_cells(network=net, boundary_faces=boundary_faces)
# -------------------------------------------------------------------------
# assign out values to dummy dict
temp = _net_dict(net)
temp.im = im.copy()
temp.dt = dt
temp.regions = regions
return temp |
python | def request(self, method, resource, all_pages=False, **kwargs):
"""
Makes a request to the given endpoint.
Keyword arguments are passed to the :meth:`~requests.request` method.
If the content type of the response is JSON, it will be decoded
automatically and a dictionary will be returned.
Otherwise the :class:`~requests.Response` object is returned.
"""
response = self.raw_request(method, resource, **kwargs)
if not is_valid_response(response):
raise GitHubError(response)
if is_json_response(response):
result = response.json()
while all_pages and response.links.get('next'):
url = response.links['next']['url']
response = self.raw_request(method, url, **kwargs)
if not is_valid_response(response) or \
not is_json_response(response):
raise GitHubError(response)
body = response.json()
if isinstance(body, list):
result += body
elif isinstance(body, dict) and 'items' in body:
result['items'] += body['items']
else:
raise GitHubError(response)
return result
else:
return response |
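The pagination loop above follows the `next` relation that `requests` parses out of the `Link` response header. A hedged standalone sketch of the same idea (the endpoint in the comment is a placeholder):

```python
import requests

def fetch_all_pages(url, **kwargs):
    """Follow 'next' Link relations, concatenating JSON list bodies."""
    result = []
    while url:
        response = requests.get(url, **kwargs)
        response.raise_for_status()
        result.extend(response.json())
        # requests exposes parsed Link headers as response.links
        url = response.links.get('next', {}).get('url')
    return result

# e.g. fetch_all_pages('https://api.github.com/repos/octocat/hello-world/issues')
```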
java | @Override
public CommerceAvailabilityEstimate fetchByPrimaryKey(
Serializable primaryKey) {
Serializable serializable = entityCache.getResult(CommerceAvailabilityEstimateModelImpl.ENTITY_CACHE_ENABLED,
CommerceAvailabilityEstimateImpl.class, primaryKey);
if (serializable == nullModel) {
return null;
}
CommerceAvailabilityEstimate commerceAvailabilityEstimate = (CommerceAvailabilityEstimate)serializable;
if (commerceAvailabilityEstimate == null) {
Session session = null;
try {
session = openSession();
commerceAvailabilityEstimate = (CommerceAvailabilityEstimate)session.get(CommerceAvailabilityEstimateImpl.class,
primaryKey);
if (commerceAvailabilityEstimate != null) {
cacheResult(commerceAvailabilityEstimate);
}
else {
entityCache.putResult(CommerceAvailabilityEstimateModelImpl.ENTITY_CACHE_ENABLED,
CommerceAvailabilityEstimateImpl.class, primaryKey,
nullModel);
}
}
catch (Exception e) {
entityCache.removeResult(CommerceAvailabilityEstimateModelImpl.ENTITY_CACHE_ENABLED,
CommerceAvailabilityEstimateImpl.class, primaryKey);
throw processException(e);
}
finally {
closeSession(session);
}
}
return commerceAvailabilityEstimate;
} |
python | def _count_values(self):
"""Return dict mapping relevance level to sample index"""
indices = {yi: [i] for i, yi in enumerate(self.y) if self.status[i]}
return indices |
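A tiny standalone illustration of grouping sample indices by value, as the method above does (illustrative data):

```python
y = [0, 1, 0, 2, 1]
status = [True, True, False, True, True]

indices = {}
for i, yi in enumerate(y):
    if status[i]:
        indices.setdefault(yi, []).append(i)

print(indices)  # {0: [0], 1: [1, 4], 2: [3]}
```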
java | private void checkAndValidateParameters(ConfigProperty configProperty) {
LOGGER.entering(configProperty);
try {
switch (configProperty) {
case SELENDROID_SERVER_START_TIMEOUT:
case SELENDROID_EMULATOR_START_TIMEOUT: {
// Selendroid takes timeoutEmulatorStart/serverStartTimeout in milliseconds.
Config.getIntConfigProperty(configProperty);
break;
}
case MOBILE_DRIVER_SESSION_TIMEOUT: {
// Selendroid takes sessionTimeout in seconds.
int receivedValue = Config.getIntConfigProperty(configProperty) / 1000;
if (receivedValue == 0) {
String errorMessage = "Insufficient value received for configuration property "
+ configProperty.getName() + ", probably value is less than 1000 milliseconds.";
throw new IllegalArgumentException(errorMessage);
}
break;
}
default: {
throw new IllegalArgumentException(
"Invalid selendroid configuration received for validation, configuration property = "
+ configProperty.getName());
}
}
} catch (ConversionException exe) {
String errorMessage = "Invalid data received for configuration property " + configProperty.getName()
+ ", probably not an integer for milliseconds.";
throw new IllegalArgumentException(errorMessage, exe);
}
LOGGER.exiting();
} |
java | public static Container create(Map<String, Object> params) throws EasyPostException {
return create(params, null);
} |
java | @Override
public ListResolverRulesResult listResolverRules(ListResolverRulesRequest request) {
request = beforeClientExecution(request);
return executeListResolverRules(request);
} |
java | @Override
public PutRemediationConfigurationsResult putRemediationConfigurations(PutRemediationConfigurationsRequest request) {
request = beforeClientExecution(request);
return executePutRemediationConfigurations(request);
} |
python | def shutdown(self):
"""shutdown connection"""
if self.verbose:
print(self.socket.getsockname(), 'xx', self.peername)
try:
self.socket.shutdown(socket.SHUT_RDWR)
except IOError as err:
assert err.errno == _ENOTCONN, "unexpected IOError: %s" % err
# remote peer has already closed the connection,
# just ignore the exception
pass
java | public static void throww(Throwable e, String msg) {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new JKException(msg, e);
} |
python | def on_step_end(self, step, logs={}):
""" Save weights at interval steps during training """
self.total_steps += 1
if self.total_steps % self.interval != 0:
# Nothing to do.
return
filepath = self.filepath.format(step=self.total_steps, **logs)
if self.verbose > 0:
print('Step {}: saving model to {}'.format(self.total_steps, filepath))
self.model.save_weights(filepath, overwrite=True) |
java | @Nullable
public <T> T getUnique(final URI property, final Class<T> valueClass,
@Nullable final T defaultValue) {
try {
final T value = getUnique(property, valueClass);
return value == null ? defaultValue : value;
} catch (final IllegalStateException ex) {
return defaultValue;
} catch (final IllegalArgumentException ex) {
return defaultValue;
}
} |
python | def get_cache_key(path):
"""
Create a cache key by concatenating the prefix with a hash of the path.
"""
# Python 2/3 support for path hashing
try:
path_hash = hashlib.md5(path).hexdigest()
except TypeError:
path_hash = hashlib.md5(path.encode('utf-8')).hexdigest()
return settings.cache_key_prefix + path_hash |
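Usage sketch of the same prefix-plus-MD5 scheme (the prefix is a placeholder, not the library's actual setting):

```python
import hashlib

CACHE_KEY_PREFIX = 'staticfiles:'  # placeholder, not the real setting

def cache_key(path):
    return CACHE_KEY_PREFIX + hashlib.md5(path.encode('utf-8')).hexdigest()

print(cache_key('css/site.css'))  # staticfiles:<32 hex digits>
```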
java | public boolean deleteAsync() throws ExecutionException, InterruptedException {
// [START deleteAsync]
Future<Boolean> future = metric.deleteAsync();
// ...
boolean deleted = future.get();
if (deleted) {
// the metric was deleted
} else {
// the metric was not found
}
// [END deleteAsync]
return deleted;
} |
java | public Stoichiometry combineWith(Stoichiometry other) {
Set<SubunitCluster> combinedClusters = new LinkedHashSet<>();
combinedClusters.addAll(this.orderedClusters);
combinedClusters.addAll(other.orderedClusters);
Stoichiometry combinedStoichiometry;
if (this.strategy == StringOverflowStrategy.CUSTOM) {
combinedStoichiometry = new Stoichiometry(new ArrayList<>(combinedClusters),this.customStringGenerator);
} else {
combinedStoichiometry = new Stoichiometry(new ArrayList<>(combinedClusters),this.strategy,false);
}
return combinedStoichiometry;
} |
java | protected void completeSnapshot(Snapshot snapshot) {
Assert.notNull(snapshot, "snapshot");
snapshots.put(snapshot.index(), snapshot);
if (currentSnapshot == null || snapshot.index() > currentSnapshot.index()) {
currentSnapshot = snapshot;
}
// Delete old snapshots if necessary.
if (!storage.retainStaleSnapshots()) {
Iterator<Map.Entry<Long, Snapshot>> iterator = snapshots.entrySet().iterator();
while (iterator.hasNext()) {
Snapshot oldSnapshot = iterator.next().getValue();
if (oldSnapshot.index() < currentSnapshot.index()) {
iterator.remove();
oldSnapshot.close();
oldSnapshot.delete();
}
}
}
} |
python | def resize_bilinear_nd(t, target_shape):
"""Bilinear resizes a tensor t to have shape target_shape.
This function bilinearly resizes a n-dimensional tensor by iteratively
applying tf.image.resize_bilinear (which can only resize 2 dimensions).
For bilinear interpolation, the order in which it is applied does not matter.
Args:
t: tensor to be resized
target_shape: the desired shape of the new tensor.
Returns:
The resized tensor
"""
shape = t.get_shape().as_list()
target_shape = list(target_shape)
assert len(shape) == len(target_shape)
# We progressively move through the shape, resizing dimensions...
d = 0
while d < len(shape):
# If we don't need to deal with the next dimension, step over it
if shape[d] == target_shape[d]:
d += 1
continue
# Otherwise, we'll resize the next two dimensions...
# If d+2 doesn't need to be resized, this will just be a null op for it
new_shape = shape[:]
new_shape[d : d+2] = target_shape[d : d+2]
# The helper collapse_shape() makes our shapes 4-dimensional with
# the two dimensions we want to deal with in the middle.
shape_ = collapse_shape(shape, d, d+2)
new_shape_ = collapse_shape(new_shape, d, d+2)
# We can then reshape and use the 2d tf.image.resize_bilinear() on the
# inner two dimensions.
t_ = tf.reshape(t, shape_)
t_ = tf.image.resize_bilinear(t_, new_shape_[1:3])
# And then reshape back to our uncollapsed version, having finished resizing
# two more dimensions in our shape.
t = tf.reshape(t_, new_shape)
shape = new_shape
d += 2
return t |
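A plausible sketch of the collapse_shape helper the comments reference: fold everything before index d into one leading dimension and everything from d+2 on into one trailing dimension, yielding a 4-D shape. This assumes 0 < d and d+2 <= len(shape); the real helper presumably also pads edge cases with unit dimensions.

```python
from functools import reduce
import operator

def collapse_shape(shape, a, b):
    """Collapse shape to 4-D: [prod(before a)] + shape[a:b] + [prod(from b)]."""
    prod = lambda xs: reduce(operator.mul, xs, 1)
    shape = list(shape)
    return [prod(shape[:a])] + shape[a:b] + [prod(shape[b:])]

print(collapse_shape([2, 3, 4, 5], 1, 3))     # [2, 3, 4, 5]
print(collapse_shape([2, 3, 4, 5, 6], 2, 4))  # [6, 4, 5, 6]
```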
python | def _Request(global_endpoint_manager, request, connection_policy, requests_session, path, request_options, request_body):
"""Makes one http request using the requests module.
:param _GlobalEndpointManager global_endpoint_manager:
:param dict request:
contains the resourceType, operationType, endpointOverride,
useWriteEndpoint, useAlternateWriteEndpoint information
:param documents.ConnectionPolicy connection_policy:
:param requests.Session requests_session:
Session object in requests module
:param str path:
    The path of the resource
:param dict request_options:
:param str request_body:
Unicode or None
:return:
tuple of (result, headers)
:rtype:
tuple of (dict, dict)
"""
is_media = request_options['path'].find('media') > -1
is_media_stream = is_media and connection_policy.MediaReadMode == documents.MediaReadMode.Streamed
connection_timeout = (connection_policy.MediaRequestTimeout
if is_media
else connection_policy.RequestTimeout)
# Every request tries to perform a refresh
global_endpoint_manager.refresh_endpoint_list(None)
if request.endpoint_override:
base_url = request.endpoint_override
else:
base_url = global_endpoint_manager.resolve_service_endpoint(request)
if path:
resource_url = base_url + path
else:
resource_url = base_url
parse_result = urlparse(resource_url)
# The requests library now expects header values to be strings only starting 2.11,
# and will raise an error on validation if they are not, so casting all header values to strings.
request_options['headers'] = { header: str(value) for header, value in request_options['headers'].items() }
# We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user
# has explicitly specified to disable SSL verification.
is_ssl_enabled = (parse_result.hostname != 'localhost' and parse_result.hostname != '127.0.0.1' and not connection_policy.DisableSSLVerification)
if connection_policy.SSLConfiguration:
ca_certs = connection_policy.SSLConfiguration.SSLCaCerts
cert_files = (connection_policy.SSLConfiguration.SSLCertFile, connection_policy.SSLConfiguration.SSLKeyFile)
response = requests_session.request(request_options['method'],
resource_url,
data = request_body,
headers = request_options['headers'],
timeout = connection_timeout / 1000.0,
stream = is_media_stream,
verify = ca_certs,
cert = cert_files)
else:
response = requests_session.request(request_options['method'],
resource_url,
data = request_body,
headers = request_options['headers'],
timeout = connection_timeout / 1000.0,
stream = is_media_stream,
# verify is False for the local emulator or when SSL
# verification has been explicitly disabled
verify = is_ssl_enabled)
headers = dict(response.headers)
# In case of media stream response, return the response to the user and the user
# will need to handle reading the response.
if is_media_stream:
return (response.raw, headers)
data = response.content
if not six.PY2:
# python 3 compatible: convert data from byte to unicode string
data = data.decode('utf-8')
if response.status_code >= 400:
raise errors.HTTPFailure(response.status_code, data, headers)
result = None
if is_media:
result = data
else:
if len(data) > 0:
try:
result = json.loads(data)
except ValueError:
raise errors.JSONParseFailure(data)
return (result, headers) |
java | public void addRecord(String key,
long l) throws TarMalformatException, IOException {
addRecord(key, Long.toString(l));
} |
java | @SuppressWarnings("unchecked")
public String tokenAsString(Comparable tokenAsComparable) {
ByteBuffer bb = tokenType.decompose(tokenAsComparable);
Token token = tokenFactory.fromByteArray(bb);
return tokenFactory.toString(token);
} |
python | def smith_waterman_similarity(s1,
s2,
match=5,
mismatch=-5,
gap_start=-5,
gap_continue=-1,
norm="mean"):
"""Smith-Waterman string comparison.
An implementation of the Smith-Waterman string comparison algorithm
described in Christen, Peter (2012).
Parameters
----------
s1 : label, pandas.Series
Series or DataFrame to compare all fields.
s2 : label, pandas.Series
Series or DataFrame to compare all fields.
match : float
The value added to the match score if two characters match.
Greater than mismatch, gap_start, and gap_continue. Default: 5.
mismatch : float
The value added to the match score if two characters do not match.
Less than match. Default: -5.
gap_start : float
The value added to the match score upon encountering the start of
a gap. Default: -5.
gap_continue : float
The value added to the match score for positions where a previously
started gap is continuing. Default: -1.
norm : str
The name of the normalization metric to be used. Applied by dividing
the match score by the normalization metric multiplied by match. One
of "min", "max",or "mean". "min" will use the minimum string length
as the normalization metric. "max" and "mean" use the maximum and
mean string length respectively. Default: "mean""
Returns
-------
pandas.Series
A pandas series with similarity values. Values equal or between 0
and 1.
"""
# Assert that match is greater than or equal to mismatch, gap_start, and
# gap_continue.
assert match >= max(mismatch, gap_start, gap_continue), \
"match must be greater than or equal to mismatch, " \
"gap_start, and gap_continue"
if len(s1) != len(s2):
raise ValueError('Arrays or Series have to be same length.')
if len(s1) == len(s2) == 0:
return []
concat = pandas.Series(list(zip(s1, s2)))
def sw_apply(t):
"""
sw_apply(t)
A helper function that is applied to each pair of records
in s1 and s2. Assigns a similarity score to each pair,
between 0 and 1. Used by the pandas.apply method.
Parameters
----------
t : pandas.Series
A pandas Series containing two strings to be compared.
Returns
-------
Float
A similarity score between 0 and 1.
"""
str1 = t[0]
str2 = t[1]
def compute_score():
"""
compute_score()
The helper function that produces the non-normalized
similarity score between two strings. The scores are
determined using the Smith-Waterman dynamic programming
algorithm. The scoring scheme is determined from the
parameters provided to the parent smith_waterman_similarity()
function.
Returns
-------
Float
A score 0 or greater. Indicates similarity between two strings.
"""
# Initialize the score matrix with 0s
m = [[0] * (1 + len(str2)) for i in range(1 + len(str1))]
# Initialize the trace matrix with empty lists
trace = [[[] for _ in range(1 + len(str2))]
for _ in range(1 + len(str1))]
# Initialize the highest seen score to 0
highest = 0
# Iterate through the matrix
for x in range(1, 1 + len(str1)):
for y in range(1, 1 + len(str2)):
# Calculate Diagonal Score
if str1[x - 1] == str2[y - 1]:
# If characters match, add the match score to the
# diagonal score
diagonal = m[x - 1][y - 1] + match
else:
# If characters do not match, add the mismatch score
# to the diagonal score
diagonal = m[x - 1][y - 1] + mismatch
# Calculate the Left Gap Score
if "H" in trace[x - 1][y]:
# If cell to the left's score was calculated based on
# a horizontal gap, add the gap continuation penalty
# to the left score.
gap_horizontal = m[x - 1][y] + gap_continue
else:
# Otherwise, add the gap start penalty to the left
# score
gap_horizontal = m[x - 1][y] + gap_start
# Calculate the Above Gap Score
if "V" in trace[x][y - 1]:
# If above cell's score was calculated based on a
# vertical gap, add the gap continuation penalty to
# the above score.
gap_vertical = m[x][y - 1] + gap_continue
else:
# Otherwise, add the gap start penalty to the above
# score
gap_vertical = m[x][y - 1] + gap_start
# Choose the highest of the three scores
score = max(diagonal, gap_horizontal, gap_vertical)
if score <= 0:
# If score is less than 0, boost to 0
score = 0
else:
# If score is greater than 0, determine whether it was
# calculated based on a diagonal score, horizontal gap,
# or vertical gap. Store D, H, or V in the trace matrix
# accordingly.
if score == diagonal:
trace[x][y].append("D")
if score == gap_horizontal:
trace[x][y].append("H")
if score == gap_vertical:
trace[x][y].append("V")
# If the cell's score is greater than the highest score
# previously present, record the score as the highest.
if score > highest:
highest = score
# Set the cell's score to score
m[x][y] = score
# After iterating through the entire matrix, return the highest
# score found.
return highest
def normalize(score):
"""
normalize(score)
A helper function used to normalize the score produced by
compute_score() to a score between 0 and 1. The method for
normalization is determined by the norm argument provided
to the parent, smith_waterman_similarity function.
Parameters
----------
score : Float
The score produced by the compute_score() function.
Returns
-------
Float
A normalized score between 0 and 1.
"""
if norm == "min":
# Normalize by the shorter string's length
return score / (min(len(str1), len(str2)) * match)
if norm == "max":
# Normalize by the longer string's length
return score / (max(len(str1), len(str2)) * match)
if norm == "mean":
# Normalize by the mean length of the two strings
return 2 * score / ((len(str1) + len(str2)) * match)
else:
warnings.warn(
'Unrecognized Smith-Waterman normalization. '
'Defaulting to "mean" method.')
return 2 * score / ((len(str1) + len(str2)) * match)
try:
if len(str1) == 0 or len(str2) == 0:
return 0
return normalize(compute_score())
except Exception as err:
if pandas.isnull(t[0]) or pandas.isnull(t[1]):
return np.nan
else:
raise err
return concat.apply(sw_apply) |
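A hedged usage sketch, assuming smith_waterman_similarity above is in scope along with its pandas/numpy imports:

```python
import pandas

s1 = pandas.Series(['arnab', 'jonathan', 'smith'])
s2 = pandas.Series(['arnav', 'jonatan', 'smith'])

scores = smith_waterman_similarity(s1, s2)
print(scores)  # values in [0, 1]; the exact match scores 1.0
```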
python | def _get_id(self):
"""Construct and return the identifier"""
return ''.join(map(str,
filter(is_not_None,
[self.Prefix, self.Name]))) |
python | def do_init(
dev=False,
requirements=False,
allow_global=False,
ignore_pipfile=False,
skip_lock=False,
system=False,
concurrent=True,
deploy=False,
pre=False,
keep_outdated=False,
requirements_dir=None,
pypi_mirror=None,
):
"""Executes the init functionality."""
from .environments import (
PIPENV_VIRTUALENV, PIPENV_DEFAULT_PYTHON_VERSION, PIPENV_PYTHON, PIPENV_USE_SYSTEM
)
python = None
if PIPENV_PYTHON is not None:
python = PIPENV_PYTHON
elif PIPENV_DEFAULT_PYTHON_VERSION is not None:
python = PIPENV_DEFAULT_PYTHON_VERSION
if not system and not PIPENV_USE_SYSTEM:
if not project.virtualenv_exists:
try:
do_create_virtualenv(python=python, three=None, pypi_mirror=pypi_mirror)
except KeyboardInterrupt:
cleanup_virtualenv(bare=False)
sys.exit(1)
# Ensure the Pipfile exists.
if not deploy:
ensure_pipfile(system=system)
if not requirements_dir:
requirements_dir = vistir.path.create_tracked_tempdir(
suffix="-requirements", prefix="pipenv-"
)
# Write out the lockfile if it doesn't exist, but not if the Pipfile is being ignored
if (project.lockfile_exists and not ignore_pipfile) and not skip_lock:
old_hash = project.get_lockfile_hash()
new_hash = project.calculate_pipfile_hash()
if new_hash != old_hash:
if deploy:
click.echo(
crayons.red(
"Your Pipfile.lock ({0}) is out of date. Expected: ({1}).".format(
old_hash[-6:], new_hash[-6:]
)
)
)
raise exceptions.DeployException
sys.exit(1)
elif (system or allow_global) and not (PIPENV_VIRTUALENV):
click.echo(
crayons.red(fix_utf8(
"Pipfile.lock ({0}) out of date, but installation "
"uses {1}… re-building lockfile must happen in "
"isolation. Please rebuild lockfile in a virtualenv. "
"Continuing anyway…".format(
crayons.white(old_hash[-6:]), crayons.white("--system")
)),
bold=True,
),
err=True,
)
else:
if old_hash:
msg = fix_utf8("Pipfile.lock ({0}) out of date, updating to ({1})…")
else:
msg = fix_utf8("Pipfile.lock is corrupted, replaced with ({1})…")
click.echo(
crayons.red(msg.format(old_hash[-6:] if old_hash else '', new_hash[-6:]), bold=True),
err=True,
)
do_lock(
system=system,
pre=pre,
keep_outdated=keep_outdated,
write=True,
pypi_mirror=pypi_mirror,
)
# Write out the lockfile if it doesn't exist.
if not project.lockfile_exists and not skip_lock:
# Unless we're in a virtualenv not managed by pipenv, abort if we're
# using the system's python.
if (system or allow_global) and not (PIPENV_VIRTUALENV):
raise exceptions.PipenvOptionsError(
"--system",
"--system is intended to be used for Pipfile installation, "
"not installation of specific packages. Aborting.\n"
"See also: --deploy flag."
)
else:
click.echo(
crayons.normal(fix_utf8("Pipfile.lock not found, creating…"), bold=True),
err=True,
)
do_lock(
system=system,
pre=pre,
keep_outdated=keep_outdated,
write=True,
pypi_mirror=pypi_mirror,
)
do_install_dependencies(
dev=dev,
requirements=requirements,
allow_global=allow_global,
skip_lock=skip_lock,
concurrent=concurrent,
requirements_dir=requirements_dir,
pypi_mirror=pypi_mirror,
)
# Hint the user what to do to activate the virtualenv.
if not allow_global and not deploy and "PIPENV_ACTIVE" not in os.environ:
click.echo(
"To activate this project's virtualenv, run {0}.\n"
"Alternatively, run a command "
"inside the virtualenv with {1}.".format(
crayons.red("pipenv shell"), crayons.red("pipenv run")
)
) |
python | def _get_instance(self):
"""
Return the instance matching the running_instance_id.
"""
try:
instance = self.compute.virtual_machines.get(
self.running_instance_id, self.running_instance_id,
expand='instanceView'
)
except Exception as error:
raise AzureCloudException(
'Unable to retrieve instance: {0}'.format(error)
)
return instance |
python | def update(self, callback=None, errback=None, **kwargs):
"""
Update monitor configuration. Pass keyword arguments for the fields to
update along with their new values.
"""
if not self.data:
raise MonitorException('monitor not loaded')
def success(result, *args):
self.data = result
if callback:
return callback(self)
else:
return self
return self._rest.update(self.data['id'], {}, callback=success, errback=errback, **kwargs) |
java | public void marshall(ActivityFailedEventDetails activityFailedEventDetails, ProtocolMarshaller protocolMarshaller) {
if (activityFailedEventDetails == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(activityFailedEventDetails.getError(), ERROR_BINDING);
protocolMarshaller.marshall(activityFailedEventDetails.getCause(), CAUSE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def generate_megaman_manifold(sampling=2, nfolds=2,
rotate=True, random_state=None):
"""Generate a manifold of the megaman data"""
X, c = generate_megaman_data(sampling)
for i in range(nfolds):
X = np.hstack([_make_S_curve(x) for x in X.T])
if rotate:
rand = check_random_state(random_state)
R = rand.randn(X.shape[1], X.shape[1])
U, s, VT = np.linalg.svd(R)
X = np.dot(X, U)
return X, c |
python | def make_header(self, locale, catalog):
"""Populate header with correct data from top-most locale file."""
return {
"po-revision-date": self.get_catalogue_header_value(catalog, 'PO-Revision-Date'),
"mime-version": self.get_catalogue_header_value(catalog, 'MIME-Version'),
"last-translator": 'Automatic <[email protected]>',
"x-generator": "Python",
"language": self.get_catalogue_header_value(catalog, 'Language') or locale,
"lang": locale,
"content-transfer-encoding": self.get_catalogue_header_value(catalog, 'Content-Transfer-Encoding'),
"project-id-version": self.get_catalogue_header_value(catalog, 'Project-Id-Version'),
"pot-creation-date": self.get_catalogue_header_value(catalog, 'POT-Creation-Date'),
"domain": self.domain,
"report-msgid-bugs-to": self.get_catalogue_header_value(catalog, 'Report-Msgid-Bugs-To'),
"content-type": self.get_catalogue_header_value(catalog, 'Content-Type'),
"plural-forms": self.get_plural(catalog),
"language-team": self.get_catalogue_header_value(catalog, 'Language-Team')
} |
python | def create_chunk(buf):
"""Create a chunk for the HTTP "chunked" transfer encoding."""
chunk = []
chunk.append(s2b('{:X}\r\n'.format(len(buf))))
chunk.append(buf)
chunk.append(b'\r\n')
return b''.join(chunk) |
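For example, a body sent as two chunks plus the terminating zero-length chunk (standalone sketch; s2b is redefined locally since the original helper is not shown):

```python
def s2b(s):
    return s.encode('ascii')  # assumed behavior of the original helper

def create_chunk(buf):
    return b''.join([s2b('{:X}\r\n'.format(len(buf))), buf, b'\r\n'])

body = create_chunk(b'Hello, ') + create_chunk(b'world!') + create_chunk(b'')
print(body)  # b'7\r\nHello, \r\n6\r\nworld!\r\n0\r\n\r\n'
```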
java | public String md5Base64Digest(String signature) {
byte[] bytes = md5Digest(signature.getBytes());
return new String(Base64.encodeBase64(bytes));
} |
python | def data_filler_detailed_registration(self, number_of_rows, db):
'''creates and fills the table with detailed registration information
'''
try:
detailed_registration = db
data_list = list()
for i in range(0, number_of_rows):
post_det_reg = {
"id": rnd_id_generator(self),
"email": self.faker.safe_email(),
"password": self.faker.md5(raw_output=False),
"lastname": self.faker.last_name(),
"name": self.faker.first_name(),
"adress": self.faker.address(),
"phone": self.faker.phone_number()
}
detailed_registration.save(post_det_reg)
logger.warning(
'detailed_registration Commits are successful after write job!',
extra=d)
except Exception as e:
logger.error(e, extra=d) |
python | async def set_webhook(self, url: base.String,
certificate: typing.Union[base.InputFile, None] = None,
max_connections: typing.Union[base.Integer, None] = None,
allowed_updates: typing.Union[typing.List[base.String], None] = None) -> base.Boolean:
"""
Use this method to specify a url and receive incoming updates via an outgoing webhook.
Whenever there is an update for the bot, we will send an HTTPS POST request to the specified url,
containing a JSON-serialized Update. In case of an unsuccessful request,
we will give up after a reasonable amount of attempts.
Source: https://core.telegram.org/bots/api#setwebhook
:param url: HTTPS url to send updates to. Use an empty string to remove webhook integration
:type url: :obj:`base.String`
:param certificate: Upload your public key certificate so that the root certificate in use can be checked
:type certificate: :obj:`typing.Union[base.InputFile, None]`
:param max_connections: Maximum allowed number of simultaneous HTTPS connections to the webhook
for update delivery, 1-100.
:type max_connections: :obj:`typing.Union[base.Integer, None]`
:param allowed_updates: List the types of updates you want your bot to receive
:type allowed_updates: :obj:`typing.Union[typing.List[base.String], None]`
:return: Returns true
:rtype: :obj:`base.Boolean`
"""
allowed_updates = prepare_arg(allowed_updates)
payload = generate_payload(**locals(), exclude=['certificate'])
files = {}
prepare_file(payload, files, 'certificate', certificate)
result = await self.request(api.Methods.SET_WEBHOOK, payload, files)
return result |
java | @Override
protected void decode(ChannelHandlerContext paramChannelHandlerContext,
WebSocketFrame paramINBOUND_IN, List<Object> paramList)
throws Exception {
if (paramINBOUND_IN instanceof BinaryWebSocketFrame)
{
BinaryWebSocketFrame msg = (BinaryWebSocketFrame) paramINBOUND_IN;
ByteBuf data = msg.content();
paramList.add(data);
data.retain();
}
} |
java | public static boolean sendHttpResponse(
boolean isSuccess,
HttpExchange exchange,
byte[] response) {
int returnCode = isSuccess ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_UNAVAILABLE;
try {
exchange.sendResponseHeaders(returnCode, response.length);
} catch (IOException e) {
LOG.log(Level.SEVERE, "Failed to send response headers: ", e);
return false;
}
OutputStream os = exchange.getResponseBody();
try {
os.write(response);
} catch (IOException e) {
LOG.log(Level.SEVERE, "Failed to send http response: ", e);
return false;
} finally {
try {
os.close();
} catch (IOException e) {
LOG.log(Level.SEVERE, "Failed to close OutputStream: ", e);
return false;
}
}
return true;
} |
python | def get_common_register(start, end):
"""Get the register most commonly used in accessing structs.
Access to is considered for every opcode that accesses memory
in an offset from a register::
mov eax, [ebx + 5]
For every access, the struct-referencing registers, in this case
`ebx`, are counted. The most used one is returned.
Args:
start: The address to start at
end: The address to finish at
"""
registers = defaultdict(int)
for line in lines(start, end):
insn = line.insn
for operand in insn.operands:
if not operand.type.has_phrase:
continue
if not operand.base:
continue
register_name = operand.base
registers[register_name] += 1
return max(registers.iteritems(), key=operator.itemgetter(1))[0] |
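The counting idiom above in isolation (plain Python 3; the original uses Python 2's iteritems, as IDAPython did):

```python
from collections import defaultdict
import operator

accesses = ['ebx', 'eax', 'ebx', 'ebx', 'esi']  # illustrative operand bases
registers = defaultdict(int)
for reg in accesses:
    registers[reg] += 1

print(max(registers.items(), key=operator.itemgetter(1))[0])  # ebx
```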
python | def check_expired_activation(self, activation_key):
"""
Check if ``activation_key`` is still valid.
Raises a ``self.model.DoesNotExist`` exception if key is not present or
``activation_key`` is not a valid string
:param activation_key:
String containing the secret SHA1 for a valid activation.
:return:
True if the key has expired, False if still valid.
"""
if SHA1_RE.search(activation_key):
userena = self.get(activation_key=activation_key)
return userena.activation_key_expired()
raise self.model.DoesNotExist |
python | def set_attribute(self, name, value):
"""Sets attribute's name and value"""
self.attribute_name = name
self.attribute = value |
java | public static void checkIsSpec(Class<?> clazz) {
if (isSpec(clazz)) return;
if (Specification.class.isAssignableFrom(clazz))
throw new InvalidSpecException(
"Specification '%s' was not compiled properly (Spock AST transform was not run); try to do a clean build"
).withArgs(clazz.getName());
throw new InvalidSpecException(
"Class '%s' is not a Spock specification (does not extend spock.lang.Specification or a subclass thereof)"
).withArgs(clazz.getName());
} |
python | def filter_by_pattern(self, pattern):
"""Filter the Data Collection based on a list of booleans.
Args:
pattern: A list of True/False values. Typically, this is a list
with a length matching the length of the Data Collections values
but it can also be a pattern to be repeated over the Data Collection.
Return:
A new Data Collection with filtered data
"""
_filt_values, _filt_datetimes = self._filter_by_pattern(pattern)
if self._enumeration is None:
self._get_mutable_enumeration()
col_obj = self._enumeration['mutable'][self._collection_type]
collection = col_obj(self.header.duplicate(), _filt_values, _filt_datetimes)
collection._validated_a_period = self._validated_a_period
return collection |
python | def detect_xid_devices(self):
"""
For all of the com ports connected to the computer, send an
XID command '_c1'. If the device responds with '_xid', it is
an xid device.
"""
self.__xid_cons = []
for c in self.__com_ports:
device_found = False
for b in [115200, 19200, 9600, 57600, 38400]:
con = XidConnection(c, b)
try:
con.open()
except SerialException:
continue
con.flush_input()
con.flush_output()
returnval = con.send_xid_command("_c1", 5).decode('ASCII')
if returnval.startswith('_xid'):
device_found = True
self.__xid_cons.append(con)
if returnval != '_xid0':
# set the device into XID mode
con.send_xid_command('c10')
con.flush_input()
con.flush_output()
# be sure to reset the timer to avoid the 4.66 hours
# problem. (refer to XidConnection.xid_input_found to
# read about the 4.66 hours)
con.send_xid_command('e1')
con.send_xid_command('e5')
con.close()
if device_found:
break |
python | def get_firmware_version(self, cb=None):
"""
This method retrieves the Firmata firmware version
:param cb: Reference to a callback function
:returns: If no callback is specified, the firmware version is returned
"""
task = asyncio.ensure_future(self.core.get_firmware_version())
version = self.loop.run_until_complete(task)
if cb:
cb(version)
else:
return version |
python | def run_script(scriptfile):
'''run a script file'''
try:
f = open(scriptfile, mode='r')
except Exception:
return
mpstate.console.writeln("Running script %s" % scriptfile)
sub = mp_substitute.MAVSubstitute()
for line in f:
line = line.strip()
if line == "" or line.startswith('#'):
continue
try:
line = sub.substitute(line, os.environ)
except mp_substitute.MAVSubstituteError as ex:
print("Bad variable: %s" % str(ex))
if mpstate.settings.script_fatal:
sys.exit(1)
continue
if line.startswith('@'):
line = line[1:]
else:
mpstate.console.writeln("-> %s" % line)
process_stdin(line)
f.close() |
python | def _from_dict(cls, _dict):
"""Initialize a SentenceAnalysis object from a json dictionary."""
args = {}
if 'sentence_id' in _dict:
args['sentence_id'] = _dict.get('sentence_id')
else:
raise ValueError(
'Required property \'sentence_id\' not present in SentenceAnalysis JSON'
)
if 'text' in _dict:
args['text'] = _dict.get('text')
else:
raise ValueError(
'Required property \'text\' not present in SentenceAnalysis JSON'
)
if 'tones' in _dict:
args['tones'] = [
ToneScore._from_dict(x) for x in (_dict.get('tones'))
]
if 'tone_categories' in _dict:
args['tone_categories'] = [
ToneCategory._from_dict(x)
for x in (_dict.get('tone_categories'))
]
if 'input_from' in _dict:
args['input_from'] = _dict.get('input_from')
if 'input_to' in _dict:
args['input_to'] = _dict.get('input_to')
return cls(**args) |
java | public void marshall(DisassociateSkillGroupFromRoomRequest disassociateSkillGroupFromRoomRequest, ProtocolMarshaller protocolMarshaller) {
if (disassociateSkillGroupFromRoomRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(disassociateSkillGroupFromRoomRequest.getSkillGroupArn(), SKILLGROUPARN_BINDING);
protocolMarshaller.marshall(disassociateSkillGroupFromRoomRequest.getRoomArn(), ROOMARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public void transform(float source[], int sourceOffset, float destination[], int destOffset, int numberOfPoints) {
//TODO performance could be improved by removing the defensive copy guarding the destination array
float result[] = source == destination ? new float[numberOfPoints * 2] : destination;
for(int i=0;i<numberOfPoints * 2;i+=2) {
for(int j=0;j<6;j+=3) {
result[i + (j / 3)] = source[i + sourceOffset] * matrixPosition[j] + source[i + sourceOffset + 1] * matrixPosition[j + 1] + 1 * matrixPosition[j + 2];
}
}
if (source == destination) {
//for safety of the destination, the results are copied after the entire operation.
for(int i=0;i<numberOfPoints * 2;i+=2) {
destination[i + destOffset] = result[i];
destination[i + destOffset + 1] = result[i + 1];
}
}
} |
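The Java loop above applies a 2x3 affine matrix to interleaved (x, y) pairs. The same math in a NumPy sketch (illustrative matrix values):

```python
import numpy as np

m = np.array([[1.0, 0.0, 10.0],   # [[a, b, tx],
              [0.0, 1.0, 5.0]])   #  [c, d, ty]]

points = np.array([[1.0, 2.0], [3.0, 4.0]])         # (x, y) rows
homogeneous = np.hstack([points, np.ones((2, 1))])  # append 1 for translation
print(homogeneous @ m.T)  # [[11.  7.] [13.  9.]]
```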
python | def exec_args(args, in_data='', chdir=None, shell=None, emulate_tty=False):
"""
Run a command in a subprocess, emulating the argument handling behaviour of
SSH.
:param list[str] args:
Argument vector.
:param bytes in_data:
Optional standard input for the command.
:param bool emulate_tty:
If :data:`True`, arrange for stdout and stderr to be merged into the
stdout pipe and for LF to be translated into CRLF, emulating the
behaviour of a TTY.
:return:
(return code, stdout bytes, stderr bytes)
"""
LOG.debug('exec_args(%r, ..., chdir=%r)', args, chdir)
assert isinstance(args, list)
if emulate_tty:
stderr = subprocess.STDOUT
else:
stderr = subprocess.PIPE
proc = subprocess.Popen(
args=args,
stdout=subprocess.PIPE,
stderr=stderr,
stdin=subprocess.PIPE,
cwd=chdir,
)
stdout, stderr = proc.communicate(in_data)
if emulate_tty:
stdout = stdout.replace(b('\n'), b('\r\n'))
return proc.returncode, stdout, stderr or b('') |
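A hedged usage sketch, assuming exec_args and its b() helper above are importable and a POSIX echo is on PATH:

```python
rc, out, err = exec_args(['echo', 'hello'], emulate_tty=True)
print(rc)   # 0
print(out)  # b'hello\r\n' -- LF translated to CRLF under TTY emulation
```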
java | public static <T, L extends List<T>> L sortThis(L list, final Predicate2<? super T, ? super T> predicate)
{
return Iterate.sortThis(list, new Comparator<T>()
{
public int compare(T o1, T o2)
{
if (predicate.accept(o1, o2))
{
return -1;
}
if (predicate.accept(o2, o1))
{
return 1;
}
return 0;
}
});
} |
java | @UiHandler("m_startTime")
void onStartTimeChange(CmsDateBoxEvent event) {
if (handleChange() && !event.isUserTyping()) {
m_controller.setStartTime(event.getDate());
}
} |
python | def add_process(self, tgt, args=None, kwargs=None, name=None):
'''
Create a process with the given args + kwargs.
This will determine if tgt is a Process class; otherwise it assumes
it is a function
'''
if args is None:
args = []
if kwargs is None:
kwargs = {}
if salt.utils.platform.is_windows():
# Need to ensure that 'log_queue' and 'log_queue_level' is
# correctly transferred to processes that inherit from
# 'MultiprocessingProcess'.
if type(MultiprocessingProcess) is type(tgt) and (
issubclass(tgt, MultiprocessingProcess)):
need_log_queue = True
else:
need_log_queue = False
if need_log_queue:
if 'log_queue' not in kwargs:
if hasattr(self, 'log_queue'):
kwargs['log_queue'] = self.log_queue
else:
kwargs['log_queue'] = (
salt.log.setup.get_multiprocessing_logging_queue()
)
if 'log_queue_level' not in kwargs:
if hasattr(self, 'log_queue_level'):
kwargs['log_queue_level'] = self.log_queue_level
else:
kwargs['log_queue_level'] = (
salt.log.setup.get_multiprocessing_logging_level()
)
# create a nicer name for the debug log
if name is None:
if isinstance(tgt, types.FunctionType):
name = '{0}.{1}'.format(
tgt.__module__,
tgt.__name__,
)
else:
name = '{0}{1}.{2}'.format(
tgt.__module__,
'.{0}'.format(tgt.__class__) if six.text_type(tgt.__class__) != "<type 'type'>" else '',
tgt.__name__,
)
if type(multiprocessing.Process) is type(tgt) and issubclass(tgt, multiprocessing.Process):
process = tgt(*args, **kwargs)
else:
process = multiprocessing.Process(target=tgt, args=args, kwargs=kwargs, name=name)
if isinstance(process, SignalHandlingMultiprocessingProcess):
with default_signals(signal.SIGINT, signal.SIGTERM):
process.start()
else:
process.start()
log.debug("Started '%s' with pid %s", name, process.pid)
self._process_map[process.pid] = {'tgt': tgt,
'args': args,
'kwargs': kwargs,
'Process': process}
return process |
python | def from_data(source):
"""Infers a table/view schema from its JSON representation, a list of records, or a Pandas
dataframe.
Args:
source: the Pandas Dataframe, a dictionary representing a record, a list of heterogeneous
data (record) or homogeneous data (list of records) from which to infer the schema, or
a definition of the schema as a list of dictionaries with 'name' and 'type' entries
and possibly 'mode' and 'description' entries. Only used if no data argument was provided.
'mode' can be 'NULLABLE', 'REQUIRED' or 'REPEATED'. For the allowed types, see:
https://cloud.google.com/bigquery/preparing-data-for-bigquery#datatypes
Note that there is potential ambiguity when passing a list of lists or a list of
dicts between whether that should be treated as a list of records or a single record
that is a list. The heuristic used is to check the length of the entries in the
list; if they are equal then a list of records is assumed. To avoid this ambiguity
you can instead use the Schema.from_record method which assumes a single record,
in either list of values or dictionary of key-values form.
Returns:
A Schema for the data.
"""
if isinstance(source, pandas.DataFrame):
bq_schema = Schema._from_dataframe(source)
elif isinstance(source, list):
if len(source) == 0:
bq_schema = source
elif all(isinstance(d, dict) for d in source):
if all('name' in d and 'type' in d for d in source):
# It looks like a bq_schema; use it as-is.
bq_schema = source
elif all(len(d) == len(source[0]) for d in source):
bq_schema = Schema._from_dict_record(source[0])
else:
raise Exception(('Cannot create a schema from heterogeneous list %s; perhaps you meant ' +
'to use Schema.from_record?') % str(source))
elif isinstance(source[0], list) and \
all([isinstance(l, list) and len(l) == len(source[0]) for l in source]):
# A list of lists all of the same length; treat first entry as a list record.
bq_schema = Schema._from_record(source[0])
else:
# A heterogeneous list; treat as a record.
raise Exception(('Cannot create a schema from heterogeneous list %s; perhaps you meant ' +
'to use Schema.from_record?') % str(source))
elif isinstance(source, dict):
raise Exception(('Cannot create a schema from dict %s; perhaps you meant to use ' +
'Schema.from_record?') % str(source))
else:
raise Exception('Cannot create a schema from %s' % str(source))
return Schema(bq_schema) |
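The length heuristic the docstring describes, in isolation: equal-length list entries are assumed to be multiple records rather than one list-valued record (a sketch of the documented heuristic, not the library's code):

```python
def looks_like_list_of_records(source):
    """Equal-length entries -> treat as a list of records."""
    return bool(source) and all(
        isinstance(item, (list, dict)) and len(item) == len(source[0])
        for item in source
    )

print(looks_like_list_of_records([[1, 'a'], [2, 'b']]))     # True
print(looks_like_list_of_records([[1, 'a'], [2, 'b', 3]]))  # False
```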
python | def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll')
env['_SHLINK_TARGETS'] = windowsShlinkTargets
env['_SHLINK_SOURCES'] = windowsShlinkSources
env['SHLINKCOM'] = compositeShLinkAction
env.Append(SHLIBEMITTER = [windowsLibEmitter])
env.Append(LDMODULEEMITTER = [windowsLibEmitter])
env['LINK'] = 'link'
env['LINKFLAGS'] = SCons.Util.CLVar('/nologo')
env['_PDB'] = pdbGenerator
env['LINKCOM'] = compositeLinkAction
env.Append(PROGEMITTER = [prog_emitter])
env['LIBDIRPREFIX']='/LIBPATH:'
env['LIBDIRSUFFIX']=''
env['LIBLINKPREFIX']=''
env['LIBLINKSUFFIX']='$LIBSUFFIX'
env['WIN32DEFPREFIX'] = ''
env['WIN32DEFSUFFIX'] = '.def'
env['WIN32_INSERT_DEF'] = 0
env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}'
env['WIN32EXPPREFIX'] = ''
env['WIN32EXPSUFFIX'] = '.exp'
env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}'
env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'
env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
env['WINDOWSPROGMANIFESTPREFIX'] = ''
env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'
env['REGSVRACTION'] = regServerCheck
env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
env['REGSVRFLAGS'] = '/s '
env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}'
env['WINDOWS_EMBED_MANIFEST'] = 0
env['MT'] = 'mt'
#env['MTFLAGS'] = ['-hashupdate']
env['MTFLAGS'] = SCons.Util.CLVar('/nologo')
# Note: use - here to prevent build failure if no manifest produced.
# This seems much simpler than a fancy system using a function action to see
# if the manifest actually exists before trying to run mt with it.
env['MTEXECOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;1'
env['MTSHLIBCOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;2'
# TODO Future work garyo 27-Feb-11
env['_MANIFEST_SOURCES'] = None # _windowsManifestSources
# Set-up ms tools paths
msvc_setup_env_once(env)
# Loadable modules are on Windows the same as shared libraries, but they
# are subject to different build parameters (LDMODULE* variables).
# Therefore LDMODULE* variables correspond as much as possible to
# SHLINK*/SHLIB* ones.
SCons.Tool.createLoadableModuleBuilder(env)
env['LDMODULE'] = '$SHLINK'
env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
env['_LDMODULE_TARGETS'] = _windowsLdmodTargets
env['_LDMODULE_SOURCES'] = _windowsLdmodSources
env['LDMODULEEMITTER'] = [ldmodEmitter]
env['LDMODULECOM'] = compositeLdmodAction |
python | def set_prop(self, prop, value, ef=None):
"""
Set attribute values.
:param prop: attribute name; when ``ef`` is set, the number of decoys
    (an int) corresponding to a given FPF
:param value: attribute value; when ``ef`` is set, the corresponding
    enrichment factor (a float)
:param ef: if truthy, store the value in the ``ef`` mapping instead
:return:
"""
if ef:
# prop should be restricted to n_decoys, an int, the no. of decoys corresponding to a given FPF.
# value is restricted to the corresponding enrichment factor and should be a float
self.ef[prop] = value
else:
if prop == 'ensemble':
# value is a tuple of strings that gives the ensemble composition
self.ensemble = value
elif prop == 'auc':
# value is a float that gives the auc value
self.auc = value |
python | def _get_resolved_dictionary(self, input_dict, key, resolved_value, remaining):
"""
Resolves the function and returns the updated dictionary
:param input_dict: Dictionary to be resolved
:param key: Name of this intrinsic.
:param resolved_value: Resolved or updated value for this action.
:param remaining: Remaining sections for the GetAtt action.
"""
if resolved_value:
# We resolved to a new resource logicalId. Use this as the first element and keep remaining elements intact
# This is the new value of Fn::GetAtt
input_dict[key] = [resolved_value] + remaining
return input_dict |
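A worked example of the list rewrite above (hypothetical logical IDs):

```python
input_dict = {'Fn::GetAtt': ['OldId', 'Outputs', 'Url']}
resolved_value, remaining = 'NewId', ['Outputs', 'Url']

input_dict['Fn::GetAtt'] = [resolved_value] + remaining
print(input_dict)  # {'Fn::GetAtt': ['NewId', 'Outputs', 'Url']}
```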
java | public String resolvePackageName (final String packageName)
{
String resolvedPackageName = packageName;
if (resolvedPackageName != null && resolvedPackageName.startsWith ("*"))
{
resolvedPackageName = getParserPackage () + resolvedPackageName.substring (1);
if (resolvedPackageName.startsWith ("."))
{
resolvedPackageName = resolvedPackageName.substring (1);
}
}
return resolvedPackageName;
} |
java | public boolean isRenderLabel() {
return (boolean) (Boolean) getStateHelper().eval(PropertyKeys.renderLabel, net.bootsfaces.component.ComponentUtils.isRenderLabelDefault());
} |
python | def rollout(self, batch_info: BatchInfo, model: RlModel, number_of_steps: int) -> Rollout:
""" Calculate env rollout """
assert not model.is_recurrent, "Replay env roller does not support recurrent models"
accumulator = TensorAccumulator()
episode_information = [] # List of dictionaries with episode information
for step_idx in range(number_of_steps):
step = model.step(self.last_observation)
replay_extra_information = {}
accumulator.add('observations', self.last_observation_cpu)
# Add step to the tensor accumulator
for name, tensor in step.items():
tensor_cpu = tensor.cpu()
accumulator.add(name, tensor_cpu)
if name != 'actions':
replay_extra_information[name] = tensor_cpu.numpy()
actions_numpy = step['actions'].detach().cpu().numpy()
new_obs, new_rewards, new_dones, new_infos = self.environment.step(actions_numpy)
# Store rollout in the experience replay buffer
self.replay_buffer.store_transition(
frame=self.last_observation_cpu.numpy(),
action=actions_numpy,
reward=new_rewards,
done=new_dones,
extra_info=replay_extra_information
)
# Done is flagged true when the episode has ended AND the frame we see is already a first frame from the
# next episode
dones_tensor = torch.from_numpy(new_dones.astype(np.float32)).clone()
accumulator.add('dones', dones_tensor)
self.last_observation_cpu = torch.from_numpy(new_obs).clone()
self.last_observation = self.last_observation_cpu.to(self.device)
accumulator.add('rewards', torch.from_numpy(new_rewards.astype(np.float32)).clone())
episode_information.append(new_infos)
accumulated_tensors = accumulator.result()
return Trajectories(
num_steps=accumulated_tensors['observations'].size(0),
num_envs=accumulated_tensors['observations'].size(1),
environment_information=episode_information,
transition_tensors=accumulated_tensors,
rollout_tensors={
'final_values': model.value(self.last_observation).cpu()
}
) |
java | public static byte[] decodeString(String encoded) {
return encoded == null ? null : Base64.getDecoder().decode(encoded);
} |
python | def create(self, name=None, description=None):
"""Creates a new, empty dataset.
Parameters
----------
name : str, optional
The name of the dataset.
description : str, optional
The description of the dataset.
Returns
-------
requests.Response
The response contains the properties of a new dataset as a JSON object.
"""
uri = URITemplate(self.baseuri + '/{owner}').expand(
owner=self.username)
return self.session.post(uri, json=self._attribs(name, description)) |
python | def __load_child_classes(self, ac: AssetClass):
""" Loads child classes/stocks """
# load child classes for ac
db = self.__get_session()
entities = (
db.query(dal.AssetClass)
.filter(dal.AssetClass.parentid == ac.id)
.order_by(dal.AssetClass.sortorder)
.all()
)
# map
for entity in entities:
child_ac = self.__map_entity(entity)
# depth
child_ac.depth = ac.depth + 1
ac.classes.append(child_ac)
# Add to index
self.model.asset_classes.append(child_ac)
self.__load_child_classes(child_ac) |
python | def _fix_dependendent_params(self, i):
"""Unhide keys if necessary after removing the param at index *i*."""
if not self.params[i].showkey:
for param in self.params[i + 1:]:
if not param.showkey:
param.showkey = True |
java | public void setMFR(Integer newMFR) {
Integer oldMFR = mfr;
mfr = newMFR;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.GPARC__MFR, oldMFR, mfr));
} |
python | def code(self, text, lang=None):
"""Add a code block."""
# WARNING: lang is discarded currently.
with self.paragraph(stylename='code'):
lines = text.splitlines()
for line in lines[:-1]:
self._code_line(line)
self.linebreak()
self._code_line(lines[-1]) |
java | public static <E> PCollection<E> partition(PCollection<E> collection,
Dataset<E> dataset) {
return partition(collection, dataset, -1);
} |
python | def _verify_tile_versions(self, hw):
"""Verify that the tiles have the correct versions
"""
for tile, expected_tile_version in self._tile_versions.items():
actual_tile_version = str(hw.get(tile).tile_version())
if expected_tile_version != actual_tile_version:
raise ArgumentError("Tile has incorrect firmware", tile=tile, \
expected_version=expected_tile_version, actual_version=actual_tile_version) |
python | def is_structured_array(obj):
"""
Returns True if the given object is a Numpy Structured Array.
Parameters
----------
obj: instance
The object to test whether or not is a Numpy Structured Array.
"""
if isinstance(obj, np.ndarray) and hasattr(obj, 'dtype'):
if obj.dtype.names is not None:
return True
return False |
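Quick check with a plain and a structured array, assuming the function above is in scope:

```python
import numpy as np

plain = np.zeros(3)
structured = np.zeros(3, dtype=[('x', 'f4'), ('y', 'i4')])

print(is_structured_array(plain))       # False: dtype.names is None
print(is_structured_array(structured))  # True: dtype.names == ('x', 'y')
```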
java | public void registerProviders(Collection<?> theProviders) {
Validate.noNullElements(theProviders, "theProviders must not contain any null elements");
myProviderRegistrationMutex.lock();
try {
if (!myStarted) {
for (Object provider : theProviders) {
ourLog.info("Registration of provider [" + provider.getClass().getName() + "] will be delayed until FHIR server startup");
if (provider instanceof IResourceProvider) {
myResourceProviders.add((IResourceProvider) provider);
} else {
myPlainProviders.add(provider);
}
}
return;
}
} finally {
myProviderRegistrationMutex.unlock();
}
registerProviders(theProviders, false);
} |
java | @BetaApi
public final Operation insertSubnetwork(String region, Subnetwork subnetworkResource) {
InsertSubnetworkHttpRequest request =
InsertSubnetworkHttpRequest.newBuilder()
.setRegion(region)
.setSubnetworkResource(subnetworkResource)
.build();
return insertSubnetwork(request);
} |
java | private boolean rvaIsWithin(VirtualLocation loc) {
long endpoint = loc.from() + loc.size();
return virtualAddress >= loc.from() && virtualAddress < endpoint;
} |
java | private static void startIfNotStarted(String serviceName) {
synchronized (queueNames) {
if (!queueNames.contains(serviceName)) {
// The following creates a new RabbitMQ client
try {
IMqConsumerClient.singleton.consumeStaticQueue(TransferQueueUtil.getTransferQueue(serviceName), callback);
queueNames.add(serviceName);
} catch (Throwable e) {
LOG.warn("订阅静态队列失败:" + serviceName, e);
}
}
}
} |
python | def create_scaffold(project_name):
""" create scaffold with specified project name.
"""
if os.path.isdir(project_name):
logger.log_warning(u"Folder {} exists, please specify a new folder name.".format(project_name))
return
logger.color_print("Start to create new project: {}".format(project_name), "GREEN")
logger.color_print("CWD: {}\n".format(os.getcwd()), "BLUE")
def create_path(path, ptype):
if ptype == "folder":
os.makedirs(path)
elif ptype == "file":
open(path, 'w').close()
msg = "created {}: {}".format(ptype, path)
logger.color_print(msg, "BLUE")
path_list = [
(project_name, "folder"),
(os.path.join(project_name, "api"), "folder"),
(os.path.join(project_name, "testcases"), "folder"),
(os.path.join(project_name, "testsuites"), "folder"),
(os.path.join(project_name, "reports"), "folder"),
(os.path.join(project_name, "debugtalk.py"), "file"),
(os.path.join(project_name, ".env"), "file")
]
[create_path(p[0], p[1]) for p in path_list]
# create .gitignore file
ignore_file = os.path.join(project_name, ".gitignore")
ignore_content = ".env\nreports/*"
with open(ignore_file, "w") as f:
f.write(ignore_content) |
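Calling the helper above once creates the whole layout encoded in path_list plus the .gitignore; a sketch with an illustrative project name:

create_scaffold('demo_project')
# demo_project/
#     api/  testcases/  testsuites/  reports/
#     debugtalk.py  .env  .gitignore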
java | @Override
public Long getPropertyLong(Class<?> aClass, String key)
{
return getPropertyLong(new StringBuilder(aClass.getName()).append(".").append(key).toString());
} |
python | def liquid_precipitation_depth(self, value=999.0):
"""Corresponds to IDD Field `liquid_precipitation_depth`
Args:
value (float): value for IDD Field `liquid_precipitation_depth`
Unit: mm
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `liquid_precipitation_depth`'.format(value))
self._liquid_precipitation_depth = value |
java | protected String replace(String template, String placeholder, String value)
{
if (template == null)
return null;
if ((placeholder == null) || (value == null))
return template;
while (true) {
int index = template.indexOf(placeholder);
if (index < 0)
break;
InternalStringBuilder temp = new InternalStringBuilder(template.substring(0, index));
temp.append(value);
temp.append(template.substring(index + placeholder.length()));
template = temp.toString();
}
return template;
} |
python | def fetch_googl():
"""Returns stock prices for Google company."""
yql = YQL('GOOGL', '2014-01-01', '2014-01-10')
for item in yql:
print(item.get('date'), item.get('price'))
yql.select('GOOGL', '2014-01-01', '2014-01-10')
for item in yql:
print(item.get('date'), item.get('price')) |
java | public void save() throws FileNotFoundException {
PrintStream p = new PrintStream(file);
p.println(COMMENT_PREFIX + "Saved ELKI settings. First line is title, remaining lines are parameters.");
for (Pair<String, ArrayList<String>> settings : store) {
p.println(settings.first);
for (String str : settings.second) {
p.println(str);
}
p.println();
}
p.close();
} |
java | public static Bbox intersection(Bbox one, Bbox two) {
if (!intersects(one, two)) {
return null;
} else {
double minx = two.getX() > one.getX() ? two.getX() : one.getX();
double maxx = two.getMaxX() < one.getMaxX() ? two.getMaxX() : one.getMaxX();
double miny = two.getY() > one.getY() ? two.getY() : one.getY();
double maxy = two.getMaxY() < one.getMaxY() ? two.getMaxY() : one.getMaxY();
return new Bbox(minx, miny, (maxx - minx), (maxy - miny));
}
} |
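The same clipping logic as the Java method above, sketched in Python with plain (x, y, width, height) tuples standing in for Bbox; the tuple layout and the treatment of touching edges as non-intersecting are assumptions:

def bbox_intersection(one, two):
    # one, two: (x, y, width, height); returns None when the boxes do not overlap
    minx = max(one[0], two[0])
    miny = max(one[1], two[1])
    maxx = min(one[0] + one[2], two[0] + two[2])
    maxy = min(one[1] + one[3], two[1] + two[3])
    if minx >= maxx or miny >= maxy:
        return None
    return (minx, miny, maxx - minx, maxy - miny)

print(bbox_intersection((0, 0, 4, 4), (2, 2, 4, 4)))  # (2, 2, 2, 2)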
java | private void parseParenthesizedExpr() throws TTXPathException {
consume(TokenType.OPEN_BR, true);
if (!(mToken.getType() == TokenType.CLOSE_BR)) {
parseExpression();
}
consume(TokenType.CLOSE_BR, true);
} |
python | def descr_prototype(self, buf):
"""
Describe the prototype ("head") of the function.
"""
state = "define" if self.blocks else "declare"
ret = self.return_value
args = ", ".join(str(a) for a in self.args)
name = self.get_reference()
attrs = self.attributes
if any(self.args):
vararg = ', ...' if self.ftype.var_arg else ''
else:
vararg = '...' if self.ftype.var_arg else ''
linkage = self.linkage
cconv = self.calling_convention
prefix = " ".join(str(x) for x in [state, linkage, cconv, ret] if x)
metadata = self._stringify_metadata()
prototype = "{prefix} {name}({args}{vararg}) {attrs}{metadata}\n".format(
prefix=prefix, name=name, args=args, vararg=vararg,
attrs=attrs, metadata=metadata)
buf.append(prototype) |
python | def get_free_memory():
"""Return current free memory on the machine.
Currently supported for Windows, Linux, MacOS.
:returns: Free memory in MB unit
:rtype: int
"""
if 'win32' in sys.platform:
# windows
return get_free_memory_win()
elif 'linux' in sys.platform:
# linux
return get_free_memory_linux()
elif 'darwin' in sys.platform:
# mac
return get_free_memory_osx() |
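Note that the function above implicitly returns None when sys.platform matches none of the three branches; a guarded usage sketch:

import sys

free_mb = get_free_memory()
if free_mb is None:
    print('unsupported platform:', sys.platform)
else:
    print('free memory: %d MB' % free_mb)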
python | def append_segment(self, apdu):
"""This function appends the apdu content to the end of the current
APDU being built. The segmentAPDU is the context."""
if _debug: SSM._debug("append_segment %r", apdu)
# check for no context
if not self.segmentAPDU:
raise RuntimeError("no segmentation context established")
# append the data
self.segmentAPDU.put_data(apdu.pduData) |
python | def modes_off(self):
"""Turn off any mode user may be in."""
bm = self.fitsimage.get_bindmap()
bm.reset_mode(self.fitsimage) |
java | public void setResult(Object result) {
if (resultSent) {
throw new RuntimeException("You can only set the result once.");
}
this.resultSent = true;
Channel channel = this.channel.get();
if (channel == null) {
log.warn("The client is no longer connected.");
return;
}
call.setResult(result);
Invoke reply = new Invoke();
reply.setCall(call);
reply.setTransactionId(transactionId);
channel.write(reply);
channel.getConnection().unregisterDeferredResult(this);
} |
java | public GetListOfFeaturedPlaylistsRequest.Builder getListOfFeaturedPlaylists() {
return new GetListOfFeaturedPlaylistsRequest.Builder(accessToken)
.setDefaults(httpManager, scheme, host, port);
} |
java | public <T> Optional<T> getValueAsObject(final String name, final TypeToken<T> type)
{
try
{
final Optional<String> raw = getValueAsString(name);
if (raw.isPresent())
{
//noinspection unchecked
return Optional.ofNullable((T) objectMapper.readValue(raw.get(), objectMapper.constructType(type.getType())));
}
else
{
return Optional.empty();
}
}
catch (IOException e)
{
LOGGER.warn("failed to get value '{}' as object {} - will return an absent value, reason: {}", name, type, e.getMessage());
return Optional.empty();
}
} |
java | private TempBlockMeta createBlockMetaInternal(long sessionId, long blockId,
BlockStoreLocation location, long initialBlockSize, boolean newBlock)
throws BlockAlreadyExistsException {
// NOTE: a temp block is only visible to its own writer, so there is no need to
// acquire the block lock here since the block is not shared
try (LockResource r = new LockResource(mMetadataWriteLock)) {
if (newBlock) {
checkTempBlockIdAvailable(blockId);
}
StorageDirView dirView =
mAllocator.allocateBlockWithView(sessionId, initialBlockSize, location, getUpdatedView());
if (dirView == null) {
// Allocator fails to find a proper place for this new block.
return null;
}
// TODO(carson): Add tempBlock to corresponding storageDir and remove the use of
// StorageDirView.createTempBlockMeta.
TempBlockMeta tempBlock = dirView.createTempBlockMeta(sessionId, blockId, initialBlockSize);
try {
// Add allocated temp block to metadata manager. This should never fail if allocator
// correctly assigns a StorageDir.
mMetaManager.addTempBlockMeta(tempBlock);
} catch (WorkerOutOfSpaceException | BlockAlreadyExistsException e) {
// If we reach here, allocator is not working properly
LOG.error("Unexpected failure: {} bytes allocated at {} by allocator, "
+ "but addTempBlockMeta failed", initialBlockSize, location);
throw Throwables.propagate(e);
}
return tempBlock;
}
} |
java | public static auditsyslogpolicy_lbvserver_binding[] get(nitro_service service, String name) throws Exception{
auditsyslogpolicy_lbvserver_binding obj = new auditsyslogpolicy_lbvserver_binding();
obj.set_name(name);
auditsyslogpolicy_lbvserver_binding response[] = (auditsyslogpolicy_lbvserver_binding[]) obj.get_resources(service);
return response;
} |
java | public void addIndexes(StorableInfo<S> info, Direction defaultDirection) {
for (int i=info.getIndexCount(); --i>=0; ) {
add(info.getIndex(i).setDefaultDirection(defaultDirection));
}
} |
python | def content_types(self):
"""
Provides access to content type management methods for content types of an environment.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/content-types
:return: :class:`EnvironmentContentTypesProxy <contentful_management.space_content_types_proxy.EnvironmentContentTypesProxy>` object.
:rtype: contentful.space_content_types_proxy.EnvironmentContentTypesProxy
Usage:
>>> space_content_types_proxy = environment.content_types()
<EnvironmentContentTypesProxy space_id="cfexampleapi" environment_id="master">
"""
return EnvironmentContentTypesProxy(self._client, self.space.id, self.id) |
java | public static String getCryptoAlgorithm(String password) {
if (null == password) {
return null;
}
String algorithm = null;
String data = password.trim();
if (data.length() >= 2) {
if ('{' == data.charAt(0)) {
int end = data.indexOf('}', 1);
if (end > 0) {
algorithm = data.substring(1, end).trim();
}
}
}
return algorithm;
} |
java | @SuppressWarnings("unchecked")
private void addToRootMap(String propertyPath, Object propertyValue)
throws PropertyException {
// split propertyPath
String[] propertyKeyArray = propertyPath.split("\\.");
// load configMap from disk
Map<String, Object> configMap = load();
// if the path is a single token, add it directly to the root map
if (propertyKeyArray.length == 1) {
configMap.put(propertyPath, propertyValue);
}
// otherwise descend into (or build) the nested maps
Map<String, Object> deeperNestedMap = configMap;
Object foundedValue = null;
// follow every key except the last; the last key receives the value
for (int i = 0; i < propertyKeyArray.length - 1; i++) {
foundedValue = deeperNestedMap.get(propertyKeyArray[i]);
if (foundedValue instanceof Map) {
// go deeper if something was found
deeperNestedMap = (Map<String, Object>) foundedValue;
} else {
// if foundedValue == null or not a Map
// then build from the i position new nested map(s)
String[] subArray = getCopyFrom(i + 1, propertyKeyArray);
Map<String, Object> newNestedMap = buildNewNestedMap(subArray,
propertyValue);
// add the newNestedMap map to the deeperNestedMap
deeperNestedMap.put(propertyKeyArray[i], newNestedMap);
break;
}
if (i == propertyKeyArray.length - 2) {
deeperNestedMap.put(
propertyKeyArray[propertyKeyArray.length - 1],
propertyValue);
}
}
// save all
save(configMap);
} |
python | def uriref_startswith_iriref(v1: URIRef, v2: Union[str, ShExJ.IRIREF]) -> bool:
""" Determine whether a :py:class:`rdflib.URIRef` value starts with the text of a :py:class:`ShExJ.IRIREF` value """
return str(v1).startswith(str(v2)) |
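A two-line check of the helper above; it needs only rdflib, since plain strings are accepted for the second argument via the Union type:

from rdflib import URIRef

print(uriref_startswith_iriref(URIRef('http://example.org/ns#name'), 'http://example.org/ns#'))  # True
print(uriref_startswith_iriref(URIRef('http://example.org/ns#name'), 'https://other.org/'))      # False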
python | def Decorate(cls, class_name, member, parent_member):
"""Decorates a member with @typecheck. Inherit checks from parent member."""
if isinstance(member, property):
fget = cls.DecorateMethod(class_name, member.fget, parent_member)
fset = None
if member.fset:
fset = cls.DecorateMethod(class_name, member.fset, parent_member)
fdel = None
if member.fdel:
fdel = cls.DecorateMethod(class_name, member.fdel, parent_member)
return property(fget, fset, fdel, member.__doc__)
else:
return cls.DecorateMethod(class_name, member, parent_member) |
java | public ServiceFuture<Void> addAsync(JobAddParameter job, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromHeaderResponse(addWithServiceResponseAsync(job), serviceCallback);
} |
python | async def exit_rescue_mode(
self, wait: bool = False, wait_interval: int = 5):
"""
Exit rescue mode.
:param wait: If specified, wait until the deploy is complete.
:param wait_interval: How often to poll, defaults to 5 seconds
"""
try:
self._data = await self._handler.exit_rescue_mode(
system_id=self.system_id
)
except CallError as error:
if error.status == HTTPStatus.FORBIDDEN:
message = "Not allowed to exit rescue mode."
raise OperationNotAllowed(message) from error
else:
raise
if not wait:
return self
else:
# Wait for machine to finish exiting rescue mode
while self.status == NodeStatus.EXITING_RESCUE_MODE:
await asyncio.sleep(wait_interval)
self._data = await self._handler.read(system_id=self.system_id)
if self.status == NodeStatus.FAILED_EXITING_RESCUE_MODE:
msg = "{hostname} failed to exit rescue mode.".format(
hostname=self.hostname
)
raise RescueModeFailure(msg, self)
return self |
python | def instruction_LSR_memory(self, opcode, ea, m):
""" Logical shift right memory location """
r = self.LSR(m)
# log.debug("$%x LSR memory value $%x >> 1 = $%x and write it to $%x \t| %s" % (
# self.program_counter,
# m, r, ea,
# self.cfg.mem_info.get_shortest(ea)
# ))
return ea, r & 0xff |
python | def return_msg(self, reply_code, reply_text, exchange, routing_key):
'''
Return a failed message. Not named "return" because "return" is a
reserved keyword in Python.
'''
args = Writer()
args.write_short(reply_code).\
write_shortstr(reply_text).\
write_shortstr(exchange).\
write_shortstr(routing_key)
self.send_frame(MethodFrame(self.channel_id, 60, 50, args)) |
python | def update_with(self, name, security_info):
""" insert/clear authorizations
:param str name: name of the security info to be updated
:param security_info: the real security data, token, ...etc.
:type security_info: **(username, password)** for *basicAuth*, **token** in str for *oauth2*, *apiKey*.
:raises ValueError: unsupported types of authorizations
"""
s = self.__app.root.securityDefinitions.get(name, None)
if s == None:
raise ValueError('Unknown security name: [{0}]'.format(name))
cred = security_info
header = True
if s.type == 'basic':
cred = 'Basic ' + base64.standard_b64encode(six.b('{0}:{1}'.format(*security_info))).decode('utf-8')
key = 'Authorization'
elif s.type == 'apiKey':
key = s.name
header = getattr(s, 'in') == 'header'
elif s.type == 'oauth2':
key = 'access_token'
else:
raise ValueError('Unsupported Authorization type: [{0}, {1}]'.format(name, s.type))
self.__info.update({name: (header, {key: cred})}) |
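Hypothetical calls exercising the three branches above; the security-definition names and the client variable are assumptions about the loaded spec, not values from this snippet:

client.update_with('simple_basic_auth', ('alice', 's3cret'))  # basic: encoded into an Authorization header
client.update_with('api_key_auth', 'my-api-key')              # apiKey: header or query, per the spec's "in"
client.update_with('oauth2_auth', 'ya29.example-token')       # oauth2: sent as access_token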