language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k) |
---|---|
java | private void writeAssignments(Project project)
{
Project.Assignments assignments = m_factory.createProjectAssignments();
project.setAssignments(assignments);
List<Project.Assignments.Assignment> list = assignments.getAssignment();
for (ResourceAssignment assignment : m_projectFile.getResourceAssignments())
{
list.add(writeAssignment(assignment));
}
//
// Check to see if we have any tasks that have a percent complete value
// but do not have resource assignments. If any exist, then we must
// write a dummy resource assignment record to ensure that the MSPDI
// file shows the correct percent complete amount for the task.
//
ProjectConfig config = m_projectFile.getProjectConfig();
boolean autoUniqueID = config.getAutoAssignmentUniqueID();
if (!autoUniqueID)
{
config.setAutoAssignmentUniqueID(true);
}
for (Task task : m_projectFile.getTasks())
{
double percentComplete = NumberHelper.getDouble(task.getPercentageComplete());
if (percentComplete != 0 && task.getResourceAssignments().isEmpty())
{
ResourceAssignment dummy = new ResourceAssignment(m_projectFile, task);
Duration duration = task.getDuration();
if (duration == null)
{
duration = Duration.getInstance(0, TimeUnit.HOURS);
}
double durationValue = duration.getDuration();
TimeUnit durationUnits = duration.getUnits();
double actualWork = (durationValue * percentComplete) / 100;
double remainingWork = durationValue - actualWork;
dummy.setResourceUniqueID(NULL_RESOURCE_ID);
dummy.setWork(duration);
dummy.setActualWork(Duration.getInstance(actualWork, durationUnits));
dummy.setRemainingWork(Duration.getInstance(remainingWork, durationUnits));
// Without this, MS Project will mark a 100% complete milestone as 99% complete
if (percentComplete == 100 && duration.getDuration() == 0)
{
dummy.setActualFinish(task.getActualStart());
}
list.add(writeAssignment(dummy));
}
}
config.setAutoAssignmentUniqueID(autoUniqueID);
} |
java | public <T> Future<T> actAsync(final FileCallable<T> callable) throws IOException, InterruptedException {
try {
DelegatingCallable<T,IOException> wrapper = new FileCallableWrapper<>(callable);
for (FileCallableWrapperFactory factory : ExtensionList.lookup(FileCallableWrapperFactory.class)) {
wrapper = factory.wrap(wrapper);
}
return (channel!=null ? channel : localChannel)
.callAsync(wrapper);
} catch (IOException e) {
// wrap it into a new IOException so that we get the caller's stack trace as well.
throw new IOException("remote file operation failed",e);
}
} |
python | def setWorkingStandingZeroPoseToRawTrackingPose(self):
"""Sets the preferred standing position in the working copy."""
fn = self.function_table.setWorkingStandingZeroPoseToRawTrackingPose
pMatStandingZeroPoseToRawTrackingPose = HmdMatrix34_t()
fn(byref(pMatStandingZeroPoseToRawTrackingPose))
return pMatStandingZeroPoseToRawTrackingPose |
python | def convert_case(name):
"""Converts name from CamelCase to snake_case"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() |
java | @Override
public String getIndexName() {
final NameableVisitor visitor = new NameableVisitor();
this.accept(visitor);
return visitor.getIndexName();
} |
java | public static BigDecimal div(String v1, String v2, int scale) {
return div(v1, v2, scale, RoundingMode.HALF_UP);
} |
python | def load_metadata_from_file(filename):
""" Load the plot related metadata saved in a file
Parameters
----------
filename: str
Name of file to load metadata from.
Returns
-------
cp: ConfigParser
A configparser object containing the metadata
"""
try:
extension = os.path.splitext(filename)[1]
return _metadata_loader[extension](filename)
except KeyError:
raise TypeError('Cannot read metadata from file %s, extension %s not '
'supported at this time' % (filename, extension)) |
java | public T withConsistencyLevel(ConsistencyLevel consistencyLevel) {
getOptions().setCl(Optional.of(consistencyLevel));
return getThis();
} |
java | private boolean getAnchorMetadataFromData(ValueMap resourceProps, Element element) {
boolean foundAny = false;
List<Attribute> attributes = element.getAttributes();
for (Attribute attribute : attributes) {
if (DataPropertyUtil.isHtml5DataName(attribute.getName())) {
String value = attribute.getValue();
if (StringUtils.isNotEmpty(value)) {
String property = DataPropertyUtil.toHeadlessCamelCaseName(attribute.getName());
if (StringUtils.startsWith(value, "[") && StringUtils.endsWith(value, "]")) {
try {
JSONArray jsonArray = new JSONArray(value);
String[] values = new String[jsonArray.length()];
for (int i = 0; i < jsonArray.length(); i++) {
values[i] = jsonArray.optString(i);
}
resourceProps.put(property, values);
}
catch (JSONException ex) {
// ignore
}
}
else {
// decode if required
value = decodeIfEncoded(value);
resourceProps.put(property, value);
}
foundAny = true;
}
}
}
return foundAny;
} |
python | def multidict(ordered_pairs):
"""Convert duplicate keys values to lists."""
# read all values into lists
d = defaultdict(list)
for k, v in ordered_pairs:
d[k].append(v)
# unpack lists that have only 1 item
dict_copy = deepcopy(d)
for k, v in iteritems(dict_copy):
if len(v) == 1:
d[k] = v[0]
return dict(d) |
python | def s_get(self, quant):
"""Return a number using the given quantity of signed bits."""
if quant < 2:
# special case, just return that unsigned value
# quant can also be 0
return self.u_get(quant)
sign = self.u_get(1)
raw_number = self.u_get(quant - 1)
if sign == 0:
# positive, simplest case
number = raw_number
else:
# negative, two's complement
complement = 2 ** (quant - 1) - 1
number = -1 * ((raw_number ^ complement) + 1)
return number |
python | def refresh_authorizer_token(
self, authorizer_appid, authorizer_refresh_token):
"""
Obtain (refresh) the authorization token of an authorized official account
:param authorizer_appid: appid of the authorizer
:param authorizer_refresh_token: refresh token of the authorizer
"""
return self.post(
'/component/api_authorizer_token',
data={
'component_appid': self.component_appid,
'authorizer_appid': authorizer_appid,
'authorizer_refresh_token': authorizer_refresh_token
}
) |
python | def ChunkedTransformerLM(vocab_size,
feature_depth=512,
feedforward_depth=2048,
num_layers=6,
num_heads=8,
dropout=0.1,
chunk_selector=None,
max_len=2048,
mode='train'):
"""Transformer language model operating on chunks.
The input to this model is a sequence presented as a list or tuple of chunks:
(chunk1, chunk2, chunks3, ..., chunkN).
Each chunk should have the same shape (batch, chunk-length) and together they
represent a long sequence that's a concatenation chunk1,chunk2,...,chunkN.
Chunked Transformer emulates the operation of a Transformer on this long
sequence except for the chunked attention layer, which may attend to only
a subset of the chunks to reduce memory use.
Args:
vocab_size: int: vocab size
feature_depth: int: depth of embedding
feedforward_depth: int: depth of feed-forward layer
num_layers: int: number of encoder/decoder layers
num_heads: int: number of attention heads
dropout: float: dropout rate (how much to drop out)
chunk_selector: a function from chunk number to list of chunks to attend
(if None, attends to the previous chunks which is equivalent to setting
chunk_selector(x) = [] if x < 1 else [x-1] (TransformerXL); we attend
to the current chunk with a causal mask too, selected chunks unmasked).
max_len: int: maximum symbol length for positional encoding
mode: str: 'train' or 'eval'
Returns:
the layer.
"""
stack = [ChunkedDecoderLayer(feature_depth, feedforward_depth, num_heads,
dropout, chunk_selector, mode)
for _ in range(num_layers)]
# Below each Map(L) applies the layer L to each chunk independently.
return layers.Serial(
layers.ShiftRight(),
layers.Map(layers.Embedding(feature_depth, vocab_size)),
layers.Map(layers.Dropout(rate=dropout, mode=mode)),
layers.PositionalEncoding(max_len=max_len),
layers.Serial(*stack),
layers.Map(layers.LayerNorm()),
layers.Map(layers.Dense(vocab_size)),
layers.Map(layers.LogSoftmax()),
) |
python | def datacenters(self):
"""
::
GET /:login/datacenters
:Returns: all datacenters (mapping from short location key to
full URL) that this cloud is aware of
:rtype: :py:class:`dict`
This method also updates the local `known_locations` attribute based
upon this information.
"""
j, _ = self.request('GET', '/datacenters')
self.known_locations.update(j)
return j |
java | public static Map<String, Object> decrypt(KeenClient client, String apiKey, String scopedKey)
throws ScopedKeyException {
try {
// convert the api key from hex string to byte array
final byte[] apiKeyBytes = KeenUtils.hexStringToByteArray(apiKey);
// grab first 16 bytes (aka 32 characters of hex) - that's the IV
String hexedIv = scopedKey.substring(0, 32);
// grab everything else - that's the ciphertext (aka encrypted message)
String hexedCipherText = scopedKey.substring(32);
// unhex the iv and ciphertext
byte[] iv = KeenUtils.hexStringToByteArray(hexedIv);
byte[] cipherText = KeenUtils.hexStringToByteArray(hexedCipherText);
// setup the API key as the secret
final SecretKey secret = new SecretKeySpec(apiKeyBytes, "AES");
// get the right AES cipher
final Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
// initialize the cipher with the right IV
IvParameterSpec ivParameterSpec = new IvParameterSpec(iv);
cipher.init(Cipher.DECRYPT_MODE, secret, ivParameterSpec);
// do the decryption
String plainText = new String(cipher.doFinal(cipherText), "UTF-8");
// return the JSON decoded options map
return client.getJsonHandler().readJson(new StringReader(plainText));
} catch (Exception e) {
throw new ScopedKeyException("An error occurred while attempting to decrypt a Scoped Key", e);
}
} |
python | def is_signature_equal(cls, sig_a, sig_b):
"""Compares two signatures using a constant time algorithm to avoid timing attacks."""
if len(sig_a) != len(sig_b):
return False
invalid_chars = 0
for char_a, char_b in zip(sig_a, sig_b):
if char_a != char_b:
invalid_chars += 1
return invalid_chars == 0 |
java | public void addPatientParticipantObject(String patientId)
{
addParticipantObjectIdentification(
new RFC3881ParticipantObjectCodes.RFC3881ParticipantObjectIDTypeCodes.PatientNumber(),
null,
null,
null,
patientId,
RFC3881ParticipantObjectTypeCodes.PERSON,
RFC3881ParticipantObjectTypeRoleCodes.PATIENT,
null,
null);
} |
java | @CheckReturnValue
private FlowScope traverseReturn(Node n, FlowScope scope) {
scope = traverseChildren(n, scope);
Node retValue = n.getFirstChild();
if (retValue != null) {
JSType type = functionScope.getRootNode().getJSType();
if (type != null) {
FunctionType fnType = type.toMaybeFunctionType();
if (fnType != null) {
inferPropertyTypesToMatchConstraint(
retValue.getJSType(), fnType.getReturnType());
}
}
}
return scope;
} |
python | def change_message_visibility(self, queue, receipt_handle,
visibility_timeout, callback=None):
"""
Extends the read lock timeout for the specified message from
the specified queue to the specified value.
:type queue: A :class:`boto.sqs.queue.Queue` object
:param queue: The Queue from which messages are read.
:type receipt_handle: str
:param receipt_handle: The receipt handle associated with the message whose
visibility timeout will be changed.
:type visibility_timeout: int
:param visibility_timeout: The new value of the message's visibility
timeout in seconds.
"""
params = {'ReceiptHandle' : receipt_handle,
'VisibilityTimeout' : visibility_timeout}
return self.get_status('ChangeMessageVisibility', params, queue.id, callback=callback) |
java | @Override
protected UIComponent createComponent(FacesContext context, String id)
{
String componentType = getComponentType();
if (componentType == null)
{
throw new NullPointerException("componentType");
}
if (_binding != null)
{
Application application = context.getApplication();
ValueBinding componentBinding = application.createValueBinding(_binding);
UIComponent component = application.createComponent(componentBinding, context, componentType);
component.setId(id);
component.setValueBinding("binding", componentBinding);
setProperties(component);
return component;
}
UIComponent component = context.getApplication().createComponent(componentType);
component.setId(id);
setProperties(component);
return component;
} |
java | @Nonnull
public static <T> T getDeserializedObject (@Nonnull final byte [] aData)
{
ValueEnforcer.notNull (aData, "Data");
// Read new object from byte array
try (final ObjectInputStream aOIS = new ObjectInputStream (new NonBlockingByteArrayInputStream (aData)))
{
return GenericReflection.uncheckedCast (aOIS.readObject ());
}
catch (final Exception ex)
{
throw new IllegalStateException ("Failed to read serializable object", ex);
}
} |
java | public String connect (String hostname, int port)
throws IOException, CDDBException
{
return connect(hostname, port, CLIENT_NAME, CLIENT_VERSION);
} |
python | def replace(oldstr, newstr, infile, dryrun=False):
"""
Sed-like replace function.
Usage: pysed.replace(<Old string>, <Replacement String>, <Text File>)
Example: pysed.replace('xyz', 'XYZ', '/path/to/file.txt')
Example 'DRYRUN' (dumps the output to STDOUT instead of changing the input file):
pysed.replace('xyz', 'XYZ', '/path/to/file.txt', dryrun=True)
"""
linelist = []
with open(infile) as reader:
for item in reader:
newitem = re.sub(oldstr, newstr, item)
linelist.append(newitem)
if dryrun is False:
with open(infile, "w") as writer:
writer.truncate()
for line in linelist:
writer.writelines(line)
elif dryrun is True:
for line in linelist:
print(line, end='')
else:
exit("""Unknown option specified to 'dryrun' argument,
Usage: dryrun=<True|False>.""") |
python | def max_knob_end_distance(self):
""" Maximum distance between knob_end and each of the hole side-chain centres. """
return max([distance(self.knob_end, h) for h in self.hole]) |
python | def parse_url(arg, extract, key=None):
"""
Returns the portion of a URL corresponding to a part specified
by 'extract'.
Can optionally specify a key to retrieve an associated value
if the extract parameter is 'QUERY'.
Parameters
----------
extract : one of {'PROTOCOL', 'HOST', 'PATH', 'REF',
'AUTHORITY', 'FILE', 'USERINFO', 'QUERY'}
key : string (optional)
Examples
--------
>>> url = "https://www.youtube.com/watch?v=kEuEcWfewf8&t=10"
>>> parse_url(url, 'QUERY', 'v') # doctest: +SKIP
'kEuEcWfewf8'
Returns
-------
extracted : string
"""
return ops.ParseURL(arg, extract, key).to_expr() |
python | def power_on(self, id): # pylint: disable=invalid-name,redefined-builtin
"""Power on a device using it's power on command.
:param id: Device ID as an int.
:return: :class:`devices.PowerCmd <devices.PowerCmd>` object
:rtype: devices.PowerCmd
"""
schema = PowerCmdSchema()
resp = self.service.post(self.base+str(id)+'/power/on/')
return self.service.decode(schema, resp) |
java | final AbstractJcrProperty removeExistingProperty( Name name ) throws VersionException, LockException, RepositoryException {
AbstractJcrProperty existing = getProperty(name);
if (existing != null) {
existing.remove();
return existing;
}
// Return without throwing an exception to match behavior of the reference implementation.
// This is also in conformance with the spec. See MODE-956 for details.
return null;
} |
python | def load(self, filename):
"""Optimized load and return the parsed version of filename.
Uses the on-disk parse cache if the file is located in it.
"""
# Compute sha1 hash (key)
with open(filename) as fp:
key = sha1(fp.read()).hexdigest()
path = self.key_to_path(key)
# Return the cached file if available
if key in self.hashes:
try:
with open(path) as fp:
return cPickle.load(fp)
except EOFError:
os.unlink(path)
self.hashes.remove(key)
except IOError:
self.hashes.remove(key)
# Create the nested cache directory
try:
os.makedirs(os.path.dirname(path))
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
# Process the file and save in the cache
scratch = kurt.Project.load(filename) # can fail
with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT,
0400), 'w') as fp:
# open file for writing but make it immediately read-only
cPickle.dump(scratch, fp, cPickle.HIGHEST_PROTOCOL)
self.hashes.add(key)
return scratch |
java | public void register(FoxHttpInterceptorType interceptorType, FoxHttpInterceptor foxHttpInterceptor) throws FoxHttpException {
FoxHttpInterceptorType.verifyInterceptor(interceptorType, foxHttpInterceptor);
if (foxHttpInterceptors.containsKey(interceptorType)) {
foxHttpInterceptors.get(interceptorType).add(foxHttpInterceptor);
} else {
foxHttpInterceptors.put(interceptorType, new ArrayList<>(Arrays.asList(foxHttpInterceptor)));
}
} |
java | @Override
public int compareTo(MachineTime<U> other) {
if (this.scale == other.scale) {
if (this.seconds < other.seconds) {
return -1;
} else if (this.seconds > other.seconds) {
return 1;
} else {
return (this.nanos - other.nanos);
}
} else {
throw new ClassCastException("Different time scales.");
}
} |
java | public JvmGenericType toClass(/* @Nullable */ EObject sourceElement, /* @Nullable */ QualifiedName name, /* @Nullable */ Procedure1<? super JvmGenericType> initializer) {
return toClass(sourceElement, name != null ? name.toString() : null, initializer);
} |
java | public static void installDomainConnectorServices(final OperationContext context,
final ServiceTarget serviceTarget,
final ServiceName endpointName,
final ServiceName networkInterfaceBinding,
final int port,
final OptionMap options,
final ServiceName securityRealm,
final ServiceName saslAuthenticationFactory,
final ServiceName sslContext) {
String sbmCap = "org.wildfly.management.socket-binding-manager";
ServiceName sbmName = context.hasOptionalCapability(sbmCap, NATIVE_MANAGEMENT_RUNTIME_CAPABILITY.getName(), null)
? context.getCapabilityServiceName(sbmCap, SocketBindingManager.class) : null;
installConnectorServicesForNetworkInterfaceBinding(serviceTarget, endpointName, MANAGEMENT_CONNECTOR,
networkInterfaceBinding, port, options, securityRealm, saslAuthenticationFactory, sslContext, sbmName);
} |
python | def rm_(key, recurse=False, profile=None, **kwargs):
'''
.. versionadded:: 2014.7.0
Delete a key from etcd. Returns True if the key was deleted, False if it was
not and None if there was a failure.
CLI Example:
.. code-block:: bash
salt myminion etcd.rm /path/to/key
salt myminion etcd.rm /path/to/key profile=my_etcd_config
salt myminion etcd.rm /path/to/key host=127.0.0.1 port=2379
salt myminion etcd.rm /path/to/dir recurse=True profile=my_etcd_config
'''
client = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs)
return client.rm(key, recurse=recurse) |
python | def _high_dim_sim(self, v, w, normalize=False, X=None, idx=0):
"""Similarity measurement based on Gaussian Distribution"""
sim = np.exp((-np.linalg.norm(v - w) ** 2) / (2*self._sigma[idx] ** 2))
if normalize:
return sim / sum(map(lambda x: x[1], self._knn(idx, X, high_dim=True)))
else:
return sim |
python | def to_array(self):
"""
Serializes this InlineQueryResultCachedSticker to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(InlineQueryResultCachedSticker, self).to_array()
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['id'] = u(self.id) # py2: type unicode, py3: type str
array['sticker_file_id'] = u(self.sticker_file_id) # py2: type unicode, py3: type str
if self.reply_markup is not None:
array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup
if self.input_message_content is not None:
array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent
return array |
java | @Override
public ConnectionManager getConnectionManager(ResourceInfo refInfo, AbstractConnectionFactoryService svc) throws ResourceException {
final boolean trace = TraceComponent.isAnyTracingEnabled();
if (trace && tc.isEntryEnabled())
Tr.entry(this, tc, "getConnectionManager", refInfo, svc);
ConnectionManager cm;
lock.readLock().lock();
try {
if (pm == null)
try {
// Switch to write lock for lazy initialization
lock.readLock().unlock();
lock.writeLock().lock();
if (pm == null)
createPoolManager(svc);
} finally {
// Downgrade to read lock for rest of method
lock.readLock().lock();
lock.writeLock().unlock();
}
CMConfigData cmConfigData = getCMConfigData(svc, refInfo);
String cfDetailsKey = cmConfigData.getCFDetailsKey();
cm = cfKeyToCM.get(cfDetailsKey);
if (cm == null) {
CommonXAResourceInfo xaResInfo = new EmbXAResourceInfo(cmConfigData);
J2CGlobalConfigProperties gConfigProps = pm.getGConfigProps();
synchronized (this) {
cm = cfKeyToCM.get(cfDetailsKey);
if (cm == null) {
cm = new ConnectionManager(svc, pm, gConfigProps, xaResInfo);
cfKeyToCM.put(cfDetailsKey, cm);
}
}
}
} finally {
lock.readLock().unlock();
}
if (trace && tc.isEntryEnabled())
Tr.exit(this, tc, "getConnectionManager", cm);
return cm;
} |
python | def bgp_normalize_table_data(bgp_table):
"""The 'show bgp all summary vrf all' table can have entries that wrap multiple lines.
2001:db8:4:701::2
4 65535 163664 163693 145 0 0 3w2d 3
2001:db8:e0:dd::1
4 10 327491 327278 145 0 0 3w1d 4
Normalize this so the line wrap doesn't exist.
"""
bgp_table = bgp_table.strip()
bgp_multiline_pattern = r"({})\s*\n".format(IPV4_OR_IPV6_REGEX)
# Strip out the newline
return re.sub(bgp_multiline_pattern, r"\1", bgp_table) |
java | public static ArrayNode getArrayAndRemove(ObjectNode obj, String fieldName) {
ArrayNode result = null;
if (obj != null) {
result = array(remove(obj, fieldName));
}
return result;
} |
python | def get_assistant(filename):
"""Imports a module from filename as a string, returns the contained Assistant object"""
agent_name = os.path.splitext(filename)[0]
try:
agent_module = import_with_3(
agent_name, os.path.join(os.getcwd(), filename))
except ImportError:
agent_module = import_with_2(
agent_name, os.path.join(os.getcwd(), filename))
for name, obj in agent_module.__dict__.items():
if isinstance(obj, Assistant):
return obj |
python | def __EncodedAttribute_encode_rgb24(self, rgb24, width=0, height=0):
"""Encode a 24 bit color image (no compression)
:param rgb24: an object containning image information
:type rgb24: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
:param width: image width. **MUST** be given if rgb24 is a string or
if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type width: :py:obj:`int`
:param height: image height. **MUST** be given if rgb24 is a string
or if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type height: :py:obj:`int`
.. note::
When :class:`numpy.ndarray` is given:
- rgb24 **MUST** be CONTIGUOUS, ALIGNED
- if rgb24.ndims != 3, width and height **MUST** be given and
rgb24.nbytes/3 **MUST** match width*height
- if rgb24.ndims == 3, rgb24.itemsize **MUST** be 1 (typically,
rgb24.dtype is one of `numpy.dtype.byte`, `numpy.dtype.ubyte`,
`numpy.dtype.int8` or `numpy.dtype.uint8`) and shape **MUST** be
(height, width, 3)
Example::
def read_myattr(self, attr):
enc = tango.EncodedAttribute()
# create an 'image' where each pixel is R=0x01, G=0x01, B=0x01
arr = numpy.ones((10,10,3), dtype=numpy.uint8)
enc.encode_rgb24(arr)
attr.set_value(enc)
"""
self._generic_encode_rgb24(rgb24, width=width, height=height, format=_ImageFormat.RawImage) |
python | def run_hmmbuild(self):
'''
Generate an HMM with hmmbuild,
output to file. Also stores query names.
'''
for alignment in self.alignment_list:
print 'building Hmm for', alignment
alignment_full_path = self.alignment_dir + alignment
query_name = alignment.split("_")[0]
self.query_names.append(query_name)
new_hmm= self.hmm_dir + query_name + ".hmm"
hmmbuild_output = subprocess.call(["hmmbuild", new_hmm,
alignment_full_path])
print 'hmmbuild complete for', self.query_names |
java | public void translate(Tuple2D<?> move) {
assert move != null : AssertMessages.notNullParameter();
this.curvilineTranslation += move.getX();
this.shiftTranslation += move.getY();
this.isIdentity = null;
} |
java | @XmlElementDecl(namespace = "http://www.w3.org/1998/Math/MathML", name = "arccsc")
public JAXBElement<ElementaryFunctionsType> createArccsc(ElementaryFunctionsType value) {
return new JAXBElement<ElementaryFunctionsType>(_Arccsc_QNAME, ElementaryFunctionsType.class, null, value);
} |
python | def _stream_annotation(file_name, pb_dir):
"""
Stream an entire remote annotation file from physiobank
Parameters
----------
file_name : str
The name of the annotation file to be read.
pb_dir : str
The physiobank directory where the annotation file is located.
"""
# Full url of annotation file
url = posixpath.join(config.db_index_url, pb_dir, file_name)
# Get the content
response = requests.get(url)
# Raise HTTPError if invalid url
response.raise_for_status()
# Convert to numpy array
ann_data = np.fromstring(response.content, dtype=np.dtype('<u1'))
return ann_data |
python | def _create_sentence_objects(self):
'''Returns a list of Sentence objects from the raw text.
'''
sentence_objects = []
sent_tokenizer = SentenceTokenizer(locale=self.language.code)
seq = Sequence(self.raw)
seq = sent_tokenizer.transform(seq)
for start_index, end_index in zip(seq.idx[:-1], seq.idx[1:]):
# Sentences share the same models as their parent blob
sent = seq.text[start_index: end_index].strip()
if not sent: continue
s = Sentence(sent, start_index=start_index, end_index=end_index)
s.detected_languages = self.detected_languages
sentence_objects.append(s)
return sentence_objects |
java | @Override
public void terminateAllConnections() {
this.terminationLock.lock();
try {
for (int i = 0; i < this.pool.thriftServerCount; i++) {
this.pool.partitions.get(i).setUnableToCreateMoreTransactions(false);
List<ThriftConnectionHandle<T>> clist = new LinkedList<>();
this.pool.partitions.get(i).getFreeConnections().drainTo(clist);
for (ThriftConnectionHandle<T> c : clist) {
this.pool.destroyConnection(c);
}
}
} finally {
this.terminationLock.unlock();
}
} |
python | def parse(cls, resource):
""" Parse a resource
:param resource: Element representing the text inventory
:param _cls_dict: Dictionary of classes to generate subclasses
"""
xml = xmlparser(resource)
o = cls(name=xml.xpath("//ti:TextInventory", namespaces=XPATH_NAMESPACES)[0].get("tiid") or "")
# Parse textgroups
_xpathDict(xml=xml, xpath='//ti:textgroup', cls=cls.CLASS_TEXTGROUP, parent=o)
return o |
java | @Override
public EClass getMetrics() {
if (metricsEClass == null) {
metricsEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(100);
}
return metricsEClass;
} |
java | public Node previousNode() throws DOMException
{
if (!m_cacheNodes)
throw new RuntimeException(
XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_ITERATE, null)); //"This NodeSet can not iterate to a previous node!");
if ((m_next - 1) > 0)
{
m_next--;
return this.elementAt(m_next);
}
else
return null;
} |
python | def _find_key_cols(df):
"""Identify columns in a DataFrame that could be a unique key"""
keys = []
for col in df:
if len(df[col].unique()) == len(df[col]):
keys.append(col)
return keys |
java | @SuppressWarnings("unchecked")
public static void addJSFAttrbituteToAngularModel(Map<String, Object> model, String key, Object value,
boolean cacheable) {
Map<String, Object> sessionMap = FacesContext.getCurrentInstance().getExternalContext().getSessionMap();
Map<String, Object> cache = (Map<String, Object>) sessionMap.get(JSF_ATTRIBUTES_SESSION_CACHE);
if (cache.containsKey(key)) {
if (cacheable)
if (FacesContext.getCurrentInstance().isPostback()) {
Object previousValue = cache.get(key);
if (null == value && previousValue == null) {
return;
}
if (null != value && value.equals(previousValue)) {
return;
}
}
cache.remove(key);
}
cache.put(key, value);
String[] keys = key.split("\\.");
Map<String, Object> currentMap = model;
for (int i = 0; i < keys.length - 1; i++) {
if (!currentMap.containsKey(keys[i])) {
currentMap.put(keys[i], new HashMap<String, Object>());
}
final Object object = currentMap.get(keys[i]);
if (!(object instanceof Map)) {
// the parent object has already been stored
return;
}
currentMap = (Map<String, Object>) object;
}
currentMap.put(keys[keys.length - 1], value);
} |
python | def attach_pipeline(self, pipeline, name, chunks=None, eager=True):
"""Register a pipeline to be computed at the start of each day.
Parameters
----------
pipeline : Pipeline
The pipeline to have computed.
name : str
The name of the pipeline.
chunks : int or iterator, optional
The number of days to compute pipeline results for. Increasing
this number will make it longer to get the first results but
may improve the total runtime of the simulation. If an iterator
is passed, we will run in chunks based on values of the iterator.
eager : bool, optional
Whether or not to compute this pipeline prior to
before_trading_start. Default is True.
Returns
-------
pipeline : Pipeline
Returns the pipeline that was attached unchanged.
See Also
--------
:func:`zipline.api.pipeline_output`
"""
if chunks is None:
# Make the first chunk smaller to get more immediate results:
# (one week, then every half year)
chunks = chain([5], repeat(126))
elif isinstance(chunks, int):
chunks = repeat(chunks)
if name in self._pipelines:
raise DuplicatePipelineName(name=name)
self._pipelines[name] = AttachedPipeline(pipeline, iter(chunks), eager)
# Return the pipeline to allow expressions like
# p = attach_pipeline(Pipeline(), 'name')
return pipeline |
python | def update(self, ipv6s):
"""
Method to update IPv6 addresses
:param ipv6s: List containing the IPv6 addresses to be updated
:return: None
"""
data = {'ips': ipv6s}
ipv6s_ids = [str(ipv6.get('id')) for ipv6 in ipv6s]
return super(ApiIPv6, self).put('api/v3/ipv6/%s/' %
';'.join(ipv6s_ids), data) |
python | def link_page_filter(self, page, modelview_name):
"""
Arguments are passed like: page_<VIEW_NAME>=<PAGE_NUMBER>
"""
new_args = request.view_args.copy()
args = request.args.copy()
args["page_" + modelview_name] = page
return url_for(
request.endpoint,
**dict(list(new_args.items()) + list(args.to_dict().items()))
) |
java | @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public float getLineSpacingMultiplier (){
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
return mInputView.getLineSpacingMultiplier();
return 0f;
} |
python | def get_type(data):
"""Retrieve the type of effects calculation to do.
"""
if data["analysis"].lower().startswith("var") or dd.get_variantcaller(data):
return tz.get_in(("config", "algorithm", "effects"), data, "snpeff") |
java | public OvhMigrationAccount domain_account_accountName_migrate_destinationServiceName_destinationEmailAddress_destinationEmailAddress_GET(String domain, String accountName, String destinationServiceName, String destinationEmailAddress) throws IOException {
String qPath = "/email/domain/{domain}/account/{accountName}/migrate/{destinationServiceName}/destinationEmailAddress/{destinationEmailAddress}";
StringBuilder sb = path(qPath, domain, accountName, destinationServiceName, destinationEmailAddress);
String resp = exec(qPath, "GET", sb.toString(), null);
return convertTo(resp, OvhMigrationAccount.class);
} |
java | public double getAverageFiringTime() {
long fires = this.agendaStats.getConsolidatedStats().matchesFired.get();
long time = this.agendaStats.getConsolidatedStats().firingTime.get();
// calculating the average and converting it from nano secs to milli secs
return fires > 0 ? (((double) time / (double) fires) / (double) NANO_TO_MILLISEC) : 0;
} |
java | private String getTaskField(int key)
{
String result = null;
if ((key > 0) && (key < m_taskNames.length))
{
result = m_taskNames[key];
}
return (result);
} |
java | private String getAdditionalDependencies(final TargetInfo linkTarget, final List<DependencyDef> projectDependencies,
final Map<String, TargetInfo> targets, final String basePath) {
String dependencies = null;
final File[] linkSources = linkTarget.getAllSources();
final StringBuffer buf = new StringBuffer();
for (final File linkSource : linkSources) {
//
// if file was not compiled or otherwise generated
//
if (targets.get(linkSource.getName()) == null) {
//
// if source appears to be a system library or object file
// just output the name of the file (advapi.lib for example)
// otherwise construct a relative path.
//
String relPath = linkSource.getName();
//
// check if file comes from a project dependency
// if it does it should not be explicitly linked
boolean fromDependency = false;
if (relPath.indexOf(".") > 0) {
final String baseName = relPath.substring(0, relPath.indexOf("."));
for (DependencyDef depend : projectDependencies) {
if (baseName.compareToIgnoreCase(depend.getName()) == 0) {
fromDependency = true;
}
}
}
if (!fromDependency) {
if (!CUtil.isSystemPath(linkSource)) {
relPath = CUtil.getRelativePath(basePath, linkSource);
}
//
// if path has an embedded space then
// must quote
if (relPath.indexOf(' ') > 0) {
buf.append('\"');
buf.append(CUtil.toWindowsPath(relPath));
buf.append('\"');
} else {
buf.append(relPath);
}
buf.append(' ');
}
}
}
if (buf.length() > 0) {
buf.setLength(buf.length() - 1);
dependencies = buf.toString();
}
return dependencies;
} |
python | def _first(self, **spec):
""" Get the earliest entry in this category, optionally including subcategories """
for record in self._entries(spec).order_by(model.Entry.local_date,
model.Entry.id)[:1]:
return entry.Entry(record)
return None |
python | def clusterQueues(self):
""" Return a dict of queues in cluster and servers running them
"""
servers = yield self.getClusterServers()
queues = {}
for sname in servers:
qs = yield self.get('rhumba.server.%s.queues' % sname)
uuid = yield self.get('rhumba.server.%s.uuid' % sname)
qs = json.loads(qs)
for q in qs:
if q not in queues:
queues[q] = []
queues[q].append({'host': sname, 'uuid': uuid})
defer.returnValue(queues) |
java | public static GeoDistanceSortFieldBuilder geoDistance(String mapper, double latitude, double longitude) {
return new GeoDistanceSortFieldBuilder(mapper, latitude, longitude);
} |
python | def project_get(auth=None, **kwargs):
'''
Get a single project
CLI Example:
.. code-block:: bash
salt '*' keystoneng.project_get name=project1
salt '*' keystoneng.project_get name=project2 domain_id=b62e76fbeeff4e8fb77073f591cf211e
salt '*' keystoneng.project_get name=f315afcf12f24ad88c92b936c38f2d5a
'''
cloud = get_openstack_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.get_project(**kwargs) |
java | public BUILDER setDeprecated(ModelVersion since, boolean notificationUseful) {
//noinspection deprecation
this.deprecated = new DeprecationData(since, notificationUseful);
return (BUILDER) this;
} |
java | @Override
public SpatialReferenceSystem queryForSameId(SpatialReferenceSystem data)
throws SQLException {
SpatialReferenceSystem srs = super.queryForSameId(data);
setDefinition_12_063(srs);
return srs;
} |
java | public void set(Rec fieldList) throws DBException
{
this.syncCurrentToBase();
boolean[] rgbListenerState = this.getRecord().setEnableListeners(false);
this.getRecord().handleRecordChange(DBConstants.UPDATE_TYPE); // Fake the call for the grid table
this.getRecord().setEnableListeners(rgbListenerState);
super.set(fieldList);
this.syncCurrentToBase();
rgbListenerState = this.getRecord().setEnableListeners(false);
this.getRecord().handleRecordChange(DBConstants.AFTER_UPDATE_TYPE); // Fake the call for the grid table
this.getRecord().setEnableListeners(rgbListenerState);
} |
python | def sgd_entropic_regularization(a, b, M, reg, batch_size, numItermax, lr):
'''
Compute the sgd algorithm to solve the regularized discrete measures
optimal transport dual problem
The function solves the following optimization problem:
.. math::
\gamma = \arg\min_\gamma \langle \gamma, M \rangle_F + \mathrm{reg} \cdot \Omega(\gamma)
s.t. \gamma \mathbf{1} = a
\gamma^T \mathbf{1} = b
\gamma \geq 0
Where :
- M is the (ns,nt) metric cost matrix
- :math:`\Omega` is the entropic regularization term with :math:`\Omega(\gamma)=\sum_{i,j} \gamma_{i,j}\log(\gamma_{i,j})`
- a and b are source and target weights (sum to 1)
Parameters
----------
a : np.ndarray(ns,)
source measure
b : np.ndarray(nt,)
target measure
M : np.ndarray(ns, nt)
cost matrix
reg : float number
Regularization term > 0
batch_size : int number
size of the batch
numItermax : int number
number of iteration
lr : float number
learning rate
Returns
-------
alpha : np.ndarray(ns,)
dual variable
beta : np.ndarray(nt,)
dual variable
Examples
--------
>>> n_source = 7
>>> n_target = 4
>>> reg = 1
>>> numItermax = 20000
>>> lr = 0.1
>>> batch_size = 3
>>> log = True
>>> a = ot.utils.unif(n_source)
>>> b = ot.utils.unif(n_target)
>>> rng = np.random.RandomState(0)
>>> X_source = rng.randn(n_source, 2)
>>> Y_target = rng.randn(n_target, 2)
>>> M = ot.dist(X_source, Y_target)
>>> sgd_dual_pi, log = stochastic.solve_dual_entropic(a, b, M, reg,
batch_size,
numItermax, lr, log)
>>> print(log['alpha'], log['beta'])
>>> print(sgd_dual_pi)
References
----------
[Seguy et al., 2018] :
International Conference on Learning Representations (2018),
arXiv preprint arxiv:1711.02283.
'''
n_source = np.shape(M)[0]
n_target = np.shape(M)[1]
cur_alpha = np.zeros(n_source)
cur_beta = np.zeros(n_target)
for cur_iter in range(numItermax):
k = np.sqrt(cur_iter + 1)
batch_alpha = np.random.choice(n_source, batch_size, replace=False)
batch_beta = np.random.choice(n_target, batch_size, replace=False)
update_alpha, update_beta = batch_grad_dual(a, b, M, reg, cur_alpha,
cur_beta, batch_size,
batch_alpha, batch_beta)
cur_alpha[batch_alpha] += (lr / k) * update_alpha[batch_alpha]
cur_beta[batch_beta] += (lr / k) * update_beta[batch_beta]
return cur_alpha, cur_beta |
java | public static <K> PoolKey<K> lookup(K key) {
if (key == null)
throw new IllegalStateException("Key must not be null");
return new PoolKey<K>(key);
} |
python | def _get_template(path, option_key):
'''
Get the contents of a template file and provide it as a module type
:param path: path to the template.yml file
:type path: ``str``
:param option_key: The unique key of this template
:type option_key: ``str``
:returns: Details about the template
:rtype: ``tuple``
'''
with salt.utils.files.fopen(path, 'r') as template_f:
template = deserialize(template_f)
info = (option_key, template.get('description', ''), template)
return info |
java | public static boolean isValidMapcodeFormat(@Nonnull final String mapcode) throws IllegalArgumentException {
checkNonnull("mapcode", mapcode);
try {
// Throws an exception if the format is incorrect.
getPrecisionFormat(mapcode.toUpperCase());
return true;
} catch (final UnknownPrecisionFormatException ignored) {
return false;
}
} |
java | public void encodeListNullToNull(Writer writer, List<? extends T> list) throws IOException {
if (list == null) {
writer.write("null");
writer.flush();
return;
}
JsonUtil.startArray(writer);
int size = list.size();
for (int i = 0; i < size; i++) {
encodeNullToNull(writer, list.get(i));
if (i + 1 < size) {
JsonUtil.addSeparator(writer);
}
}
JsonUtil.endArray(writer);
writer.flush();
} |
java | public void containsEntry(@NullableDecl Object key, @NullableDecl Object value) {
// TODO(kak): Can we share any of this logic w/ MapSubject.containsEntry()?
if (!actual().containsEntry(key, value)) {
Entry<Object, Object> entry = Maps.immutableEntry(key, value);
List<Entry<Object, Object>> entryList = ImmutableList.of(entry);
if (hasMatchingToStringPair(actual().entries(), entryList)) {
failWithoutActual(
simpleFact(
lenientFormat(
"Not true that %s contains entry <%s (%s)>. However, it does contain entries "
+ "<%s>",
actualAsString(),
entry,
objectToTypeName(entry),
countDuplicatesAndAddTypeInfo(
retainMatchingToString(
actual().entries(), entryList /* itemsToCheck */)))));
} else if (actual().containsKey(key)) {
failWithoutActual(
simpleFact(
lenientFormat(
"Not true that %s contains entry <%s>. However, it has a mapping from <%s> to "
+ "<%s>",
actualAsString(), entry, key, actual().asMap().get(key))));
} else if (actual().containsValue(value)) {
Set<Object> keys = new LinkedHashSet<>();
for (Entry<?, ?> actualEntry : actual().entries()) {
if (Objects.equal(actualEntry.getValue(), value)) {
keys.add(actualEntry.getKey());
}
}
failWithoutActual(
simpleFact(
lenientFormat(
"Not true that %s contains entry <%s>. "
+ "However, the following keys are mapped to <%s>: %s",
actualAsString(), entry, value, keys)));
} else {
failWithActual("expected to contain entry", Maps.immutableEntry(key, value));
}
}
} |
python | def getDimensionForImage(filename, maxsize):
"""Return scaled image size in (width, height) format.
The scaling preserves the aspect ratio.
If PIL is not found returns None."""
try:
from PIL import Image
except ImportError:
return None
img = Image.open(filename)
width, height = img.size
if width > maxsize[0] or height > maxsize[1]:
img.thumbnail(maxsize)
out.info("Downscaled display size from %s to %s" % ((width, height), img.size))
return img.size |
java | public ClientConnectionTimingsBuilder socketConnectEnd() {
checkState(socketConnectStartTimeMicros >= 0, "socketConnectStart() is not called yet.");
checkState(!socketConnectEndSet, "socketConnectEnd() is already called.");
socketConnectEndNanos = System.nanoTime();
socketConnectEndSet = true;
return this;
} |
python | def _AddReprMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __repr__(self):
return text_format.MessageToString(self)
cls.__repr__ = __repr__ |
java | public static LifecycleChaincodePackage fromFile(File policyFile) throws IOException, InvalidArgumentException {
if (null == policyFile) {
throw new InvalidArgumentException("The parameter policyFile may not be null.");
}
try (InputStream is = new FileInputStream(policyFile)) {
byte[] packageBytes = IOUtils.toByteArray(is);
return fromBytes(packageBytes);
}
} |
python | def GET(self, courseid, taskid, path): # pylint: disable=arguments-differ
""" GET request """
try:
course = self.course_factory.get_course(courseid)
if not self.user_manager.course_is_open_to_user(course):
return self.template_helper.get_renderer().course_unavailable()
path_norm = posixpath.normpath(urllib.parse.unquote(path))
if taskid == "$common":
public_folder = course.get_fs().from_subfolder("$common").from_subfolder("public")
else:
task = course.get_task(taskid)
if not self.user_manager.task_is_visible_by_user(task): # ignore LTI check here
return self.template_helper.get_renderer().task_unavailable()
public_folder = task.get_fs().from_subfolder("public")
(method, mimetype_or_none, file_or_url) = public_folder.distribute(path_norm, False)
if method == "local":
web.header('Content-Type', mimetype_or_none)
return file_or_url
elif method == "url":
raise web.redirect(file_or_url)
else:
raise web.notfound()
except web.HTTPError as error_or_redirect:
raise error_or_redirect
except:
if web.config.debug:
raise
else:
raise web.notfound() |
java | public Set getResourcePaths(String uriInContext)
{
try
{
uriInContext=URI.canonicalPath(uriInContext);
if (uriInContext==null)
return Collections.EMPTY_SET;
Resource resource=getHttpContext().getResource(uriInContext);
if (resource==null || !resource.isDirectory())
return Collections.EMPTY_SET;
String[] contents=resource.list();
if (contents==null || contents.length==0)
return Collections.EMPTY_SET;
HashSet set = new HashSet(contents.length*2);
for (int i=0;i<contents.length;i++)
set.add(URI.addPaths(uriInContext,contents[i]));
return set;
}
catch(Exception e)
{
e.printStackTrace();
LogSupport.ignore(log,e);
}
return Collections.EMPTY_SET;
} |
java | @Nonnull
@ReturnsMutableCopy
public ICommonsList <ICSSTopLevelRule> getAllRules (@Nonnull final Predicate <? super ICSSTopLevelRule> aFilter)
{
return m_aRules.getAll (aFilter);
} |
java | public static byte[] getBytes(String text) {
byte[] bytes = new byte[] {};
try {
bytes = text.getBytes("utf-8");
} catch (UnsupportedEncodingException e) {
}
return bytes;
} |
python | def revert(self):
"""
Revert any changes made to settings.
"""
for attr, value in self._changed.items():
setattr(django_settings, attr, value)
for attr in self._added:
delattr(django_settings, attr)
self._changed = {}
self._added = []
if self.isolated:
self._isolated_overrides = BaseSettings() |
java | final Table SYSTEM_TYPEINFO() {
Table t = sysTables[SYSTEM_TYPEINFO];
if (t == null) {
t = createBlankTable(sysTableHsqlNames[SYSTEM_TYPEINFO]);
//-------------------------------------------
// required by JDBC:
// ------------------------------------------
addColumn(t, "TYPE_NAME", SQL_IDENTIFIER);
addColumn(t, "DATA_TYPE", Type.SQL_SMALLINT);
addColumn(t, "PRECISION", Type.SQL_INTEGER);
addColumn(t, "LITERAL_PREFIX", CHARACTER_DATA);
addColumn(t, "LITERAL_SUFFIX", CHARACTER_DATA);
addColumn(t, "CREATE_PARAMS", CHARACTER_DATA);
addColumn(t, "NULLABLE", Type.SQL_SMALLINT);
addColumn(t, "CASE_SENSITIVE", Type.SQL_BOOLEAN);
addColumn(t, "SEARCHABLE", Type.SQL_INTEGER);
addColumn(t, "UNSIGNED_ATTRIBUTE", Type.SQL_BOOLEAN);
addColumn(t, "FIXED_PREC_SCALE", Type.SQL_BOOLEAN);
addColumn(t, "AUTO_INCREMENT", Type.SQL_BOOLEAN);
addColumn(t, "LOCAL_TYPE_NAME", SQL_IDENTIFIER);
addColumn(t, "MINIMUM_SCALE", Type.SQL_SMALLINT);
addColumn(t, "MAXIMUM_SCALE", Type.SQL_SMALLINT);
addColumn(t, "SQL_DATA_TYPE", Type.SQL_INTEGER);
addColumn(t, "SQL_DATETIME_SUB", Type.SQL_INTEGER);
addColumn(t, "NUM_PREC_RADIX", Type.SQL_INTEGER);
//-------------------------------------------
// for JDBC sort contract:
//-------------------------------------------
addColumn(t, "TYPE_SUB", Type.SQL_INTEGER);
// order: DATA_TYPE, TYPE_SUB
// true PK
HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
sysTableHsqlNames[SYSTEM_TYPEINFO].name, false,
SchemaObject.INDEX);
t.createPrimaryKey(name, new int[] {
1, 18
}, true);
return t;
}
PersistentStore store = database.persistentStoreCollection.getStore(t);
Session sys = database.sessionManager.newSysSession(
SqlInvariants.INFORMATION_SCHEMA_HSQLNAME, session.getUser());
Result rs = sys.executeDirectStatement(
"select TYPE_NAME, DATA_TYPE, PRECISION, LITERAL_PREFIX,"
+ "LITERAL_SUFFIX, CREATE_PARAMS, NULLABLE, CASE_SENSITIVE,"
+ "SEARCHABLE,"
+ "UNSIGNED_ATTRIBUTE, FIXED_PREC_SCALE, AUTO_INCREMENT, LOCAL_TYPE_NAME, MINIMUM_SCALE, "
+ "MAXIMUM_SCALE, SQL_DATA_TYPE, SQL_DATETIME_SUB, NUM_PREC_RADIX, TYPE_SUB "
+ "from INFORMATION_SCHEMA.SYSTEM_ALLTYPEINFO where AS_TAB_COL = true;");
t.insertSys(store, rs);
sys.close();
return t;
} |
java | @Override
public void setMessageType(char indicationType)
throws IllegalArgumentException {
/*
* old TURN DATA indication type is an indication despite 0x0115 &
* 0x0110 indicates STUN error response type
*/
if (!isIndicationType(indicationType)
&& indicationType != StunMessage.OLD_DATA_INDICATION) {
throw new IllegalArgumentException((int) (indicationType)
+ " - is not a valid indication type.");
}
super.setMessageType(indicationType);
} |
python | def _set_intrinsics(self):
"""Read the intrinsics matrix from the stream.
"""
strm = self._profile.get_stream(rs.stream.color)
obj = strm.as_video_stream_profile().get_intrinsics()
self._intrinsics[0, 0] = obj.fx
self._intrinsics[1, 1] = obj.fy
self._intrinsics[0, 2] = obj.ppx
self._intrinsics[1, 2] = obj.ppy |
python | def create_role_policy(role_name, policy_name, policy, region=None, key=None,
keyid=None, profile=None):
'''
Create or modify a role policy.
CLI Example:
.. code-block:: bash
salt myminion boto_iam.create_role_policy myirole mypolicy '{"MyPolicy": "Statement": [{"Action": ["sqs:*"], "Effect": "Allow", "Resource": ["arn:aws:sqs:*:*:*"], "Sid": "MyPolicySqs1"}]}'
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
_policy = get_role_policy(role_name, policy_name, region, key, keyid, profile)
mode = 'create'
if _policy:
if _policy == policy:
return True
mode = 'modify'
if isinstance(policy, six.string_types):
policy = salt.utils.json.loads(policy, object_pairs_hook=odict.OrderedDict)
try:
_policy = salt.utils.json.dumps(policy)
conn.put_role_policy(role_name, policy_name, _policy)
if mode == 'create':
msg = 'Successfully added policy %s to IAM role %s.'
else:
msg = 'Successfully modified policy %s for IAM role %s.'
log.info(msg, policy_name, role_name)
return True
except boto.exception.BotoServerError as e:
log.error(e)
log.error('Failed to %s policy %s for IAM role %s.',
mode, policy_name, role_name)
return False |
python | def start(self, subid, params=None):
''' /v1/server/start
POST - account
Start a virtual machine. If the machine is already
running, it will be restarted.
Link: https://www.vultr.com/api/#server_start
'''
params = update_params(params, {'SUBID': subid})
return self.request('/v1/server/start', params, 'POST') |
java | @Override
public KeywordLiteral setType(int nodeType) {
if (!(nodeType == Token.THIS
|| nodeType == Token.NULL
|| nodeType == Token.TRUE
|| nodeType == Token.FALSE
|| nodeType == Token.DEBUGGER))
throw new IllegalArgumentException("Invalid node type: "
+ nodeType);
type = nodeType;
return this;
} |
python | def diff(before, after, check_modified=False):
"""Diff two sequences of comparable objects.
The result of this function is a list of dictionaries containing
values in ``before`` or ``after`` with a ``state`` of either
'unchanged', 'added', 'deleted', or 'modified'.
>>> import pprint
>>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd'])
>>> pprint.pprint(result)
[{'state': 'deleted', 'value': 'a'},
{'state': 'unchanged', 'value': 'b'},
{'state': 'unchanged', 'value': 'c'},
{'state': 'added', 'value': 'd'}]
Parameters
----------
before : iterable
An iterable containing values to be used as the baseline version.
after : iterable
An iterable containing values to be compared against the baseline.
check_modified : bool
Whether or not to check for modifiedness.
Returns
-------
diff_items : A list of dictionaries containing diff information.
"""
# The grid will be empty if `before` or `after` are
# empty; this will violate the assumptions made in the rest
# of this function.
# If this is the case, we know what the result of the diff is
# anyways: the contents of the other, non-empty input.
if len(before) == 0:
return [
{'state': 'added', 'value': v}
for v in after
]
elif len(after) == 0:
return [
{'state': 'deleted', 'value': v}
for v in before
]
grid = create_grid(before, after)
nrows = len(grid[0])
ncols = len(grid)
dps = diff_points(grid)
result = []
for kind, col, row in dps:
if kind == 'unchanged':
value = before[col]
result.append({
'state': kind,
'value': value,
})
elif kind == 'deleted':
assert col < ncols
value = before[col]
result.append({
'state': kind,
'value': value,
})
elif kind == 'added':
assert row < nrows
value = after[row]
result.append({
'state': kind,
'value': value,
})
elif check_modified and kind == 'modified':
result.append({
'state': kind,
'originalvalue': before[col],
'modifiedvalue': after[row],
})
elif (not check_modified) and kind == 'modified':
result.append({
'state': 'deleted',
'value': before[col],
})
result.append({
'state': 'added',
'value': after[row],
})
else:
raise Exception('We should not be here.')
return result |
python | def _at_extend(self, calculator, rule, scope, block):
"""
Implements @extend
"""
from scss.selector import Selector
selectors = calculator.apply_vars(block.argument)
rule.extends_selectors.extend(Selector.parse_many(selectors)) |
python | def extract_cookies(self, response, request):
"""Extract cookies from response, where allowable given the request."""
_debug("extract_cookies: %s", response.info())
self._cookies_lock.acquire()
try:
self._policy._now = self._now = int(time.time())
for cookie in self.make_cookies(response, request):
if self._policy.set_ok(cookie, request):
_debug(" setting cookie: %s", cookie)
self.set_cookie(cookie)
finally:
self._cookies_lock.release() |
python | def start_browser_when_ready(host, port=None, cancel_event=None):
"""
Starts a thread that waits for the server then opens the specified
address in the browser. Set cancel_event to cancel the wait. The
started thread object is returned.
"""
browser_thread = Thread(
target=wait_and_start_browser, args=(host, port, cancel_event))
browser_thread.daemon = True
browser_thread.start()
return browser_thread |
java | public static void validateMergedXML(ModuleInitData mid)
throws EJBConfigurationException
{
if (!validationEnabled()) {
return;
}
EJBJar mergedEJBJar = mid.getMergedEJBJar();
if (mergedEJBJar == null) { // managed beans only
return;
}
List<String> errors = new ArrayList<String>();
List<EnterpriseBean> mergedBeans = mergedEJBJar.getEnterpriseBeans();
if (mergedBeans == null)
{
errors.add(MERGE + "contains no EJBs.");
logErrors(mid.ivJ2EEName, errors);
return;
}
TreeSet<String> mergedNames = new TreeSet<String>();
for (EnterpriseBean mergedBean : mergedBeans) {
mergedNames.add(mergedBean.getName());
}
for (BeanInitData bid : mid.ivBeans) {
mergedNames.remove(bid.ivName);
}
if (!mergedNames.isEmpty()) {
errors.add(MERGE + "contains extra EJBs named " + mergedNames);
}
logErrors(mid.ivJ2EEName, errors);
} |
python | def start(st_reg_number):
"""Checks the number valiaty for the Alagoas state"""
if len(st_reg_number) > 9:
return False
if len(st_reg_number) < 9:
return False
if st_reg_number[0:2] != "24":
return False
if st_reg_number[2] not in ['0', '3', '5', '7', '8']:
return False
aux = 9
sum_total = 0
for i in range(len(st_reg_number)-1):
sum_total = sum_total + int(st_reg_number[i]) * aux
aux -= 1
product = sum_total * 10
aux_2 = int(product/11)
digit = product - aux_2 * 11
if digit == 10:
digit = 0
return digit == int(st_reg_number[len(st_reg_number)-1]) |
python | def smoothing(self, f, w, sm, smtol, gstol):
"""
Smooths a surface f by choosing nodal function values and gradients to
minimize the linearized curvature of F subject to a bound on the
deviation from the data values. This is more appropriate than interpolation
when significant errors are present in the data.
Parameters
----------
f : array of floats, shape (n,)
field to apply smoothing on
w : array of floats, shape (n,)
weights associated with data value in f
w[i] = 1/sigma_f^2 is a good rule of thumb.
sm : float
positive parameter specifying an upper bound on Q2(f).
generally n-sqrt(2n) <= sm <= n+sqrt(2n)
smtol : float
specifies relative error in satisfying the constraint
sm(1-smtol) <= Q2 <= sm(1+smtol) between 0 and 1.
gstol : float
tolerance for convergence.
gstol = 0.05*mean(sigma_f)^2 is a good rule of thumb.
Returns
-------
f_smooth : array of floats, shape (n,)
smoothed version of f
(dfdx, dfdy) : tuple of floats, tuple of 2 shape (n,) arrays
first derivatives of f_smooth in the x and y directions
"""
if f.size != self.npoints or f.size != w.size:
raise ValueError('f and w should be the same size as mesh')
f, w = self._shuffle_field(f, w)
sigma = 0
iflgs = 0
f_smooth, df, ierr = _srfpack.smsurf(self.x, self.y, f, self.lst, self.lptr, self.lend,\
iflgs, sigma, w, sm, smtol, gstol)
if ierr < 0:
raise ValueError('ierr={} in gradg\n{}'.format(ierr, _ier_codes[ierr]))
if ierr == 1:
raise RuntimeWarning("No errors were encountered but the constraint is not active --\n\
F, FX, and FY are the values and partials of a linear function \
which minimizes Q2(F), and Q1 = 0.")
return self._deshuffle_field(f_smooth), self._deshuffle_field(df[0], df[1]) |
python | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'text') and self.text is not None:
_dict['text'] = self.text
if hasattr(self, 'score') and self.score is not None:
_dict['score'] = self.score
return _dict |
python | def set_coords(self, names, inplace=None):
"""Given names of one or more variables, set them as coordinates
Parameters
----------
names : str or list of str
Name(s) of variables in this dataset to convert into coordinates.
inplace : bool, optional
If True, modify this dataset inplace. Otherwise, create a new
object.
Returns
-------
Dataset
See also
--------
Dataset.swap_dims
"""
# TODO: allow inserting new coordinates with this method, like
# DataFrame.set_index?
# nb. check in self._variables, not self.data_vars to insure that the
# operation is idempotent
inplace = _check_inplace(inplace)
if isinstance(names, str):
names = [names]
self._assert_all_in_dataset(names)
obj = self if inplace else self.copy()
obj._coord_names.update(names)
return obj |
java | @Override
public void visitClassContext(ClassContext classContext) {
try {
clsContext = classContext;
JavaClass cls = classContext.getJavaClass();
if (cls.isInterface())
return;
superClasses = cls.getSuperClasses();
cls.accept(this);
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
} finally {
clsContext = null;
superClasses = null;
}
} |
java | public static boolean checkUri(String uri) {
QueryStringDecoder queryStringDecoder = new QueryStringDecoder(uri, true);
String path = queryStringDecoder.path();
int groupIndex = path.indexOf('/') + 1,
unitIndex = path.indexOf('/', groupIndex) + 1;
if (groupIndex == 0 || unitIndex == 0) {
LOG.warn("URI is illegal: " + uri);
return false;
}
return true;
} |
java | public void marshall(DescribeCollectionRequest describeCollectionRequest, ProtocolMarshaller protocolMarshaller) {
if (describeCollectionRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeCollectionRequest.getCollectionId(), COLLECTIONID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |