language | func_code_string
stringclasses 2 | stringlengths 63–466k
---|---
java | public void setTimeout(int pTimeout) {
if (pTimeout < 0) { // Must be positive
throw new IllegalArgumentException("Timeout must be positive.");
}
timeout = pTimeout;
if (socket != null) {
try {
socket.setSoTimeout(pTimeout);
}
catch (SocketException se) {
// Not much to do about that...
}
}
} |
java | @javax.annotation.CheckForNull
public static IResource getResource(Object element) {
if (element instanceof IJavaElement) {
return ((IJavaElement) element).getResource();
}
return Util.getAdapter(IResource.class, element);
} |
java | public static Dictionary buildDictionary(final ObjectStream<Parse> data,
final HeadRules rules, final int cutoff) throws IOException {
final TrainingParameters params = new TrainingParameters();
params.put("dict", TrainingParameters.CUTOFF_PARAM,
Integer.toString(cutoff));
return buildDictionary(data, rules, params);
} |
def validate_authentication(self, username, password, handler):
    """Authenticate *username* with *password*.

    Raises AuthenticationFailed unless both the credential check and
    the account lookup succeed.
    """
    credentials = {self.username_field: username, 'password': password}
    user = authenticate(**credentials)
    account = self.get_account(username)
    if not (user and account):
        raise AuthenticationFailed("Authentication failed.")
java | static BasicTag convert(Tag t) {
return (t instanceof BasicTag) ? (BasicTag) t : new BasicTag(t.key(), t.value());
} |
/**
 * Returns the INFORMATION_SCHEMA TABLE_CONSTRAINTS table: on first call
 * the column schema is created and returned; on later calls one row is
 * inserted per CHECK, UNIQUE, FOREIGN KEY or PRIMARY KEY constraint of
 * every accessible non-view table.
 */
Table TABLE_CONSTRAINTS() {

    Table t = sysTables[TABLE_CONSTRAINTS];

    if (t == null) {
        t = createBlankTable(sysTableHsqlNames[TABLE_CONSTRAINTS]);

        addColumn(t, "CONSTRAINT_CATALOG", SQL_IDENTIFIER);
        addColumn(t, "CONSTRAINT_SCHEMA", SQL_IDENTIFIER);
        addColumn(t, "CONSTRAINT_NAME", SQL_IDENTIFIER);      // not null
        addColumn(t, "CONSTRAINT_TYPE", CHARACTER_DATA);      // not null
        addColumn(t, "TABLE_CATALOG", SQL_IDENTIFIER);
        addColumn(t, "TABLE_SCHEMA", SQL_IDENTIFIER);
        addColumn(t, "TABLE_NAME", SQL_IDENTIFIER);           // not null
        addColumn(t, "IS_DEFERRABLE", YES_OR_NO);             // not null
        addColumn(t, "INITIALLY_DEFERRED", YES_OR_NO);        // not null

        // false PK, as CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA,
        // TABLE_CATALOG and/or TABLE_SCHEMA may be null
        HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
            sysTableHsqlNames[TABLE_CONSTRAINTS].name, false,
            SchemaObject.INDEX);

        t.createPrimaryKey(name, new int[] {
            0, 1, 2, 4, 5, 6
        }, false);

        return t;
    }

    PersistentStore store = database.persistentStoreCollection.getStore(t);

    // Intermediate holders
    Iterator tables;
    Table table;
    Constraint[] constraints;
    int constraintCount;
    Constraint constraint;
    String cat;
    String schem;
    Object[] row;

    // column number mappings
    final int constraint_catalog = 0;
    final int constraint_schema = 1;
    final int constraint_name = 2;
    final int constraint_type = 3;
    final int table_catalog = 4;
    final int table_schema = 5;
    final int table_name = 6;
    final int is_deferable = 7;
    final int initially_deferred = 8;

    // initialization
    tables =
        database.schemaManager.databaseObjectIterator(SchemaObject.TABLE);
    table = null;    // else compiler complains

    // do it
    while (tables.hasNext()) {
        table = (Table) tables.next();

        /** @todo - requires table level INSERT or UPDATE or DELETE or REFERENCES (not SELECT) right */
        if (table.isView() || !isAccessibleTable(table)) {
            continue;
        }

        constraints = table.getConstraints();
        constraintCount = constraints.length;

        for (int i = 0; i < constraintCount; i++) {
            constraint = constraints[i];
            row = t.getEmptyRowData();

            switch (constraint.getConstraintType()) {

                case Constraint.CHECK : {
                    row[constraint_type] = "CHECK";
                    break;
                }
                case Constraint.UNIQUE : {
                    row[constraint_type] = "UNIQUE";
                    break;
                }
                case Constraint.FOREIGN_KEY : {
                    row[constraint_type] = "FOREIGN KEY";

                    // NOTE(review): this reassigns the loop variable so the
                    // TABLE_* columns below describe the table returned by
                    // getRef() -- presumably the referencing table; confirm
                    // this is intended for the rest of this iteration.
                    table = constraint.getRef();
                    break;
                }
                case Constraint.PRIMARY_KEY : {
                    row[constraint_type] = "PRIMARY KEY";
                    break;
                }
                case Constraint.MAIN :
                default : {
                    // other constraint kinds are not reported in this view
                    continue;
                }
            }

            cat = database.getCatalogName().name;
            schem = table.getSchemaName().name;
            row[constraint_catalog] = cat;
            row[constraint_schema] = schem;
            row[constraint_name] = constraint.getName().name;
            row[table_catalog] = cat;
            row[table_schema] = schem;
            row[table_name] = table.getName().name;
            row[is_deferable] = Tokens.T_NO;
            row[initially_deferred] = Tokens.T_NO;

            t.insertSys(store, row);
        }
    }

    return t;
}
/**
 * Stops the inbound channel chain for this listener port.  Failures to
 * stop the chain are FFDC-logged and traced but otherwise swallowed:
 * close() is best-effort.
 */
public void close() {
    if (tc.isEntryEnabled())
        SibTr.entry(this, tc, "close");

    // begin F177053
    ChannelFramework framework = ChannelFrameworkFactory.getChannelFramework(); // F196678.10
    try {
        framework.stopChain(chainInbound, CHAIN_STOP_TIME);
    } catch (ChainException e) {
        FFDCFilter.processException(e, "com.ibm.ws.sib.jfapchannel.impl.ListenerPortImpl.close",
                JFapChannelConstants.LISTENERPORTIMPL_CLOSE_01,
                new Object[] { framework, chainInbound }); // D232185
        if (tc.isEventEnabled())
            SibTr.exception(this, tc, e);
    } catch (ChannelException e) {
        FFDCFilter.processException(e, "com.ibm.ws.sib.jfapchannel.impl.ListenerPortImpl.close",
                JFapChannelConstants.LISTENERPORTIMPL_CLOSE_02,
                new Object[] { framework, chainInbound }); // D232185
        if (tc.isEventEnabled())
            SibTr.exception(this, tc, e);
    }
    // end F177053

    if (tc.isEntryEnabled())
        SibTr.exit(this, tc, "close");
}
java | public boolean containsBetaSettings(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
return internalGetBetaSettings().getMap().containsKey(key);
} |
def checksum1(data, stringlength=None):
    """Calculate Checksum 1 for a HerkuleX data packet.

    XORs the first ``stringlength`` bytes of ``data`` together and masks
    the result with 0xFE, as the HerkuleX packet format requires.

    Args:
        data (list): the data of which checksum is to be calculated
        stringlength (int, optional): number of leading bytes to include;
            defaults to the full length of ``data`` (backward compatible:
            callers passing ``len(data)`` explicitly are unaffected)
    Returns:
        int: The calculated checksum 1
    """
    if stringlength is None:
        stringlength = len(data)
    value_buffer = 0
    for byte in data[:stringlength]:
        value_buffer ^= byte
    return value_buffer & 0xFE
/**
 * Sets {@code name} to {@code value} in both the overlay and the main
 * properties, mirroring the value onto any alternate (deprecated) names
 * so old and new keys stay in sync.
 */
public void set(String name, String value) {
    // Force property loading so the deprecated-key map is populated
    // before getAlternateNames() is consulted below.
    if (deprecatedKeyMap.isEmpty()) {
        getProps();
    }
    getOverlay().setProperty(name, value);
    getProps().setProperty(name, value);
    // Programmatic sets have no originating resource.
    updatingResource.put(name, UNKNOWN_RESOURCE);
    String[] altNames = getAlternateNames(name);
    if (altNames != null && altNames.length > 0) {
        for (String altName : altNames) {
            getOverlay().setProperty(altName, value);
            getProps().setProperty(altName, value);
        }
    }
    warnOnceIfDeprecated(name);
}
java | public java.util.List<String> getFilter() {
if (filter == null) {
filter = new com.amazonaws.internal.SdkInternalList<String>();
}
return filter;
} |
def _get_group(conn=None, name=None, vpc_id=None, vpc_name=None, group_id=None,
               region=None, key=None, keyid=None, profile=None):  # pylint: disable=W0613
    '''
    Get a group object given a name, name and vpc_id/vpc_name or group_id. Return
    a boto.ec2.securitygroup.SecurityGroup object if the group is found, else
    return None.
    '''
    if vpc_name and vpc_id:
        raise SaltInvocationError('The params \'vpc_id\' and \'vpc_name\' '
                                  'are mutually exclusive.')
    if vpc_name:
        try:
            # vpc_id is known to be falsy here (checked above), so this
            # resolves purely from vpc_name.
            vpc_id = _vpc_name_to_id(vpc_id=vpc_id, vpc_name=vpc_name, region=region,
                                     key=key, keyid=keyid, profile=profile)
        except boto.exception.BotoServerError as e:
            log.debug(e)
            return None
    if name:
        if vpc_id is None:
            log.debug('getting group for %s', name)
            group_filter = {'group-name': name}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            # security groups can have the same name if groups exist in both
            # EC2-Classic and EC2-VPC
            # iterate through groups to ensure we return the EC2-Classic
            # security group
            for group in filtered_groups:
                # a group in EC2-Classic will have vpc_id set to None
                if group.vpc_id is None:
                    return group
            # If there are more security groups, and no vpc_id, we can't know which one to choose.
            if len(filtered_groups) > 1:
                raise CommandExecutionError('Security group belongs to more VPCs, specify the VPC ID!')
            elif len(filtered_groups) == 1:
                return filtered_groups[0]
            return None
        elif vpc_id:
            log.debug('getting group for %s in vpc_id %s', name, vpc_id)
            # NOTE(review): the filter key 'vpc_id' differs from the usual
            # EC2 filter spelling 'vpc-id' -- confirm against the boto docs.
            group_filter = {'group-name': name, 'vpc_id': vpc_id}
            filtered_groups = conn.get_all_security_groups(filters=group_filter)
            if len(filtered_groups) == 1:
                return filtered_groups[0]
            else:
                return None
        else:
            return None
    elif group_id:
        try:
            groups = conn.get_all_security_groups(group_ids=[group_id])
        except boto.exception.BotoServerError as e:
            log.debug(e)
            return None
        if len(groups) == 1:
            return groups[0]
        else:
            return None
    else:
        return None
/**
 * Replaces a unit (matched by its group and name, taken from the new
 * unit) in all three registries under the write lock: the per-group
 * unit list, the class-to-unit lookup and the full-name lookup.
 */
public static void replaceUnit(/*String group, String unitName, */ Unit newUnit) {
    String group = newUnit.getGroup().getName(), unitName = newUnit.getName();
    readWriteLock.writeLock().lock();
    try {
        //unitMap
        List<Unit> unitList = unitMap.get(group);
        unitList.removeIf(unitToRemove -> Objects.equals(unitToRemove.getName(), unitName));
        unitList.add(newUnit);
        //searchUnitByClassMap
        // Collect into a temp map to avoid mutating searchUnitByClass
        // while iterating over its entry set.
        Map<Class<? extends Unit>, Unit> tmp = new HashMap<>();
        for (Map.Entry<Class<? extends Unit>, Unit> classUnitEntry : searchUnitByClass.entrySet()) {
            Unit unit = classUnitEntry.getValue();
            if (unit.getGroup().getName().equals(group) && unit.getName().equals(unitName)) {
                // NOTE(review): stops at the first match -- assumes at most
                // one class maps to this unit; confirm with callers.
                tmp.put(classUnitEntry.getKey(), newUnit);
                break;
            }
        }
        searchUnitByClass.putAll(tmp);
        //searchUnitMap
        searchUnitMap.put(Unit.fullName(group, unitName), newUnit);
    } finally {
        readWriteLock.writeLock().unlock();
    }
}
java | private String extractFileName(Part part) {
String disposition = part.getHeader("Content-Disposition");
if (disposition == null)
return null;
Matcher matcher = FILENAME_PATTERN.matcher(disposition);
if (!matcher.find())
return null;
return matcher.group(1);
} |
def _CheckPythonModule(self, dependency):
    """Checks the availability of a Python module.

    Args:
      dependency (DependencyDefinition): dependency definition.

    Returns:
      tuple: consists:
        bool: True if the Python module is available and conforms to
            the minimum required version, False otherwise.
        str: status message.
    """
    module_object = self._ImportPythonModule(dependency.name)
    if not module_object:
        return False, 'missing: {0:s}'.format(dependency.name)
    # Without a version property there is nothing further to verify.
    if not dependency.version_property:
        return True, dependency.name
    return self._CheckPythonModuleVersion(
        dependency.name, module_object, dependency.version_property,
        dependency.minimum_version, dependency.maximum_version)
java | @Override public Object getCachedValue(FieldType field)
{
return (field == null ? null : m_array[field.getValue()]);
} |
def pan_cb(self, setting, value):
    """Handle callback related to changes in pan.

    ``value`` is expected to hold at least (pan_x, pan_y); any extra
    elements are ignored.
    """
    pan_x, pan_y = value[:2]
    # Pass the values as logging arguments so formatting only happens
    # when debug logging is actually enabled.
    self.logger.debug("pan set to %.2f,%.2f", pan_x, pan_y)
    self.redraw(whence=0)
def post(self, resource_endpoint, data=None, files=None):
    """Don't use it.

    POSTs ``data`` (and optionally ``files``) to ``resource_endpoint``.
    With ``files`` the payload is sent as form data, otherwise as JSON.
    """
    # Use a None sentinel instead of a mutable default argument; an
    # omitted ``data`` behaves exactly like the old ``data={}`` default.
    if data is None:
        data = {}
    url = self._create_request_url(resource_endpoint)
    if files:
        data = self._prepare_params_for_file_upload(data)
        return req.post(url, headers=self.auth_header, files=files, data=data)
    return req.post(url, headers=self.auth_header, json=data)
/**
 * Looks up the title of the page with the given id via a native query
 * against PageMapLine.
 *
 * @param pageId database id of the page
 * @return the page title
 * @throws WikiApiException if no page with that id exists
 */
public Title getTitle(int pageId) throws WikiApiException {
    Session session = this.__getHibernateSession();
    session.beginTransaction();
    Object returnValue = session.createNativeQuery(
        "select p.name from PageMapLine as p where p.pageId= :pId").setParameter("pId", pageId, IntegerType.INSTANCE).uniqueResult();
    session.getTransaction().commit();
    String title = (String)returnValue;
    if(title==null){
        throw new WikiPageNotFoundException();
    }
    return new Title(title);
}
/**
 * Adapts this instance into an {@code IntResultListener}: completions
 * are boxed and forwarded to {@code requestProcessed}, failures to
 * {@code requestFailed}.
 */
public final IntResultListener asIntResultListener ()
{
    return new IntResultListener() {
        public void requestCompleted (int result) {
            // call the invocation method and it will do the unsafe cast for us.
            Resulting.this.requestProcessed((Object)result);
        }
        public void requestFailed (Exception cause) {
            Resulting.this.requestFailed(cause);
        }
    };
}
java | private String sanitizePrincipal(final String input) {
String s = "";
for (int i = 0; i < input.length(); i++) {
char c = input.charAt(i);
if (c == '*') {
// escape asterisk
s += "\\2a";
} else if (c == '(') {
// escape left parenthesis
s += "\\28";
} else if (c == ')') {
// escape right parenthesis
s += "\\29";
} else if (c == '\\') {
// escape backslash
s += "\\5c";
} else if (c == '\u0000') {
// escape NULL char
s += "\\00";
} else if (c <= 0x7f) {
// regular 1-byte UTF-8 char
s += String.valueOf(c);
} else if (c >= 0x080) {
// higher-order 2, 3 and 4-byte UTF-8 chars
byte[] utf8bytes = String.valueOf(c).getBytes(StandardCharsets.UTF_8);
for (byte b : utf8bytes) {
s += String.format(Locale.ENGLISH, "\\%02x", b);
}
}
}
return s;
} |
/**
 * Tears down all networking resources owned by this instance, in order:
 * broadcaster, gossip timer, socket reader, socket writer.  Each field
 * is nulled after teardown, making the method idempotent.
 */
protected void disconnect() {
    // Stop broadcaster
    if (locator != null) {
        locator.disconnect();
        locator = null;
    }
    // Stop gossiper's timer
    if (gossiperTimer != null) {
        // false: let an in-flight run finish rather than interrupting it
        gossiperTimer.cancel(false);
        gossiperTimer = null;
    }
    // Close socket reader
    if (reader != null) {
        reader.disconnect();
        reader = null;
    }
    // Close socket writer
    if (writer != null) {
        writer.disconnect();
        writer = null;
    }
}
def module(self):
    """The module in which the Class is defined.

    Python equivalent of the CLIPS defglobal-module command.
    """
    # NOTE(review): ffi.string() returns bytes; presumably
    # EnvFindDefmodule accepts the bytes name directly -- confirm
    # against the cffi bindings.
    modname = ffi.string(lib.EnvDefclassModule(self._env, self._cls))
    defmodule = lib.EnvFindDefmodule(self._env, modname)
    return Module(self._env, defmodule)
def add_word(self, word, freq=None, tag=None):
    """
    Add a word to dictionary.

    freq and tag can be omitted, freq defaults to be a calculated value
    that ensures the word can be cut out.
    """
    self.check_initialized()
    word = strdecode(word)
    freq = int(freq) if freq is not None else self.suggest_freq(word, False)
    self.FREQ[word] = freq
    self.total += freq
    if tag:
        self.user_word_tag_tab[word] = tag
    # Register every prefix of the word (frequency 0) so the prefix
    # dictionary can traverse to the full word during segmentation.
    for ch in xrange(len(word)):
        wfrag = word[:ch + 1]
        if wfrag not in self.FREQ:
            self.FREQ[wfrag] = 0
    # Frequency 0 means "never emit as a word": force a split instead.
    if freq == 0:
        finalseg.add_force_split(word)
java | public boolean isOnAtCurrentZoom(GoogleMap map, LatLng latLng) {
float zoom = MapUtils.getCurrentZoom(map);
boolean on = isOnAtCurrentZoom(zoom, latLng);
return on;
} |
def add_user(self, group, username):
    """
    Add a user to the specified LDAP group.

    Args:
        group: Name of group to update
        username: Username of user to add

    Raises:
        ldap_tools.exceptions.InvalidResult:
            Results of the query were invalid. The actual exception raised
            inherits from InvalidResult. See #lookup_id for more info.
    """
    # Verify the group exists before attempting the modify.
    try:
        self.lookup_id(group)
    except ldap_tools.exceptions.InvalidResult as err:  # pragma: no cover
        raise err from None
    changes = {'memberUid': [(ldap3.MODIFY_ADD, [username])]}
    self.client.modify(self.__distinguished_name(group), changes)
def write(self, file, text, subvars={}, trim_leading_lf=True):
    '''write to a file with variable substitution'''
    rendered = self.substitute(text, subvars=subvars,
                               trim_leading_lf=trim_leading_lf)
    file.write(rendered)
def copy_s3_bucket(src_bucket_name, src_bucket_secret_key, src_bucket_access_key,
                   dst_bucket_name, dst_bucket_secret_key, dst_bucket_access_key):
    """ Copy S3 bucket directory with CMS data between environments. Operations are done on server. """
    # NOTE(review): bucket names and keys are interpolated straight into
    # shell commands -- values must come from trusted configuration.
    with cd(env.remote_path):
        tmp_dir = "s3_tmp"
        tmp_dir = "s3_tmp"
        sudo('rm -rf %s' % tmp_dir, warn_only=True, user=env.remote_user)
        sudo('mkdir %s' % tmp_dir, user=env.remote_user)
        # Pull everything under upload/ from the source bucket...
        sudo('s3cmd --recursive get s3://%s/upload/ %s --secret_key=%s --access_key=%s' % (
            src_bucket_name, tmp_dir, src_bucket_secret_key, src_bucket_access_key),
            user=env.remote_user)
        # ...push it into the destination bucket...
        sudo('s3cmd --recursive put %s/ s3://%s/upload/ --secret_key=%s --access_key=%s' % (
            tmp_dir, dst_bucket_name, dst_bucket_secret_key, dst_bucket_access_key),
            user=env.remote_user)
        # ...and make the copied objects publicly readable.
        sudo('s3cmd setacl s3://%s/upload --acl-public --recursive --secret_key=%s --access_key=%s' % (
            dst_bucket_name, dst_bucket_secret_key, dst_bucket_access_key),
            user=env.remote_user)
        # cleanup
        sudo('rm -rf %s' % tmp_dir, warn_only=True, user=env.remote_user)
def setzscale(self, z1="auto", z2="auto", nsig=3, samplesizelimit = 10000, border=300):
    """
    We set z1 and z2, according to different algorithms or arguments.

    For both z1 and z2, give either :

        - "auto" (default automatic, different between z1 and z2)
        - "ex" (extrema)
        - "flat" ("sigma-cuts" around median value, well-suited for flatfields)
        - numeric value like 1230.34

    nsig is the number of sigmas to be rejected (used by auto z1 + both flats)

    samplesizelimit is the maximum number of pixels to compute statistics on.
    If your image is larger then samplesizelimit, I will use only samplesizelimit pixels of it.

    If your image is 3 times border in width and height, I will skip border pixels around the image before
    doing calculations. This is made to get rid of the overscan and prescan etc.
    So you can basically leave this at 300, it will only affect images wider then 900 pixels.
    (300 happens to be a safe value for many telescopes.)
    You can put border = 0 to deactivate this feature.

    If you give nothing, the cutoff will not be changed.
    You should set the z scale directly after cropping the image.
    """
    # NOTE: Python 2 module (print statements, old-style raise).
    if self.pilimage != None:
        raise RuntimeError, "Cannot set z scale anymore, PIL image already exists !"

    # Use a border-cropped copy for the statistics when the image is big
    # enough; otherwise fall back to the whole image.
    if self.numpyarray.shape[0] > 3 * border and self.numpyarray.shape[1] > 3 * border:
        if border > 0:
            if self.verbose :
                print "For the stats I will leave a border of %i pixels" % border
            calcarray = self.numpyarray[border:-border, border:-border].copy()
        else:
            calcarray = self.numpyarray.copy()
    else:
        calcarray = self.numpyarray.copy()
        if self.verbose:
            print "Image is too small for a border of %i" % (border)

    # Starting with the simple possibilities :
    if z1 == "ex" :
        self.z1 = np.min(calcarray)
        if self.verbose:
            print "Setting ex z1 to %f" % self.z1

    if z2 == "ex":
        self.z2 = np.max(calcarray)
        if self.verbose:
            print "Setting ex z2 to %f" % self.z2

    if type(z1) == type(0) or type(z1) == type(0.0):
        self.z1 = z1
        if self.verbose:
            print "Setting z1 to %f" % self.z1

    if type(z2) == type(0) or type(z2) == type(0.0):
        self.z2 = z2
        if self.verbose:
            print "Setting z2 to %f" % self.z2

    # Now it gets a little more sophisticated.
    if z1 == "auto" or z2 == "auto" or z1 == "flat" or z2 == "flat":

        # To speed up, we do not want to do statistics on the full image if it is large.
        # So we prepare a small random sample of pixels.

        calcarray.shape = calcarray.size # We flatten the 2D array
        if calcarray.size > samplesizelimit :
            #selectionindices = np.random.random_integers(low = 0, high = calcarray.size - 1, size=samplesizelimit)
            # Deterministic, evenly spaced sample rather than a random one.
            selectionindices = np.linspace(0, calcarray.size-1, samplesizelimit).astype(np.int)
            statsel = calcarray[selectionindices]
        else :
            statsel = calcarray

        #nbrofbins = 10 + int(np.log10(calcarray.size)*10.0)
        #print "Building histogram with %i bins" % nbrofbins
        #nbrofbins = 100
        #hist = np.histogram(statsel, bins=nbrofbins, range=(self.z1, self.z2), normed=False, weights=None, new=True)

        medianlevel = np.median(statsel)
        firststd = np.std(statsel)

        if z1 == "auto" :
            # 2 sigma clipping (quick and dirty star removal) :
            nearskypixvals = statsel[np.logical_and(statsel > medianlevel - 2*firststd, statsel < medianlevel + 2*firststd)]
            skylevel = np.median(nearskypixvals)
            secondstd = np.std(nearskypixvals)
            if self.verbose :
                print "Sky level at %f +/- %f" % (skylevel, secondstd)
            self.z1 = skylevel - nsig*secondstd
            if self.verbose :
                print "Setting auto z1 to %f, nsig = %i" % (self.z1, nsig)

        if z2 == "auto" :
            # Here we want to reject a percentage of high values...
            sortedstatsel = np.sort(statsel)
            n = round(0.9995 * statsel.size)
            self.z2 = sortedstatsel[n]
            if self.verbose :
                print "Setting auto z2 to %f" % self.z2

        if z1 == "flat" :
            # 5 sigma clipping to get rid of cosmics :
            nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)]
            flatlevel = np.median(nearflatpixvals)
            flatstd = np.std(nearflatpixvals)
            self.z1 = flatlevel - nsig*flatstd
            if self.verbose :
                print "Setting flat z1 : %f, nsig = %i" % (self.z1, nsig)

        if z2 == "flat" : # symmetric to z1
            # 5 sigma clipping to get rid of cosmics :
            nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)]
            flatlevel = np.median(nearflatpixvals)
            flatstd = np.std(nearflatpixvals)
            self.z2 = flatlevel + nsig*flatstd
            if self.verbose :
                print "Setting flat z2 : %f, nsig = %i" % (self.z2, nsig)
java | public java.util.List<PrefixList> getPrefixLists() {
if (prefixLists == null) {
prefixLists = new com.amazonaws.internal.SdkInternalList<PrefixList>();
}
return prefixLists;
} |
def _apply_fit(self, font_family, font_size, is_bold, is_italic):
    """
    Arrange all the text in this text frame to fit inside its extents by
    setting auto size off, wrap on, and setting the font of all its text
    to *font_family*, *font_size*, *is_bold*, and *is_italic*.
    """
    # Turn autosizing off and wrapping on before fixing the font.
    self.auto_size = MSO_AUTO_SIZE.NONE
    self.word_wrap = True
    self._set_font(font_family, font_size, is_bold, is_italic)
/**
 * Collapses a parsed resource chain into a single resource by
 * repeatedly combining adjacent elements via tryCombineWith(), then
 * re-applies any view filter extracted from the chain.
 *
 * @return the single combined resource
 * @throws IllegalPathException if the chain cannot be reduced to one
 *         resource
 */
public static Resource optimizeNestedResourceChain(final SecurityContext securityContext, final HttpServletRequest request, final Map<Pattern, Class<? extends Resource>> resourceMap, final Value<String> propertyView) throws FrameworkException {

    final List<Resource> resourceChain = ResourceHelper.parsePath(securityContext, request, resourceMap, propertyView);

    ViewFilterResource view = null;
    int num = resourceChain.size();
    boolean found = false;

    do {
        // Pull any view filter out of the chain; it is re-combined at
        // the end.
        for (Iterator<Resource> it = resourceChain.iterator(); it.hasNext(); ) {
            Resource constr = it.next();
            if (constr instanceof ViewFilterResource) {
                view = (ViewFilterResource) constr;
                it.remove();
            }
        }

        found = false;

        try {
            // NOTE(review): `num` is the original size; as elements are
            // merged the get(i + 1) below eventually throws
            // IndexOutOfBoundsException, which the catch swallows --
            // that is what terminates each scan.
            for (int i = 0; i < num; i++) {
                Resource firstElement = resourceChain.get(i);
                Resource secondElement = resourceChain.get(i + 1);
                Resource combinedConstraint = firstElement.tryCombineWith(secondElement);
                if (combinedConstraint != null) {
                    // remove source constraints
                    resourceChain.remove(firstElement);
                    resourceChain.remove(secondElement);
                    // add combined constraint
                    resourceChain.add(i, combinedConstraint);
                    // signal success
                    found = true;
                }
            }
        } catch (Throwable t) {
            // ignore exceptions thrown here but make it possible to set a breakpoint
            final boolean test = false;
        }
    } while (found);

    if (resourceChain.size() == 1) {
        Resource finalResource = resourceChain.get(0);
        if (view != null) {
            finalResource = finalResource.tryCombineWith(view);
        }
        if (finalResource == null) {
            // fall back to original resource
            finalResource = resourceChain.get(0);
        }
        return finalResource;
    } else {
        logger.warn("Resource chain evaluation for path {} resulted in {} entries, returning status code 400.", new Object[] { request.getPathInfo(), resourceChain.size() });
    }

    throw new IllegalPathException("Cannot resolve URL path");
}
def predict(self, X):
    """Predict values using the model

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape [n_samples, n_features]

    Returns
    -------
    C : numpy array of shape [n_samples, n_outputs]
        Predicted values.
    """
    # Decision scores are mapped back to labels by the binarizer.
    return self.binarizer.inverse_transform(self.decision_function(X))
def add_to_group(self, group_path):
    """Add a device to a group, if the group doesn't exist it is created

    :param group_path: Path or "name" of the group
    """
    # No-op when the device is already in the requested group.
    if self.get_group_path() == group_path:
        return
    payload = ADD_GROUP_TEMPLATE.format(
        connectware_id=self.get_connectware_id(),
        group_path=group_path)
    self._conn.put('/ws/DeviceCore', payload)
    # Invalidate cache
    self._device_json = None
java | public static MozuUrl createOrderItemUrl(String orderId, String responseFields, Boolean skipInventoryCheck, String updateMode, String version)
{
UrlFormatter formatter = new UrlFormatter("/api/commerce/orders/{orderId}/items?updatemode={updateMode}&version={version}&skipInventoryCheck={skipInventoryCheck}&responseFields={responseFields}");
formatter.formatUrl("orderId", orderId);
formatter.formatUrl("responseFields", responseFields);
formatter.formatUrl("skipInventoryCheck", skipInventoryCheck);
formatter.formatUrl("updateMode", updateMode);
formatter.formatUrl("version", version);
return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD) ;
} |
java | public void setTraceSegmentDocuments(java.util.Collection<String> traceSegmentDocuments) {
if (traceSegmentDocuments == null) {
this.traceSegmentDocuments = null;
return;
}
this.traceSegmentDocuments = new java.util.ArrayList<String>(traceSegmentDocuments);
} |
/**
 * Returns the stored SSL scan result, or {@code null} if none has been
 * set.
 */
public com.google.api.ads.admanager.axis.v201808.SslScanResult getSslScanResult() {
    return sslScanResult;
}
/**
 * Returns the editable bundle for the given locale, loading it through
 * {@link ResourceBundle} on first access.  The AssertionErrors guard
 * against a fallback bundle being silently substituted for the exact
 * locale requested.
 */
public EditableResourceBundle getResourceBundle(Locale locale) {
    EditableResourceBundle localeBundle = bundles.get(locale);
    if(localeBundle==null) {
        ResourceBundle resourceBundle = ResourceBundle.getBundle(baseName, locale);
        if(!resourceBundle.getLocale().equals(locale)) throw new AssertionError("ResourceBundle not for this locale: "+locale);
        if(!(resourceBundle instanceof EditableResourceBundle)) throw new AssertionError("ResourceBundle is not a EditableResourceBundle: "+resourceBundle);
        localeBundle = (EditableResourceBundle)resourceBundle;
        if(localeBundle.getBundleSet()!=this) throw new AssertionError("EditableResourceBundle not for this EditableResourceBundleSet: "+localeBundle);
        if(!localeBundle.getBundleLocale().equals(locale)) throw new AssertionError("EditableResourceBundle not for this locale: "+locale);
        // EditableResourceBundle will have added the bundle to the bundles map.
    }
    return localeBundle;
}
def from_rdd_of_dataframes(self, rdd, column_idxs=None):
    """Take an RDD of Panda's DataFrames and return a Dataframe.

    If the columns and indexes are already known (e.g. applyMap)
    then supplying them with columnsIndexes will skip eveluating
    the first partition to determine index info."""
    def frame_to_spark_sql(frame):
        """Convert a Panda's DataFrame into Spark SQL Rows"""
        return [r.tolist() for r in frame.to_records()]

    def frame_to_schema_and_idx_names(frames):
        """Returns the schema and index names of the frames. Useful
        if the frame is large and we wish to avoid transfering
        the entire frame. Only bothers to apply once per partiton"""
        try:
            # NOTE(review): frames.next() is Python 2 only.
            frame = frames.next()
            return [(list(frame.columns), list(frame.index.names))]
        except StopIteration:
            return []

    # Store if the RDD was persisted so we don't uncache an
    # explicitly cached input.
    was_persisted = rdd.is_cached
    # If we haven't been supplied with the schema info cache the RDD
    # since we are going to eveluate the first partition and then eveluate
    # the entire RDD as part of creating a Spark DataFrame.
    (schema, index_names) = ([], [])
    if not column_idxs:
        rdd.cache()
        (schema, index_names) = rdd.mapPartitions(
            frame_to_schema_and_idx_names).first()
    else:
        (schema, index_names) = column_idxs
    # Add the index_names to the schema.
    index_names = _normalize_index_names(index_names)
    schema = index_names + schema
    ddf = DataFrame.from_schema_rdd(
        self.sql_ctx.createDataFrame(rdd.flatMap(frame_to_spark_sql),
                                     schema=schema))
    ddf._index_names = index_names
    if not was_persisted:
        rdd.unpersist()
    return ddf
def anchor_and_curate_genV_and_genJ(self, V_anchor_pos_file, J_anchor_pos_file):
    """Trim V and J germline sequences to the CDR3 region.

    Unproductive sequences have an empty string '' for the CDR3 region
    sequence.

    Edits the attributes genV and genJ

    Parameters
    ----------
    V_anchor_pos_file_name : str
        File name for the conserved residue (C) locations and functionality
        of each V genomic sequence.
    J_anchor_pos_file_name : str
        File name for the conserved residue (F/W) locations and
        functionality of each J genomic sequence.
    """
    V_anchor_pos = load_genomic_CDR3_anchor_pos_and_functionality(V_anchor_pos_file)
    J_anchor_pos = load_genomic_CDR3_anchor_pos_and_functionality(J_anchor_pos_file)

    # Each entry is indexed [0]=name, [1]=trimmed CDR3 slot, [2]=full
    # sequence (presumably -- confirm against genV/genJ construction).
    for V in self.genV:
        try:
            if V_anchor_pos[V[0]][0] > 0 and V_anchor_pos[V[0]][1] == 'F':  # Check for functionality
                V[1] = V[2][V_anchor_pos[V[0]][0]:]
            else:
                V[1] = ''
        except KeyError:
            # No anchor information: treat as unproductive.
            V[1] = ''
    for J in self.genJ:
        try:
            if J_anchor_pos[J[0]][0] > 0 and J_anchor_pos[J[0]][1] == 'F':  # Check for functionality
                # +3 keeps the conserved F/W codon itself.
                J[1] = J[2][:J_anchor_pos[J[0]][0]+3]
            else:
                J[1] = ''
        except KeyError:
            J[1] = ''
java | public static String encodeCookie(String string) {
if (string == null) {
return null;
}
string = string.replaceAll("%", "%25");
string = string.replaceAll(";", "%3B");
string = string.replaceAll(",", "%2C");
return string;
} |
def get(self, key, **kwargs):
    """
    Get the value of a key from etcd.

    example usage:

    .. code-block:: python

        >>> import etcd3
        >>> etcd = etcd3.client()
        >>> etcd.get('/thing/key')
        'hello world'

    :param key: key in etcd to get
    :param serializable: whether to allow serializable reads. This can
        result in stale reads
    :returns: value of key and metadata
    :rtype: bytes, ``KVMetadata``
    """
    range_response = self.get_response(key, **kwargs)
    if range_response.count < 1:
        # Key absent: keep the (value, metadata) tuple shape with Nones.
        return None, None
    else:
        kv = range_response.kvs.pop()
        return kv.value, KVMetadata(kv, range_response.header)
/**
 * Closes this reader and the underlying stream.  The scope stack is
 * replaced by a single CLOSED marker so any further read attempts are
 * rejected.
 *
 * @throws IOException if closing the underlying stream fails
 */
public void close() throws IOException {
    value = null;
    token = null;
    stack.clear();
    stack.add(JsonScope.CLOSED);
    in.close();
}
/**
 * Negative-learning step: any detected region whose overlap with the
 * tracked target is at or below config.overlapLower is fed to the
 * template (and, for ambiguous regions, fern) classifiers as a
 * negative example.
 */
protected void learnAmbiguousNegative(Rectangle2D_F64 targetRegion) {
    TldHelperFunctions.convertRegion(targetRegion, targetRegion_I32);

    if( detection.isSuccess() ) {
        TldRegion best = detection.getBest();

        // see if it found the correct solution
        double overlap = helper.computeOverlap(best.rect,targetRegion_I32);
        if( overlap <= config.overlapLower ) {
            template.addDescriptor(false,best.rect);
            // fern.learnFernNoise(false, best.rect );
        }

        // mark all ambiguous regions as bad
        List<ImageRectangle> ambiguous = detection.getAmbiguousRegions();
        for( ImageRectangle r : ambiguous ) {
            overlap = helper.computeOverlap(r,targetRegion_I32);
            if( overlap <= config.overlapLower ) {
                fern.learnFernNoise(false, r );
                template.addDescriptor(false,r);
            }
        }
    }
}
/**
 * Removes the given constraints from the network: simple-distance
 * constraints are unpacked into bounds and endpoint ids, then the
 * distance matrix is either restored from backups or recomputed.
 */
@Override
protected void removeConstraintsSub(Constraint[] con) {
    logger.finest("Trying to remove constraints " + Arrays.toString(con) + "...");
    if (con != null && con.length != 0) {
        Bounds[] tot = new Bounds[con.length];
        int[] from = new int[con.length];
        int[] to = new int[con.length];
        for (int i = 0; i < con.length; i++) {
            // Only SimpleDistanceConstraints are unpacked; other entries
            // leave null/zero slots in the parallel arrays.
            if (con[i] instanceof SimpleDistanceConstraint) {
                SimpleDistanceConstraint c = (SimpleDistanceConstraint)con[i];
                tot[i] = new Bounds(c.getMinimum(),c.getMaximum());
                from[i] = ((TimePoint)c.getFrom()).getID();
                to[i] = ((TimePoint)c.getTo()).getID();
            }
        }
        // true: restore from the distance-matrix backup; false: recompute.
        if (canRestoreDMatrix(con)) cDelete(tot,from,to,true);
        else {
            if (backupDMatrixSimple) resetDMatrixBackups();
            cDelete(tot,from,to,false);
        }
    }
}
/**
 * Monadic bind: runs this state computation, then feeds the produced
 * value to {@code f} and runs the resulting computation on the updated
 * state.
 */
@Override
public <B> State<S, B> flatMap(Function<? super A, ? extends Monad<B, State<S, ?>>> f) {
    return state(s -> run(s).into((a, s2) -> f.apply(a).<State<S, B>>coerce().run(s2)));
}
/**
 * Projects F onto the space of valid fundamental matrices by zeroing
 * its smallest singular value (enforcing rank 2), then recomposing
 * F = U * S' * V^T in place.
 *
 * @param F matrix being projected; overwritten with the result
 * @return false if the SVD decomposition failed
 */
protected boolean projectOntoFundamentalSpace( DMatrixRMaj F ) {
    if( !svdConstraints.decompose(F) ) {
        return false;
    }
    svdV = svdConstraints.getV(svdV,false);
    svdU = svdConstraints.getU(svdU,false);
    svdS = svdConstraints.getW(svdS);

    // Sort so the smallest singular value is at index (2,2).
    SingularOps_DDRM.descendingOrder(svdU, false, svdS, svdV, false);

    // the smallest singular value needs to be set to zero, unlike
    // (NOTE(review): original comment truncated here -- presumably
    // contrasting with the essential-matrix constraint; confirm.)
    svdS.set(2, 2, 0);

    // recompute F
    CommonOps_DDRM.mult(svdU, svdS, temp0);
    CommonOps_DDRM.multTransB(temp0,svdV, F);
    return true;
}
def load(cls, pipeline_name, frequency, subject_id, visit_id, from_study,
         path):
    """
    Loads a saved provenance object from a JSON file

    Parameters
    ----------
    pipeline_name : str
        Name of the pipeline the record belongs to
    frequency : str
        The frequency of the record
    subject_id : str | None
        The subject ID of the provenance record
    visit_id : str | None
        The visit ID of the provenance record
    from_study : str
        Name of the study the derivatives were created for
    path : str
        Path to the provenance file

    Returns
    -------
    record : Record
        The loaded provenance record
    """
    with open(path) as f:
        prov = json.load(f)
    return Record(pipeline_name, frequency, subject_id, visit_id,
                  from_study, prov)
/**
 * Wraps the given primitive iterator in a {@code DoubleStream}.
 *
 * @param iterator the source iterator, must not be {@code null}
 * @return a stream that pulls values from {@code iterator}
 * @throws NullPointerException if {@code iterator} is {@code null}
 */
@NotNull
public static DoubleStream of(@NotNull PrimitiveIterator.OfDouble iterator) {
    Objects.requireNonNull(iterator);
    return new DoubleStream(iterator);
}
def make_class_postinject_decorator(classkey, modname=None):
    """
    Builds a decorator that registers a function for post-injection into a class.

    Args:
        classkey : the class to be injected into
        modname : the global __name__ of the module you are injecting from

    Returns:
        func: decorator for injectable methods

    SeeAlso:
        make_class_method_decorator
    """
    if util_arg.VERBOSE or VERBOSE_CLASS:
        print('[util_class] register class_postinject classkey=%r, modname=%r'
              % (classkey, modname))
    if modname == '__main__':
        # Re-injecting from __main__ is not supported; hand back a no-op decorator.
        print('WARNING: cannot register class functions as __main__')
        return lambda func: func
    return functools.partial(decorate_postinject, classkey=classkey)
python | def _pad_nochord(target, axis=-1):
'''Pad a chord annotation with no-chord flags.
Parameters
----------
target : np.ndarray
the input data
axis : int
the axis along which to pad
Returns
-------
target_pad
`target` expanded by 1 along the specified `axis`.
The expanded dimension will be 0 when `target` is non-zero
before padding, and 1 otherwise.
'''
ncmask = ~np.max(target, axis=axis, keepdims=True)
return np.concatenate([target, ncmask], axis=axis) |
def delete(self, client=None):
    """API call: delete a sink via a DELETE request

    See
    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete

    :type client: :class:`~google.cloud.logging.client.Client` or
                  ``NoneType``
    :param client: the client to use.  If not passed, falls back to the
                   ``client`` stored on the current sink.
    """
    # Resolve the effective client: an explicit argument wins over the bound one.
    client = self._require_client(client)
    # Issue the DELETE for this sink's project/name pair.
    client.sinks_api.sink_delete(self.project, self.name)
def Parser(grammar, **actions):
    r"""Make a parsing function from a peglet grammar, defining the
    grammar's semantic actions with keyword arguments.

    The parsing function maps a string to a results tuple or raises
    Unparsable. (It can optionally take a rule name to start from, by
    default the first in the grammar.) It doesn't necessarily match
    the whole input, just a prefix.

    >>> nums = Parser(r"nums = num ,\s* nums | num num = (\d+) int", int=int)
    >>> nums('42, 137, and 0 are magic numbers')
    (42, 137)
    >>> nums('The magic numbers are 42, 137, and 0')
    Traceback (most recent call last):
    Unparsable: ('nums', '', 'The magic numbers are 42, 137, and 0')
    """
    # Collapse all whitespace to single spaces, then split on "name =" rule
    # headers; the leading ' ' lets a rule at position 0 still match.
    # Result shape: [prelude, lhs1, rhs1, lhs2, rhs2, ...]
    parts = re.split(' ('+_identifier+') += ',
                     ' '+re.sub(r'\s', ' ', grammar))
    if len(parts) == 1 or parts[0].strip():
        raise BadGrammar("Missing left hand side", parts[0])
    if len(set(parts[1::2])) != len(parts[1::2]):
        raise BadGrammar("Multiply-defined rule(s)", grammar)
    # Map each rule name to its alternatives; each alternative is a token list.
    rules = dict((lhs, [alt.split() for alt in (' '+rhs+' ').split(' | ')])
                 for lhs, rhs in zip(parts[1::2], parts[2::2]))
    # The default start rule is the first rule defined in the grammar.
    return lambda text, rule=parts[1]: _parse(rules, actions, rule, text)
def set(key, value, timeout = -1, adapter = MemoryAdapter):
    '''
    set cache by code, must set timeout length
    '''
    # Serialize once, then hand the pickled payload to the chosen backend.
    payload = pickle.dumps(value)
    if adapter(timeout = timeout).set(key, payload):
        return value
    return None
java | public static String findDefaultImageVersion( String rawVersion ) {
String rbcfVersion = rawVersion;
if( rbcfVersion == null
|| rbcfVersion.toLowerCase().endsWith( "snapshot" ))
rbcfVersion = LATEST;
return rbcfVersion;
} |
/**
 * Retrieves the POIs of a registered service inside a bounding box as GeoJSON.
 * The response is served from the cache when present; otherwise the remote
 * service is queried and the fresh response is stored for later requests.
 * Registered listeners are notified before the request and after parsing.
 *
 * @param id             identifier of the described service
 * @param minX           western bound of the query extent
 * @param minY           southern bound of the query extent
 * @param maxX           eastern bound of the query extent
 * @param maxY           northern bound of the query extent
 * @param optionalParams extra request parameters forwarded to the service
 * @return the GeoJSON document with the POIs
 * @throws Exception if the service lookup or remote request fails
 */
public String getPOIs(String id, double minX, double minY, double maxX, double maxY, List<Param> optionalParams)
    throws Exception {
    DescribeService describeService = getDescribeServiceByID(id);
    POIProxyEvent beforeEvent = new POIProxyEvent(POIProxyEventEnum.BeforeBrowseExtent, describeService,
        new Extent(minX, minY, maxX, maxY), null, null, null, null, null, null, null, null, null);
    notifyListenersBeforeRequest(beforeEvent);
    // Try the cache first; fall back to the remote service on a miss.
    String geoJSON = this.getCacheData(beforeEvent);
    boolean fromCache = true;
    if (geoJSON == null) {
        fromCache = false;
        geoJSON = getResponseAsGeoJSON(id, optionalParams, describeService, minX, minY, maxX, maxY, 0, 0,
            beforeEvent);
    }
    POIProxyEvent afterEvent = new POIProxyEvent(POIProxyEventEnum.AfterBrowseExtent, describeService,
        new Extent(minX, minY, maxX, maxY), null, null, null, null, null, null, null, geoJSON, null);
    // Only freshly fetched responses are written back to the cache.
    if (!fromCache) {
        storeData(afterEvent);
    }
    notifyListenersAfterParse(afterEvent);
    return geoJSON;
}
def _ping(self, uid, addr, port):
    """
    Just say hello so that pymlgame knows that your controller is still alive. Unused controllers will be deleted
    after a while. This function is also used to update the address and port of the controller if it has changed.

    :param uid: Unique id of the controller
    :param addr: Address of the controller
    :param port: Port that the controller listens on
    :type uid: str
    :type addr: str
    :type port: int
    """
    try:
        record = self.controllers[uid]
    except KeyError:
        # Unknown controller -> ignore the ping silently.
        return
    # Refresh the controller's endpoint and its last-seen timestamp.
    record[0] = addr
    record[1] = port
    record[3] = time.time()
    self.queue.put_nowait(Event(uid, E_PING))
def set_resize_parameters(
    self,
    degrad=6,
    labels=None,
    resize_mm=None,
    resize_voxel_number=None,
):
    """
    Set the parameters used to resample the working segmentation.

    set_input_data() should be called before

    :param degrad: degradation (downscaling) factor; stored as int
    :param labels: labels selecting the part of the segmentation to use
    :param resize_mm: target isotropic voxel size in mm; when None it is
        derived from resize_voxel_number or from the mean input voxel size
    :param resize_voxel_number: requested number of voxels; when given it
        takes precedence and resize_mm is derived from it
    :return:
    """
    logger.debug("set_resize_parameters(\ndegrad={}, \nlabels={}\nresize_mm={}\nresize_voxel_number={}".format(
        degrad, labels, resize_mm, resize_voxel_number
    ))
    degrad = int(degrad)
    self.degrad = degrad
    self.labels = labels
    segmentation = self._select_labels(self.segmentation, labels)
    if resize_voxel_number is not None:
        # Derive the voxel edge length from the requested voxel count.
        nvoxels = np.sum(segmentation > 0)
        volume = nvoxels * np.prod(self.voxelsize_mm)
        voxel_volume = volume / float(resize_voxel_number)
        resize_mm = voxel_volume ** (1.0 / 3.0)
    elif resize_mm is None:
        # BUGFIX: an explicitly passed resize_mm was previously always
        # overwritten here; only fall back to the mean input voxel size
        # when the caller did not provide a value.
        resize_mm = np.mean(self.voxelsize_mm)
    if np.sum(np.abs(self.resize_mm_1d - resize_mm)) != 0:
        # Resize parameter changed: invalidate the cached resampled data.
        self.resized_segmentation = None
        self.resized_binar_segmentation = None
    self.resize_mm_1d = resize_mm
def enqueue(self, item, queue=None):
    """
    Enqueue items.
    If you define "self.filter" (sequence),
    this method put the item to queue after filtering.
    "self.filter" operates as blacklist.
    This method expects that
    "item" argument has dict type "data" attribute.
    """
    target = self.queue if queue is None else queue
    # Drop the item when its key matches any blacklisted fragment.
    if self.invalid_key_list is not None:
        key = item.data['key']
        for blocked in self.invalid_key_list:
            if blocked in key:
                self.logger.debug(
                    '{key} is filtered by "invalid_key_list".'
                    ''.format(key=key, plugin=__name__)
                )
                return False
    try:
        target.put(item, block=False)
        return True
    except Full:
        self.logger.error('Blackbird item Queue is Full!!!')
        return False
java | public static INDArrayIndex[] fillIn(int[] shape, INDArrayIndex... indexes) {
if (shape.length == indexes.length)
return indexes;
INDArrayIndex[] newIndexes = new INDArrayIndex[shape.length];
System.arraycopy(indexes, 0, newIndexes, 0, indexes.length);
for (int i = indexes.length; i < shape.length; i++) {
newIndexes[i] = NDArrayIndex.interval(0, shape[i]);
}
return newIndexes;
} |
def do_page_truncate(self, args: List[str]):
    """Read in a text file and display its output in a pager, truncating long lines if they don't fit.

    Truncated lines can still be accessed by scrolling to the right using the arrow keys.

    Usage: page_truncate <file_path>
    """
    # cmd2 shows this docstring as the command help, so the usage line above
    # must name the actual command (it previously said "page_chop").
    if not args:
        self.perror('page_truncate requires a path to a file as an argument', traceback_war=False)
        return
    # chop=True makes the pager truncate long lines instead of wrapping them.
    self.page_file(args[0], chop=True)
def fix_module(job):
    """
    Fix for tasks without a module. Provides backwards compatibility with < 0.1.5

    Tries each module configured in ``RQ_JOBS_MODULE`` and, when one of them
    defines the job's task, prefixes the task name with that module path.

    :param job: job object with a ``task`` attribute; mutated in place
    :return: the (possibly updated) job
    """
    modules = settings.RQ_JOBS_MODULE
    # Accept a single module path as well as a list/tuple of paths
    # (the old `type(...) == tuple` check crashed on list settings).
    if not isinstance(modules, (list, tuple)):
        modules = [modules]
    for module in modules:
        try:
            module_match = importlib.import_module(module)
            if hasattr(module_match, job.task):
                job.task = '{}.{}'.format(module, job.task)
                break
        except ImportError:
            # Skip modules that cannot be imported and try the next one.
            continue
    return job
/**
 * Generates a {@code ProtoAdapter} subclass for the given schema enum.
 * The adapter encodes constants as varints via {@code toValue} and decodes
 * them via {@code fromValue}; each enum constant becomes a protected field
 * that is injected through the generated constructor.
 *
 * @param nameAllocator   allocator providing collision-free Java names
 * @param type            the schema enum being generated
 * @param javaType        the Java class of the enum values
 * @param adapterJavaType the class name for the generated adapter
 * @return the generated adapter type spec
 */
private TypeSpec enumAdapter(NameAllocator nameAllocator, EnumType type, ClassName javaType,
    ClassName adapterJavaType) {
  String value = nameAllocator.get("value");
  String i = nameAllocator.get("i");
  String reader = nameAllocator.get("reader");
  String writer = nameAllocator.get("writer");
  TypeSpec.Builder builder = TypeSpec.classBuilder(adapterJavaType.simpleName());
  builder.superclass(adapterOf(javaType));
  builder.addModifiers(PUBLIC);
  // Constructor: calls super(VARINT, <enum>.class) and assigns one field per constant.
  MethodSpec.Builder constructorBuilder = MethodSpec.constructorBuilder();
  constructorBuilder.addModifiers(PUBLIC);
  constructorBuilder.addStatement("super($T.VARINT, $T.class)", FieldEncoding.class, javaType);
  for (EnumConstant constant : type.constants()) {
    String name = nameAllocator.get(constant);
    FieldSpec.Builder fieldBuilder = FieldSpec.builder(javaType, name)
        .addModifiers(PROTECTED, FINAL);
    if (!constant.documentation().isEmpty()) {
      fieldBuilder.addJavadoc("$L\n", sanitizeJavadoc(constant.documentation()));
    }
    if ("true".equals(constant.options().get(ENUM_DEPRECATED))) {
      fieldBuilder.addAnnotation(Deprecated.class);
    }
    builder.addField(fieldBuilder.build());
    constructorBuilder.addParameter(javaType, name);
    constructorBuilder.addStatement("this.$N = $N", name, name);
  }
  builder.addMethod(constructorBuilder.build());
  // toValue: maps an enum instance to its wire tag, or -1 when unknown.
  MethodSpec.Builder toValueBuilder = MethodSpec.methodBuilder("toValue")
      .addModifiers(PROTECTED)
      .returns(int.class)
      .addParameter(javaType, value);
  for (EnumConstant constant : type.constants()) {
    String name = nameAllocator.get(constant);
    toValueBuilder.addStatement("if ($N.equals($N)) return $L", value, name, constant.tag());
  }
  toValueBuilder.addStatement("return $L", -1);
  builder.addMethod(toValueBuilder.build());
  // fromValue: maps a wire tag back to the enum instance via a switch.
  MethodSpec.Builder fromValueBuilder = MethodSpec.methodBuilder("fromValue")
      .addModifiers(PROTECTED)
      .returns(javaType)
      .addParameter(int.class, value);
  fromValueBuilder.beginControlFlow("switch ($N)", value);
  for (EnumConstant constant : type.constants()) {
    String name = nameAllocator.get(constant);
    fromValueBuilder.addStatement("case $L: return $N", constant.tag(), name);
  }
  fromValueBuilder.addStatement("default: throw new $T($N, $T.class)",
      EnumConstantNotFoundException.class, value, javaType);
  fromValueBuilder.endControlFlow();
  builder.addMethod(fromValueBuilder.build());
  // encodedSize: size of the varint encoding of the constant's tag.
  builder.addMethod(MethodSpec.methodBuilder("encodedSize")
      .addAnnotation(Override.class)
      .addModifiers(PUBLIC)
      .returns(int.class)
      .addParameter(javaType, value)
      .addStatement("return $T.UINT32.encodedSize(toValue($N))", ProtoAdapter.class, value)
      .build());
  // encode: writes the tag as a varint, rejecting unknown constants.
  builder.addMethod(MethodSpec.methodBuilder("encode")
      .addAnnotation(Override.class)
      .addModifiers(PUBLIC)
      .addParameter(ProtoWriter.class, writer)
      .addParameter(javaType, value)
      .addException(IOException.class)
      .addStatement("int $N = toValue($N)", i, value)
      .addStatement("if ($N == $L) throw new $T($S + $N)",
          i, -1, ProtocolException.class, "Unexpected enum constant: ", value)
      .addStatement("$N.writeVarint32($N)", writer, i)
      .build());
  // decode: reads a varint and converts it back through fromValue.
  builder.addMethod(MethodSpec.methodBuilder("decode")
      .addAnnotation(Override.class)
      .addModifiers(PUBLIC)
      .returns(javaType)
      .addParameter(ProtoReader.class, reader)
      .addException(IOException.class)
      .addStatement("int $N = $N.readVarint32()", value, reader)
      .addStatement("return fromValue($N)", value)
      .build());
  return builder.build();
}
java | public Object execute(Context context) {
for (Map.Entry<String, Integer> entry : ruleNameToExpectedFireCountMap.entrySet()) {
String ruleName = entry.getKey();
int expectedFireCount = entry.getValue();
int actualFireCount = firedRuleCounter.getRuleNameFireCount(ruleName);
Assert.assertEquals("The rule (" + ruleName + ")'s fireCount is incorrect.",
expectedFireCount, actualFireCount);
}
return null;
} |
def get_netapp_data(self):
    """ Retrieve netapp volume information

    returns ElementTree of netapp volume information
    """
    response = self.server.invoke('volume-list-info')
    if response.results_status() == 'failed':
        self.log.error(
            'While using netapp API failed to retrieve '
            'volume-list-info for netapp filer %s' % self.device)
        return
    # Parse the raw API response and keep only the <volumes> subtree.
    return ET.fromstring(response.sprintf()).find('volumes')
def nvrtcCreateProgram(self, src, name, headers, include_names):
    """
    Creates and returns a new NVRTC program object.

    :param src: program source code (str)
    :param name: program name used in diagnostics (str)
    :param headers: header source strings included by ``src``
    :param include_names: names corresponding to ``headers``, in order
    :return: ctypes handle (``c_void_p``) to the created program
    """
    # Out-parameter that receives the nvrtcProgram handle.
    res = c_void_p()
    # Marshal the header sources and their include names into C string arrays.
    headers_array = (c_char_p * len(headers))()
    headers_array[:] = encode_str_list(headers)
    include_names_array = (c_char_p * len(include_names))()
    include_names_array[:] = encode_str_list(include_names)
    code = self._lib.nvrtcCreateProgram(byref(res),
        c_char_p(encode_str(src)), c_char_p(encode_str(name)),
        len(headers),
        headers_array, include_names_array)
    # Raise if NVRTC reported a non-success status code.
    self._throw_on_error(code)
    return res
def is_prime(number):
    """
    Probabilistic (Fermat) primality test. Function lifted from online
    resource:
    http://www.codeproject.com/Articles/691200/Primality-test-algorithms-Prime-test-The-fastest-w

    This function is distributed under a separate licence:
    This article, along with any associated source code and files, is \
    licensed under The Code Project Open License (CPOL)

    :type number: int
    :param number: Integer to test for primality

    :returns: bool

    >>> is_prime(4)
    False
    >>> is_prime(3)
    True
    """
    if number <= 1:
        # 1 (and smaller values) are not prime by definition.
        return False
    # Run the Fermat test a few times with random witnesses in [2, n-1].
    for _ in range(3):
        witness = random.randint(2, number - 1)
        # A prime must satisfy a^(n-1) == 1 (mod n) for every witness a.
        if pow(witness, number - 1, number) != 1:
            return False
    return True
def _should_fetch_reason(self) -> Tuple[bool, str]:
    '''Return info about whether the URL should be fetched.

    Returns:
        tuple: A two item tuple:

        1. bool: If True, the URL should be fetched.
        2. str: A short reason string explaining the verdict.
    '''
    is_redirect = False
    if self._strong_redirects:
        try:
            tracker = self._web_client_session.redirect_tracker
            is_redirect = tracker.is_redirect()
        except AttributeError:
            # No redirect tracker available; treat the request as a non-redirect.
            pass
    return self._fetch_rule.check_subsequent_web_request(
        self._item_session, is_redirect=is_redirect)
/**
 * Best-effort conversion of {@code value} to the requested target type.
 * Handles String-to-primitive/enum/date parsing, Number widening/narrowing,
 * and Collection/array interconversion. Values that are null, already
 * assignable, or not convertible are returned unchanged.
 *
 * @param value the value to convert; may be null
 * @param type  the target type; may be null
 * @return the converted value, or the original value when no conversion applies
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Object convert(Object value, Class<?> type) {
    // No work needed when there is nothing to convert or it already fits.
    if (value == null || type == null || type.isAssignableFrom(value.getClass())) {
        return value;
    }
    if (value instanceof String) {
        // String source: parse into the requested primitive/wrapper/enum/date type.
        String string = (String) value;
        if (char.class.equals(type) || Character.class.equals(type)) {
            if (string.length() != 1) {
                throw new IllegalArgumentException(String.format("can not convert String(%s) to char!" +
                    " when convert String to char, the String must only 1 char.", string));
            }
            return string.charAt(0);
        } else if (type.isEnum()) {
            return Enum.valueOf((Class<Enum>) type, string);
        } else if (type == BigInteger.class) {
            return new BigInteger(string);
        } else if (type == BigDecimal.class) {
            return new BigDecimal(string);
        } else if (type == Short.class || type == short.class) {
            return Short.valueOf(string);
        } else if (type == Integer.class || type == int.class) {
            return Integer.valueOf(string);
        } else if (type == Long.class || type == long.class) {
            return Long.valueOf(string);
        } else if (type == Double.class || type == double.class) {
            return new Double(string);
        } else if (type == Float.class || type == float.class) {
            return new Float(string);
        } else if (type == Byte.class || type == byte.class) {
            return Byte.valueOf(string);
        } else if (type == Boolean.class || type == boolean.class) {
            return Boolean.valueOf(string);
        } else if (type == Date.class || type == java.sql.Date.class || type == java.sql.Time.class ||
            type == java.sql.Timestamp.class) {
            try {
                if (type == Date.class) {
                    return DateUtils.strToDate(string, DateUtils.DATE_FORMAT_TIME);
                } else if (type == java.sql.Date.class) {
                    return new java.sql.Date(DateUtils.strToLong(string));
                } else if (type == java.sql.Timestamp.class) {
                    return new java.sql.Timestamp(DateUtils.strToLong(string));
                } else {
                    return new java.sql.Time(DateUtils.strToLong(string));
                }
            } catch (ParseException e) {
                throw new IllegalStateException("Failed to parse date " + value + " by format " +
                    DateUtils.DATE_FORMAT_TIME + ", cause: " + e.getMessage(), e);
            }
        } else if (type == Class.class) {
            return ClassTypeUtils.getClass((String) value);
        }
    } else if (value instanceof Number) {
        // Number source: widen/narrow to the requested numeric or date type.
        Number number = (Number) value;
        if (type == byte.class || type == Byte.class) {
            return number.byteValue();
        } else if (type == short.class || type == Short.class) {
            return number.shortValue();
        } else if (type == int.class || type == Integer.class) {
            return number.intValue();
        } else if (type == long.class || type == Long.class) {
            return number.longValue();
        } else if (type == float.class || type == Float.class) {
            return number.floatValue();
        } else if (type == double.class || type == Double.class) {
            return number.doubleValue();
        } else if (type == BigInteger.class) {
            return BigInteger.valueOf(number.longValue());
        } else if (type == BigDecimal.class) {
            return BigDecimal.valueOf(number.doubleValue());
        } else if (type == Date.class) {
            // Numeric values are interpreted as epoch milliseconds.
            return new Date(number.longValue());
        } else if (type == java.sql.Date.class) {
            return new java.sql.Date(number.longValue());
        } else if (type == java.sql.Time.class) {
            return new java.sql.Time(number.longValue());
        } else if (type == java.sql.Timestamp.class) {
            return new java.sql.Timestamp(number.longValue());
        }
    } else if (value instanceof Collection) {
        // Collection source: convert to an array or another collection type.
        Collection collection = (Collection) value;
        if (type.isArray()) {
            int length = collection.size();
            Object array = Array.newInstance(type.getComponentType(), length);
            int i = 0;
            for (Object item : collection) {
                Array.set(array, i++, item);
            }
            return array;
        } else if (!type.isInterface()) {
            // Concrete collection class: try its no-arg constructor first.
            try {
                Collection result = (Collection) type.newInstance();
                result.addAll(collection);
                return result;
            } catch (Throwable ignore) { // NOPMD
            }
        } else if (type == List.class) {
            return new ArrayList<Object>(collection);
        } else if (type == Set.class) {
            return new HashSet<Object>(collection);
        }
    } else if (value.getClass().isArray() && Collection.class.isAssignableFrom(type)) {
        // Array source: copy the elements into an appropriate collection.
        Collection collection;
        if (!type.isInterface()) {
            try {
                collection = (Collection) type.newInstance();
            } catch (Throwable e) {
                collection = new ArrayList<Object>();
            }
        } else if (type == Set.class) {
            collection = new HashSet<Object>();
        } else {
            collection = new ArrayList<Object>();
        }
        int length = Array.getLength(value);
        for (int i = 0; i < length; i++) {
            collection.add(Array.get(value, i));
        }
        return collection;
    }
    // Fallback: return the value unchanged when no conversion rule matched.
    return value;
}
java | public static boolean registerVIVOPush(Application application, String profile) {
vivoDeviceProfile = profile;
com.vivo.push.PushClient client = com.vivo.push.PushClient.getInstance(application.getApplicationContext());
try {
client.checkManifest();
client.initialize();
return true;
} catch (com.vivo.push.util.VivoPushException ex) {
printErrorLog("register error, mainifest is incomplete! details=" + ex.getMessage());
return false;
}
} |
def NewFromLab(l, a, b, alpha=1.0, wref=_DEFAULT_WREF):
    '''Create a new instance based on the specifed CIE-LAB values.

    Parameters:
      :l:
        The L component [0...100]
      :a:
        The a component [-1...1]
      :b:
        The a component [-1...1]
      :alpha:
        The color transparency [0...1], default is opaque
      :wref:
        The whitepoint reference, default is 2° D65.

    Returns:
      A grapefruit.Color instance.

    >>> str(Color.NewFromLab(66.9518, 0.43084, 0.739692))
    '(1, 0.5, 1.09491e-08, 1)'
    >>> str(Color.NewFromLab(66.9518, 0.43084, 0.739692, wref=Color.WHITE_REFERENCE['std_D50']))
    '(1.01238, 0.492011, -0.14311, 1)'
    >>> str(Color.NewFromLab(66.9518, 0.43084, 0.739692, 0.5))
    '(1, 0.5, 1.09491e-08, 0.5)'
    >>> str(Color.NewFromLab(66.9518, 0.43084, 0.739692, 0.5, Color.WHITE_REFERENCE['std_D50']))
    '(1.01238, 0.492011, -0.14311, 0.5)'
    '''
    # Convert L*a*b* -> XYZ -> RGB, carrying alpha and the whitepoint through.
    return Color(Color.XyzToRgb(*Color.LabToXyz(l, a, b, wref)), 'rgb', alpha, wref)
def _get_web_session(self):
    """
    :return: authenticated web session
    :rtype: :class:`requests.Session`
    :raises: :class:`RuntimeError` when session is unavailable
    """
    backend = self.backend
    # A MobileWebAuth backend carries its web session directly.
    if isinstance(backend, MobileWebAuth):
        return backend.session
    if not backend.logged_on:
        raise RuntimeError("SteamClient instance is not connected")
    sess = backend.get_web_session()
    if sess is None:
        raise RuntimeError("Failed to get a web session. Try again in a few minutes")
    return sess
/**
 * Builds the BER-encoded request value for the LDAP "modify password"
 * extended operation: a SEQUENCE containing the target DN and the new
 * password, both encoded with the configured LDAP character set.
 *
 * @return the encoded request bytes
 */
public byte[] getEncodedValue()
{
    final String characterEncoding = this.chaiConfiguration.getSetting( ChaiSetting.LDAP_CHARACTER_ENCODING );
    final byte[] password = modifyPassword.getBytes( Charset.forName( characterEncoding ) );
    final byte[] dn = modifyDn.getBytes( Charset.forName( characterEncoding ) );

    // Sequence tag (1) + sequence length (1) + dn tag (1) +
    // dn length (1) + dn (variable) + password tag (1) +
    // password length (1) + password (variable)
    final int encodedLength = 6 + dn.length + password.length;
    final byte[] encoded = new byte[encodedLength];
    int valueI = 0;

    // sequence start
    encoded[valueI++] = ( byte ) 0x30;

    // length of body
    // NOTE(review): all lengths here use single-byte (short form) BER
    // encoding, which assumes dn + password total fewer than ~124 bytes —
    // confirm longer values cannot occur here.
    encoded[valueI++] = ( byte ) ( 4 + dn.length + password.length );
    encoded[valueI++] = LDAP_TAG_EXOP_X_MODIFY_PASSWD_ID;
    encoded[valueI++] = ( byte ) dn.length;
    System.arraycopy( dn, 0, encoded, valueI, dn.length );
    valueI += dn.length;
    encoded[valueI++] = LDAP_TAG_EXOP_X_MODIFY_PASSWD_NEW;
    encoded[valueI++] = ( byte ) password.length;
    System.arraycopy( password, 0, encoded, valueI, password.length );
    valueI += password.length;
    return encoded;
}
java | public Variable[] intersect(Scope scope) {
Collection<Variable> intersection = new ArrayList<Variable>();
// A set of variable names that have been moved into the intersection.
Set<String> matchedNames = new HashSet<String>(7);
intersectFrom(this, scope, matchedNames, intersection);
intersectFrom(scope, this, matchedNames, intersection);
Variable[] vars = new Variable[intersection.size()];
return intersection.toArray(vars);
} |
/**
 * Applies WCM-specific decoration to the rendered media element when the
 * request runs in edit or preview mode: content-finder drag&drop, custom
 * in-place-editor crop ratios, and (in preview) diff decoration.
 *
 * @param mediaElement the rendered markup element; may be null
 * @param media        the resolved media object
 */
protected void applyWcmMarkup(@Nullable HtmlElement<?> mediaElement, @NotNull Media media) {
    // further processing in edit or preview mode
    Resource resource = media.getMediaRequest().getResource();
    if (mediaElement != null && resource != null && wcmMode != null) {
        switch (wcmMode) {
            case EDIT:
                // enable drag&drop from content finder
                media.getMediaSource().enableMediaDrop(mediaElement, media.getMediaRequest());
                // set custom IPE crop ratios
                media.getMediaSource().setCustomIPECropRatios(mediaElement, media.getMediaRequest());
                break;
            case PREVIEW:
                // enable drag&drop from content finder
                media.getMediaSource().enableMediaDrop(mediaElement, media.getMediaRequest());
                // add diff decoration
                if (request != null) {
                    String refProperty = StringUtils.defaultString(media.getMediaRequest().getRefProperty(),
                        mediaHandlerConfig.getMediaRefProperty());
                    MediaMarkupBuilderUtil.addDiffDecoration(mediaElement, resource, refProperty, request, mediaHandlerConfig);
                }
                // set custom IPE crop ratios
                media.getMediaSource().setCustomIPECropRatios(mediaElement, media.getMediaRequest());
                break;
            default:
                // do nothing (e.g. DISABLED mode renders without decoration)
        }
    }
}
java | private int countOverlap(CharSequence c1, int start1,
CharSequence c2, int start2) {
// The maxium overlap is the number of characters in the sequences
int maxOverlap = Math.min(c1.length() - start1, c2.length() - start2);
int overlap = 0;
for (; overlap < maxOverlap; ++overlap) {
if (c1.charAt(overlap + start1) != c2.charAt(overlap + start2)) {
break;
}
}
return overlap;
} |
/**
 * Finds the method of {@code clazz} matching the given property, delegating
 * to {@code find(TypeDef, Property, boolean)} with strict matching disabled.
 *
 * @param clazz    the type definition to search
 * @param property the property whose accessor is wanted
 * @return the matching method
 */
public static Method find(TypeDef clazz, Property property) {
    return find(clazz, property, false);
}
def output_data_ports(self, output_data_ports):
    """ Setter for _output_data_ports field

    See property

    :param dict output_data_ports: Dictionary that maps :class:`int` data_port_ids onto values of type
                                   :class:`rafcon.core.state_elements.data_port.OutputDataPort`
    :raises exceptions.TypeError: if the output_data_ports parameter has the wrong type
    :raises exceptions.AttributeError: if the key of the output dictionary and the id of the data port do not match
    """
    if not isinstance(output_data_ports, dict):
        raise TypeError("output_data_ports must be of type dict")
    if [port_id for port_id, port in output_data_ports.items() if not port_id == port.data_port_id]:
        raise AttributeError("The key of the output dictionary and the id of the data port do not match")

    # This is a fix for older state machines, which didn't distinguish between input and output ports
    for port_id, port in output_data_ports.items():
        if not isinstance(port, OutputDataPort):
            if isinstance(port, DataPort):
                # Upgrade a plain DataPort to an OutputDataPort in place.
                port = OutputDataPort(port.name, port.data_type, port.default_value, port.data_port_id)
                output_data_ports[port_id] = port
            else:
                raise TypeError("Elements of output_data_ports must be of type OutputDataPort, given: {0}".format(
                    type(port).__name__))

    old_output_data_ports = self._output_data_ports
    self._output_data_ports = output_data_ports
    for port_id, port in output_data_ports.items():
        try:
            port.parent = self
        except ValueError:
            # Reparenting failed; roll back to the previous ports and re-raise.
            self._output_data_ports = old_output_data_ports
            raise

    # check that all old_output_data_ports are no more referencing self as there parent
    for old_output_data_port in old_output_data_ports.values():
        if old_output_data_port not in self._output_data_ports.values() and old_output_data_port.parent is self:
            old_output_data_port.parent = None
java | public void setPagerTransformer(boolean reverseDrawingOrder,BaseTransformer transformer){
mViewPagerTransformer = transformer;
mViewPagerTransformer.setCustomAnimationInterface(mCustomAnimation);
mViewPager.setPageTransformer(reverseDrawingOrder,mViewPagerTransformer);
} |
def _register(self):
    """Register this client with the device over a websocket connection.

    Returns False when the connection cannot be established; otherwise
    sends the registration payload and always closes the socket.
    """
    uri = "ws://{}:{}".format(self.ip, self.port)
    logger.debug('register on %s', uri)
    try:
        websocket = yield from websockets.connect(
            uri, timeout=self.timeout_connect)
    except Exception:
        # Catch connection failures only. The previous bare 'except:' also
        # swallowed cancellation and exit requests.
        logger.error('register failed to connect to %s', uri)
        return False
    logger.debug('register websocket connected to %s', uri)
    try:
        yield from self._send_register_payload(websocket)
    finally:
        logger.debug('close register connection to %s', uri)
        yield from websocket.close()
java | @Override
public DataSet vectorize(InputStream is, String label) {
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
String line = "";
StringBuilder builder = new StringBuilder();
while ((line = reader.readLine()) != null) {
builder.append(line);
}
return vectorize(builder.toString(), label);
} catch (Exception e) {
throw new RuntimeException(e);
}
} |
/**
 * ANTLR-generated lexer rule for IDENT_LIST: matches a brace-enclosed,
 * comma-separated list of object identifiers, e.g. {a, b, c}.
 * Generated code — kept structurally as emitted by ANTLR.
 */
public final void mIDENT_LIST() throws RecognitionException {
    try {
        int _type = IDENT_LIST;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // BELScript.g:269:11: ( '{' OBJECT_IDENT ( COMMA OBJECT_IDENT )* '}' )
        // BELScript.g:270:5: '{' OBJECT_IDENT ( COMMA OBJECT_IDENT )* '}'
        {
            match('{');
            mOBJECT_IDENT();
            // BELScript.g:270:22: ( COMMA OBJECT_IDENT )*
            loop3:
            do {
                int alt3=2;
                int LA3_0 = input.LA(1);
                // A space or comma signals another list element follows.
                if ( (LA3_0==' '||LA3_0==',') ) {
                    alt3=1;
                }
                switch (alt3) {
                    case 1 :
                        // BELScript.g:270:23: COMMA OBJECT_IDENT
                        {
                            mCOMMA();
                            mOBJECT_IDENT();
                        }
                        break;
                    default :
                        break loop3;
                }
            } while (true);
            match('}');
        }
        state.type = _type;
        state.channel = _channel;
    }
    finally {
    }
}
def compare(self, remotedir = None, localdir = None, skip_remote_only_dirs = False):
    ''' Usage: compare [remotedir] [localdir] - \
compare the remote directory with the local directory
    remotedir - the remote directory at Baidu Yun (after app's directory). \
if not specified, it defaults to the root directory.
    localdir - the local directory, if not specified, it defaults to the current directory.
    skip_remote_only_dirs - skip remote-only sub-directories (faster if the remote \
directory is much larger than the local one). it defaults to False.
    '''
    same, diff, local, remote = self.__compare(
        get_pcs_path(remotedir), localdir, str2bool(skip_remote_only_dirs))

    # Print every category under its heading.
    sections = (
        ("==== Same files ===", same),
        ("==== Different files ===", diff),
        ("==== Local only ====", local),
        ("==== Remote only ====", remote))
    for heading, entries in sections:
        pr(heading)
        for entry in entries:
            pr("{} - {}".format(entry[0], entry[1]))

    pr("\nStatistics:")
    pr("--------------------------------")
    pr("Same: {}".format(len(same)))
    pr("Different: {}".format(len(diff)))
    pr("Local only: {}".format(len(local)))
    pr("Remote only: {}".format(len(remote)))

    # Keep the raw results available to callers.
    self.result['same'] = same
    self.result['diff'] = diff
    self.result['local'] = local
    self.result['remote'] = remote

    return const.ENoError
java | public List<Duplet<String, ArrayList<WComponent>>> getTerms() {
Map<String, Duplet<String, ArrayList<WComponent>>> componentsByTerm = new HashMap<>();
List<Duplet<String, ArrayList<WComponent>>> result = new ArrayList<>();
List<WComponent> childList = content.getComponentModel().getChildren();
if (childList != null) {
for (int i = 0; i < childList.size(); i++) {
WComponent child = childList.get(i);
String term = child.getTag();
Duplet<String, ArrayList<WComponent>> termComponents = componentsByTerm.get(term);
if (termComponents == null) {
termComponents = new Duplet<>(term,
new ArrayList<WComponent>());
componentsByTerm.put(term, termComponents);
result.add(termComponents);
}
termComponents.getSecond().add(child);
}
}
return result;
} |
def notify(self, instance, old, new):
    """
    Call all callback functions with the current value

    Each callback will either be called using
    callback(new) or callback(old, new) depending
    on whether ``echo_old`` was set to `True` when calling
    :func:`~echo.add_callback`

    Parameters
    ----------
    instance
        The instance to consider
    old
        The old value of the property
    new
        The new value of the property
    """
    if self._disabled.get(instance, False):
        # Notifications for this instance are switched off.
        return
    one_arg_callbacks = self._callbacks.get(instance, [])
    two_arg_callbacks = self._2arg_callbacks.get(instance, [])
    for callback in one_arg_callbacks:
        callback(new)
    for callback in two_arg_callbacks:
        callback(old, new)
def convex_conj(self):
    r"""The convex conjugate functional of the quadratic form.

    Notes
    -----
    The convex conjugate of the quadratic form :math:`<x, Ax> + <b, x> + c`
    is given by

    .. math::
        (<x, Ax> + <b, x> + c)^* (x) =
        <(x - b), A^-1 (x - b)> - c =
        <x , A^-1 x> - <x, A^-* b> - <x, A^-1 b> + <b, A^-1 b> - c.

    If the quadratic part of the functional is zero it is instead given
    by a translated indicator function on zero, i.e., if

    .. math::
        f(x) = <b, x> + c,

    then

    .. math::
        f^*(x^*) =
        \begin{cases}
            -c & \text{if } x^* = b \\
            \infty & \text{else.}
        \end{cases}

    See Also
    --------
    IndicatorZero
    """
    if self.operator is None:
        # Purely affine functional: conjugate is an indicator on {vector}.
        tmp = IndicatorZero(space=self.domain, constant=-self.constant)
        if self.vector is None:
            return tmp
        else:
            return tmp.translated(self.vector)

    if self.vector is None:
        # Handle trivial case separately
        return QuadraticForm(operator=self.operator.inverse,
                             constant=-self.constant)
    else:
        # Compute the needed variables
        opinv = self.operator.inverse
        # See the Notes section for the derivation of these terms.
        vector = -opinv.adjoint(self.vector) - opinv(self.vector)
        constant = self.vector.inner(opinv(self.vector)) - self.constant

        # Create new quadratic form
        return QuadraticForm(operator=opinv,
                             vector=vector,
                             constant=constant)
python | def _count_by_date(self, fname, all_dates):
"""
reads a logfile and returns a dictionary by date
showing the count of log entries
"""
if not os.path.isfile(fname):
return {}
d_log_sum = {}
with open(fname, "r") as raw_log:
for line in raw_log:
cols = line.split(',')
dte = cols[0].strip('"')[0:10].replace('-', '')
all_dates.append(dte)
if dte in d_log_sum:
d_log_sum[dte] += 1
else:
d_log_sum[dte] = 1
return d_log_sum |
/**
 * Builds a {@code DocumentRepository} for the given environment using the
 * supplied credentials.
 *
 * @param env  the target environment type
 * @param user user name for the connection
 * @param pwd  password for the connection
 * @return the repository produced by the factory converter
 * @throws Exception if the factory conversion fails
 */
public DocumentRepository asDocumentRepository(EnvironmentType env, String user, String pwd) throws Exception
{
    return (DocumentRepository)new FactoryConverter().convert(type.asFactoryArguments(this, env, true, user, pwd));
}
python | def _parse_option(option):
"""
Parse a 'key=val' option string into a python (key, val) pair
:param option: str
:return: tuple
"""
try:
key, val = option.split("=", 1)
except ValueError:
return option, True
try:
val = json.loads(val)
except json.JSONDecodeError:
pass
return key, val |
/**
 * Converts a literal value to the corresponding {@code ServiceSimpleType}
 * enumerator (EMF-generated factory method).
 *
 * @param eDataType    the EMF data type being instantiated
 * @param initialValue the literal to convert
 * @return the matching enumerator
 * @throws IllegalArgumentException if the literal matches no enumerator
 */
public ServiceSimpleType createServiceSimpleTypeFromString(EDataType eDataType, String initialValue) {
    ServiceSimpleType result = ServiceSimpleType.get(initialValue);
    if (result == null)
        throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
}
java | public static String join(Collection<String> toJoin, String separator) {
if (isNullOrEmpty(toJoin)) {
return "";
}
StringBuilder joinedString = new StringBuilder();
int currentIndex = 0;
for (String s : toJoin) {
if(s != null) {
joinedString.append(s);
}
if (currentIndex++ != toJoin.size() - 1) {
joinedString.append(separator);
}
}
return joinedString.toString();
} |
def get_random_password():
    """Get a random password that complies with most of the requirements.

    Note:
        This random password is not strong and not "really" random, and
        should only be used for testing purposes.

    Returns:
        str: The random password.
    """
    # 4 lowercase + 2 digits + 2 specials + 4 uppercase, in that order.
    segments = [
        RandomInputHelper.get_random_value(4, [string.ascii_lowercase]),
        RandomInputHelper.get_random_value(2, [string.digits]),
        RandomInputHelper.get_random_value(2, ["$&*@!"]),
        RandomInputHelper.get_random_value(4, [string.ascii_uppercase]),
    ]
    return "".join(segments)
python | def _windows_virtual(osdata):
'''
Returns what type of virtual hardware is under the hood, kvm or physical
'''
# Provides:
# virtual
# virtual_subtype
grains = dict()
if osdata['kernel'] != 'Windows':
return grains
grains['virtual'] = 'physical'
# It is possible that the 'manufacturer' and/or 'productname' grains
# exist but have a value of None.
manufacturer = osdata.get('manufacturer', '')
if manufacturer is None:
manufacturer = ''
productname = osdata.get('productname', '')
if productname is None:
productname = ''
if 'QEMU' in manufacturer:
# FIXME: Make this detect between kvm or qemu
grains['virtual'] = 'kvm'
if 'Bochs' in manufacturer:
grains['virtual'] = 'kvm'
# Product Name: (oVirt) www.ovirt.org
# Red Hat Community virtualization Project based on kvm
elif 'oVirt' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'oVirt'
# Red Hat Enterprise Virtualization
elif 'RHEV Hypervisor' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'rhev'
# Product Name: VirtualBox
elif 'VirtualBox' in productname:
grains['virtual'] = 'VirtualBox'
# Product Name: VMware Virtual Platform
elif 'VMware Virtual Platform' in productname:
grains['virtual'] = 'VMware'
# Manufacturer: Microsoft Corporation
# Product Name: Virtual Machine
elif 'Microsoft' in manufacturer and \
'Virtual Machine' in productname:
grains['virtual'] = 'VirtualPC'
# Manufacturer: Parallels Software International Inc.
elif 'Parallels Software' in manufacturer:
grains['virtual'] = 'Parallels'
# Apache CloudStack
elif 'CloudStack KVM Hypervisor' in productname:
grains['virtual'] = 'kvm'
grains['virtual_subtype'] = 'cloudstack'
return grains |
java | void chooseUpsertPlugin(Context context) {
DatabaseType type = detectDB(context);
PluginConfiguration pluginConfiguration = new PluginConfiguration();
switch (type){
case MYSQL:
pluginConfiguration.setConfigurationType(MySqlUpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with mysql for context " + context.getId());
break;
case POSTGRESQL:
pluginConfiguration.setConfigurationType(PostgreSQLUpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with postgresql for context " + context.getId());
break;
case SQLSERVER:
pluginConfiguration.setConfigurationType(SQLServerUpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with sqlserver for context " + context.getId());
break;
case DB2:
pluginConfiguration.setConfigurationType(DB2UpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with db2 for context " + context.getId());
break;
case ORACLE:
pluginConfiguration.setConfigurationType(OracleUpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with oracle for context " + context.getId());
break;
case HSQLDB:
pluginConfiguration.setConfigurationType(HsqldbUpsertPlugin.class.getTypeName());
context.addPluginConfiguration(pluginConfiguration);
if(verbose)getLog().info("enable upsert service with hsqldb for context " + context.getId());
break;
}
} |
java | public static ns_ssl_certkey_policy[] get_filtered(nitro_service service, String filter) throws Exception
{
ns_ssl_certkey_policy obj = new ns_ssl_certkey_policy();
options option = new options();
option.set_filter(filter);
ns_ssl_certkey_policy[] response = (ns_ssl_certkey_policy[]) obj.getfiltered(service, option);
return response;
} |
def add(self, type, orig, replace):
    """Add an entry to the catalog; it may overwrite existing but
    different entries.

    Returns the status code from the underlying libxml2 call."""
    return libxml2mod.xmlACatalogAdd(self._o, type, orig, replace)
java | @Pure
public static String getString(ClassLoader classLoader, String key, Object... params) {
return getString(classLoader, detectResourceClass(null), key, params);
} |
def getLogger(name):
    """This is used by gcdt plugins to get a logger with the right level."""
    plugin_logger = logging.getLogger(name)
    # note: the level might be adjusted via '-v' option
    configured_level = logging_config['loggers']['gcdt']['level']
    plugin_logger.setLevel(configured_level)
    return plugin_logger
/**
 * JCas-style setter for the {@code gramRole} string feature of this
 * PTBConstituent annotation.  Raises a CAS "feature missing" error when the
 * loaded type system does not define the feature.
 *
 * @param v the new feature value
 */
public void setGramRole(String v) {
    if (PTBConstituent_Type.featOkTst && ((PTBConstituent_Type)jcasType).casFeat_gramRole == null)
      jcasType.jcas.throwFeatMissing("gramRole", "de.julielab.jules.types.PTBConstituent");
    jcasType.ll_cas.ll_setStringValue(addr, ((PTBConstituent_Type)jcasType).casFeatCode_gramRole, v);}
/**
 * Emits bytecode that pushes the store target onto the operand stack in
 * preparation for a subsequent property-store instruction.  Three cases:
 * no instance variable means store on {@code this}; an {@code Object[]}
 * variable means push the array plus the ordinal as index; any other
 * variable type is pushed as a plain reference.
 *
 * @param a           assembler receiving the generated instructions
 * @param ordinal     property position; used as the array index in the
 *                    {@code Object[]} case
 * @param instanceVar local holding the target instance, or {@code null}
 *                    for {@code this}
 */
protected void pushDecodingInstanceVar(CodeAssembler a, int ordinal,
                                       LocalVariable instanceVar) {
    if (instanceVar == null) {
        // Push this to stack in preparation for storing a property.
        a.loadThis();
    } else if (instanceVar.getType() != TypeDesc.forClass(Object[].class)) {
        // Push reference to stack in preparation for storing a property.
        a.loadLocal(instanceVar);
    } else {
        // Push array and index to stack in preparation for storing a property.
        a.loadLocal(instanceVar);
        a.loadConstant(ordinal);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.