language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public final boolean matchEncoding(final EnhancedMimeType other) {
return match(other) && Objects.equals(getEncoding(), other.getEncoding());
} |
/**
 * Kills the processes identified by the given environment variables.
 *
 * @deprecated retained for backward compatibility; simply delegates to the
 *             two-argument {@code kill} overload with a {@code null} first
 *             argument.
 * @param modelEnvVars environment variables used by the delegate to identify
 *                     the target processes
 * @throws InterruptedException if interrupted while waiting for the kill
 */
@Deprecated
public void kill(Map<String, String> modelEnvVars) throws InterruptedException {
    kill(null,modelEnvVars);
}
/**
 * Merges the given reservoir sketch into this union's internal gadget,
 * updating the running item count and the outer tau bookkeeping.
 *
 * If the reservoir is still in exact mode (n <= k) its items carry unit
 * weight; otherwise each item carries the reservoir's implicit sample
 * weight, with the last item corrected so the total weight equals n.
 *
 * @param reservoir the sketch to merge; a reservoir with n == 0 is a no-op
 */
private void mergeReservoirInto(final ReservoirItemsSketch<T> reservoir) {
    final long reservoirN = reservoir.getN();
    if (reservoirN == 0) {
        return;
    }
    n_ += reservoirN;
    final int reservoirK = reservoir.getK();
    if (reservoir.getN() <= reservoirK) {
        // exact mode, so just insert and be done
        for (T item : reservoir.getRawSamplesAsList()) {
            gadget_.update(item, 1.0, false);
        }
    } else {
        // sampling mode. We'll replicate a weight-correcting iterator
        final double reservoirTau = reservoir.getImplicitSampleWeight();
        double cumWeight = 0.0;
        final ArrayList<T> samples = reservoir.getRawSamplesAsList();
        // first k-1 items at the implicit weight, tracking the running total
        for (int i = 0; i < (reservoirK - 1); ++i) {
            gadget_.update(samples.get(i), reservoirTau, true);
            cumWeight += reservoirTau;
        }
        // correct for any numerical discrepancies with the last item
        gadget_.update(samples.get(reservoirK - 1), reservoir.getN() - cumWeight, true);
        // resolve tau
        final double outerTau = getOuterTau();
        if (outerTauDenom == 0) {
            // detect first estimation mode sketch and grab its tau
            outerTauNumer = reservoirN;
            outerTauDenom = reservoirK;
        } else if (reservoirTau > outerTau) {
            // switch to a bigger value of outerTau
            outerTauNumer = reservoirN;
            outerTauDenom = reservoirK;
        } else if (reservoirTau == outerTau) {
            // Ok if previous equality test isn't quite perfect. Mistakes in either direction should
            // be fairly benign.
            // Without conceptually changing outerTau, update number and denominator. In particular,
            // add the total weight of the incoming reservoir to the running total.
            outerTauNumer += reservoirN;
            outerTauDenom += reservoirK;
        }
        // do nothing if reservoir "tau" is no smaller than outerTau
    }
}
def load_items(self, items):
    """Loads any number of items in chunks, handling continuation tokens.

    :param items: Unpacked in chunks into "RequestItems" for
        :func:`boto3.DynamoDB.Client.batch_get_item`.
    :return: dict mapping table name to the list of items loaded from it.
    """
    results = {}
    pending = collections.deque(create_batch_get_chunks(items))
    while pending:
        chunk = pending.pop()
        try:
            response = self.dynamodb_client.batch_get_item(RequestItems=chunk)
        except botocore.exceptions.ClientError as error:
            raise BloopException("Unexpected error while loading items.") from error
        # Merge this page of results into the accumulator, keyed by table name.
        for table_name, table_items in response.get("Responses", {}).items():
            results.setdefault(table_name, []).extend(table_items)
        # "UnprocessedKeys" is {} when the request is fully processed; anything
        # else is pushed back onto the deque for another round trip.
        unprocessed = response["UnprocessedKeys"]
        if unprocessed:
            pending.append(unprocessed)
    return results
/**
 * Encodes the given value into this stringer.
 *
 * JSONArray/JSONObject values delegate to their own writeTo(); null,
 * Boolean and JSONObject.NULL are appended literally; Numbers go through
 * JSONObject.numberToString(); everything else is written as a quoted
 * string via toString().
 *
 * @param value value to encode; may be null
 * @return this stringer, for chaining
 * @throws JSONException if called outside an open array/object (empty stack)
 */
public JSONStringer value(Object value) throws JSONException {
    if (this.stack.isEmpty()) {
        throw new JSONException("Nesting problem");
    }
    if (value instanceof JSONArray) {
        ((JSONArray) value).writeTo(this);
        return this;
    }
    else if (value instanceof JSONObject) {
        ((JSONObject) value).writeTo(this);
        return this;
    }
    // only scalar values need the separator/indent bookkeeping
    beforeValue();
    if (value == null || value instanceof Boolean || value == JSONObject.NULL) {
        this.out.append(value);
    }
    else if (value instanceof Number) {
        this.out.append(JSONObject.numberToString((Number) value));
    }
    else {
        string(value.toString());
    }
    return this;
}
/**
 * Encodes the (x, y) coordinates into a single interleaved index of
 * 2*r bits, built from the odd ("hodd") and even ("heven") bit planes.
 *
 * NOTE(review): the bit-parallel recurrence below looks like a
 * non-recursive Hilbert-curve encoding, but that is inferred from the
 * structure only — confirm against the algorithm's reference before
 * changing any operator here (including the signed {@code >>} shifts).
 *
 * @param x x coordinate; only the low r bits are meaningful
 * @param y y coordinate; only the low r bits are meaningful
 * @param r number of bits per coordinate
 * @return the interleaved encoded index
 */
private int encode(int x, int y, int r) {
    // mask with the low r bits set, used to truncate the complements below
    int mask = (1 << r) - 1;
    int hodd = 0;
    int heven = x ^ y;
    int notx = ~x & mask;
    int noty = ~y & mask;
    int temp = notx ^ y;
    int v0 = 0, v1 = 0;
    // propagate per-bit state from high bits toward low bits
    for (int k = 1; k < r; k++) {
        v1 = ((v1 & heven) | ((v0 ^ noty) & temp)) >> 1;
        v0 = ((v0 & (v1 ^ notx)) | (~v0 & (v1 ^ noty))) >> 1;
    }
    hodd = (~v0 & (v1 ^ x)) | (v0 & (v1 ^ noty));
    return interleaveBits(hodd, heven);
}
java | public String getNewSessionIdIfNodeFromSessionIdUnavailable( @Nonnull final String sessionId ) {
if ( isEncodeNodeIdInSessionId() ) {
final String nodeId = _sessionIdFormat.extractMemcachedId( sessionId );
final String newNodeId = _nodeIdService.getNewNodeIdIfUnavailable( nodeId );
if ( newNodeId != null ) {
return _sessionIdFormat.createNewSessionId( sessionId, newNodeId);
}
}
return null;
} |
/**
 * Renders this object's print output as a UTF-8 string by capturing the
 * stream-based {@code print(OutputStream)} overload into a byte buffer.
 *
 * @return the printed representation, decoded as UTF-8
 * @throws IOException if the underlying print fails
 */
public String print() throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    this.print(baos);
    return new Utf8String(baos.toByteArray()).asString();
}
/**
 * Finalizes the term/document accumulation phase: releases the working
 * buffers, snapshots the per-term document frequencies, and applies the
 * configured weighting to every stored vector.
 *
 * After this call no further documents may be added (noMoreAdding is set)
 * and the frequency storage is released.
 */
protected void finishAdding()
{
    noMoreAdding = true;
    // release intermediate buffers that are only needed while adding
    workSpace = null;
    storageSpace = null;
    wordCounts = null;
    // copy the (possibly concurrent) frequency structure into a plain array;
    // note the array is sized by dimensionSize but filled up to
    // termDocumentFrequencys.length() — NOTE(review): confirm these agree.
    final int[] frqs = new int[dimensionSize];
    for(int i = 0; i < termDocumentFrequencys.length(); i++)
        frqs[i] = termDocumentFrequencys.get(i);
    weighting.setWeight(vectors, IntList.unmodifiableView(frqs, dimensionSize));
    for(SparseVector vec : vectors)
        weighting.applyTo(vec);
    termDocumentFrequencys = null;
}
def add_dict_to_cookiejar(cj, cookie_dict):
    """Inserts cookies from a key/value dictionary into a CookieJar.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :return: the updated CookieJar.
    """
    # Let cookiejar_from_dict populate the existing jar directly instead of
    # building a second jar and calling cj.update(cj2): the stdlib
    # http.cookiejar.CookieJar has no update() method, so the old approach
    # broke for any jar that was not a RequestsCookieJar.
    return cookiejar_from_dict(cookie_dict, cj)
/**
 * Handles a read that returned no data ("null read") from the current
 * stream.
 *
 * If the NameNode already finished the stream and we have since observed a
 * null read, the stream is marked fully consumed; otherwise we back off
 * briefly before the caller retries. Finally the finished/null-read state
 * is updated and the stream position refreshed.
 *
 * @throws IOException if the backoff sleep is interrupted (the interrupt is
 *                     wrapped rather than re-asserted)
 */
private void handleNullRead() throws IOException {
    if (curStreamFinished && readNullAfterStreamFinished) {
        // If we read a null operation after the NameNode closed
        // the stream, then we surely reached the end of the file.
        curStreamConsumed = true;
    } else {
        try {
            // This affects how much we wait after we reached the end of the
            // current stream.
            Thread.sleep(100);
        } catch (InterruptedException e) {
            throw new IOException(e);
        }
    }
    // remember that a null read happened after the stream finished, so the
    // next null read can conclude end-of-file (see branch above)
    if (curStreamFinished)
        readNullAfterStreamFinished = true;
    refreshStreamPosition();
}
def key_func(*keys, **kwargs):
    """Creates a "key function" based on given keys.

    Resulting function will perform lookup using specified keys, in order,
    on the object passed to it as an argument.
    For example, ``key_func('a', 'b')(foo)`` is equivalent to ``foo['a']['b']``.

    :param keys: Lookup keys
    :param default: Optional keyword argument specifying default value
                    that will be returned when some lookup key is not present
    :return: Unary key function
    """
    ensure_argcount(keys, min_=1)
    ensure_keyword_args(kwargs, optional=('default',))
    keys = [ensure_string(key) for key in keys]

    if 'default' not in kwargs:
        # Without a default, missing keys should raise KeyError naturally,
        # so plain chained lookups suffice; the single-key case can use the
        # optimized operator.itemgetter.
        if len(keys) == 1:
            return operator.itemgetter(keys[0])

        def getitems(obj):
            for key in keys:
                obj = obj[key]
            return obj
        return getitems

    default = kwargs['default']

    def getitems_with_default(obj):
        # Stop at the first missing key and fall back to the default.
        for key in keys:
            try:
                obj = obj[key]
            except KeyError:
                return default
        return obj
    return getitems_with_default
/**
 * Wraps the referent according to the requested reference strength.
 *
 * @param type     one of HARD, SOFT or WEAK
 * @param referent the object to wrap
 * @param hash     hash value stored alongside the soft/weak wrapper
 * @return the referent itself for HARD, otherwise a SoftRef/WeakRef
 *         registered with the reference {@code queue}
 * @throws Error if {@code type} is not one of the known constants
 */
private Object toReference(int type, Object referent, int hash)
{
    switch (type)
    {
        case HARD:
            // hard entries are held directly and never collected
            return referent;
        case SOFT:
            return new SoftRef(hash, referent, queue);
        case WEAK:
            return new WeakRef(hash, referent, queue);
        default:
            throw new Error();
    }
}
/**
 * Determines the permutation parity of the ligand array relative to sorted
 * order under {@code cipRule}, sorting the array in place.
 *
 * @param ligands ligand array; sorted as a side effect
 * @return +1 for an even permutation, -1 for odd, 0 if two ligands compare
 *         equal (duplicates make the parity undefined)
 */
private static int permParity(final ILigand[] ligands) {
    // count the number of swaps made by insertion sort - if duplicates
    // are found the parity is 0
    int swaps = 0;
    for (int j = 1, hi = ligands.length; j < hi; j++) {
        ILigand ligand = ligands[j];
        int i = j - 1;
        int cmp = 0;
        while ((i >= 0) && (cmp = cipRule.compare(ligand, ligands[i])) > 0) {
            ligands[i + 1] = ligands[i--];
            swaps++;
        }
        if (cmp == 0) // identical entries
            return 0;
        ligands[i + 1] = ligand;
    }
    // odd (-1) or even (+1)
    return (swaps & 0x1) == 0x1 ? -1 : +1;
}
/**
 * Compiles a unary operation: compiles its single operand (the first child
 * in the opcode map) and attaches it as the right-hand expression.
 *
 * @param unary the unary operation instance to populate
 * @param opPos position of this operation in the opcode map
 * @return the populated unary expression
 * @throws TransformerException if compiling the operand fails
 */
private Expression compileUnary(UnaryOperation unary, int opPos)
    throws TransformerException
{
    int rightPos = getFirstChildPos(opPos);
    unary.setRight(compile(rightPos));
    return unary;
}
def read(self, b=-1):
    """Keep reading from source stream until either the source stream is done
    or the requested number of bytes have been obtained.

    :param int b: number of bytes to read
    :return: All bytes read from wrapped stream
    :rtype: bytes

    NOTE(review): a negative ``b`` (read-all) works because a single
    ``read(-1)`` on the wrapped stream returns everything and the loop then
    exits on the ``remaining_bytes <= 0`` check; ``b=None`` would raise
    TypeError at the subtraction below — confirm callers never pass None.
    """
    remaining_bytes = b
    data = io.BytesIO()
    while True:
        try:
            chunk = to_bytes(self.__wrapped__.read(remaining_bytes))
        except ValueError:
            # Some streams raise ValueError when read after close; treat a
            # closed source as end-of-data, re-raise anything else.
            if self.__wrapped__.closed:
                break
            raise
        if not chunk:
            # Empty read: the wrapped stream is exhausted.
            break
        data.write(chunk)
        remaining_bytes -= len(chunk)
        if remaining_bytes <= 0:
            break
    return data.getvalue()
/**
 * Adds "date last modified" and "user last modified" info entries (with
 * workplace-locale labels) to the given broken-link bean.
 *
 * If the modifying user cannot be read, its raw id string is used instead
 * and the error is only logged.
 *
 * @param cms      the CMS context used for locale, formatting and user lookup
 * @param resource the resource whose modification info is reported
 * @param result   the bean that receives the info entries
 */
private void addBrokenLinkAdditionalInfo(CmsObject cms, CmsResource resource, CmsBrokenLinkBean result) {
    String dateLastModifiedLabel = org.opencms.workplace.commons.Messages.get().getBundle(
        OpenCms.getWorkplaceManager().getWorkplaceLocale(cms)).key(
        org.opencms.workplace.commons.Messages.GUI_LABEL_DATE_LAST_MODIFIED_0);
    String dateLastModified = CmsVfsService.formatDateTime(cms, resource.getDateLastModified());
    String userLastModifiedLabel = org.opencms.workplace.commons.Messages.get().getBundle(
        OpenCms.getWorkplaceManager().getWorkplaceLocale(cms)).key(
        org.opencms.workplace.commons.Messages.GUI_LABEL_USER_LAST_MODIFIED_0);
    // fall back to the raw user id if the user record cannot be resolved
    String userLastModified = "" + resource.getUserLastModified();
    try {
        userLastModified = cms.readUser(resource.getUserLastModified()).getName();
    } catch (CmsException e) {
        LOG.error(e.getLocalizedMessage(), e);
    }
    result.addInfo(dateLastModifiedLabel, dateLastModified);
    result.addInfo(userLastModifiedLabel, userLastModified);
}
java | public List<Option> getSelectedItems() {
final List<Option> items = new ArrayList<>(0);
for (Entry<OptionElement, Option> entry : itemMap.entrySet()) {
Option opt = entry.getValue();
if (opt.isSelected())
items.add(opt);
}
return items;
} |
def open_(filename, mode=None, compresslevel=9):
    """Switch for both open() and gzip.open().

    Determines if the file is normal or gzipped by looking at the file
    extension.

    The filename argument is required; mode defaults to 'rt' for gzip and
    'r' for normal files, and compresslevel defaults to 9 for gzip.
    (The docstring previously claimed 'rb' for gzip, but the code has always
    defaulted to text mode.)

    >>> import gzip
    >>> from contextlib import closing
    >>> with open_(filename) as f:
    ...     f.read()
    """
    # endswith() instead of a fixed-width slice: clearer and safe for names
    # shorter than three characters.
    if filename.endswith('.gz'):
        if mode is None:
            mode = 'rt'
        # closing() makes the gzip handle usable in a `with` block on older
        # Python versions where GzipFile was not a context manager.
        return closing(gzip.open(filename, mode, compresslevel))
    if mode is None:
        mode = 'r'
    return open(filename, mode)
def find_doi(self, curr_dict):
    """
    Recursively search the file for the DOI id. More taxing, but more
    flexible when dictionary structuring isn't absolute.

    :param dict curr_dict: Current dictionary being searched
    :return dict bool: Recursive - Current dictionary, False flag that DOI was not found
    :return str bool: Final - DOI id, True flag that DOI was found
    """
    try:
        if 'id' in curr_dict:
            # Found the id directly at this level.
            return curr_dict['id'], True
        elif isinstance(curr_dict, list):
            # Descend into the first element only (an empty list falls
            # through and returns None implicitly — preserved behaviour).
            for entry in curr_dict:
                return self.find_doi(entry)
        elif isinstance(curr_dict, dict):
            # Only 'identifier' entries are worth descending into.
            for key, value in curr_dict.items():
                if key == 'identifier':
                    return self.find_doi(value)
            return curr_dict, False
        else:
            return curr_dict, False
    # Non-subscriptable values (ints, None, ...) mean no DOI here.
    except TypeError:
        return curr_dict, False
def add_inputs_from_inputstring(self, input_string):
    """
    Add inputs using the input string format::

        gitroot==~/workspace
        username
        password?
        main_branch==comp_main
    """
    for raw_line in input_string.split('\n'):
        # Skip lines that contain only spaces/tabs; everything else is
        # treated as a parameter specification.
        if not raw_line.strip(' \t'):
            continue
        param, attributes = self._parse_param_line(raw_line)
        self.add_input(param, attributes)
def set_default_unit(self, twig=None, unit=None, **kwargs):
    """Set the default unit of the matching parameter.

    Supports being called as ``set_default_unit(unit)``: if only one
    positional argument is given and it is not a twig string (or no
    parameters match it as a twig), it is treated as the unit.

    :param twig: twig used to filter for the parameter (optional).
    :param unit: the new default unit.
    :param kwargs: extra filter arguments; may include ``check_default``.
    """
    # Fixes two latent bugs in the original:
    #  * `check_default` was referenced but never defined (NameError as soon
    #    as the filter branch ran); it is now taken from kwargs.
    #  * the first guard tested `isinstance(unit, u.Unit)` although `unit`
    #    is always None in this branch; it must inspect `twig`.
    check_default = kwargs.pop('check_default', True)
    if twig is not None and unit is None:
        # then try to support unit as the first argument if twig is not a twig
        if isinstance(twig, u.Unit) or not isinstance(twig, str):
            unit = twig
            twig = None
        elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):
            unit = twig
            twig = None
    return self.get_parameter(twig=twig, **kwargs).set_default_unit(unit)
def post(self, url, data, charset=CHARSET_UTF8, headers=None):
    '''POST the form-encoded data and return the parsed JSON response.

    :param url: target URL.
    :param data: form data passed through to requests.post.
    :param charset: charset appended to the default Content-Type header.
    :param headers: optional extra headers; Api-Lang and Content-Type are
        filled in when absent. Defaults to None instead of a mutable ``{}``:
        the old default dict was mutated in place, leaking headers across
        calls.
    :return: the response body parsed as JSON.
    '''
    if headers is None:
        headers = {}
    if 'Api-Lang' not in headers:
        headers['Api-Lang'] = 'python'
    if 'Content-Type' not in headers:
        headers['Content-Type'] = "application/x-www-form-urlencoded;charset=" + charset
    rsp = requests.post(url, data, headers=headers,
                        timeout=(int(self.conf(HTTP_CONN_TIMEOUT, '10')), int(self.conf(HTTP_SO_TIMEOUT, '30'))))
    return json.loads(rsp.text)
def runtime_to_build(runtime_deps):
    """Return a deep copy of the runtime deps with each tag set to BuildRequires.

    The input list is left untouched; empty entries are preserved as-is.
    """
    build_deps = copy.deepcopy(runtime_deps)
    for dep in build_deps:
        if dep:
            # The first field of each entry is the dependency tag.
            dep[0] = 'BuildRequires'
    return build_deps
/**
 * Caches each of the given entities, unless it is already present in the
 * entity cache — in which case its original values are reset instead.
 *
 * @param commerceShippingFixedOptionRels the entities to cache
 */
@Override
public void cacheResult(
    List<CommerceShippingFixedOptionRel> commerceShippingFixedOptionRels) {
    for (CommerceShippingFixedOptionRel commerceShippingFixedOptionRel : commerceShippingFixedOptionRels) {
        // only insert entities that are not cached yet
        if (entityCache.getResult(
                CommerceShippingFixedOptionRelModelImpl.ENTITY_CACHE_ENABLED,
                CommerceShippingFixedOptionRelImpl.class,
                commerceShippingFixedOptionRel.getPrimaryKey()) == null) {
            cacheResult(commerceShippingFixedOptionRel);
        }
        else {
            commerceShippingFixedOptionRel.resetOriginalValues();
        }
    }
}
java | public String getStack()
{
StringBuffer sb =
new StringBuffer("fr.esrf.TangoApi.WrongNameSyntax:\n");
for (int i=0 ; i<errors.length ; i++)
{
sb.append("Severity -> ");
switch (errors[i].severity.value())
{
case ErrSeverity._WARN :
sb.append("WARNING \n");
break;
case ErrSeverity._ERR :
sb.append("ERROR \n");
break;
case ErrSeverity._PANIC :
sb.append("PANIC \n");
break;
default :
sb.append("Unknown severity code");
break;
}
sb.append("Desc -> ").append(errors[i].desc).append("\n");
sb.append("Reason -> ").append(errors[i].reason).append("\n");
sb.append("Origin -> ").append(errors[i].origin).append("\n");
if (i<errors.length-1)
sb.append("-------------------------------------------------------------\n");
}
return sb.toString();
} |
def drop_all(self, queue_name):
    """
    Drops all the tasks in the queue.

    :param queue_name: The name of the queue. Usually handled by the
        ``Gator`` instance.
    :type queue_name: string
    """
    # Delete every task payload referenced by the queue list...
    for task_id in self.conn.lrange(queue_name, 0, -1):
        self.conn.delete(task_id)
    # ...then drop the queue list itself.
    self.conn.delete(queue_name)
def gzipped(fn):
    """
    Decorator used to pack data returned from the Bottle function to GZIP.

    The decorator adds GZIP compression only if the browser accepts GZIP in
    its ``Accept-Encoding`` headers. In that case, also the correct
    ``Content-Encoding`` is used.
    """
    # Local import keeps the module's top-level dependencies unchanged.
    from functools import wraps

    # wraps() preserves fn.__name__/__doc__, which the original wrapper lost
    # (confusing for introspection and any name-based routing/debugging).
    @wraps(fn)
    def gzipped_wrapper(*args, **kwargs):
        accepted_encoding = request.get_header("Accept-Encoding")
        # Client did not advertise gzip support: pass the response through.
        if not accepted_encoding or "gzip" not in accepted_encoding:
            return fn(*args, **kwargs)
        response.set_header("Content-Encoding", "gzip")
        return to_gzipped_file(fn(*args, **kwargs))
    return gzipped_wrapper
/**
 * Initializes this iterator for a new root context node, resetting all
 * cached iteration state (last fetched node, position, length).
 *
 * @param context     the root context node handle
 * @param environment the execution environment; expected to be an
 *                    {@link XPathContext}
 */
public void setRoot(int context, Object environment)
{
    m_context = context;
    XPathContext xctxt = (XPathContext)environment;
    m_execContext = xctxt;
    m_cdtm = xctxt.getDTM(context);
    m_currentContextNode = context; // only if top level?
    // Yech, shouldn't have to do this. -sb
    if(null == m_prefixResolver)
        m_prefixResolver = xctxt.getNamespaceContext();
    // reset cached iteration state for the new root
    m_lastFetched = DTM.NULL;
    m_foundLast = false;
    m_pos = 0;
    m_length = -1;
    // top-level iterators capture the current variable stack frame
    if (m_isTopLevel)
        this.m_stackFrame = xctxt.getVarStack().getStackFrame();
    // reset();
}
/**
 * Deserializes the fields of the given instance from the stream by
 * delegating to the type-specific {@code deserialize} helper.
 *
 * @param streamReader the stream to read field values from
 * @param instance     the instance to populate
 * @throws SerializationException if reading from the stream fails
 */
@Override
public void deserializeInstance(SerializationStreamReader streamReader, EntityType instance) throws SerializationException {
    deserialize(streamReader, instance);
}
/**
 * Describes a constraint by running the standard request pipeline:
 * pre-execution hooks followed by the actual service call.
 *
 * @param request the describe-constraint request
 * @return the service response
 */
@Override
public DescribeConstraintResult describeConstraint(DescribeConstraintRequest request) {
    request = beforeClientExecution(request);
    return executeDescribeConstraint(request);
}
/**
 * Ad-hoc smoke test for {@code GeneralizedCounter}: exercises increments,
 * conditionalizing, counter views, pretty-printing and equality, writing
 * everything to stdout for manual inspection.
 */
public static void main(String[] args) {
    Object[] a1 = new Object[]{"a", "b"};
    Object[] a2 = new Object[]{"a", "b"};
    // NOTE(review): arrays use identity equals(), so this prints "false";
    // Arrays.equals(a1, a2) would compare contents. Left as-is since the
    // printed output of this demo may be relied upon.
    System.out.println(a1.equals(a2));
    GeneralizedCounter<String> gc = new GeneralizedCounter<String>(3);
    gc.incrementCount(Arrays.asList(new String[]{"a", "j", "x"}), 3.0);
    gc.incrementCount(Arrays.asList(new String[]{"a", "l", "x"}), 3.0);
    gc.incrementCount(Arrays.asList(new String[]{"b", "k", "y"}), 3.0);
    gc.incrementCount(Arrays.asList(new String[]{"b", "k", "z"}), 3.0);
    System.out.println("incremented counts.");
    System.out.println(gc.dumpKeys());
    System.out.println("string representation of generalized counter:");
    System.out.println(gc.toString());
    gc.printKeySet();
    System.out.println("entry set:\n" + gc.entrySet());
    // queries for both present and absent key paths
    arrayPrintDouble(gc.getCounts(Arrays.asList(new String[]{"a", "j", "x"})));
    arrayPrintDouble(gc.getCounts(Arrays.asList(new String[]{"a", "j", "z"})));
    arrayPrintDouble(gc.getCounts(Arrays.asList(new String[]{"b", "k", "w"})));
    arrayPrintDouble(gc.getCounts(Arrays.asList(new String[]{"b", "k", "z"})));
    // conditionalized views share state with the parent counter
    GeneralizedCounter<String> gc1 = gc.conditionalize(Arrays.asList(new String[]{"a"}));
    gc1.incrementCount(Arrays.asList(new String[]{"j", "x"}));
    gc1.incrementCount2D("j", "z");
    GeneralizedCounter<String> gc2 = gc1.conditionalize(Arrays.asList(new String[]{"j"}));
    gc2.incrementCount1D("x");
    System.out.println("Pretty-printing gc after incrementing gc1:");
    gc.prettyPrint();
    System.out.println("Total: " + gc.totalCount());
    gc1.printKeySet();
    System.out.println("another entry set:\n" + gc1.entrySet());
    ClassicCounter<List<String>> c = gc.counterView();
    System.out.println("string representation of counter view:");
    System.out.println(c.toString());
    double d1 = c.getCount(Arrays.asList(new String[]{"a", "j", "x"}));
    double d2 = c.getCount(Arrays.asList(new String[]{"a", "j", "w"}));
    System.out.println(d1 + " " + d2);
    ClassicCounter<List<String>> c1 = gc1.counterView();
    System.out.println("Count of {j,x} -- should be 3.0\t" + c1.getCount(Arrays.asList(new String[]{"j", "x"})));
    System.out.println(c.keySet() + " size " + c.keySet().size());
    System.out.println(c1.keySet() + " size " + c1.keySet().size());
    // equality checks between views and with itself
    System.out.println(c1.equals(c));
    System.out.println(c.equals(c1));
    System.out.println(c.equals(c));
    System.out.println("### testing equality of regular Counter...");
    ClassicCounter<String> z1 = new ClassicCounter<String>();
    ClassicCounter<String> z2 = new ClassicCounter<String>();
    z1.incrementCount("a1");
    z1.incrementCount("a2");
    z2.incrementCount("b");
    System.out.println(z1.equals(z2));
    System.out.println(z1.toString());
    System.out.println(z1.keySet().toString());
}
/**
 * ANTLR-generated lexer rule WS: matches one or more whitespace characters
 * (space, tab, or NEWLINE) and sets the token type/channel.
 *
 * Generated from druidG.g — regenerate from the grammar rather than editing
 * this method by hand.
 *
 * @throws RecognitionException if no whitespace character can be matched
 */
public final void mWS() throws RecognitionException {
    try {
        int _type = WS;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // druidG.g:669:2: ( ( ' ' | '\\t' | NEWLINE )+ )
        // druidG.g:669:4: ( ' ' | '\\t' | NEWLINE )+
        {
        // druidG.g:669:4: ( ' ' | '\\t' | NEWLINE )+
        int cnt37=0;
        loop37:
        while (true) {
            int alt37=4;
            // dispatch on the lookahead character to pick the alternative
            switch ( input.LA(1) ) {
            case ' ':
                {
                alt37=1;
                }
                break;
            case '\t':
                {
                alt37=2;
                }
                break;
            case '\n':
            case '\r':
                {
                alt37=3;
                }
                break;
            }
            switch (alt37) {
            case 1 :
                // druidG.g:669:5: ' '
                {
                match(' ');
                }
                break;
            case 2 :
                // druidG.g:669:11: '\\t'
                {
                match('\t');
                }
                break;
            case 3 :
                // druidG.g:669:18: NEWLINE
                {
                mNEWLINE();
                }
                break;
            default :
                // no alternative matched: legal only after at least one char
                if ( cnt37 >= 1 ) break loop37;
                EarlyExitException eee = new EarlyExitException(37, input);
                throw eee;
            }
            cnt37++;
        }
        }
        state.type = _type;
        state.channel = _channel;
    }
    finally {
        // do for sure before leaving
    }
}
def assign_order(self):
    """
    The goal is to assign scaffold orders. To help order the scaffolds, two
    dummy nodes, START and END, mark the ends of the chromosome. We connect
    START to each scaffold (directed), and each scaffold to END.

    :return: the tour produced by solving the resulting TSP instance.
    """
    linkage_groups = self.linkage_groups
    # Locate the pivot map's positions first; every other group is oriented
    # against it.
    # NOTE(review): if no group's mapname equals self.pivot, pivot_position
    # is never bound and the loop below raises NameError — confirm callers
    # guarantee the pivot map is always present.
    for mlg in linkage_groups:
        mapname = mlg.mapname
        if mapname == self.pivot:
            pivot_position = mlg.position
    for mlg in linkage_groups:
        position = mlg.position
        # Flip order if path goes in the opposite direction to the pivot
        common = []
        for a, ap in position.items():
            if a not in pivot_position:
                continue
            pp = pivot_position[a]
            common.append((ap, pp))
        # rho is the rank correlation against the pivot; negative means the
        # path runs the "wrong" way and is reversed.
        mlg.rho = get_rho(common)
        if mlg.rho < 0:
            mlg.path = mlg.path[::-1]
        mlg.populate_pairwise_distance()
    # Preparation of TSP
    distances = defaultdict(list)
    for mlg in linkage_groups:
        mapname = mlg.mapname
        position = mlg.position
        length = mlg.length
        path = mlg.path
        rho = mlg.rho
        dd = mlg.distances
        # pairwise distances between scaffolds within this group
        for a, b in combinations(path, 2):
            d = dd[a, b]
            distances[a, b].append((d, mapname))
        # connect the dummy START/END nodes; distances swap for flipped maps
        for p in path:
            adist, bdist = position[p], length - position[p]
            if rho < 0:
                adist, bdist = bdist, adist
            distances[START, p].append((adist, mapname))
            distances[p, END].append((bdist, mapname))
    self.distances = distances
    tour = self.distances_to_tour()
    return tour
java | private PatternConstant create(Constant constant) {
if (!constants.containsKey(constant)) {
constants.put(
constant,
new PatternConstant(constant.getValue(), constant.isIgnoreCase(), constant.isGhost())
);
}
return constants.get(constant);
} |
def draw_material(material, face=GL_FRONT_AND_BACK):
    """Draw a single material with pyglet's OpenGL bindings.

    :param material: object carrying vertex data, texture handles and
        lighting coefficients (diffuse/ambient/specular/emissive/shininess).
    :param face: which polygon faces the material parameters apply to.
    :raises ValueError: if the material's vertex format is not supported.
    """
    # Lazily build and cache the GLfloat array from the raw vertex list.
    if material.gl_floats is None:
        material.gl_floats = (GLfloat * len(material.vertices))(*material.vertices)
        # true division: stored as float, cast to int at glDrawArrays below
        material.triangle_count = len(material.vertices) / material.vertex_size
    vertex_format = VERTEX_FORMATS.get(material.vertex_format)
    if not vertex_format:
        raise ValueError("Vertex format {} not supported by pyglet".format(material.vertex_format))
    # Save client/server GL state so this call has no lasting side effects.
    glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT)
    glPushAttrib(GL_CURRENT_BIT | GL_ENABLE_BIT | GL_LIGHTING_BIT)
    glEnable(GL_CULL_FACE)
    glCullFace(GL_BACK)
    # Fall back to ambient texture if no diffuse
    texture = material.texture or material.texture_ambient
    if texture and material.has_uvs:
        bind_texture(texture)
    else:
        glDisable(GL_TEXTURE_2D)
    glMaterialfv(face, GL_DIFFUSE, gl_light(material.diffuse))
    glMaterialfv(face, GL_AMBIENT, gl_light(material.ambient))
    glMaterialfv(face, GL_SPECULAR, gl_light(material.specular))
    glMaterialfv(face, GL_EMISSION, gl_light(material.emissive))
    # GL clamps shininess to [0, 128]
    glMaterialf(face, GL_SHININESS, min(128.0, material.shininess))
    glEnable(GL_LIGHT0)
    # Lighting requires normals; without them fall back to unlit rendering.
    if material.has_normals:
        glEnable(GL_LIGHTING)
    else:
        glDisable(GL_LIGHTING)
    glInterleavedArrays(vertex_format, 0, material.gl_floats)
    glDrawArrays(GL_TRIANGLES, 0, int(material.triangle_count))
    glPopAttrib()
    glPopClientAttrib()
def cli(ctx, debug, config, path, cache):
    """
    \U0001F98A Inspect and search through the complexity of your source code.

    To get started, run setup:

        $ wily setup

    To reindex any changes in your source code:

        $ wily build <src>

    Then explore basic metrics with:

        $ wily report <file>

    You can also graph specific metrics in a browser with:

        $ wily graph <file> <metric>
    """
    # Click group entry point: stash shared state on the context object.
    ctx.ensure_object(dict)
    ctx.obj["DEBUG"] = debug
    if debug:
        logger.setLevel("DEBUG")
    else:
        logger.setLevel("INFO")
    ctx.obj["CONFIG"] = load_config(config)
    # Command-line path/cache flags override whatever the config file says.
    if path:
        logger.debug(f"Fixing path to {path}")
        ctx.obj["CONFIG"].path = path
    if cache:
        logger.debug(f"Fixing cache to {cache}")
        ctx.obj["CONFIG"].cache_path = cache
    logger.debug(f"Loaded configuration from {config}")
def mkdir_chown(paths, user_group=None, permissions='ug=rwX,o=rX', create_parent=True, check_if_exists=False, recursive=False):
    """
    Generates a unix command line for creating a directory and assigning permissions to it. Shortcut to a combination of
    :func:`~mkdir`, :func:`~chown`, and :func:`~chmod`.

    Note that if `check_if_exists` has been set to ``True``, and the directory is found, `mkdir` is not called, but
    `user_group` and `permissions` will still be applied.

    :param paths: Can be a single path string, or a list or tuple of path strings.
    :type paths: unicode | str | tuple[unicode | str] | list[unicode | str]
    :param user_group: Optional owner of the directory. For notation, see :func:`~get_user_group`.
    :type user_group: unicode | str | int | tuple
    :param permissions: Optional permission mode, in any notation accepted by the unix `chmod` command.
        Default is ``ug=rwX,o=rX``.
    :type permissions: unicode | str
    :param create_parent: Parent directories are created if not present (`-p` argument to `mkdir`).
    :type create_parent: bool
    :param check_if_exists: Prior to creating the directory, checks if it already exists.
    :type check_if_exists: bool
    :param recursive: Apply permissions and owner change recursively.
    :type recursive: bool
    :return: Unix shell command line.
    :rtype: unicode | str
    """
    def _generate_str(path):
        # Chain mkdir/chown/chmod with '&&' so later steps only run if the
        # earlier ones succeeded; skipped steps are filtered out.
        mkdir_str = mkdir(path, create_parent, check_if_exists)
        chown_str = chown(user_group, path, recursive) if user_group else None
        chmod_str = chmod(permissions, path, recursive) if permissions else None
        return ' && '.join(n for n in (mkdir_str, chown_str, chmod_str) if n)
    # Multiple paths become independent ';'-separated command groups.
    if isinstance(paths, (tuple, list)):
        return '; '.join((_generate_str(path) for path in paths))
    return _generate_str(paths)
def tags(self, extra_params=None):
    """All Tags in this Space.

    :param extra_params: optional extra query parameters forwarded to the API.
    :return: the parsed JSON response as Tag objects.
    """
    return self.api._get_json(
        Tag,
        space=self,
        rel_path=self._build_rel_path('tags'),
        extra_params=extra_params,
    )
def generate_pagination(total_page_num, current_page_num, page_size=None):
    """
    Build a pagination window of at most ``page_size`` pages around the
    current page (four pages before, five after, clamped to valid pages).

    :param total_page_num: total number of pages.
    :param current_page_num: the currently displayed page.
    :param page_size: window size; defaults to the module-level PAGE_SIZE
        (kept as a fallback for backward compatibility).
    :return: dict with 'start', 'end' and 'current' keys.

    >>> generate_pagination(total_page_num=9, current_page_num=1)
    {'start': 1, 'end': 9, 'current': 1}
    >>> generate_pagination(total_page_num=20, current_page_num=12)
    {'start': 8, 'end': 17, 'current': 12}
    >>> generate_pagination(total_page_num=20, current_page_num=4)
    {'start': 1, 'end': 10, 'current': 4}
    >>> generate_pagination(total_page_num=16, current_page_num=14)
    {'start': 7, 'end': 16, 'current': 14}
    """
    if page_size is None:
        page_size = PAGE_SIZE
    pagination = {'start': 1, 'end': page_size, 'current': current_page_num}
    if total_page_num <= page_size:
        pagination['end'] = total_page_num
    else:
        # window split around the current page (4 before / 5 after when
        # page_size is 10), derived from page_size instead of hard-coded
        before = (page_size - 1) // 2
        after = page_size - 1 - before
        pagination['start'] = current_page_num - before
        pagination['end'] = current_page_num + after
        if pagination['start'] < 1:
            pagination['start'] = 1
            pagination['end'] = page_size
        if pagination['end'] > total_page_num:
            pagination['end'] = total_page_num
            # was a hard-coded "- 9": keep the window width tied to page_size
            pagination['start'] = total_page_num - (page_size - 1)
    return pagination
def by_name(self, name, archived=False, limit=None, page=None):
    """Return a project by its name.

    This only works with the exact name of the project; partial matches are
    not supported by the underlying lookup.

    :param name: exact project name.
    :param archived: whether to include archived projects.
    :param limit: optional page size forwarded to the parent lookup.
    :param page: optional page number forwarded to the parent lookup.
    """
    # this only works with the exact name
    return super(Projects, self).by_name(name, archived=archived,
                                         limit=limit, page=page)
def sinus_values_by_hz(framerate, hz, max_value):
    """
    Create sinus values with the given framerate and Hz.
    Note:
    We skip the first zero-crossing, so the values can be used directy in a loop.

    >>> values = sinus_values_by_hz(22050, 1200, 255)
    >>> len(values) # 22050 / 1200Hz = 18,375
    18
    >>> values
    (87, 164, 221, 251, 251, 221, 164, 87, 0, -87, -164, -221, -251, -251, -221, -164, -87, 0)

    >>> values = sinus_values_by_hz(44100, 1200, 255)
    >>> len(values) # 44100 / 1200Hz = 36,75
    37
    """
    # One period holds framerate/hz samples; request one extra so that after
    # dropping the leading zero-crossing a full period remains.
    samples_per_period = int(round(float(framerate) / float(hz))) + 1
    values = tuple(sinus_values(samples_per_period, max_value))
    # Skip the first zero-crossing so the result loops seamlessly.
    return values[1:]
def _set_interface_te_ospf_conf(self, v, load=False):
    """
    Setter method for interface_te_ospf_conf, mapped from YANG variable /interface/tengigabitethernet/ip/interface_te_ospf_conf (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interface_te_ospf_conf is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interface_te_ospf_conf() directly.

    Note: pyangbind-generated code; regenerate from the YANG model rather
    than editing the YANGDynClass arguments by hand.
    """
    # allow passing a raw value that knows its own YANG type
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=interface_te_ospf_conf.interface_te_ospf_conf, is_container='container', presence=False, yang_name="interface-te-ospf-conf", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'OSPFTeInterfaceCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """interface_te_ospf_conf must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=interface_te_ospf_conf.interface_te_ospf_conf, is_container='container', presence=False, yang_name="interface-te-ospf-conf", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'OSPFTeInterfaceCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)""",
        })
    self.__interface_te_ospf_conf = t
    if hasattr(self, '_set'):
        self._set()
def write_service_double_file(target_root, service_name, rendered):
    """Render syntactically valid python service double code.

    Writes ``rendered`` to
    ``<target_root>/snapstore_schemas/service_doubles/<service_name>.py``;
    the parent directories are expected to exist.
    """
    double_path = os.path.join(
        target_root, 'snapstore_schemas', 'service_doubles',
        '%s.py' % service_name)
    with open(double_path, 'w') as double_file:
        double_file.write(rendered)
def cut_across_axis(self, dim, minval=None, maxval=None):
    '''
    Cut the mesh by a plane, discarding vertices that lie behind that
    plane. Or cut the mesh by two parallel planes, discarding vertices
    that lie outside them.

    The region to keep is defined by an axis of perpendicularity,
    specified by `dim`: 0 means x, 1 means y, 2 means z. `minval`
    and `maxval` indicate the portion of that axis to keep.

    Return the original indices of the kept vertices.
    '''
    coords = self.v[:, dim]
    # Start by keeping every vertex, then intersect with each bound.
    keep = np.ones(len(self.v), dtype=bool)
    if minval is not None:
        keep &= coords >= minval
    if maxval is not None:
        keep &= coords <= maxval
    kept_indices = np.flatnonzero(keep)
    self.keep_vertices(kept_indices)
    return kept_indices
java | public static String fileTypeAsString( int fileType ) {
String result;
switch( fileType ) {
case AGGREGATOR:
result = "aggregator";
break;
case GRAPH:
result = "graph";
break;
case INSTANCE:
result = "intsnace";
break;
case UNDETERMINED:
result = "undetermined";
break;
default:
result = "unknown";
break;
}
return result;
} |
/**
 * Parses an OPML 2.0 document: delegates the common parsing to the parent
 * parser, then reads the 2.0-specific head fields (ownerId, docs) and tags
 * the feed type as "opml_2.0". The docs URL defaults to the OPML 2 spec
 * when absent.
 *
 * @param document the XML document to parse
 * @param validate whether to validate (passed through to the parent parser)
 * @param locale   parsing locale
 * @return the parsed {@link Opml} feed
 * @throws IllegalArgumentException if the document is not a supported feed
 * @throws FeedException            if parsing fails
 */
@Override
public WireFeed parse(final Document document, final boolean validate, final Locale locale) throws IllegalArgumentException, FeedException {
    Opml opml;
    opml = (Opml) super.parse(document, validate, locale);
    final Element head = document.getRootElement().getChild("head");
    if (head != null) {
        opml.setOwnerId(head.getChildTextTrim("ownerId"));
        opml.setDocs(head.getChildTextTrim("docs"));
        if (opml.getDocs() == null) {
            opml.setDocs("http://www.opml.org/spec2");
        }
    }
    opml.setFeedType("opml_2.0");
    return opml;
}
def serializer(self, create=False, many=False):
    """
    Decorator to mark a :class:`Serializer` subclass for a specific purpose, ie,
    to be used during object creation **or** for serializing lists of objects.

    :param create: Whether or not this serializer is for object creation.
    :param many: Whether or not this serializer is for lists of objects.
    :raises Exception: if both ``create`` and ``many`` are True.
    """
    if create and many:
        raise Exception('Can only set one of `create` or `many` to `True`')
    # Resolve the serializer's role once, outside the wrapper.
    if create:
        kind = 'create'
    elif many:
        kind = 'many'
    else:
        kind = 'all'

    def wrapper(cls):
        cls.__kind__ = kind
        return cls
    return wrapper
/**
 * Returns the optional parts registered for the given message content URI.
 *
 * @param messageContent the message content URI; may be null
 * @return an immutable copy of the registered optional parts, or an empty
 *         set when {@code messageContent} is null
 */
@Override
public Set<URI> listOptionalParts(URI messageContent) {
    if (messageContent == null) {
        return ImmutableSet.of();
    }
    // defensive immutable copy so callers cannot mutate the internal map
    return ImmutableSet.copyOf(this.messageOptionalPartsMap.get(messageContent));
}
def put_readme(self, content):
    """Store the readme descriptive metadata.

    :param content: readme text to store under the readme key.
    """
    logger.debug("Putting readme")
    key = self.get_readme_key()
    self.put_text(key, content)
def filter_dict(iterable, keys):
    """
    filters keys of each element of iterable

    $.(a,b) returns all objects from array that have at least one of the keys:
    [1,"aa",{"a":2,"c":3},{"c":3},{"a":1,"b":2}].(a,b) -> [{"a":2},{"a":1,"b":2}]
    """
    # Accept a single key as a convenience; normalize to a list.
    if type(keys) is not list:
        keys = [keys]
    for element in iterable:
        try:
            picked = {}
            for key in keys:
                try:
                    picked[key] = element[key]
                except KeyError:
                    pass
            # Only yield elements that had at least one of the keys.
            if picked:
                yield picked
        except Exception:
            # Best-effort: elements that are not subscriptable by these keys
            # (ints, plain strings, ...) are silently skipped.
            pass
def plot2d(self, color='default', alpha=1, ret=True):
    """
    Generates a 2D plot for the z=0 Polygon projection.

    :param color: Polygon color.
    :type color: matplotlib color
    :param alpha: Opacity.
    :type alpha: float
    :param ret: If True, returns the figure. It can be used to add
        more elements to the plot or to modify it.
    :type ret: bool
    :returns: None, axes
    :rtype: None, matplotlib axes
    """
    import matplotlib.pyplot as plt
    import matplotlib.patches as patches
    path = self.get_path()
    domain = self.get_domain()[:, :2]
    # was `color is 'default'`: identity comparison on a string literal only
    # works by interning accident and warns on modern CPython; use equality.
    if color == 'default':
        color = 'b'
    # Plot
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.add_patch(patches.PathPatch(path, facecolor=color, lw=1,
                                   edgecolor='k', alpha=alpha))
    ax.set_xlim(domain[0, 0], domain[1, 0])
    ax.set_ylim(domain[0, 1], domain[1, 1])
    if ret:
        return ax
/**
 * Instantiates a {@code Problem} implementation by fully-qualified class name
 * using its no-argument constructor.
 *
 * @param <S> solution type handled by the problem
 * @param problemName fully-qualified class name of the problem to load
 * @return a new problem instance
 * @throws JMetalException wrapping any reflective failure (missing class,
 *         missing no-arg constructor, abstract class, access restriction,
 *         or an exception thrown by the constructor itself)
 */
@SuppressWarnings("unchecked")
public static <S> Problem<S> loadProblem(String problemName) {
    Problem<S> problem ;
    try {
      problem = (Problem<S>)Class.forName(problemName).getConstructor().newInstance() ;
    } catch (InstantiationException e) {
      throw new JMetalException("newInstance() cannot instantiate (abstract class)", e) ;
    } catch (IllegalAccessException e) {
      throw new JMetalException("newInstance() is not usable (uses restriction)", e) ;
    } catch (InvocationTargetException e) {
      throw new JMetalException("an exception was thrown during the call of newInstance()", e) ;
    } catch (NoSuchMethodException e) {
      throw new JMetalException("getConstructor() was not able to find the constructor without arguments", e) ;
    } catch (ClassNotFoundException e) {
      throw new JMetalException("Class.forName() did not recognized the name of the class", e) ;
    }
    return problem ;
}
java | protected static String replaceShorthand(char c, Map<String, String> headers,
TimeZone timeZone, boolean needRounding, int unit, int roundDown,
boolean useLocalTimestamp, long ts) {
String timestampHeader = null;
try {
if (!useLocalTimestamp) {
timestampHeader = headers.get("timestamp");
Preconditions.checkNotNull(timestampHeader, "Expected timestamp in " +
"the Flume event headers, but it was null");
ts = Long.valueOf(timestampHeader);
} else {
timestampHeader = String.valueOf(ts);
}
} catch (NumberFormatException e) {
throw new RuntimeException("Flume wasn't able to parse timestamp header"
+ " in the event to resolve time based bucketing. Please check that"
+ " you're correctly populating timestamp header (for example using"
+ " TimestampInterceptor source interceptor).", e);
}
if (needRounding) {
ts = roundDown(roundDown, unit, ts, timeZone);
}
// It's a date
String formatString = "";
switch (c) {
case '%':
return "%";
case 'a':
formatString = "EEE";
break;
case 'A':
formatString = "EEEE";
break;
case 'b':
formatString = "MMM";
break;
case 'B':
formatString = "MMMM";
break;
case 'c':
formatString = "EEE MMM d HH:mm:ss yyyy";
break;
case 'd':
formatString = "dd";
break;
case 'e':
formatString = "d";
break;
case 'D':
formatString = "MM/dd/yy";
break;
case 'H':
formatString = "HH";
break;
case 'I':
formatString = "hh";
break;
case 'j':
formatString = "DDD";
break;
case 'k':
formatString = "H";
break;
case 'l':
formatString = "h";
break;
case 'm':
formatString = "MM";
break;
case 'M':
formatString = "mm";
break;
case 'n':
formatString = "M";
break;
case 'p':
formatString = "a";
break;
case 's':
return "" + (ts / 1000);
case 'S':
formatString = "ss";
break;
case 't':
// This is different from unix date (which would insert a tab character
// here)
return timestampHeader;
case 'y':
formatString = "yy";
break;
case 'Y':
formatString = "yyyy";
break;
case 'z':
formatString = "ZZZ";
break;
default:
// LOG.warn("Unrecognized escape in event format string: %" + c);
return "";
}
SimpleDateFormat format = getSimpleDateFormat(formatString);
if (timeZone != null) {
format.setTimeZone(timeZone);
} else {
format.setTimeZone(TimeZone.getDefault());
}
Date date = new Date(ts);
return format.format(date);
} |
java | public static RaidInfo getFileRaidInfo(final FileStatus stat,
Configuration conf, boolean skipHarChecking)
throws IOException {
// now look for the parity file
ParityFilePair ppair = null;
for (Codec c : Codec.getCodecs()) {
ppair = ParityFilePair.getParityFile(c, stat, conf, skipHarChecking);
if (ppair != null) {
return new RaidInfo(c, ppair, c.parityLength);
}
}
return new RaidInfo(null, ppair, 0);
} |
def get_hash(path, form='sha256', chunk_size=65536):
    '''
    Get the hash sum of a file

    This is better than ``get_sum`` for the following reasons:
        - It does not read the entire file into memory.
        - It does not return a string on error. The returned value of
          ``get_sum`` cannot really be trusted since it is vulnerable to
          collisions: ``get_sum(..., 'xyz') == 'Hash xyz not supported'``

    :param path: path of the file to hash
    :param form: name of any algorithm exposed by :mod:`hashlib` (e.g. 'md5')
    :param chunk_size: number of bytes read per iteration
    :raises ValueError: if ``form`` does not name a hashlib algorithm
    '''
    # getattr with a default replaces the hasattr/getattr double lookup.
    hash_type = getattr(hashlib, form, None)
    if hash_type is None:
        raise ValueError('Invalid hash type: {0}'.format(form))
    with salt.utils.files.fopen(path, 'rb') as ifile:
        hash_obj = hash_type()
        # Read the file in chunks, not the entire file at once.
        for chunk in iter(lambda: ifile.read(chunk_size), b''):
            hash_obj.update(chunk)
        return hash_obj.hexdigest()
def send_bcm(bcm_socket, data):
    """
    Send raw frame to a BCM socket and handle errors.

    :param bcm_socket: an open broadcast-manager CAN socket
    :param data: raw bytes of the BCM frame to send
    :return: number of bytes sent
    :raises can.CanError: with a diagnostic message for known errno values
    """
    try:
        return bcm_socket.send(data)
    except OSError as e:
        base = "Couldn't send CAN BCM frame. OS Error {}: {}\n".format(e.errno, e.strerror)
        if e.errno == errno.EINVAL:
            raise can.CanError(base + "You are probably referring to a non-existing frame.")
        elif e.errno == errno.ENETDOWN:
            raise can.CanError(base + "The CAN interface appears to be down.")
        elif e.errno == errno.EBADF:
            raise can.CanError(base + "The CAN socket appears to be closed.")
        else:
            # Bare `raise` re-raises the current exception with its original
            # traceback intact (unlike `raise e`, which resets it on Python 2).
            raise
java | public com.google.api.ads.admanager.axis.v201805.PricingMethod getPricingMethod() {
return pricingMethod;
} |
python | def cluster_get_keys_in_slots(self, slot, count, *, encoding):
"""Return local key names in the specified hash slot."""
return self.execute(b'CLUSTER', b'GETKEYSINSLOT', slot, count,
encoding=encoding) |
java | public static Pattern createFilePattern(String filePattern) {
filePattern = StringUtils.isNullOrBlank(filePattern) ? "\\*" : filePattern;
filePattern = filePattern.replaceAll("\\.", "\\.");
filePattern = filePattern.replaceAll("\\*", ".*");
return Pattern.compile("^" + filePattern + "$");
} |
java | @Override
public Object call(Context cx, Scriptable scope, Scriptable thisObj,
Object[] args)
{
Object result;
boolean checkMethodResult = false;
int argsLength = args.length;
for (int i = 0; i < argsLength; i++) {
// flatten cons-strings before passing them as arguments
if (args[i] instanceof ConsString) {
args[i] = args[i].toString();
}
}
if (parmsLength < 0) {
if (parmsLength == VARARGS_METHOD) {
Object[] invokeArgs = { cx, thisObj, args, this };
result = member.invoke(null, invokeArgs);
checkMethodResult = true;
} else {
boolean inNewExpr = (thisObj == null);
Boolean b = inNewExpr ? Boolean.TRUE : Boolean.FALSE;
Object[] invokeArgs = { cx, args, this, b };
result = (member.isCtor())
? member.newInstance(invokeArgs)
: member.invoke(null, invokeArgs);
}
} else {
if (!isStatic) {
Class<?> clazz = member.getDeclaringClass();
if (!clazz.isInstance(thisObj)) {
boolean compatible = false;
if (thisObj == scope) {
Scriptable parentScope = getParentScope();
if (scope != parentScope) {
// Call with dynamic scope for standalone function,
// use parentScope as thisObj
compatible = clazz.isInstance(parentScope);
if (compatible) {
thisObj = parentScope;
}
}
}
if (!compatible) {
// Couldn't find an object to call this on.
throw ScriptRuntime.typeError1("msg.incompat.call",
functionName);
}
}
}
Object[] invokeArgs;
if (parmsLength == argsLength) {
// Do not allocate new argument array if java arguments are
// the same as the original js ones.
invokeArgs = args;
for (int i = 0; i != parmsLength; ++i) {
Object arg = args[i];
Object converted = convertArg(cx, scope, arg, typeTags[i]);
if (arg != converted) {
if (invokeArgs == args) {
invokeArgs = args.clone();
}
invokeArgs[i] = converted;
}
}
} else if (parmsLength == 0) {
invokeArgs = ScriptRuntime.emptyArgs;
} else {
invokeArgs = new Object[parmsLength];
for (int i = 0; i != parmsLength; ++i) {
Object arg = (i < argsLength)
? args[i]
: Undefined.instance;
invokeArgs[i] = convertArg(cx, scope, arg, typeTags[i]);
}
}
if (member.isMethod()) {
result = member.invoke(thisObj, invokeArgs);
checkMethodResult = true;
} else {
result = member.newInstance(invokeArgs);
}
}
if (checkMethodResult) {
if (hasVoidReturn) {
result = Undefined.instance;
} else if (returnTypeTag == JAVA_UNSUPPORTED_TYPE) {
result = cx.getWrapFactory().wrap(cx, scope, result, null);
}
// XXX: the code assumes that if returnTypeTag == JAVA_OBJECT_TYPE
// then the Java method did a proper job of converting the
// result to JS primitive or Scriptable to avoid
// potentially costly Context.javaToJS call.
}
return result;
} |
def read_file(filename):
    """
    Reads the non-blank lines of a file into a list, and returns the list

    :param filename: String - path and name of the file
    :return: List - stripped, non-blank lines within the file
    """
    lines = []
    with open(filename) as f:
        for line in f:
            # Strip once per line instead of twice; skip blank lines.
            stripped = line.strip()
            if stripped:
                lines.append(stripped)
    return lines
/**
 * Checks whether the given token matches any of this pattern token's
 * locally-scoped exceptions.
 *
 * @param token analyzed token to test
 * @return true if a local exception (exceptionValidNext == false) matches
 */
public boolean isExceptionMatched(AnalyzedToken token) {
    if (exceptionSet) {
        for (PatternToken testException : exceptionList) {
            // Exceptions scoped to the *next* token are ignored here.
            if (!testException.exceptionValidNext) {
                if (testException.isMatched(token)) {
                    return true;
                }
            }
        }
    }
    return false;
}
def coord_pyramid(coord, zoom_start, zoom_stop):
    """
    generate full pyramid for coord

    Generate the full pyramid for a single coordinate. Note that zoom_stop is
    exclusive.

    :param coord: root tile coordinate (must expose a ``zoom`` attribute)
    :param zoom_start: inclusive minimum zoom to emit
    :param zoom_stop: exclusive maximum zoom to emit
    :yields: ``coord`` itself (when within range) followed by its descendants
    """
    # Emit the root coordinate only if it is already at or past zoom_start.
    if zoom_start <= coord.zoom:
        yield coord
    # coord_children_range (defined elsewhere in this module) is expected to
    # yield all descendants up to, but not including, zoom_stop.
    for child_coord in coord_children_range(coord, zoom_stop):
        if zoom_start <= child_coord.zoom:
            yield child_coord
java | public static Pair<AbstractTopology, Set<Integer>> mutateRemoveHosts(AbstractTopology currentTopology,
Set<Integer> removalHosts) {
Set<Integer> removalPartitionIds = getPartitionIdsForHosts(currentTopology, removalHosts);
return Pair.of(new AbstractTopology(currentTopology, removalHosts, removalPartitionIds), removalPartitionIds);
} |
def substitute(template, mapping=None):
    """
    Render the template *template*. *mapping* is a :class:`dict` with
    values to add to the template.

    :param template: template string containing ``$name`` placeholders
    :param mapping: placeholder values; defaults to an empty dict
    :return: the substituted string
    """
    values = {} if mapping is None else mapping
    return Template(template).substitute(values)
def push(src, dest):
    """
    Push object from host to target

    :param src: string path to source object on host
    :param dest: string destination path on target
    :return: result of _exec_command() execution
    """
    # Builds "adb push <src> <dest>"; command constants come from module `v`.
    adb_full_cmd = [v.ADB_COMMAND_PREFIX, v.ADB_COMMAND_PUSH, src, dest]
    return _exec_command(adb_full_cmd)
java | public void error(final CellField<T> cellField) {
ArgUtils.notNull(cellField, "cellField");
error(cellField, getMessageKey(), getMessageVariables(cellField));
} |
python | def _api_scrape(json_inp, ndx):
"""
Internal method to streamline the getting of data from the json
Args:
json_inp (json): json input from our caller
ndx (int): index where the data is located in the api
Returns:
If pandas is present:
DataFrame (pandas.DataFrame): data set from ndx within the
API's json
else:
A dictionary of both headers and values from the page
"""
try:
headers = json_inp['resultSets'][ndx]['headers']
values = json_inp['resultSets'][ndx]['rowSet']
except KeyError:
# This is so ugly but this is what you get when your data comes out
# in not a standard format
try:
headers = json_inp['resultSet'][ndx]['headers']
values = json_inp['resultSet'][ndx]['rowSet']
except KeyError:
# Added for results that only include one set (ex. LeagueLeaders)
headers = json_inp['resultSet']['headers']
values = json_inp['resultSet']['rowSet']
if HAS_PANDAS:
return DataFrame(values, columns=headers)
else:
# Taken from www.github.com/bradleyfay/py-goldsberry
return [dict(zip(headers, value)) for value in values] |
java | public SortedSet<TypeElement> implementingClasses(TypeElement typeElement) {
SortedSet<TypeElement> result = get(implementingClasses, typeElement);
SortedSet<TypeElement> intfcs = allSubClasses(typeElement, false);
// If class x implements a subinterface of typeElement, then it follows
// that class x implements typeElement.
Iterator<TypeElement> subInterfacesIter = intfcs.iterator();
while (subInterfacesIter.hasNext()) {
Iterator<TypeElement> implementingClassesIter
= implementingClasses(subInterfacesIter.next()).iterator();
while (implementingClassesIter.hasNext()) {
TypeElement c = implementingClassesIter.next();
if (!result.contains(c)) {
result.add(c);
}
}
}
return result;
} |
def block2(self, value):
    """
    Set the Block2 option.

    :param value: the Block2 value as a (num, m, size) tuple -- block number,
        "more blocks" flag, and block size in bytes.
    """
    option = Option()
    option.number = defines.OptionRegistry.BLOCK2.number
    num, m, size = value
    # Encode the block size as SZX: the chain below picks the smallest szx in
    # 0..6 such that size <= 2 ** (szx + 4); sizes above 512 all map to 6.
    if size > 512:
        szx = 6
    elif 256 < size <= 512:
        szx = 5
    elif 128 < size <= 256:
        szx = 4
    elif 64 < size <= 128:
        szx = 3
    elif 32 < size <= 64:
        szx = 2
    elif 16 < size <= 32:
        szx = 1
    else:
        szx = 0
    # Pack NUM (bits 4+), M (bit 3) and SZX (bits 0-2) into a single integer.
    value = (num << 4)
    value |= (m << 3)
    value |= szx
    option.value = value
    self.add_option(option)
java | @Deprecated
public StreamVariantsRequest getStreamVariantsRequest(String variantSetId) {
return StreamVariantsRequest.newBuilder()
.setVariantSetId(variantSetId)
.setReferenceName(referenceName)
.setStart(start)
.setEnd(end)
.build();
} |
java | @Override
public List<MonitorLine> getLastLine(Integer count) throws Exception {
String line;
final List<MonitorLine> result = new ArrayList<MonitorLine>();
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(getFilename()));
int i = 0;
line = in.readLine();
while (line != null) {
result.add(new MonitorLine(i, line));
i++;
line = in.readLine();
}
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException ignore) {
}
}
if (result.size() <= count) {
return result;
} else {
return result.subList(result.size() - count, result.size());
}
} |
java | public static double calculateSlope( GridNode node, double flowValue ) {
double value = doubleNovalue;
if (!isNovalue(flowValue)) {
int flowDir = (int) flowValue;
if (flowDir != 10) {
Direction direction = Direction.forFlow(flowDir);
double distance = direction.getDistance(node.xRes, node.yRes);
double currentElevation = node.elevation;
double nextElevation = node.getElevationAt(direction);
value = (currentElevation - nextElevation) / distance;
}
}
return value;
} |
/**
 * Checks whether the given index exists by issuing {@code GET /<index>}
 * through the low-level REST client.
 *
 * @param index name of the index to check
 * @return true if the index exists; false when the server answers 404
 * @throws IOException on transport failures or non-404 error responses
 */
public boolean isExistingIndex(String index) throws IOException {
    logger.debug("is existing index [{}]", index);
    try {
        Response restResponse = lowLevelClient.performRequest("GET", "/" + index);
        logger.trace("get index metadata response: {}", asMap(restResponse));
        return true;
    } catch (ResponseException e) {
        // 404 means "index does not exist"; any other status is a real error.
        if (e.getResponse().getStatusLine().getStatusCode() == 404) {
            logger.debug("index [{}] does not exist", index);
            return false;
        }
        throw e;
    }
}
java | @Override
public void execute(final Object context, int uiTaskId, final Object... args) {
Activity activity = ContextUtils.asActivity(context);
Set<Method> methods = MethodUtils.getAllMethods(context, UI.class);
for (final Method method : methods) {
final UI uiTask = method.getAnnotation(UI.class);
if(uiTask.value() == uiTaskId) {
final Runnable runnable = new Runnable() {
@Override
public void run() {
try {
if(!method.isAccessible()) method.setAccessible(true);
method.invoke(context, args);
}
catch (Exception e) {
StringBuilder stringBuilder = new StringBuilder()
.append("Failed to execute UI task ")
.append(method.getName())
.append(" on ")
.append(context.getClass().getName())
.append(" with arguments ")
.append(Arrays.toString(args))
.append(". ");
Log.e(UITaskService.class.getName(), stringBuilder.toString(), e);
}
}
};
if(uiTask.delay() > 0l) {
try {
Thread.sleep(uiTask.delay());
}
catch (InterruptedException ie) {
StringBuilder stringBuilder = new StringBuilder()
.append("UI task delay of ")
.append(uiTask.delay())
.append(" for ")
.append(method.getName())
.append(" on ")
.append(context.getClass().getName())
.append(" was interrupted!");
Log.e(getClass().getSimpleName(), stringBuilder.toString(), ie);
}
}
activity.runOnUiThread(runnable);
break;
}
}
} |
java | public void marshall(ListTagsRequest listTagsRequest, ProtocolMarshaller protocolMarshaller) {
if (listTagsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listTagsRequest.getResource(), RESOURCE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public void setAssessmentTargetArns(java.util.Collection<String> assessmentTargetArns) {
if (assessmentTargetArns == null) {
this.assessmentTargetArns = null;
return;
}
this.assessmentTargetArns = new java.util.ArrayList<String>(assessmentTargetArns);
} |
/**
 * Downloads the resource at {@code url} into {@code toDir}, creating the
 * directory if necessary.
 *
 * @param url source URL to download from
 * @param toDir destination directory (created via mkdirs)
 * @return the downloaded file, or null if nothing was written
 * @throws AlipayApiException if the response is not an octet stream (the
 *         response body becomes the exception message) or on I/O failure
 */
public static File download(String url, File toDir) throws AlipayApiException {
    toDir.mkdirs();
    HttpURLConnection conn = null;
    OutputStream output = null;
    File file = null;
    try {
        conn = getConnection(new URL(url));
        String ctype = conn.getContentType();
        // Only an octet-stream response is treated as a downloadable file;
        // anything else is assumed to be an error payload.
        if (CTYPE_OCTET.equals(ctype)) {
            String fileName = getFileName(conn);
            file = new File(toDir, fileName);
            output = new FileOutputStream(file);
            copy(conn.getInputStream(), output);
        } else {
            String rsp = WebUtils.getResponseAsString(conn);
            throw new AlipayApiException(rsp);
        }
    } catch (IOException e) {
        // NOTE(review): only the message is kept; the IOException cause is
        // lost. If AlipayApiException has a (String, Throwable) constructor,
        // prefer passing `e` as the cause.
        throw new AlipayApiException(e.getMessage());
    } finally {
        closeQuietly(output);
        if (conn != null) {
            conn.disconnect();
        }
    }
    return file;
}
java | @SuppressWarnings({ "rawtypes", "unchecked" })
protected String getCommonSuperClass(final String type1, final String type2)
{
Class c, d;
try {
c = Class.forName(type1.replace('/', '.'));
d = Class.forName(type2.replace('/', '.'));
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
} |
java | public boolean isToBeDeleted()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
SibTr.entry(tc, "isToBeDeleted");
SibTr.exit(tc, "isToBeDeleted", Boolean.valueOf(toBeDeleted));
}
return toBeDeleted;
} |
java | public MutableIntSet asSet(List<T> list, MutableIntSet setToAppend)
{
asSet(list, this, setToAppend);
return setToAppend;
} |
java | protected <T> T getInstanceByType(BeanManager manager, Class<T> type, Annotation... bindings) {
final Bean<?> bean = manager.resolve(manager.getBeans(type, bindings));
if (bean == null) {
throw CommonLogger.LOG.unableToResolveBean(type, Arrays.asList(bindings));
}
CreationalContext<?> cc = manager.createCreationalContext(bean);
return type.cast(manager.getReference(bean, type, cc));
} |
python | def parse_pseudo_class(self, sel, m, has_selector, iselector, is_html):
"""Parse pseudo class."""
complex_pseudo = False
pseudo = util.lower(css_unescape(m.group('name')))
if m.group('open'):
complex_pseudo = True
if complex_pseudo and pseudo in PSEUDO_COMPLEX:
has_selector = self.parse_pseudo_open(sel, pseudo, has_selector, iselector, m.end(0))
elif not complex_pseudo and pseudo in PSEUDO_SIMPLE:
if pseudo == ':root':
sel.flags |= ct.SEL_ROOT
elif pseudo == ':defined':
sel.flags |= ct.SEL_DEFINED
is_html = True
elif pseudo == ':scope':
sel.flags |= ct.SEL_SCOPE
elif pseudo == ':empty':
sel.flags |= ct.SEL_EMPTY
elif pseudo in (':link', ':any-link'):
sel.selectors.append(CSS_LINK)
elif pseudo == ':checked':
sel.selectors.append(CSS_CHECKED)
elif pseudo == ':default':
sel.selectors.append(CSS_DEFAULT)
elif pseudo == ':indeterminate':
sel.selectors.append(CSS_INDETERMINATE)
elif pseudo == ":disabled":
sel.selectors.append(CSS_DISABLED)
elif pseudo == ":enabled":
sel.selectors.append(CSS_ENABLED)
elif pseudo == ":required":
sel.selectors.append(CSS_REQUIRED)
elif pseudo == ":optional":
sel.selectors.append(CSS_OPTIONAL)
elif pseudo == ":read-only":
sel.selectors.append(CSS_READ_ONLY)
elif pseudo == ":read-write":
sel.selectors.append(CSS_READ_WRITE)
elif pseudo == ":in-range":
sel.selectors.append(CSS_IN_RANGE)
elif pseudo == ":out-of-range":
sel.selectors.append(CSS_OUT_OF_RANGE)
elif pseudo == ":placeholder-shown":
sel.selectors.append(CSS_PLACEHOLDER_SHOWN)
elif pseudo == ':first-child':
sel.nth.append(ct.SelectorNth(1, False, 0, False, False, ct.SelectorList()))
elif pseudo == ':last-child':
sel.nth.append(ct.SelectorNth(1, False, 0, False, True, ct.SelectorList()))
elif pseudo == ':first-of-type':
sel.nth.append(ct.SelectorNth(1, False, 0, True, False, ct.SelectorList()))
elif pseudo == ':last-of-type':
sel.nth.append(ct.SelectorNth(1, False, 0, True, True, ct.SelectorList()))
elif pseudo == ':only-child':
sel.nth.extend(
[
ct.SelectorNth(1, False, 0, False, False, ct.SelectorList()),
ct.SelectorNth(1, False, 0, False, True, ct.SelectorList())
]
)
elif pseudo == ':only-of-type':
sel.nth.extend(
[
ct.SelectorNth(1, False, 0, True, False, ct.SelectorList()),
ct.SelectorNth(1, False, 0, True, True, ct.SelectorList())
]
)
has_selector = True
elif complex_pseudo and pseudo in PSEUDO_COMPLEX_NO_MATCH:
self.parse_selectors(iselector, m.end(0), FLG_PSEUDO | FLG_OPEN)
sel.no_match = True
has_selector = True
elif not complex_pseudo and pseudo in PSEUDO_SIMPLE_NO_MATCH:
sel.no_match = True
has_selector = True
elif pseudo in PSEUDO_SUPPORTED:
raise SelectorSyntaxError(
"Invalid syntax for pseudo class '{}'".format(pseudo),
self.pattern,
m.start(0)
)
else:
raise NotImplementedError(
"'{}' pseudo-class is not implemented at this time".format(pseudo)
)
return has_selector, is_html |
python | def to_array(self):
"""
Serializes this InlineQueryResultCachedAudio to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(InlineQueryResultCachedAudio, self).to_array()
array['type'] = u(self.type) # py2: type unicode, py3: type str
array['id'] = u(self.id) # py2: type unicode, py3: type str
array['audio_file_id'] = u(self.audio_file_id) # py2: type unicode, py3: type str
if self.caption is not None:
array['caption'] = u(self.caption) # py2: type unicode, py3: type str
if self.parse_mode is not None:
array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str
if self.reply_markup is not None:
array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup
if self.input_message_content is not None:
array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent
return array |
java | public FDistort input( ImageBase input ) {
if( this.input == null || this.input.width != input.width || this.input.height != input.height ) {
distorter = null;
}
this.input = input;
inputType = input.getImageType();
return this;
} |
python | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'created') and self.created is not None:
_dict['created'] = self.created
if hasattr(self, 'updated') and self.updated is not None:
_dict['updated'] = self.updated
if hasattr(self, 'url') and self.url is not None:
_dict['url'] = self.url
if hasattr(self, 'user_token') and self.user_token is not None:
_dict['user_token'] = self.user_token
if hasattr(self, 'results') and self.results is not None:
_dict['results'] = [x._to_dict() for x in self.results]
if hasattr(self, 'warnings') and self.warnings is not None:
_dict['warnings'] = self.warnings
return _dict |
python | def color(self, value):
"""
Setter for **self.__color** attribute.
:param value: Attribute value.
:type value: QColor
"""
if value is not None:
assert type(value) is QColor, "'{0}' attribute: '{1}' type is not 'QColor'!".format("color", value)
self.__color = value
self.__set_style_sheet() |
python | def init_bn_weight(layer):
'''initilize batch norm layer weight.
'''
n_filters = layer.num_features
new_weights = [
add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
]
layer.set_weights(new_weights) |
java | private void createContextMenu() {
// Create menu manager.
MenuManager menuMgr = new MenuManager();
menuMgr.setRemoveAllWhenShown(true);
menuMgr.addMenuListener(new IMenuListener() {
public void menuAboutToShow(IMenuManager mgr) {
fillContextMenu(mgr);
}
});
// Create menu.
Menu menu = menuMgr.createContextMenu(viewer.getControl());
viewer.getControl().setMenu(menu);
// Register menu for extension.
getSite().registerContextMenu(menuMgr, viewer);
} |
python | def _expected_condition_find_first_element(self, elements):
"""Try to find sequentially the elements of the list and return the first element found
:param elements: list of PageElements or element locators as a tuple (locator_type, locator_value) to be found
sequentially
:returns: first element found or None
:rtype: toolium.pageelements.PageElement or tuple
"""
from toolium.pageelements.page_element import PageElement
element_found = None
for element in elements:
try:
if isinstance(element, PageElement):
element._web_element = None
element._find_web_element()
else:
self.driver_wrapper.driver.find_element(*element)
element_found = element
break
except (NoSuchElementException, TypeError):
pass
return element_found |
python | def solve(self):
""" Solves a DC power flow.
"""
case = self.case
logger.info("Starting DC power flow [%s]." % case.name)
t0 = time.time()
# Update bus indexes.
self.case.index_buses()
# Find the index of the refence bus.
ref_idx = self._get_reference_index(case)
if ref_idx < 0:
return False
# Build the susceptance matrices.
B, Bsrc, p_businj, p_srcinj = case.Bdc
# Get the vector of initial voltage angles.
v_angle_guess = self._get_v_angle_guess(case)
# Calculate the new voltage phase angles.
v_angle, p_ref = self._get_v_angle(case, B, v_angle_guess, p_businj,
ref_idx)
logger.debug("Bus voltage phase angles: \n%s" % v_angle)
self.v_angle = v_angle
# Push the results to the case.
self._update_model(case, B, Bsrc, v_angle, p_srcinj, p_ref, ref_idx)
logger.info("DC power flow completed in %.3fs." % (time.time() - t0))
return True |
java | public Observable<ApplicationGatewaySslPredefinedPolicyInner> getSslPredefinedPolicyAsync(String predefinedPolicyName) {
return getSslPredefinedPolicyWithServiceResponseAsync(predefinedPolicyName).map(new Func1<ServiceResponse<ApplicationGatewaySslPredefinedPolicyInner>, ApplicationGatewaySslPredefinedPolicyInner>() {
@Override
public ApplicationGatewaySslPredefinedPolicyInner call(ServiceResponse<ApplicationGatewaySslPredefinedPolicyInner> response) {
return response.body();
}
});
} |
python | def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s) |
def get_root_path():
    """Get the root directory for the application.

    :return: absolute, symlink-resolved path of the parent of this file's
        directory
    """
    here = os.path.dirname(__file__)
    return os.path.realpath(os.path.join(here, os.pardir))
java | public void move(String srcAbsPath, String destAbsPath) throws ItemExistsException, PathNotFoundException,
VersionException, LockException, RepositoryException
{
// In this particular case we rely on the default configuration
move(srcAbsPath, destAbsPath, triggerEventsForDescendantsOnRename, triggerEventsForDescendantsOnMove);
} |
java | public static Location getLocation(Element elem, String description) {
Attr srcAttr = elem.getAttributeNodeNS(URI, SRC_ATTR);
if (srcAttr == null) {
return LocationImpl.UNKNOWN;
}
return new LocationImpl(description == null ? elem.getNodeName() : description, srcAttr.getValue(),
getLine(elem), getColumn(elem));
} |
python | def owner(self):
"""
Returns the owner of these capabilities, if any.
:return: the owner, can be None
:rtype: JavaObject
"""
obj = javabridge.call(self.jobject, "getOwner", "()Lweka/core/CapabilitiesHandler;")
if obj is None:
return None
else:
return JavaObject(jobject=obj) |
python | def seek(self, frames, whence=SEEK_SET):
"""Set the read/write position.
Parameters
----------
frames : int
The frame index or offset to seek.
whence : {SEEK_SET, SEEK_CUR, SEEK_END}, optional
By default (``whence=SEEK_SET``), `frames` are counted from
the beginning of the file.
``whence=SEEK_CUR`` seeks from the current position
(positive and negative values are allowed for `frames`).
``whence=SEEK_END`` seeks from the end (use negative value
for `frames`).
Returns
-------
int
The new absolute read/write position in frames.
Examples
--------
>>> from soundfile import SoundFile, SEEK_END
>>> myfile = SoundFile('stereo_file.wav')
Seek to the beginning of the file:
>>> myfile.seek(0)
0
Seek to the end of the file:
>>> myfile.seek(0, SEEK_END)
44100 # this is the file length
"""
self._check_if_closed()
position = _snd.sf_seek(self._file, frames, whence)
_error_check(self._errorcode)
return position |
java | @BetaApi
public final ListRegionInstanceGroupManagersPagedResponse listRegionInstanceGroupManagers(
String region) {
ListRegionInstanceGroupManagersHttpRequest request =
ListRegionInstanceGroupManagersHttpRequest.newBuilder().setRegion(region).build();
return listRegionInstanceGroupManagers(request);
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.