language (stringclasses, 2 values) | func_code_string (stringlengths 63-466k) |
---|---|
java
|
public StartResumeUsersPlaybackRequest.Builder startResumeUsersPlayback() {
return new StartResumeUsersPlaybackRequest.Builder(accessToken)
.setDefaults(httpManager, scheme, host, port);
}
|
java
|
public void init(Object parent, Object record)
{
m_parent = parent;
this.setOpaque(false);
if (this.getBaseApplet() != null)
{
String strScreenDesc = this.getScreenDesc();
this.getBaseApplet().setStatusText(strScreenDesc); // Display screen desc or clear screen desc.
if (strScreenDesc != null)
if (this.getTargetScreen(JBaseFrame.class) != null)
((JBaseFrame)this.getTargetScreen(JBaseFrame.class)).setTitle(strScreenDesc);
}
}
|
java
|
public static void generateCompileTimeGlobalsFile(
Map<String, ?> compileTimeGlobalsMap, Appendable output) throws IOException {
Map<String, PrimitiveData> compileTimeGlobals =
InternalValueUtils.convertCompileTimeGlobalsMap(compileTimeGlobalsMap);
for (Map.Entry<String, PrimitiveData> entry : compileTimeGlobals.entrySet()) {
String valueSrcStr =
InternalValueUtils.convertPrimitiveDataToExpr(entry.getValue(), SourceLocation.UNKNOWN)
.toSourceString();
output.append(entry.getKey()).append(" = ").append(valueSrcStr).append("\n");
}
}
|
python
|
def merge_networks(network, donor=None):
    r"""
    Combine multiple networks into one without doing any topological
    manipulations (such as stitching nearby pores to each other).
Parameters
----------
network : OpenPNM Network Object
The network to which all the other networks should be added.
donor : OpenPNM Network Object or list of Objects
The network object(s) to add to the given network
Notes
-----
    This method does *not* attempt to stitch the networks topologically.
See Also
--------
extend
trim
stitch
"""
    if donor is None:
        donors = []
    elif isinstance(donor, list):
        donors = donor
    else:
        donors = [donor]
for donor in donors:
network['pore.coords'] = sp.vstack((network['pore.coords'],
donor['pore.coords']))
network['throat.conns'] = sp.vstack((network['throat.conns'],
donor['throat.conns'] +
network.Np))
p_all = sp.ones((sp.shape(network['pore.coords'])[0],), dtype=bool)
t_all = sp.ones((sp.shape(network['throat.conns'])[0],), dtype=bool)
network.update({'pore.all': p_all})
network.update({'throat.all': t_all})
for key in set(network.keys()).union(set(donor.keys())):
if key.split('.')[1] not in ['conns', 'coords', '_id', 'all']:
if key in network.keys():
pop_flag = False
if key not in donor.keys():
logger.debug('Adding ' + key + ' to donor')
# If key not on donor add it first
if network[key].dtype == bool:
donor[key] = False
else:
donor[key] = sp.nan
pop_flag = True
# Then merge it with existing array on network
try:
temp = sp.hstack((network[key], donor[key]))
except ValueError:
temp = sp.vstack((network[key], donor[key]))
network[key] = temp
if pop_flag:
donor.pop(key, None)
else:
# If key not on network add it first
logger.debug('Adding ' + key + ' to network')
if donor[key].dtype == bool:
network[key] = False
else:
network[key] = sp.nan
# Then append donor values to network
s = sp.shape(donor[key])[0]
network[key][-s:] = donor[key]
# Clear adjacency and incidence matrices which will be out of date now
network._am.clear()
network._im.clear()
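# Hedged usage sketch (assumes OpenPNM-style network objects; the names are
# illustrative, not part of the library):
#
#   merge_networks(net_a, donor=[net_b, net_c])  # net_a absorbs both donors
#   merge_networks(net_a, donor=net_b)           # a single donor also works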
|
java
|
public static String getRemoteImplClassName(EnterpriseBean enterpriseBean, boolean isPost11DD) // d114199
{
String remoteInterfaceName = getRemoteInterfaceName(enterpriseBean);
if (remoteInterfaceName == null) { // f111627
return null; // f111627
} // f111627
String packageName = packageName(remoteInterfaceName);
String remoteName = encodeBeanInterfacesName(enterpriseBean, isPost11DD, false, false, false); //d114199 d147734
StringBuffer result = new StringBuffer();
if (packageName != null) {
result.append(packageName);
result.append('.');
}
result.append(remotePrefix);
result.append(getUniquePrefix(enterpriseBean));
result.append(remoteName);
return result.toString();
}
|
java
|
@Nullable
public static VATINStructure getFromVATINCountry (@Nullable final String sVATIN)
{
if (StringHelper.getLength (sVATIN) >= 2)
{
final String sCountry = sVATIN.substring (0, 2);
for (final VATINStructure aStructure : s_aList)
if (aStructure.getExamples ().get (0).substring (0, 2).equalsIgnoreCase (sCountry))
return aStructure;
}
return null;
}
|
java
|
public void calcScale(int zoomFactor){
float s = getScaleForZoom(zoomFactor);
scale = s;
//logger.info("calc scale zoom:"+zoomFactor+ " s: " + s);
panel1.setScale(s);
panel2.setScale(s);
panel1.repaint();
panel2.repaint();
//return scale;
}
|
python
|
def setup(self, # pylint: disable=arguments-differ
endpoint=None,
username=None,
password=None,
incident_id=None,
sketch_id=None):
"""Setup a connection to a Timesketch server and create a sketch if needed.
Args:
endpoint: str, Timesketch endpoint (e.g. http://timesketch.com/)
username: str, Username to authenticate against the Timesketch endpoint.
password: str, Password to authenticate against the Timesketch endpoint.
incident_id: str, Incident ID or reference. Used in sketch description.
sketch_id: int, Sketch ID to add the resulting timeline to. If not
provided, a new sketch is created.
"""
self.timesketch_api = timesketch_utils.TimesketchApiClient(
endpoint, username, password)
self.incident_id = None
self.sketch_id = int(sketch_id) if sketch_id else None
# Check that we have a timesketch session
if not self.timesketch_api.session:
message = 'Could not connect to Timesketch server at ' + endpoint
self.state.add_error(message, critical=True)
return
if not self.sketch_id: # No sketch id is provided, create it
if incident_id:
sketch_name = 'Sketch for incident ID: ' + incident_id
else:
sketch_name = 'Untitled sketch'
sketch_description = 'Sketch generated by dfTimewolf'
self.sketch_id = self.timesketch_api.create_sketch(
sketch_name, sketch_description)
print('Sketch {0:d} created'.format(self.sketch_id))
|
java
|
public static Date setSeconds(final Date date, final int amount) {
return set(date, Calendar.SECOND, amount);
}
|
python
|
def ExtractEvents(
self, parser_mediator, registry_key, codepage='cp1252', **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
codepage (Optional[str]): extended ASCII string codepage.
"""
self._ParseSubKey(parser_mediator, registry_key, [], codepage=codepage)
|
python
|
def login(self):
"""
Logs the user in.
        The login information is saved in the client:
- userid
- username
- cookies
:return: The raw response from the request
"""
if self.options['token']:
self.client.token = self.options['token']
result = self.users.get_user('me')
else:
response = self.users.login_user({
'login_id': self.options['login_id'],
'password': self.options['password'],
'token': self.options['mfa_token']
})
if response.status_code == 200:
self.client.token = response.headers['Token']
self.client.cookies = response.cookies
try:
result = response.json()
except ValueError:
log.debug('Could not convert response to json, returning raw response')
result = response
log.debug(result)
if 'id' in result:
self.client.userid = result['id']
if 'username' in result:
self.client.username = result['username']
return result
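# Hedged usage sketch (assumes a Mattermost-style driver constructed with the
# options read above; the names are illustrative):
#
#   driver = Driver({'login_id': 'user', 'password': 'secret',
#                    'mfa_token': None, 'token': None})
#   result = driver.login()  # sets client.token, client.userid, client.username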
|
python
|
def register_value_proxy(namespace, value_proxy, help_text):
"""Register a value proxy with the namespace, and add the help_text."""
namespace.register_proxy(value_proxy)
config.config_help.add(
value_proxy.config_key, value_proxy.validator, value_proxy.default,
namespace.get_name(), help_text)
|
python
|
async def get_power_parameters_for(
cls, system_ids: typing.Sequence[str]):
"""
        Get the power parameters for the specified systems.
*WARNING*: This method is considered 'alpha' and may be modified
in future.
:param system_ids: The system IDs to get power parameters for
"""
        if not system_ids:
return {}
data = await cls._handler.power_parameters(id=system_ids)
return data
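# Hedged usage sketch (assumes an asyncio context and a class with a bound
# handler, as used above; the class name is illustrative):
#
#   params = await Machines.get_power_parameters_for(['4y3h7n', 'xk8s2m'])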
|
python
|
def create_default_config():
"""
Create default ConfigParser instance
"""
import codecs
config = ConfigParser.SafeConfigParser()
config.readfp(StringIO(DEFAULT_CONFIG))
# Load user settings
filename = get_user_config_filename()
if not os.path.exists(filename):
from wizard import setup_wizard
setup_wizard(config)
else:
        with codecs.open(filename, 'r', encoding='utf-8') as fi:
            config.readfp(fi)
return config
|
java
|
private boolean writeToCharacteristic(BluetoothGattCharacteristic charc, byte[] data) {
charc.setValue(data);
boolean result = mGattClient.writeCharacteristic(charc);
if (result) {
Log.d(TAG, "Wrote to characteristic: " + charc.getUuid() +
", data: " + Arrays.toString(data));
} else {
Log.e(TAG, "Write failed to characteristic: " + charc.getUuid() +
", data: " + Arrays.toString(data));
}
return result;
}
|
python
|
def schedule_branches(b: bool, quoted_true, quoted_false):
"""
Helper function to choose which workflow to execute
based on the boolean `b`.
:param b:
promised boolean value
:param quoted_true:
quoted workflow to eval if the boolean is true.
    :param quoted_false:
quoted workflow to eval if the boolean is false. """
if b:
return unquote(quoted_true)
else:
return unquote(quoted_false)
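# Hedged usage sketch (assumes a matching `quote` helper from the same
# framework; the workflow names are illustrative):
#
#   chosen = schedule_branches(flag, quote(true_workflow), quote(false_workflow))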
|
python
|
def _create_record(self, rtype, name, content):
"""
        Connects to the Hetzner account, adds a new record to the zone and returns a
        boolean indicating whether creation was successful. Requires record rtype,
        name and content for the record to create.
"""
with self._session(self.domain, self.domain_id) as ddata:
# Validate method parameters
if not rtype or not name or not content:
LOGGER.warning('Hetzner => Record has no rtype|name|content specified')
return False
# Add record to zone
name = ddata['cname'] if ddata['cname'] else self._fqdn_name(name)
rrset = ddata['zone']['data'].get_rdataset(name, rdtype=rtype, create=True)
for rdata in rrset:
if self._convert_content(rtype, content) == rdata.to_text():
LOGGER.info('Hetzner => Record with content \'%s\' already exists',
content)
return True
ttl = (rrset.ttl if 0 < rrset.ttl < self._get_lexicon_option('ttl')
else self._get_lexicon_option('ttl'))
rdataset = dns.rdataset.from_text(rrset.rdclass, rrset.rdtype,
ttl, self._convert_content(rtype, content))
rrset.update(rdataset)
# Post zone to Hetzner
synced_change = self._post_zone(ddata['zone'])
if synced_change:
self._propagated_record(rtype, name, self._convert_content(rtype, content),
ddata['nameservers'])
return synced_change
|
java
|
protected final ModelNode correctValue(final ModelNode newValue, final ModelNode oldValue) {
if (valueCorrector != null) {
return valueCorrector.correct(newValue, oldValue);
}
return newValue;
}
|
java
|
public static SingleID parseFilterID(String id, int[] pos) {
int start = pos[0];
Specs specs = parseFilterID(id, pos, true);
if (specs == null) {
pos[0] = start;
return null;
}
// Assemble return results
SingleID single = specsToID(specs, FORWARD);
single.filter = specs.filter;
return single;
}
|
python
|
def boost(self, dtrain, grad, hess):
"""
Boost the booster for one iteration, with customized gradient statistics.
Parameters
----------
dtrain : DMatrix
The training DMatrix.
        grad : list
            The first-order gradients.
        hess : list
            The second-order gradients.
"""
if len(grad) != len(hess):
raise ValueError('grad / hess length mismatch: {} / {}'.format(len(grad), len(hess)))
if not isinstance(dtrain, DMatrix):
raise TypeError('invalid training matrix: {}'.format(type(dtrain).__name__))
self._validate_features(dtrain)
_check_call(_LIB.XGBoosterBoostOneIter(self.handle, dtrain.handle,
c_array(ctypes.c_float, grad),
c_array(ctypes.c_float, hess),
len(grad)))
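# Hedged usage sketch: hand-computed squared-error gradients (assumes an
# existing Booster `bst`, a DMatrix `dtrain`, and its label array `y`):
#
#   preds = bst.predict(dtrain)
#   grad = list(preds - y)    # first-order gradients of 0.5 * (pred - y)^2
#   hess = [1.0] * len(y)     # second-order gradients (constant for this loss)
#   bst.boost(dtrain, grad, hess)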
|
python
|
def compile_folder(self, directory, write=True, package=True, *args, **kwargs):
"""Compile a directory and returns paths to compiled files."""
if not isinstance(write, bool) and os.path.isfile(write):
raise CoconutException("destination path cannot point to a file when compiling a directory")
filepaths = []
for dirpath, dirnames, filenames in os.walk(directory):
if isinstance(write, bool):
writedir = write
else:
writedir = os.path.join(write, os.path.relpath(dirpath, directory))
for filename in filenames:
if os.path.splitext(filename)[1] in code_exts:
with self.handling_exceptions():
destpath = self.compile_file(os.path.join(dirpath, filename), writedir, package, *args, **kwargs)
if destpath is not None:
filepaths.append(destpath)
for name in dirnames[:]:
if not is_special_dir(name) and name.startswith("."):
if logger.verbose:
logger.show_tabulated("Skipped directory", name, "(explicitly pass as source to override).")
dirnames.remove(name) # directories removed from dirnames won't appear in further os.walk iterations
return filepaths
|
python
|
def accumulate(self, axis: AxisIdentifier) -> HistogramBase:
"""Calculate cumulative frequencies along a certain axis.
Returns
-------
new_hist: Histogram of the same type & size
"""
# TODO: Merge with Histogram1D.cumulative_frequencies
# TODO: Deal with errors and totals etc.
# TODO: inplace
new_one = self.copy()
axis_id = self._get_axis(axis)
new_one._frequencies = np.cumsum(new_one.frequencies, axis_id[0])
return new_one
|
java
|
public void onChangeMode(IndexerIoMode mode)
{
try
{
switch (mode)
{
case READ_ONLY :
setReadOnly();
break;
case READ_WRITE :
setReadWrite();
break;
}
}
catch (IOException e)
{
         LOG.error("An error occurred while changing mode " + mode, e);
}
}
|
java
|
public void setProxyChainName(String proxyChainName) {
if (proxyChainName == null) {
return;
}
this.proxyChainName = proxyChainName.trim();
        if (this.proxyChainName.isEmpty()) {
setUseProxyChain(false);
}
getConfig().setProperty(PROXY_CHAIN_NAME, this.proxyChainName);
}
|
java
|
public void draw(Canvas canvas) {
for (int i = 0; i < mHolders.size(); ++i) {
Drawable drawable = get(i).getTopLevelDrawable();
if (drawable != null) {
drawable.draw(canvas);
}
}
}
|
python
|
def get_tasks(client, list_id, completed=False):
''' Gets un/completed tasks for the given list ID '''
params = {
'list_id' : str(list_id),
'completed' : completed
}
response = client.authenticated_request(client.api.Endpoints.TASKS, params=params)
return response.json()
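# Hedged usage sketch (assumes an authenticated Wunderlist-style client, as
# used above):
#
#   open_tasks = get_tasks(client, list_id=12345)
#   done_tasks = get_tasks(client, list_id=12345, completed=True)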
|
java
|
public static String getCollation(final int id) {
Entry entry = getEntry(id);
if (entry != null) {
return entry.mysqlCollation;
} else {
            logger.warn("Unexpected mysql charset: " + id);
return null;
}
}
|
python
|
def _do_document(self, node):
'''_do_document(self, node) -> None
Process a document node. documentOrder holds whether the document
element has been encountered such that PIs/comments can be written
as specified.'''
self.documentOrder = _LesserElement
for child in node.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.documentOrder = _Element # At document element
self._do_element(child)
self.documentOrder = _GreaterElement # After document element
elif child.nodeType == Node.PROCESSING_INSTRUCTION_NODE:
self._do_pi(child)
elif child.nodeType == Node.COMMENT_NODE:
self._do_comment(child)
elif child.nodeType == Node.DOCUMENT_TYPE_NODE:
pass
else:
            raise TypeError(str(child))
|
java
|
@SuppressWarnings("unchecked")
public static <T> String join(String separator, T... items) {
StringBuilder result = new StringBuilder();
for (int i = 0; i < items.length; ++i) {
T item = items[i];
if (item instanceof Number) {
result.append(Op.str((Number) item));
} else {
result.append(item.toString());
}
if (i + 1 < items.length) {
result.append(separator);
}
}
return result.toString();
}
|
python
|
def plot(self, qubit_subset=None):
"""
Plots a bar chart with bitstring on the x axis and probability on the y axis.
:param list qubit_subset: Optional parameter used for plotting a subset of the Hilbert space.
"""
import matplotlib.pyplot as plt
prob_dict = self.get_outcome_probs()
if qubit_subset:
sub_dict = {}
qubit_num = len(self)
for i in qubit_subset:
if i > (2**qubit_num - 1):
raise IndexError("Index {} too large for {} qubits.".format(i, qubit_num))
else:
sub_dict[get_bitstring_from_index(i, qubit_num)] = prob_dict[get_bitstring_from_index(i, qubit_num)]
prob_dict = sub_dict
plt.bar(range(len(prob_dict)), prob_dict.values(), align='center', color='#6CAFB7')
plt.xticks(range(len(prob_dict)), prob_dict.keys())
plt.show()
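# Hedged usage sketch (assumes `wf` is an instance of this class):
#
#   wf.plot()                     # full outcome distribution
#   wf.plot(qubit_subset=[0, 1])  # only the listed basis-state indices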
|
python
|
def process_raw_data(self, fname, max_size):
"""
Loads data from the input file.
:param fname: input file name
:param max_size: loads at most 'max_size' samples from the input file,
if None loads the entire dataset
"""
logging.info(f'Processing data from {fname}')
data = []
with open(fname) as dfile:
for idx, line in enumerate(dfile):
if max_size and idx == max_size:
break
data.append(line)
return data
|
java
|
private void finishValue()
{
final ContainerInfo current = currentContainer();
if (current != null && current.type == ContainerType.ANNOTATION)
{
// close out and patch the length
popContainer();
}
hasWrittenValuesSinceFinished = true;
hasWrittenValuesSinceConstructed = true;
}
|
python
|
async def _capability_negotiated(self, capab):
""" Mark capability as negotiated, and end negotiation if we're done. """
self._capabilities_negotiating.discard(capab)
if not self._capabilities_requested and not self._capabilities_negotiating:
await self.rawmsg('CAP', 'END')
|
java
|
private ValueUnitWrapper normalizeMolarUnit(final double value, final String unit)
throws UnknownUnitException {
Matcher matcher = mMolarUnitPattern.matcher(unit);
if (matcher.find()) {
String siPrefix = matcher.group(1);
Double convFactor = getSIFactor(siPrefix);
if (convFactor == null) {
throw new UnknownUnitException(unit);
} else {
double normalizedValue = convFactor * value;
return new ValueUnitWrapper(normalizedValue, MOLAR_NORMALIZED_UNIT);
}
} else {
throw new UnknownUnitException(unit);
}
}
|
java
|
public Set<Constructor> getConstructorsWithAnyParamAnnotated(Annotation annotation) {
return filter(getConstructorsWithAnyParamAnnotated(annotation.annotationType()), withAnyParameterAnnotation(annotation));
}
|
java
|
public void setMessage(String data) throws HttpMalformedHeaderException {
clear();
try {
if (!this.parse(data)) {
mMalformedHeader = true;
}
} catch (Exception e) {
mMalformedHeader = true;
}
if (mMalformedHeader) {
throw new HttpMalformedHeaderException();
}
}
|
java
|
public String getRootPath(Resource resource) {
int rootLevel = urlHandlerConfig.getSiteRootLevel(resource);
if (rootLevel > 0) {
return Path.getAbsoluteParent(resource.getPath(), rootLevel, resource.getResourceResolver());
}
return null;
}
|
python
|
def config_value_changed(option):
"""
Determine if config value changed since last call to this function.
"""
hook_data = unitdata.HookData()
with hook_data():
db = unitdata.kv()
current = config(option)
saved = db.get(option)
db.set(option, current)
if saved is None:
return False
return current != saved
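# Hedged usage sketch (inside a charm hook; `restart_service` is illustrative):
#
#   if config_value_changed('port'):
#       restart_service()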
|
java
|
void lpc_to_curve(float[] curve, float[] lpc, float amp){
for(int i=0; i<ln*2; i++)
curve[i]=0.0f;
if(amp==0)
return;
for(int i=0; i<m; i++){
curve[i*2+1]=lpc[i]/(4*amp);
curve[i*2+2]=-lpc[i]/(4*amp);
}
fft.backward(curve);
{
int l2=ln*2;
float unit=(float)(1./amp);
curve[0]=(float)(1./(curve[0]*2+unit));
for(int i=1; i<ln; i++){
float real=(curve[i]+curve[l2-i]);
float imag=(curve[i]-curve[l2-i]);
float a=real+unit;
curve[i]=(float)(1.0/FAST_HYPOT(a, imag));
}
}
}
|
python
|
def _prepare_files(self, encoding):
""" private function to prepare content for paramType=form with File
"""
content_type = 'multipart/form-data'
if self.__op.consumes and content_type not in self.__op.consumes:
raise errs.SchemaError('content type {0} does not present in {1}'.format(content_type, self.__op.consumes))
boundary = uuid4().hex
content_type += '; boundary={0}'
content_type = content_type.format(boundary)
# init stream
body = io.BytesIO()
w = codecs.getwriter(encoding)
def append(name, obj):
body.write(six.b('--{0}\r\n'.format(boundary)))
# header
w(body).write('Content-Disposition: form-data; name="{0}"; filename="{1}"'.format(name, obj.filename))
body.write(six.b('\r\n'))
if 'Content-Type' in obj.header:
w(body).write('Content-Type: {0}'.format(obj.header['Content-Type']))
body.write(six.b('\r\n'))
if 'Content-Transfer-Encoding' in obj.header:
w(body).write('Content-Transfer-Encoding: {0}'.format(obj.header['Content-Transfer-Encoding']))
body.write(six.b('\r\n'))
body.write(six.b('\r\n'))
# body
if not obj.data:
with open(obj.filename, 'rb') as f:
body.write(f.read())
else:
data = obj.data.read()
if isinstance(data, six.text_type):
w(body).write(data)
else:
body.write(data)
body.write(six.b('\r\n'))
for k, v in self.__p['formData']:
body.write(six.b('--{0}\r\n'.format(boundary)))
w(body).write('Content-Disposition: form-data; name="{0}"'.format(k))
body.write(six.b('\r\n'))
body.write(six.b('\r\n'))
w(body).write(v)
body.write(six.b('\r\n'))
        # beginning of the file section
for k, v in six.iteritems(self.__p['file']):
if isinstance(v, list):
for vv in v:
append(k, vv)
else:
append(k, v)
# final boundary
body.write(six.b('--{0}--\r\n'.format(boundary)))
return content_type, body.getvalue()
|
java
|
public static Date getLastNDay(Date d, int n, int unitType) {
Calendar cal = Calendar.getInstance();
cal.setTime(d);
cal.add(unitType, -n);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTime();
}
|
python
|
def _gen_pool_xml(name,
ptype,
target=None,
permissions=None,
source_devices=None,
source_dir=None,
source_adapter=None,
source_hosts=None,
source_auth=None,
source_name=None,
source_format=None):
'''
Generate the XML string to define a libvirt storage pool
'''
hosts = [host.split(':') for host in source_hosts or []]
context = {
'name': name,
'ptype': ptype,
'target': {'path': target, 'permissions': permissions},
'source': {
'devices': source_devices or [],
'dir': source_dir,
'adapter': source_adapter,
'hosts': [{'name': host[0], 'port': host[1] if len(host) > 1 else None} for host in hosts],
'auth': source_auth,
'name': source_name,
'format': source_format
}
}
fn_ = 'libvirt_pool.jinja'
try:
template = JINJA.get_template(fn_)
except jinja2.exceptions.TemplateNotFound:
log.error('Could not load template %s', fn_)
return ''
return template.render(**context)
|
python
|
def compose(self, text, minimal_clears=False, no_clears=False):
'''
        Returns the sequence of combinations necessary to compose the given text.
        If the text expression is not possible with the given layout, a ComposeException is thrown.
Iterate over the string, converting each character into a key sequence.
Between each character, an empty combo is inserted to handle duplicate strings (and USB HID codes between characters)
@param text: Input UTF-8 string
@param minimal_clears: Set to True to minimize the number of code clears. False (default) includes a clear after every character.
@param no_clears: Set to True to not add any code clears (useful for input sequences). False (default) to include code clears.
@returns: Sequence of combinations needed to generate the given text string
'''
sequence = []
clear = self.json_data['to_hid_keyboard']['0x00'] # No Event
for char in text:
# Make sure the composition element is available
if char not in self.json_data['composition']:
raise ComposeException("'{}' is not defined as a composition in the layout '{}'".format(char, self.name))
# Lookup the sequence to handle this character
lookup = self.json_data['composition'][char]
# If using minimal clears, check to see if we need to re-use any codes
# Only need to check the most recent addition with the first combo
if sequence and set(tuple(lookup[0])) & set(tuple(sequence[-1])) and not no_clears:
sequence.extend([[clear]])
# Add to overall sequence
sequence.extend(lookup)
# Add empty combo for sequence splitting
if not minimal_clears and not no_clears:
# Blindly add a clear combo between characters
sequence.extend([[clear]])
# When using minimal clears, we still need to add a final clear
if minimal_clears and not no_clears:
sequence.extend([[clear]])
return sequence
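# Hedged usage sketch (assumes `layout` is an instance of this class with a
# loaded composition table):
#
#   combos = layout.compose('Hello')                       # clear after each char
#   combos = layout.compose('Hello', minimal_clears=True)  # clear only on reuse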
|
python
|
def feats_from_doc(self, raw_text):
'''
Parameters
----------
        raw_text : str
            Uncleaned text for parsing out features.
        Returns
        -------
        csr_matrix
            Feature matrix.
'''
parsed_text = self._nlp(self._clean_function(raw_text))
X_factory = CSRMatrixFactory()
X_factory.set_last_col_idx(self._term_idx_store.getnumvals() - 1)
term_freq = self._get_features_from_parsed_text(parsed_text, self._term_idx_store)
self._register_document_features_with_X_factory(X_factory, 0, term_freq)
return X_factory.get_csr_matrix()
|
python
|
async def check_result(method_name: str, content_type: str, status_code: int, body: str):
"""
Checks whether `result` is a valid API response.
A result is considered invalid if:
- The server returned an HTTP response code other than 200
- The content of the result is invalid JSON.
- The method call was unsuccessful (The JSON 'ok' field equals False)
:param method_name: The name of the method called
:param status_code: status code
:param content_type: content type of result
:param body: result body
:return: The result parsed to a JSON dictionary
:raises ApiException: if one of the above listed cases is applicable
"""
log.debug('Response for %s: [%d] "%r"', method_name, status_code, body)
if content_type != 'application/json':
raise exceptions.NetworkError(f"Invalid response with content type {content_type}: \"{body}\"")
try:
result_json = json.loads(body)
except ValueError:
result_json = {}
description = result_json.get('description') or body
parameters = types.ResponseParameters(**result_json.get('parameters', {}) or {})
if HTTPStatus.OK <= status_code <= HTTPStatus.IM_USED:
return result_json.get('result')
elif parameters.retry_after:
raise exceptions.RetryAfter(parameters.retry_after)
elif parameters.migrate_to_chat_id:
raise exceptions.MigrateToChat(parameters.migrate_to_chat_id)
elif status_code == HTTPStatus.BAD_REQUEST:
exceptions.BadRequest.detect(description)
elif status_code == HTTPStatus.NOT_FOUND:
exceptions.NotFound.detect(description)
elif status_code == HTTPStatus.CONFLICT:
exceptions.ConflictError.detect(description)
elif status_code in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN]:
exceptions.Unauthorized.detect(description)
elif status_code == HTTPStatus.REQUEST_ENTITY_TOO_LARGE:
raise exceptions.NetworkError('File too large for uploading. '
'Check telegram api limits https://core.telegram.org/bots/api#senddocument')
elif status_code >= HTTPStatus.INTERNAL_SERVER_ERROR:
if 'restart' in description:
raise exceptions.RestartingTelegram()
raise exceptions.TelegramAPIError(description)
raise exceptions.TelegramAPIError(f"{description} [{status_code}]")
|
python
|
def _set_isns_vrf(self, v, load=False):
"""
Setter method for isns_vrf, mapped from YANG variable /isns/isns_vrf (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_isns_vrf is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_isns_vrf() directly.
YANG Description: List of Discovery Domain Parameters.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("isns_vrf_instance",isns_vrf.isns_vrf, yang_name="isns-vrf", rest_name="vrf-forwarding", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='isns-vrf-instance', extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'vrf-forwarding', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'isns-vrf', u'callpoint': u'isns_vrf_instance_cp', u'cli-mode-name': u'config-isns-vrf-forwarding-$(isns-vrf-instance)'}}), is_container='list', yang_name="isns-vrf", rest_name="vrf-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'vrf-forwarding', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'isns-vrf', u'callpoint': u'isns_vrf_instance_cp', u'cli-mode-name': u'config-isns-vrf-forwarding-$(isns-vrf-instance)'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """isns_vrf must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("isns_vrf_instance",isns_vrf.isns_vrf, yang_name="isns-vrf", rest_name="vrf-forwarding", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='isns-vrf-instance', extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'vrf-forwarding', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'isns-vrf', u'callpoint': u'isns_vrf_instance_cp', u'cli-mode-name': u'config-isns-vrf-forwarding-$(isns-vrf-instance)'}}), is_container='list', yang_name="isns-vrf", rest_name="vrf-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Discovery Domain Parameters', u'cli-no-key-completion': None, u'alt-name': u'vrf-forwarding', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'isns-vrf', u'callpoint': u'isns_vrf_instance_cp', u'cli-mode-name': u'config-isns-vrf-forwarding-$(isns-vrf-instance)'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='list', is_config=True)""",
})
self.__isns_vrf = t
if hasattr(self, '_set'):
self._set()
|
python
|
def currentdir(self) -> str:
"""Name of the current working directory containing the relevant files.
To show most of the functionality of |property|
|FileManager.currentdir| (unpacking zip files on the fly is
explained in the documentation on function
(|FileManager.zip_currentdir|), we first prepare a |FileManager|
object corresponding to the |FileManager.basepath|
`projectname/basename`:
>>> from hydpy.core.filetools import FileManager
>>> filemanager = FileManager()
>>> filemanager.BASEDIR = 'basename'
>>> filemanager.projectdir = 'projectname'
>>> import os
>>> from hydpy import repr_, TestIO
>>> TestIO.clear()
>>> with TestIO():
... os.makedirs('projectname/basename')
... repr_(filemanager.basepath) # doctest: +ELLIPSIS
'...hydpy/tests/iotesting/projectname/basename'
At first, the base directory is empty and asking for the
current working directory results in the following error:
>>> with TestIO():
... filemanager.currentdir # doctest: +ELLIPSIS
Traceback (most recent call last):
...
RuntimeError: The current working directory of the FileManager object \
has not been defined manually and cannot be determined automatically: \
`.../projectname/basename` does not contain any available directories.
        If only one directory exists, it is considered the current
working directory automatically:
>>> with TestIO():
... os.mkdir('projectname/basename/dir1')
... filemanager.currentdir
'dir1'
|property| |FileManager.currentdir| memorises the name of the
current working directory, even if another directory is later
added to the base path:
>>> with TestIO():
... os.mkdir('projectname/basename/dir2')
... filemanager.currentdir
'dir1'
Set the value of |FileManager.currentdir| to |None| to let it
forget the memorised directory. After that, asking for the
        current working directory results in another error, as
it is not clear which directory to select:
>>> with TestIO():
... filemanager.currentdir = None
... filemanager.currentdir # doctest: +ELLIPSIS
Traceback (most recent call last):
...
RuntimeError: The current working directory of the FileManager object \
has not been defined manually and cannot be determined automatically: \
`....../projectname/basename` does contain multiple available directories \
(dir1 and dir2).
Setting |FileManager.currentdir| manually solves the problem:
>>> with TestIO():
... filemanager.currentdir = 'dir1'
... filemanager.currentdir
'dir1'
Remove the current working directory `dir1` with the `del` statement:
>>> with TestIO():
... del filemanager.currentdir
... os.path.exists('projectname/basename/dir1')
False
|FileManager| subclasses can define a default directory name.
When many directories exist and none is selected manually, the
default directory is selected automatically. The following
example shows an error message due to multiple directories
without any having the default name:
>>> with TestIO():
... os.mkdir('projectname/basename/dir1')
... filemanager.DEFAULTDIR = 'dir3'
... del filemanager.currentdir
... filemanager.currentdir # doctest: +ELLIPSIS
Traceback (most recent call last):
...
RuntimeError: The current working directory of the FileManager object \
has not been defined manually and cannot be determined automatically: The \
default directory (dir3) is not among the available directories (dir1 and dir2).
We can fix this by adding the required default directory manually:
>>> with TestIO():
... os.mkdir('projectname/basename/dir3')
... filemanager.currentdir
'dir3'
Setting the |FileManager.currentdir| to `dir4` not only overwrites
the default name, but also creates the required folder:
>>> with TestIO():
... filemanager.currentdir = 'dir4'
... filemanager.currentdir
'dir4'
>>> with TestIO():
... sorted(os.listdir('projectname/basename'))
['dir1', 'dir2', 'dir3', 'dir4']
Failed attempts in removing directories result in error messages
like the following one:
>>> import shutil
>>> from unittest.mock import patch
>>> with patch.object(shutil, 'rmtree', side_effect=AttributeError):
... with TestIO():
... del filemanager.currentdir # doctest: +ELLIPSIS
Traceback (most recent call last):
...
AttributeError: While trying to delete the current working directory \
`.../projectname/basename/dir4` of the FileManager object, the following \
error occurred: ...
Then, the current working directory still exists and is remembered
by |FileManager.currentdir|:
>>> with TestIO():
... filemanager.currentdir
'dir4'
>>> with TestIO():
... sorted(os.listdir('projectname/basename'))
['dir1', 'dir2', 'dir3', 'dir4']
"""
if self._currentdir is None:
directories = self.availabledirs.folders
if len(directories) == 1:
self.currentdir = directories[0]
elif self.DEFAULTDIR in directories:
self.currentdir = self.DEFAULTDIR
else:
prefix = (f'The current working directory of the '
f'{objecttools.classname(self)} object '
f'has not been defined manually and cannot '
f'be determined automatically:')
if not directories:
raise RuntimeError(
f'{prefix} `{objecttools.repr_(self.basepath)}` '
f'does not contain any available directories.')
if self.DEFAULTDIR is None:
raise RuntimeError(
f'{prefix} `{objecttools.repr_(self.basepath)}` '
f'does contain multiple available directories '
f'({objecttools.enumeration(directories)}).')
raise RuntimeError(
f'{prefix} The default directory ({self.DEFAULTDIR}) '
f'is not among the available directories '
f'({objecttools.enumeration(directories)}).')
return self._currentdir
|
python
|
def encode_int(code, bits_per_char=6):
"""Encode int into a string preserving order
It is using 2, 4 or 6 bits per coding character (default 6).
Parameters:
code: int Positive integer.
bits_per_char: int The number of bits per coding character.
Returns:
str: the encoded integer
"""
if code < 0:
raise ValueError('Only positive ints are allowed!')
if bits_per_char == 6:
return _encode_int64(code)
if bits_per_char == 4:
return _encode_int16(code)
if bits_per_char == 2:
return _encode_int4(code)
raise ValueError('`bits_per_char` must be in {6, 4, 2}')
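# Hedged usage sketch (assumes the private _encode_int64/_encode_int16/
# _encode_int4 helpers exist in this module):
#
#   code6 = encode_int(12345)                   # 6 bits per coding character
#   code2 = encode_int(12345, bits_per_char=2)  # 2 bits per coding character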
|
python
|
def toggle(self, key):
""" Toggles a boolean key """
val = self[key]
assert isinstance(val, bool), 'key[%r] = %r is not a bool' % (key, val)
self.pref_update(key, not val)
|
java
|
@Override
public void loadTiff(URL tiff, ImageProcessor imageProcessor, int... pages) throws VectorPrintException {
try {
if (log.isLoggable(Level.FINE)) {
log.fine(String.format("loading tiff from %s", String.valueOf(tiff)));
}
if ("file".equals(tiff.getProtocol())) {
loadTiff(new File(tiff.getFile()), imageProcessor, pages);
} else {
loadTiff(tiff.openStream(), imageProcessor, pages);
}
} catch (IOException ex) {
throw new VectorPrintException(String.format("unable to load image %s", tiff.toString()), ex);
}
}
|
java
|
private void setSoftwareLayerType()
{
if (setLayerTypeMethod == null)
return;
try
{
int LAYER_TYPE_SOFTWARE = View.class.getField("LAYER_TYPE_SOFTWARE").getInt(new View(getContext()));
setLayerTypeMethod.invoke(this, LAYER_TYPE_SOFTWARE, null);
}
catch (Exception e)
{
Log.w("SVGImageView", "Unexpected failure calling setLayerType", e);
}
}
|
java
|
public static host_cpu_core[] get_filtered(nitro_service service, String filter) throws Exception
{
host_cpu_core obj = new host_cpu_core();
options option = new options();
option.set_filter(filter);
host_cpu_core[] response = (host_cpu_core[]) obj.getfiltered(service, option);
return response;
}
|
java
|
public Date getDate(FrenchRepublicanAlgorithm algorithm) {
if (algorithm == DEFAULT_ALGORITHM) {
return new Date(this, DEFAULT_ALGORITHM);
}
long utcDays = DEFAULT_ALGORITHM.transform(this);
return new Date(algorithm.transform(utcDays), algorithm);
}
|
python
|
def updateSynapses(self, synapses, delta):
"""Update a set of synapses in the segment.
@param tp The owner TP
@param synapses List of synapse indices to update
@param delta How much to add to each permanence
        @returns True if any synapse reached 0
"""
reached0 = False
if delta > 0:
for synapse in synapses:
self.syns[synapse][2] = newValue = self.syns[synapse][2] + delta
# Cap synapse permanence at permanenceMax
if newValue > self.tp.permanenceMax:
self.syns[synapse][2] = self.tp.permanenceMax
else:
for synapse in synapses:
self.syns[synapse][2] = newValue = self.syns[synapse][2] + delta
# Cap min synapse permanence to 0 in case there is no global decay
if newValue <= 0:
self.syns[synapse][2] = 0
reached0 = True
return reached0
|
python
|
def process_sample(job, inputs, tar_id):
"""
Converts sample.tar(.gz) into two fastq files.
Due to edge conditions... BEWARE: HERE BE DRAGONS
:param JobFunctionWrappingJob job: passed by Toil automatically
:param Namespace inputs: Stores input arguments (see main)
:param str tar_id: FileStore ID of sample tar
"""
job.fileStore.logToMaster('Processing sample into read pairs: {}'.format(inputs.uuid))
work_dir = job.fileStore.getLocalTempDir()
# I/O
tar_path = job.fileStore.readGlobalFile(tar_id, os.path.join(work_dir, 'sample.tar'))
# Untar File and concat
subprocess.check_call(['tar', '-xvf', tar_path, '-C', work_dir])
os.remove(os.path.join(work_dir, 'sample.tar'))
# Grab files from tarball
fastqs = []
for root, subdir, files in os.walk(work_dir):
fastqs.extend([os.path.join(root, x) for x in files])
# Check for read 1 and read 2 files
r1 = sorted([x for x in fastqs if 'R1' in x])
r2 = sorted([x for x in fastqs if 'R2' in x])
if not r1 or not r2:
# Check if using a different standard
r1 = sorted([x for x in fastqs if '_1' in x])
r2 = sorted([x for x in fastqs if '_2' in x])
# Prune file name matches from each list
if len(r1) > len(r2):
r1 = [x for x in r1 if x not in r2]
elif len(r2) > len(r1):
r2 = [x for x in r2 if x not in r1]
# Flag if data is single-ended
assert r1 and r2, 'This pipeline does not support single-ended data. R1: {}\nR2:{}'.format(r1, r2)
command = 'zcat' if r1[0].endswith('gz') and r2[0].endswith('gz') else 'cat'
with open(os.path.join(work_dir, 'R1.fastq'), 'w') as f1:
p1 = subprocess.Popen([command] + r1, stdout=f1)
with open(os.path.join(work_dir, 'R2.fastq'), 'w') as f2:
p2 = subprocess.Popen([command] + r2, stdout=f2)
p1.wait()
p2.wait()
# Write to fileStore
r1_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'R1.fastq'))
r2_id = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'R2.fastq'))
job.fileStore.deleteGlobalFile(tar_id)
# Start cutadapt step
job.addChildJobFn(cutadapt, inputs, r1_id, r2_id, disk='60G').rv()
|
java
|
@Override
public final void process(final Map<String, Object> pRqVs,
final IRequestData pRqDt) throws Exception {
OnlineBuyer buyer = this.buySr.getBuyr(pRqVs, pRqDt);
if (buyer == null) {
buyer = this.buySr.createBuyr(pRqVs, pRqDt);
}
String nm = pRqDt.getParameter("nm");
String em = pRqDt.getParameter("em");
String pw = pRqDt.getParameter("pw");
String pwc = pRqDt.getParameter("pwc");
long now = new Date().getTime();
String tbn = OnlineBuyer.class.getSimpleName();
pRqDt.setAttribute("buyr", buyer);
if (buyer.getRegEmail() == null) { //unregistered:
if (nm != null && pw != null && pwc != null && em != null) {
//creating:
if (nm.length() > 2 && pw.length() > 7 && pw.equals(pwc)
&& em.length() > 5) {
Set<String> ndFl = new HashSet<String>();
ndFl.add("itsId");
pRqVs.put(tbn + "neededFields", ndFl);
List<OnlineBuyer> brs = getSrvOrm().retrieveListWithConditions(pRqVs,
OnlineBuyer.class, "where REGEMAIL='" + em + "'");
pRqVs.remove(tbn + "neededFields");
if (brs.size() == 0) {
buyer.setItsName(nm);
buyer.setRegisteredPassword(pw);
buyer.setRegEmail(em);
buyer.setLsTm(now);
UUID buseid = UUID.randomUUID();
buyer.setBuSeId(buseid.toString());
if (buyer.getIsNew()) {
this.srvOrm.insertEntity(pRqVs, buyer);
} else {
this.srvOrm.updateEntity(pRqVs, buyer);
}
pRqDt.setCookieValue("cBuyerId", buyer.getItsId().toString());
pRqDt.setCookieValue("buSeId", buyer.getBuSeId());
} else if (brs.size() == 1) {
pRqDt.setAttribute("errMsg", "emBusy");
} else {
getLog().error(pRqVs, PrLog.class,
"Several users with same email!: " + em);
}
} else {
pRqDt.setAttribute("errMsg", "buyCrRul");
}
} else if (pw != null && em != null) {
//login from new browser
pRqVs.put(tbn + "regCustomerdeepLevel", 1);
pRqVs.put(tbn + "taxDestplacedeepLevel", 1);
List<OnlineBuyer> brs = getSrvOrm().retrieveListWithConditions(
pRqVs, OnlineBuyer.class, "where REGISTEREDPASSWORD='" + pw
+ "' and REGEMAIL='" + em + "'");
pRqVs.remove(tbn + "regCustomerdeepLevel");
pRqVs.remove(tbn + "taxDestplacedeepLevel");
if (brs.size() == 1) {
//free buyer and moving its cart by fast updates:
mkFreBuyr(pRqVs, pRqDt, buyer, brs.get(0));
} else if (brs.size() == 0) {
pRqDt.setAttribute("errMsg", "wrong_em_password");
} else {
getLog().error(pRqVs, PrLog.class,
"Several users with same password and email!: " + em);
}
} else {
spam(pRqVs, pRqDt);
}
} else { //registered:
if (now - buyer.getLsTm() < 1800000L && buyer.getBuSeId() != null) {
//there is opened session:
String buSeId = pRqDt.getCookieValue("buSeId");
if (buyer.getBuSeId().equals(buSeId)) {
//authorized requests:
String zip = pRqDt.getParameter("zip");
String adr1 = pRqDt.getParameter("adr1");
if (nm != null && zip != null && adr1 != null) {
//change name, shipping address:
String cnt = pRqDt.getParameter("cnt");
String cit = pRqDt.getParameter("cit");
String adr2 = pRqDt.getParameter("adr2");
String phn = pRqDt.getParameter("phn");
if (nm.length() > 2 && zip.length() > 2 && adr1.length() > 2) {
buyer.setItsName(nm);
buyer.setRegZip(zip);
buyer.setRegAddress1(adr1);
buyer.setRegAddress2(adr2);
buyer.setRegCountry(cnt);
buyer.setRegCity(cit);
buyer.setRegPhone(phn);
buyer.setLsTm(now);
this.srvOrm.updateEntity(pRqVs, buyer);
} else {
pRqDt.setAttribute("errMsg", "buyEmRul");
}
} else if (pw != null && pwc != null) {
//change password:
if (pw.length() > 7 && pw.equals(pwc)) {
buyer.setRegisteredPassword(pw);
buyer.setLsTm(now);
this.srvOrm.updateEntity(pRqVs, buyer);
} else {
pRqDt.setAttribute("errMsg", "buyPwdRul");
}
} else {
//logout action:
buyer.setLsTm(0L);
this.srvOrm.updateEntity(pRqVs, buyer);
}
} else { //either spam or buyer login from other browser without logout
spam(pRqVs, pRqDt);
}
} else {
//unauthorized requests:
if (pw != null) {
//login action:
if (pw.equals(buyer.getRegisteredPassword())) {
buyer.setLsTm(now);
UUID buseid = UUID.randomUUID();
buyer.setBuSeId(buseid.toString());
pRqDt.setCookieValue("buSeId", buyer.getBuSeId());
this.srvOrm.updateEntity(pRqVs, buyer);
} else {
pRqDt.setAttribute("errMsg", "wrong_password");
}
} else {
spam(pRqVs, pRqDt);
}
}
}
String procNm = pRqDt.getParameter("nmPrcRed");
IProcessor proc = this.procFac.lazyGet(pRqVs, procNm);
proc.process(pRqVs, pRqDt);
}
|
java
|
public ResumeProcessesRequest withScalingProcesses(String... scalingProcesses) {
if (this.scalingProcesses == null) {
setScalingProcesses(new com.amazonaws.internal.SdkInternalList<String>(scalingProcesses.length));
}
for (String ele : scalingProcesses) {
this.scalingProcesses.add(ele);
}
return this;
}
|
java
|
public void resumeJob (@Nonnull final TriggerKey aTriggerKey)
{
ValueEnforcer.notNull (aTriggerKey, "TriggerKey");
try
{
m_aScheduler.resumeTrigger (aTriggerKey);
      LOGGER.info ("Successfully resumed job with TriggerKey " + aTriggerKey.toString ());
}
catch (final SchedulerException ex)
{
LOGGER.error ("Failed to resume job with TriggerKey " + aTriggerKey.toString (), ex);
}
}
|
java
|
private void unmarshalDescriptor() throws CmsXmlException, CmsException {
if (null != m_desc) {
// unmarshal descriptor
m_descContent = CmsXmlContentFactory.unmarshal(m_cms, m_cms.readFile(m_desc));
// configure messages if wanted
CmsProperty bundleProp = m_cms.readPropertyObject(m_desc, PROPERTY_BUNDLE_DESCRIPTOR_LOCALIZATION, true);
if (!(bundleProp.isNullProperty() || bundleProp.getValue().trim().isEmpty())) {
m_configuredBundle = bundleProp.getValue();
}
}
}
|
java
|
public LocalVariable getLocalVariable(Location useLocation, int number) {
int useLoc = useLocation.getLocation();
if (useLoc < 0) {
return null;
} else {
return getLocalVariable(useLoc, number);
}
}
|
java
|
@Override
public MetaData getMetaData() throws CDIException {
MetaData metaData = null;
if (isModule()) {
metaData = getModuleMetaData();
} else {
metaData = application.getApplicationMetaData();
}
return metaData;
}
|
java
|
public static String getAppId() {
if (appId != null)
return appId;
appId = PropertyManager.getProperty(PropertyNames.MDW_APP_ID);
if (appId == null) // try legacy property
appId = PropertyManager.getProperty("mdw.application.name");
if (appId == null)
return "Unknown";
return appId;
}
|
python
|
def remove_range(self, start, end, callback=None):
'''Remove a range by rank.
        This is equivalent to performing::
            del l[start:end]
        on a Python list.
        It returns the number of elements removed.
'''
N = len(self)
if start < 0:
start = max(N + start, 0)
if start >= N:
return 0
if end is None:
end = N
elif end < 0:
end = max(N + end, 0)
else:
end = min(end, N)
if start >= end:
return 0
node = self._head
index = 0
chain = [None] * self._level
for i in range(self._level-1, -1, -1):
while node.next[i] and (index + node.width[i]) <= start:
index += node.width[i]
node = node.next[i]
chain[i] = node
node = node.next[0]
initial = self._size
while node and index < end:
next = node.next[0]
self._remove_node(node, chain)
index += 1
if callback:
callback(node.score, node.value)
node = next
return initial - self._size
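# Hedged usage sketch: removing ranks 2..5 mirrors `del l[2:5]` on a plain
# list (assumes `sl` is an instance of this skip-list class):
#
#   n = sl.remove_range(2, 5)
#   n = sl.remove_range(-3, None, callback=lambda score, value: print(value))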
|
python
|
def timeout_queue_process(self):
"""
Check the timeout_queue and set any due modules to update.
"""
# process any items that need adding to the queue
while self.timeout_add_queue:
self.timeout_process_add_queue(*self.timeout_add_queue.popleft())
now = time.time()
due_timeouts = []
# find any due timeouts
for timeout in self.timeout_keys:
if timeout > now:
break
due_timeouts.append(timeout)
if due_timeouts:
# process them
for timeout in due_timeouts:
modules = self.timeout_queue[timeout]
# remove from the queue
del self.timeout_queue[timeout]
self.timeout_keys.remove(timeout)
for module in modules:
# module no longer in queue
del self.timeout_queue_lookup[module]
# tell module to update
self.timeout_update_due.append(module)
# when is next timeout due?
try:
self.timeout_due = self.timeout_keys[0]
except IndexError:
self.timeout_due = None
# process any finished modules.
# Now that the module has finished running it may have been marked to
# be triggered again. This is most likely to happen when events are
# being processed and the events are arriving much faster than the
# module can handle them. It is important as a module may handle
# events but not trigger the module update. If during the event the
# module is due to update the update is not actioned but it needs to be
# once the events have finished or else the module will no longer
# continue to update.
while self.timeout_finished:
module_name = self.timeout_finished.popleft()
self.timeout_running.discard(module_name)
if module_name in self.timeout_missed:
module = self.timeout_missed.pop(module_name)
self.timeout_update_due.append(module)
# run any modules that are due
while self.timeout_update_due:
module = self.timeout_update_due.popleft()
module_name = getattr(module, "module_full_name", None)
# if the module is running then we do not want to trigger it but
# instead wait till it has finished running and then trigger
if module_name and module_name in self.timeout_running:
self.timeout_missed[module_name] = module
else:
self.timeout_running.add(module_name)
Runner(module, self, module_name)
# we return how long till we next need to process the timeout_queue
if self.timeout_due is not None:
return self.timeout_due - time.time()
|
python
|
def atmos_worker(srcs, window, ij, args):
"""A simple atmospheric correction user function."""
src = srcs[0]
rgb = src.read(window=window)
rgb = to_math_type(rgb)
atmos = simple_atmo(rgb, args["atmo"], args["contrast"], args["bias"])
# should be scaled 0 to 1, scale to outtype
return scale_dtype(atmos, args["out_dtype"])
|
python
|
def bind(self, source=None, destination=None, node=None,
edge_title=None, edge_label=None, edge_color=None, edge_weight=None,
point_title=None, point_label=None, point_color=None, point_size=None):
"""Relate data attributes to graph structure and visual representation.
        To facilitate reuse and replayable notebooks, the binding call is chainable. Invocation does not affect the old binding: it instead returns a new Plotter instance with the new bindings added to the existing ones. Both the old and new bindings can then be used for different graphs.
:param source: Attribute containing an edge's source ID
:type source: String.
:param destination: Attribute containing an edge's destination ID
:type destination: String.
:param node: Attribute containing a node's ID
:type node: String.
:param edge_title: Attribute overriding edge's minimized label text. By default, the edge source and destination is used.
:type edge_title: HtmlString.
:param edge_label: Attribute overriding edge's expanded label text. By default, scrollable list of attribute/value mappings.
:type edge_label: HtmlString.
:param edge_color: Attribute overriding edge's color. `See palette definitions <https://graphistry.github.io/docs/legacy/api/0.9.2/api.html#extendedpalette>`_ for values. Based on Color Brewer.
:type edge_color: String.
:param edge_weight: Attribute overriding edge weight. Default is 1. Advanced layout controls will relayout edges based on this value.
:type edge_weight: String.
:param point_title: Attribute overriding node's minimized label text. By default, the node ID is used.
:type point_title: HtmlString.
:param point_label: Attribute overriding node's expanded label text. By default, scrollable list of attribute/value mappings.
:type point_label: HtmlString.
:param point_color: Attribute overriding node's color. `See palette definitions <https://graphistry.github.io/docs/legacy/api/0.9.2/api.html#extendedpalette>`_ for values. Based on Color Brewer.
:type point_color: Integer.
:param point_size: Attribute overriding node's size. By default, uses the node degree. The visualization will normalize point sizes and adjust dynamically using semantic zoom.
:type point_size: HtmlString.
:returns: Plotter.
:rtype: Plotter.
**Example: Minimal**
::
import graphistry
g = graphistry.bind()
g = g.bind(source='src', destination='dst')
**Example: Node colors**
::
import graphistry
g = graphistry.bind()
g = g.bind(source='src', destination='dst',
node='id', point_color='color')
**Example: Chaining**
::
import graphistry
g = graphistry.bind(source='src', destination='dst', node='id')
g1 = g.bind(point_color='color1', point_size='size1')
g.bind(point_color='color1b')
g2a = g1.bind(point_color='color2a')
g2b = g1.bind(point_color='color2b', point_size='size2b')
g3a = g2a.bind(point_size='size3a')
g3b = g2b.bind(point_size='size3b')
In the above **Chaining** example, all bindings use src/dst/id. Colors and sizes bind to:
::
g: default/default
g1: color1/size1
g2a: color2a/size1
g2b: color2b/size2b
g3a: color2a/size3a
g3b: color2b/size3b
"""
res = copy.copy(self)
res._source = source or self._source
res._destination = destination or self._destination
res._node = node or self._node
res._edge_title = edge_title or self._edge_title
res._edge_label = edge_label or self._edge_label
res._edge_color = edge_color or self._edge_color
res._edge_weight = edge_weight or self._edge_weight
res._point_title = point_title or self._point_title
res._point_label = point_label or self._point_label
res._point_color = point_color or self._point_color
res._point_size = point_size or self._point_size
return res
|
python
|
def draw_pathcollection(data, obj):
"""Returns PGFPlots code for a number of patch objects.
"""
content = []
# gather data
assert obj.get_offsets() is not None
labels = ["x" + 21 * " ", "y" + 21 * " "]
dd = obj.get_offsets()
draw_options = ["only marks"]
table_options = []
if obj.get_array() is not None:
draw_options.append("scatter")
dd = numpy.column_stack([dd, obj.get_array()])
labels.append("colordata" + 13 * " ")
draw_options.append("scatter src=explicit")
table_options.extend(["x=x", "y=y", "meta=colordata"])
ec = None
fc = None
ls = None
else:
# gather the draw options
try:
ec = obj.get_edgecolors()[0]
except (TypeError, IndexError):
ec = None
try:
fc = obj.get_facecolors()[0]
except (TypeError, IndexError):
fc = None
try:
ls = obj.get_linestyle()[0]
except (TypeError, IndexError):
ls = None
is_contour = len(dd) == 1
if is_contour:
draw_options = ["draw=none"]
        # `only marks` plots don't need linewidth
data, extra_draw_options = get_draw_options(data, obj, ec, fc, ls, None)
draw_options.extend(extra_draw_options)
if obj.get_cmap():
mycolormap, is_custom_cmap = _mpl_cmap2pgf_cmap(obj.get_cmap(), data)
draw_options.append("colormap" + ("=" if is_custom_cmap else "/") + mycolormap)
legend_text = get_legend_text(obj)
if legend_text is None and has_legend(obj.axes):
draw_options.append("forget plot")
for path in obj.get_paths():
if is_contour:
dd = path.vertices
if len(obj.get_sizes()) == len(dd):
# See Pgfplots manual, chapter 4.25.
            # In Pgfplots, \mark size specifies radii; in matplotlib, sizes are circle areas.
radii = numpy.sqrt(obj.get_sizes() / numpy.pi)
dd = numpy.column_stack([dd, radii])
labels.append("sizedata" + 14 * " ")
draw_options.extend(
[
"visualization depends on="
"{\\thisrow{sizedata} \\as\\perpointmarksize}",
"scatter/@pre marker code/.append style="
"{/tikz/mark size=\\perpointmarksize}",
]
)
do = " [{}]".format(", ".join(draw_options)) if draw_options else ""
content.append("\\addplot{}\n".format(do))
to = " [{}]".format(", ".join(table_options)) if table_options else ""
content.append("table{}{{%\n".format(to))
content.append((" ".join(labels)).strip() + "\n")
ff = data["float format"]
fmt = (" ".join(dd.shape[1] * [ff])) + "\n"
for d in dd:
content.append(fmt.format(*tuple(d)))
content.append("};\n")
if legend_text is not None:
content.append("\\addlegendentry{{{}}}\n".format(legend_text))
return data, content
|
python
|
def _dihed_angle_low(av, bv, cv, deriv):
"""Similar to dihed_cos, but with relative vectors"""
a = Vector3(9, deriv, av, (0, 1, 2))
b = Vector3(9, deriv, bv, (3, 4, 5))
c = Vector3(9, deriv, cv, (6, 7, 8))
b /= b.norm()
tmp = b.copy()
tmp *= dot(a, b)
a -= tmp
tmp = b.copy()
tmp *= dot(c, b)
c -= tmp
a /= a.norm()
c /= c.norm()
result = dot(a, c).results()
    # avoid troubles with the gradients by using either arccos or arcsin
if abs(result[0]) < 0.5:
        # if the cosine is far away from -1 or +1, it is safe to take the arccos
# and fix the sign of the angle.
sign = 1-(np.linalg.det([av, bv, cv]) > 0)*2
return _cos_to_angle(result, deriv, sign)
else:
# if the cosine is close to -1 or +1, it is better to compute the sine,
# take the arcsin and fix the sign of the angle
d = cross(b, a)
side = (result[0] > 0)*2-1 # +1 means angle in range [-pi/2,pi/2]
result = dot(d, c).results()
return _sin_to_angle(result, deriv, side)
|
java
|
public PageKey withParentId(String parentId) {
if (StringUtil.isBlank(parentId)) {
throw new IllegalArgumentException("Parent ID cannot be empty");
}
return new PageKey(parentId, this.offset, this.blockSize);
}
|
python
|
def _create_server(
shell,
server_address,
port,
cert_file=None,
key_file=None,
key_password=None,
ca_file=None,
):
"""
Creates the TCP console on the given address and port
:param shell: The remote shell handler
:param server_address: Server bound address
:param port: Server port
:param cert_file: Path to the server certificate
:param key_file: Path to the server private key
:param key_password: Password for the key file
:param ca_file: Path to Certificate Authority to authenticate clients
:return: A tuple: Server thread, TCP server object, Server active flag
"""
# Set up the request handler creator
active_flag = SharedBoolean(True)
def request_handler(*rh_args):
"""
Constructs a RemoteConsole as TCP request handler
"""
return RemoteConsole(shell, active_flag, *rh_args)
# Set up the server
server = ThreadingTCPServerFamily(
(server_address, port),
request_handler,
cert_file,
key_file,
key_password,
ca_file,
)
# Set flags
server.daemon_threads = True
server.allow_reuse_address = True
# Activate the server
server.server_bind()
server.server_activate()
# Serve clients
server_thread = threading.Thread(
target=server.serve_forever, name="RemoteShell-{0}".format(port)
)
server_thread.daemon = True
server_thread.start()
return server_thread, server, active_flag
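# Hedged usage sketch (illustrative): serve on localhost and shut down
# cleanly. `shell` stands for whatever handler object the surrounding module
# passes in, and SharedBoolean.set_value() is assumed from its use as an
# activity flag above.
if __name__ == "__main__":
    thread, server, flag = _create_server(shell, "127.0.0.1", 9000)
    try:
        thread.join()  # serve until interrupted
    except KeyboardInterrupt:
        flag.set_value(False)   # ask active consoles to stop
        server.shutdown()       # stop serve_forever()
        server.server_close()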
|
python
|
def sup_inf(u):
"""SI operator."""
if np.ndim(u) == 2:
P = _P2
elif np.ndim(u) == 3:
P = _P3
else:
raise ValueError("u has an invalid number of dimensions "
"(should be 2 or 3)")
erosions = []
for P_i in P:
erosions.append(ndi.binary_erosion(u, P_i))
return np.array(erosions, dtype=np.int8).max(0)
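# Hedged example (illustrative): apply the SI operator to a small 2D binary
# level set. Assumes the module-level structuring elements _P2/_P3 (line
# segments through the center pixel) are defined as in the
# morphological-snakes code this function comes from.
if __name__ == "__main__":
    u = np.zeros((7, 7), dtype=np.int8)
    u[2:5, 2:5] = 1     # a 3x3 square
    print(sup_inf(u))   # 1 where some line segment through the pixel fits in u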
|
java
|
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case STATUS:
return isSetStatus();
case METADATA_IN_JSON:
return isSetMetadataInJson();
case COLUMNS_LIST:
return isSetColumnsList();
case DATA_TYPE:
return isSetDataType();
case SHOW_TIMESERIES_LIST:
return isSetShowTimeseriesList();
case SHOW_STORAGE_GROUPS:
return isSetShowStorageGroups();
}
throw new IllegalStateException();
}
|
python
|
def disconnect(self):
"""
Disconnects from the SSDB server
"""
self._parser.on_disconnect()
if self._sock is None:
return
try:
self._sock.shutdown(socket.SHUT_RDWR)
self._sock.close()
except socket.error:
pass
self._sock = None
|
java
|
public UnsafeSorterIterator getSortedIterator() throws IOException {
assert(recordComparatorSupplier != null);
if (spillWriters.isEmpty()) {
assert(inMemSorter != null);
readingIterator = new SpillableIterator(inMemSorter.getSortedIterator());
return readingIterator;
} else {
final UnsafeSorterSpillMerger spillMerger = new UnsafeSorterSpillMerger(
recordComparatorSupplier.get(), prefixComparator, spillWriters.size());
for (UnsafeSorterSpillWriter spillWriter : spillWriters) {
spillMerger.addSpillIfNotEmpty(spillWriter.getReader(serializerManager));
}
if (inMemSorter != null) {
readingIterator = new SpillableIterator(inMemSorter.getSortedIterator());
spillMerger.addSpillIfNotEmpty(readingIterator);
}
return spillMerger.getSortedIterator();
}
}
|
java
|
public LocationInfo whatIsAtPoint( int pixelX , int pixelY , LocationInfo output ) {
if( output == null )
output = new LocationInfo();
int numCategories = confusion.getNumRows();
synchronized ( this ) {
if( pixelX >= gridWidth ) {
output.insideMatrix = false;
output.col = output.row = pixelY*numCategories/gridHeight;
} else {
output.insideMatrix = true;
output.row = pixelY*numCategories/gridHeight;
output.col = pixelX*numCategories/gridWidth;
}
}
return output;
}
|
python
|
def license_id(filename):
"""Return the spdx id for the license contained in `filename`. If no
license is detected, returns `None`.
spdx: https://spdx.org/licenses/
licenses from choosealicense.com: https://github.com/choosealicense.com
Approximate algorithm:
1. strip copyright line
2. normalize whitespace (replace all whitespace with a single space)
3. check exact text match with existing licenses
4. failing that use edit distance
"""
import editdistance # `pip install identify[license]`
with io.open(filename, encoding='UTF-8') as f:
contents = f.read()
norm = _norm_license(contents)
min_edit_dist = sys.maxsize
min_edit_dist_spdx = ''
# try exact matches
for spdx, text in licenses.LICENSES:
norm_license = _norm_license(text)
if norm == norm_license:
return spdx
# skip the slow calculation if the lengths are very different
if norm and abs(len(norm) - len(norm_license)) / len(norm) > .05:
continue
edit_dist = editdistance.eval(norm, norm_license)
if edit_dist < min_edit_dist:
min_edit_dist = edit_dist
min_edit_dist_spdx = spdx
# if there's less than 5% edited from the license, we found our match
if norm and min_edit_dist / len(norm) < .05:
return min_edit_dist_spdx
else:
# no matches :'(
return None
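# Hedged usage sketch (illustrative): classify a repository's LICENSE file by
# SPDX id. Requires the optional dependency: pip install identify[license]
if __name__ == "__main__":
    spdx = license_id("LICENSE")
    print(spdx or "no known license detected")  # e.g. 'MIT' or 'Apache-2.0'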
|
python
|
def _generate_message_error(cls, response_code, messages, response_id):
"""
:type response_code: int
:type messages: list[str]
:type response_id: str
:rtype: str
"""
line_response_code = cls._FORMAT_RESPONSE_CODE_LINE \
.format(response_code)
line_response_id = cls._FORMAT_RESPONSE_ID_LINE.format(response_id)
line_error_message = cls._FORMAT_ERROR_MESSAGE_LINE.format(
cls._GLUE_ERROR_MESSAGE_STRING_EMPTY.join(messages)
)
return cls._glue_all_error_message(
[line_response_code, line_response_id, line_error_message]
)
|
python
|
def upload_local_file(filename=None, file=None, media_type=None, keep_open=False,
wait_on_close=False, use_existing_dxfile=None, show_progress=False,
write_buffer_size=None, multithread=True, **kwargs):
'''
:param filename: Local filename
:type filename: string
:param file: File-like object
:type file: File-like object
:param media_type: Internet Media Type
:type media_type: string
:param keep_open: If False, closes the file after uploading
:type keep_open: boolean
:param write_buffer_size: Buffer size to use for upload
:type write_buffer_size: int
:param wait_on_close: If True, waits for the file to close
:type wait_on_close: boolean
:param use_existing_dxfile: Instead of creating a new file object, upload to the specified file
:type use_existing_dxfile: :class:`~dxpy.bindings.dxfile.DXFile`
:param multithread: If True, sends multiple write requests asynchronously
:type multithread: boolean
:returns: Remote file handler
:rtype: :class:`~dxpy.bindings.dxfile.DXFile`
Additional optional parameters not listed: all those under
:func:`dxpy.bindings.DXDataObject.new`.
Exactly one of *filename* or *file* is required.
Uploads *filename* or reads from *file* into a new file object (with
media type *media_type* if given) and returns the associated remote
file handler. The "name" property of the newly created remote file
is set to the basename of *filename* or to *file.name* (if it
exists).
Examples::
# Upload from a path
dxpy.upload_local_file("/home/ubuntu/reads.fastq.gz")
# Upload from a file-like object
with open("reads.fastq") as fh:
dxpy.upload_local_file(file=fh)
'''
fd = file if filename is None else open(filename, 'rb')
try:
file_size = os.fstat(fd.fileno()).st_size
    except (AttributeError, OSError):
        # file-like objects without a real file descriptor have no fstat
        file_size = 0
file_is_mmapd = hasattr(fd, "fileno")
    if write_buffer_size is None:
        write_buffer_size = dxfile.DEFAULT_BUFFER_SIZE
if use_existing_dxfile:
handler = use_existing_dxfile
else:
# Set a reasonable name for the file if none has been set
# already
creation_kwargs = kwargs.copy()
if 'name' not in kwargs:
if filename is not None:
creation_kwargs['name'] = os.path.basename(filename)
else:
# Try to get filename from file-like object
try:
local_file_name = file.name
except AttributeError:
pass
else:
creation_kwargs['name'] = os.path.basename(local_file_name)
# Use 'a' mode because we will be responsible for closing the file
# ourselves later (if requested).
handler = new_dxfile(mode='a', media_type=media_type, write_buffer_size=write_buffer_size,
expected_file_size=file_size, file_is_mmapd=file_is_mmapd, **creation_kwargs)
# For subsequent API calls, don't supply the dataobject metadata
# parameters that are only needed at creation time.
_, remaining_kwargs = dxpy.DXDataObject._get_creation_params(kwargs)
num_ticks = 60
offset = 0
handler._ensure_write_bufsize(**remaining_kwargs)
def can_be_mmapd(fd):
if not hasattr(fd, "fileno"):
return False
mode = os.fstat(fd.fileno()).st_mode
return not (stat.S_ISCHR(mode) or stat.S_ISFIFO(mode))
def read(num_bytes):
"""
Returns a string or mmap'd data containing the next num_bytes of
the file, or up to the end if there are fewer than num_bytes
left.
"""
# If file cannot be mmap'd (e.g. is stdin, or a fifo), fall back
# to doing an actual read from the file.
if not can_be_mmapd(fd):
return fd.read(handler._write_bufsize)
bytes_available = max(file_size - offset, 0)
if bytes_available == 0:
return b""
return mmap.mmap(fd.fileno(), min(handler._write_bufsize, bytes_available), offset=offset, access=mmap.ACCESS_READ)
handler._num_bytes_transmitted = 0
def report_progress(handler, num_bytes):
handler._num_bytes_transmitted += num_bytes
if file_size > 0:
ticks = int(round((handler._num_bytes_transmitted / float(file_size)) * num_ticks))
percent = int(round((handler._num_bytes_transmitted / float(file_size)) * 100))
fmt = "[{done}{pending}] Uploaded {done_bytes:,} of {total:,} bytes ({percent}%) {name}"
sys.stderr.write(fmt.format(done='=' * (ticks - 1) + '>' if ticks > 0 else '',
pending=' ' * (num_ticks - ticks),
done_bytes=handler._num_bytes_transmitted,
total=file_size,
percent=percent,
name=filename if filename is not None else ''))
sys.stderr.flush()
sys.stderr.write("\r")
sys.stderr.flush()
if show_progress:
report_progress(handler, 0)
while True:
buf = read(handler._write_bufsize)
offset += len(buf)
if len(buf) == 0:
break
handler.write(buf,
report_progress_fn=report_progress if show_progress else None,
multithread=multithread,
**remaining_kwargs)
if filename is not None:
fd.close()
handler.flush(report_progress_fn=report_progress if show_progress else None, **remaining_kwargs)
if show_progress:
sys.stderr.write("\n")
sys.stderr.flush()
if not keep_open:
handler.close(block=wait_on_close, report_progress_fn=report_progress if show_progress else None, **remaining_kwargs)
return handler
|
python
|
def set_key_state(self, key, state):
"""Sets the key state and redraws it.
:param key: Key to update state for.
:param state: New key state.
"""
key.state = state
self.renderer.draw_key(self.surface, key)
|
python
|
def _generate_mark_code(rule_name):
"""Generates a two digit string based on a provided string
Args:
        rule_name (str): A configured rule name, e.g. 'pytest_mark3'.
    Returns:
        str: A two-digit code based on the provided string, e.g. '03'
"""
code = ''.join([i for i in str(rule_name) if i.isdigit()])
code = code.zfill(2)
return code
|
java
|
public List<JoinColumn<ManyToOne<T>>> getAllJoinColumn()
{
List<JoinColumn<ManyToOne<T>>> list = new ArrayList<JoinColumn<ManyToOne<T>>>();
List<Node> nodeList = childNode.get("join-column");
for(Node node: nodeList)
{
JoinColumn<ManyToOne<T>> type = new JoinColumnImpl<ManyToOne<T>>(this, "join-column", childNode, node);
list.add(type);
}
return list;
}
|
java
|
@Override
public <R2> Lazy<Either<L, R2>> lazyZip(
Lazy<? extends Applicative<Function<? super R, ? extends R2>, Either<L, ?>>> lazyAppFn) {
return match(l -> lazy(left(l)),
r -> lazyAppFn.fmap(eitherLF -> eitherLF.<R2>fmap(f -> f.apply(r)).coerce()));
}
|
java
|
@CanIgnoreReturnValue
public final Ordered containsAtLeastElementsIn(Iterable<?> expectedIterable) {
List<?> actual = Lists.newLinkedList(actual());
final Collection<?> expected = iterableToCollection(expectedIterable);
List<Object> missing = newArrayList();
List<Object> actualNotInOrder = newArrayList();
boolean ordered = true;
// step through the expected elements...
for (Object e : expected) {
int index = actual.indexOf(e);
if (index != -1) { // if we find the element in the actual list...
// drain all the elements that come before that element into actualNotInOrder
moveElements(actual, actualNotInOrder, index);
// and remove the element from the actual list
actual.remove(0);
} else { // otherwise try removing it from actualNotInOrder...
if (actualNotInOrder.remove(e)) { // if it was in actualNotInOrder, we're not in order
ordered = false;
} else { // if it's not in actualNotInOrder, we're missing an expected element
missing.add(e);
}
}
}
// if we have any missing expected elements, fail
if (!missing.isEmpty()) {
return failAtLeast(expected, missing);
}
/*
* TODO(cpovirk): In the NotInOrder case, also include a Fact that shows _only_ the required
* elements (that is, without any extras) but in the order they were actually found. That should
* make it easier for users to compare the actual order of the required elements to the expected
* order. Or, if that's too much trouble, at least try to find a better title for the full
* actual iterable than the default of "but was," which may _sound_ like it should show only the
* required elements, rather than the full actual iterable.
*/
return ordered
? IN_ORDER
: new Ordered() {
@Override
public void inOrder() {
failWithActual(
simpleFact("required elements were all found, but order was wrong"),
fact("expected order for required elements", expected));
}
};
}
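// Hedged usage sketch (illustrative, not part of the original snippet):
// how this assertion reads from the caller's side with Google Truth:
//
//   import static com.google.common.truth.Truth.assertThat;
//   import java.util.Arrays;
//
//   assertThat(Arrays.asList("a", "b", "c", "d"))
//       .containsAtLeastElementsIn(Arrays.asList("b", "d"));           // passes
//   assertThat(Arrays.asList("a", "b", "c", "d"))
//       .containsAtLeastElementsIn(Arrays.asList("d", "b")).inOrder(); // fails: order differs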
|
java
|
public ServiceFuture<KeyBundle> updateKeyAsync(String vaultBaseUrl, String keyName, String keyVersion, final ServiceCallback<KeyBundle> serviceCallback) {
return ServiceFuture.fromResponse(updateKeyWithServiceResponseAsync(vaultBaseUrl, keyName, keyVersion), serviceCallback);
}
|
python
|
def make_article_info_copyright(self, article_info_div):
"""
Makes the copyright section for the ArticleInfo. For PLoS, this means
handling the information contained in the metadata <permissions>
element.
"""
perm = self.article.root.xpath('./front/article-meta/permissions')
if not perm:
return
copyright_div = etree.SubElement(article_info_div, 'div', {'id': 'copyright'})
cp_bold = etree.SubElement(copyright_div, 'b')
cp_bold.text = 'Copyright: '
copyright_string = '\u00A9 '
copyright_holder = perm[0].find('copyright-holder')
if copyright_holder is not None:
copyright_string += all_text(copyright_holder) + '. '
lic = perm[0].find('license')
if lic is not None:
copyright_string += all_text(lic.find('license-p'))
append_new_text(copyright_div, copyright_string)
|
python
|
def detect(self):
"""Detect and return the IP address."""
if PY3: # py23
import subprocess # noqa: S404 @UnresolvedImport pylint: disable=import-error
else:
import commands as subprocess # @UnresolvedImport pylint: disable=import-error
try:
theip = subprocess.getoutput(self.opts_command) # noqa: S605
except Exception:
theip = None
self.set_current_value(theip)
return theip
|
python
|
def load_code(fp, magic_int, code_objects={}):
"""
    marshal.load() written in Python. When the magic of the bytecode being
    loaded matches the magic of the running Python interpreter, we can simply
    use the Python-supplied marshal.load().
However we need to use this when versions are different since the internal
code structures are different. Sigh.
"""
global internStrings, internObjects
internStrings = []
internObjects = []
seek_pos = fp.tell()
# Do a sanity check. Is this a code type?
b = ord(fp.read(1))
if (b & 0x80):
b = b & 0x7f
c = chr(b)
if c != 'c':
raise TypeError("File %s doesn't smell like Python bytecode:\n"
"expecting code indicator 'c'; got '%s'"
% (fp.name, c))
fp.seek(seek_pos)
return load_code_internal(fp, magic_int, code_objects=code_objects)
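# Hedged usage sketch (illustrative): load a code object from a .pyc written
# by a possibly different Python version, as cross-version tools like xdis do.
# The 16-byte header and little-endian magic below are assumptions about the
# CPython 3.7+ pyc layout; older versions use shorter headers.
if __name__ == "__main__":
    with open("example.cpython-37.pyc", "rb") as fp:
        header = fp.read(16)  # magic, bit flags, source mtime, source size
        magic_int = int.from_bytes(header[:2], "little")
        co = load_code(fp, magic_int)
        print(co.co_names)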
|
python
|
def run(debug=False):
"""PURPOSE: start a starcraft2 process using the defined the config parameters"""
FLAGS(sys.argv)
config = gameConfig.Config(
version=None, # vers is None... unless a specific game version is desired
themap=selectMap(ladder=True), # pick a random ladder map
players=["defaulthuman", "blizzbot2_easy"],
)
createReq = sc_pb.RequestCreateGame( # used to advance to "Init Game" state, when hosting
realtime = config.realtime,
disable_fog = config.fogDisabled,
random_seed = int(now()), # a game is created using the current second timestamp as the seed
local_map = sc_pb.LocalMap(map_path=config.mapLocalPath,
map_data=config.mapData) )
joinRace = None
for player in config.players:
reqPlayer = createReq.player_setup.add() # add new player; get link to settings
playerObj = PlayerPreGame(player)
if playerObj.isComputer:
reqPlayer.difficulty = playerObj.difficulty.gameValue()
pType = playerObj.type.type
else: pType = c.PARTICIPANT
reqPlayer.type = t.PlayerControls(pType).gameValue()
reqPlayer.race = playerObj.selectedRace.gameValue()
if not playerObj.isComputer:
joinRace = reqPlayer.race
interface = sc_pb.InterfaceOptions()
raw,score,feature,rendered = config.interfaces
interface.raw = raw # whether raw data is reported in observations
interface.score = score # whether score data is reported in observations
interface.feature_layer.width = 24
joinReq = sc_pb.RequestJoinGame(options=interface) # SC2APIProtocol.RequestJoinGame
joinReq.race = joinRace # update joinGame request as necessary
if debug: print("Starcraft2 game process is launching.")
controller = None
with config.launchApp() as controller:
try:
if debug: print("Starcraft2 application is live. (%s)"%(controller.status)) # status: launched
controller.create_game(createReq)
if debug: print("Starcraft2 is waiting for %d player(s) to join. (%s)"%(config.numAgents, controller.status)) # status: init_game
playerID = controller.join_game(joinReq).player_id # SC2APIProtocol.RequestJoinGame
print("[GET IN-GAME DATA] player#%d %s"%(playerID, config))
return (controller.ping(), controller.data())
except (protocol.ConnectionError, protocol.ProtocolError, remote_controller.RequestError) as e:
if debug: print("%s Connection to game closed (NOT a bug)%s%s"%(type(e), os.linesep, e))
else: print( "Connection to game closed.")
except KeyboardInterrupt:
print("caught command to forcibly shutdown Starcraft2 host server.")
finally:
config.disable() # if the saved cfg file still exists, always remove it
controller.quit()
|
java
|
public LockedInodePath lockFullInodePath(AlluxioURI uri, LockPattern lockPattern)
throws InvalidPathException, FileDoesNotExistException {
LockedInodePath inodePath = lockInodePath(uri, lockPattern);
if (!inodePath.fullPathExists()) {
inodePath.close();
throw new FileDoesNotExistException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(uri));
}
return inodePath;
}
|
java
|
public static <E, T, X extends RuntimeException> List<E> parallelConvertIfNullThrow(List<T> source,
Class<E> targetClass, Supplier<X> exceptionSupplier) {
return BeansConvertStrategy.parallelConvertBeans(source, targetClass, exceptionSupplier, false);
}
|
python
|
def setup_dir(self):
"""Change directory for script if necessary."""
cd = self.opts.cd or self.config['crony'].get('directory')
if cd:
self.logger.debug(f'Adding cd to {cd}')
self.cmd = f'cd {cd} && {self.cmd}'
|
java
|
@Modified
protected synchronized void modified(Map<String, Object> cfwConfiguration) {
if (null == cfwConfiguration) {
return;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Tr.event(this, tc, "Processing config", cfwConfiguration);
}
this.chfw.updateConfig(cfwConfiguration);
}
|
java
|
public static Calendar addYears(Calendar origin, int value) {
Calendar cal = sync((Calendar) origin.clone());
cal.add(Calendar.YEAR, value);
return sync(cal);
}
|
python
|
def major(self, major: int) -> None:
"""
    :param major:
Major version number property. Must be a non-negative integer.
"""
self.filter_negatives(major)
self._major = major
|
python
|
def media_upload(self, media, additional_owners=None):
"""
Uploads an image to Twitter for later embedding in tweets.
https://dev.twitter.com/rest/reference/post/media/upload
:param file media:
The image file to upload (see the API docs for limitations).
:param list additional_owners:
A list of Twitter users that will be able to access the uploaded
file and embed it in their tweets (maximum 100 users).
:returns:
A dict containing information about the file uploaded. (Contains
the media id needed to embed the image in the ``media_id`` field).
"""
params = {}
set_list_param(
params, 'additional_owners', additional_owners, max_len=100)
return self._upload_media('media/upload.json', media, params)
|
python
|
def get_root_bins(self):
"""Gets the root bins in the bin hierarchy.
A node with no parents is an orphan. While all bin ``Ids`` are
known to the hierarchy, an orphan does not appear in the
hierarchy unless explicitly added as a root node or child of
another node.
return: (osid.resource.BinList) - the root bins
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_root_bins
if self._catalog_session is not None:
return self._catalog_session.get_root_catalogs()
return BinLookupSession(
self._proxy,
self._runtime).get_bins_by_ids(list(self.get_root_bin_ids()))
|
python
|
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
"""
Return True iff this host shouldn't be accessed using a proxy
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
    proxy_settings come from _scproxy._get_proxy_settings or can be mocked, e.g.:
{ 'exclude_simple': bool,
'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
}
"""
from fnmatch import fnmatch
hostonly, port = splitport(host)
def ip2num(ipAddr):
parts = ipAddr.split('.')
parts = list(map(int, parts))
if len(parts) != 4:
parts = (parts + [0, 0, 0, 0])[:4]
return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
# Check for simple host names:
if '.' not in host:
if proxy_settings['exclude_simple']:
return True
hostIP = None
for value in proxy_settings.get('exceptions', ()):
# Items in the list are strings like these: *.local, 169.254/16
if not value: continue
m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
if m is not None:
if hostIP is None:
try:
hostIP = socket.gethostbyname(hostonly)
hostIP = ip2num(hostIP)
except socket.error:
continue
base = ip2num(m.group(1))
mask = m.group(2)
if mask is None:
mask = 8 * (m.group(1).count('.') + 1)
else:
mask = int(mask[1:])
mask = 32 - mask
if (hostIP >> mask) == (base >> mask):
return True
elif fnmatch(host, value):
return True
return False
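# Hedged example (illustrative): the settings dict mirrors what
# _scproxy._get_proxy_settings returns, so the logic above can be exercised
# without macOS. Note the numeric exceptions may trigger a DNS lookup.
if __name__ == "__main__":
    settings = {
        "exclude_simple": True,
        "exceptions": ["*.local", "169.254/16", "example.com"],
    }
    print(_proxy_bypass_macosx_sysconf("printer.local", settings))  # True
    print(_proxy_bypass_macosx_sysconf("localhost", settings))      # True (simple host)
    print(_proxy_bypass_macosx_sysconf("python.org", settings))     # False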
|
python
|
def CSS_setKeyframeKey(self, styleSheetId, range, keyText):
"""
Function path: CSS.setKeyframeKey
Domain: CSS
Method name: setKeyframeKey
Parameters:
Required arguments:
'styleSheetId' (type: StyleSheetId) -> No description
'range' (type: SourceRange) -> No description
'keyText' (type: string) -> No description
Returns:
'keyText' (type: Value) -> The resulting key text after modification.
Description: Modifies the keyframe rule key text.
"""
assert isinstance(keyText, (str,)
), "Argument 'keyText' must be of type '['str']'. Received type: '%s'" % type(
keyText)
subdom_funcs = self.synchronous_command('CSS.setKeyframeKey',
styleSheetId=styleSheetId, range=range, keyText=keyText)
return subdom_funcs
|
python
|
def survey_change_name(request, pk):
"""
Works well with:
http://www.appelsiini.net/projects/jeditable
"""
survey = get_object_or_404(Survey, pk=pk)
if not request.user.has_perm("formly.change_survey_name", obj=survey):
raise PermissionDenied()
survey.name = request.POST.get("name")
survey.save()
return JsonResponse({
"status": "OK",
"name": survey.name
})
|
java
|
private CmsTemplateMapperConfiguration getConfiguration(final CmsObject cms) {
if (!m_enabled) {
return CmsTemplateMapperConfiguration.EMPTY_CONFIG;
}
if (m_configPath == null) {
m_configPath = OpenCms.getSystemInfo().getConfigFilePath(cms, "template-mapping.xml");
}
return (CmsTemplateMapperConfiguration)(CmsVfsMemoryObjectCache.getVfsMemoryObjectCache().loadVfsObject(
cms,
m_configPath,
new Transformer() {
@Override
public Object transform(Object input) {
try {
CmsFile file = cms.readFile(m_configPath, CmsResourceFilter.IGNORE_EXPIRATION);
SAXReader saxBuilder = new SAXReader();
try (ByteArrayInputStream stream = new ByteArrayInputStream(file.getContents())) {
Document document = saxBuilder.read(stream);
CmsTemplateMapperConfiguration config = new CmsTemplateMapperConfiguration(cms, document);
return config;
}
} catch (Exception e) {
LOG.warn(e.getLocalizedMessage(), e);
return new CmsTemplateMapperConfiguration(); // empty configuration, does not do anything
}
}
}));
}
|