language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
python
|
def _get_date_time_format(dt_string):
'''
Copied from win_system.py (_get_date_time_format)
Function that detects the date/time format for the string passed.
:param str dt_string:
A date/time string
:return: The format of the passed dt_string
:rtype: str
'''
valid_formats = [
'%I:%M:%S %p',
'%I:%M %p',
'%H:%M:%S',
'%H:%M',
'%Y-%m-%d',
'%m-%d-%y',
'%m-%d-%Y',
'%m/%d/%y',
'%m/%d/%Y',
'%Y/%m/%d'
]
for dt_format in valid_formats:
try:
datetime.strptime(dt_string, dt_format)
return dt_format
except ValueError:
continue
return False
|
java
|
/**
 * Sets the shrinkage constant.
 *
 * @param shrink the shrinkage constant, strictly between 0 and 1
 * @throws ArithmeticException when the value is out of range, NaN or infinite
 */
public void setShrink(double shrink)
{
    // Valid shrinkage constants lie strictly inside (0, 1); reject NaN/infinity too.
    boolean outOfRange = shrink <= 0 || shrink >= 1;
    if (outOfRange || Double.isNaN(shrink) || Double.isInfinite(shrink))
        throw new ArithmeticException("Shrinkage constant must be > 0 and < 1, not " + shrink);
    this.shrink = shrink;
}
|
java
|
/**
 * Collects the callable "leaf" nodes of a (possibly compound) callee expression into
 * {@code results}.
 *
 * @param expr the callee expression to decompose
 * @param results accumulator for the callable leaf nodes found
 * @return true when every leaf of {@code expr} is a supported callable form, false otherwise
 */
private static boolean collectCallableLeavesInternal(Node expr, ArrayList<Node> results) {
  switch (expr.getToken()) {
    case FUNCTION:
    case GETPROP:
    case NAME:
      results.add(expr);
      return true;
    case SUPER:
      {
        // Pretend that `super` is an alias for the superclass reference.
        Node clazz = checkNotNull(NodeUtil.getEnclosingClass(expr));
        Node function = checkNotNull(NodeUtil.getEnclosingFunction(expr));
        Node ctorDef = checkNotNull(NodeUtil.getEs6ClassConstructorMemberFunctionDef(clazz));
        // The only place SUPER should be a callable expression is in a class ctor.
        checkState(
            function.isFirstChildOf(ctorDef), "Unknown SUPER reference: %s", expr.toStringTree());
        return collectCallableLeavesInternal(clazz.getSecondChild(), results);
      }
    case CLASS:
      {
        // Collect the constructor function, or failing that, the superclass reference.
        @Nullable Node ctorDef = NodeUtil.getEs6ClassConstructorMemberFunctionDef(expr);
        if (ctorDef != null) {
          return collectCallableLeavesInternal(ctorDef.getOnlyChild(), results);
        } else if (expr.getSecondChild().isEmpty()) {
          return true; // A class with an implicit ctor and no superclass adds no callable leaf.
        } else {
          return collectCallableLeavesInternal(expr.getSecondChild(), results);
        }
      }
    case AND:
    case OR:
      // Either operand may be the value at runtime, so both must be analyzable.
      return collectCallableLeavesInternal(expr.getFirstChild(), results)
          && collectCallableLeavesInternal(expr.getSecondChild(), results);
    case COMMA:
    case ASSIGN:
      // Only the second child is the resulting value.
      return collectCallableLeavesInternal(expr.getSecondChild(), results);
    case HOOK:
      // Both branches of `cond ? a : b` are potential callees.
      return collectCallableLeavesInternal(expr.getChildAtIndex(1), results)
          && collectCallableLeavesInternal(expr.getChildAtIndex(2), results);
    case NEW_TARGET:
    case THIS:
      // These could be an alias to any function. Treat them as an unknown callable.
    default:
      return false; // Unsupported call type.
  }
}
|
java
|
/**
 * Round-trips an object through Java serialization: serializes {@code o} to an
 * in-memory buffer and deserializes it back.
 *
 * @param o the object to round-trip (must be Serializable)
 * @return the deserialized copy of {@code o}
 * @throws IOException when serialization or deserialization fails
 * @throws ClassNotFoundException when the serialized class cannot be resolved
 */
public static Object serializeAndDeserialize(Object o) throws IOException,
    ClassNotFoundException {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  // try-with-resources replaces the manual try/finally close() pairs.
  try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
    out.writeObject(o);
  }
  try (ObjectInputStream in = new ObjectInputStream(
      new ByteArrayInputStream(bytes.toByteArray()))) {
    return in.readObject();
  }
}
|
java
|
@Managed
@ManagedDescription("Returns global index size")
public long getGlobalIndexSize() throws QuotaManagerException
{
    // The global size is the sum of every repository's index size.
    long size = 0;
    for (RepositoryQuotaManager rqm : rQuotaManagers.values())
    {
        size += rqm.getRepositoryIndexSize();
    }
    return size;
}
|
python
|
def _slice_area_from_bbox(self, src_area, dst_area, ll_bbox=None,
                          xy_bbox=None):
    """Slice the provided area using the bounds provided.

    :param src_area: source area definition to slice
    :param dst_area: destination area; replaced by a crop area when a
        bbox is given
    :param ll_bbox: lon/lat bounding box (takes precedence over xy_bbox)
    :param xy_bbox: bounding box in the source projection coordinates
    :return: (sliced source area, y_slice, x_slice)
    """
    if ll_bbox is not None:
        # Lon/lat bounds: crop against a fixed 100x100 geographic grid.
        dst_area = AreaDefinition(
            'crop_area', 'crop_area', 'crop_latlong',
            {'proj': 'latlong'}, 100, 100, ll_bbox)
    elif xy_bbox is not None:
        # Projection-coordinate bounds: crop in the source projection/grid.
        dst_area = AreaDefinition(
            'crop_area', 'crop_area', 'crop_xy',
            src_area.proj_dict, src_area.x_size, src_area.y_size,
            xy_bbox)
    x_slice, y_slice = src_area.get_area_slices(dst_area)
    return src_area[y_slice, x_slice], y_slice, x_slice
|
java
|
/**
 * Formats an epoch-millisecond timestamp using a pooled SimpleDateFormat.
 *
 * @param dt epoch milliseconds to format
 * @param format pattern to fetch from the format pool
 * @param tz time zone applied to the formatter
 * @param country country code used to resolve the locale for calendar data
 * @param tolerance timestamps at or below this value yield an empty string
 * @return the formatted date/time, or "" when dt <= tolerance or formatting fails
 */
static public String getFormattedDateTime(long dt, String format, TimeZone tz, String country, long tolerance)
{
    String ret = "";
    SimpleDateFormat df = null;
    try
    {
        if(dt > tolerance)
        {
            df = formatPool.getFormat(format);
            df.setTimeZone(tz);
            DateUtilities.setCalendarData(df.getCalendar(), DateUtilities.getLocale(country));
            ret = df.format(new Date(dt));
        }
    }
    catch(Exception e)
    {
        // NOTE(review): all formatting errors are silently swallowed and "" is
        // returned — presumably a deliberate best-effort contract; confirm
        // before tightening.
    }
    if(df != null)
        formatPool.release(df); // always return the formatter to the pool
    return ret;
}
|
java
|
/**
 * Creates a sub-task progress tracker and registers it with this progress.
 *
 * @param amount the amount of work the sub-task contributes (added to this
 *               progress' running total)
 * @param text human-readable description of the sub-task
 * @return the newly registered sub-task
 */
public WorkProgress createTaskProgress(long amount, String text) {
    this.amount += amount;
    SubWorkProgress task = new SubWorkProgress(this, amount, amount, text);
    synchronized (tasks) {
        tasks.add(task);
        // Register with the join point so joining does not cancel this sub-task.
        if (jp != null) jp.addToJoinDoNotCancel(task.getSynch());
    }
    return task;
}
|
python
|
def query(self, tableClass, comparison=None,
          limit=None, offset=None, sort=None):
    """
    Return a generator of instances of C{tableClass}, or tuples of
    instances if C{tableClass} is a tuple of classes.

    Examples::
        fastCars = s.query(Vehicle,
            axiom.attributes.AND(
                Vehicle.wheels == 4,
                Vehicle.maxKPH > 200),
            limit=100,
            sort=Vehicle.maxKPH.descending)
        quotesByClient = s.query( (Client, Quote),
            axiom.attributes.AND(
                Client.active == True,
                Quote.client == Client.storeID,
                Quote.created >= someDate),
            limit=10,
            sort=(Client.name.ascending,
                  Quote.created.descending))

    @param tableClass: a subclass of Item to look for instances of,
        or a tuple of subclasses.
    @param comparison: a provider of L{IComparison}, or None, to match all
        items available in the store. If tableClass is a tuple, the
        comparison must refer to all Item subclasses in that tuple and
        specify the relationships between them.
    @param limit: an int to limit the total length of the results, or None
        for all available results.
    @param offset: an int to specify a starting point within the available
        results, or None to start at 0.
    @param sort: an L{ISort}, something that comes from an SQLAttribute's
        'ascending' or 'descending' attribute.
    @return: an L{ItemQuery} object, which is an iterable of Items or
        tuples of Items, according to tableClass.
    """
    # A tuple of classes produces a multi-table (join) query.
    queryClass = (
        MultipleItemQuery if isinstance(tableClass, tuple) else ItemQuery)
    return queryClass(self, tableClass, comparison, limit, offset, sort)
|
java
|
/**
 * Returns a parser for a block comment: {@code begin}, then any number of
 * {@code commented} tokens not starting {@code end}, then {@code end}.
 *
 * @param begin parser recognizing the comment opener
 * @param end parser recognizing the comment closer
 * @param commented parser consuming one token of comment body
 * @return a parser consuming the whole block comment, yielding nothing
 */
public static Parser<Void> blockComment(Parser<Void> begin, Parser<Void> end, Parser<?> commented) {
  return Parsers.sequence(begin, end.not().next(commented).skipMany(), end);
}
|
java
|
/**
 * Handles a remote command: tries this screen first, then every child
 * session (except the one that forwarded the command), and finally the
 * parent via the superclass.
 *
 * @param strCommand the command to handle
 * @param properties command properties
 * @param sourceSession the session that forwarded this command; it is
 *                      skipped so the command does not bounce back
 * @return the handler's result, or Boolean.FALSE when nobody handled it
 */
public Object handleRemoteCommand(String strCommand, Map<String,Object> properties, Object sourceSession) throws RemoteException, DBException
{
    Object objHandled = this.doRemoteCommand(strCommand, properties); // Do I handle it?
    if (Boolean.FALSE.equals(objHandled))
    { // Not handled by this screen, try child windows
        for (int iFieldSeq = 0; iFieldSeq < this.getSessionObjectCount(); iFieldSeq++)
        { // See if any of my children want to handle this command
            BaseSession sField = this.getSessionObjectAt(iFieldSeq);
            if (sField != sourceSession) // Don't call the child that passed this up
            {
                objHandled = sField.handleRemoteCommand(strCommand, properties, this); // Send to children (make sure they don't call me)
                if (!Boolean.FALSE.equals(objHandled))
                    return objHandled; // If handled by sub-session, return
            }
        }
    }
    if (Boolean.FALSE.equals(objHandled))
        objHandled = super.handleRemoteCommand(strCommand, properties, sourceSession); // This will send the command to my parent
    return objHandled;
}
|
python
|
def libvlc_audio_equalizer_set_preamp(p_equalizer, f_preamp):
    '''Set a new pre-amplification value for an equalizer.

    The new equalizer settings are subsequently applied to a media player
    by invoking L{libvlc_media_player_set_equalizer}(). The supplied
    amplification value will be clamped to the -20.0 to +20.0 range.

    @param p_equalizer: valid equalizer handle, must not be NULL.
    @param f_preamp: preamp value (-20.0 to 20.0 Hz).
    @return: zero on success, -1 on error.
    @version: LibVLC 2.2.0 or later.
    '''
    # Reuse the cached ctypes binding when present, otherwise create it.
    f = _Cfunctions.get('libvlc_audio_equalizer_set_preamp', None)
    if not f:
        f = _Cfunction('libvlc_audio_equalizer_set_preamp', ((1,), (1,),), None,
                       ctypes.c_int, ctypes.c_void_p, ctypes.c_float)
    return f(p_equalizer, f_preamp)
|
java
|
/**
 * Adds a row mutation to the bulk batcher and tracks the in-flight operation.
 *
 * @param rowMutation the mutation to apply; must not be null
 * @return a future completing when the mutation has been applied
 */
@Override
public synchronized ApiFuture<Void> add(RowMutation rowMutation) {
  Preconditions.checkNotNull(rowMutation, "mutation details cannot be null");
  final ApiFuture<Void> response = bulkMutateBatcher.add(rowMutation);
  // Register with the accountant so flush/close can await completion.
  operationAccountant.registerOperation(ApiFutureUtil.adapt(response));
  return response;
}
|
python
|
def get_registration_email_link(application):
    """Retrieve a link that can be emailed to the logged other users.

    :param application: application whose detail page should be linked
    :return: (url, is_secret) tuple; the link is never secret
    """
    url = '{0}/applications/{1:d}/'.format(
        settings.REGISTRATION_BASE_URL, application.pk)
    return url, False
|
python
|
def score_n1(matrix, matrix_size):
    """\
    Implements the penalty score feature 1.

    ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results - Table 11 (page 54)

    ============================================ ======================== ======
    Feature                                      Evaluation condition     Points
    ============================================ ======================== ======
    Adjacent modules in row/column in same color No. of modules = (5 + i) N1 + i
    ============================================ ======================== ======
    N1 = 3

    :param matrix: The matrix to evaluate
    :param matrix_size: The width (or height) of the matrix.
    :return int: The penalty score (feature 1) of the matrix.
    """
    def _runs_penalty(bits):
        # Penalty for one row/column: each run of >= 5 equal modules adds
        # N1 + (run length - 5) == run length - 2 points (N1 == 3).
        penalty = 0
        run_len = 0
        prev = -1
        for bit in bits:
            if bit == prev:
                run_len += 1
            else:
                if run_len >= 5:
                    penalty += run_len - 2
                run_len = 1
                prev = bit
        if run_len >= 5:
            penalty += run_len - 2
        return penalty

    total = 0
    for i in range(matrix_size):
        total += _runs_penalty(matrix[i][j] for j in range(matrix_size))
        total += _runs_penalty(matrix[j][i] for j in range(matrix_size))
    return total
|
python
|
def deleteSystemVariable(self, remote, name):
    """Delete a system variable from CCU / Homegear.

    Returns the backend's result, or None when no server is connected.
    """
    if self._server is None:
        return None
    return self._server.deleteSystemVariable(remote, name)
|
python
|
async def CreateModel(self, cloud_tag, config, credential, name, owner_tag, region):
    '''
    cloud_tag : str
    config : typing.Mapping[str, typing.Any]
    credential : str
    name : str
    owner_tag : str
    region : str
    Returns -> typing.Union[_ForwardRef('Number'), str, typing.Sequence[~ModelMachineInfo], _ForwardRef('ModelMigrationStatus'), _ForwardRef('ModelSLAInfo'), _ForwardRef('EntityStatus'), typing.Sequence[~ModelUserInfo]]
    '''
    # Build the ModelManager.CreateModel RPC payload in one shot.
    params = {
        'cloud-tag': cloud_tag,
        'config': config,
        'credential': credential,
        'name': name,
        'owner-tag': owner_tag,
        'region': region,
    }
    msg = {
        'type': 'ModelManager',
        'request': 'CreateModel',
        'version': 5,
        'params': params,
    }
    return await self.rpc(msg)
|
java
|
/**
 * Asynchronously creates or updates a sync member, delivering the result to
 * the given callback.
 *
 * @param resourceGroupName the resource group containing the server
 * @param serverName the server name
 * @param databaseName the database name
 * @param syncGroupName the sync group name
 * @param syncMemberName the sync member name
 * @param parameters the sync member definition to create or update
 * @param serviceCallback callback invoked with the service result
 * @return a future tracking the operation
 */
public ServiceFuture<SyncMemberInner> createOrUpdateAsync(String resourceGroupName, String serverName, String databaseName, String syncGroupName, String syncMemberName, SyncMemberInner parameters, final ServiceCallback<SyncMemberInner> serviceCallback) {
    return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, syncGroupName, syncMemberName, parameters), serviceCallback);
}
|
python
|
def _update_color_hsv(self, event=None):
    """Update display after a change in the HSV spinboxes.

    :param event: spinbox event; when None the update is forced.
    """
    # Only react when the spinbox value actually changed (or on forced update).
    if event is None or event.widget.old_value != event.widget.get():
        h = self.hue.get()
        s = self.saturation.get()
        v = self.value.get()
        sel_color = hsv_to_rgb(h, s, v)
        self.red.set(sel_color[0])
        self.green.set(sel_color[1])
        self.blue.set(sel_color[2])
        if self.alpha_channel:
            # Append alpha so the alpha bar gets a full RGBA tuple.
            sel_color += (self.alpha.get(),)
            self.alphabar.set_color(sel_color)
        hexa = rgb_to_hexa(*sel_color)
        self.hexa.delete(0, "end")
        self.hexa.insert(0, hexa)
        self.square.set_hsv((h, s, v))
        self.bar.set(h)
        self._update_preview()
|
python
|
def case(self, case_id=None):
    """Return a Case object.

    Args:
        case_id (str): A case id; when falsy the first case is returned.

    Returns:
        Case: the matching case, or None when nothing matches.
    """
    cases = self.cases()
    if not case_id:
        # No id given: fall back to the first available case, if any.
        return cases[0] if cases else None
    for candidate in cases:
        if candidate.case_id == case_id:
            return candidate
    return None
|
java
|
/**
 * Retrieves the page of records preceding {@code page}.
 *
 * @param page the current page
 * @param client the Twilio REST client used to issue the request
 * @return the previous page of Entity records
 */
@Override
public Page<Entity> previousPage(final Page<Entity> page,
                                 final TwilioRestClient client) {
    Request request = new Request(
        HttpMethod.GET,
        page.getPreviousPageUrl(
            Domains.AUTHY.toString(),
            client.getRegion()
        )
    );
    return pageForRequest(client, request);
}
|
python
|
def decr(self, key, value, noreply=False):
    """
    The memcached "decr" command.

    Args:
        key: str, see class docs for details.
        value: int, the amount by which to decrement the value.
        noreply: optional bool, False to wait for the reply (the default).

    Returns:
        If noreply is True, always returns None. Otherwise returns the new
        value of the key, or None if the key wasn't found.
    """
    key = self.check_key(key)
    suffix = b' noreply' if noreply else b''
    cmd = (b'decr ' + key + b' '
           + six.text_type(value).encode('ascii')
           + suffix + b'\r\n')
    results = self._misc_cmd([cmd], b'decr', noreply)
    if noreply or results[0] == b'NOT_FOUND':
        return None
    return int(results[0])
|
python
|
def write(self, data, context=None):
    """Enqueues the passed in data to the :func:`queue`. If the caller specifies a context as well, it will
    take precedence over the instance in :func:`context`.

    Args:
        data (object). data the telemetry data to send. This will be wrapped in an :class:`contracts.Envelope`
            before being enqueued to the :func:`queue`.
        context (:class:`TelemetryContext`). context the override context to use when constructing the
            :class:`contracts.Envelope`.

    Raises:
        Exception: when no context is available or no data was provided.
    """
    # The explicit `context` argument wins over the instance-level context.
    local_context = context or self._context
    if not local_context:
        raise Exception('Context was required but not provided')
    if not data:
        raise Exception('Data was required but not provided')
    envelope = contracts.Envelope()
    envelope.name = data.ENVELOPE_TYPE_NAME
    envelope.time = datetime.datetime.utcnow().isoformat() + 'Z'
    envelope.ikey = local_context.instrumentation_key
    tags = envelope.tags
    # Tags: instance context first, then the override, so override values win.
    for prop_context in [self._context, context]:
        if not prop_context:
            continue
        for key, value in self._write_tags(prop_context):
            tags[key] = value
    envelope.data = contracts.Data()
    envelope.data.base_type = data.DATA_TYPE_NAME
    # Properties: override context first; existing keys are never overwritten,
    # so override values take precedence here as well.
    for prop_context in [context, self._context]:
        if not prop_context:
            continue
        if hasattr(data, 'properties') and prop_context.properties:
            properties = data.properties
            for key in prop_context.properties:
                if key not in properties:
                    properties[key] = prop_context.properties[key]
    envelope.data.base_data = data
    self._queue.put(envelope)
|
java
|
/**
 * Builds a new instance via the class' {@code (CharStream)} constructor,
 * passing a null stream; any reflection failure is rethrown unchecked.
 */
@Override
public T get() {
    try {
        // The Object[] wrapper disambiguates the null argument for varargs.
        return clazz.getConstructor(CharStream.class).newInstance(new Object[] { null });
    }
    catch (Exception e) {
        throw new RuntimeException(e);
    }
}
|
python
|
def add_widgets(self, *widgets_or_spacings):
    """Add widgets/spacing to dialog vertical layout.

    Integer arguments are treated as spacing; anything else is assumed
    to be a widget and is added to the layout directly.
    """
    layout = self.layout()
    for item in widgets_or_spacings:
        add = layout.addSpacing if isinstance(item, int) else layout.addWidget
        add(item)
|
java
|
/**
 * Returns the IPv6 ranges, lazily initializing the backing list so this
 * accessor never returns null.
 */
public java.util.List<Ipv6Range> getIpv6Ranges() {
    if (ipv6Ranges == null) {
        ipv6Ranges = new com.amazonaws.internal.SdkInternalList<Ipv6Range>();
    }
    return ipv6Ranges;
}
|
python
|
def _find_last_line_index(contents):
    """Find the index of the last line of the headerblock in contents.

    :param contents: sequence of lines to scan
    :return: index of the final headerblock line
    :raises RuntimeError: if contents is empty, the headerblock never ends,
        or the headerblock is shorter than two lines
    """
    headerblock = re.compile(r"^{0}.*$".format(_ALL_COMMENT))
    # Idiomatic emptiness check; also fixes the doubled "not not" in the message.
    if not contents:
        raise RuntimeError("""File does not have any contents""")
    lineno = 0
    while headerblock.match(contents[lineno]):
        if lineno + 1 == len(contents):
            raise RuntimeError("""No end of headerblock in file""")
        lineno += 1
    if lineno < 2:
        raise RuntimeError("""Headerblock must have at least two lines""")
    return lineno - 1
|
python
|
def get_kline_data(self, symbol, kline_type='5min', start=None, end=None):
    """Get kline (candlestick) data.

    Each query returns at most 1500 pieces of data; page by time to
    obtain more.

    :param symbol: Name of symbol e.g. KCS-BTC
    :type symbol: string
    :param kline_type: candlestick interval: 1min, 3min, 5min, 15min,
        30min, 1hour, 2hour, 4hour, 6hour, 8hour, 12hour, 1day, 1week
    :type kline_type: string
    :param start: Start time as unix timestamp (optional) default start of day in UTC
    :type start: int
    :param end: End time as unix timestamp (optional) default now in UTC
    :type end: int

    https://docs.kucoin.com/#get-historic-rates

    .. code:: python

        klines = client.get_kline_data('KCS-BTC', '5min', 1507479171, 1510278278)

    :returns: ApiResponse -- list of
        [start_time, open, close, high, low, amount, volume] rows
    :raises: KucoinResponseException, KucoinAPIException
    """
    data = {'symbol': symbol}
    if kline_type is not None:
        data['type'] = kline_type
    if start is None:
        # Default to midnight UTC of the current day.
        start = calendar.timegm(datetime.utcnow().date().timetuple())
    if end is None:
        end = int(time.time())
    data['startAt'] = start
    data['endAt'] = end
    return self._get('market/candles', False, data=data)
|
java
|
/**
 * Hashes a password with BCrypt using a freshly generated salt.
 *
 * @param password the plaintext password, may be null
 * @return the BCrypt hash, or null when the input was null
 */
public char[] passwordHash(char[] password) {
    // Preserve null -> null so callers can pass through unset passwords.
    return (null == password) ? null : BCrypt.hashpw(password, BCrypt.gensalt());
}
|
python
|
def get_magic_guesses(fullpath):
    """
    Return all the possible guesses from the magic library about
    the content of the file.

    @param fullpath: location of the file
    @type fullpath: string
    @return: guesses about content of the file
    @rtype: tuple
    """
    if CFG_HAS_MAGIC == 1:
        # Old-style API: one pre-built cookie per guess type.
        cookies = _get_magic_cookies()
        return tuple(cookies[key].file(fullpath) for key in cookies.keys())
    elif CFG_HAS_MAGIC == 2:
        # New-style API: one call per mime/encoding flag combination.
        flag_sets = (
            {'mime': False, 'mime_encoding': False},
            {'mime': True, 'mime_encoding': False},
            {'mime': False, 'mime_encoding': True},
        )
        return tuple(_magic_wrapper(fullpath, **flags) for flags in flag_sets)
|
java
|
/**
 * Fetches the next page of App Service plans for a resource group.
 *
 * @param nextPageLink the link to the next page of results
 * @return an observable emitting the next page's body
 */
public Observable<Page<AppServicePlanInner>> listByResourceGroupNextAsync(final String nextPageLink) {
    return listByResourceGroupNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<AppServicePlanInner>>, Page<AppServicePlanInner>>() {
            @Override
            public Page<AppServicePlanInner> call(ServiceResponse<Page<AppServicePlanInner>> response) {
                return response.body();
            }
        });
}
|
java
|
/**
 * Records the relation (A, r, B) in both directions so it can be looked up
 * from either endpoint.
 */
void store(int A, int r, int B) {
    getB(A, r).add(B);
    getA(B, r).add(A);
}
|
python
|
def _create(self, **attributes):
""" Create new interface on IxNetwork.
Set enabled and description (==name).
:return: interface object reference.
"""
attributes['enabled'] = True
if 'name' in self._data:
attributes['description'] = self._data['name']
obj_ref = self.api.add(self.obj_parent(), self.obj_type(), **attributes)
self.api.commit()
return self.api.remapIds(obj_ref)
|
python
|
def validate_all_keys_in_obj(obj_name, obj, validation_fun):
    """Recursively validate every (nested) key in `obj` with `validation_fun`.

    Args:
        obj_name (str): name for `obj` being validated.
        obj (dict): dictionary object.
        validation_fun (function): function used to validate the value
            of `key`.

    Returns:
        None: indicates validation successful

    Raises:
        ValidationError: `validation_fun` will raise this error on failure
    """
    for key in obj:
        validation_fun(obj_name, key)
        value = obj[key]
        # Recurse into nested containers so deeply nested keys are checked too.
        if isinstance(value, dict):
            validate_all_keys_in_obj(obj_name, value, validation_fun)
        elif isinstance(value, list):
            validate_all_items_in_list(obj_name, value, validation_fun)
|
java
|
/**
 * Formats the span between two instants as "HH:mm:ss".
 *
 * @param startDate span start; must not be null
 * @param endDate span end; must not be null
 * @return the elapsed time formatted as hours:minutes:seconds
 */
public static String formatDurationOnSecond(@NotNull Date startDate, @NotNull Date endDate) {
    return DurationFormatUtils.formatDuration(endDate.getTime() - startDate.getTime(), "HH:mm:ss");
}
|
python
|
def sq_dist(X1, X2=None):
    """
    Compute the matrix of all pairwise squared distances between the rows
    of X1 and the rows of X2.

    :param X1: (n, d) array
    :param X2: (m, d) array; defaults to X1 when omitted
    :return: (n, m) array K with K[i, j] = ||X1[i] - X2[j]||**2
    """
    # `X2 == None` breaks on arrays (elementwise comparison); use `is None`.
    if X2 is None:
        X2 = X1
    assert X1.shape[1] == X2.shape[1], 'dimensions do not match'
    n = X1.shape[0]
    m = X2.shape[0]
    # (x1 - x2)**2 = x1**2 + x2**2 - 2*x1*x2, vectorized over all row pairs.
    # (The previous X1sq/X2sq intermediates were unused and called
    # sp.reshape with an invalid signature, so they were removed.)
    K = sp.tile((X1 * X1).sum(1), (m, 1)).T \
        + sp.tile((X2 * X2).sum(1), (n, 1)) \
        - 2 * sp.dot(X1, X2.T)
    return K
|
python
|
def get_buildfile_path(settings):
    """
    Path to which a build tarball should be downloaded.
    """
    # The local filename is the final component of the build URL.
    filename = os.path.basename(settings.build_url)
    return os.path.join(BUILDS_ROOT, filename)
|
python
|
def _addrinfo_or_none(contact_point, port):
    """
    A helper function that wraps socket.getaddrinfo and returns None
    when it fails to, e.g. resolve one of the hostnames. Used to address
    PYTHON-895.

    :param contact_point: host name or address to resolve
    :param port: port number passed through to getaddrinfo
    :return: the getaddrinfo result list, or None on resolution failure
    """
    try:
        return socket.getaddrinfo(contact_point, port,
                                  socket.AF_UNSPEC, socket.SOCK_STREAM)
    except socket.gaierror:
        # Resolution failures are expected and non-fatal; log at debug only.
        log.debug('Could not resolve hostname "{}" '
                  'with port {}'.format(contact_point, port))
        return None
|
java
|
/**
 * Describes the identity provider configuration, applying the standard
 * pre-execution request hooks first.
 */
@Override
public DescribeIdentityProviderConfigurationResult describeIdentityProviderConfiguration(DescribeIdentityProviderConfigurationRequest request) {
    request = beforeClientExecution(request);
    return executeDescribeIdentityProviderConfiguration(request);
}
|
java
|
/**
 * Returns the IfcDerivedUnit EClass, lazily resolving it from the
 * registered Ifc4 package on first access.
 */
@Override
public EClass getIfcDerivedUnit() {
    if (ifcDerivedUnitEClass == null) {
        // Classifier index 172 corresponds to IfcDerivedUnit in the generated package.
        ifcDerivedUnitEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(172);
    }
    return ifcDerivedUnitEClass;
}
|
python
|
def tokenize(self,
             sentence,
             normalize=True,
             is_feature=False,
             is_surface=False,
             return_list=False,
             func_normalizer=text_preprocess.normalize_text):
    # type: (text_preprocess, bool, bool, bool, bool, Callable[[str], text_type])->Union[List[text_type], TokenizedSenetence]
    """This method returns tokenized result.

    If return_list==True, this method returns list whose element is tuple consisted with word_stem and POS.
    If return_list==False (the default), this method returns TokenizedSenetence object.
    """
    # NOTE(review): `normalize` is validated but never consulted below;
    # normalization appears to be controlled via func_normalizer — confirm.
    assert isinstance(normalize, bool)
    assert isinstance(sentence, text_type)
    # Normalize first so the analyzer sees canonical text.
    normalized_sentence = func_normalizer(sentence)
    result = self.call_juman_interface(normalized_sentence)
    token_objects = [
        self.__extract_morphological_information(
            mrph_object=morph_object,
            is_surface=is_surface,
            is_feature=is_feature
        )
        for morph_object in result]
    if return_list:
        tokenized_objects = TokenizedSenetence(
            sentence=sentence,
            tokenized_objects=token_objects
        )
        return tokenized_objects.convert_list_object()
    else:
        tokenized_objects = TokenizedSenetence(
            sentence=sentence,
            tokenized_objects=token_objects)
        return tokenized_objects
|
python
|
def get_online_version():
    """Download update info and parse it.

    Returns a (version, url) pair; either may be None when the download
    fails or the expected tags are missing.
    """
    # prevent getting a cached answer
    headers = {'Pragma': 'no-cache', 'Cache-Control': 'no-cache'}
    content, info = get_content(UPDATE_URL, addheaders=headers)
    if content is None:
        return content, info
    version = None
    url = None
    for line in content.splitlines():
        if line.startswith(VERSION_TAG):
            version = line.split(':', 1)[1].strip()
        elif line.startswith(URL_TAG):
            # Substitute the previously seen version into the URL template.
            url = line.split(':', 1)[1].strip().replace('${version}', version)
    return version, url
|
python
|
def logout(self):
    """
    Log the user out of all clients and clear the id_token,
    refresh_token, access_token and token_type attributes.

    :return:
    """
    self.client.global_sign_out(
        AccessToken=self.access_token
    )
    # Drop every locally cached credential.
    for attr in ('id_token', 'refresh_token', 'access_token', 'token_type'):
        setattr(self, attr, None)
|
python
|
def _status_apf():
    '''
    Return True if apf is running otherwise return False
    '''
    found = False
    filter_table = iptc.Table(iptc.Table.FILTER)
    # APF installs a chain whose name contains "sanity"; scan all chains.
    for chain in filter_table.chains:
        if 'sanity' in chain.name.lower():
            found = True
    return found
|
java
|
/**
 * Merges the form inputs known to the form handler into the eventable's
 * related inputs (skipping duplicates), then fills in the form elements.
 *
 * @param eventable the event whose related form inputs are updated in place
 */
private void handleInputElements(Eventable eventable) {
    CopyOnWriteArrayList<FormInput> formInputs = eventable.getRelatedFormInputs();
    for (FormInput formInput : formHandler.getFormInputs()) {
        if (!formInputs.contains(formInput)) {
            formInputs.add(formInput);
        }
    }
    formHandler.handleFormElements(formInputs);
}
|
python
|
def process_value(self, value):
    """Process publication value.

    Normalizes a catalog/content value for publication: UIDs and content
    objects become SuperModels, strings become UTF-8, containers are
    processed recursively and callables are called and re-processed.
    """
    # UID or content object -> SuperModel (UIDs are checked first).
    if api.is_uid(value) or api.is_object(value):
        return self.to_super_model(value)
    # String -> UTF-8 encoded unicode
    if isinstance(value, basestring):
        return safe_unicode(value).encode("utf-8")
    # DateTime passes through untouched.
    if isinstance(value, DateTime):
        return value
    # Sequences: process each element.
    if isinstance(value, (LazyMap, list, tuple)):
        return map(self.process_value, value)
    # Mappings: process each value, keep keys.
    if isinstance(value, (dict,)):
        return {k: self.process_value(v) for k, v in value.iteritems()}
    # Callables: call, then process the result.
    if safe_callable(value):
        return self.process_value(value())
    # Always return the unprocessed value last.
    return value
|
python
|
def redirect(self, id, name, **options):
    """
    Forwards an incoming call to another destination / phone number before answering it.

    Argument: id is a String
    Argument: name is a String
    Argument: **options is a set of optional keyword arguments.
    See https://www.tropo.com/docs/webapi/redirect
    """
    # Append the serialized Redirect step to this session's step list.
    self._steps.append(Redirect(id, name, **options).obj)
|
java
|
/**
 * Returns the configured MIME types, falling back to a synthetic
 * "text/x-&lt;language&gt;" type when none are configured.
 */
@Pure
public List<String> getMimeTypes() {
    if (this.mimeTypes.isEmpty()) {
        return Arrays.asList("text/x-" + getLanguageSimpleName().toLowerCase()); //$NON-NLS-1$
    }
    return this.mimeTypes;
}
|
python
|
def output(data, **kwargs):  # pylint: disable=unused-argument
    '''
    Read in the dict structure generated by the salt key API methods and
    print the structure.

    Status buckets are mapped to colored headings; key names (and, for
    dict-valued buckets, their values) are printed under each heading.
    '''
    color = salt.utils.color.get_colors(
        __opts__.get('color'),
        __opts__.get('color_theme'))
    strip_colors = __opts__.get('strip_colors', True)
    ident = 0
    # Indent key listings when several key sets are printed together.
    if __opts__.get('__multi_key'):
        ident = 4
    if __opts__['transport'] in ('zeromq', 'tcp'):
        # ZeroMQ/TCP transports use the minions_* bucket names.
        acc = 'minions'
        pend = 'minions_pre'
        den = 'minions_denied'
        rej = 'minions_rejected'
        cmap = {pend: color['RED'],
                acc: color['GREEN'],
                den: color['MAGENTA'],
                rej: color['BLUE'],
                'local': color['MAGENTA']}
        trans = {pend: u'{0}{1}Unaccepted Keys:{2}'.format(
            ' ' * ident,
            color['LIGHT_RED'],
            color['ENDC']),
            acc: u'{0}{1}Accepted Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_GREEN'],
                color['ENDC']),
            den: u'{0}{1}Denied Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_MAGENTA'],
                color['ENDC']),
            rej: u'{0}{1}Rejected Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_BLUE'],
                color['ENDC']),
            'local': u'{0}{1}Local Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_MAGENTA'],
                color['ENDC'])}
    else:
        # Other transports use plain bucket names (no "denied" bucket here).
        acc = 'accepted'
        pend = 'pending'
        rej = 'rejected'
        cmap = {pend: color['RED'],
                acc: color['GREEN'],
                rej: color['BLUE'],
                'local': color['MAGENTA']}
        trans = {pend: u'{0}{1}Unaccepted Keys:{2}'.format(
            ' ' * ident,
            color['LIGHT_RED'],
            color['ENDC']),
            acc: u'{0}{1}Accepted Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_GREEN'],
                color['ENDC']),
            rej: u'{0}{1}Rejected Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_BLUE'],
                color['ENDC']),
            'local': u'{0}{1}Local Keys:{2}'.format(
                ' ' * ident,
                color['LIGHT_MAGENTA'],
                color['ENDC'])}
    ret = ''
    for status in sorted(data):
        ret += u'{0}\n'.format(trans[status])
        for key in sorted(data[status]):
            key = salt.utils.data.decode(key)
            # Optionally strip terminal escape sequences from key names.
            skey = salt.output.strip_esc_sequence(key) if strip_colors else key
            if isinstance(data[status], list):
                ret += u'{0}{1}{2}{3}\n'.format(
                    ' ' * ident,
                    cmap[status],
                    skey,
                    color['ENDC'])
            if isinstance(data[status], dict):
                # Dict buckets also print each key's value.
                ret += u'{0}{1}{2}: {3}{4}\n'.format(
                    ' ' * ident,
                    cmap[status],
                    skey,
                    data[status][key],
                    color['ENDC'])
    return ret
|
java
|
/**
 * Returns an assertion configured to ignore extra repeated-field elements
 * for the given field numbers.
 *
 * @param firstFieldNumber the first field number to ignore
 * @param rest any additional field numbers to ignore
 */
public IterableOfProtosFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFields(
    int firstFieldNumber, int... rest) {
  return usingConfig(
      config.ignoringExtraRepeatedFieldElementsOfFields(asList(firstFieldNumber, rest)));
}
|
python
|
def image_size(self, pnmfile):
    """Get width and height of pnm file.

    Parses the output of the `pnmfile` utility, e.g.:

        simeon@homebox src>pnmfile /tmp/214-2.png
        /tmp/214-2.png:PPM raw, 100 by 100 maxval 255

    :param pnmfile: path of the file to inspect
    :return: (width, height) tuple of ints
    :raises IIIFError: when the size cannot be parsed from the output
    """
    pout = os.popen(self.shellsetup + self.pnmfile + ' ' + pnmfile, 'r')
    pnmfileout = pout.read(200)
    pout.close()
    # Raw string: `\d` in a non-raw literal is an invalid escape sequence
    # (DeprecationWarning on Python 3, error in newer versions).
    m = re.search(r', (\d+) by (\d+) ', pnmfileout)
    if m is None:
        raise IIIFError(
            text="Bad output from pnmfile when trying to get size.")
    w = int(m.group(1))
    h = int(m.group(2))
    return (w, h)
|
python
|
def get_balance(self, address, block="latest"):
    """
    https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance
    """
    reply = self.make_request("eth_getBalance", [address, block])
    # The result is a hex quantity string, e.g. "0x1b4".
    return int(reply['result'], 16)
|
python
|
def write_plot(plot, filename, width=DEFAULT_PAGE_WIDTH, height=DEFAULT_PAGE_HEIGHT, unit=DEFAULT_PAGE_UNIT):
    """Writes a plot SVG to a file.

    Args:
        plot (list): a list of layers to plot
        filename (str): the name of the file to write
        width (float): the width of the output SVG
        height (float): the height of the output SVG
        unit (str): the unit of the height and width
    """
    rendered = plot_to_svg(plot, width, height, unit)
    with open(filename, 'w') as outfile:
        outfile.write(rendered)
|
python
|
def eth_getBlockByNumber(self, number):
    """Get block body by block number.

    :param number: block number to look up
    :return: the RLP-decoded block body (``Block`` sedes)
    """
    # Resolve the canonical hash at this height, then build the body DB key.
    block_hash = self.reader._get_block_hash(number)
    block_number = _format_block_number(number)
    body_key = body_prefix + block_number + block_hash
    block_data = self.db.get(body_key)
    body = rlp.decode(block_data, sedes=Block)
    return body
|
java
|
/**
 * Tries to parse {@code date} into {@code out}: first as separator-delimited
 * tokens read from the right (day, month, optional year), then as
 * fixed-width digits from the right (ddMM[yyyy]).
 *
 * @param date the raw date string
 * @param out calendar receiving the parsed fields; mutated in place and
 *            possibly partially updated even when parsing ultimately fails
 * @param separator regex used to split the date string
 * @return true when either parsing strategy succeeded
 */
private static boolean isValidDate(String date, Calendar out, String separator) {
    try {
        String[] dates = date.split(separator);
        // Tokens are read from the right: ... [year] month day.
        // NOTE(review): unlike the fallback below, MONTH here is not adjusted
        // to Calendar's 0-based months — confirm whether that is intended.
        out.set(Calendar.DATE, Integer.parseInt(dates[dates.length - 1]));
        out.set(Calendar.MONTH, Integer.parseInt(dates[dates.length - 2]));
        if (dates.length > 2) {
            out.set(Calendar.YEAR, Integer.parseInt(dates[dates.length - 3]));
        }
    } catch (Exception e) {
        try {
            // Fixed-width fallback: last 2 digits are the day, the next 2 the
            // month (converted to Calendar's 0-based months via -1).
            out.set(Calendar.DATE, Integer.parseInt(date.substring(date.length() - 2, date.length())));
            out.set(Calendar.MONTH, Integer.parseInt(date.substring(date.length() - 4, date.length() - 2)) - 1);
            if (date.length() > 4) {
                // NOTE(review): the trailing `- 1` on YEAR looks like a
                // copy-paste from the MONTH line (years need no 0-base
                // adjustment) — confirm intent before changing.
                out.set(Calendar.YEAR, Integer.parseInt(date.substring(date.length() - 8, date.length() - 4)) - 1);
            }
        } catch (Exception e2) {
            return false;
        }
    }
    return true;
}
|
python
|
def get_dictionary_representation_of_object_attributes(obj, omit_null_fields=False):
    """Returns a dictionary of object's attributes, ignoring methods.

    @param obj: The object to represent as dict
    @param omit_null_fields: If true, will not include fields in the dictionary that are null
    @return: Dictionary of the object's public attributes (names starting
        with '_' are always excluded)
    """
    # items() works on both Python 2 and 3; iteritems() is Python-2 only.
    # A dict comprehension also avoids mutating a copy while iterating.
    return {
        k: v
        for k, v in obj.__dict__.items()
        if not k.startswith('_') and not (omit_null_fields and v is None)
    }
|
java
|
/**
 * Creates a date iterator for the given recurrence data.
 *
 * @param rdata recurrence rule data to parse
 * @param start the series start date
 * @param tzid time zone in which the recurrence is evaluated
 * @param strict when true, malformed rules are rejected rather than
 *               leniently handled
 * @return an iterator over the occurrence dates
 * @throws ParseException when the recurrence data cannot be parsed
 */
public static DateIterator createDateIterator(
    String rdata, Date start, TimeZone tzid, boolean strict)
    throws ParseException {
  return new RecurrenceIteratorWrapper(
      RecurrenceIteratorFactory.createRecurrenceIterator(
          rdata, dateToDateValue(start, true),
          tzid, strict));
}
|
java
|
/**
 * Resizes the HUD canvas: allocates a fresh ARGB bitmap and invalidates the
 * cached texture so it is rebuilt at the new size.
 *
 * @param width new canvas width in pixels
 * @param height new canvas height in pixels
 */
public void setCanvasWidthHeight(int width, int height) {
    hudWidth = width;
    hudHeight = height;
    HUD = Bitmap.createBitmap(width, height, Config.ARGB_8888);
    canvas = new Canvas(HUD);
    texture = null; // force texture re-creation from the new bitmap
}
|
java
|
/**
 * Writes a multipart part header with no content type.
 *
 * @param name the form field name
 * @param filename the file name for the part
 * @return this request, for chaining
 * @throws IOException when writing the header fails
 */
protected OkRequest<T> writePartHeader(final String name, final String filename)
    throws IOException {
  return writePartHeader(name, filename, null);
}
|
java
|
/**
 * Compresses the given directory into the given output stream as a ZIP.
 *
 * @param sourceDir the directory to compress; must exist
 * @param os the stream to write the archive to (not closed by this method —
 *           the caller owns it)
 * @param mapper maps file names to entry names inside the archive
 * @param compressionLevel the ZIP compression level to use
 */
public static void pack(File sourceDir, OutputStream os, NameMapper mapper, int compressionLevel) {
    log.debug("Compressing '{}' into a stream.", sourceDir);
    if (!sourceDir.exists()) {
        throw new ZipException("Given file '" + sourceDir + "' doesn't exist!");
    }
    ZipOutputStream out = null;
    IOException error = null;
    try {
        out = new ZipOutputStream(new BufferedOutputStream(os));
        out.setLevel(compressionLevel);
        pack(sourceDir, out, mapper, "", true);
    }
    catch (IOException e) {
        error = e;
    }
    finally {
        // Only finish/flush on success; on failure the archive is already
        // corrupt, so the stream is deliberately left unfinished. A failure
        // during finish()/flush() itself is captured and rethrown below.
        if (out != null && error == null) {
            try {
                out.finish();
                out.flush();
            }
            catch (IOException e) {
                error = e;
            }
        }
    }
    if (error != null) {
        throw ZipExceptionUtil.rethrow(error);
    }
}
|
python
|
def __read_lipd_contents():
    """
    Use the file metadata to read in the LiPD file contents as a dataset library

    :return dict: Metadata (a single dataset's contents when only one .lpd
        file is listed, otherwise a dict keyed by dataset file name)
    """
    global files, settings
    _d = {}
    try:
        _lpd_entries = files[".lpd"]
        if len(_lpd_entries) == 1:
            # Single record: return its contents directly, unkeyed.
            _d = lipd_read(_lpd_entries[0]["full_path"])
            if settings["verbose"]:
                print("Finished read: 1 record")
        else:
            # Multiple records: key each dataset by its file name.
            for _entry in _lpd_entries:
                _d[_entry["filename_no_ext"]] = lipd_read(_entry["full_path"])
            if settings["verbose"]:
                print("Finished read: {} records".format(len(_d)))
    except Exception as e:
        print("Error: read_lipd_contents: {}".format(e))
    return _d
|
java
|
/**
 * Starts the camera preview feeding the barcode detector.
 * First shows the Google Play Services error dialog when services are
 * unavailable, then attempts to start the preview. On an I/O failure the
 * camera source is released and the listener (if any) is notified.
 *
 * @throws SecurityException if the camera permission has not been granted
 */
private void startCameraSource() throws SecurityException {
    // check that the device has play services available.
    int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
            getActivity().getApplicationContext());
    if (code != ConnectionResult.SUCCESS) {
        Dialog dlg =
                GoogleApiAvailability.getInstance().getErrorDialog(getActivity(), code, RC_HANDLE_GMS);
        dlg.show();
    }

    if (mCameraSource != null) {
        try {
            mPreview.start(mCameraSource, mGraphicOverlay);
        } catch (IOException e) {
            Log.e("BARCODE-SCANNER", "Unable to start camera source.", e);
            // Release the camera so another attempt can re-create it.
            mCameraSource.release();
            mCameraSource = null;

            if (mListener != null)
                mListener.onBarcodeScanningFailed("could not create camera source");
        }
    }
}
|
java
|
/**
 * Returns a new importer that additionally applies the given
 * {@link ImportOption} when selecting class files to import.
 * The receiver is left unchanged (immutable builder style).
 *
 * @param option the import filter to add
 * @return a new {@link ClassFileImporter} with the option appended
 */
@PublicAPI(usage = ACCESS)
public ClassFileImporter withImportOption(ImportOption option) {
    return new ClassFileImporter(importOptions.with(option));
}
|
java
|
/**
 * Returns the given string with its first character upper-cased.
 * Blank input and strings that already start with an upper-case character
 * are returned unchanged (same instance).
 */
public static String capitalize (String s)
{
    // Nothing to do for null/blank input.
    if (isBlank(s)) {
        return s;
    }
    char first = s.charAt(0);
    if (Character.isUpperCase(first)) {
        // Already capitalized: hand back the original instance.
        return s;
    }
    return Character.toUpperCase(first) + s.substring(1);
}
|
java
|
/**
 * Resolves the attribute referenced by a query rule's field, following
 * nested attribute paths (e.g. "person.address.city") through referenced
 * entities.
 *
 * @return the resolved attribute, or null when the rule has no field
 * @throws UnknownAttributeException when a path segment does not exist on
 *         the entity type reached at that depth
 */
public static Attribute getQueryRuleAttribute(QueryRule queryRule, EntityType entityType) {
  String field = queryRule.getField();
  if (field == null) {
    return null;
  }

  String[] pathTokens = StringUtils.split(field, NESTED_ATTRIBUTE_SEPARATOR);
  EntityType currentEntityType = entityType;
  Attribute attribute = null;
  for (int i = 0; i < pathTokens.length; i++) {
    String token = pathTokens[i];
    attribute = currentEntityType.getAttribute(token);
    if (attribute == null) {
      throw new UnknownAttributeException(currentEntityType, token);
    }
    // Descend into the referenced entity for every token but the last.
    if (i + 1 < pathTokens.length) {
      currentEntityType = attribute.getRefEntity();
    }
  }
  return attribute;
}
|
python
|
def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    # Normalize the input to a bytearray, rejecting anything else.
    if isinstance(byte_str, bytearray):
        pass
    elif isinstance(byte_str, bytes):
        byte_str = bytearray(byte_str)
    else:
        raise TypeError('Expected object of type bytes or bytearray, got: '
                        '{0}'.format(type(byte_str)))
    # Feed the whole buffer to a one-shot detector and return its verdict.
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()
|
python
|
def _compile_pattern(self, rule):
''' Return a regular expression with named groups for each wildcard. '''
out = ''
for i, part in enumerate(self.syntax.split(rule)):
if i%3 == 0: out += re.escape(part.replace('\\:',':'))
elif i%3 == 1: out += '(?P<%s>' % part if part else '(?:'
else: out += '%s)' % (part or '[^/]+')
return re.compile('^%s$'%out)
|
java
|
/**
 * Reads one CSV line from the reader and fills {@code fieldMap} with the
 * non-empty field values, keyed by the field names in
 * {@code csvFile.m_fieldList} (positional).
 * Handles quoted fields (commas/newlines inside double quotes are literal)
 * and CR, LF and CR/LF line terminators.
 *
 * @return true if any tokens were parsed; on an empty read, false at EOF
 *         and true otherwise
 */
private boolean tokenizeCSVLine(CSVFile csvFile,
                                BufferedReader reader,
                                Map<String, String> fieldMap) {
    fieldMap.clear();
    m_token.setLength(0);

    // First build a list of tokens found in the order they are found.
    List<String> tokenList = new ArrayList<String>();
    boolean bInQuote = false;
    int aChar = 0;
    try {
        while (true) {
            aChar = reader.read();
            if (aChar < 0) {
                break;
            }
            if (!bInQuote && aChar == ',') {
                // Field separator: close the current token.
                tokenList.add(m_token.toString());
                m_token.setLength(0);
            } else if (!bInQuote && aChar == '\r') {
                tokenList.add(m_token.toString());
                m_token.setLength(0);
                // Peek one char to consume a CR/LF pair as one terminator.
                reader.mark(1);
                aChar = reader.read();
                if (aChar == -1) {
                    break;
                }
                if (aChar != '\n') {
                    reader.reset(); // put back non-LF
                }
                break;
            } else if (!bInQuote && aChar == '\n') {
                tokenList.add(m_token.toString());
                m_token.setLength(0);
                break;
            } else if (aChar == '"') {
                // Toggle quoted mode; the quote char itself is not kept.
                bInQuote = !bInQuote;
            } else {
                m_token.append((char)aChar);
            }
        }
    } catch (IOException e) {
        logErrorThrow("I/O error reading file", e.toString());
    }

    // If we hit EOF without a final EOL, we could have a token in the buffer.
    if (m_token.length() > 0) {
        tokenList.add(m_token.toString());
        m_token.setLength(0);
    }

    if (tokenList.size() > 0) {
        // Map tokens to field names positionally, skipping empty values.
        for (int index = 0; index < tokenList.size(); index++) {
            String token = tokenList.get(index).trim();
            if (token.length() > 0) {
                String fieldName = csvFile.m_fieldList.get(index);
                fieldMap.put(fieldName, token);
            }
        }
        return true;
    }

    // No tokens found; return false if EOF.
    return aChar != -1;
}
|
python
|
def new_feature_container(self, idx, path=None):
        """
        Add a new feature container with the given data.

        Parameters:
            idx (str): An unique identifier within the dataset.
            path (str): The path to store the feature file. If None a default path is used.

        Returns:
            FeatureContainer: The newly added feature-container.
        """
        feature_idx = idx
        # De-duplicate the identifier if it is already taken.
        if feature_idx in self._feature_containers.keys():
            feature_idx = naming.index_name_if_in_list(feature_idx,
                                                       self._feature_containers.keys())

        # Resolve the storage path, falling back to the dataset default.
        if path is None:
            if not os.path.isdir(self.path):
                raise ValueError('To copy file the dataset needs to have a path.')
            feature_path = os.path.join(self.path, DEFAULT_FEAT_SUBDIR, feature_idx)
        else:
            feature_path = os.path.abspath(path)

        container = containers.FeatureContainer(feature_path)
        self._feature_containers[feature_idx] = container
        return container
|
python
|
def update_bookmark(self, old, new, *, max_retries=3):
        """
        Update a bookmark and check it was successful.

        The bookmark matches an existing bookmark `old` according to
        bookmark equality and replaces it by `new`. The bookmark
        `new` is added if no bookmark matching `old` exists.

        :param old: the bookmark to replace
        :type old: a :class:`~bookmark_xso.Bookmark` subclass.
        :param new: the replacement bookmark
        :type new: a :class:`~bookmark_xso.Bookmark` subclass.
        :param max_retries: the number of times to retry if replacing
            the bookmark fails.
        :type max_retries: :class:`int`

        :raises RuntimeError: if the bookmark is not in the bookmark list
            after `max_retries` retries.

        After replacing the bookmark it is checked, whether the
        bookmark `new` is in the online storage, if it is not it is
        tried again at most `max_retries` times to replace the
        bookmark. A :class:`RuntimeError` is raised if the bookmark
        could not be replaced successfully after `max_retries`.

        .. note:: Do not modify a bookmark retrieved from the signals
                  or from :meth:`get_bookmarks` to obtain the bookmark
                  `new`, this will lead to data corruption as they are
                  passed by reference. Instead use :func:`copy.copy`
                  and modify the copy.
        """
        def replace_bookmark(bookmarks, old, new):
            # Replace the first bookmark equal to `old` in-place; append
            # `new` when no matching bookmark exists.
            modified_bookmarks = list(bookmarks)
            try:
                i = bookmarks.index(old)
                modified_bookmarks[i] = new
            except ValueError:
                modified_bookmarks.append(new)
            return modified_bookmarks

        with (yield from self._lock):
            bookmarks = yield from self._get_bookmarks()
            try:
                yield from self._set_bookmarks(
                    replace_bookmark(bookmarks, old, new)
                )

                retries = 0
                bookmarks = yield from self._get_bookmarks()
                while retries < max_retries:
                    if new in bookmarks:
                        break
                    yield from self._set_bookmarks(
                        replace_bookmark(bookmarks, old, new)
                    )
                    bookmarks = yield from self._get_bookmarks()
                    retries += 1

                if new not in bookmarks:
                    # Typo fix: message previously read "Cold not ...".
                    raise RuntimeError("Could not update bookmark")
            finally:
                # Always emit the diff signal with the freshest bookmark list.
                self._diff_emit_update(bookmarks)
|
java
|
/**
 * QuickSelect: partially reorders {@code data} so that the element at
 * position {@code rank} is the one it would hold in fully sorted order,
 * with smaller-or-equal elements to its left and larger-or-equal elements
 * to its right. Runs iteratively, recursing (by loop) only into the half
 * containing {@code rank}; small intervals fall back to insertion sort.
 *
 * @param data    the data container to operate on
 * @param adapter adapter providing compare/swap operations on {@code data}
 * @param start   interval start (inclusive)
 * @param end     interval end (exclusive)
 * @param rank    position to place into its final sorted location
 */
public static <T> void quickSelect(T data, Adapter<T> adapter, int start, int end, int rank) {
    while(true) {
      // Optimization for small arrays
      // This also ensures a minimum size below
      if(start + SMALL > end) {
        insertionSort(data, adapter, start, end);
        return;
      }

      // Best of 5 pivot picking:
      // Choose pivots by looking at five candidates.
      final int len = end - start;
      final int seventh = (len >> 3) + (len >> 6) + 1;
      final int m3 = (start + end) >> 1; // middle
      final int m2 = m3 - seventh;
      final int m1 = m2 - seventh;
      final int m4 = m3 + seventh;
      final int m5 = m4 + seventh;

      // Explicit (and optimal) sorting network for 5 elements
      // See Knuth for details.
      if(adapter.compare(data, m1, m2) > 0) {
        adapter.swap(data, m1, m2);
      }
      if(adapter.compare(data, m1, m3) > 0) {
        adapter.swap(data, m1, m3);
      }
      if(adapter.compare(data, m2, m3) > 0) {
        adapter.swap(data, m2, m3);
      }
      if(adapter.compare(data, m4, m5) > 0) {
        adapter.swap(data, m4, m5);
      }
      if(adapter.compare(data, m1, m4) > 0) {
        adapter.swap(data, m1, m4);
      }
      if(adapter.compare(data, m3, m4) > 0) {
        adapter.swap(data, m3, m4);
      }
      if(adapter.compare(data, m2, m5) > 0) {
        adapter.swap(data, m2, m5);
      }
      if(adapter.compare(data, m2, m3) > 0) {
        adapter.swap(data, m2, m3);
      }
      if(adapter.compare(data, m4, m5) > 0) {
        adapter.swap(data, m4, m5);
      }

      // Pick the candidate closest to the requested rank as pivot.
      int best = bestPivot(rank, m1, m2, m3, m4, m5);
      // final double pivot = data[best];
      // Move middle element out of the way.
      adapter.swap(data, best, end - 1);

      // Begin partitioning
      int i = start, j = end - 2;
      // This is classic quicksort stuff
      while(true) {
        while(i <= j && adapter.compare(data, end - 1, i) >= 0) {
          i++;
        }
        while(j >= i && adapter.compare(data, end - 1, j) <= 0) {
          j--;
        }
        if(i >= j) {
          break;
        }
        adapter.swap(data, i, j);
      }

      // Move pivot (former middle element) back into the appropriate place
      adapter.swap(data, i, end - 1);

      // Skip duplicates to narrow down the search interval:
      while(rank < i && adapter.compare(data, i, i - 1) == 0) {
        --i;
      }
      while(rank > i && adapter.compare(data, i, i + 1) == 0) {
        ++i;
      }

      // In contrast to quicksort, we only need to recurse into the half we are
      // interested in. Instead of recursion we now use iteration.
      if(rank < i) {
        end = i;
      }
      else if(rank > i) {
        start = i + 1;
      }
      else {
        break;
      }
    } // Loop until rank==i
  }
|
java
|
/**
 * Builds an HttpClient configured with retries, timeouts and an optional
 * proxy (taken from the {@code proxyHostName}/{@code proxyPort} fields).
 *
 * @param retries           number of automatic request retries; values <= 0
 *                          leave the default retry handler in place
 * @param connectionTimeout connect timeout in milliseconds (<= 0 keeps the default)
 * @param socketTimeout     socket read timeout in milliseconds (<= 0 keeps the default)
 * @return the configured client
 */
private HttpClient getClient(int retries, int connectionTimeout, int socketTimeout) {
    // NOTE(review): DefaultHttpClient and the HttpParams API are deprecated
    // since Apache HttpClient 4.3 (HttpClientBuilder is the replacement).
    HttpClient httpClient = new DefaultHttpClient();
    HttpParams httpParams = httpClient.getParams();

    // Retries
    if (retries > 0) {
        // The second argument enables retrying requests that were sent.
        DefaultHttpRequestRetryHandler retryHandler = new DefaultHttpRequestRetryHandler(retries, true);
        ((AbstractHttpClient) httpClient).setHttpRequestRetryHandler(retryHandler);
    }

    // Timeouts
    if (connectionTimeout > 0) {
        httpParams.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, connectionTimeout);
    }
    if (socketTimeout > 0) {
        httpParams.setParameter(CoreConnectionPNames.SO_TIMEOUT, socketTimeout);
    }

    //Proxy
    if (StringUtils.isNotEmpty(proxyHostName)) {
        HttpHost proxy = new HttpHost(proxyHostName, proxyPort);
        httpParams.setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
    }
    return httpClient;
}
|
python
|
def get_name_uids(self, name, channel=None):
        """get list of users (helper)

        :param channel: number [1:7]
        :return: list of users
        """
        if channel is None:
            channel = self.get_network_channel()
        max_ids = self.get_channel_max_user_count(channel)
        # NOTE(review): range(1, max_ids) never checks uid == max_ids --
        # confirm whether the last user slot is intentionally excluded.
        return [uid for uid in range(1, max_ids)
                if self.get_user_name(uid=uid) == name]
|
python
|
def del_repo(repo, root=None):
    '''
    Delete a repo.

    root
        operate on a different root directory.

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.del_repo alias
    '''
    repos_cfg = _get_configured_repos(root=root)
    for alias in repos_cfg.sections():
        if alias != repo:
            continue
        # Remove the matching repository via zypper and report its message.
        doc = __zypper__(root=root).xml.call('rr', '--loose-auth', '--loose-query', alias)
        msg = doc.getElementsByTagName('message')
        if doc.getElementsByTagName('progress') and msg:
            return {
                repo: True,
                'message': msg[0].childNodes[0].nodeValue,
            }

    raise CommandExecutionError('Repository \'{0}\' not found.'.format(repo))
|
java
|
/**
 * Atomically takes ownership of the pending start runnables.
 * The field is nulled before the collection is returned so that a
 * concurrent remove cannot mutate it while the caller traverses it
 * (the method is synchronized together with the other accessors).
 *
 * @return the runnables collected so far, possibly {@code null}
 */
@Override
public synchronized Collection<WrapperRunnable> getAndClearStartRunnables() {
    if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINEST)) {
        logger.entering(CLASS_NAME, "getAndClearStartRunnables",this);
    }
    //set the pointer to null so that the remove cannot remove an Runnable
    //in the middle of traversing over the list.
    Collection<WrapperRunnable> tempStartRunnables = startRunnables;
    this.startRunnables=null;
    if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINEST)) {
        logger.exiting(CLASS_NAME, "getAndClearStartRunnables",this);
    }
    return tempStartRunnables;
}
|
python
|
def array(self, name):
        """
        Returns the array of tables with the given name.
        """
        if name not in self._navigable:
            # No such entry yet: hand back a lazily-created array of tables.
            return ArrayOfTables(toml_file=self, name=name)
        if isinstance(self._navigable[name], (list, tuple)):
            return self[name]
        # The name exists but maps to something that is not an array.
        raise NoArrayFoundError
|
python
|
def update(self, read, write, manage):
        """
        Update the SyncMapPermissionInstance

        :param bool read: Read access.
        :param bool write: Write access.
        :param bool manage: Manage access.

        :returns: Updated SyncMapPermissionInstance
        :rtype: twilio.rest.sync.v1.service.sync_map.sync_map_permission.SyncMapPermissionInstance
        """
        # POST the new permission flags to this instance's resource URI.
        payload = self._version.update(
            'POST',
            self._uri,
            data=values.of({'Read': read, 'Write': write, 'Manage': manage, }),
        )

        solution = self._solution
        return SyncMapPermissionInstance(
            self._version,
            payload,
            service_sid=solution['service_sid'],
            map_sid=solution['map_sid'],
            identity=solution['identity'],
        )
|
python
|
def options(self):
        """Train tickets query options."""
        first_arg = self.get(0)
        if first_arg.startswith('-') and not self.is_asking_for_help:
            # Dash-prefixed argument: everything after the dash is the options.
            return first_arg[1:]
        # Otherwise keep only the recognized option letters.
        return ''.join(ch for ch in first_arg if ch in 'dgktz')
|
python
|
def runcommand(cosmology='WMAP5'):
    """ Example interface commands.

    Demonstrates typical `commah.run` queries -- concentrations, accretion
    rates, halo mass history and formation redshifts -- and prints each
    result to stdout.

    :param cosmology: cosmology name passed through to `commah.run`
        (default 'WMAP5').
    :return: the string "Done".
    """

    # Return the WMAP5 cosmology concentration predicted for
    # z=0 range of masses
    Mi = [1e8, 1e9, 1e10]
    zi = 0
    print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi))
    output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi)
    print(output['c'].flatten())

    # Return the WMAP5 cosmology concentration predicted for
    # z=0 range of masses AND cosmological parameters
    Mi = [1e8, 1e9, 1e10]
    zi = 0
    print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi))
    output, cosmo = commah.run(cosmology=cosmology, zi=zi, Mi=Mi,
                               retcosmo=True)
    print(output['c'].flatten())
    print(cosmo)

    # Return the WMAP5 cosmology concentration predicted for MW
    # mass (2e12 Msol) across redshift
    Mi = 2e12
    z = [0, 0.5, 1, 1.5, 2, 2.5]
    output = commah.run(cosmology=cosmology, zi=0, Mi=Mi, z=z)
    for zval in z:
        print("M(z=0)=%s has c(z=%s)=%s"
              % (Mi, zval, output[output['z'] == zval]['c'].flatten()))

    # Return the WMAP5 cosmology concentration predicted for MW
    # mass (2e12 Msol) across redshift
    Mi = 2e12
    zi = [0, 0.5, 1, 1.5, 2, 2.5]
    output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi)
    for zval in zi:
        print("M(z=%s)=%s has concentration %s"
              % (zval, Mi, output[(output['zi'] == zval) &
                                  (output['z'] == zval)]['c'].flatten()))

    # Return the WMAP5 cosmology concentration and
    # rarity of high-z cluster
    Mi = 2e14
    zi = 6
    output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi)
    print("Concentrations for haloes of mass %s at z=%s" % (Mi, zi))
    print(output['c'].flatten())
    print("Mass variance sigma of haloes of mass %s at z=%s" % (Mi, zi))
    print(output['sig'].flatten())
    print("Fluctuation for haloes of mass %s at z=%s" % (Mi, zi))
    print(output['nu'].flatten())

    # Return the WMAP5 cosmology accretion rate prediction
    # for haloes at range of redshift and mass
    Mi = [1e8, 1e9, 1e10]
    zi = [0]
    z = [0, 0.5, 1, 1.5, 2, 2.5]
    output = commah.run(cosmology=cosmology, zi=zi, Mi=Mi, z=z)

    for Mval in Mi:
        print("dM/dt for halo of mass %s at z=%s across redshift %s is: "
              % (Mval, zi, z))
        print(output[output['Mi'] == Mval]['dMdt'].flatten())

    # Return the WMAP5 cosmology Halo Mass History for haloes with M(z=0) = 1e8
    M = [1e8]
    z = [0, 0.5, 1, 1.5, 2, 2.5]
    print("Halo Mass History for z=0 mass of %s across z=%s" % (M, z))
    output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z)
    print(output['Mz'].flatten())

    # Return the WMAP5 cosmology formation redshifts for haloes at
    # range of redshift and mass
    M = [1e8, 1e9, 1e10]
    z = [0]
    print("Formation Redshifts for haloes of mass %s at z=%s" % (M, z))
    output = commah.run(cosmology=cosmology, zi=0, Mi=M, z=z)
    for Mval in M:
        print(output[output['Mi'] == Mval]['zf'].flatten())

    return("Done")
|
java
|
/**
 * Returns the array element at {@code index}, or {@code defaultValue} when
 * the index is beyond the (null-safe) length of the array.
 */
public static <T> T getElementAt(T[] array, int index, T defaultValue) {
    if (nullSafeLength(array) > index) {
        return array[index];
    }
    return defaultValue;
}
|
java
|
/**
 * Returns the default task field aliases used when a project defines none
 * of its own.
 */
public static Map<FieldType, String> getDefaultAliases()
{
    Map<FieldType, String> aliases = new HashMap<FieldType, String>();
    aliases.put(TaskField.DATE1, "Suspend Date");
    aliases.put(TaskField.DATE2, "Resume Date");
    aliases.put(TaskField.TEXT1, "Code");
    aliases.put(TaskField.TEXT2, "Activity Type");
    aliases.put(TaskField.TEXT3, "Status");
    aliases.put(TaskField.NUMBER1, "Primary Resource Unique ID");
    return aliases;
}
|
java
|
/**
 * Clears the currently-visible peer when the conversation for that peer is
 * hidden; hiding a different conversation leaves the state untouched.
 *
 * @param peer the peer whose conversation was hidden
 */
public void onConversationHidden(Peer peer) {
    // Null-check first: equals() is only invoked on a non-null visiblePeer.
    if (visiblePeer != null && visiblePeer.equals(peer)) {
        this.visiblePeer = null;
    }
}
|
java
|
/**
 * Returns a new conjunction that matches when both this matcher and the
 * given matcher match. The receiver is not modified.
 *
 * @param matcher the additional condition; must not be {@code null}
 * @return a new {@link ConjunctionMatcher} with {@code matcher} appended
 */
public ConjunctionMatcher<T> and(Matcher<? super T> matcher)
{
    requireNonNull(matcher, "matcher");
    return new ConjunctionMatcher<>(compositeDescription, concat(matchers, matcher));
}
|
java
|
/**
 * Returns the geographic positions of the two opposite corners (top-left
 * and bottom-right) of the viewer's current viewport.
 */
private static Set<GeoPosition> getMapGeoBounds(JXMapViewer mapViewer)
{
    TileFactory tileFactory = mapViewer.getTileFactory();
    int zoom = mapViewer.getZoom();
    Rectangle2D viewport = mapViewer.getViewportBounds();

    // Convert both viewport corners from pixel space to geo coordinates.
    Point2D topLeft = new Point2D.Double(viewport.getX(), viewport.getY());
    Point2D bottomRight = new Point2D.Double(
            viewport.getX() + viewport.getWidth(),
            viewport.getY() + viewport.getHeight());

    Set<GeoPosition> corners = new HashSet<GeoPosition>();
    corners.add(tileFactory.pixelToGeo(topLeft, zoom));
    corners.add(tileFactory.pixelToGeo(bottomRight, zoom));
    return corners;
}
|
java
|
/**
 * Returns the canonical {@code ValueNode} for the given final value,
 * registering a new node in {@code nodes} when no equivalent one exists.
 * Reuses the shared {@code lookupFinalValueNode} as a mutable lookup key to
 * avoid allocating a node in the common already-registered case.
 */
private final ValueNode registerFinalValue(int value) {
    // We always register final values because while ADDING
    // we do not know yet whether we will build fast or small.
    lookupFinalValueNode.setFinalValue(value);
    Node oldNode=nodes.get(lookupFinalValueNode);
    if(oldNode!=null) {
        return (ValueNode)oldNode;
    }
    ValueNode newNode=new ValueNode(value);
    // If put() returns a non-null value from an equivalent, previously
    // registered node, then get() failed to find that and we will leak newNode.
    oldNode=nodes.put(newNode, newNode);
    assert(oldNode==null);
    return newNode;
}
|
java
|
/**
 * Extends the logical length of the buffer by {@code additionalLength}
 * bytes, reallocating the backing {@code ByteBuffer} when its capacity is
 * exceeded.
 *
 * @param additionalLength number of bytes to grow by; must be >= 0
 * @param copyData whether the existing contents are copied into a
 *                 reallocated buffer
 * @throws IllegalArgumentException if {@code additionalLength} is negative
 */
protected void resizeBuffer (final int additionalLength, final boolean copyData) {
    if (additionalLength < 0) { throw new IllegalArgumentException("The length must be greater or equal than 0."); }
    // Position at the current logical end before any copy/limit changes.
    dataBuffer.position(length);
    // reallocate a bigger dataBuffer, if needed
    if (length + additionalLength > dataBuffer.capacity()) {
        final ByteBuffer newBuffer = ByteBuffer.allocate(getTotalLength(length + additionalLength));
        // copy old data...
        if (copyData) {
            // flip() bounds the old buffer to [0, length) for the bulk put.
            dataBuffer.flip();
            newBuffer.put(dataBuffer);
        }
        dataBuffer = newBuffer;
        dataBuffer.limit(getTotalLength(length + additionalLength));
    }
    length += additionalLength;
}
|
java
|
/**
 * Checks whether {@code ref} may read {@code name} before any module in its
 * dependency chain has set it.
 *
 * @return true when no set of the name is guaranteed to precede this
 *         reference (a "bad" cross-module read)
 */
private boolean checkForBadModuleReference(Name name, Ref ref) {
    JSModuleGraph moduleGraph = compiler.getModuleGraph();
    if (name.getGlobalSets() == 0 || ref.type == Ref.Type.SET_FROM_GLOBAL) {
        // Back off if either 1) this name was never set, or 2) this reference /is/ a set.
        return false;
    }
    if (name.getGlobalSets() == 1) {
        // there is only one global set - it should be set as name.declaration
        // just look at that declaration instead of iterating through every single reference.
        Ref declaration = checkNotNull(name.getDeclaration());
        return !isSetFromPrecedingModule(ref, declaration, moduleGraph);
    }
    // there are multiple sets, so check if any of them happens in this module or a module earlier
    // in the dependency chain.
    for (Ref set : name.getRefs()) {
        if (isSetFromPrecedingModule(ref, set, moduleGraph)) {
            return false;
        }
    }
    return true;
}
|
python
|
def _format_pair_no_equals(explode, separator, escape, key, value):
"""
Format a key, value pair but don't include the equals sign
when there is no value
"""
if not value:
return key
return _format_pair(explode, separator, escape, key, value)
|
python
|
def random_filename(path=None):
    """Make a UUID-based file name which is extremely unlikely
    to exist already."""
    name = uuid4().hex
    # Prefix the name with the directory when one was supplied.
    return name if path is None else os.path.join(path, name)
|
python
|
def op_paths(self, path_base):
        # type: (Union[str, UrlPath]) -> Generator[Tuple[UrlPath, Operation]]
        """
        Return all operations stored in containers.
        """
        # Extend the base path with this container's prefix, then flatten
        # every path exposed by each contained operation.
        path_base += self.path_prefix
        for operation in self._operations:
            yield from operation.op_paths(path_base)
|
java
|
/**
 * Maps the right type parameter of this {@code Const}. Since a Const
 * carries no {@code B} value, only the static type changes, which is why
 * the unchecked cast of the superinterface's result is safe.
 */
@Override
@SuppressWarnings("unchecked")
public <C> Const<A, C> biMapR(Function<? super B, ? extends C> fn) {
    return (Const<A, C>) Bifunctor.super.biMapR(fn);
}
|
java
|
/**
 * Resets this node: detaches its neighbours and allocates a zeroed data
 * array whose length is {@code size} rounded up to a multiple of 4.
 *
 * @param size requested data size in bytes (rounded up to a multiple of 4)
 * @param off  bit offset; stored as both {@code offset} and the initial
 *             {@code usableBits}
 */
public void clear(int size, int off) {
    // Bug fix: the previous condition was "size%4 != 4", which is always
    // true (a remainder modulo 4 can never equal 4), so sizes that were
    // already a multiple of 4 were padded by four extra bytes.
    if (size%4 != 0) size = (size/4+1)*4;
    next = null;
    previous = null;
    // A freshly allocated Java array is guaranteed to be zero-filled, so
    // the explicit clearing loop was redundant and has been removed.
    data = new byte[size];
    offset = off;
    usableBits = off;
}
|
python
|
def perform(cls, entity_cls, usecase_cls, request_object_cls,
            payload: dict, raise_error=False):
        """
        This method bundles all essential artifacts and initiates usecase
        execution.

        :param entity_cls: The entity class to be used for running the usecase
        :param usecase_cls: The usecase class that will be executed by
            the tasklet.
        :param request_object_cls: The request object to be used as input to the
            use case
        :type request_object_cls: protean.core.Request
        :param payload: The payload to be passed to the request object
        :type payload: dict
        :param raise_error: Raise error when a failure response is generated
        :type raise_error: bool
        """
        # Build the use case and its request object from the payload.
        use_case = usecase_cls()
        payload.update({'entity_cls': entity_cls})
        request_object = request_object_cls.from_dict(payload)

        # Execute the use case.
        response = use_case.execute(request_object)

        # Optionally convert a failure response into an exception.
        if raise_error and isinstance(response, ResponseFailure):
            raise UsecaseExecutionError(
                (response.code, response.value),
                orig_exc=getattr(response, 'exc', None),
                orig_trace=getattr(response, 'trace', None),
            )
        return response
|
java
|
/**
 * Returns the download policy from the asset's WLP information, or
 * {@code null} when the asset has no WLP information attached.
 */
@Override
public DownloadPolicy getDownloadPolicy() {
    if (_asset.getWlpInformation() == null) {
        return null;
    }
    return _asset.getWlpInformation().getDownloadPolicy();
}
|
java
|
/*
 * Streaming XML escape: reads characters from the reader and writes the
 * escaped result to the writer. Depending on the given symbols, escape
 * type and level, characters are emitted verbatim, as character entity
 * references (CERs), or as decimal/hexadecimal numeric references; invalid
 * codepoints are silently dropped. Surrogate pairs are handled as single
 * codepoints.
 */
static void escape(
        final Reader reader, final Writer writer, final XmlEscapeSymbols symbols,
        final XmlEscapeType escapeType, final XmlEscapeLevel escapeLevel)
        throws IOException {

    if (reader == null) {
        return;
    }

    final int level = escapeLevel.getEscapeLevel();
    final boolean useCERs = escapeType.getUseCERs();
    final boolean useHexa = escapeType.getUseHexa();

    int c1, c2; // c0: last char, c1: current char, c2: next char

    c2 = reader.read();

    while (c2 >= 0) {

        c1 = c2;
        c2 = reader.read();

        final int codepoint = codePointAt((char)c1, (char)c2);
        boolean codepointValid = symbols.CODEPOINT_VALIDATOR.isValid(codepoint);

        /*
         * Shortcut: most characters will be ASCII/Alphanumeric, and we won't need to do anything at
         * all for them
         */
        if (codepoint <= (XmlEscapeSymbols.LEVELS_LEN - 2)
                && level < symbols.ESCAPE_LEVELS[codepoint]
                && codepointValid) {
            writer.write(c1);
            continue;
        }

        /*
         * Shortcut: we might not want to escape non-ASCII chars at all either.
         */
        if (codepoint > (XmlEscapeSymbols.LEVELS_LEN - 2)
                && level < symbols.ESCAPE_LEVELS[XmlEscapeSymbols.LEVELS_LEN - 1]
                && codepointValid) {

            writer.write(c1);

            if (Character.charCount(codepoint) > 1) {
                // This is to compensate that we are actually escaping two char[] positions with a single codepoint.
                writer.write(c2);

                c1 = c2;
                c2 = reader.read();
            }

            continue;
        }

        /*
         * We know we need to escape, so from here on we will only work with the codepoint -- we can advance
         * the chars.
         */
        if (Character.charCount(codepoint) > 1) {
            // This is to compensate that we are actually reading two char positions with a single codepoint.
            c1 = c2;
            c2 = reader.read();
        }

        /*
         * If the char is invalid, there is nothing to write, simply skip it (which we already did by
         * incrementing the readOffset.
         */
        if (!codepointValid) {
            // nothing to write
            continue;
        }

        /*
         * -----------------------------------------------------------------------------------------
         *
         * Perform the real escape, attending the different combinations of NCR, DCR and HCR needs.
         *
         * -----------------------------------------------------------------------------------------
         */

        if (useCERs) {
            // We will try to use a CER
            final int codepointIndex =
                    Arrays.binarySearch(symbols.SORTED_CODEPOINTS, codepoint);

            if (codepointIndex >= 0) {
                // CER found! just write it and go for the next char
                writer.write(symbols.SORTED_CERS_BY_CODEPOINT[codepointIndex]);
                continue;
            }
        }

        /*
         * No NCR-escape was possible (or allowed), so we need decimal/hexa escape.
         */

        if (useHexa) {
            writer.write(REFERENCE_HEXA_PREFIX);
            writer.write(Integer.toHexString(codepoint));
        } else {
            writer.write(REFERENCE_DECIMAL_PREFIX);
            writer.write(String.valueOf(codepoint));
        }
        writer.write(REFERENCE_SUFFIX);
    }
}
|
python
|
def check_flashing_need(self, execution_type, build_id, force):
        """
        Check if flashing of local device is required.

        :param execution_type: Should be 'hardware'
        :param build_id: Build id, usually file name
        :param force: Forceflash flag
        :return: Boolean
        :raises ResourceInitError: when no binary is registered for build_id,
            or when execution_type/file-existence preconditions fail.
        """
        binary_file_name = AllocationContextList.get_build(build_id)
        if binary_file_name:
            if execution_type == 'hardware' and os.path.isfile(binary_file_name):
                if not force:
                    #@todo: Make a better check for binary compatibility
                    extension_split = os.path.splitext(binary_file_name)
                    extension = extension_split[-1].lower()
                    if extension != '.bin' and extension != '.hex':
                        # Unsupported binary format: skip flashing entirely.
                        self.logger.debug("File ('%s') is not supported to flash, skip it" %(
                            build_id))
                        return False
                    return True
                # force flag set: always flash.
                return True
            else:
                # NOTE(review): this branch is also reached when
                # execution_type is not 'hardware', making the error message
                # misleading in that case -- confirm intent.
                raise ResourceInitError("Given binary %s does not exist" % build_id)
        else:
            raise ResourceInitError("Given binary %s does not exist" % build_id)
|
python
|
def dotted_parts(s):
    """
    For a string "a.b.c", yields "a", "a.b", "a.b.c".
    """
    if not s:
        # An empty string yields nothing.
        return
    pieces = s.split('.')
    for count in range(1, len(pieces) + 1):
        yield '.'.join(pieces[:count])
|
java
|
/**
 * Checks if the given resource (or any resource in its subtree) carries a
 * cached system lock.
 *
 * @param dbc the current database context (kept for interface compatibility)
 * @param resource the resource to check; may be {@code null}
 * @return {@code true} if a system lock exists on the resource subtree
 * @throws CmsException declared for interface compatibility
 */
public boolean hasSystemLocks(CmsDbContext dbc, CmsResource resource) throws CmsException {

    if (resource == null) {
        return false;
    }
    Iterator<CmsLock> itLocks = OpenCms.getMemoryMonitor().getAllCachedLocks().iterator();
    while (itLocks.hasNext()) {
        CmsLock lock = itLocks.next();
        if (lock.getSystemLock().isUnlocked()) {
            // only system locks matter here
            continue;
        }
        if (lock.getResourceName().startsWith(resource.getRootPath())) {
            // The lock is on this resource or somewhere in its subtree.
            return true;
        }
        // NOTE(review): the original method contained a duplicated nested
        // "startsWith" check that returned true before a sibling-lock scan
        // (two redundant readResource calls plus internalReadSiblings /
        // internalSiblingLock handling) could ever execute. That unreachable
        // code has been removed; behavior is unchanged. If sibling locks
        // outside the subtree were meant to be detected, that logic needs to
        // be reinstated deliberately.
    }
    return false;
}
|
java
|
/**
 * Creates a Thai solar calendar picker using the JVM's default format
 * locale and a system-clock supplier evaluated in the local time view.
 *
 * @return a picker for {@link ThaiSolarCalendar} dates
 */
public static CalendarPicker<ThaiSolarCalendar> thaiWithSystemDefaults() {
    return CalendarPicker.thai(
        Locale.getDefault(Locale.Category.FORMAT),
        () -> SystemClock.inLocalView().now(ThaiSolarCalendar.axis())
    );
}
|
java
|
/**
 * Removes a DFU log listener from the shared static broadcast receiver;
 * when the last listener is removed, the receiver itself is unregistered
 * from the LocalBroadcastManager and discarded.
 *
 * @param context  context used to resolve the LocalBroadcastManager
 * @param listener the listener to remove
 */
public static void unregisterLogListener(@NonNull final Context context, @NonNull final DfuLogListener listener) {
    if (mLogBroadcastReceiver != null) {
        final boolean empty = mLogBroadcastReceiver.removeLogListener(listener);

        if (empty) {
            LocalBroadcastManager.getInstance(context).unregisterReceiver(mLogBroadcastReceiver);
            mLogBroadcastReceiver = null;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.