language | func_code_string |
---|---|
python | def finish_init(environment, start_web, create_sysadmin, log_syslog=False,
do_install=True, quiet=False, site_url=None, interactive=False,
init_db=True):
"""
Common parts of create and init: Install, init db, start site, sysadmin
"""
if not init_db:
start_web = False
create_sysadmin = False
if do_install:
install_all(environment, False, verbose=False, quiet=quiet)
if init_db:
if not quiet:
write('Initializing database')
environment.install_postgis_sql()
environment.ckan_db_init()
if not quiet:
write('\n')
if site_url:
try:
site_url = site_url.format(address=environment.address, port=environment.port)
environment.site_url = site_url
environment.save_site(False)
except (KeyError, IndexError, ValueError) as e:
raise DatacatsError('Could not parse site_url: {}'.format(e))
if start_web:
environment.start_ckan(log_syslog=log_syslog)
if not quiet and not interactive:
write('Starting web server at {0} ...\n'.format(
environment.web_address()))
if create_sysadmin:
try:
adminpw = confirm_password()
environment.create_admin_set_password(adminpw)
except KeyboardInterrupt:
print()
if not start_web:
environment.stop_supporting_containers() |
python | def add(self, defn):
"""Adds the given Command Definition to this Command Dictionary."""
self[defn.name] = defn
self.colnames[defn.name] = defn |
python | def register_model_converter(model, name=None, field='pk', base=IntConverter, queryset=None):
"""
Registers a custom path converter for a model.
:param model: a Django model
:param str name: name to register the converter as
:param str field: name of the lookup field
:param base: base path converter, either by name or as class
(optional, defaults to `django.urls.converters.IntConverter`)
:param queryset: a custom queryset to use (optional, defaults to `model.objects.all()`)
"""
if name is None:
name = camel_to_snake(model.__name__)
converter_name = '{}Converter'.format(model.__name__)
else:
converter_name = '{}Converter'.format(snake_to_camel(name))
if isinstance(base, str):
base = get_converter(base).__class__
converter_class = type(
converter_name,
(ModelConverterMixin, base,),
{'model': model, 'field': field, 'queryset': queryset}
)
register_converter(converter_class, name) |
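A usage sketch for the converter above (hypothetical `Article` model, view, and URLconf; assumes the `ModelConverterMixin` resolves the matched value to a model instance):

```python
# Hypothetical usage: model, view, and module names are illustrative.
from django.urls import path
from myapp.models import Article        # assumed model
from myapp.views import article_detail  # assumed view

# Registers the converter under the snake_case name 'article'.
register_model_converter(Article)

urlpatterns = [
    # '<article:article>' looks up an Article by pk and passes the
    # instance to the view (assuming the mixin resolves instances).
    path('articles/<article:article>/', article_detail),
]
```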
java | OutputStream writeChannel(int channel)
throws IOException
{
while (os != null) {
boolean canWrite = false;
synchronized (WRITE_LOCK) {
if (!isWriteLocked) {
isWriteLocked = true;
canWrite = true;
}
else {
try {
WRITE_LOCK.wait(5000);
} catch (Exception e) {
// interrupted or timed out; loop and retry the lock check
}
}
}
if (canWrite) {
os.write('C');
os.write(channel >> 8);
os.write(channel);
return os;
}
}
return null;
} |
java | public static void parkUntil(Object blocker, long deadline) {
Thread t = Thread.currentThread();
setBlocker(t, blocker);
U.park(true, deadline);
setBlocker(t, null);
} |
java | public static double[][] circleEndForLineVectorAlgebra(double x1, double y1,
double x2, double y2, double radius) {
double[][] result = new double[2][];
result[0] = new double[2];
result[1] = new double[2];
double[] baseVector = {x2 - x1, y2 - y1};
double baseVectorLength = Math.sqrt(baseVector[0]*baseVector[0] + baseVector[1]*baseVector[1]);
double[] baseUnitVector = {baseVector[0]/baseVectorLength, baseVector[1]/baseVectorLength};
//point C:
result[0][0] = x2 - baseUnitVector[0] * radius * 2;
result[0][1] = y2 - baseUnitVector[1] * radius * 2;
//Point O:
result[1][0] = x2 - baseUnitVector[0] * radius;
result[1][1] = y2 - baseUnitVector[1] * radius;
return result;
} |
python | def decode_index_value(self, index, value):
"""
Decodes a secondary index value into the correct Python type.
:param index: the name of the index
:type index: str
:param value: the value of the index entry
:type value: str
:rtype: str or int
"""
if index.endswith("_int"):
return int(value)
else:
return bytes_to_str(value) |
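A minimal standalone sketch of the suffix dispatch above (no client library needed; plain `str` stands in for `bytes_to_str`):

```python
def decode_index_value_sketch(index, value):
    """Mirror of the dispatch above: '_int' indexes decode to int."""
    return int(value) if index.endswith("_int") else str(value)

assert decode_index_value_sketch("age_int", "42") == 42
assert decode_index_value_sketch("email_bin", "a@b") == "a@b"
```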
java | public static void sendAttachmentFile(final String uri) {
dispatchConversationTask(new ConversationDispatchTask() {
@Override
protected boolean execute(Conversation conversation) {
if (TextUtils.isEmpty(uri)) {
return false; // TODO: add error message
}
CompoundMessage message = new CompoundMessage();
// No body, just attachment
message.setBody(null);
message.setRead(true);
message.setHidden(true);
message.setSenderId(conversation.getPerson().getId());
ArrayList<StoredFile> attachmentStoredFiles = new ArrayList<StoredFile>();
/* Make a local copy in the cache dir. By default the file name is "apptentive-api-file + nonce"
* If original uri is known, the name will be taken from the original uri
*/
Context context = ApptentiveInternal.getInstance().getApplicationContext();
String localFilePath = Util.generateCacheFilePathFromNonceOrPrefix(context, message.getNonce(), Uri.parse(uri).getLastPathSegment());
String mimeType = Util.getMimeTypeFromUri(context, Uri.parse(uri));
MimeTypeMap mime = MimeTypeMap.getSingleton();
String extension = mime.getExtensionFromMimeType(mimeType);
// If we can't get the mime type from the uri, try getting it from the extension.
if (extension == null) {
extension = MimeTypeMap.getFileExtensionFromUrl(uri);
}
if (mimeType == null && extension != null) {
mimeType = mime.getMimeTypeFromExtension(extension);
}
if (!TextUtils.isEmpty(extension)) {
localFilePath += "." + extension;
}
StoredFile storedFile = Util.createLocalStoredFile(uri, localFilePath, mimeType);
if (storedFile == null) {
return false; // TODO: add error message
}
storedFile.setId(message.getNonce());
attachmentStoredFiles.add(storedFile);
message.setAssociatedFiles(attachmentStoredFiles);
conversation.getMessageManager().sendMessage(message);
return true;
}
}, "send attachment file");
} |
python | def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
"""
missing_attr = {None}
unique_nums = {getattr(errno, k, None) for k in errnames}
return list(unique_nums - missing_attr) |
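For example, on a POSIX host (exact numbers are platform-dependent; names unknown on the current platform are dropped rather than raising `AttributeError`):

```python
import errno

# EPIPE is typically 32 on Linux; the Windows-only WSAECONNRESET is
# silently dropped on POSIX platforms.
print(plat_specific_errors('EPIPE', 'WSAECONNRESET'))  # e.g. [32]
```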
python | def clear_step_handler_by_chat_id(self, chat_id):
"""
Clears all callback functions registered by register_next_step_handler().
:param chat_id: The chat for which we want to clear next step handlers
"""
self.next_step_handlers[chat_id] = []
if self.next_step_saver is not None:
self.next_step_saver.start_save_timer() |
java | @Nullable
public static URL getAsURL (@Nullable final String sURL, final boolean bWhine)
{
if (StringHelper.hasText (sURL))
try
{
return new URL (sURL);
}
catch (final MalformedURLException ex)
{
// fall-through
if (bWhine && GlobalDebug.isDebugMode ())
if (LOGGER.isWarnEnabled ())
LOGGER.warn ("Debug warn: failed to convert '" + sURL + "' to a URL!");
}
return null;
} |
python | def get_all_counters(obj, instance_list=None):
'''
Get the values for all counters available to a Counter object
Args:
obj (str):
The name of the counter object. You can get a list of valid names
using the ``list_objects`` function
instance_list (list):
A list of instances to return. Use this to narrow down the counters
that are returned.
.. note::
``_Total`` is returned as ``*``
'''
counters, instances_avail = win32pdh.EnumObjectItems(None, None, obj, -1, 0)
if instance_list is None:
instance_list = instances_avail
if not isinstance(instance_list, list):
instance_list = [instance_list]
counter_list = []
for counter in counters:
for instance in instance_list:
instance = '*' if instance.lower() == '_total' else instance
counter_list.append((obj, instance, counter))
else: # pylint: disable=useless-else-on-loop
counter_list.append((obj, None, counter))
return get_counters(counter_list) if counter_list else {} |
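A standalone sketch of how the counter path list is assembled (no pywin32 required; it mirrors the inner loop above, minus the per-counter `(obj, None, counter)` row that the `else` clause also appends):

```python
# Hypothetical 'Processor' counter object with two counters and two instances.
counters = ['% Processor Time', '% Idle Time']
instances = ['_Total', '0']

counter_list = [
    ('Processor', '*' if inst.lower() == '_total' else inst, counter)
    for counter in counters
    for inst in instances
]
print(counter_list[0])  # ('Processor', '*', '% Processor Time')
```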
python | def filter_t(func):
"""
Transformation for Sequence.filter
:param func: filter function
:return: transformation
"""
return Transformation('filter({0})'.format(name(func)),
partial(filter, func),
{ExecutionStrategies.PARALLEL}) |
java | public void throwDOMException(short code, String msg)
{
String themsg = XSLMessages.createMessage(msg, null);
throw new DOMException(code, themsg);
} |
java | public static <T> T newInstance(Settings settings, Key<String> key) throws ServiceLocationException
{
// Workaround for compiler bug (#6302954)
return Factories.<T>newInstance(settings, key, Thread.currentThread().getContextClassLoader());
} |
python | def get_renderers(self):
"""Optionally block Browsable API rendering. """
renderers = super(WithDynamicViewSetMixin, self).get_renderers()
if settings.ENABLE_BROWSABLE_API is False:
return [
r for r in renderers if not isinstance(r, BrowsableAPIRenderer)
]
else:
return renderers |
java | public static BoundedOverlay getBoundedOverlay(TileDao tileDao, float density) {
BoundedOverlay overlay = null;
if (tileDao.isGoogleTiles()) {
overlay = new GoogleAPIGeoPackageOverlay(tileDao);
} else {
overlay = new GeoPackageOverlay(tileDao, density);
}
return overlay;
} |
java | public Period plusYears(int years) {
if (years == 0) {
return this;
}
int[] values = getValues(); // cloned
getPeriodType().addIndexedField(this, PeriodType.YEAR_INDEX, values, years);
return new Period(values, getPeriodType());
} |
java | public void pauseJob(JobKey jobKey, T jedis) throws JobPersistenceException {
for (OperableTrigger trigger : getTriggersForJob(jobKey, jedis)) {
pauseTrigger(trigger.getKey(), jedis);
}
} |
python | def backtrack(self, source):
"""Given a unique key in the store, recreate original source"""
key = self.get_tok(source)
s = self[key]()
meta = s.metadata['original_source']
cls = meta['cls']
args = meta['args']
kwargs = meta['kwargs']
cls = import_name(cls)
sout = cls(*args, **kwargs)
sout.metadata = s.metadata['original_metadata']
sout.name = s.metadata['original_name']
return sout |
java | public static <K, V> Map<K, V> checkNotNullOrEmpty(Map<K, V> arg, String argName) throws NullPointerException,
IllegalArgumentException {
Preconditions.checkNotNull(arg, argName);
checkArgument(!arg.isEmpty(), argName, "Cannot be an empty map.");
return arg;
} |
python | def drop(self):
"""Drop the table from the database.
Deletes both the schema and all the contents within it.
"""
with self.db.lock:
if self.exists:
self._threading_warn()
self.table.drop(self.db.executable, checkfirst=True)
self._table = None |
python | def assign_to_series(self, name, series_type, item):
"""Assign name to item converted to the given series_type."""
if series_type == "(":
self.add_def(name + " = _coconut.tuple(" + item + ")")
elif series_type == "[":
self.add_def(name + " = _coconut.list(" + item + ")")
else:
raise CoconutInternalException("invalid series match type", series_type) |
java | public DConnection findByRefreshToken(java.lang.String refreshToken) {
return queryUniqueByField(null, DConnectionMapper.Field.REFRESHTOKEN.getFieldName(), refreshToken);
} |
python | def to_table_data(self):
"""
:raises ValueError:
:raises pytablereader.error.ValidationError:
"""
self._validate_source_data()
header_list = []
for json_record in self._buffer:
for key in json_record:
if key not in header_list:
header_list.append(key)
self._loader.inc_table_count()
yield TableData(
self._make_table_name(),
header_list,
self._buffer,
dp_extractor=self._loader.dp_extractor,
type_hints=self._extract_type_hints(header_list),
) |
java | public static Interval invert(Interval i) {
byte iLabel = (byte) (OCTAVE - (i.getLabel()%OCTAVE));
byte iQuality = (byte) -i.getQuality();
byte iOrder = (byte) -i.getDirection();
return new Interval(iLabel, iQuality, iOrder);
} |
java | public static Color[] redblue(int n, float alpha) {
Color[] palette = new Color[n];
for (int i = 0; i < n; i++) {
palette[i] = new Color((float) Math.sqrt((i + 1.0f) / n), 0.0f, (float) Math.sqrt(1 - (i + 1.0f) / n), alpha);
}
return palette;
} |
python | def fft(a, n=None, axis=-1, norm=None):
"""
Compute the one-dimensional discrete Fourier Transform.
This function computes the one-dimensional *n*-point discrete Fourier
Transform (DFT) with the efficient Fast Fourier Transform (FFT)
algorithm [CT].
Parameters
----------
a : array_like
Input array, can be complex.
n : int, optional
Length of the transformed axis of the output.
If `n` is smaller than the length of the input, the input is cropped.
If it is larger, the input is padded with zeros. If `n` is not given,
the length of the input along the axis specified by `axis` is used.
axis : int, optional
Axis over which to compute the FFT. If not given, the last axis is
used.
norm : {None, "ortho"}, optional
.. versionadded:: 1.10.0
Normalization mode (see `numpy.fft`). Default is None.
Returns
-------
out : complex ndarray
The truncated or zero-padded input, transformed along the axis
indicated by `axis`, or the last one if `axis` is not specified.
Raises
------
IndexError
if `axes` is larger than the last axis of `a`.
See Also
--------
numpy.fft : for definition of the DFT and conventions used.
ifft : The inverse of `fft`.
fft2 : The two-dimensional FFT.
fftn : The *n*-dimensional FFT.
rfftn : The *n*-dimensional FFT of real input.
fftfreq : Frequency bins for given FFT parameters.
Notes
-----
FFT (Fast Fourier Transform) refers to a way the discrete Fourier
Transform (DFT) can be calculated efficiently, by using symmetries in the
calculated terms. The symmetry is highest when `n` is a power of 2, and
the transform is therefore most efficient for these sizes.
The DFT is defined, with the conventions used in this implementation, in
the documentation for the `numpy.fft` module.
References
----------
.. [CT] Cooley, James W., and John W. Tukey, 1965, "An algorithm for the
machine calculation of complex Fourier series," *Math. Comput.*
19: 297-301.
Examples
--------
>>> np.fft.fft(np.exp(2j * np.pi * np.arange(8) / 8))
array([ -3.44505240e-16 +1.14383329e-17j,
8.00000000e+00 -5.71092652e-15j,
2.33482938e-16 +1.22460635e-16j,
1.64863782e-15 +1.77635684e-15j,
9.95839695e-17 +2.33482938e-16j,
0.00000000e+00 +1.66837030e-15j,
1.14383329e-17 +1.22460635e-16j,
-1.64863782e-15 +1.77635684e-15j])
>>> import matplotlib.pyplot as plt
>>> t = np.arange(256)
>>> sp = np.fft.fft(np.sin(t))
>>> freq = np.fft.fftfreq(t.shape[-1])
>>> plt.plot(freq, sp.real, freq, sp.imag)
[<matplotlib.lines.Line2D object at 0x...>, <matplotlib.lines.Line2D object at 0x...>]
>>> plt.show()
In this example, real input has an FFT which is Hermitian, i.e., symmetric
in the real part and anti-symmetric in the imaginary part, as described in
the `numpy.fft` documentation.
"""
output = mkl_fft.fft(a, n, axis)
if _unitary(norm):
output *= 1 / sqrt(output.shape[axis])
return output |
python | def find_n75(contig_lengths_dict, genome_length_dict):
"""
Calculate the N75 for each strain. N75 is defined as the largest contig such that at least 3/4 of the total
genome size is contained in contigs equal to or larger than this contig
:param contig_lengths_dict: dictionary of strain name: reverse-sorted list of all contig lengths
:param genome_length_dict: dictionary of strain name: total genome length
:return: n75_dict: dictionary of strain name: N75
"""
# Initialise the dictionary
n75_dict = dict()
for file_name, contig_lengths in contig_lengths_dict.items():
currentlength = 0
for contig_length in contig_lengths:
currentlength += contig_length
# If the current length is now greater than or equal to 3/4 of the total genome length, the
# current contig length is the N75
if currentlength >= genome_length_dict[file_name] * 0.75:
n75_dict[file_name] = contig_length
break
return n75_dict |
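A worked example: with contigs summing to 100 kb, the running total 40 + 30 + 20 kb first reaches the 75 kb threshold at the 20 kb contig, so that contig length is the N75:

```python
contig_lengths = {'strain1': [40000, 30000, 20000, 10000]}
genome_lengths = {'strain1': 100000}
print(find_n75(contig_lengths, genome_lengths))  # {'strain1': 20000}
```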
python | def __crawler_start(self):
"""Spawn the first X queued request, where X is the max threads option.
Note:
The main thread will sleep until the crawler is finished. This enables
quitting the application using sigints (see http://stackoverflow.com/a/11816038/2491049).
Note:
`__crawler_stop()` and `__spawn_new_requests()` are called here on the main thread to
prevent thread recursion and deadlocks.
"""
try:
self.__options.callbacks.crawler_before_start()
except Exception as e:
print(e)
print(traceback.format_exc())
self.__spawn_new_requests()
while not self.__stopped:
if self.__should_stop:
self.__crawler_stop()
if self.__should_spawn_new_requests:
self.__spawn_new_requests()
time.sleep(0.1) |
python | def get_all_usb_devices(idVendor, idProduct):
""" Returns a list of all the usb devices matching the provided vendor ID and product ID."""
all_dev = list(usb.core.find(find_all = True, idVendor = idVendor, idProduct = idProduct))
for dev in all_dev:
try:
dev.detach_kernel_driver(0)
except usb.USBError:
pass
return all_dev |
python | def _parse_query_key(self, key, val, is_escaped):
"""
Strips the query modifier from the key and calls the appropriate value modifier.
Args:
key (str): Query key
val: Query value
Returns:
Parsed query key and value.
"""
if key.endswith('__contains'):
key = key[:-10]
val = self._parse_query_modifier('contains', val, is_escaped)
elif key.endswith('__range'):
key = key[:-7]
val = self._parse_query_modifier('range', val, is_escaped)
elif key.endswith('__startswith'):
key = key[:-12]
val = self._parse_query_modifier('startswith', val, is_escaped)
elif key.endswith('__endswith'):
key = key[:-10]
val = self._parse_query_modifier('endswith', val, is_escaped)
# less than
elif key.endswith('__lt'):
key = key[:-4]
val = self._parse_query_modifier('lt', val, is_escaped)
# greater than
elif key.endswith('__gt'):
key = key[:-4]
val = self._parse_query_modifier('gt', val, is_escaped)
# less than or equal
elif key.endswith('__lte'):
key = key[:-5]
val = self._parse_query_modifier('lte', val, is_escaped)
# greater than or equal
elif key.endswith('__gte'):
key = key[:-5]
val = self._parse_query_modifier('gte', val, is_escaped)
elif key != 'NOKEY' and not is_escaped:
val = self._escape_query(val)
return key, val |
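A standalone sketch of the double-underscore suffix convention the parser implements (the real method additionally escapes values and delegates to `_parse_query_modifier`):

```python
def split_modifier(key):
    """Split 'field__modifier' keys; return (field, None) when no known suffix."""
    known = {'contains', 'range', 'startswith', 'endswith',
             'lt', 'gt', 'lte', 'gte'}
    field, sep, modifier = key.rpartition('__')
    if sep and modifier in known:
        return field, modifier
    return key, None

assert split_modifier('age__gte') == ('age', 'gte')
assert split_modifier('name') == ('name', None)
```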
python | def receive(self, **kwargs):
"""
A decorator for connecting receivers to this signal. Used by passing in the
keyword arguments to connect::
@post_save.receive(sender=MyModel)
def signal_receiver(sender, **kwargs):
...
"""
def _decorator(func):
self.connect(func, **kwargs)
return func
return _decorator |
python | def order_market_buy(self, **params):
"""Send in a new market buy order
:param symbol: required
:type symbol: str
:param quantity: required
:type quantity: decimal
:param newClientOrderId: A unique id for the order. Automatically generated if not sent.
:type newClientOrderId: str
:param newOrderRespType: Set the response JSON. ACK, RESULT, or FULL; default: RESULT.
:type newOrderRespType: str
:param recvWindow: the number of milliseconds the request is valid for
:type recvWindow: int
:returns: API response
See order endpoint for full response options
:raises: BinanceRequestException, BinanceAPIException, BinanceOrderException, BinanceOrderMinAmountException, BinanceOrderMinPriceException, BinanceOrderMinTotalException, BinanceOrderUnknownSymbolException, BinanceOrderInactiveSymbolException
"""
params.update({
'side': self.SIDE_BUY
})
return self.order_market(**params) |
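A hypothetical call against a client exposing the method above (symbol and quantity are illustrative; assumes an authenticated `client` instance):

```python
# Market-buys 100 units; 'side' is filled in by order_market_buy itself.
order = client.order_market_buy(symbol='BNBBTC', quantity=100)
```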
python | def setup_ci():
# type: () -> None
""" Setup AppEngine SDK on CircleCI """
gcloud_path = shell.run('which gcloud', capture=True).stdout.strip()
sdk_path = normpath(join(gcloud_path, '../../platform/google_appengine'))
gcloud_cmd = gcloud_path + ' --quiet'
if not exists(sdk_path):
log.info("Installing AppEngine SDK")
shell.run('sudo {} components install app-engine-python'.format(
gcloud_cmd
))
else:
# Only initialise once. To reinitialise, just build without cache.
log.info("AppEngine SDK already initialised")
log.info("Using service account authentication")
shell.run('{} auth activate-service-account --key-file {}'.format(
gcloud_cmd,
conf.proj_path('ops/client_secret.json')
)) |
java | private void load(final String key, final String value, final String location) {
// Recursive bit
if (INCLUDE.equals(key)) {
load(parseStringArray(value));
} else {
backing.put(key, value);
if ("yes".equals(value) || "true".equals(value)) {
booleanBacking.add(key);
} else {
booleanBacking.remove(key);
}
String history = locations.get(key);
if (history == null) {
history = location;
} else {
history = location + "; " + history;
}
locations.put(key, history);
}
} |
python | def encoding_and_executable(notebook, metadata, ext):
"""Return encoding and executable lines for a notebook, if applicable"""
lines = []
comment = _SCRIPT_EXTENSIONS.get(ext, {}).get('comment')
jupytext_metadata = metadata.get('jupytext', {})
if ext not in ['.Rmd', '.md'] and 'executable' in jupytext_metadata:
lines.append(comment + '!' + jupytext_metadata.pop('executable'))
if 'encoding' in jupytext_metadata:
lines.append(jupytext_metadata.pop('encoding'))
elif ext not in ['.Rmd', '.md']:
for cell in notebook.cells:
try:
cell.source.encode('ascii')
except (UnicodeEncodeError, UnicodeDecodeError):
lines.append(comment + _UTF8_HEADER)
break
return lines |
java | @NullSafe
@SuppressWarnings("unchecked")
public static <T> T[] asArray(Iterable<T> iterable, Class<T> componentType) {
List<T> arrayList = new ArrayList<>();
for (T element : CollectionUtils.nullSafeIterable(iterable)) {
arrayList.add(element);
}
return arrayList.toArray((T[]) Array.newInstance(defaultIfNull(componentType, Object.class), arrayList.size()));
} |
python | def _do_cron():
"""Handles the cron request to github to check for new pull requests. If
any are found, they are run *sequentially* until they are all completed.
"""
if not args["cron"]:
return
if ("enabled" in db and not db["enabled"]) or "enabled" not in db:
warn("The CI server is disabled. Exiting.")
exit(0)
#Our basic idea with the cron is as follows:
# - the cron runs every minute of the day.
# - each installed XML file has the last time it ran saved in the script's
# database. If the specified check frequency has elapsed since it last
# ran, then we run the repository server checks.
# - NB: before running the time-intensive checks against remote servers
# or running the unit tests, first update the running status of the repo
# so that another call with -cron doesn't duplicate the work!
#By having the cron run every minute, we maximize the probability that
#repo checks with time intensive unit tests may run in parallel. Since
#servers usually have many cores, this shouldn't impact the run times too
#severely unless the tests are disk intensive.
#We use the repo full names as keys in the db's status dictionary.
from pyci.server import Server
from datetime import datetime
attempted = []
server = Server(testmode=args["nolive"])
nextrepo = _find_next(server)
dbs = db["status"]
while nextrepo is not None:
vms("Working on '{}' in cron.".format(nextrepo))
if nextrepo in attempted:
#This makes sure we don't end up in an infinite loop.
vms("'{}' has already been handled! Exiting infinite loop.".format(nextrepo))
break
if nextrepo not in dbs:
vms("Created blank status dictionary for '{}' in db.".format(nextrepo))
dbs[nextrepo] = {"start": None, "end": None}
dbs[nextrepo]["start"] = datetime.now()
_save_db()
#Now that we have saved our intent to run these repo-checks, let's
#actually run them.
attempted.append(nextrepo)
server.runnable = [nextrepo]
if not args["nolive"]:
vms("Starting pull request processing for '{}'.".format(nextrepo))
server.process_pulls()
dbs[nextrepo]["end"] = datetime.now()
_save_db()
nextrepo = _find_next(server) |
java | protected String buildUrl(AbstractBaseRequest request) throws AbstractCosException {
String endPoint = this.config.getCosEndPoint();
int appId = this.cred.getAppId();
String bucketName = request.getBucketName();
String cosPath = request.getCosPath();
cosPath = CommonPathUtils.encodeRemotePath(cosPath);
return String.format("%s/%s/%s%s", endPoint, appId, bucketName, cosPath);
} |
python | def make_parts_for(self, field_name, field_data):
"""Create the relevant parts for this field
Args:
field_name (str): Short field name, e.g. VAL
field_data (FieldData): Field data object
"""
typ = field_data.field_type
subtyp = field_data.field_subtype
if typ in ("read", "xadc"):
writeable = False
else:
writeable = True
if typ == "time" or typ in ("param", "read") and subtyp == "time":
self._make_time_parts(field_name, field_data, writeable)
elif typ == "write" and subtyp == "action":
self._make_action_part(field_name, field_data)
elif typ in ("param", "read", "write", "xadc"):
self._make_param_part(field_name, field_data, writeable)
elif typ == "bit_out":
self._make_out(field_name, field_data, "bit")
elif typ == "pos_out":
self._make_out(field_name, field_data, "pos")
self._make_scale_offset(field_name)
self._make_out_capture(field_name, field_data)
elif typ == "ext_out":
self._make_out_capture(field_name, field_data)
elif typ == "bit_mux":
self._make_mux(field_name, field_data, "bit")
self._make_mux_delay(field_name)
elif typ == "pos_mux":
self._make_mux(field_name, field_data, "pos")
elif typ == "table":
self._make_table(field_name, field_data)
else:
raise ValueError("Unknown type %r subtype %r" % (typ, subtyp)) |
java | public <T> T findEntity(Class<T> clazz, Object... id) {
String executeSql = MirageUtil.buildSelectSQL(null, beanDescFactory, entityOperator, clazz, nameConverter);
return sqlExecutor.getSingleResult(clazz, executeSql, id);
} |
java | public static Vector2i convert(Tuple2D<?> tuple) {
if (tuple instanceof Vector2i) {
return (Vector2i) tuple;
}
return new Vector2i(tuple.getX(), tuple.getY());
} |
java | public void release()
{
for(String name : all)
{
allColumnFamilyMetrics.get(name).remove(Metrics.defaultRegistry().allMetrics().get(factory.createMetricName(name)));
Metrics.defaultRegistry().removeMetric(factory.createMetricName(name));
}
readLatency.release();
writeLatency.release();
rangeLatency.release();
Metrics.defaultRegistry().removeMetric(factory.createMetricName("EstimatedRowSizeHistogram"));
Metrics.defaultRegistry().removeMetric(factory.createMetricName("EstimatedColumnCountHistogram"));
Metrics.defaultRegistry().removeMetric(factory.createMetricName("KeyCacheHitRate"));
Metrics.defaultRegistry().removeMetric(factory.createMetricName("CoordinatorReadLatency"));
Metrics.defaultRegistry().removeMetric(factory.createMetricName("CoordinatorScanLatency"));
Metrics.defaultRegistry().removeMetric(factory.createMetricName("WaitingOnFreeMemtableSpace"));
} |
java | public boolean stopCriterionSatisfied(){
int i = 0;
while (i < stopCriteria.size() && !stopCriteria.get(i).searchShouldStop(search)) {
i++;
}
return i < stopCriteria.size();
} |
python | def init_report(self, reporter=None):
"""Initialize the report instance."""
self.options.report = (reporter or self.options.reporter)(self.options)
return self.options.report |
python | def fraction_visited(source, sink, waypoint, msm):
"""
Calculate the fraction of times a walker on `tprob` going from `sources`
to `sinks` will travel through the set of states `waypoints` en route.
Computes the conditional committors q^{ABC^+} and uses them to find the
fraction of paths mentioned above.
Note that in the notation of Dickson et al. this computes h_c(A,B), with
sources = A
sinks = B
waypoint = C
Parameters
----------
source : int
The index of the source state
sink : int
The index of the sink state
waypoint : int
The index of the intermediate state
msm : msmbuilder.MarkovStateModel
MSM to analyze.
Returns
-------
fraction_visited : float
The fraction of times a walker going from `sources` -> `sinks` stops
by `waypoints` on its way.
See Also
--------
msmbuilder.tpt.conditional_committors
Calculate the probability of visiting a waypoint while on a path
between a source and sink.
msmbuilder.tpt.hub_scores : function
Compute the 'hub score', the weighted fraction of visits for an
entire network.
References
----------
.. [1] Dickson & Brooks (2012), J. Chem. Theory Comput., 8, 3044-3052.
"""
for_committors = committors([source], [sink], msm)
cond_committors = conditional_committors(source, sink, waypoint, msm)
if hasattr(msm, 'all_transmats_'):
frac_visited = np.zeros((msm.n_states,))
for i, tprob in enumerate(msm.all_transmats_):
frac_visited[i] = _fraction_visited(source, sink, waypoint,
tprob, for_committors,
cond_committors)
return np.median(frac_visited, axis=0)
return _fraction_visited(source, sink, waypoint, msm.transmat_,
for_committors, cond_committors) |
python | def issorted(list_, op=operator.le):
"""
Determines if a list is sorted
Args:
list_ (list):
op (func): sorted operation (default=operator.le)
Returns:
bool : True if the list is sorted
"""
return all(op(list_[ix], list_[ix + 1]) for ix in range(len(list_) - 1)) |
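For example:

```python
import operator

assert issorted([1, 2, 2, 3])                      # non-decreasing (default <=)
assert not issorted([1, 2, 2, 3], op=operator.lt)  # strictly increasing fails on ties
assert issorted([3, 2, 1], op=operator.ge)         # non-increasing
```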
python | def validate_target(self, target):
"""Make sure that the specified target only contains architectures that we know about."""
archs = target.split('/')
for arch in archs:
if arch not in self.archs:
return False
return True |
java | public Long rankReverse(final String member) {
return doWithJedis(new JedisCallable<Long>() {
@Override
public Long call(Jedis jedis) {
return jedis.zrevrank(getKey(), member);
}
});
} |
java | public void put(String url, HttpResponse response, Map<String, Object> headers, File file) {
HttpPut methodPut = new HttpPut(url);
HttpEntity multipart = buildBodyWithFile(file);
methodPut.setEntity(multipart);
getResponse(url, response, methodPut, headers);
} |
java | @Override
public int getRemoteTeamId() {
int penalty = Timing.getInstance().REMOTE_ACCESS_PENALTY;
log.trace("[getRemoteTeamId] Waiting {} cycles", penalty);
turnsControl.waitTurns(penalty, "Get Remote Team Id");
Robot neighbour = world.getNeighbour(this.robot);
if (neighbour != null) {
return neighbour.getData().getTeamId();
} else {
return 0;
}
} |
java | public static CommerceSubscriptionEntry findByG_U_Last(long groupId,
long userId,
OrderByComparator<CommerceSubscriptionEntry> orderByComparator)
throws com.liferay.commerce.exception.NoSuchSubscriptionEntryException {
return getPersistence()
.findByG_U_Last(groupId, userId, orderByComparator);
} |
java | private static void generateProtobufDefinedForField(StringBuilder code, FieldElement field, Set<String> enumNames) {
code.append("@").append(Protobuf.class.getSimpleName()).append("(");
String fieldType = fieldTypeMapping.get(getTypeName(field));
if (fieldType == null) {
if (enumNames.contains(getTypeName(field))) {
fieldType = "FieldType.ENUM";
} else {
if (field.type().kind() == DataType.Kind.MAP) {
fieldType = "FieldType.MAP";
} else {
fieldType = "FieldType.OBJECT";
}
}
}
code.append("fieldType=").append(fieldType);
code.append(", order=").append(field.tag());
if (FieldElement.Label.OPTIONAL == field.label()) {
code.append(", required=false");
} else if (Label.REQUIRED == field.label()) {
code.append(", required=true");
}
code.append(")\n");
} |
python | def moveTab(self, fromIndex, toIndex):
"""
Moves the tab from the inputed index to the given index.
:param fromIndex | <int>
toIndex | <int>
"""
try:
item = self.layout().itemAt(fromIndex)
self.layout().insertItem(toIndex, item.widget())
except Exception:
pass |
java | void updateInternal() {
// HSuperColumnImpl needs a refactor, this construction is lame.
// the value serializer is not used in HSuperColumnImpl, so this is safe for name
if ( !subColumns.isEmpty() ) {
log.debug("Adding column {} for key {} and cols {}", new Object[]{getCurrentSuperColumn(), getCurrentKey(), subColumns});
HSuperColumnImpl<SN, N, ?> column = new HSuperColumnImpl(getCurrentSuperColumn(), subColumns,
0, template.getTopSerializer(), template.getSubSerializer(), TypeInferringSerializer.get());
mutator.addInsertion(getCurrentKey(), template.getColumnFamily(), column);
}
} |
python | def server_inspect_exception(self, req_event, rep_event, task_ctx, exc_info):
"""Called when an exception has been raised in the code run by ZeroRPC"""
# Hide the zerorpc internal frames for readability, for a REQ/REP or
# REQ/STREAM server the frames to hide are:
# - core.ServerBase._async_task
# - core.Pattern*.process_call
# - core.DecoratorBase.__call__
#
# For a PUSH/PULL or PUB/SUB server the frame to hide is:
# - core.Puller._receiver
if self._hide_zerorpc_frames:
traceback = exc_info[2]
while traceback:
zerorpc_frame = traceback.tb_frame
zerorpc_frame.f_locals["__traceback_hide__"] = True
frame_info = inspect.getframeinfo(zerorpc_frame)
# Is there a better way than this (or looking up the filenames
# or hardcoding the number of frames to skip) to know when we
# are out of zerorpc?
if frame_info.function == "__call__" or frame_info.function == "_receiver":
break
traceback = traceback.tb_next
self._elasticapm_client.capture_exception(exc_info, extra=task_ctx, handled=False) |
java | public FindingFilter withAutoScalingGroups(String... autoScalingGroups) {
if (this.autoScalingGroups == null) {
setAutoScalingGroups(new java.util.ArrayList<String>(autoScalingGroups.length));
}
for (String ele : autoScalingGroups) {
this.autoScalingGroups.add(ele);
}
return this;
} |
java | static double computeDynamicTimeWarpingDistance(
DoubleTuple u, DoubleTuple v)
{
int m = u.getSize();
int n = v.getSize();
double[][] matrix = threadLocalMatrix.get();
if (matrix.length < m || matrix[0].length < n)
{
matrix = new double[m][n];
threadLocalMatrix.set(matrix);
}
double u0 = u.get(0);
double v0 = v.get(0);
matrix[0][0] = Math.abs(u0-v0);
for (int j=1; j<n; j++)
{
matrix[0][j] = Math.abs(u0-v.get(j)) + matrix[0][j-1];
}
for (int i=1; i<m; i++)
{
matrix[i][0] = Math.abs(u.get(i)-v0) + matrix[i-1][0];
}
for (int i = 1; i < m; i++)
{
double ui = u.get(i);
for (int j = 1; j < n; j++)
{
double vj = v.get(j);
double min = matrix[i - 1][j - 1];
min = min(min, matrix[i][j - 1]);
min = min(min, matrix[i - 1][j]);
double d = ui - vj;
matrix[i][j] = min + (d < 0 ? -d : d);
}
}
return matrix[m-1][n-1];
} |
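A minimal Python sketch of the same recurrence, useful for cross-checking the Java routine above: D[i][j] = |u[i] - v[j]| + min(D[i-1][j-1], D[i][j-1], D[i-1][j]):

```python
def dtw(u, v):
    """Dynamic time warping distance with absolute-difference cost."""
    m, n = len(u), len(v)
    D = [[0.0] * n for _ in range(m)]
    D[0][0] = abs(u[0] - v[0])
    for j in range(1, n):   # first row: only horizontal moves
        D[0][j] = abs(u[0] - v[j]) + D[0][j - 1]
    for i in range(1, m):   # first column: only vertical moves
        D[i][0] = abs(u[i] - v[0]) + D[i - 1][0]
    for i in range(1, m):
        for j in range(1, n):
            D[i][j] = abs(u[i] - v[j]) + min(D[i-1][j-1], D[i][j-1], D[i-1][j])
    return D[m - 1][n - 1]

assert dtw([0, 1, 2], [0, 1, 2]) == 0.0
assert dtw([0, 0, 1], [0, 1]) == 0.0  # warping absorbs the repeated 0
```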
python | def ces(subsystem, mechanisms=False, purviews=False, cause_purviews=False,
effect_purviews=False, parallel=False):
"""Return the conceptual structure of this subsystem, optionally restricted
to concepts with the mechanisms and purviews given in keyword arguments.
If you don't need the full |CauseEffectStructure|, restricting the possible
mechanisms and purviews can make this function much faster.
Args:
subsystem (Subsystem): The subsystem for which to determine the
|CauseEffectStructure|.
Keyword Args:
mechanisms (tuple[tuple[int]]): Restrict possible mechanisms to those
in this list.
purviews (tuple[tuple[int]]): Same as in |Subsystem.concept()|.
cause_purviews (tuple[tuple[int]]): Same as in |Subsystem.concept()|.
effect_purviews (tuple[tuple[int]]): Same as in |Subsystem.concept()|.
parallel (bool): Whether to compute concepts in parallel. If ``True``,
overrides :data:`config.PARALLEL_CONCEPT_EVALUATION`.
Returns:
CauseEffectStructure: A tuple of every |Concept| in the cause-effect
structure.
"""
if mechanisms is False:
mechanisms = utils.powerset(subsystem.node_indices, nonempty=True)
engine = ComputeCauseEffectStructure(mechanisms, subsystem, purviews,
cause_purviews, effect_purviews)
return CauseEffectStructure(engine.run(parallel or
config.PARALLEL_CONCEPT_EVALUATION),
subsystem=subsystem) |
java | public static long longVal(String val, long defaultValue) {
try {
return NumberUtils.createNumber(val).longValue();
} catch (Exception e) {
log.warn("longVal(String, int) throw {}, return defaultValue = {}", e, defaultValue);
return defaultValue;
}
} |
java | private void findMatches(Match match, int n) throws IOException {
if (n > 0) {
int largestMatchingEndPosition = match.endPosition();
Set<Integer> list = ignoreItem.getFullEndPositionList(spans.docID(),
match.endPosition());
// try to find matches with existing queue
if (!queueSpans.isEmpty()) {
Match span;
for (int i = 0; i < queueSpans.size(); i++) {
span = queueSpans.get(i);
if (match.endPosition() == span.startPosition()
|| (list != null && list.contains(span.startPosition()))) {
findMatches(new Match(match.startPosition(), span.endPosition()),
(n - 1));
largestMatchingEndPosition = Math.max(largestMatchingEndPosition,
span.endPosition());
}
}
}
// extend queue if necessary and possible
while (!lastSpan && (largestMatchingEndPosition >= lastStartPosition)) {
if (spans.nextStartPosition() == NO_MORE_POSITIONS) {
lastSpan = true;
} else {
Match span = new Match(spans.startPosition(), spans.endPosition());
queueSpans.add(span);
lastStartPosition = spans.startPosition();
// check if this provides new match
if (match.endPosition() == span.startPosition()
|| (list != null && list.contains(span.startPosition()))) {
findMatches(new Match(match.startPosition(), span.endPosition()),
(n - 1));
largestMatchingEndPosition = Math.max(largestMatchingEndPosition,
span.endPosition());
}
}
}
} else {
// only unique spans
if (!queueMatches.contains(match)) {
queueMatches.add(match);
}
}
} |
python | def path(self, which=None):
"""Extend ``nailgun.entity_mixins.Entity.path``.
The format of the returned path depends on the value of ``which``:
content_lifecycle_environments
/capsules/<id>/content/lifecycle_environments
content_sync
/capsules/<id>/content/sync
``super`` is called otherwise.
"""
if which and which.startswith('content_'):
return '{0}/content/{1}'.format(
super(Capsule, self).path(which='self'),
which.split('content_')[1]
)
return super(Capsule, self).path(which) |
python | def pct_decode(s):
"""
Return the percent-decoded version of string s.
>>> pct_decode('%43%6F%75%63%6F%75%2C%20%6A%65%20%73%75%69%73%20%63%6F%6E%76%69%76%69%61%6C')
'Coucou, je suis convivial'
>>> pct_decode('')
''
>>> pct_decode('%2525')
'%25'
"""
if s is None:
return None
elif not isinstance(s, unicode):
s = str(s)
else:
s = s.encode('utf8')
return PERCENT_CODE_SUB(lambda mo: chr(int(mo.group(0)[1:], 16)), s) |
python | def set_light_state_raw(self, hue, saturation, brightness, kelvin,
bulb=ALL_BULBS, timeout=None):
"""
Sets the (low-level) light state of one or more bulbs.
"""
with _blocking(self.lock, self.light_state, self.light_state_event,
timeout):
self.send(REQ_SET_LIGHT_STATE, bulb, 'xHHHHI',
hue, saturation, brightness, kelvin, 0)
self.send(REQ_GET_LIGHT_STATE, ALL_BULBS, '')
return self.light_state |
java | private static MessageElement getMapKVMessageElements(String name, MapType mapType) {
MessageElement.Builder ret = MessageElement.builder();
ret.name(name);
DataType keyType = mapType.keyType();
Builder fieldBuilder = FieldElement.builder().name("key").tag(1);
fieldBuilder.type(keyType).label(FieldElement.Label.OPTIONAL);
ret.addField(fieldBuilder.build());
DataType valueType = mapType.valueType();
fieldBuilder = FieldElement.builder().name("value").tag(2);
fieldBuilder.type(valueType).label(FieldElement.Label.OPTIONAL);
ret.addField(fieldBuilder.build());
return ret.build();
} |
java | @Override
public void clearCache() {
entityCache.clearCache(CommercePriceListImpl.class);
finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
} |
python | def _create_service_nwk(self, tenant_id, tenant_name, direc):
"""Function to create the service in network in DCNM. """
net_dict = self.retrieve_dcnm_net_info(tenant_id, direc)
net = utils.Dict2Obj(net_dict)
subnet_dict = self.retrieve_dcnm_subnet_info(tenant_id, direc)
subnet = utils.Dict2Obj(subnet_dict)
try:
self.dcnm_obj.create_service_network(tenant_name, net, subnet)
except dexc.DfaClientRequestFailed:
LOG.error("Failed to create network in DCNM %s", direc)
return False
return True |
python | def kelvin2rgb(temperature):
"""
Converts from Kelvin temperature to an RGB color.
Algorithm credits: |tannerhelland|_
"""
# range check
if temperature < 1000:
temperature = 1000
elif temperature > 40000:
temperature = 40000
tmp_internal = temperature / 100.0
# red
if tmp_internal <= 66:
red = 255
else:
tmp_red = 329.698727446 * np.power(tmp_internal - 60, -0.1332047592)
if tmp_red < 0:
red = 0
elif tmp_red > 255:
red = 255
else:
red = tmp_red
# green
if tmp_internal <= 66:
tmp_green = 99.4708025861 * np.log(tmp_internal) - 161.1195681661
if tmp_green < 0:
green = 0
elif tmp_green > 255:
green = 255
else:
green = tmp_green
else:
tmp_green = 288.1221695283 * np.power(tmp_internal - 60, -0.0755148492)
if tmp_green < 0:
green = 0
elif tmp_green > 255:
green = 255
else:
green = tmp_green
# blue
if tmp_internal >= 66:
blue = 255
elif tmp_internal <= 19:
blue = 0
else:
tmp_blue = 138.5177312231 * np.log(tmp_internal - 10) - 305.0447927307
if tmp_blue < 0:
blue = 0
elif tmp_blue > 255:
blue = 255
else:
blue = tmp_blue
return [red / 255, green / 255, blue / 255] |
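Quick sanity checks against the clamping behaviour above (assumes numpy is imported as `np`, as the function requires; channels are normalised to [0, 1]):

```python
r, g, b = kelvin2rgb(1800)   # candle-flame warm: red saturates, no blue
assert r == 1.0 and b == 0.0
r, g, b = kelvin2rgb(10000)  # cool daylight: blue saturates
assert b == 1.0
```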
python | def _stage(self, accepted, count=0):
"""This is a repeated state in the state removal algorithm"""
new5 = self._combine_rest_push()
new1 = self._combine_push_pop()
new2 = self._combine_push_rest()
new3 = self._combine_pop_rest()
new4 = self._combine_rest_rest()
new = new1 + new2 + new3 + new4 + new5
del new1
del new2
del new3
del new4
del new5
if len(new) == 0:
# self.printer()
# print 'PDA is empty'
# logging.debug('PDA is empty')
return None
self.statediag = self.statediag + new
del new
# print 'cleaning...'
# It is cheaper to create a new array than to use the old one and
# delete a key
newstates = []
for key in self.statediag:
if len(key.trans) == 0 or key.trans == {}:
# rint 'delete '+`key.id`
# self.statediag.remove(key)
pass
else:
newstates.append(key)
del self.statediag
self.statediag = newstates
self.quickresponse = {}
self.quickresponse_types = {}
self.quickresponse_types[0] = []
self.quickresponse_types[1] = []
self.quickresponse_types[2] = []
self.quickresponse_types[3] = []
self.quickresponse_types[4] = []
for state in self.statediag:
if state.id not in self.quickresponse:
self.quickresponse[state.id] = [state]
else:
self.quickresponse[state.id].append(state)
self.quickresponse_types[state.type].append(state)
# else:
# print `key.id`+' (type: '+`key.type`+' and sym:'+`key.sym`+')'
# print key.trans
# print 'checking...'
exists = self._check(accepted)
if exists == -1:
# DEBUGself.printer()
# raw_input('next step?')
return self._stage(accepted, count + 1)
else:
# DEBUGself.printer()
# print 'Found '
print(exists)
# return self._stage(accepted, count+1)
return exists |
java | @Override
public Iterable<Tag> httpLongRequestTags(@Nullable HttpServletRequest request, @Nullable Object handler) {
return Arrays.asList(WebMvcTags.method(request), WebMvcTags.uri(request, null));
} |
java | @Modified
protected void modified(Map<String, Object> map) {
String filterString = OpentracingConfiguration.getServerSkipPattern();
updateFilters(filterString);
} |
python | def request_sensor_sampling(self, req, msg):
"""Configure or query the way a sensor is sampled.
Sampled values are reported asynchronously using the #sensor-status
message.
Parameters
----------
name : str
Name of the sensor whose sampling strategy to query or configure.
strategy : {'none', 'auto', 'event', 'differential', \
'period', 'event-rate'}, optional
Type of strategy to use to report the sensor value. The
differential strategy type may only be used with integer or float
sensors. If this parameter is supplied, it sets the new strategy.
params : list of str, optional
Additional strategy parameters (dependent on the strategy type).
For the differential strategy, the parameter is an integer or float
giving the amount by which the sensor value may change before an
updated value is sent.
For the period strategy, the parameter is the sampling period
in float seconds.
The event strategy has no parameters. Note that this has changed
from KATCPv4.
For the event-rate strategy, a minimum period between updates and
a maximum period between updates (both in float seconds) must be
given. If the event occurs more than once within the minimum period,
only one update will occur. Whether or not the event occurs, the
sensor value will be updated at least once per maximum period.
The differential-rate strategy is not supported in this release.
Returns
-------
success : {'ok', 'fail'}
Whether the sensor-sampling request succeeded.
name : str
Name of the sensor queried or configured.
strategy : {'none', 'auto', 'event', 'differential', 'period'}
Name of the new or current sampling strategy for the sensor.
params : list of str
Additional strategy parameters (see description under Parameters).
Examples
--------
::
?sensor-sampling cpu.power.on
!sensor-sampling ok cpu.power.on none
?sensor-sampling cpu.power.on period 500
!sensor-sampling ok cpu.power.on period 500
"""
f = Future()
self.ioloop.add_callback(lambda: chain_future(
self._handle_sensor_sampling(req, msg), f))
return f |
java | public static <K, V> FIFOCache<K, V> newFIFOCache(int capacity, long timeout){
return new FIFOCache<K, V>(capacity, timeout);
} |
python | def get_events(self, from_=None, to=None):
"""Query a slice of the events.
Events are always returned in the order the were added.
Does never throw EventOrderError because it is hard to detect
from an append-only file.
Parameters:
from_ -- if not None, return only events added after the event with
id `from_`. If None, return from the start of history.
to -- if not None, return only events added before, and
including, the event with event id `to`. If None, return up
to, and including, the last added event.
returns -- an iterable of (event id, eventdata) tuples.
"""
assert from_ is None or isinstance(from_, str)
assert to is None or isinstance(to, str)
self._close()
try:
return self._unsafe_get_events(from_=from_, to=to)
finally:
self._open() |
python | def _grid_widgets(self):
"""Put the widgets in the correct position based on self.__compound."""
orient = str(self._scale.cget('orient'))
self._scale.grid(row=2, column=2, sticky='ew' if orient == tk.HORIZONTAL else 'ns',
padx=(0, self.__entryscalepad) if self.__compound is tk.RIGHT else
(self.__entryscalepad, 0) if self.__compound is tk.LEFT else 0,
pady=(0, self.__entryscalepad) if self.__compound is tk.BOTTOM else
(self.__entryscalepad, 0) if self.__compound is tk.TOP else 0)
self._entry.grid(row=1 if self.__compound is tk.TOP else 3 if self.__compound is tk.BOTTOM else 2,
column=1 if self.__compound is tk.LEFT else 3 if self.__compound is tk.RIGHT else 2)
if orient == tk.HORIZONTAL:
self.columnconfigure(0, weight=0)
self.columnconfigure(2, weight=1)
self.columnconfigure(4, weight=0)
self.rowconfigure(0, weight=1)
self.rowconfigure(2, weight=0)
self.rowconfigure(4, weight=1)
else:
self.rowconfigure(0, weight=0)
self.rowconfigure(2, weight=1)
self.rowconfigure(4, weight=0)
self.columnconfigure(0, weight=1)
self.columnconfigure(2, weight=0)
self.columnconfigure(4, weight=1) |
java | public synchronized boolean startUpgrade() throws IOException {
if(upgradeState) { // upgrade is already in progress
assert currentUpgrades != null :
"UpgradeManagerDatanode.currentUpgrades is null.";
UpgradeObjectDatanode curUO = (UpgradeObjectDatanode)currentUpgrades.first();
curUO.startUpgrade();
return true;
}
if(broadcastCommand != null) {
if(broadcastCommand.getVersion() > this.getUpgradeVersion()) {
// stop broadcasting, the cluster moved on
// start upgrade for the next version
broadcastCommand = null;
} else {
// the upgrade has been finished by this data-node,
// but the cluster is still running it,
// reply with the broadcast command
assert currentUpgrades == null :
"UpgradeManagerDatanode.currentUpgrades is not null.";
assert upgradeDaemon == null :
"UpgradeManagerDatanode.upgradeDaemon is not null.";
dataNode.getNSNamenode(namespaceId).processUpgradeCommand(broadcastCommand);
return true;
}
}
if(currentUpgrades == null)
currentUpgrades = getDistributedUpgrades();
if(currentUpgrades == null) {
DataNode.LOG.info("\n Distributed upgrade for DataNode version "
+ getUpgradeVersion() + " to current LV "
+ FSConstants.LAYOUT_VERSION + " cannot be started. "
+ "The upgrade object is not defined.");
return false;
}
upgradeState = true;
UpgradeObjectDatanode curUO = (UpgradeObjectDatanode)currentUpgrades.first();
curUO.setDatanode(dataNode);
curUO.startUpgrade();
upgradeDaemon = new Daemon(curUO);
upgradeDaemon.start();
DataNode.LOG.info("\n Distributed upgrade for DataNode "
+ dataNode.getDatanodeInfo()
+ " version " + getUpgradeVersion() + " to current LV "
+ FSConstants.LAYOUT_VERSION + " is started.");
return true;
} |
java | private void freeAllocatedSpace(java.util.Collection sortedFreeSpaceList)
{
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.entry(this,
cclass,
"freeAllocatedSpace",
new Object[] { new Integer(sortedFreeSpaceList.size()), new Long(freeSpaceByLength.size()) });
// Remove from the head of the sorted set until we find the first non-negative
// address - indicating that the storage was allocated
java.util.Iterator listIterator = sortedFreeSpaceList.iterator();
Directory.StoreArea currentArea = null;
while (listIterator.hasNext()) {
currentArea = (Directory.StoreArea) listIterator.next();
if (currentArea.byteAddress > 0)
break;
}
// Did we find at least one to merge?
if (currentArea != null) {
// We now have a pointer to the first store area in the sorted list
// that needs to be merged into the free space map.
// We iterate through the free space map (which is also in order)
// merging the entries in, and moving our pointer forwards.
FreeSpace spaceEntry = freeSpaceByAddressHead;
FreeSpace previousEntry = null;
do {
// If spaceEntry is null then we have reached the end of the list.
// We handle this case first, because we can avoid null-checks in
// other branches.
// The same logic is used to handle the case where we have moved
// past the point in the address-sorted free space list where this
// entry would be merged, and did not find any existing entries
// to merge it with. Merging would have been performed in branches
// below on an earlier pass round the loop if it was possible (as
// we would have looked at the entry that is now spaceEntry as
// spaceEntry.next in the below branches).
if (spaceEntry == null || // Tail of list reached
spaceEntry.address > currentArea.byteAddress // Moved past insertion point without merge
) {
// Create a new entry, unless this is a zero-sized entry
if (currentArea.length > 0) {
FreeSpace newSpaceEntry =
new FreeSpace(currentArea.byteAddress, currentArea.length);
// Link it in behind the current entry
newSpaceEntry.next = spaceEntry;
if (previousEntry != null) {
previousEntry.next = newSpaceEntry;
}
else {
// We are the new head
freeSpaceByAddressHead = newSpaceEntry;
}
newSpaceEntry.prev = previousEntry;
if (spaceEntry != null) {
spaceEntry.prev = newSpaceEntry;
}
// Add our extended entry into the length-sorted list
freeSpaceByLength.add(newSpaceEntry);
// Debug freespace list
// if (Tracing.isAnyTracingEnabled() && trace.isDebugEnabled()) trace.debug(this, cclass, methodName, "ADD to freespace list");
// Keep track of the maximum free space count as a statistic
if (gatherStatistics && freeSpaceByLength.size() > maxFreeSpaceCount)
maxFreeSpaceCount = freeSpaceByLength.size();
// As we've added a new entry before the current on, we should use it next time round
spaceEntry = newSpaceEntry;
// Previous entry stayed the same - as we've inserted without moving forwards
}
// Regardless of whether we added an entry, move onto the next store area and
// go back round the loop.
if (listIterator.hasNext()) {
currentArea = (Directory.StoreArea) listIterator.next();
}
else
currentArea = null; // We've run out of entries to merge
}
// Can our current store entry be merged with the current free space entry.
else if (spaceEntry.address + spaceEntry.length == currentArea.byteAddress) {
// We can merge this entry with the one before it.
// Remove from the length-sorted list and change the size
freeSpaceByLength.remove(spaceEntry);
spaceEntry.length += currentArea.length;
// Can we also merge it with the one after it?
FreeSpace nextSpaceEntry = spaceEntry.next;
if (nextSpaceEntry != null &&
currentArea.byteAddress + currentArea.length == nextSpaceEntry.address) {
// Remove the eliminated space entry from the length-sorted list
freeSpaceByLength.remove(nextSpaceEntry);
// Debug freespace list
// if (Tracing.isAnyTracingEnabled() && trace.isDebugEnabled()) trace.debug(this, cclass, methodName, "REMOVE from freespace list");
// Make the previous one larger
spaceEntry.length += nextSpaceEntry.length;
// Remove the next one
spaceEntry.next = nextSpaceEntry.next;
if (nextSpaceEntry.next != null) {
nextSpaceEntry.next.prev = spaceEntry;
}
}
// Add our extended entry into the length-sorted list
freeSpaceByLength.add(spaceEntry);
// We've merged this store entry now, so move onto the next one
// in the sorted list.
if (listIterator.hasNext()) {
currentArea = (Directory.StoreArea) listIterator.next();
}
else
currentArea = null; // We've run out of entries to merge
// Note we do not advance our position in the free space, as the
// current entry could also be of interest to the next store item.
}
// Can our current store entry be merged with the next free space entry
// (note that the case where it merges with both is already handled).
else if (spaceEntry.next != null &&
currentArea.byteAddress + currentArea.length == spaceEntry.next.address) {
// Remove from the length-sorted list and change the size
FreeSpace nextSpaceEntry = spaceEntry.next;
freeSpaceByLength.remove(nextSpaceEntry);
nextSpaceEntry.address = currentArea.byteAddress;
nextSpaceEntry.length += currentArea.length;
// Add back into the length-sorted list
freeSpaceByLength.add(nextSpaceEntry);
// We've merged this store entry now, so move onto the next one
// in the sorted list.
if (listIterator.hasNext()) {
currentArea = (Directory.StoreArea) listIterator.next();
}
else
currentArea = null; // We've run out of entries to merge
// Note we do not advance our position in the free space, as the
// current entry could also be of interest to the next store item.
}
// Otherwise this space entry is not interesting to us, and we
// can simply move onto the next one.
else {
previousEntry = spaceEntry;
spaceEntry = spaceEntry.next;
}
// Although looping through the free space map, our condition for
// breaking the loop is when we've run out of entries to merge.
} while (currentArea != null);
}
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.exit(this,
cclass,
"freeAllocatedSpace",
new Object[] { new Long(freeSpaceByLength.size()) });
} |
python | def average_temperature(self, unit='kelvin'):
"""Returns the average value in the temperature series
:param unit: the unit of measure for the temperature values. May be
among: '*kelvin*' (default), '*celsius*' or '*fahrenheit*'
:type unit: str
:returns: a float
:raises: ValueError when invalid values are provided for the unit of
measure or the measurement series is empty
"""
if unit not in ('kelvin', 'celsius', 'fahrenheit'):
raise ValueError("Invalid value for parameter 'unit'")
average = self._average(self._purge_none_samples(
self.temperature_series()))
if unit == 'kelvin':
result = average
elif unit == 'celsius':
result = temputils.kelvin_to_celsius(average)
elif unit == 'fahrenheit':
result = temputils.kelvin_to_fahrenheit(average)
return result |
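A standalone sketch of the conversions the method delegates to (`temputils` in the library above); the formulas are the standard Kelvin conversions:

```python
def kelvin_to_celsius(k):
    return k - 273.15

def kelvin_to_fahrenheit(k):
    return k * 9.0 / 5.0 - 459.67

assert round(kelvin_to_celsius(293.15), 2) == 20.0
assert round(kelvin_to_fahrenheit(293.15), 2) == 68.0
```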
python | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'values') and self.values is not None:
_dict['values'] = [x._to_dict() for x in self.values]
if hasattr(self, 'pagination') and self.pagination is not None:
_dict['pagination'] = self.pagination._to_dict()
return _dict |
java | @Override
public DescribeSnapshotCopyGrantsResult describeSnapshotCopyGrants(DescribeSnapshotCopyGrantsRequest request) {
request = beforeClientExecution(request);
return executeDescribeSnapshotCopyGrants(request);
} |
java | public void marshall(LocationListEntry locationListEntry, ProtocolMarshaller protocolMarshaller) {
if (locationListEntry == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(locationListEntry.getLocationArn(), LOCATIONARN_BINDING);
protocolMarshaller.marshall(locationListEntry.getLocationUri(), LOCATIONURI_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def record_close(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /record-xxxx/close API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose
"""
return DXHTTPRequest('/%s/close' % object_id, input_params, always_retry=always_retry, **kwargs) |
java | @Override
public SortedSet<String> getDomainNames(String name) {
if ( name.length() < 4)
throw new IllegalArgumentException("Can't interpret IDs that are shorter than 4 residues!");
String url = String.format("%srepresentativeDomains?cluster=%s&structureId=%s",
base, cutoff, name);
return requestRepresentativeDomains(url);
} |
java | public void lock() {
boolean wasInterrupted = false;
while (true) {
try {
impl.lockInterruptibly();
if (wasInterrupted) {
Thread.currentThread().interrupt();
}
return;
}
catch (InterruptedException e) {
wasInterrupted = true;
}
}
} |
python | async def _client_ready_async(self):
"""Determine whether the client is ready to start sending messages.
        To be ready, the connection must be open and authentication complete.
The Session, Link and MessageSender must be open and in non-errored
states.
:rtype: bool
:raises: ~uamqp.errors.MessageHandlerError if the MessageSender
goes into an error state.
"""
# pylint: disable=protected-access
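        # Lazily create and open the sender link on the first readiness
        # check; report not ready until the open completes.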
if not self.message_handler:
self.message_handler = self.sender_type(
self._session, self._name, self._remote_address,
name='sender-link-{}'.format(uuid.uuid4()),
debug=self._debug_trace,
send_settle_mode=self._send_settle_mode,
max_message_size=self._max_message_size,
properties=self._link_properties,
error_policy=self._error_policy,
encoding=self._encoding,
loop=self.loop)
await asyncio.shield(self.message_handler.open_async())
return False
if self.message_handler.get_state() == constants.MessageSenderState.Error:
raise errors.MessageHandlerError(
"Message Sender Client is in an error state. "
"Please confirm credentials and access permissions."
"\nSee debug trace for more details.")
if self.message_handler.get_state() != constants.MessageSenderState.Open:
return False
return True |
python | def related_archives(self):
"""
The pathnames of the source distribution(s) for this requirement (a list of strings).
.. note:: This property is very new in pip-accel and its logic may need
some time to mature. For now any misbehavior by this property
shouldn't be too much of a problem because the pathnames
reported by this property are only used for cache
invalidation (see the :attr:`last_modified` and
:attr:`checksum` properties).
"""
# Escape the requirement's name for use in a regular expression.
name_pattern = escape_name(self.name)
        # Escape the requirement's version for use in a regular expression.
version_pattern = re.escape(self.version)
# Create a regular expression that matches any of the known source
# distribution archive extensions.
extension_pattern = '|'.join(re.escape(ext) for ext in ARCHIVE_EXTENSIONS if ext != '.whl')
# Compose the regular expression pattern to match filenames of source
# distribution archives in the local source index directory.
pattern = '^%s-%s(%s)$' % (name_pattern, version_pattern, extension_pattern)
# Compile the regular expression for case insensitive matching.
compiled_pattern = re.compile(pattern, re.IGNORECASE)
# Find the matching source distribution archives.
return [os.path.join(self.config.source_index, fn)
for fn in os.listdir(self.config.source_index)
if compiled_pattern.match(fn)] |
java | public List<Motion> findCameraMotions(Camera target , @Nullable List<Motion> storage ) {
if( storage == null )
storage = new ArrayList<>();
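        // Collect every motion whose source and destination views were both
        // captured by the target camera.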
for (int i = 0; i < edges.size(); i++) {
Motion m = edges.get(i);
if( m.viewSrc.camera == target && m.viewDst.camera == target ) {
storage.add(m);
}
}
return storage;
} |
python | def latcyl(radius, lon, lat):
"""
Convert from latitudinal coordinates to cylindrical coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latcyl_c.html
    :param radius: Distance of a point from the origin.
    :type radius: float
    :param lon: Angle of the point from the XZ plane in radians.
    :type lon: float
    :param lat: Angle of the point from the XY plane in radians.
    :type lat: float
:return: (r, lonc, z)
:rtype: tuple
"""
radius = ctypes.c_double(radius)
lon = ctypes.c_double(lon)
lat = ctypes.c_double(lat)
r = ctypes.c_double()
lonc = ctypes.c_double()
z = ctypes.c_double()
libspice.latcyl_c(radius, lon, lat, ctypes.byref(r), ctypes.byref(lonc),
ctypes.byref(z))
return r.value, lonc.value, z.value |
python | def require_directory(self):
"""Ensure directory path entered in dialog exist.
When the path does not exist, this function will ask the user if he
want to create it or not.
:raises: CanceledImportDialogError - when user choose 'No' in
the question dialog for creating directory.
"""
path = self.output_directory.text()
if path == '':
            # If left empty, we create a temporary directory
return
if os.path.exists(path):
return
        title = self.tr('Directory %s does not exist') % path
        question = self.tr(
            'Directory %s does not exist. Do you want to create it?') % path
# noinspection PyCallByClass,PyTypeChecker
answer = QMessageBox.question(
self, title, question, QMessageBox.Yes | QMessageBox.No)
if answer == QMessageBox.Yes:
if len(path) != 0:
os.makedirs(path)
else:
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
display_warning_message_box(
self,
self.tr('InaSAFE error'),
self.tr('Output directory can not be empty.'))
raise CanceledImportDialogError()
else:
raise CanceledImportDialogError() |
java | public <T extends Function> T addFunction(T function) {
addItem(function, functions, "functions");
return function;
} |
java | void encodeProperty(ByteArrayOutputStream baos, Object value) throws JMSException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "encodeProperty", new Object[]{baos, value});
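        // Map the two-valued ON/OFF property to its compact short-form
        // wire encoding.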
if (value.equals(ApiJmsConstants.ON)) {
super.encodeProperty(baos, SHORT_ON);
}
else {
super.encodeProperty(baos, SHORT_OFF);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "encodeProperty");
} |
java | public EncodingState getEncodingStateFor(CharSequence string) {
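        // Determine which encoders have already been recorded against this
        // exact CharSequence instance, keyed by its identity hash code.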
int identityHashCode = System.identityHashCode(string);
Set<Encoder> result = null;
for (Map.Entry<Encoder, Set<Integer>> entry : encodingTagIdentityHashCodes.entrySet()) {
if (entry.getValue().contains(identityHashCode)) {
if (result == null) {
result = Collections.singleton(entry.getKey());
}
else {
if (result.size() == 1) {
result = new HashSet<Encoder>(result);
}
result.add(entry.getKey());
}
}
}
return result != null ? new EncodingStateImpl(result, null) : EncodingStateImpl.UNDEFINED_ENCODING_STATE;
} |
java | public KafkaMessage consumeMessage(String consumerGroupId, boolean consumeFromBeginning,
String topic) {
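        // Delegate to the per-group consumer to pull the next message from
        // the topic.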
KafkaMsgConsumer kafkaConsumer = getKafkaConsumer(consumerGroupId, consumeFromBeginning);
return kafkaConsumer.consume(topic);
} |
python | def toList(variable, types=(basestring, int, float, )):
"""Converts a variable of type string, int, float to a list, containing the
variable as the only element.
:param variable: any python object
:type variable: (str, int, float, others)
:returns: [variable] or variable
"""
if isinstance(variable, types):
return [variable]
else:
return variable |
java | private void moveIndex() {
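        // Advance to the next combination in lexicographic order: bump the
        // rightmost index that can still grow, then reset every index to its
        // right; when no index can grow, iteration is exhausted.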
int i = rightmostIndexBelowMax();
if (i >= 0) {
index[i] = index[i]+1;
for (int j = i+1; j<r; j++)
index[j] = index[j-1] + 1;
}
else hasNext = false;
} |
python | def augmentation_transform(self, data, label): # pylint: disable=arguments-differ
"""Override Transforms input data with specified augmentations."""
for aug in self.auglist:
data, label = aug(data, label)
return (data, label) |
python | def result(cls, ab, pa, pitch_list):
"""
At Bat Result
        :param ab: at bat object (type: BeautifulSoup)
        :param pa: at bat data for the plate appearance
        :param pitch_list: pitching data
        :return: plate appearance result values (dict)
"""
atbat = OrderedDict()
atbat['ball_ct'] = MlbamUtil.get_attribute_stats(ab, 'b', int, None)
atbat['strike_ct'] = MlbamUtil.get_attribute_stats(ab, 's', int, None)
atbat['pitch_seq'] = ''.join([pitch['pitch_res'] for pitch in pitch_list])
atbat['pitch_type_seq'] = '|'.join([pitch['pitch_type'] for pitch in pitch_list])
atbat['battedball_cd'] = RetroSheet.battedball_cd(pa['event_cd'], pa['event_tx'], pa['ab_des'])
return atbat |
java | public int getFormatIndex(CellValueRecordInterface cell) {
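        // Resolve the cell's extended format (XF) record and return the
        // number-format index it references, or -1 if the XF record is
        // missing.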
ExtendedFormatRecord xfr = _xfRecords.get(cell.getXFIndex());
if (xfr == null) {
logger.log(POILogger.ERROR, "Cell " + cell.getRow() + "," + cell.getColumn()
+ " uses XF with index " + cell.getXFIndex() + ", but we don't have that");
return -1;
}
return xfr.getFormatIndex();
} |
python | def get_object_by_filename(self, request, filename):
"""
        Returns the owner object for the given file name (of the file to be
        downloaded).
This can be used to implement custom permission checks.
:param request: HttpRequest
:param filename: File name of the downloaded object.
:return: owner object
"""
        kw = {self.file_field: filename}
obj = self.get_queryset(request).filter(**kw).first()
if not obj:
raise Http404(_('File {} not found').format(filename))
return self.get_object(request, obj.id) |