language (stringclasses, 2 values) | func_code_string (stringlengths, 63–466k)
---|---
python | def import_jwks(self, jwks, issuer):
"""
Imports all the keys that are represented in a JWKS
:param jwks: Dictionary representation of a JWKS
:param issuer: Who 'owns' the JWKS
"""
try:
_keys = jwks["keys"]
except KeyError:
raise ValueError('Not a proper JWKS')
else:
try:
self.issuer_keys[issuer].append(
self.keybundle_cls(_keys, verify_ssl=self.verify_ssl))
except KeyError:
self.issuer_keys[issuer] = [self.keybundle_cls(
_keys, verify_ssl=self.verify_ssl)] |
python | def type_name(value):
"""
Returns a user-readable name for the type of an object
:param value:
A value to get the type name of
:return:
A unicode string of the object's type name
"""
if inspect.isclass(value):
cls = value
else:
cls = value.__class__
if cls.__module__ in set(['builtins', '__builtin__']):
return cls.__name__
return '%s.%s' % (cls.__module__, cls.__name__) |
java | public static void segWithStopWords(File input, File output) throws Exception{
Utils.seg(input, output, false, SegmentationAlgorithm.MaxNgramScore);
} |
java | public static String[] merge(final String[] input, final String[] list) {
final List<String> v = new ArrayList<String>(Arrays.asList(list));
for (final String anInput : input) {
if ((null != anInput) && !v.contains(anInput)) {
v.add(anInput);
}
}
return v.toArray(new String[v.size()]);
} |
python | def disable_cert_validation():
"""Context manager to temporarily disable certificate validation in the standard SSL
library.
Note: This should not be used in production code but is sometimes useful for
troubleshooting certificate validation issues.
By design, the standard SSL library does not provide a way to disable verification
of the server side certificate. However, a patch to disable validation is described
by the library developers. This context manager allows applying the patch for
specific sections of code.
"""
current_context = ssl._create_default_https_context
ssl._create_default_https_context = ssl._create_unverified_context
try:
yield
finally:
ssl._create_default_https_context = current_context |
python | def _evolve_reader(in_file):
"""Generate a list of region IDs and trees from a top_k_trees evolve.py file.
"""
cur_id_list = None
cur_tree = None
with open(in_file) as in_handle:
for line in in_handle:
if line.startswith("id,"):
if cur_id_list:
yield cur_id_list, cur_tree
cur_id_list = []
cur_tree = None
elif cur_tree is not None:
if line.strip() and not line.startswith("Number of non-empty"):
cur_tree.append(line.rstrip())
elif not line.strip() and cur_id_list and len(cur_id_list) > 0:
cur_tree = []
elif line.strip():
parts = []
for part in line.strip().split("\t"):
if part.endswith(","):
part = part[:-1]
parts.append(part)
if len(parts) > 4:
nid, freq, _, _, support = parts
cur_id_list.append((nid, freq, support.split("; ")))
if cur_id_list:
yield cur_id_list, cur_tree |
java | void signedSubtract(MutableBigInteger addend) {
if (sign == 1)
sign = sign * subtract(addend);
else
add(addend);
if (intLen == 0)
sign = 1;
} |
java | public static void set( Object obj, String method, String value ) throws NoSuchMethodException
{
for( Setter s : c_setters )
{
try
{
s.set( obj, method, value );
return;
}
catch(Exception e) {}
}
throw new NoSuchMethodException("No valid setter found for "+method+"(\""+value+"\")");
} |
python | def get_endpoint_resources(self, device_id, **kwargs): # noqa: E501
"""List the resources on an endpoint # noqa: E501
The list of resources is cached by Device Management Connect, so this call does not create a message to the device. **Example usage:** curl -X GET https://api.us-east-1.mbedcloud.com/v2/endpoints/{device-id} -H 'authorization: Bearer {api-key}' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.get_endpoint_resources(device_id, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str device_id: A unique device ID for an endpoint. Note that the ID needs to be an exact match. You cannot use wildcards here. (required)
:return: list[Resource]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.get_endpoint_resources_with_http_info(device_id, **kwargs) # noqa: E501
else:
(data) = self.get_endpoint_resources_with_http_info(device_id, **kwargs) # noqa: E501
return data |
java | public List<CmsSolrIndex> getAllSolrIndexes() {
List<CmsSolrIndex> result = new ArrayList<CmsSolrIndex>();
for (String indexName : getIndexNames()) {
CmsSolrIndex index = getIndexSolr(indexName);
if (index != null) {
result.add(index);
}
}
return result;
} |
java | public void setFlashAssetSize(com.google.api.ads.admanager.axis.v201811.Size flashAssetSize) {
this.flashAssetSize = flashAssetSize;
} |
java | @SuppressWarnings("unchecked")
public QueryResult findTaxClassificationByParentId(IEntity entity) throws FMSException {
IntuitMessage intuitMessage = prepareFindByParentId(entity);
//execute interceptors
executeInterceptors(intuitMessage);
QueryResult queryResult = null;
// Iterate the IntuitObjects list in QueryResponse and convert to <T> entity
IntuitResponse intuitResponse = (IntuitResponse) intuitMessage.getResponseElements().getResponse();
if (intuitResponse != null) {
QueryResponse queryResponse = intuitResponse.getQueryResponse();
if (queryResponse != null) {
queryResult = getQueryResult(queryResponse);
}
}
return queryResult;
} |
java | @BetaApi
public final ListForwardingRulesPagedResponse listForwardingRules(ProjectRegionName region) {
ListForwardingRulesHttpRequest request =
ListForwardingRulesHttpRequest.newBuilder()
.setRegion(region == null ? null : region.toString())
.build();
return listForwardingRules(request);
} |
java | public BareJid getOwnJid() {
if (ownJid == null && connection().isAuthenticated()) {
ownJid = connection().getUser().asBareJid();
}
return ownJid;
} |
python | def write_tabular(obj, filepath):
"""Write tabular object in HDF5 or pickle format
Args:
obj (array or DataFrame): tabular object to write
filepath (path-like): path to write to; must end in '.h5' or '.pkl'
"""
_, fn, ext = splitext2(filepath)
if ext == '.h5':
_write_tabular_h5(obj, filepath)
elif ext == '.pkl':
_write_tabular_pickle(obj, filepath)
else:
raise NotImplementedError |
java | protected InputStream getStreamFromNetwork(String imageUri, Object extra) throws IOException {
HttpURLConnection conn = createConnection(imageUri, extra);
int redirectCount = 0;
while (conn.getResponseCode() / 100 == 3 && redirectCount < MAX_REDIRECT_COUNT) {
conn = createConnection(conn.getHeaderField("Location"), extra);
redirectCount++;
}
InputStream imageStream;
try {
imageStream = conn.getInputStream();
} catch (IOException e) {
// Read all data to allow reuse connection (http://bit.ly/1ad35PY)
IoUtils.readAndCloseStream(conn.getErrorStream());
throw e;
}
if (!shouldBeProcessed(conn)) {
IoUtils.closeSilently(imageStream);
throw new IOException("Image request failed with response code " + conn.getResponseCode());
}
return new ContentLengthInputStream(new BufferedInputStream(imageStream, BUFFER_SIZE), conn.getContentLength());
} |
java | @Handler(channels = NetworkChannel.class)
public void onPurge(Purge event, IOSubchannel netChannel) {
LinkedIOSubchannel.downstreamChannel(this, netChannel,
WebAppMsgChannel.class).ifPresent(appChannel -> {
appChannel.handlePurge(event);
});
} |
python | def log_config():
"""Logs the config used to start the application"""
conf = '\n'.join(
['{}="{}"'.format(k, v) for k, v
in sorted(options.as_dict().iteritems())])
logging.info('Service started with the following settings:\n' + conf) |
java | protected void processKeyGenerator(EntityTable entityTable, EntityField field, EntityColumn entityColumn) {
// KeySql has the highest priority
if (field.isAnnotationPresent(KeySql.class)) {
processKeySql(entityTable, entityColumn, field.getAnnotation(KeySql.class));
} else if (field.isAnnotationPresent(GeneratedValue.class)) {
// execute sql - selectKey
processGeneratedValue(entityTable, entityColumn, field.getAnnotation(GeneratedValue.class));
}
} |
java | @Override protected void handleEvents(final String EVENT_TYPE) {
super.handleEvents(EVENT_TYPE);
if ("VISIBILITY".equals(EVENT_TYPE)) {
Helper.enableNode(titleText, !tile.getTitle().isEmpty());
Helper.enableNode(text, tile.isTextVisible());
countryContainer.setMaxSize(size * 0.9, tile.isTextVisible() ? size * 0.68 : size * 0.795);
countryContainer.setPrefSize(size * 0.9, tile.isTextVisible() ? size * 0.68 : size * 0.795);
} else if ("RECALC".equals(EVENT_TYPE)) {
country = tile.getCountry();
if (null == country) { country = Country.DE; }
countryPaths = Helper.getHiresCountryPaths().get(country.name());
countryPaths.forEach(path -> path.setFill(tile.getBarColor()));
countryGroup.getChildren().setAll(countryPaths);
text.setText(country.getDisplayName());
resize();
redraw();
}
} |
java | public final AnnotateAssessmentResponse annotateAssessment(
String name, AnnotateAssessmentRequest.Annotation annotation) {
AnnotateAssessmentRequest request =
AnnotateAssessmentRequest.newBuilder().setName(name).setAnnotation(annotation).build();
return annotateAssessment(request);
} |
java | @Expose
public static String oxford(Collection<?> items, Locale locale)
{
return oxford(items.toArray(), locale);
} |
java | public @Nullable <K, V> CacheProxy<K, V> tryToCreateFromExternalSettings(String cacheName) {
Optional<CaffeineConfiguration<K, V>> configuration =
TypesafeConfigurator.from(rootConfig, cacheName);
return configuration.isPresent() ? createCache(cacheName, configuration.get()) : null;
} |
java | private static Object getPrivateAttributes(HttpServletRequest req, String key) {
HttpServletRequest sr = req;
if (sr instanceof HttpServletRequestWrapper) {
HttpServletRequestWrapper w = (HttpServletRequestWrapper) sr;
sr = (HttpServletRequest) w.getRequest();
while (sr != null && sr instanceof HttpServletRequestWrapper)
sr = (HttpServletRequest) ((HttpServletRequestWrapper) sr).getRequest();
}
if (sr != null && sr instanceof IPrivateRequestAttributes) {
return ((IPrivateRequestAttributes) sr).getPrivateAttribute(key);
}
return null;
} |
python | def helpEvent( self, event ):
"""
Displays a tool tip for the given help event.
:param event | <QHelpEvent>
"""
item = self.itemAt(event.scenePos())
if ( item and item and item.toolTip() ):
parent = self.parent()
rect = item.path().boundingRect()
point = event.scenePos()
point.setY(item.pos().y() + rect.bottom())
point = parent.mapFromScene(point)
point = parent.mapToGlobal(point)
XPopupWidget.showToolTip(item.toolTip(),
point = point,
parent = parent)
event.accept()
else:
super(XCalendarScene, self).helpEvent(event) |
python | def print_info(self):
"andreax + pts/0 2013-08-21 08:58 . 32341 (l26.box)"
" pts/34 2013-06-12 15:04 26396 id=s/34 term=0 exit=0"
# if self.ut_type not in [6,7]:
# return
print("%-10s %-12s %15s %15s %-8s" % (
str_from_c(self.ut_user),
str_from_c(self.ut_line),
time.strftime("%Y-%m-%d %H:%M", time.gmtime(self.ut_tv.tv_sec)),
self.ut_pid,
str_from_c(self.ut_host) and "(%s)" % str_from_c(self.ut_host) or str_from_c(self.ut_id) and "id=%s" % str_from_c(self.ut_id) or "")) |
python | def get_child(self, name: YangIdentifier,
ns: YangIdentifier = None) -> Optional[SchemaNode]:
"""Return receiver's schema child.
Args:
name: Child's name.
ns: Child's namespace (= `self.ns` if absent).
"""
ns = ns if ns else self.ns
todo = []
for child in self.children:
if child.name is None:
todo.append(child)
elif child.name == name and child.ns == ns:
return child
for c in todo:
grandchild = c.get_child(name, ns)
if grandchild is not None:
return grandchild |
python | def run_mace_smothr(x, y, bass_enhancement=0.0): # pylint: disable=unused-argument
"""Run the FORTRAN SMOTHR."""
N = len(x)
weight = numpy.ones(N)
results = numpy.zeros(N)
flags = numpy.zeros((N, 7))
mace.smothr(1, x, y, weight, results, flags)
return results |
java | private static int getCycleNumber(long epochDay) {
Long[] days = ADJUSTED_CYCLES;
int cycleNumber;
try {
for (int i = 0; i < days.length; i++) {
if (epochDay < days[i].longValue()) {
return i - 1;
}
}
cycleNumber = (int) epochDay / 10631;
} catch (ArrayIndexOutOfBoundsException e) {
cycleNumber = (int) epochDay / 10631;
}
return cycleNumber;
} |
java | public <V> V attachIfAbsent(final AttachmentKey<V> key, final V value) {
assert key != null;
return key.cast(contextAttachments.putIfAbsent(key, value));
} |
java | private CompletableFuture<Integer> reconcileData(AggregatedAppendOperation op, SegmentProperties storageInfo, TimeoutTimer timer) {
InputStream appendStream = this.dataSource.getAppendData(op.getStreamSegmentId(), op.getStreamSegmentOffset(), (int) op.getLength());
if (appendStream == null) {
return Futures.failedFuture(new ReconciliationFailureException(
String.format("Unable to reconcile operation '%s' because no append data is associated with it.", op), this.metadata, storageInfo));
}
// Only read as much data as we need.
long readLength = Math.min(op.getLastStreamSegmentOffset(), storageInfo.getLength()) - op.getStreamSegmentOffset();
assert readLength > 0 : "Append Operation to be reconciled is beyond the Segment's StorageLength (" + storageInfo.getLength() + "): " + op;
// Read all data from storage.
byte[] storageData = new byte[(int) readLength];
AtomicInteger reconciledBytes = new AtomicInteger();
return Futures
.loop(
() -> reconciledBytes.get() < readLength,
() -> this.storage.read(this.handle.get(), op.getStreamSegmentOffset() + reconciledBytes.get(), storageData, reconciledBytes.get(), (int) readLength - reconciledBytes.get(), timer.getRemaining()),
bytesRead -> {
assert bytesRead > 0 : String.format("Unable to make any read progress when reconciling operation '%s' after reading %s bytes.", op, reconciledBytes);
reconciledBytes.addAndGet(bytesRead);
},
this.executor)
.thenApplyAsync(v -> {
// Compare, byte-by-byte, the contents of the append.
verifySame(appendStream, storageData, op, storageInfo);
return reconciledBytes.get();
}, this.executor);
} |
java | public HitResult intersect(Shape shape, Line line) {
float distance = Float.MAX_VALUE;
HitResult hit = null;
for (int i=0;i<shape.getPointCount();i++) {
int next = rationalPoint(shape, i+1);
Line local = getLine(shape, i, next);
Vector2f pt = line.intersect(local, true);
if (pt != null) {
float newDis = pt.distance(line.getStart());
if ((newDis < distance) && (newDis > EPSILON)) {
hit = new HitResult();
hit.pt = pt;
hit.line = local;
hit.p1 = i;
hit.p2 = next;
distance = newDis;
}
}
}
return hit;
} |
python | def encode(self, encoding='utf-8', errors='strict'):
"""
Returns bytes
Encode S using the codec registered for encoding. Default encoding
is 'utf-8'. errors may be given to set a different error
handling scheme. Default is 'strict' meaning that encoding errors raise
a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and
'xmlcharrefreplace' as well as any other name registered with
codecs.register_error that can handle UnicodeEncodeErrors.
"""
from future.types.newbytes import newbytes
# Py2 unicode.encode() takes encoding and errors as optional parameter,
# not keyword arguments as in Python 3 str.
# For the surrogateescape error handling mechanism, the
# codecs.register_error() function seems to be inadequate for an
# implementation of it when encoding. (Decoding seems fine, however.)
# For example, in the case of
# u'\udcc3'.encode('ascii', 'surrogateescape_handler')
# after registering the ``surrogateescape_handler`` function in
# future.utils.surrogateescape, both Python 2.x and 3.x raise an
# exception anyway after the function is called because the unicode
# string it has to return isn't encodable strictly as ASCII.
if errors == 'surrogateescape':
if encoding == 'utf-16':
# Known to fail here. See test_encoding_works_normally()
raise NotImplementedError('FIXME: surrogateescape handling is '
'not yet implemented properly')
# Encode char by char, building up list of byte-strings
mybytes = []
for c in self:
code = ord(c)
if 0xD800 <= code <= 0xDCFF:
mybytes.append(newbytes([code - 0xDC00]))
else:
mybytes.append(c.encode(encoding=encoding))
return newbytes(b'').join(mybytes)
return newbytes(super(newstr, self).encode(encoding, errors)) |
python | def load_file(self, path, objtype=None, encoding='utf-8'):
'''
Load the file specified by path
This method will first try to load the file contents from cache and
if there is a cache miss, it will load the contents from disk
Args:
path (string): The full or relative path to the file to be loaded
encoding (string): The file contents text encoding
objtype (object): The object type of the file contents. This
is used to type check the deserialized content against the
contents loaded from disk.
Ignore serializing if objtype is string_types
Returns:
object: The deserialized file contents which could be either a
string object or a dict object
Raises:
ConfigurationError:
'''
path = self.abspath(path)
debug('file path is %s' % path)
if path in self._cache:
return self._cache[path]
try:
debug('cache miss, attempting to load file from disk: %s' % path)
contents = parsed_data = self.get_contents(path)
if encoding:
parsed_data = contents.encode(encoding)
except ConfigurationError as exc:
debug(exc)
raise
except UnicodeEncodeError:
raise ConfigurationError('unable to encode file contents')
if objtype is not string_types:
for deserializer in (self._load_json, self._load_yaml):
parsed_data = deserializer(contents)
if parsed_data:
break
if objtype and not isinstance(parsed_data, objtype):
debug('specified file %s is not of type %s' % (path, objtype))
raise ConfigurationError('invalid file serialization type for contents')
self._cache[path] = parsed_data
return parsed_data |
java | @Override
public <T> T parse(String in, Class<? extends T> type) throws JSONMarshallException {
try {
return mapper.readValue(in, type);
} catch (JsonParseException e) {
throw new JSONMarshallException("Unable to parse non-well-formed content", e);
} catch (JsonMappingException e) {
throw new JSONMarshallException("Fatal problems occurred while mapping content", e);
} catch (IOException e) {
throw new JSONMarshallException("I/O exception of some sort has occurred", e);
}
} |
python | def _admin_metadata_from_uri(uri, config_path):
"""Helper function for getting admin metadata."""
uri = dtoolcore.utils.sanitise_uri(uri)
storage_broker = _get_storage_broker(uri, config_path)
admin_metadata = storage_broker.get_admin_metadata()
return admin_metadata |
java | public void fillInitially(List<CmsVfsEntryBean> entries, String selectedSiteRoot) {
clear();
for (CmsVfsEntryBean entry : entries) {
if (entry != null) {
CmsLazyTreeItem item = createItem(entry);
addWidgetToList(item);
}
}
if (null != selectedSiteRoot) {
selectSite(selectedSiteRoot);
}
m_initialized = true;
} |
java | public String cardinal(Long n) {
return (n == null) ? null : cardinal(n.longValue());
} |
python | def vector_distance(v1, v2):
"""Given 2 vectors of multiple dimensions, calculate the euclidean
distance measure between them."""
dist = 0
for dim in v1:
for x in v1[dim]:
dd = int(v1[dim][x]) - int(v2[dim][x])
dist = dist + dd**2
return dist |
java | public void setAce_class(String v) {
if (Entity_Type.featOkTst && ((Entity_Type)jcasType).casFeat_ace_class == null)
jcasType.jcas.throwFeatMissing("ace_class", "de.julielab.jules.types.ace.Entity");
jcasType.ll_cas.ll_setStringValue(addr, ((Entity_Type)jcasType).casFeatCode_ace_class, v);} |
java | public void setToolbar(@NonNull Activity activity, @NonNull Toolbar toolbar) {
setToolbar(activity, toolbar, false);
} |
python | def to_json(self):
''' Returns the JSON representation of this graph. '''
roots = []
for r in self.roots:
roots.append(r.to_json())
return {'roots': roots} |
python | def use_plenary_asset_view(self):
"""Pass through to provider AssetLookupSession.use_plenary_asset_view"""
self._object_views['asset'] = PLENARY
# self._get_provider_session('asset_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_plenary_asset_view()
except AttributeError:
pass |
python | def set(context="notebook", style="darkgrid", palette="deep",
font="sans-serif", font_scale=1, color_codes=False, rc=None):
"""Set aesthetic parameters in one step.
Each set of parameters can be set directly or temporarily, see the
referenced functions below for more information.
Parameters
----------
context : string or dict
Plotting context parameters, see :func:`plotting_context`
style : string or dict
Axes style parameters, see :func:`axes_style`
palette : string or sequence
Color palette, see :func:`color_palette`
font : string
Font family, see matplotlib font manager.
font_scale : float, optional
Separate scaling factor to independently scale the size of the
font elements.
color_codes : bool
If ``True`` and ``palette`` is a seaborn palette, remap the shorthand
color codes (e.g. "b", "g", "r", etc.) to the colors from this palette.
rc : dict or None
Dictionary of rc parameter mappings to override the above.
"""
mpl.rcParams = {}
set_context(context, font_scale)
set_style(style, rc={"font.family": font})
if rc is not None:
mpl.rcParams.update(rc)
return mpl.rcParams |
java | private static Class<?> getArgumentType(ArgumentMatcher<?> argumentMatcher) {
Method[] methods = argumentMatcher.getClass().getMethods();
for (Method method : methods) {
if (isMatchesMethod(method)) {
return method.getParameterTypes()[0];
}
}
throw new NoSuchMethodError("Method 'matches(T)' not found in ArgumentMatcher: " + argumentMatcher + " !\r\n Please file a bug with this stack trace at: https://github.com/mockito/mockito/issues/new ");
} |
java | public final <E> FluentCloseableIterable<E> transform(Function<? super T, ? extends E> function) {
return from(CloseableIterables.transform(this, function));
} |
java | public static String encode(byte[] bytes, String indentation) {
int length = bytes.length;
if (length == 0) return ""; // empty byte array
String encoded = Base64.encodeBase64String(bytes).replaceAll("\\s", ""); // remove all white space
StringBuilder result = new StringBuilder();
if (indentation != null) result.append(indentation);
result.append(encoded.charAt(0));
for (int c = 1; c < encoded.length(); c++) {
if (c % 80 == 0) {
// format to indented 80 character blocks
result.append("\n");
if (indentation != null) result.append(indentation);
}
result.append(encoded.charAt(c));
}
return result.toString();
} |
java | public <T> T getDocument(String indexName, String documentId, Class<T> beanType) throws ElasticSearchException{
return getDocument( indexName, _doc, documentId, beanType);
} |
python | def set_schema_to_public(self):
"""
Instructs to stay in the common 'public' schema.
"""
self.tenant = FakeTenant(schema_name=get_public_schema_name())
self.schema_name = get_public_schema_name()
self.set_settings_schema(self.schema_name)
self.search_path_set = False |
java | public static IReactionSet getRelevantReactionsAsProduct(IReactionSet reactSet, IAtomContainer molecule) {
IReactionSet newReactSet = reactSet.getBuilder().newInstance(IReactionSet.class);
for (IReaction reaction : reactSet.reactions()) {
for (IAtomContainer atomContainer : reaction.getProducts().atomContainers())
if (atomContainer.equals(molecule)) newReactSet.addReaction(reaction);
}
return newReactSet;
} |
java | private void downloadConservation(Species species, String assembly, Path speciesFolder)
throws IOException, InterruptedException {
logger.info("Downloading conservation information ...");
Path conservationFolder = speciesFolder.resolve("conservation");
if (species.getScientificName().equals("Homo sapiens")) {
makeDir(conservationFolder);
makeDir(conservationFolder.resolve("phastCons"));
makeDir(conservationFolder.resolve("phylop"));
makeDir(conservationFolder.resolve("gerp"));
String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
"15", "16", "17", "18", "19", "20", "21", "22", "X", "Y", "M", };
if (assembly.equalsIgnoreCase("GRCh37")) {
logger.debug("Downloading GERP++ ...");
downloadFile(configuration.getDownload().getGerp().getHost(),
conservationFolder.resolve(EtlCommons.GERP_SUBDIRECTORY + "/" + EtlCommons.GERP_FILE).toAbsolutePath().toString());
saveVersionData(EtlCommons.CONSERVATION_DATA, GERP_NAME, null, getTimeStamp(),
Collections.singletonList(configuration.getDownload().getGerp().getHost()),
conservationFolder.resolve("gerpVersion.json"));
String url = configuration.getDownload().getConservation().getHost() + "/hg19";
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons46way/primates/chr" + chromosomes[i] + ".phastCons46way.primates.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons46way.primates.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP46way/primates/chr" + chromosomes[i] + ".phyloP46way.primate.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP46way.primate.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phyloPVersion.json"));
}
if (assembly.equalsIgnoreCase("GRCh38")) {
String url = configuration.getDownload().getConservation().getHost() + "/hg38";
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons100way/hg38.100way.phastCons/chr" + chromosomes[i]
+ ".phastCons100way.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons100way.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP100way/hg38.100way.phyloP100way/chr" + chromosomes[i] + ".phyloP100way.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP100way.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phyloPVersion.json"));
// String phastConsUrl = url + "/phastCons7way/hg38.phastCons100way.wigFix.gz";
// Path outFile = conservationFolder.resolve("phastCons").resolve("hg38.phastCons100way.wigFix.gz");
// downloadFile(phastConsUrl, outFile.toString());
//
// String phyloPUrl = url + "/phyloP7way/hg38.phyloP100way.wigFix.gz";
// outFile = conservationFolder.resolve("phylop").resolve("hg38.phyloP100way.wigFix.gz");
// downloadFile(phyloPUrl, outFile.toString());
}
}
if (species.getScientificName().equals("Mus musculus")) {
makeDir(conservationFolder);
makeDir(conservationFolder.resolve("phastCons"));
makeDir(conservationFolder.resolve("phylop"));
String url = configuration.getDownload().getConservation().getHost() + "/mm10";
String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
"15", "16", "17", "18", "19", "X", "Y", "M", };
List<String> phastconsUrls = new ArrayList<>(chromosomes.length);
List<String> phyloPUrls = new ArrayList<>(chromosomes.length);
for (int i = 0; i < chromosomes.length; i++) {
String phastConsUrl = url + "/phastCons60way/mm10.60way.phastCons/chr" + chromosomes[i] + ".phastCons60way.wigFix.gz";
downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i]
+ ".phastCons60way.wigFix.gz").toString());
phastconsUrls.add(phastConsUrl);
String phyloPUrl = url + "/phyloP60way/mm10.60way.phyloP60way/chr" + chromosomes[i] + ".phyloP60way.wigFix.gz";
downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i]
+ ".phyloP60way.wigFix.gz").toString());
phyloPUrls.add(phyloPUrl);
}
saveVersionData(EtlCommons.CONSERVATION_DATA, PHASTCONS_NAME, null, getTimeStamp(), phastconsUrls,
conservationFolder.resolve("phastConsVersion.json"));
saveVersionData(EtlCommons.CONSERVATION_DATA, PHYLOP_NAME, null, getTimeStamp(), phyloPUrls,
conservationFolder.resolve("phastConsVersion.json"));
}
} |
python | def cluster_count_key_in_slots(self, slot):
"""Return the number of local keys in the specified hash slot."""
if not isinstance(slot, int):
raise TypeError("Expected slot to be of type int, got {}"
.format(type(slot)))
return self.execute(b'CLUSTER', b'COUNTKEYSINSLOT', slot) |
python | def token_cache_pkgs(source=None, release=None):
"""Determine additional packages needed for token caching
@param source: source string for charm
@param release: release of OpenStack currently deployed
@returns List of package to enable token caching
"""
packages = []
if enable_memcache(source=source, release=release):
packages.extend(['memcached', 'python-memcache'])
return packages |
java | public byte[] generateSeed(int length)
{
for (SeedGenerator generator : GENERATORS)
{
try
{
byte[] seed = generator.generateSeed(length);
try
{
boolean debug = System.getProperty(DEBUG_PROPERTY, "false").equals("true");
if (debug)
{
String seedString = BinaryUtils.convertBytesToHexString(seed);
System.out.println(seed.length + " bytes of seed data acquired from " + generator + ":");
System.out.println(" " + seedString);
}
}
catch (SecurityException ex)
{
// Ignore, means we can't read the property so just default to false.
}
return seed;
}
catch (SeedException ex)
{
// Ignore and try the next generator...
}
}
// This shouldn't happen as at least one the generators should be
// able to generate a seed.
throw new IllegalStateException("All available seed generation strategies failed.");
} |
python | def parent_link_extent(self):
# type: () -> int
'''
Get the extent of the parent of this entry if it has one.
Parameters:
None.
Returns:
The logical block number of the parent if it exists.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('Rock Ridge extension not yet initialized')
if self.dr_entries.pl_record is not None:
return self.dr_entries.pl_record.parent_log_block_num
if self.ce_entries.pl_record is not None:
return self.ce_entries.pl_record.parent_log_block_num
raise pycdlibexception.PyCdlibInternalError('Asked for parent extent for non-existent parent record') |
java | @SuppressWarnings("unchecked")
public static <T extends Enum<T>> T likeValueOf(Class<T> enumClass, Object value) {
if(value instanceof CharSequence) {
value = value.toString().trim();
}
final Field[] fields = ReflectUtil.getFields(enumClass);
final Enum<?>[] enums = enumClass.getEnumConstants();
String fieldName;
for (Field field : fields) {
fieldName = field.getName();
if (field.getType().isEnum() || "ENUM$VALUES".equals(fieldName) || "ordinal".equals(fieldName)) {
// skip some special fields
continue;
}
for (Enum<?> enumObj : enums) {
if(ObjectUtil.equal(value, ReflectUtil.getFieldValue(enumObj, field))) {
return (T) enumObj;
}
}
}
return null;
} |
java | private static String convertToValidJavaClassname(String inName) {
if (inName == null) return "_";
if (inName.startsWith("scriptdef_")) inName = inName.substring(10);
if (inName.equals("")) return "_";
StringBuilder output = new StringBuilder(inName.length());
boolean firstChar = true;
for (int i = 0; i < inName.length(); ++i) {
char ch = inName.charAt(i);
if (firstChar && !Character.isJavaIdentifierStart(ch)) {
ch = '_';
} else if (!firstChar
&& !(Character.isJavaIdentifierPart(ch) || ch == '.')) {
ch = '_';
}
firstChar = (ch == '.');
output.append(ch);
}
return output.toString();
} |
java | public static CommerceWarehouseItem findByCWI_CPIU(
long commerceWarehouseId, String CPInstanceUuid)
throws com.liferay.commerce.exception.NoSuchWarehouseItemException {
return getPersistence()
.findByCWI_CPIU(commerceWarehouseId, CPInstanceUuid);
} |
python | def next_frame_stochastic_discrete_range(rhp):
"""Next frame stochastic discrete tuning grid."""
rhp.set_float("learning_rate_constant", 0.001, 0.01)
rhp.set_float("dropout", 0.2, 0.6)
rhp.set_int("filter_double_steps", 3, 5)
rhp.set_discrete("hidden_size", [64, 96, 128])
rhp.set_discrete("bottleneck_bits", [32, 64, 128, 256])
rhp.set_discrete("video_num_target_frames", [4])
rhp.set_float("bottleneck_noise", 0.0, 0.2) |
python | def getall(self, key):
"""
Return a list of all values matching the key (may be an empty list)
"""
result = []
for k, v in self._items:
if key == k:
result.append(v)
return result |
java | void writePayload(ByteArrayOutputStream os)
{
// RF frames don't use NPDU length field
os.write(addInfo[ADDINFO_RFMEDIUM] != null ? 0 : data.length - 1);
os.write(data, 0, data.length);
} |
java | public InstanceFailoverGroupInner get(String resourceGroupName, String locationName, String failoverGroupName) {
return getWithServiceResponseAsync(resourceGroupName, locationName, failoverGroupName).toBlocking().single().body();
} |
java | protected String buildSnapshotBody(CreateSnapshotTaskParameters taskParams) {
CreateSnapshotBridgeParameters bridgeParams =
new CreateSnapshotBridgeParameters(dcHost, dcPort, dcStoreId,
taskParams.getSpaceId(),
taskParams.getDescription(),
taskParams.getUserEmail(),
bridgeMemberId);
return bridgeParams.serialize();
} |
java | private JTextField getUrlText()
{
if (urlText == null)
{
urlText = new JTextField();
urlText.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(java.awt.event.ActionEvent e)
{
displayURL(urlText.getText());
}
});
}
return urlText;
} |
java | protected void parseDefaultValues( TokenStream tokens,
JcrPropertyDefinitionTemplate propDefn ) {
if (tokens.canConsume('=')) {
List<String> defaultValues = parseStringList(tokens);
if (!defaultValues.isEmpty()) {
propDefn.setDefaultValues(values(defaultValues));
}
}
} |
python | def regex(self, expression, ignore_case=False, options=None):
""" A query to check if a field matches a given regular expression
:param ignore_case: Whether or not to ignore the case (setting this to True is the same as setting the 'i' option)
:param options: A string of option characters, as per the MongoDB $regex operator (e.g. "imxs")
**Example**: ``session.query(Spell).filter(Spells.name.regex(r'^abra[a-z]*cadabra$', ignore_case=True))``
"""
regex = {'$regex' : expression}
if options is not None:
regex['$options'] = options
if ignore_case:
regex['$options'] = regex.get('$options', '') + 'i'
expr = {
self : regex
}
return QueryExpression(expr) |
python | def AnularCertificacion(self, coe):
"Anular liquidación activa"
ret = self.client.cgSolicitarAnulacion(
auth={
'token': self.Token, 'sign': self.Sign,
'cuit': self.Cuit, },
coe=coe,
)
ret = ret['oReturn']
self.__analizar_errores(ret)
self.Estado = ret.get('estadoCertificado', "")
return self.COE |
java | public static MemcachedClient create(final PippoSettings settings) {
String host = settings.getString(HOST, "localhost:11211");
String prot = settings.getString(PROT, "BINARY");
CommandFactory protocol;
switch (prot) {
case "BINARY":
protocol = new BinaryCommandFactory();
break;
case "TEXT":
protocol = new TextCommandFactory();
break;
default:
protocol = new BinaryCommandFactory();
break;
}
String user = settings.getString(USER, "");
String pass = settings.getString(PASS, "");
List<String> autM = settings.getStrings(AUTM);
String[] mechanisms = autM.toArray(new String[autM.size()]);
return create(host, protocol, user, pass, mechanisms);
} |
java | @NotNull
public ResultSetFuture executeAsync(@NotNull String keyspace, @NotNull Statement statement) {
Timer time = getMetricsFactory().getTimer(MetricsType.CASSANDRA_EXECUTE_ASYNC.name());
getMetricsFactory().getCounter(MetricsType.CASSANDRA_PROCESSING_QUERIES.name()).inc();
ResultSetFuture resultSetFuture = getOrCreateSession(keyspace).executeAsync(statement);
String query = (statement instanceof BoundStatement) ? ((BoundStatement) statement).preparedStatement().getQueryString() : statement.toString();
Futures.addCallback(resultSetFuture, new StatementExecutionCallback(keyspace, query));
monitorFuture(time, resultSetFuture);
return resultSetFuture;
} |
java | public static <T> SortedSet<HBaseColumn<T>> asSortedSet(T value, long version) {
return asSortedSet(value, version, DESC_HBASE_COLUMN_COMPARATOR);
} |
python | def login_required(func):
'''decorator describing User methods that need to be logged in'''
def ret(obj, *args, **kw):
if not hasattr(obj, 'sessionToken'):
message = '%s requires a logged-in session' % func.__name__
raise ResourceRequestLoginRequired(message)
return func(obj, *args, **kw)
return ret |
java | private Point calculatePointOnCircle(final float centerX, final float centerY,
final float radius, final float degrees) {
// for trigonometry, 0 is pointing east, so subtract 90
// compass degrees are the wrong way round
final double dblRadians = Math.toRadians(-degrees + 90);
final int intX = (int) (radius * Math.cos(dblRadians));
final int intY = (int) (radius * Math.sin(dblRadians));
return new Point((int) centerX + intX, (int) centerY - intY);
} |
python | def dataset_prepare(self):
'''Subcommand of dataset for processing a corpus into a dataset'''
# Initialize the prepare subcommand's argparser
parser = argparse.ArgumentParser(description='Preprocess a raw dialogue corpus into a dsrt dataset')
self.init_dataset_prepare_args(parser)
# Parse the args we got
args = parser.parse_args(sys.argv[3:])
args.config = ConfigurationLoader(args.config).load().data_config
print(CLI_DIVIDER + '\n')
Preprocessor(**vars(args)).run() |
python | def get_columns(context, query):
"""Get list of cartoframes.columns.Column"""
table_info = context.sql_client.send(query)
if 'fields' in table_info:
return Column.from_sql_api_fields(table_info['fields'])
return None |
java | private DB openStorage(StorageType storageType) {
DB storage = storageRegistry.get(storageType);
if(!isOpenStorage(storage)) {
DBMaker m;
if(storageType == StorageType.PRIMARY_STORAGE || storageType == StorageType.SECONDARY_STORAGE) {
//main storage
Path rootPath = getRootPath(storageName);
try {
createDirectoryIfNotExists(rootPath);
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
m = DBMaker.newFileDB(new File(rootPath.toFile(), storageType.toString()));
}
else if(storageType == StorageType.TEMP_PRIMARY_STORAGE || storageType == StorageType.TEMP_SECONDARY_STORAGE) {
//temporary storage
m = DBMaker.newTempFileDB().deleteFilesAfterClose();
}
else {
throw new IllegalArgumentException("Unsupported StorageType.");
}
if(storageConfiguration.isCompressed()) {
m = m.compressionEnable();
}
boolean permitCaching = storageType == StorageType.PRIMARY_STORAGE || storageType == StorageType.TEMP_PRIMARY_STORAGE;
if(permitCaching && storageConfiguration.getCacheSize()>0) {
m = m.cacheLRUEnable().cacheSize(storageConfiguration.getCacheSize()) ;
}
else {
m = m.cacheDisable();
}
if(storageConfiguration.isAsynchronous()) {
m = m.asyncWriteEnable();
}
m = m.transactionDisable();
m = m.closeOnJvmShutdown();
storage = m.make();
storageRegistry.put(storageType, storage);
}
return storage;
} |
python | def get_track_by_mbid(self, mbid):
"""Looks up a track by its MusicBrainz ID"""
params = {"mbid": mbid}
doc = _Request(self, "track.getInfo", params).execute(True)
return Track(_extract(doc, "name", 1), _extract(doc, "name"), self) |
python | def predict(self, h=5):
""" Makes forecast with the estimated model
Parameters
----------
h : int (default : 5)
How many steps ahead would you like to forecast?
Returns
----------
- pd.DataFrame with predictions
"""
if self.latent_variables.estimated is False:
raise Exception("No latent variables estimated!")
else:
y_holder = self.y.copy() # holds past data and predicted data to create AR matrix
full_X = self.X.copy()
full_X = np.append(full_X,np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
Z = full_X
for step in range(h):
a, P = self._forecast_model(self.latent_variables.get_z_values(),Z,step)
new_value = np.dot(Z[-1,:],a[:,self.y.shape[0]+step])
y_holder = np.append(y_holder, new_value)
Z = np.append(Z, np.array([np.append(1.0, y_holder[-self.ar:][::-1])]), axis=0)
date_index = self.shift_dates(h)
result = pd.DataFrame(y_holder[-h:])
result.rename(columns={0:self.y_name}, inplace=True)
result.index = date_index[-h:]
return result |
java | private KafkaMsgConsumer getKafkaConsumer(String consumerGroupId,
boolean consumeFromBeginning) {
KafkaMsgConsumer kafkaConsumer = cacheConsumers.get(consumerGroupId);
if (kafkaConsumer == null) {
kafkaConsumer = _newKafkaConsumer(consumerGroupId, consumeFromBeginning);
KafkaMsgConsumer temp = cacheConsumers.putIfAbsent(consumerGroupId, kafkaConsumer);
if (temp != null) {
kafkaConsumer.destroy();
kafkaConsumer = temp;
}
}
return kafkaConsumer;
} |
java | public ZonedDateTime getStartZonedDateTime() {
if (zonedStartDateTime == null) {
zonedStartDateTime = ZonedDateTime.of(startDate, startTime, zoneId);
}
return zonedStartDateTime;
} |
java | public String build() {
StringBuilder scriptBuilder = new StringBuilder();
StringBuilder scriptBody = new StringBuilder();
String importStmt = "import ";
try {
if (scriptCode.contains(importStmt)) {
BufferedReader reader = new BufferedReader(new StringReader(scriptCode));
String line;
while ((line = reader.readLine()) != null) {
if (line.trim().startsWith(importStmt)) {
scriptBuilder.append(line);
scriptBuilder.append("\n");
} else {
scriptBody.append((scriptBody.length() == 0 ? "" : "\n"));
scriptBody.append(line);
}
}
} else {
scriptBody.append(scriptCode);
}
} catch (IOException e) {
throw new CitrusRuntimeException("Failed to construct script from template", e);
}
scriptBuilder.append(scriptHead);
scriptBuilder.append(scriptBody.toString());
scriptBuilder.append(scriptTail);
return scriptBuilder.toString();
} |
java | @Override
public QueryResult query(final Query query, final TimeUnit timeUnit) {
Call<QueryResult> call = null;
if (query instanceof BoundParameterQuery) {
BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query;
call = this.influxDBService.query(query.getDatabase(),
TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded(),
boundParameterQuery.getParameterJsonWithUrlEncoded());
} else {
call = this.influxDBService.query(query.getDatabase(),
TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded());
}
return executeQuery(call);
} |
java | private <R> R doWithWriteLock(Action<K, V, R> action) {
long stamp = sl.writeLock();
try {
return action.doWith(commonCache);
} finally {
sl.unlockWrite(stamp);
}
} |
java | @Override
public ZipImporter importFrom(final InputStream stream) throws ArchiveImportException {
return importFrom(stream, Filters.includeAll());
} |
java | public void registerProblem(GitHubRepositoryName repo, Throwable throwable) {
if (throwable == null) {
return;
}
registerProblem(repo, throwable.getMessage());
} |
python | def zext(self, width):
"""Zero-extends a word to a larger width. It is an error to specify
a smaller width (use ``extract`` instead to crop off the extra bits).
"""
width = operator.index(width)
if width < self._width:
raise ValueError('zero extending to a smaller width')
return BinWord(width, self._val) |
java | @Override
public DeleteFilterResult deleteFilter(DeleteFilterRequest request) {
request = beforeClientExecution(request);
return executeDeleteFilter(request);
} |
python | def dodging(bar):
"""Return a context manager which erases the bar, lets you output things, and then redraws the bar.
It's reentrant.
"""
class ShyProgressBar(object):
"""Context manager that implements a progress bar that gets out of the way"""
def __enter__(self):
"""Erase the progress bar so bits of disembodied progress bar don't get scrolled up the terminal."""
# My terminal has no status line, so we make one manually.
bar._is_dodging += 1 # Increment before calling erase(), which
# calls dodging() again.
if bar._is_dodging <= 1: # It *was* 0.
bar.erase()
def __exit__(self, type, value, tb):
"""Redraw the last saved state of the progress bar."""
if bar._is_dodging == 1: # Can't decrement yet; write() could
# read it.
# This is really necessary only because we monkeypatch
# stderr; the next test is about to start and will redraw
# the bar.
with bar._at_last_line():
bar.stream.write(bar.last)
bar.stream.flush()
bar._is_dodging -= 1
return ShyProgressBar() |
java | @Override
protected IncomingDataPoint getDataPointFromString(final TSDB tsdb,
final String[] words) {
final RollUpDataPoint dp = new RollUpDataPoint();
final String interval_agg = words[TelnetIndex.INTERVAL_AGG.ordinal()];
String interval = null;
String temporal_agg = null;
String spatial_agg = null;
// if the interval_agg has a - in it, then it's an interval. If there's a :
// then it is both. If no dash or colon then it's just a spatial agg.
final String[] interval_parts = interval_agg.split(":");
final int dash = interval_parts[0].indexOf("-");
if (dash > -1) {
interval = interval_parts[0].substring(0,dash);
temporal_agg = interval_parts[0].substring(dash + 1);
} else if (interval_parts.length == 1) {
spatial_agg = interval_parts[0];
}
if (interval_parts.length > 1) {
spatial_agg = interval_parts[1];
}
dp.setInterval(interval);
dp.setAggregator(temporal_agg);
dp.setGroupByAggregator(spatial_agg);
dp.setMetric(words[TelnetIndex.METRIC.ordinal()]);
if (words[TelnetIndex.TIMESTAMP.ordinal()].contains(".")) {
dp.setTimestamp(Tags.parseLong(words[TelnetIndex.TIMESTAMP.ordinal()]
.replace(".", "")));
} else {
dp.setTimestamp(Tags.parseLong(words[TelnetIndex.TIMESTAMP.ordinal()]));
}
dp.setValue(words[TelnetIndex.VALUE.ordinal()]);
final HashMap<String, String> tags = new HashMap<String, String>();
for (int i = TelnetIndex.TAGS.ordinal(); i < words.length; i++) {
if (!words[i].isEmpty()) {
Tags.parse(tags, words[i]);
}
}
dp.setTags(tags);
return dp;
} |
python | def simxSetBooleanParameter(clientID, paramIdentifier, paramValue, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
return c_SetBooleanParameter(clientID, paramIdentifier, paramValue, operationMode) |
java | Object getClassInstance() throws UtilEvalError {
if (this.classInstance != null)
return this.classInstance;
if (this.classStatic != null
// || (getParent()!=null && getParent().classStatic != null)
)
throw new UtilEvalError(
"Can't refer to class instance from static context.");
else
throw new InterpreterError(
"Can't resolve class instance 'this' in: " + this);
} |
java | public static BitStore asStore(boolean[] bits) {
if (bits == null) throw new IllegalArgumentException("null bits");
return new BooleansBitStore(bits, 0, bits.length, true);
} |
java | public void marshall(PutMailboxPermissionsRequest putMailboxPermissionsRequest, ProtocolMarshaller protocolMarshaller) {
if (putMailboxPermissionsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(putMailboxPermissionsRequest.getOrganizationId(), ORGANIZATIONID_BINDING);
protocolMarshaller.marshall(putMailboxPermissionsRequest.getEntityId(), ENTITYID_BINDING);
protocolMarshaller.marshall(putMailboxPermissionsRequest.getGranteeId(), GRANTEEID_BINDING);
protocolMarshaller.marshall(putMailboxPermissionsRequest.getPermissionValues(), PERMISSIONVALUES_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def _f90complex(self, value):
"""Return a Fortran 90 representation of a complex number."""
return '({0:{fmt}}, {1:{fmt}})'.format(value.real, value.imag,
fmt=self.float_format) |
python | def move_point_cat(point, ipoint, to_clust, from_clust, cl_attr_freq,
membship, centroids):
"""Move point between clusters, categorical attributes."""
membship[to_clust, ipoint] = 1
membship[from_clust, ipoint] = 0
# Update frequencies of attributes in cluster.
for iattr, curattr in enumerate(point):
to_attr_counts = cl_attr_freq[to_clust][iattr]
from_attr_counts = cl_attr_freq[from_clust][iattr]
# Increment the attribute count for the new "to" cluster
to_attr_counts[curattr] += 1
current_attribute_value_freq = to_attr_counts[curattr]
current_centroid_value = centroids[to_clust][iattr]
current_centroid_freq = to_attr_counts[current_centroid_value]
if current_centroid_freq < current_attribute_value_freq:
# We have incremented this value to the new mode. Update the centroid.
centroids[to_clust][iattr] = curattr
# Decrement the attribute count for the old "from" cluster
from_attr_counts[curattr] -= 1
old_centroid_value = centroids[from_clust][iattr]
if old_centroid_value == curattr:
# We have just removed a count from the old centroid value. We need to
# recalculate the centroid as it may no longer be the maximum
centroids[from_clust][iattr] = get_max_value_key(from_attr_counts)
return cl_attr_freq, membship, centroids |
java | @Override
public void eSet(int featureID, Object newValue)
{
switch (featureID)
{
case SarlPackage.SARL_BEHAVIOR_UNIT__NAME:
setName((JvmParameterizedTypeReference)newValue);
return;
case SarlPackage.SARL_BEHAVIOR_UNIT__GUARD:
setGuard((XExpression)newValue);
return;
case SarlPackage.SARL_BEHAVIOR_UNIT__EXPRESSION:
setExpression((XExpression)newValue);
return;
}
super.eSet(featureID, newValue);
} |
java | public Token scanCharacterLiteral() {
int start = pos;
pos++;
char c = input.charAt(pos++);
if (c == '\\') {
// escape code
switch (input.charAt(pos++)) {
case 'b':
c = '\b';
break;
case 't':
c = '\t';
break;
case 'n':
c = '\n';
break;
case 'f':
c = '\f';
break;
case 'r':
c = '\r';
break;
case '"':
c = '\"';
break;
case '\'':
c = '\'';
break;
case '\\':
c = '\\';
break;
default:
syntaxError("unrecognised escape character", pos);
}
}
if (input.charAt(pos) != '\'') {
syntaxError("unexpected end-of-character", pos);
}
pos = pos + 1;
return new Token(Token.Kind.CharLiteral, input.substring(start, pos),
start);
} |
python | def launched():
"""Test whether the current python environment is the correct lore env.
:return: :any:`True` if the environment is launched
:rtype: bool
"""
if not PREFIX:
return False
return os.path.realpath(sys.prefix) == os.path.realpath(PREFIX) |
java | public static <OPERATION> SubdocOperationResult<OPERATION> createError(String path, OPERATION operation, ResponseStatus status, CouchbaseException exception) {
return new SubdocOperationResult<OPERATION>(path, operation, status, exception);
} |
python | def mom_recurse(self, idxi, idxj, idxk):
"""Backend mement main loop."""
rank_ = min(
chaospy.bertran.rank(idxi, self.dim),
chaospy.bertran.rank(idxj, self.dim),
chaospy.bertran.rank(idxk, self.dim)
)
par, axis0 = chaospy.bertran.parent(idxk, self.dim)
gpar, _ = chaospy.bertran.parent(par, self.dim, axis0)
idxi_child = chaospy.bertran.child(idxi, self.dim, axis0)
oneup = chaospy.bertran.child(0, self.dim, axis0)
out1 = self.mom_111(idxi_child, idxj, par)
out2 = self.mom_111(
chaospy.bertran.child(oneup, self.dim, axis0), par, par)
for k in range(gpar, idxk):
if chaospy.bertran.rank(k, self.dim) >= rank_:
out1 -= self.mom_111(oneup, k, par) \
* self.mom_111(idxi, idxj, k)
out2 -= self.mom_111(oneup, par, k) \
* self(oneup, k, par)
return out1 / out2 |
python | def fetch_one(self, *args, **kwargs):
"""
return one document which match the structure of the object
`fetch_one()` takes the same arguments than the the pymongo.collection.find method.
If multiple documents are found, raise a MultipleResultsFound exception.
If no document is found, return None
The query is launch against the db and collection of the object.
"""
bson_obj = self.fetch(*args, **kwargs)
count = bson_obj.count()
if count > 1:
raise MultipleResultsFound("%s results found" % count)
elif count == 1:
# return self(bson_obj.next(), fetched_fields=kwargs.get("projection"))
return next(bson_obj) |