language | func_code_string
---|---
java
|
public void setEntityId(String v) {
if (DictTerm_Type.featOkTst && ((DictTerm_Type)jcasType).casFeat_entityId == null)
jcasType.jcas.throwFeatMissing("entityId", "ch.epfl.bbp.uima.types.DictTerm");
jcasType.ll_cas.ll_setStringValue(addr, ((DictTerm_Type)jcasType).casFeatCode_entityId, v);}
|
java
|
public DataObjectModel hide(String... properties)
{
DataObjectModel view = clone();
view.propertyList = new ArrayList<String>();
view.propertyList.addAll(this.propertyList);
for (String property : properties)
{
view.propertyList.remove(property);
}
return view;
}
|
java
|
public void selectByValue(String value) {
getDispatcher().beforeSelect(this, value);
new Select(getElement()).selectByValue(value);
if (Config.getBoolConfigProperty(ConfigProperty.ENABLE_GUI_LOGGING)) {
logUIActions(UIActions.SELECTED, value);
}
getDispatcher().afterSelect(this, value);
}
|
java
|
protected void doHandleTargetedRequest(final Request request) {
// Check for file id
String fileId = request.getParameter(FILE_UPLOAD_ID_KEY);
if (fileId == null) {
throw new SystemException("No file id provided for content request.");
}
// Check valid file id
FileWidgetUpload file = getFile(fileId);
if (file == null) {
throw new SystemException("Invalid file id [" + fileId + "].");
}
// Check if thumb nail requested
boolean thumbNail = request.getParameter(FILE_UPLOAD_THUMB_NAIL_KEY) != null;
if (thumbNail) {
doHandleThumbnailRequest(file);
} else {
doHandleFileContentRequest(file);
}
}
|
python
|
def shutdown(self):
    """
    Called by the server to commence a graceful shutdown.
    """
    if self.cycle is None or self.cycle.response_complete:
        self.transport.close()
    else:
        self.cycle.keep_alive = False
|
python
|
def _get_api_params(api_url=None,
                    page_id=None,
                    api_key=None,
                    api_version=None):
    '''
    Retrieve the API params from the config file.
    '''
    statuspage_cfg = __salt__['config.get']('statuspage')
    if not statuspage_cfg:
        statuspage_cfg = {}
    return {
        'api_url': api_url or statuspage_cfg.get('api_url') or BASE_URL,  # optional
        'api_page_id': page_id or statuspage_cfg.get('page_id'),  # mandatory
        'api_key': api_key or statuspage_cfg.get('api_key'),  # mandatory
        'api_version': api_version or statuspage_cfg.get('api_version') or DEFAULT_VERSION
    }
|
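The function above resolves each parameter through a short-circuit precedence chain: explicit argument first, then the `statuspage` config section, then a hard-coded default. A minimal Python sketch of that chain, with a plain dict standing in for Salt's `__salt__['config.get']` and hypothetical values:

# Sketch of the same precedence chain: argument, then config, then default.
# The constants and config dict are hypothetical stand-ins for the Salt setup.
BASE_URL = 'https://api.statuspage.io'

def resolve(arg, cfg, key, default=None):
    """Return the first truthy value: explicit argument, config entry, default."""
    return arg or cfg.get(key) or default

cfg = {'page_id': 'abc123', 'api_key': 'secret'}
print(resolve(None, cfg, 'api_url', BASE_URL))  # no arg, no config -> default
print(resolve('xyz', cfg, 'page_id'))           # explicit argument wins -> 'xyz'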
java
|
public static BloomSpecification computeBloomSpec(int maxBucketsPerElement, double maxFalsePosProb)
{
assert maxBucketsPerElement >= 1;
assert maxBucketsPerElement <= probs.length - 1;
int maxK = probs[maxBucketsPerElement].length - 1;
// Handle the trivial cases
if(maxFalsePosProb >= probs[minBuckets][minK]) {
return new BloomSpecification(2, optKPerBuckets[2]);
}
if (maxFalsePosProb < probs[maxBucketsPerElement][maxK]) {
throw new UnsupportedOperationException(String.format("Unable to satisfy %s with %s buckets per element",
maxFalsePosProb, maxBucketsPerElement));
}
// First find the minimal required number of buckets:
int bucketsPerElement = 2;
int K = optKPerBuckets[2];
while(probs[bucketsPerElement][K] > maxFalsePosProb){
bucketsPerElement++;
K = optKPerBuckets[bucketsPerElement];
}
// Now that the number of buckets is sufficient, see if we can relax K
// without losing too much precision.
while(probs[bucketsPerElement][K - 1] <= maxFalsePosProb){
K--;
}
return new BloomSpecification(K, bucketsPerElement);
}
|
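The `probs` and `optKPerBuckets` arrays consulted above are precomputed lookup tables; the standard Bloom-filter approximation p ≈ (1 − e^(−k/c))^k, for k hash functions and c buckets (bits) per element, is the usual way such tables are built. A hedged Python sketch of generating one (not necessarily the library's actual table):

import math

def false_positive_prob(buckets_per_element, k):
    """Standard Bloom-filter approximation: p = (1 - e^(-k/c))^k."""
    return (1.0 - math.exp(-k / buckets_per_element)) ** k

max_buckets, max_k = 15, 8
# probs[c][k]; row 0 (c == 0) and column 0 (k == 0) are padding.
probs = [[1.0] * (max_k + 1)] + [
    [1.0] + [false_positive_prob(c, k) for k in range(1, max_k + 1)]
    for c in range(1, max_buckets + 1)
]
# For each bucket count, the k that minimizes the false-positive rate.
opt_k_per_buckets = [min(range(1, max_k + 1), key=lambda k: row[k]) for row in probs]
print(round(probs[10][7], 4))  # ~0.0082 for 10 buckets/element and k = 7
print(opt_k_per_buckets[10])   # 7, close to the theoretical k = c * ln(2)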
python
|
def greedy_merge(
        variant_sequences,
        min_overlap_size=MIN_VARIANT_SEQUENCE_ASSEMBLY_OVERLAP_SIZE):
    """
    Greedily merge overlapping sequences into longer sequences.
    Accepts a collection of VariantSequence objects and returns another
    collection of elongated variant sequences. The reads field of the
    returned VariantSequence object will contain reads which
    only partially overlap the full sequence.
    """
    merged_any = True
    while merged_any:
        variant_sequences, merged_any = greedy_merge_helper(
            variant_sequences,
            min_overlap_size=min_overlap_size)
    return variant_sequences
|
java
|
public Object processInvocation(final InterceptorContext context) throws Exception {
try {
Method method = this.method;
if (withContext) {
if (changeMethod) {
final Method oldMethod = context.getMethod();
context.setMethod(method);
try {
return method.invoke(interceptorInstance, context.getInvocationContext());
} finally {
context.setMethod(oldMethod);
}
} else {
return method.invoke(interceptorInstance, context.getInvocationContext());
}
} else {
method.invoke(interceptorInstance, null);
return context.proceed();
}
} catch (IllegalAccessException e) {
final IllegalAccessError n = new IllegalAccessError(e.getMessage());
n.setStackTrace(e.getStackTrace());
throw n;
} catch (InvocationTargetException e) {
throw Interceptors.rethrow(e.getCause());
}
}
|
java
|
public static GrayU8 labelToBinary(GrayS32 labelImage , GrayU8 binaryImage ) {
binaryImage = InputSanityCheck.checkDeclare(labelImage, binaryImage, GrayU8.class);
if( BoofConcurrency.USE_CONCURRENT ) {
ImplBinaryImageOps_MT.labelToBinary(labelImage, binaryImage);
} else {
ImplBinaryImageOps.labelToBinary(labelImage, binaryImage);
}
return binaryImage;
}
|
java
|
@SuppressWarnings("unchecked")
public static <T extends Sorter> T createSorter(final SortType type) {
switch (ObjectUtils.defaultIfNull(type, SortType.UNKONWN)) {
case BUBBLE_SORT:
return (T) new BubbleSort();
case COMB_SORT:
return (T) new CombSort();
case HEAP_SORT:
return (T) new HeapSort();
case INSERTION_SORT:
return (T) new InsertionSort();
case MERGE_SORT:
return (T) new MergeSort();
case QUICK_SORT:
return (T) new QuickSort();
case SELECTION_SORT:
return (T) new SelectionSort();
case SHELL_SORT:
return (T) new ShellSort();
default:
throw new IllegalArgumentException(String.format("The SortType (%1$s) is not supported by the %2$s!", type,
SorterFactory.class.getSimpleName()));
}
}
|
python
|
def locateChild(self, context, segments):
    """
    Return a statically defined child or a child defined by a site root
    plugin or an avatar from guard.
    """
    request = IRequest(context)
    webViewer = IWebViewer(self.store, None)
    childAndSegments = self.siteProduceResource(request, segments, webViewer)
    if childAndSegments is not None:
        return childAndSegments
    return NotFound
|
python
|
def store_mapping(self, path):
    """Store the current Id mappings into a TSV file."""
    with open(path, "w") as writer:
        for key, value in self.mapping.iteritems():
            writer.write("{}\t{}\n".format(key, value))
|
python
|
def file_transfer(
    ssh_conn,
    source_file,
    dest_file,
    file_system=None,
    direction="put",
    disable_md5=False,
    inline_transfer=False,
    overwrite_file=False,
):
    """Use Secure Copy or Inline (IOS-only) to transfer files to/from network devices.
    inline_transfer ONLY SUPPORTS TEXT FILES and will not support binary file transfers.
    return {
        'file_exists': boolean,
        'file_transferred': boolean,
        'file_verified': boolean,
    }
    """
    transferred_and_verified = {
        "file_exists": True,
        "file_transferred": True,
        "file_verified": True,
    }
    transferred_and_notverified = {
        "file_exists": True,
        "file_transferred": True,
        "file_verified": False,
    }
    nottransferred_but_verified = {
        "file_exists": True,
        "file_transferred": False,
        "file_verified": True,
    }
    if "cisco_ios" in ssh_conn.device_type or "cisco_xe" in ssh_conn.device_type:
        cisco_ios = True
    else:
        cisco_ios = False
    if not cisco_ios and inline_transfer:
        raise ValueError("Inline Transfer only supported for Cisco IOS/Cisco IOS-XE")
    scp_args = {
        "ssh_conn": ssh_conn,
        "source_file": source_file,
        "dest_file": dest_file,
        "direction": direction,
    }
    if file_system is not None:
        scp_args["file_system"] = file_system
    TransferClass = InLineTransfer if inline_transfer else FileTransfer
    with TransferClass(**scp_args) as scp_transfer:
        if scp_transfer.check_file_exists():
            if overwrite_file:
                if not disable_md5:
                    if scp_transfer.compare_md5():
                        return nottransferred_but_verified
                    else:
                        # File exists, you can overwrite it, MD5 is wrong (transfer file)
                        verifyspace_and_transferfile(scp_transfer)
                        if scp_transfer.compare_md5():
                            return transferred_and_verified
                        else:
                            raise ValueError(
                                "MD5 failure between source and destination files"
                            )
                else:
                    # File exists, you can overwrite it, but MD5 not allowed (transfer file)
                    verifyspace_and_transferfile(scp_transfer)
                    return transferred_and_notverified
            else:
                # File exists, but you can't overwrite it.
                if not disable_md5:
                    if scp_transfer.compare_md5():
                        return nottransferred_but_verified
                msg = "File already exists and overwrite_file is disabled"
                raise ValueError(msg)
        else:
            verifyspace_and_transferfile(scp_transfer)
            # File doesn't exist
            if not disable_md5:
                if scp_transfer.compare_md5():
                    return transferred_and_verified
                else:
                    raise ValueError("MD5 failure between source and destination files")
            else:
                return transferred_and_notverified
|
java
|
private PrintStream setUpNewPrintStream(final boolean visitorStream, final String... names) {
PrintStream out = System.out;
if (folder == null) {
if (visitorStream) {
out.println();
}
} else {
final File toWriteTo = new File(folder, buildFileName(names));
try {
if (usedFiles.containsKey(toWriteTo)) {
out = usedFiles.get(toWriteTo);
} else {
toWriteTo.delete();
out = new PrintStream(new FileOutputStream(toWriteTo, !visitorStream));
usedFiles.put(toWriteTo, out);
firstResult = true;
}
} catch (final FileNotFoundException e) {
throw new IllegalStateException(e);
}
}
return out;
}
|
java
|
public List<Rev> getPageRevisions(String pageTitle, int revisionLimit, final String rvprop, String queryParams) throws Exception {
if (StringUtils.isBlank(pageTitle)) {
throw new IllegalArgumentException("Please provide a valid page title.");
}
if (revisionLimit < 1 || revisionLimit > 500) {
throw new IllegalArgumentException("Revision limit must be > 0 and <= 500.");
}
if (StringUtils.isBlank(rvprop)) {
throw new IllegalArgumentException("Please provide a meaningful rvprop string.");
}
final Api api = getQueryResult("" +
"&prop=revisions" +
"&rvprop=" + rvprop +
"&rvlimit=" + revisionLimit + (queryParams != null ? queryParams : "") +
"&titles=" + normalizeTitle(pageTitle));
handleError(api);
final List<Page> pages = api.getQuery().getPages();
final List<Rev> pageRevisions = new LinkedList<>();
if (pages != null) {
Page page = pages.get(0);
if (page != null) {
pageRevisions.addAll(page.getRevisions());
}
}
return Collections.unmodifiableList(pageRevisions);
}
|
python
|
def suggestion_delete(self, account_id):
    """
    Remove the user with the given `account_id` from the follow suggestions.
    """
    account_id = self.__unpack_id(account_id)
    url = '/api/v1/suggestions/{0}'.format(str(account_id))
    self.__api_request('DELETE', url)
|
python
|
def Parse(self, stat, file_object, knowledge_base):
    """Parse the netgroup file and return User objects.
    Lines are of the form:
        group1 (-,user1,) (-,user2,) (-,user3,)
    Groups are ignored, we return users in lines that match the filter regexes,
    or all users in the file if no filters are specified.
    We assume usernames are in the default regex format specified in the adduser
    man page. Notably no non-ASCII characters.
    Args:
        stat: unused statentry
        file_object: netgroup VFSFile
        knowledge_base: unused
    Returns:
        rdf_client.User
    """
    _, _ = stat, knowledge_base
    lines = [
        l.strip()
        for l in utils.ReadFileBytesAsUnicode(file_object).splitlines()
    ]
    return self.ParseLines(lines)
|
java
|
public String convertBDDReserved2ToString(EDataType eDataType, Object instanceValue) {
return instanceValue == null ? null : instanceValue.toString();
}
|
python
|
def iter_lines(url_or_text, ext=None, mode='rt'):
    r""" Return an iterator over the lines of a file or URI response.
    >>> len(list(iter_lines('cats_and_dogs.txt')))
    263
    >>> len(list(iter_lines(list('abcdefgh'))))
    8
    >>> len(list(iter_lines('abc\n def\n gh\n')))
    3
    >>> len(list(iter_lines('abc\n def\n gh')))
    3
    >>> 20000 > len(list(iter_lines(BOOK_PATH))) > 200
    True
    """
    if url_or_text is None or not url_or_text:
        return []
    # url_or_text = 'https://www.fileformat.info/info/charset/UTF-8/list.htm'
    elif isinstance(url_or_text, (str, bytes, basestring)):
        if '\n' in url_or_text or '\r' in url_or_text:
            return StringIO(url_or_text)
        elif os.path.isfile(os.path.join(DATA_PATH, url_or_text)):
            return open(os.path.join(DATA_PATH, url_or_text), mode=mode)
        elif os.path.isfile(url_or_text):
            return open(os.path.join(url_or_text), mode=mode)
        if os.path.isdir(url_or_text):
            filepaths = [filemeta['path'] for filemeta in find_files(url_or_text, ext=ext)]
            return itertools.chain.from_iterable(map(open, filepaths))
        url = looks_like_url(url_or_text)
        if url:
            for i in range(3):  # retry the request up to 3 times
                try:
                    response = requests.get(url, stream=True, allow_redirects=True, timeout=5)
                    return response.iter_lines(decode_unicode=True)
                except requests.exceptions.RequestException:
                    if i == 2:
                        raise
        else:
            return StringIO(url_or_text)
    elif isinstance(url_or_text, (list, tuple)):
        # FIXME: make this lazy with chain and map so it doesn't gobble up RAM
        text = ''
        for s in url_or_text:
            text += '\n'.join(list(iter_lines(s, ext=ext, mode=mode))) + '\n'
        return iter_lines(text)
|
java
|
public synchronized void addElement(@NonNull T element) {
hashCached = false;
this.elementsMap.put(element.getLabel(), element);
this.elements.add(element);
}
|
java
|
@Override
public DisableImportFindingsForProductResult disableImportFindingsForProduct(DisableImportFindingsForProductRequest request) {
request = beforeClientExecution(request);
return executeDisableImportFindingsForProduct(request);
}
|
java
|
public void getElevationAlongPath(PathElevationRequest req, ElevationServiceCallback callback) {
this.callback = callback;
JSObject doc = (JSObject) getJSObject().eval("document");
doc.setMember(getVariableName(), this);
StringBuilder r = new StringBuilder(getVariableName())
.append(".")
.append("getElevationAlongPath(")
.append(req.getVariableName())
.append(", ")
.append("function(results, status) {document.")
.append(getVariableName())
.append(".processResponse(results, status);});");
getJSObject().eval(r.toString());
}
|
python
|
def write(graph, fileformat=None, filename=None):
    """
    A basic graph writer (to stdout) for any of the sources.
    This will write triples in turtle unless another rdflib
    serialization format is specified (e.g. fileformat='xml' for rdfxml).
    An optional file can be supplied instead of stdout.
    :return: None
    """
    if fileformat is None:
        fileformat = 'turtle'
    if filename is not None:
        with open(filename, 'wb') as filewriter:
            LOG.info("Writing triples in %s to %s", fileformat, filename)
            # rdflib serialize
            graph.serialize(filewriter, format=fileformat)
    else:
        print(graph.serialize(format=fileformat).decode())
    return
|
python
|
def rm_command(
    ignore_missing,
    star_silent,
    recursive,
    enable_globs,
    endpoint_plus_path,
    label,
    submission_id,
    dry_run,
    deadline,
    skip_activation_check,
    notify,
    meow,
    heartbeat,
    polling_interval,
    timeout,
    timeout_exit_code,
):
    """
    Executor for `globus rm`
    """
    endpoint_id, path = endpoint_plus_path
    client = get_client()
    # attempt to activate unless --skip-activation-check is given
    if not skip_activation_check:
        autoactivate(client, endpoint_id, if_expires_in=60)
    delete_data = DeleteData(
        client,
        endpoint_id,
        label=label,
        recursive=recursive,
        ignore_missing=ignore_missing,
        submission_id=submission_id,
        deadline=deadline,
        skip_activation_check=skip_activation_check,
        interpret_globs=enable_globs,
        **notify
    )
    if not star_silent and enable_globs and path.endswith("*"):
        # not intuitive, but `click.confirm(abort=True)` prints to stdout
        # unnecessarily, which we don't really want...
        # only do this check if stderr is a pty
        if (
            err_is_terminal()
            and term_is_interactive()
            and not click.confirm(
                'Are you sure you want to delete all files matching "{}"?'.format(path),
                err=True,
            )
        ):
            safeprint("Aborted.", write_to_stderr=True)
            click.get_current_context().exit(1)
    delete_data.add_item(path)
    if dry_run:
        formatted_print(delete_data, response_key="DATA", fields=[("Path", "path")])
        # exit safely
        return
    # Print task submission to stderr so that `-Fjson` is still correctly
    # respected, as it will be by `task wait`
    res = client.submit_delete(delete_data)
    task_id = res["task_id"]
    safeprint(
        'Delete task submitted under ID "{}"'.format(task_id), write_to_stderr=True
    )
    # do a `task wait` equivalent, including printing and correct exit status
    task_wait_with_io(
        meow,
        heartbeat,
        polling_interval,
        timeout,
        task_id,
        timeout_exit_code,
        client=client,
    )
|
python
|
def to_mask(self, size, start_name='start', stop_name='end'):
    """Construct a mask array where elements are True if they fall within
    features in the table.
    Parameters
    ----------
    size : int
        Size of chromosome/contig.
    start_name : string, optional
        Name of column with start coordinates.
    stop_name : string, optional
        Name of column with stop coordinates.
    Returns
    -------
    mask : ndarray, bool
    """
    m = np.zeros(size, dtype=bool)
    for start, stop in self[[start_name, stop_name]]:
        m[start-1:stop] = True
    return m
|
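Note the slice `m[start-1:stop]`: coordinates are treated as 1-based and stop-inclusive. A small standalone sketch of the same masking convention, using a plain list of intervals in place of the table rows:

import numpy as np

def intervals_to_mask(size, intervals):
    """Build a boolean mask from 1-based, stop-inclusive intervals,
    mirroring the m[start-1:stop] slice convention above."""
    m = np.zeros(size, dtype=bool)
    for start, stop in intervals:
        m[start - 1:stop] = True
    return m

mask = intervals_to_mask(10, [(2, 4), (7, 7)])
print(mask.astype(int))  # [0 1 1 1 0 0 1 0 0 0]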
python
|
def token(self):
    """
    Token given by Transbank for payment initialization url.
    Will raise PaymentError if an error occurs.
    """
    if not self._token:
        self._token = self.fetch_token()
        logger.payment(self)
    return self._token
|
python
|
def add_listener(self, callback, event_type=None):
    """Add a callback handler for events going to this room.
    Args:
        callback (func(room, event)): Callback called when an event arrives.
        event_type (str): The event_type to filter for.
    Returns:
        uuid.UUID: Unique id of the listener, can be used to identify the listener.
    """
    listener_id = uuid4()
    self.listeners.append(
        {
            'uid': listener_id,
            'callback': callback,
            'event_type': event_type
        }
    )
    return listener_id
|
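Each listener is stored as a dict keyed by a fresh UUID so it can later be identified or removed. A module-level sketch of the same registry pattern, paired with a hypothetical dispatch side (the real client's event dispatch lives elsewhere):

from uuid import uuid4

listeners = []

def add_listener(callback, event_type=None):
    """Register a callback, optionally filtered by event type."""
    listener_id = uuid4()
    listeners.append({'uid': listener_id, 'callback': callback,
                      'event_type': event_type})
    return listener_id

def dispatch(room, event):
    """Hypothetical dispatch: fire listeners whose filter matches."""
    for listener in listeners:
        if listener['event_type'] in (None, event.get('type')):
            listener['callback'](room, event)

uid = add_listener(lambda room, event: print(event['type']), 'm.room.message')
dispatch('!room:id', {'type': 'm.room.message'})  # prints m.room.message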
java
|
@Override
public DescribeSMBSettingsResult describeSMBSettings(DescribeSMBSettingsRequest request) {
request = beforeClientExecution(request);
return executeDescribeSMBSettings(request);
}
|
java
|
public static BigMoney ofMinor(CurrencyUnit currency, long amountMinor) {
MoneyUtils.checkNotNull(currency, "CurrencyUnit must not be null");
return BigMoney.of(currency, BigDecimal.valueOf(amountMinor, currency.getDecimalPlaces()));
}
|
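`ofMinor` reads the long as minor units (e.g. cents) and shifts it by the currency's decimal places, so `BigDecimal.valueOf(2395, 2)` yields 23.95. The same conversion in Python with `decimal.Decimal`:

from decimal import Decimal

def of_minor(amount_minor, decimal_places):
    """Convert a minor-unit amount (e.g. cents) to a major-unit Decimal,
    mirroring BigDecimal.valueOf(amountMinor, decimalPlaces)."""
    return Decimal(amount_minor).scaleb(-decimal_places)

print(of_minor(2395, 2))  # 23.95 (a 2-decimal currency such as USD)
print(of_minor(2395, 0))  # 2395  (a 0-decimal currency such as JPY)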
python
|
def ajax_recalculate_records(self):
    """Recalculate all AR records and dependencies
    - samples
    - templates
    - profiles
    - services
    - dependencies
    XXX: This function has grown too much and needs refactoring!
    """
    out = {}
    # The sorted records from the request
    records = self.get_records()
    for n, record in enumerate(records):
        # Mapping of client UID -> client object info
        client_metadata = {}
        # Mapping of contact UID -> contact object info
        contact_metadata = {}
        # Mapping of sample UID -> sample object info
        sample_metadata = {}
        # Mapping of sampletype UID -> sampletype object info
        sampletype_metadata = {}
        # Mapping of specification UID -> specification object info
        specification_metadata = {}
        # Mapping of specification UID -> list of service UIDs
        specification_to_services = {}
        # Mapping of service UID -> list of specification UIDs
        service_to_specifications = {}
        # Mapping of template UID -> template object info
        template_metadata = {}
        # Mapping of template UID -> list of service UIDs
        template_to_services = {}
        # Mapping of service UID -> list of template UIDs
        service_to_templates = {}
        # Mapping of profile UID -> list of service UIDs
        profile_to_services = {}
        # Mapping of service UID -> list of profile UIDs
        service_to_profiles = {}
        # Profile metadata for UI purposes
        profile_metadata = {}
        # Mapping of service UID -> service object info
        service_metadata = {}
        # mapping of service UID -> unmet service dependency UIDs
        unmet_dependencies = {}
        # Mappings of UID -> object of selected items in this record
        _clients = self.get_objs_from_record(record, "Client_uid")
        _contacts = self.get_objs_from_record(record, "Contact_uid")
        _specifications = self.get_objs_from_record(
            record, "Specification_uid")
        _templates = self.get_objs_from_record(record, "Template_uid")
        _samples = self.get_objs_from_record(record, "PrimaryAnalysisRequest_uid")
        _profiles = self.get_objs_from_record(record, "Profiles_uid")
        _services = self.get_objs_from_record(record, "Analyses")
        _sampletypes = self.get_objs_from_record(record, "SampleType_uid")
        # CLIENTS
        for uid, obj in _clients.iteritems():
            # get the client metadata
            metadata = self.get_client_info(obj)
            # remember the client metadata
            client_metadata[uid] = metadata
        # CONTACTS
        for uid, obj in _contacts.iteritems():
            # get the contact metadata
            metadata = self.get_contact_info(obj)
            # remember the contact metadata
            contact_metadata[uid] = metadata
        # SPECIFICATIONS
        for uid, obj in _specifications.iteritems():
            # get the specification metadata
            metadata = self.get_specification_info(obj)
            # remember the metadata of this specification
            specification_metadata[uid] = metadata
            # get the spec'd service UIDs
            service_uids = metadata["service_uids"]
            # remember a mapping of specification uid -> spec'd services
            specification_to_services[uid] = service_uids
            # remember a mapping of service uid -> specifications
            for service_uid in service_uids:
                if service_uid in service_to_specifications:
                    service_to_specifications[service_uid].append(uid)
                else:
                    service_to_specifications[service_uid] = [uid]
        # AR TEMPLATES
        for uid, obj in _templates.iteritems():
            # get the template metadata
            metadata = self.get_template_info(obj)
            # remember the template metadata
            template_metadata[uid] = metadata
            # profile from the template
            profile = obj.getAnalysisProfile()
            # add the profile to the other profiles
            if profile is not None:
                profile_uid = api.get_uid(profile)
                _profiles[profile_uid] = profile
            # get the template analyses
            # [{'partition': 'part-1', 'service_uid': '...'},
            #  {'partition': 'part-1', 'service_uid': '...'}]
            analyses = obj.getAnalyses() or []
            # get all UIDs of the template records
            service_uids = map(
                lambda rec: rec.get("service_uid"), analyses)
            # remember a mapping of template uid -> service
            template_to_services[uid] = service_uids
            # remember a mapping of service uid -> templates
            for service_uid in service_uids:
                # append service to services mapping
                service = self.get_object_by_uid(service_uid)
                # remember the template of all services
                if service_uid in service_to_templates:
                    service_to_templates[service_uid].append(uid)
                else:
                    service_to_templates[service_uid] = [uid]
                # remember the service metadata
                if service_uid not in service_metadata:
                    metadata = self.get_service_info(service)
                    service_metadata[service_uid] = metadata
        # PROFILES
        for uid, obj in _profiles.iteritems():
            # get the profile metadata
            metadata = self.get_profile_info(obj)
            # remember the profile metadata
            profile_metadata[uid] = metadata
            # get all services of this profile
            services = obj.getService()
            # get all UIDs of the profile services
            service_uids = map(api.get_uid, services)
            # remember all services of this profile
            profile_to_services[uid] = service_uids
            # remember a mapping of service uid -> profiles
            for service in services:
                # get the UID of this service
                service_uid = api.get_uid(service)
                # add the service to the other services
                _services[service_uid] = service
                # remember the profiles of this service
                if service_uid in service_to_profiles:
                    service_to_profiles[service_uid].append(uid)
                else:
                    service_to_profiles[service_uid] = [uid]
        # PRIMARY ANALYSIS REQUESTS
        for uid, obj in _samples.iteritems():
            # get the sample metadata
            metadata = self.get_sample_info(obj)
            # remember the sample metadata
            sample_metadata[uid] = metadata
        # SAMPLETYPES
        for uid, obj in _sampletypes.iteritems():
            # get the sampletype metadata
            metadata = self.get_sampletype_info(obj)
            # remember the sampletype metadata
            sampletype_metadata[uid] = metadata
        # SERVICES
        for uid, obj in _services.iteritems():
            # get the service metadata
            metadata = self.get_service_info(obj)
            # remember the services' metadata
            service_metadata[uid] = metadata
        # DEPENDENCIES
        for uid, obj in _services.iteritems():
            # get the dependencies of this service
            deps = get_service_dependencies_for(obj)
            # check for unmet dependencies
            for dep in deps["dependencies"]:
                # we use the UID to test for equality
                dep_uid = api.get_uid(dep)
                if dep_uid not in _services.keys():
                    if uid in unmet_dependencies:
                        unmet_dependencies[uid].append(
                            self.get_base_info(dep))
                    else:
                        unmet_dependencies[uid] = [self.get_base_info(dep)]
            # remember the dependencies in the service metadata
            service_metadata[uid].update({
                "dependencies": map(
                    self.get_base_info, deps["dependencies"]),
            })
        # Each key `n` (1,2,3...) contains the form data for one AR Add
        # column in the UI.
        # All relevant form data will be set according to this data.
        out[n] = {
            "client_metadata": client_metadata,
            "contact_metadata": contact_metadata,
            "sample_metadata": sample_metadata,
            "sampletype_metadata": sampletype_metadata,
            "specification_metadata": specification_metadata,
            "specification_to_services": specification_to_services,
            "service_to_specifications": service_to_specifications,
            "template_metadata": template_metadata,
            "template_to_services": template_to_services,
            "service_to_templates": service_to_templates,
            "profile_metadata": profile_metadata,
            "profile_to_services": profile_to_services,
            "service_to_profiles": service_to_profiles,
            "service_metadata": service_metadata,
            "unmet_dependencies": unmet_dependencies,
        }
    return out
|
python
|
def load_messages(self, directory, catalogue):
    """
    Loads translations found in a directory.
    @type directory: string
    @param directory: The directory to search
    @type catalogue: MessageCatalogue
    @param catalogue: The message catalogue to dump
    @raises: ValueError
    """
    if not os.path.isdir(directory):
        raise ValueError("{0} is not a directory".format(directory))
    for format, loader in list(self.loaders.items()):
        extension = "{0}.{1}".format(catalogue.locale, format)
        files = find_files(directory, "*.{0}".format(extension))
        for file in files:
            domain = file.split("/")[-1][:-1 * len(extension) - 1]
            catalogue.add_catalogue(
                loader.load(
                    file,
                    catalogue.locale,
                    domain))
|
python
|
def is_running(self) -> bool:
    """Specifies whether or not the thread is running"""
    return self._has_started and (
        self.is_alive() or
        self.completed_at is None or
        (datetime.utcnow() - self.completed_at).total_seconds() < 0.5
    )
|
python
|
def get_project_trend_graph_url(project, start, end):
    """Generates the graph and data URLs for a project's trend bar graph."""
    filename = get_project_trend_graph_filename(project, start, end)
    urls = {
        'graph_url': urlparse.urljoin(GRAPH_URL, filename + ".png"),
        'data_url': urlparse.urljoin(GRAPH_URL, filename + ".csv"),
    }
    return urls
|
java
|
protected Record getOrNullIfExpired(Record record, long now, boolean backup) {
if (!isRecordStoreExpirable()) {
return record;
}
if (record == null) {
return null;
}
Data key = record.getKey();
if (isLocked(key)) {
return record;
}
if (!isExpired(record, now, backup)) {
return record;
}
evict(key, backup);
if (!backup) {
doPostEvictionOperations(record);
}
return null;
}
|
python
|
def dl(self, ds_type:DatasetType=DatasetType.Valid)->DeviceDataLoader:
    "Returns the appropriate `DeviceDataLoader` for validation, training, or test (`ds_type`)."
    #TODO: refactor
    return (self.train_dl if ds_type == DatasetType.Train else
            self.test_dl if ds_type == DatasetType.Test else
            self.valid_dl if ds_type == DatasetType.Valid else
            self.single_dl if ds_type == DatasetType.Single else
            self.fix_dl)
|
java
|
public synchronized static void read(int fd, int length, OutputStream stream) throws IOException{
stream.write(read(fd, length));
}
|
python
|
def _new_replica(self, instance_id: int, is_master: bool, bls_bft: BlsBft) -> Replica:
    """
    Create a new replica with the specified parameters.
    """
    return self._replica_class(self._node, instance_id, self._config, is_master, bls_bft, self._metrics)
|
python
|
def _organize_step_scatter(step, inputs, remapped):
    """Add scattering information from inputs, remapping input variables.
    """
    def extract_scatter_id(inp):
        _, ns_var = inp.split("#")
        _, var = ns_var.split("/")
        return var
    scatter_local = {}
    if "scatter" in step.tool:
        assert step.tool["scatterMethod"] == "dotproduct", \
            "Only support dotproduct scattering in conversion to WDL"
        inp_val = collections.OrderedDict()
        for x in inputs:
            inp_val[x["id"]] = x["value"]
        for scatter_key in [extract_scatter_id(x) for x in step.tool["scatter"]]:
            scatter_key = remapped.get(scatter_key) or scatter_key
            val = inp_val[scatter_key]
            if len(val.split(".")) in [1, 2]:
                base_key = val
                attr = None
            elif len(val.split(".")) == 3:
                orig_location, record, attr = val.split(".")
                base_key = "%s.%s" % (orig_location, record)
            else:
                raise ValueError("Unexpected scatter input: %s" % val)
            local_ref = base_key.split(".")[-1] + "_local"
            scatter_local[base_key] = local_ref
            if attr:
                local_ref += ".%s" % attr
            inp_val[scatter_key] = local_ref
        inputs = [{"id": iid, "value": ival} for iid, ival in inp_val.items()]
    return inputs, [(v, k) for k, v in scatter_local.items()]
|
java
|
@Nonnull
public static HeightSpec abs (@Nonnegative final float fValue)
{
ValueEnforcer.isGT0 (fValue, "Value");
return new HeightSpec (EValueUOMType.ABSOLUTE, fValue);
}
|
python
|
def url_scan(self, url, opts, functionality, enabled_functionality, hide_progressbar):
    """
    This is the main function called whenever a URL needs to be scanned.
    This is called when a user specifies an individual CMS, or after CMS
    identification has taken place. This function is called for individual
    hosts specified by `-u` or for individual lines specified by `-U`.
    @param url: this parameter can either be a URL or a (url, host_header)
        tuple. The url, if a string, can be in the format of url + " " +
        host_header.
    @param opts: options object as returned by self._options().
    @param functionality: as returned by self._general_init.
    @param enabled_functionality: as returned by self._general_init.
    @param hide_progressbar: whether to hide the progressbar.
    @return: results dictionary.
    """
    self.out.debug('base_plugin_internal.url_scan -> %s' % str(url))
    if isinstance(url, tuple):
        url, host_header = url
    else:
        url, host_header = self._process_host_line(url)
    url = common.repair_url(url)
    if opts['follow_redirects']:
        url, host_header = self.determine_redirect(url, host_header, opts)
    need_sm = opts['enumerate'] in ['a', 'p', 't']
    if need_sm and (self.can_enumerate_plugins or self.can_enumerate_themes):
        scanning_method = opts['method']
        if not scanning_method:
            scanning_method = self.determine_scanning_method(url,
                opts['verb'], opts['timeout'], self._generate_headers(host_header))
    else:
        scanning_method = None
    enumerating_all = opts['enumerate'] == 'a'
    result = {}
    for enumerate in enabled_functionality:
        enum = functionality[enumerate]
        if common.shutdown:
            continue
        # Get the arguments for the function.
        kwargs = dict(enum['kwargs'])
        kwargs['url'] = url
        kwargs['hide_progressbar'] = hide_progressbar
        if enumerate in ['themes', 'plugins']:
            kwargs['scanning_method'] = scanning_method
            kwargs['headers'] = self._generate_headers(host_header)
        # Call to the respective functions occurs here.
        finds, is_empty = enum['func'](**kwargs)
        result[enumerate] = {'finds': finds, 'is_empty': is_empty}
    return result
|
java
|
public List<TypedDependency> getStanfordTypedDependencies(DependencyForm form) {
List<TypedDependency> dependencies = new ArrayList<TypedDependency>();
if (this.nodes == null)
nodes = getStanfordTreeGraphNodes(form);
List<AgigaTypedDependency> agigaDeps = getAgigaDeps(form);
for (AgigaTypedDependency agigaDep : agigaDeps) {
// Add one, since the tokens are zero-indexed but the TreeGraphNodes are one-indexed
TreeGraphNode gov = nodes.get(agigaDep.getGovIdx() + 1);
TreeGraphNode dep = nodes.get(agigaDep.getDepIdx() + 1);
// Create the typed dependency
TypedDependency typedDep = new TypedDependency(GrammaticalRelation.valueOf(agigaDep.getType()), gov, dep);
dependencies.add(typedDep);
}
return dependencies;
}
|
python
|
def filter_used_routes(
        transfers_pair: List[MediationPairState],
        routes: List[RouteState],
) -> List[RouteState]:
    """This function makes sure we filter routes that have already been used.
    So in a setup like this, we want to make sure that node 2, having tried to
    route the transfer through 3, will also try 5 before sending it backwards to 1:
        1 -> 2 -> 3 -> 4
             v         ^
             5 -> 6 -> 7
    This function will return routes as provided in their original order.
    """
    channelid_to_route = {r.channel_identifier: r for r in routes}
    routes_order = {route.node_address: index for index, route in enumerate(routes)}
    for pair in transfers_pair:
        channelid = pair.payer_transfer.balance_proof.channel_identifier
        if channelid in channelid_to_route:
            del channelid_to_route[channelid]
        channelid = pair.payee_transfer.balance_proof.channel_identifier
        if channelid in channelid_to_route:
            del channelid_to_route[channelid]
    return sorted(
        channelid_to_route.values(),
        key=lambda route: routes_order[route.node_address],
    )
|
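The filter drops any route whose channel appears on either leg of a previous transfer pair, then restores the caller's original route ordering. A reduced sketch of the same idea, with a namedtuple standing in for the real RouteState class:

from collections import namedtuple

# Hypothetical stand-in for the real state class.
Route = namedtuple('Route', ['channel_identifier', 'node_address'])

def filter_used(routes, used_channel_ids):
    """Keep routes whose channel was never used, preserving input order."""
    return [r for r in routes if r.channel_identifier not in used_channel_ids]

routes = [Route(1, 'node3'), Route(2, 'node5'), Route(3, 'node1')]
print(filter_used(routes, {1, 3}))  # only the route through node5 remains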
java
|
public static void setReplicationsPending(Context context, Class<? extends
PeriodicReplicationService> prsClass, boolean pending) {
SharedPreferences prefs = context.getSharedPreferences(PREFERENCES_FILE_NAME, Context
.MODE_PRIVATE);
SharedPreferences.Editor editor = prefs.edit();
editor.putBoolean(constructKey(prsClass, REPLICATIONS_PENDING_SUFFIX), pending);
editor.apply();
}
|
java
|
public String fixValue(String value) {
logger.trace("Fix label value : {}", value);
if (value != null && !value.equals("")) {
value = value.toUpperCase();
if (value.startsWith("MI "))
value = value.replaceFirst("MI ", "");
if (value.startsWith("MEN "))
value = value.replaceFirst("MEN ", "");
if (value.startsWith("MOD "))
value = value.replaceFirst("MOD ", "");
value = value.replaceAll("-", " ");
value = value.replaceAll("_", " ");
value = JKStringUtil.capitalizeFully(value);
// final String[] words = value.toLowerCase().split("_");
// value = "";
// for (final String word : words) {
// if (word.length() > 1) {
// value += word.substring(0, 1).toUpperCase() + word.substring(1) + " ";
// } else {
// value = word;
// }
// }
}
if (value != null && value.contains("\\n")) {
value = value.replace("\\n", System.getProperty("line.separator"));
}
return value;
}
|
java
|
@XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "Sphere", substitutionHeadNamespace = "http://www.opengis.net/gml", substitutionHeadName = "_GriddedSurface")
public JAXBElement<SphereType> createSphere(SphereType value) {
return new JAXBElement<SphereType>(_Sphere_QNAME, SphereType.class, null, value);
}
|
java
|
public void
apply(Message m, int error, TSIGRecord old) {
Record r = generate(m, m.toWire(), error, old);
m.addRecord(r, Section.ADDITIONAL);
m.tsigState = Message.TSIG_SIGNED;
}
|
java
|
public int diff_levenshtein(LinkedList<Diff> diffs) {
int levenshtein = 0;
int insertions = 0;
int deletions = 0;
for (Diff aDiff : diffs) {
switch (aDiff.operation) {
case INSERT:
insertions += aDiff.text.length();
break;
case DELETE:
deletions += aDiff.text.length();
break;
case EQUAL:
// A deletion and an insertion is one substitution.
levenshtein += Math.max(insertions, deletions);
insertions = 0;
deletions = 0;
break;
}
}
levenshtein += Math.max(insertions, deletions);
return levenshtein;
}
|
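The trick above is that a run of insertions adjacent to a run of deletions counts as max(insertions, deletions): the overlap is treated as substitutions. The same accumulation in Python over (op, text) tuples:

def diff_levenshtein(diffs):
    """Compute Levenshtein distance from a diff list of (op, text) tuples,
    counting an adjacent insert+delete pair as a single substitution."""
    levenshtein = insertions = deletions = 0
    for op, text in diffs:
        if op == 'INSERT':
            insertions += len(text)
        elif op == 'DELETE':
            deletions += len(text)
        else:  # EQUAL flushes the pending edits as substitutions
            levenshtein += max(insertions, deletions)
            insertions = deletions = 0
    return levenshtein + max(insertions, deletions)

# 'kitten' -> 'sitten': delete 'k', insert 's' = one substitution.
print(diff_levenshtein([('DELETE', 'k'), ('INSERT', 's'), ('EQUAL', 'itten')]))  # 1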
java
|
@Override
public double execute(Geometry geom1, Geometry geom2,
ProgressTracker progressTracker) {
if (null == geom1 || null == geom2) {
throw new IllegalArgumentException();
}
Geometry geometryA = geom1;
Geometry geometryB = geom2;
if (geometryA.isEmpty() || geometryB.isEmpty())
return NumberUtils.TheNaN;
Polygon polygonA;
Polygon polygonB;
MultiPoint multiPointA;
MultiPoint multiPointB;
// if geometryA is an envelope use a polygon instead (if geom1 was
// folded, then geometryA will already be a polygon)
// if geometryA is a point use a multipoint instead
Geometry.Type gtA = geometryA.getType();
Geometry.Type gtB = geometryB.getType();
if (gtA == Geometry.Type.Point) {
if (gtB == Geometry.Type.Point) {
return Point2D.distance(((Point)geometryA).getXY(), ((Point)geometryB).getXY());
}
else if (gtB == Geometry.Type.Envelope) {
Envelope2D envB = new Envelope2D();
geometryB.queryEnvelope2D(envB);
return envB.distance(((Point)geometryA).getXY());
}
multiPointA = new MultiPoint();
multiPointA.add((Point) geometryA);
geometryA = multiPointA;
} else if (gtA == Geometry.Type.Envelope) {
if (gtB == Geometry.Type.Envelope) {
Envelope2D envA = new Envelope2D();
geometryA.queryEnvelope2D(envA);
Envelope2D envB = new Envelope2D();
geometryB.queryEnvelope2D(envB);
return envB.distance(envA);
}
polygonA = new Polygon();
polygonA.addEnvelope((Envelope) geometryA, false);
geometryA = polygonA;
}
// if geom_2 is an envelope use a polygon instead
// if geom_2 is a point use a multipoint instead
if (gtB == Geometry.Type.Point) {
multiPointB = new MultiPoint();
multiPointB.add((Point) geometryB);
geometryB = multiPointB;
} else if (gtB == Geometry.Type.Envelope) {
polygonB = new Polygon();
polygonB.addEnvelope((Envelope) geometryB, false);
geometryB = polygonB;
}
DistanceCalculator distanceCalculator = new DistanceCalculator(
progressTracker);
double distance = distanceCalculator.calculate(geometryA, geometryB);
return distance;
}
|
python
|
def get_nodes(code, desired_type, path="__main__", mode="exec", tree=None):
    """Find all nodes of a given type
    Arguments:
        code -- code text
        desired_type -- ast Node or tuple
    Keyword Arguments:
        path -- code path
        mode -- execution mode (exec, eval, single)
        tree -- current tree, if it was optimized
    """
    return _GetVisitor(parse(code, path, mode, tree), desired_type).result
|
python
|
def save_feature(self, cat, img, feature, data):
    """Saves a new feature."""
    filename = self.path(cat, img, feature)
    mkdir(filename)
    savemat(filename, {'output': data})
|
java
|
public static <T> Field create(Type type, T value, Map<String, String> attributes) {
return new Field(Utils.checkNotNull(type, "type"), type.convert(value), attributes);
}
|
java
|
public static <T> Collection<T> diff(Collection<T> list1, Collection<T> list2) {
Collection<T> diff = new ArrayList<T>();
for (T t : list1) {
if (!list2.contains(t)) {
diff.add(t);
}
}
return diff;
}
|
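This is an asymmetric difference: every element of `list1` not contained in `list2`, preserving order and duplicates, at O(n·m) cost from the repeated `contains` scans. A Python equivalent that trades the scans for a set lookup (which requires hashable elements):

def diff(list1, list2):
    """Elements of list1 not present in list2 (order and duplicates kept)."""
    lookup = set(list2)  # O(1) membership instead of repeated list scans
    return [x for x in list1 if x not in lookup]

print(diff([1, 2, 2, 3], [2]))  # [1, 3]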
python
|
def create(self, name=None, **kwargs):
    """
    Create a volume.
    Args:
        name (str): Name of the volume. If not specified, the engine
            generates a name.
        driver (str): Name of the driver used to create the volume
        driver_opts (dict): Driver options as a key-value dictionary
        labels (dict): Labels to set on the volume
    Returns:
        (:py:class:`Volume`): The volume created.
    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    Example:
        >>> volume = client.volumes.create(name='foobar', driver='local',
                driver_opts={'foo': 'bar', 'baz': 'false'},
                labels={"key": "value"})
    """
    obj = self.client.api.create_volume(name, **kwargs)
    return self.prepare_model(obj)
|
java
|
@Override
public synchronized void onNext(final ClientRuntimeProtocol.JobControlProto jobControlProto) {
if (jobControlProto.hasSignal()) {
if (jobControlProto.getSignal() == ClientRuntimeProtocol.Signal.SIG_TERMINATE) {
try {
if (jobControlProto.hasMessage()) {
getClientCloseWithMessageDispatcher().onNext(jobControlProto.getMessage().toByteArray());
} else {
getClientCloseDispatcher().onNext(null);
}
} finally {
this.driverStatusManager.onComplete();
}
} else {
LOG.log(Level.FINEST, "Unsupported signal: " + jobControlProto.getSignal());
}
} else if (jobControlProto.hasMessage()) {
getClientMessageDispatcher().onNext(jobControlProto.getMessage().toByteArray());
}
}
|
java
|
public Observable<JobTargetGroupInner> getAsync(String resourceGroupName, String serverName, String jobAgentName, String targetGroupName) {
return getWithServiceResponseAsync(resourceGroupName, serverName, jobAgentName, targetGroupName).map(new Func1<ServiceResponse<JobTargetGroupInner>, JobTargetGroupInner>() {
@Override
public JobTargetGroupInner call(ServiceResponse<JobTargetGroupInner> response) {
return response.body();
}
});
}
|
python
|
def parse_alt_text(alt):
    """ Parses the arguments out from a Publ-Markdown alt text into a tuple of text, args """
    match = re.match(r'([^\{]*)(\{(.*)\})$', alt)
    if match:
        alt = match.group(1)
        args = parse_arglist(match.group(3))
    else:
        args = {}
    return alt, args
|
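The regex anchors a trailing `{...}` group to the end of the alt text; group 1 is the bare text and group 3 is the raw argument list handed to `parse_arglist` (defined elsewhere). A standalone demo of just the split, with a made-up alt string:

import re

alt = 'A kitten photo{width=320,height=240}'
match = re.match(r'([^\{]*)(\{(.*)\})$', alt)
if match:
    print(match.group(1))  # 'A kitten photo'
    print(match.group(3))  # 'width=320,height=240' (would go to parse_arglist)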
java
|
@Override
public String getHeader(String key) {
ServletRESTRequestImpl ret = castRequest();
if (ret != null)
return ret.getHeader(key);
return null;
}
|
python
|
def target_range(self):
    """Get the range on the target strand
    :return: target range
    :rtype: GenomicRange
    """
    if not self.is_aligned(): return None
    if self._target_range: return self._target_range  # check cache
    global _sam_cigar_target_add
    tlen = sum([x[0] for x in self.cigar_array if _sam_cigar_target_add.match(x[1])])
    self._target_range = GenomicRange(self.entries.rname, self.entries.pos, self.entries.pos + tlen - 1)
    return self._target_range
|
java
|
public static void init(String[] args) throws UnknownHostException {
boolean storeFile = false, binaryTransfer = false, error = false, listFiles = false, listNames = false, hidden = false;
boolean localActive = false, useEpsvWithIPv4 = false, feat = false, printHash = false;
boolean mlst = false, mlsd = false;
boolean lenient = false;
long keepAliveTimeout = -1;
int controlKeepAliveReplyTimeout = -1;
int minParams = 5; // listings require 3 params
String protocol = null; // SSL protocol
String doCommand = null;
String trustmgr = null;
String proxyHost = null;
int proxyPort = 80;
String proxyUser = null;
String proxyPassword = null;
String username = null;
String password = null;
int base = 0;
for (base = 0; base < args.length; base++) {
if (args[base].equals("-s")) {
storeFile = true;
} else if (args[base].equals("-a")) {
localActive = true;
} else if (args[base].equals("-A")) {
username = "anonymous";
password = System.getProperty("user.name") + "@" + InetAddress.getLocalHost().getHostName();
} else if (args[base].equals("-b")) {
binaryTransfer = true;
} else if (args[base].equals("-c")) {
doCommand = args[++base];
minParams = 3;
} else if (args[base].equals("-d")) {
mlsd = true;
minParams = 3;
} else if (args[base].equals("-e")) {
useEpsvWithIPv4 = true;
} else if (args[base].equals("-f")) {
feat = true;
minParams = 3;
} else if (args[base].equals("-h")) {
hidden = true;
} else if (args[base].equals("-k")) {
keepAliveTimeout = Long.parseLong(args[++base]);
} else if (args[base].equals("-l")) {
listFiles = true;
minParams = 3;
} else if (args[base].equals("-L")) {
lenient = true;
} else if (args[base].equals("-n")) {
listNames = true;
minParams = 3;
} else if (args[base].equals("-p")) {
protocol = args[++base];
} else if (args[base].equals("-t")) {
mlst = true;
minParams = 3;
} else if (args[base].equals("-w")) {
controlKeepAliveReplyTimeout = Integer.parseInt(args[++base]);
} else if (args[base].equals("-T")) {
trustmgr = args[++base];
} else if (args[base].equals("-PrH")) {
proxyHost = args[++base];
String[] parts = proxyHost.split(":");
if (parts.length == 2) {
proxyHost = parts[0];
proxyPort = Integer.parseInt(parts[1]);
}
} else if (args[base].equals("-PrU")) {
proxyUser = args[++base];
} else if (args[base].equals("-PrP")) {
proxyPassword = args[++base];
} else if (args[base].equals("-#")) {
printHash = true;
} else {
break;
}
}
int remain = args.length - base;
if (username != null) {
minParams -= 2;
}
if (remain < minParams) // server, user, pass, remote, local [protocol]
{
System.err.println(USAGE);
System.exit(1);
}
String server = args[base++];
int port = 0;
String[] parts = server.split(":");
if (parts.length == 2) {
server = parts[0];
port = Integer.parseInt(parts[1]);
}
if (username == null) {
username = args[base++];
password = args[base++];
}
String remote = null;
if (args.length - base > 0) {
remote = args[base++];
}
String local = null;
if (args.length - base > 0) {
local = args[base++];
}
final FTPClient ftp;
if (protocol == null) {
if (proxyHost != null) {
System.out.println("Using HTTP proxy server: " + proxyHost);
ftp = new FTPHTTPClient(proxyHost, proxyPort, proxyUser, proxyPassword);
} else {
ftp = new FTPClient();
}
} else {
FTPSClient ftps;
if (protocol.equals("true")) {
ftps = new FTPSClient(true);
} else if (protocol.equals("false")) {
ftps = new FTPSClient(false);
} else {
String[] prot = protocol.split(",");
if (prot.length == 1) { // Just protocol
ftps = new FTPSClient(protocol);
} else { // protocol,true|false
ftps = new FTPSClient(prot[0], Boolean.parseBoolean(prot[1]));
}
}
ftp = ftps;
if ("all".equals(trustmgr)) {
ftps.setTrustManager(TrustManagerUtils.getAcceptAllTrustManager());
} else if ("valid".equals(trustmgr)) {
ftps.setTrustManager(TrustManagerUtils.getValidateServerCertificateTrustManager());
} else if ("none".equals(trustmgr)) {
ftps.setTrustManager(null);
}
}
if (printHash) {
ftp.setCopyStreamListener(createListener());
}
if (keepAliveTimeout >= 0) {
ftp.setControlKeepAliveTimeout(keepAliveTimeout);
}
if (controlKeepAliveReplyTimeout >= 0) {
ftp.setControlKeepAliveReplyTimeout(controlKeepAliveReplyTimeout);
}
ftp.setListHiddenFiles(hidden);
// suppress login details
ftp.addProtocolCommandListener(new PrintCommandListener(new PrintWriter(System.out), true));
try {
int reply;
if (port > 0) {
ftp.connect(server, port);
} else {
ftp.connect(server);
}
System.out.println("Connected to " + server + " on " + (port > 0 ? port : ftp.getDefaultPort()));
// After connection attempt, you should check the reply code to
// verify
// success.
reply = ftp.getReplyCode();
if (!FTPReply.isPositiveCompletion(reply)) {
ftp.disconnect();
System.err.println("FTP server refused connection.");
System.exit(1);
}
} catch (IOException e) {
if (ftp.isConnected()) {
try {
ftp.disconnect();
} catch (IOException f) {
// do nothing
}
}
System.err.println("Could not connect to server.");
e.printStackTrace();
System.exit(1);
}
__main: try {
if (!ftp.login(username, password)) {
ftp.logout();
error = true;
break __main;
}
System.out.println("Remote system is " + ftp.getSystemType());
if (binaryTransfer) {
ftp.setFileType(FTP.BINARY_FILE_TYPE);
} else {
// in theory this should not be necessary as servers should
// default to ASCII
// but they don't all do so - see NET-500
ftp.setFileType(FTP.ASCII_FILE_TYPE);
}
// Use passive mode as default because most of us are
// behind firewalls these days.
if (localActive) {
ftp.enterLocalActiveMode();
} else {
ftp.enterLocalPassiveMode();
}
ftp.setUseEPSVwithIPv4(useEpsvWithIPv4);
if (storeFile) {
InputStream input;
input = new FileInputStream(local);
ftp.storeFile(remote, input);
input.close();
} else if (listFiles) {
if (lenient) {
FTPClientConfig config = new FTPClientConfig();
config.setLenientFutureDates(true);
ftp.configure(config);
}
for (FTPFile f : ftp.listFiles(remote)) {
System.out.println(f.getRawListing());
System.out.println(f.toFormattedString());
}
} else if (mlsd) {
for (FTPFile f : ftp.mlistDir(remote)) {
System.out.println(f.getRawListing());
System.out.println(f.toFormattedString());
}
} else if (mlst) {
FTPFile f = ftp.mlistFile(remote);
if (f != null) {
System.out.println(f.toFormattedString());
}
} else if (listNames) {
for (String s : ftp.listNames(remote)) {
System.out.println(s);
}
} else if (feat) {
// boolean feature check
if (remote != null) { // See if the command is present
if (ftp.hasFeature(remote)) {
System.out.println("Has feature: " + remote);
} else {
if (FTPReply.isPositiveCompletion(ftp.getReplyCode())) {
System.out.println("FEAT " + remote + " was not detected");
} else {
System.out.println("Command failed: " + ftp.getReplyString());
}
}
// Strings feature check
String[] features = ftp.featureValues(remote);
if (features != null) {
for (String f : features) {
System.out.println("FEAT " + remote + "=" + f + ".");
}
} else {
if (FTPReply.isPositiveCompletion(ftp.getReplyCode())) {
System.out.println("FEAT " + remote + " is not present");
} else {
System.out.println("Command failed: " + ftp.getReplyString());
}
}
} else {
if (ftp.features()) {
// Command listener has already printed the output
} else {
System.out.println("Failed: " + ftp.getReplyString());
}
}
} else if (doCommand != null) {
if (ftp.doCommand(doCommand, remote)) {
// Command listener has already printed the output
// for(String s : ftp.getReplyStrings()) {
// System.out.println(s);
// }
} else {
System.out.println("Failed: " + ftp.getReplyString());
}
} else {
OutputStream output;
output = new FileOutputStream(local);
ftp.retrieveFile(remote, output);
output.close();
}
ftp.noop(); // check that control connection is working OK
ftp.logout();
} catch (FTPConnectionClosedException e) {
error = true;
System.err.println("Server closed connection.");
e.printStackTrace();
} catch (IOException e) {
error = true;
e.printStackTrace();
} finally {
if (ftp.isConnected()) {
try {
ftp.disconnect();
} catch (IOException f) {
// do nothing
}
}
}
System.exit(error ? 1 : 0);
}
|
python
|
def get_sticker_set(self, name):
    """
    Use this method to get a sticker set. On success, a StickerSet object is returned.
    https://core.telegram.org/bots/api#getstickerset
    Parameters:
    :param name: Name of the sticker set
    :type name: str|unicode
    Returns:
    :return: On success, a StickerSet object is returned
    :rtype: pytgbot.api_types.receivable.stickers.StickerSet
    """
    assert_type_or_raise(name, unicode_type, parameter_name="name")
    result = self.do("getStickerSet", name=name)
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        from pytgbot.api_types.receivable.stickers import StickerSet
        try:
            return StickerSet.from_array(result)
        except TgApiParseException:
            logger.debug("Failed parsing as api_type StickerSet", exc_info=True)
        # end try
        # no valid parsing so far
        raise TgApiParseException("Could not parse result.")  # See debug log for details!
    # end if return_python_objects
    return result
|
java
|
private static int mod(int a, final int B) {
final int N = a / B;
a -= N * B;
if (a < 0) {
return a + B;
}
return a;
}
|
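Unlike Java's `%`, the helper above always returns a value in [0, B) for positive B. Python's `%` already behaves that way; the Java trick is equivalent to `((a % b) + b) % b`:

def java_style_mod(a, b):
    """Non-negative remainder for positive b, matching the Java helper."""
    return ((a % b) + b) % b  # in Python, a % b alone already suffices

print(java_style_mod(-7, 3), -7 % 3)  # 2 2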
python
|
def handle_error(self, error, req, schema, error_status_code, error_headers):
    """Handles errors during parsing."""
    status = status_map.get(error_status_code or self.DEFAULT_VALIDATION_STATUS)
    if status is None:
        raise LookupError("Status code {0} not supported".format(error_status_code))
    raise HTTPError(status, errors=error.messages, headers=error_headers)
|
java
|
public float time_tell(){
// translate time to PCM position and call pcm_seek
int link=-1;
long pcm_total=0;
float time_total=0.f;
if(seekable){
pcm_total=pcm_total(-1);
time_total=time_total(-1);
// which bitstream section does this time offset occur in?
for(link=links-1; link>=0; link--){
pcm_total-=pcmlengths[link];
time_total-=time_total(link);
if(pcm_offset>=pcm_total)
break;
}
}
return ((float)time_total+(float)(pcm_offset-pcm_total)/vi[link].rate);
}
|
java
|
public String convertIfcConstructionProductResourceTypeEnumToString(EDataType eDataType, Object instanceValue) {
return instanceValue == null ? null : instanceValue.toString();
}
|
python
|
def _variant_to_dsl_helper(tokens) -> Variant:
    """Convert variant tokens to DSL objects.
    :type tokens: ParseResult
    """
    kind = tokens[KIND]
    if kind == HGVS:
        return hgvs(tokens[IDENTIFIER])
    if kind == GMOD:
        return gmod(
            name=tokens[IDENTIFIER][NAME],
            namespace=tokens[IDENTIFIER][NAMESPACE],
        )
    if kind == PMOD:
        return pmod(
            name=tokens[IDENTIFIER][NAME],
            namespace=tokens[IDENTIFIER][NAMESPACE],
            code=tokens.get(PMOD_CODE),
            position=tokens.get(PMOD_POSITION),
        )
    if kind == FRAGMENT:
        start, stop = tokens.get(FRAGMENT_START), tokens.get(FRAGMENT_STOP)
        return fragment(
            start=start,
            stop=stop,
            description=tokens.get(FRAGMENT_DESCRIPTION)
        )
    raise ValueError('invalid variant kind: {}'.format(kind))
|
python
|
def _create_online_model(self, lattice_instance, use_bl, mode='simu'):
    """ make online model according to the lattice.Lattice instance
    :param lattice_instance: lattice.Lattice instance, created from lte/json file
    :param use_bl: selected beamline name
    :param mode: online modeling type, 'simu': simulation,
        'online': online (incorporate control fields)
    """
    new_model = models.Models(name=use_bl, mode=mode)
    ele_name_list = lattice_instance.getElementList(use_bl)
    ele_eobj_list = []
    for ele in ele_name_list:
        ele_eobj_list.append(lattice_instance.makeElement(ele))
    new_model.addElement(*ele_eobj_list)
    return new_model
|
python
|
def log_debug(func, *args, **kwargs):
    ''' Wrap call of provided function with debug log statements. '''
    logging.debug('Starting "%s" in thread %s...', func.__name__, current_thread())
    results = func(*args, **kwargs)
    logging.debug('Successfully finished "%s" in thread %s.', func.__name__, current_thread())
    return results
|
python
|
def prin(*args, **kwargs):
    r"""Like ``print``, but a function. I.e. prints out all arguments as
    ``print`` would do. Specify an output stream like this::
        prin('ERROR', out=sys.stderr)
    """
    print >> kwargs.get('out', None), " ".join([str(arg) for arg in args])
|
python
|
def select_between_exonic_splice_site_and_alternate_effect(effect):
    """
    If the given effect is an ExonicSpliceSite then it might contain
    an alternate effect of higher priority. In that case, return the
    alternate effect. Otherwise, this acts as an identity function.
    """
    if effect.__class__ is not ExonicSpliceSite:
        return effect
    if effect.alternate_effect is None:
        return effect
    splice_priority = effect_priority(effect)
    alternate_priority = effect_priority(effect.alternate_effect)
    if splice_priority > alternate_priority:
        return effect
    else:
        return effect.alternate_effect
|
python
|
def _join_disease(query, disease_definition, disease_id, disease_name):
    """helper function to add a query join to Disease model
    :param sqlalchemy.orm.query.Query query: SQL Alchemy query
    :param disease_definition:
    :param str disease_id: see :attr:`models.Disease.disease_id`
    :param disease_name:
    :rtype: sqlalchemy.orm.query.Query
    """
    if disease_definition or disease_id or disease_name:
        query = query.join(models.Disease)
        if disease_definition:
            query = query.filter(models.Disease.definition.like(disease_definition))
        if disease_id:
            query = query.filter(models.Disease.disease_id == disease_id)
        if disease_name:
            query = query.filter(models.Disease.disease_name.like(disease_name))
    return query
|
python
|
def _assemble_corpus_string(self, corpus):
    """Given a corpus name ('phi5' or 'tlg'), yield the cleaned contents
    of each author file in that corpus."""
    if corpus == 'phi5':
        filepaths = assemble_phi5_author_filepaths()
        file_cleaner = phi5_plaintext_cleanup
    elif corpus == 'tlg':
        filepaths = assemble_tlg_author_filepaths()
        file_cleaner = tlg_plaintext_cleanup
    else:
        raise ValueError("corpus must be 'phi5' or 'tlg'")
    for filepath in filepaths:
        with open(filepath) as file_open:
            file_read = file_open.read().lower()
            file_clean = file_cleaner(file_read)
            yield file_clean
|
java
|
private void setComponents() {
CmsUriSplitter splitter = new CmsUriSplitter(m_uri, true);
m_target = splitter.getPrefix();
m_anchor = CmsLinkProcessor.unescapeLink(splitter.getAnchor());
setQuery(splitter.getQuery());
}
|
python
|
def login(self, return_to=None, force_authn=False, is_passive=False, set_nameid_policy=True, name_id_value_req=None):
    """
    Initiates the SSO process.
    :param return_to: Optional argument. The target URL the user should be redirected to after login.
    :type return_to: string
    :param force_authn: Optional argument. When true the AuthNRequest will set the ForceAuthn='true'.
    :type force_authn: bool
    :param is_passive: Optional argument. When true the AuthNRequest will set the Ispassive='true'.
    :type is_passive: bool
    :param set_nameid_policy: Optional argument. When true the AuthNRequest will set a nameIdPolicy element.
    :type set_nameid_policy: bool
    :param name_id_value_req: Optional argument. Indicates to the IdP the subject that should be authenticated
    :type name_id_value_req: string
    :returns: Redirection URL
    :rtype: string
    """
    authn_request = OneLogin_Saml2_Authn_Request(self.__settings, force_authn, is_passive, set_nameid_policy, name_id_value_req)
    self.__last_request = authn_request.get_xml()
    self.__last_request_id = authn_request.get_id()
    saml_request = authn_request.get_request()
    parameters = {'SAMLRequest': saml_request}
    if return_to is not None:
        parameters['RelayState'] = return_to
    else:
        parameters['RelayState'] = OneLogin_Saml2_Utils.get_self_url_no_query(self.__request_data)
    security = self.__settings.get_security_data()
    if security.get('authnRequestsSigned', False):
        parameters['SigAlg'] = security['signatureAlgorithm']
        parameters['Signature'] = self.build_request_signature(saml_request, parameters['RelayState'], security['signatureAlgorithm'])
    return self.redirect_to(self.get_sso_url(), parameters)
|
java
|
public String joining(CharSequence delimiter, CharSequence prefix, CharSequence suffix) {
return collect(DoubleCollector.joining(delimiter, prefix, suffix));
}
|
python
|
def get_sensor_code_by_number(si, mtype, sensor_number, quiet=False):
    """
    Given a sensor number, get the full sensor code (e.g. ACCX-UB1-L2C-M)
    :param si: dict, sensor index json dictionary
    :param mtype: str, sensor type
    :param sensor_number: int, number of sensor
    :param quiet: bool, if true then return None if not found
    :return: str or None, sensor_code: a sensor code (e.g. ACCX-UB1-L2C-M)
    """
    try:
        if 'Orientation' in si[mtype][sensor_number]:
            orientation = si[mtype][sensor_number]['Orientation']
        else:
            orientation = ""
        return "%s%s-%s-%s-%s" % (mtype,
                                  orientation,
                                  si[mtype][sensor_number]['X-CODE'],
                                  si[mtype][sensor_number]['Y-CODE'],
                                  si[mtype][sensor_number]['Z-CODE'])
    except KeyError:
        if quiet:
            return None
        raise
|
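A usage sketch, assuming the function above is in scope and using a hypothetical sensor-index dict shaped like the JSON it expects:

# Hypothetical sensor index; real indices come from a JSON file.
si = {
    'ACC': {
        1: {'Orientation': 'X', 'X-CODE': 'UB1', 'Y-CODE': 'L2C', 'Z-CODE': 'M'},
    }
}
print(get_sensor_code_by_number(si, 'ACC', 1))           # ACCX-UB1-L2C-M
print(get_sensor_code_by_number(si, 'ACC', 2, quiet=True))  # None (not found)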
python
|
def install_client_interceptors(client_interceptors=()):
    """
    Install client interceptors for the patchers.
    :param client_interceptors: a list of client interceptors to install.
        Should be a list of classes
    """
    if not _valid_args(client_interceptors):
        raise ValueError('client_interceptors argument must be a list')
    from ..http_client import ClientInterceptors
    for client_interceptor in client_interceptors:
        logging.info('Loading client interceptor %s', client_interceptor)
        interceptor_class = _load_symbol(client_interceptor)
        logging.info('Adding client interceptor %s', client_interceptor)
        ClientInterceptors.append(interceptor_class())
|
java
|
protected synchronized void fireTrackPositionChangeEvent(long newTime) {
TrackPositionChangeEvent tpce = new TrackPositionChangeEvent(this, newTime);
for (TrackPositionChangeListener tpcl : trackListeners) {
tpcl.trackPositionChanged(tpce);
}
}
|
python
|
def post_build(self, container_builder, container):
"""
This method make sure the flask configuration is fine, and
check the if ioc.extra.jinja2 service is available. If so, the
flask instance will use this service, by keeping the flask template
loader and the one registered at the jinja2
"""
app = container.get('ioc.extra.flask.app')
app.config.update(container_builder.parameters.get('ioc.extra.flask.app.config'))
if container.has('ioc.extra.jinja2'):
# This must be an instance of jinja.ChoiceLoader
# This code replace the flask specific jinja configuration to use
# the one provided by the ioc.extra.jinja2 code
jinja2 = container.get('ioc.extra.jinja2')
jinja2.loader.loaders.append(app.create_global_jinja_loader())
for name, value in app.jinja_env.globals.items():
if name not in jinja2.globals:
jinja2.globals[name] = value
for name, value in app.jinja_env.filters.items():
if name not in jinja2.filters:
jinja2.filters[name] = value
app.jinja_env = jinja2
|
java
|
@SuppressWarnings("unchecked")
private Constructor<BDBRepository> getRepositoryConstructor()
throws ClassCastException, ClassNotFoundException, NoSuchMethodException
{
String packageName;
{
String thisClassName = getClass().getName();
packageName = thisClassName.substring(0, thisClassName.lastIndexOf('.'));
}
String className = packageName + '.' + getBDBProduct().name() + "_Repository";
Class repoClass = Class.forName(className);
if (BDBRepository.class.isAssignableFrom(repoClass)) {
return repoClass.getDeclaredConstructor
(AtomicReference.class, BDBRepositoryBuilder.class);
}
throw new ClassCastException("Not an instance of BDBRepository: " + repoClass.getName());
}
|
python
|
def get_asset(self, asset_hash, id=None, endpoint=None):
"""
Get an asset by its hash
Args:
asset_hash: (str) asset to lookup, example would be 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b'
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encountered in the RPC call
"""
return self._call_endpoint(GET_ASSET_STATE, params=[asset_hash], id=id, endpoint=endpoint)
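# Illustrative call (hypothetical client instance; the hash is the docstring's example):
#   client.get_asset('c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b')
#   # -> JSON-decoded asset state, or the error encountered in the RPC call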
|
java
|
@VisibleForTesting
CompletableFuture<List<Long>> getBucketOffsets(DirectSegmentAccess segment, TableBucket bucket, TimeoutTimer timer) {
val result = new ArrayList<Long>();
AtomicLong offset = new AtomicLong(bucket.getSegmentOffset());
return Futures.loop(
() -> offset.get() >= 0,
() -> {
result.add(offset.get());
return getBackpointerOffset(segment, offset.get(), timer.getRemaining());
},
offset::set,
this.executor)
.thenApply(v -> result);
}
|
java
|
public Stat stat(final Txn<T> txn) {
if (SHOULD_CHECK) {
requireNonNull(txn);
txn.checkReady();
}
final MDB_stat stat = new MDB_stat(RUNTIME);
checkRc(LIB.mdb_stat(txn.pointer(), ptr, stat));
return new Stat(
stat.f0_ms_psize.intValue(),
stat.f1_ms_depth.intValue(),
stat.f2_ms_branch_pages.longValue(),
stat.f3_ms_leaf_pages.longValue(),
stat.f4_ms_overflow_pages.longValue(),
stat.f5_ms_entries.longValue());
}
|
java
|
private void setCalendarToLastRelativeDay(Calendar calendar)
{
calendar.set(Calendar.DAY_OF_MONTH, calendar.getActualMaximum(Calendar.DAY_OF_MONTH));
int currentDayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
int requiredDayOfWeek = getDayOfWeek().getValue();
int dayOfWeekOffset = 0;
if (currentDayOfWeek > requiredDayOfWeek)
{
dayOfWeekOffset = requiredDayOfWeek - currentDayOfWeek;
}
      else if (currentDayOfWeek < requiredDayOfWeek)
      {
         dayOfWeekOffset = -7 + (requiredDayOfWeek - currentDayOfWeek);
      }
if (dayOfWeekOffset != 0)
{
calendar.add(Calendar.DAY_OF_YEAR, dayOfWeekOffset);
}
}
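   // Worked example (dates for illustration only): for "last Tuesday of the
   // month" in June 2021, the calendar is first pinned to June 30, a Wednesday
   // (DAY_OF_WEEK 4). Tuesday is 3, so 4 > 3 gives an offset of 3 - 4 = -1 and
   // the calendar lands on Tuesday, June 29.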
|
python
|
def preprocess(content, options):
# type: (Text, Optional[optparse.Values]) -> ReqFileLines
"""Split, filter, and join lines, and return a line iterator
:param content: the content of the requirements file
:param options: cli options
"""
lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
lines_enum = join_lines(lines_enum)
lines_enum = ignore_comments(lines_enum)
lines_enum = skip_regex(lines_enum, options)
lines_enum = expand_env_variables(lines_enum)
return lines_enum
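# Rough sketch of the pipeline on assumed input (output shape approximate):
#   content = "requests \\\n==2.21.0\n# a comment\nflask"
#   list(preprocess(content, options))
#   # ~ [(1, 'requests ==2.21.0'), (4, 'flask')]  -- continuations joined,
#   #   comments dropped, original line numbers preserved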
|
java
|
public Observable<Void> deleteAsync(String jobScheduleId) {
return deleteWithServiceResponseAsync(jobScheduleId).map(new Func1<ServiceResponseWithHeaders<Void, JobScheduleDeleteHeaders>, Void>() {
@Override
public Void call(ServiceResponseWithHeaders<Void, JobScheduleDeleteHeaders> response) {
return response.body();
}
});
}
|
python
|
def expand_all(self):
"""
Expand all positions; works only if the underlying tree allows it.
"""
if implementsCollapseAPI(self._tree):
self._tree.expand_all()
self._walker.clear_cache()
self.refresh()
|
python
|
def _timestamp_query_param_from_json(value, field):
"""Coerce 'value' to a datetime, if set or not nullable.
Args:
value (str): The timestamp.
field (.SchemaField): The field corresponding to the value.
Returns:
Optional[datetime.datetime]: The parsed datetime object from
``value`` if the ``field`` is not null (otherwise it is
:data:`None`).
"""
if _not_null(value, field):
# Canonical formats for timestamps in BigQuery are flexible. See:
# g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type
# The separator between the date and time can be 'T' or ' '.
value = value.replace(" ", "T", 1)
# The UTC timezone may be formatted as Z or +00:00.
value = value.replace("Z", "")
value = value.replace("+00:00", "")
if "." in value:
# YYYY-MM-DDTHH:MM:SS.ffffff
return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU).replace(
tzinfo=UTC
)
else:
# YYYY-MM-DDTHH:MM:SS
return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION).replace(
tzinfo=UTC
)
else:
return None
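# Both canonical spellings parse to the same tz-aware datetime (illustrative;
# 'field' stands in for a non-nullable .SchemaField):
#   _timestamp_query_param_from_json('2021-06-01 12:00:00.000001+00:00', field)
#   _timestamp_query_param_from_json('2021-06-01T12:00:00Z', field)
#   # -> datetime.datetime(2021, 6, 1, 12, 0, ..., tzinfo=UTC)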
|
python
|
def compute_positions(cls, screen_width, line):
"""Compute the relative position of the fields on a given line.
Args:
screen_width (int): the width of the screen
line (mpdlcd.display_fields.Field list): the list of fields on the
line
Returns:
((int, mpdlcd.display_fields.Field) list): the positions of fields,
as (position, field) tuples.
Raises:
FormatError: if the line contains more than one flexible field, or
is too long for the screen size.
"""
# First index
left = 1
# Last index
right = screen_width + 1
# Current 'flexible' field
flexible = None
# Compute the space to the left and to the right of the (optional)
# flexible field.
for field in line:
if field.is_flexible():
if flexible:
raise FormatError(
'There can be only one flexible field per line.')
flexible = field
elif not flexible:
left += field.width
else:
# Met a 'flexible', computing from the right
right -= field.width
# Available space for the 'flexible' field
available = right - left
if available <= 0:
raise FormatError("Too much data for screen width")
if flexible:
if available < 1:
raise FormatError(
"Not enough space to display flexible field %s" %
flexible.name)
flexible.width = available
positions = []
left = 1
for field in line:
positions.append((left, field))
left += field.width
logger.debug('Positions are %r', positions)
return positions
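# Worked example (hypothetical Field objects): on a 20-character screen with a
# width-6 field, then a flexible field, then a width-4 field, the flexible
# field is sized to 20 - 6 - 4 = 10 and the result is
# [(1, f1), (7, flex), (17, f2)].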
|
python
|
def model_file_fields(self, model):
"""
Generator yielding all instances of FileField and its subclasses of a model.
"""
for field in model._meta.fields:
if isinstance(field, models.FileField):
yield field
|
java
|
public String getContent() throws ConnectionException {
    logger.debug("Enter Response::getContent");
    if (content != null) {
        logger.debug("content already available");
        return content;
    }
    StringBuilder result = new StringBuilder();
    // try-with-resources closes the reader (and underlying stream) after the read
    try (BufferedReader rd = new BufferedReader(new InputStreamReader(stream))) {
        String line;
        while ((line = rd.readLine()) != null) {
            result.append(line);
        }
    } catch (IOException e) {
        logger.error("Exception while retrieving content", e);
        throw new ConnectionException(e.getMessage());
    }
    content = result.toString();
    logger.debug("End Response::getContent");
    return content;
}
|
python
|
def is_rate_limited(self, namespace: str) -> bool:
"""
    Checks whether a namespace is already rate limited, without recording an additional attempt
    :param namespace: Rate limiting namespace
    :type namespace: str
    :return: True if the namespace is currently rate limited (a new attempt could not go ahead), False otherwise
"""
return not self.__can_attempt(namespace=namespace, add_attempt=False)
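# Illustrative usage (hypothetical limiter configured for 2 attempts per minute,
# with an assumed attempt() counterpart that does record attempts):
#   limiter.attempt('login'); limiter.attempt('login')
#   limiter.is_rate_limited('login')   # -> True, and nothing new is recorded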
|
java
|
public void printScreen(PrintWriter out, ResourceBundle reg)
throws DBException
{
String strParamHelp = this.getProperty(DBParams.HELP); // Display record
if (strParamHelp != null)
return; // Don't do this for help screens
this.printHtmlStartForm(out);
int iHtmlOptions = this.getScreenField().getPrintOptions();
if ((iHtmlOptions & HtmlConstants.PRINT_TOOLBAR_BEFORE) != 0)
this.printZmlToolbarData(out, iHtmlOptions);
if ((iHtmlOptions & HtmlConstants.DONT_PRINT_SCREEN) == 0)
this.getScreenField().printData(out, iHtmlOptions); // DO print screen
if ((iHtmlOptions & HtmlConstants.PRINT_TOOLBAR_AFTER) != 0)
this.printZmlToolbarData(out, iHtmlOptions);
this.printHtmlEndForm(out);
}
|
python
|
def helices(self):
"""Generates new `Assembly` containing just α-helices.
Notes
-----
Metadata is not currently preserved from the parent object.
Returns
-------
    hel_assembly : ampal.Assembly
`Assembly` containing only the α-helices of the original `Assembly`.
"""
hel_molecules = list(itertools.chain(
*[p.helices._molecules
for p in self._molecules if hasattr(p, 'helices')]))
hel_assembly = Assembly(molecules=hel_molecules, assembly_id=self.id)
return hel_assembly
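# Illustrative usage (assumes an Assembly parsed elsewhere; ampal.load_pdb is an
# assumed loader name, and helices is accessed as a property to match the
# p.helices use above):
#   structure = ampal.load_pdb('1ubq.pdb')
#   helices_only = structure.helices   # Assembly containing only the α-helices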
|
python
|
def transform(self, X, y, sample_weight=None):
'''
Transforms the time series data with run length encoding of the target variable
Note this transformation changes the number of samples in the data
If sample_weight is provided, it is transformed to align to the new target encoding
Parameters
----------
X : array-like, shape [n_series, ...]
Time series data and (optionally) contextual data
y : array-like shape [n_series, ...]
target variable encoded as a time series
sample_weight : array-like shape [n_series], default = None
sample weights
Returns
-------
Xt : array-like, shape [n_rle_series, ]
transformed time series data
yt : array-like, shape [n_rle_series]
target values for each series
sample_weight_new : array-like shape [n_rle_series]
sample weights
'''
check_ts_data_with_ts_target(X, y)
Xt, Xc = get_ts_data_parts(X)
N = len(Xt) # number of time series
# transformed data
yt = []
Xtt = []
swt = sample_weight
Nt = []
for i in range(N):
Xi, yi = self._transform(Xt[i], y[i])
        yt += yi
        Xtt += Xi
Nt.append(len(yi)) # number of contiguous class instances
if Xc is not None:
Xct = expand_variables_to_segments(Xc, Nt)
Xtt = TS_Data(Xtt, Xct)
if sample_weight is not None:
swt = expand_variables_to_segments(sample_weight, Nt)
return Xtt, yt, swt
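# Sketch of the run-length target encoding (shapes illustrative): a series with
# target [0, 0, 1, 1, 1, 0] has three contiguous runs, so it expands into three
# sub-series with yt = [0, 1, 0]; contextual variables and sample weights are
# repeated once per run so everything stays aligned.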
|
java
|
public Workflow getWorkflowById(String workflowId, boolean includeTasks) {
Workflow workflow = executionDAO.getWorkflow(workflowId, includeTasks);
if (workflow == null) {
LOGGER.debug("Workflow {} not found in executionDAO, checking indexDAO", workflowId);
String json = indexDAO.get(workflowId, RAW_JSON_FIELD);
if (json == null) {
String errorMsg = String.format("No such workflow found by id: %s", workflowId);
LOGGER.error(errorMsg);
throw new ApplicationException(ApplicationException.Code.NOT_FOUND, errorMsg);
}
try {
workflow = objectMapper.readValue(json, Workflow.class);
if (!includeTasks) {
workflow.getTasks().clear();
}
} catch (IOException e) {
String errorMsg = String.format("Error reading workflow: %s", workflowId);
LOGGER.error(errorMsg);
throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, errorMsg, e);
}
}
return workflow;
}
|
python
|
def _write_inventory(self):
"""
Writes the provisioner's inventory file to disk and returns None.
:return: None
"""
self._verify_inventory()
util.write_file(self.inventory_file, util.safe_dump(self.inventory))
|
java
|
@Override
public List<CommerceTierPriceEntry> findByCommercePriceEntryId(
long commercePriceEntryId, int start, int end,
OrderByComparator<CommerceTierPriceEntry> orderByComparator) {
return findByCommercePriceEntryId(commercePriceEntryId, start, end,
orderByComparator, true);
}
|
python
|
def setdocument(self, doc):
"""Associate a document with this element.
Arguments:
doc (:class:`Document`): A document
Each element must be associated with a FoLiA document.
"""
assert isinstance(doc, Document)
if not self.doc:
self.doc = doc
if self.id:
if self.id in doc:
raise DuplicateIDError(self.id)
else:
                    self.doc.index[self.id] = self
        for e in self:  # recursive for all children
            if isinstance(e, AbstractElement):
                e.setdocument(doc)
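# Illustrative usage (hypothetical FoLiA document and element):
#   doc = Document(id='example.doc')
#   sentence.setdocument(doc)   # registers the sentence and all children in doc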
|
python
|
def subnode(self, node):
"""Make `node` receiver's child."""
self.children.append(node)
node.parent = self
node.adjust_interleave(node.interleave)
|