language | func_code_string |
---|---|
java | @Override
public String nextUID() {
StringBuilder tmp = new StringBuilder(prefix.length + 16);
tmp.append(prefix);
tmp.append(counter.incrementAndGet());
return tmp.toString();
} |
java | public static FixedStringSearchInterpolator createInterpolator(MojoParameters params, String filter) {
String[] delimiters = extractDelimiters(filter);
if (delimiters == null) {
// Don't interpolate anything
return FixedStringSearchInterpolator.create();
}
DockerAssemblyConfigurationSource configSource = new DockerAssemblyConfigurationSource(params, null, null);
// Patterned after org.apache.maven.plugins.assembly.interpolation.AssemblyExpressionEvaluator
return AssemblyInterpolator
.fullInterpolator(params.getProject(),
DefaultAssemblyReader.createProjectInterpolator(params.getProject())
.withExpressionMarkers(delimiters[0], delimiters[1]), configSource)
.withExpressionMarkers(delimiters[0], delimiters[1]);
} |
java | public static String simplifyPath(String pathname) {
if (N.isNullOrEmpty(pathname)) {
return ".";
}
pathname = pathname.replace('\\', '/');
// split the path apart
String[] components = pathSplitter.splitToArray(pathname);
List<String> path = new ArrayList<>();
// resolve ., .., and //
for (String component : components) {
if (component.length() == 0 || component.equals(".")) {
continue;
} else if (component.equals("..")) {
if (path.size() > 0 && !path.get(path.size() - 1).equals("..")) {
path.remove(path.size() - 1);
} else {
path.add("..");
}
} else {
path.add(component);
}
}
// put it back together
String result = StringUtil.join(path, '/');
if (pathname.charAt(0) == '/') {
result = "/" + result;
}
while (result.startsWith("/../")) {
result = result.substring(3);
}
if (result.equals("/..")) {
result = "/";
} else if ("".equals(result)) {
result = ".";
}
return result;
} |
java | public Observable<Page<IntegrationAccountInner>> listNextAsync(final String nextPageLink) {
return listNextWithServiceResponseAsync(nextPageLink)
.map(new Func1<ServiceResponse<Page<IntegrationAccountInner>>, Page<IntegrationAccountInner>>() {
@Override
public Page<IntegrationAccountInner> call(ServiceResponse<Page<IntegrationAccountInner>> response) {
return response.body();
}
});
} |
java | public void setServerCustomizers(
Collection<? extends JettyServerCustomizer> customizers) {
Assert.notNull(customizers, "Customizers must not be null");
this.jettyServerCustomizers = new ArrayList<>(customizers);
} |
python | def update_task_ids(self, encoder_vocab_size):
"""Generate task_ids for each problem.
These ids correspond to the index of the task in the task_list.
Args:
encoder_vocab_size: the size of the vocab which is used to compute
the index offset.
"""
for idx, task in enumerate(self.task_list):
task.set_task_id(idx + encoder_vocab_size)
tf.logging.info("Task %d (%s) has id %d." %
(idx, task.name, task.task_id)) |
python | def value(self):
"""returns object as dictionary"""
return {
"type" : "uniqueValue",
"field1" : self._field1,
"field2" : self._field2,
"field3" : self._field3,
"fieldDelimiter" : self._fieldDelimiter,
"defaultSymbol" : self._defaultSymbol.value,
"defaultLabel" : self._defaultLabel,
"uniqueValueInfos" : self._uniqueValueInfos,
"rotationType": self._rotationType,
"rotationExpression": self._rotationExpression
} |
java | private void duplicateProperties(final String spaceId,
final String contentId, final Map<String, String> sourceProperties)
throws TaskExecutionFailedException {
log.info("Duplicating properties for " + contentId + " in space "
+ spaceId + " in account " + dupTask.getAccount());
try {
new Retrier().execute(new Retriable() {
@Override
public String retry() throws Exception {
// Set properties
try {
destStore.setContentProperties(spaceId, contentId, sourceProperties);
} catch (StorageStateException ex) {
String message = "Unable to set content properties" +
" on destination store ({0}) for " +
"{1} (content) in {2} (space)";
log.warn(MessageFormat.format(message, destStore, contentId, spaceId));
}
return "success";
}
});
log.info("Successfully duplicated properties for " + contentId
+ " in space " + spaceId + " in account "
+ dupTask.getAccount());
} catch (Exception e) {
String msg = "Error attempting to duplicate content properties: " + e.getMessage();
throw new DuplicationTaskExecutionFailedException(
buildFailureMessage(msg), e);
}
} |
python | def _upload_image(registry, docker_tag, image_id) -> None:
"""
    Upload the passed image, identified by id and docker tag, to the given registry
:param registry: Docker registry name
:param docker_tag: Docker tag
:param image_id: Image id
:return: None
"""
# We don't have to retag the image since it is already in the right format
logging.info('Uploading %s (%s) to %s', docker_tag, image_id, registry)
push_cmd = ['docker', 'push', docker_tag]
subprocess.check_call(push_cmd) |
java | public static void setLearningRate(MultiLayerNetwork net, int layerNumber, double newLr) {
setLearningRate(net, layerNumber, newLr, null, true);
} |
python | def __init_yaml():
"""Lazy init yaml because canmatrix might not be fully loaded when loading this format."""
global _yaml_initialized
if not _yaml_initialized:
_yaml_initialized = True
yaml.add_constructor(u'tag:yaml.org,2002:Frame', _frame_constructor)
yaml.add_constructor(u'tag:yaml.org,2002:Signal', _signal_constructor)
yaml.add_representer(canmatrix.Frame, _frame_representer) |
python | def stop(self, measurementId, failureReason=None):
"""
        informs the target that the named measurement has completed
        :param measurementId: the measurement that has completed.
        :param failureReason: the reason for failure, if the measurement did not complete successfully.
        :return:
"""
if failureReason is None:
self.endResponseCode = self._doPut(self.sendURL + "/complete")
else:
self.endResponseCode = self._doPut(self.sendURL + "/failed", data={'failureReason': failureReason})
self.sendURL = None |
python | def get_agents_by_ids(self, agent_ids):
"""Gets an ``AgentList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the agents
specified in the ``Id`` list, in the order of the list,
including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible ``Agents`` may be omitted from the list and may
present the elements in any order including returning a unique
set.
arg: agent_ids (osid.id.IdList): a list of agent ``Ids``
return: (osid.authentication.AgentList) - the returned ``Agent
list``
        raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``agent_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authentication',
collection='Agent',
runtime=self._runtime)
object_id_list = []
for i in agent_ids:
object_id_list.append(ObjectId(self._get_id(i, 'authentication').get_identifier()))
result = collection.find(
dict({'_id': {'$in': object_id_list}},
**self._view_filter()))
result = list(result)
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break
return objects.AgentList(sorted_result, runtime=self._runtime, proxy=self._proxy) |
python | def Transit(time, t0=0., dur=0.1, per=3.56789, depth=0.001, **kwargs):
'''
A `Mandel-Agol <http://adsabs.harvard.edu/abs/2002ApJ...580L.171M>`_
transit model, but with the depth and the duration as primary
input variables.
:param numpy.ndarray time: The time array
:param float t0: The time of first transit in units of \
:py:obj:`BJD` - 2454833.
:param float dur: The transit duration in days. Don't go too crazy on \
this one -- very small or very large values will break the \
inverter. Default 0.1
:param float per: The orbital period in days. Default 3.56789
:param float depth: The fractional transit depth. Default 0.001
:param dict kwargs: Any additional keyword arguments, passed directly \
to :py:func:`pysyzygy.Transit`
:returns tmod: The transit model evaluated at the same times as the \
:py:obj:`time` array
'''
if ps is None:
raise Exception("Unable to import `pysyzygy`.")
# Note that rhos can affect RpRs, so we should really do this iteratively,
# but the effect is pretty negligible!
RpRs = Get_RpRs(depth, t0=t0, per=per, **kwargs)
rhos = Get_rhos(dur, t0=t0, per=per, **kwargs)
return ps.Transit(t0=t0, per=per, RpRs=RpRs, rhos=rhos, **kwargs)(time) |
python | async def list_networks(request: web.Request) -> web.Response:
"""
Get request will return a list of discovered ssids:
GET /wifi/list
200 OK
{ "list": [
{
ssid: string // e.g. "linksys", name to connect to
signal: int // e.g. 100; arbitrary signal strength, more is better
active: boolean // e.g. true; whether there is a connection active
security: str // e.g. "WPA2 802.1X" raw nmcli security type output
securityType: str // e.g. "wpa-eap"; see below
}
]
}
The securityType field contains a value suitable for passing to the
securityType argument of /configure, or 'unsupported'. The security
field is mostly useful for debugging if you are unable to connect to
the network even though you think you are using the correct security
type.
"""
try:
networks = await nmcli.available_ssids()
except RuntimeError as e:
return web.json_response({'message': ' '.join(e.args)}, status=500)
else:
return web.json_response({'list': networks}, status=200) |
python | def infer_transportation_modes(self, dt_threshold=10):
"""In-place transportation inferring of segments
Returns:
This track
"""
self.segments = [
segment.infer_transportation_mode(dt_threshold=dt_threshold)
for segment in self.segments
]
return self |
java | private long timeOfBeat(BeatGrid beatGrid, int beatNumber, DeviceUpdate update) {
if (beatNumber <= beatGrid.beatCount) {
return beatGrid.getTimeWithinTrack(beatNumber);
}
logger.warn("Received beat number " + beatNumber + " from " + update.getDeviceName() + " " +
update.getDeviceNumber() + ", but beat grid only goes up to beat " + beatGrid.beatCount +
". Packet: " + update);
if (beatGrid.beatCount < 2) {
return beatGrid.getTimeWithinTrack(1);
}
long lastTime = beatGrid.getTimeWithinTrack(beatGrid.beatCount);
long lastInterval = lastTime - beatGrid.getTimeWithinTrack(beatGrid.beatCount - 1);
return lastTime + (lastInterval * (beatNumber - beatGrid.beatCount));
} |
java | protected List<AbstractClassTypeDeclarationDescr> sortByHierarchy(Collection<AbstractClassTypeDeclarationDescr> unsortedDescrs, KnowledgeBuilderImpl kbuilder) {
taxonomy = new HashMap<QualifiedName, Collection<QualifiedName>>();
Map<QualifiedName, AbstractClassTypeDeclarationDescr> cache = new HashMap<QualifiedName, AbstractClassTypeDeclarationDescr>();
for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
cache.put(tdescr.getType(), tdescr);
}
for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
QualifiedName name = tdescr.getType();
Collection<QualifiedName> supers = taxonomy.get(name);
if (supers == null) {
supers = new ArrayList<QualifiedName>();
taxonomy.put(name, supers);
} else {
kbuilder.addBuilderResult(new TypeDeclarationError(tdescr,
"Found duplicate declaration for type " + tdescr.getType()));
}
boolean circular = false;
for (QualifiedName sup : tdescr.getSuperTypes()) {
if (!Object.class.getName().equals(name.getFullName())) {
if (!hasCircularDependency(tdescr.getType(), sup, taxonomy)) {
if (cache.containsKey(sup)) {
supers.add(sup);
}
} else {
circular = true;
kbuilder.addBuilderResult(new TypeDeclarationError(tdescr,
"Found circular dependency for type " + tdescr.getTypeName()));
break;
}
}
}
if (circular) {
tdescr.getSuperTypes().clear();
}
}
for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
for (TypeFieldDescr field : tdescr.getFields().values()) {
QualifiedName name = tdescr.getType();
QualifiedName typeName = new QualifiedName(field.getPattern().getObjectType());
if (!hasCircularDependency(name, typeName, taxonomy)) {
if (cache.containsKey(typeName)) {
taxonomy.get(name).add(typeName);
}
} else {
field.setRecursive(true);
}
}
}
List<QualifiedName> sorted = new HierarchySorter<QualifiedName>().sort(taxonomy);
ArrayList list = new ArrayList(sorted.size());
for (QualifiedName name : sorted) {
list.add(cache.get(name));
}
return list;
} |
java | private static Identity parse(final JsonObject json) {
final Map<String, String> props = new HashMap<>(json.size());
// @checkstyle MultipleStringLiteralsCheck (1 line)
props.put(PsGithub.LOGIN, json.getString(PsGithub.LOGIN, "unknown"));
props.put("avatar", json.getString("avatar_url", "#"));
return new Identity.Simple(
String.format("urn:github:%d", json.getInt("id")), props
);
} |
python | def _handle_func_def(self, node, scope, ctxt, stream):
"""Handle FuncDef nodes
:node: TODO
:scope: TODO
:ctxt: TODO
:stream: TODO
:returns: TODO
"""
self._dlog("handling function definition")
func = self._handle_node(node.decl, scope, ctxt, stream)
func.body = node.body |
java | static public Angle difference(Angle angle1, Angle angle2) {
return new Angle(angle1.value - angle2.value);
} |
python | def get_user_contact_list(self, id, contact_list_id, **data):
"""
GET /users/:id/contact_lists/:contact_list_id/
Gets a user's :format:`contact_list` by ID as ``contact_list``.
"""
        return self.get("/users/{0}/contact_lists/{1}/".format(id, contact_list_id), data=data) |
java | public List<Project> getAllProjects() {
try {
sessionService.startSession();
List<Project> projects = projectDao.getAll();
log.debug("Retrieved All Projects number: " + projects.size());
return projects;
} finally {
sessionService.closeSession();
}
} |
java | public Instances samoaInstancesInformation(weka.core.Instances instances) {
Instances samoaInstances;
List<Attribute> attInfo = new ArrayList<Attribute>();
for (int i = 0; i < instances.numAttributes(); i++) {
attInfo.add(samoaAttribute(i, instances.attribute(i)));
}
samoaInstances = new Instances(instances.relationName(), attInfo, 0);
if(instances.classIndex() >= 0) { // class attribute is present
samoaInstances.setClassIndex(instances.classIndex());
}
return samoaInstances;
} |
java | public static void main(String[] args)
throws NoSuchAlgorithmException, IOException, InvalidKeyException, XmlPullParserException {
try {
/* play.min.io for test and development. */
MinioClient minioClient = new MinioClient("https://play.min.io:9000", "Q3AM3UQ867SPQQA43P2F",
"zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG");
/* Amazon S3: */
      // MinioClient minioClient = new MinioClient("https://s3.amazonaws.com", "YOUR-ACCESSKEYID",
      //                                           "YOUR-SECRETACCESSKEY");
// Create some content for the object.
StringBuilder builder = new StringBuilder();
for (int i = 0; i < 10; i++) {
builder.append("Sphinx of black quartz, judge my vow: Used by Adobe InDesign to display font samples. ");
builder.append("(29 letters)\n");
builder.append("Jackdaws love my big sphinx of quartz: Similarly, used by Windows XP for some fonts. ");
builder.append("(31 letters)\n");
builder.append("Pack my box with five dozen liquor jugs: According to Wikipedia, this one is used on ");
builder.append("NASAs Space Shuttle. (32 letters)\n");
builder.append("The quick onyx goblin jumps over the lazy dwarf: Flavor text from an Unhinged Magic Card. ");
builder.append("(39 letters)\n");
builder.append("How razorback-jumping frogs can level six piqued gymnasts!: Not going to win any brevity ");
builder.append("awards at 49 letters long, but old-time Mac users may recognize it.\n");
builder.append("Cozy lummox gives smart squid who asks for job pen: A 41-letter tester sentence for Mac ");
builder.append("computers after System 7.\n");
builder.append("A few others we like: Amazingly few discotheques provide jukeboxes; Now fax quiz Jack! my ");
builder.append("brave ghost pled; Watch Jeopardy!, Alex Trebeks fun TV quiz game.\n");
builder.append("---\n");
}
// Create a InputStream for object upload.
ByteArrayInputStream bais = new ByteArrayInputStream(builder.toString().getBytes("UTF-8"));
// Generate a new 256 bit AES key - This key must be remembered by the client.
KeyGenerator keyGen = KeyGenerator.getInstance("AES");
keyGen.init(256);
// To test SSE-C
ServerSideEncryption sse = ServerSideEncryption.withCustomerKey(keyGen.generateKey());
minioClient.putObject("my-bucketname", "my-objectname", bais, bais.available(), sse);
bais.close();
System.out.println("my-objectname is encrypted and uploaded successfully");
// Get the metadata of the object.
ObjectStat objectStat = minioClient.statObject("my-bucketname", "my-objectname", sse);
System.out.println("my-objectname metadata: ");
System.out.println(objectStat);
} catch (MinioException e) {
System.out.println("Error occurred: " + e);
}
} |
java | public Set<String> selectOrganizationPermissions(DbSession dbSession, String organizationUuid, int userId) {
return mapper(dbSession).selectOrganizationPermissions(organizationUuid, userId);
} |
python | def asynchronous(func):
"""Return `func` in a "smart" asynchronous-aware wrapper.
If `func` is called within the event-loop — i.e. when it is running — this
returns the result of `func` without alteration. However, when called from
outside of the event-loop, and the result is awaitable, the result will be
passed though the current event-loop's `run_until_complete` method.
In other words, this automatically blocks when calling an asynchronous
function from outside of the event-loop, and so makes interactive use of
these APIs far more intuitive.
"""
@wraps(func)
def wrapper(*args, **kwargs):
eventloop = get_event_loop()
result = func(*args, **kwargs)
if not eventloop.is_running():
while isawaitable(result):
result = eventloop.run_until_complete(result)
return result
return wrapper |
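For illustration, a minimal usage sketch (assuming the `asynchronous` decorator above lives in a module whose top-level imports provide `wraps`, `get_event_loop` and `isawaitable`; the decorated coroutine below is made up):

import asyncio

@asynchronous
async def fetch_value():
    await asyncio.sleep(0)
    return 42

# Called outside a running event loop, the wrapper blocks via run_until_complete
# and returns the plain value; inside a running loop the awaitable comes back unchanged.
print(fetch_value())  # 42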
python | def intersperse(iterable, element):
"""Generator yielding all elements of `iterable`, but with `element`
inserted between each two consecutive elements"""
iterable = iter(iterable)
    try:
        yield next(iterable)
        while True:
            next_from_iterable = next(iterable)
            yield element
            yield next_from_iterable
    except StopIteration:
        # exhausting the source iterable simply ends the generator (PEP 479-safe)
        return |
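A quick sanity check of the intended behaviour (plain Python, nothing assumed beyond the generator above):

print(list(intersperse([1, 2, 3], 0)))  # [1, 0, 2, 0, 3]
print(list(intersperse("ab", "-")))     # ['a', '-', 'b']
print(list(intersperse([], 0)))         # []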
python | def remove(args):
"""
Remove the feed given in <args>
"""
session = c.Session(args)
if not args["name"] in session.feeds:
sys.exit("You don't have a feed with that name.")
inputtext = ("Are you sure you want to remove the {} "
" feed? (y/N) ").format(args["name"])
reply = input(inputtext)
if reply != "y" and reply != "Y":
return 0
else:
session.feeds.remove_section(args["name"])
with open(session.data_filename, 'w') as configfile:
session.feeds.write(configfile)
try:
os.remove(os.path.join(session.data_dir, args["name"]))
except FileNotFoundError:
pass |
java | public static String delFirst(Pattern pattern, CharSequence content) {
if (null == pattern || StrUtil.isBlank(content)) {
return StrUtil.str(content);
}
return pattern.matcher(content).replaceFirst(StrUtil.EMPTY);
} |
python | def extract_declarations(map_el, dirs, scale=1, user_styles=[]):
""" Given a Map element and directories object, remove and return a complete
list of style declarations from any Stylesheet elements found within.
"""
styles = []
#
# First, look at all the stylesheets defined in the map itself.
#
for stylesheet in map_el.findall('Stylesheet'):
map_el.remove(stylesheet)
content, mss_href = fetch_embedded_or_remote_src(stylesheet, dirs)
if content:
styles.append((content, mss_href))
#
# Second, look through the user-supplied styles for override rules.
#
for stylesheet in user_styles:
mss_href = urljoin(dirs.source.rstrip('/')+'/', stylesheet)
content = urllib.urlopen(mss_href).read().decode(DEFAULT_ENCODING)
styles.append((content, mss_href))
declarations = []
for (content, mss_href) in styles:
is_merc = is_merc_projection(map_el.get('srs',''))
for declaration in stylesheet_declarations(content, is_merc, scale):
#
# Change the value of each URI relative to the location
# of the containing stylesheet. We generally just have
# the one instance of "dirs" around for a full parse cycle,
# so it's necessary to perform this normalization here
# instead of later, while mss_href is still available.
#
uri_value = declaration.value.value
if uri_value.__class__ is uri:
uri_value.address = urljoin(mss_href, uri_value.address)
declarations.append(declaration)
return declarations |
python | def get_post(post_id, username, password):
"""
metaWeblog.getPost(post_id, username, password)
=> post structure
"""
user = authenticate(username, password)
site = Site.objects.get_current()
return post_structure(Entry.objects.get(id=post_id, authors=user), site) |
python | def dag(self) -> Tuple[Dict, Dict]:
"""Construct the DAG of this pipeline based on the its operations and their downstream."""
from pipelines import dags
operations = self.operations.all().prefetch_related('downstream_operations')
def get_downstream(op):
return op.downstream_operations.values_list('id', flat=True)
return dags.get_dag(operations, get_downstream) |
python | def main_callback(self, *args, **kwargs):
"""
Main callback called when an event is received from an entry point.
:returns: The entry point's callback.
:rtype: function
:raises NotImplementedError: When the entrypoint doesn't have the required attributes.
"""
if not self.callback:
raise NotImplementedError('Entrypoints must declare `callback`')
if not self.settings:
raise NotImplementedError('Entrypoints must declare `settings`')
self.callback.im_self.db = None
#1. Start all the middlewares
with self.debug(*args, **kwargs):
with self.database():
#2. `Real` callback
result = self.callback(*args, **kwargs)#pylint: disable=not-callable
return result |
python | def DeleteAllFlowRequestsAndResponses(self, client_id, flow_id):
"""Deletes all requests and responses for a given flow from the database."""
flow_key = (client_id, flow_id)
try:
self.flows[flow_key]
except KeyError:
raise db.UnknownFlowError(client_id, flow_id)
try:
del self.flow_requests[flow_key]
except KeyError:
pass
try:
del self.flow_responses[flow_key]
except KeyError:
pass |
java | public static void writeRecords(final List<VcfSample> samples,
final List<VcfRecord> records,
final PrintWriter writer) {
checkNotNull(samples);
checkNotNull(records);
checkNotNull(writer);
for (VcfRecord record : records) {
writeRecord(samples, record, writer);
}
} |
java | public void putBooleanMapping(String property, Object trueValue, Object falseValue) {
Map<String, Object> mapping = new HashMap<>(2);
mapping.put(TRUE, trueValue);
mapping.put(FALSE, falseValue);
this.getBooleanMapping().put(property, mapping);
} |
python | def valueAt(self, percent):
"""
Returns the value the percent represents between the minimum and
maximum for this axis.
        :param percent | <int> || <float>
:return <float>
"""
min_val = self.minimum()
max_val = self.maximum()
# round the max value to sync with the values in the grid
max_val = self.rounded(max_val)
range_val = max(min(percent, 1.0), 0.0) * (max_val - min_val)
return round(range_val + min_val, 1) |
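A standalone sketch of the same percent-to-value mapping, with the minimum and maximum chosen purely for illustration:

def value_at(percent, min_val=0.0, max_val=10.0):
    # clamp percent to [0, 1], scale across the range, then round to one decimal place
    range_val = max(min(percent, 1.0), 0.0) * (max_val - min_val)
    return round(range_val + min_val, 1)

print(value_at(0.25))  # 2.5
print(value_at(-0.3))  # 0.0 (clamped to the minimum)
print(value_at(1.2))   # 10.0 (clamped to the maximum)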
java | @Override
public synchronized void registerObserver(SubjectObserver<Settings> settingsObserver)
{
if (settingsObserver == null)
throw new IllegalArgumentException("settingsObserver is required");
if(file != null)
{
if(fileMonitor == null)
fileMonitor = new FileMonitor();
fileMonitor.monitor(file.getParent(), file.getName(), false);
//Observer fileEventBridge = (observable,o) -> {this.reLoad();};
//fileMonitor.addObserver(fileEventBridge);
fileMonitor.add((observable,o) -> this.reLoad());
}
registry.register(getClass().getName(), settingsObserver);
} |
java | public Server jdkSsl() throws SSLException, CertificateException {
SelfSignedCertificate cert = new SelfSignedCertificate();
sslContext = SslContextBuilder.forServer(cert.certificate(), cert.privateKey())
.sslProvider(SslProvider.JDK)
.build();
return this;
} |
java | public synchronized List<TaskStatus> getNonRunningTasks() {
List<TaskStatus> result = new ArrayList<TaskStatus>(tasks.size());
for(Map.Entry<TaskAttemptID, TaskInProgress> task: tasks.entrySet()) {
if (!runningTasks.containsKey(task.getKey())) {
result.add(task.getValue().getStatus());
}
}
return result;
} |
java | public List<CmsListItem> getSelectedItems() {
Iterator<String> it = CmsStringUtil.splitAsList(
getParamSelItems(),
CmsHtmlList.ITEM_SEPARATOR,
true).iterator();
List<CmsListItem> items = new ArrayList<CmsListItem>();
while (it.hasNext()) {
String id = it.next();
items.add(getList().getItem(id));
}
return items;
} |
python | def to_dict(self, img_sets):
"""Create a dictionary serialization for a prediction image set handle.
Parameters
----------
img_sets : PredictionImageSetHandle
Returns
-------
dict
Dictionary serialization of the resource
"""
# Get the basic Json object from the super class
json_obj = super(DefaultPredictionImageSetManager, self).to_dict(img_sets)
# Add list of image sets as Json array
json_obj['images'] = [img_set.to_dict() for img_set in img_sets.images]
return json_obj |
java | private void checkInlineParams(NodeTraversal t, Node function) {
Node paramList = NodeUtil.getFunctionParameters(function);
for (Node param : paramList.children()) {
JSDocInfo jsDoc = param.getJSDocInfo();
if (jsDoc == null) {
t.report(param, MISSING_PARAMETER_JSDOC);
return;
} else {
JSTypeExpression paramType = jsDoc.getType();
checkNotNull(paramType, "Inline JSDoc info should always have a type");
checkParam(t, param, null, paramType);
}
}
} |
python | def get_form(self, step=None, data=None, files=None):
"""
Constructs the form for a given `step`. If no `step` is defined, the
current step will be determined automatically.
The form will be initialized using the `data` argument to prefill the
new form. If needed, instance or queryset (for `ModelForm` or
`ModelFormSet`) will be added too.
"""
if step is None:
step = self.steps.current
# prepare the kwargs for the form instance.
kwargs = self.get_form_kwargs(step)
kwargs.update({
'data': data,
'files': files,
'prefix': self.get_form_prefix(step, self.form_list[step]),
'initial': self.get_form_initial(step),
})
if issubclass(self.form_list[step], forms.ModelForm):
# If the form is based on ModelForm, add instance if available.
kwargs.update({'instance': self.get_form_instance(step)})
elif issubclass(self.form_list[step], forms.models.BaseModelFormSet):
# If the form is based on ModelFormSet, add queryset if available.
kwargs.update({'queryset': self.get_form_instance(step)})
return self.form_list[step](**kwargs) |
python | def find_or_build_all(cls, list_of_kwargs):
"""Similar to `find_or_create_all`. But transaction is not committed.
"""
return cls.add_all([cls.first(**kwargs) or cls.new(**kwargs)
for kwargs in list_of_kwargs], commit=False) |
java | public static Object instantiate(String type, ClassLoader classLoader) {
try {
return ClassUtils.forName(type, classLoader)
.flatMap(InstantiationUtils::tryInstantiate)
.orElseThrow(() -> new InstantiationException("No class found for name: " + type));
} catch (Throwable e) {
throw new InstantiationException("Could not instantiate type [" + type + "]: " + e.getMessage(), e);
}
} |
python | def protect_libraries_from_patching():
"""
In this function we delete some modules from `sys.modules` dictionary and import them again inside
`_pydev_saved_modules` in order to save their original copies there. After that we can use these
saved modules within the debugger to protect them from patching by external libraries (e.g. gevent).
"""
patched = ['threading', 'thread', '_thread', 'time', 'socket', 'Queue', 'queue', 'select',
'xmlrpclib', 'SimpleXMLRPCServer', 'BaseHTTPServer', 'SocketServer',
'xmlrpc.client', 'xmlrpc.server', 'http.server', 'socketserver']
for name in patched:
try:
__import__(name)
except:
pass
patched_modules = dict([(k, v) for k, v in sys.modules.items()
if k in patched])
for name in patched_modules:
del sys.modules[name]
# import for side effects
import _pydev_imps._pydev_saved_modules
for name in patched_modules:
sys.modules[name] = patched_modules[name] |
java | void setArrayExpression(BSHArrayInitializer init) {
this.isArrayExpression = true;
if (parent instanceof BSHAssignment) {
BSHAssignment ass = (BSHAssignment) parent;
if ( null != ass.operator
&& ass.operator == ParserConstants.ASSIGN )
this.isMapExpression = true;
if ( this.isMapExpression
&& init.jjtGetParent() instanceof BSHArrayInitializer )
init.setMapInArray(true);
}
} |
python | def atan2(y, x, context=None):
"""
Return ``atan(y / x)`` with the appropriate choice of function branch.
    If ``x > 0``, then ``atan2(y, x)`` is mathematically equivalent to ``atan(y
    / x)``. If ``x < 0`` and ``y > 0``, ``atan2(y, x)`` is equivalent to ``π +
    atan(y / x)``. If ``x < 0`` and ``y < 0``, the result is ``-π + atan(y /
    x)``.
Geometrically, ``atan2(y, x)`` is the angle (measured counterclockwise, in
radians) from the positive x-axis to the line segment joining (0, 0) to (x,
y), in the usual representation of the x-y plane.
Special values are handled as described in the ISO C99 and IEEE 754-2008
standards for the atan2 function. The following examples illustrate the
    rules for positive y; for negative y, apply the symmetry ``atan2(-y, x) ==
    -atan2(y, x)``.
>>> finite = positive = 2.3
>>> negative = -2.3
>>> inf = BigFloat('inf')
>>> print(atan2(+0.0, -0.0)) # pi
3.1415926535897931
>>> print(atan2(+0.0, +0.0)) # 0
0
>>> print(atan2(+0.0, negative)) # pi
3.1415926535897931
>>> print(atan2(+0.0, positive)) # 0
0
>>> print(atan2(positive, 0.0)) # pi / 2
1.5707963267948966
>>> print(atan2(inf, -inf)) # 3*pi / 4
2.3561944901923448
>>> print(atan2(inf, inf)) # pi / 4
0.78539816339744828
>>> print(atan2(inf, finite)) # pi / 2
1.5707963267948966
>>> print(atan2(positive, -inf)) # pi
3.1415926535897931
>>> print(atan2(positive, +inf)) # 0
0
"""
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_atan2,
(
BigFloat._implicit_convert(y),
BigFloat._implicit_convert(x),
),
context,
) |
java | public synchronized void registerNewConf(Address address, List<ConfigProperty> configList) {
Preconditions.checkNotNull(address, "address should not be null");
Preconditions.checkNotNull(configList, "configuration list should not be null");
// Instead of recording property name, we record property key.
mConfMap.put(address, configList.stream().map(c -> new ConfigRecord()
.setKey(toPropertyKey(c.getName())).setSource(c.getSource())
.setValue(c.getValue())).collect(Collectors.toList()));
mLostNodes.remove(address);
for (Runnable function : mChangeListeners) {
function.run();
}
} |
java | public static String clean(String path) {
path = path.replaceAll("//", "/");
return StringUtils.trimToNull(path);
} |
python | def get_bucket(self, bucket, marker=None, max_keys=None, prefix=None):
"""
Get a list of all the objects in a bucket.
@param bucket: The name of the bucket from which to retrieve objects.
@type bucket: L{unicode}
@param marker: If given, indicate a position in the overall
results where the results of this call should begin. The
first result is the first object that sorts greater than
this marker.
@type marker: L{bytes} or L{NoneType}
@param max_keys: If given, the maximum number of objects to
return.
@type max_keys: L{int} or L{NoneType}
@param prefix: If given, indicate that only objects with keys
beginning with this value should be returned.
@type prefix: L{bytes} or L{NoneType}
@return: A L{Deferred} that fires with a L{BucketListing}
describing the result.
@see: U{http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html}
"""
args = []
if marker is not None:
args.append(("marker", marker))
if max_keys is not None:
args.append(("max-keys", "%d" % (max_keys,)))
if prefix is not None:
args.append(("prefix", prefix))
if args:
object_name = "?" + urlencode(args)
else:
object_name = None
details = self._details(
method=b"GET",
url_context=self._url_context(bucket=bucket, object_name=object_name),
)
d = self._submit(self._query_factory(details))
d.addCallback(self._parse_get_bucket)
return d |
java | @Execute
public HtmlResponse create(final CreateForm form) {
verifyCrudMode(form.crudMode, CrudMode.CREATE);
validate(form, messages -> {}, () -> asEditHtml());
verifyToken(() -> asEditHtml());
getBadWord(form).ifPresent(
entity -> {
try {
badWordService.store(entity);
suggestHelper.addBadWord(entity.getSuggestWord(), false);
saveInfo(messages -> messages.addSuccessCrudCreateCrudTable(GLOBAL));
} catch (final Exception e) {
throwValidationError(messages -> messages.addErrorsCrudFailedToCreateCrudTable(GLOBAL, buildThrowableMessage(e)),
() -> asEditHtml());
}
}).orElse(() -> {
throwValidationError(messages -> messages.addErrorsCrudFailedToCreateInstance(GLOBAL), () -> asEditHtml());
});
return redirect(getClass());
} |
python | def _escape_token(token, alphabet):
r"""Replace characters that aren't in the alphabet and append "_" to token.
Apply three transformations to the token:
1. Replace underline character "_" with "\u", and backslash "\" with "\\".
2. Replace characters outside of the alphabet with "\###;", where ### is the
character's Unicode code point.
3. Appends "_" to mark the end of a token.
Args:
token: unicode string to be escaped
alphabet: list of all known characters
Returns:
escaped string
"""
token = token.replace(u"\\", u"\\\\").replace(u"_", u"\\u")
ret = [c if c in alphabet and c != u"\n" else r"\%d;" % ord(c) for c in token]
return u"".join(ret) + "_" |
java | public static Context pushContext(Map<String, Object> current) {
Context context = new Context(getContext(), current);
LOCAL.set(context);
return context;
} |
java | public boolean isReadOnly(ELContext context,
Object base,
Object property) {
context.setPropertyResolved(false);
boolean readOnly;
for (int i = 0; i < size; i++) {
readOnly = elResolvers[i].isReadOnly(context, base, property);
if (context.isPropertyResolved()) {
return readOnly;
}
}
return false; // Does not matter
} |
java | public void startWithoutExecuting(Map<String, Object> variables) {
initialize();
initializeTimerDeclarations();
fireHistoricProcessStartEvent();
performOperation(PvmAtomicOperation.FIRE_PROCESS_START);
setActivity(null);
setActivityInstanceId(getId());
// set variables
setVariables(variables);
} |
python | def debug(self, group, message):
'''Maybe write a debug-level log message.
In particular, this gets written if the hidden `debug_worker`
option contains `group`.
'''
if group in self.debug_worker:
if 'stdout' in self.debug_worker:
print message
self.log(logging.DEBUG, message) |
java | public static DocPath forName(Utils utils, TypeElement typeElement) {
return (typeElement == null) ? empty : new DocPath(utils.getSimpleName(typeElement) + ".html");
} |
java | @SuppressWarnings("rawtypes")
public static boolean isBesicType(Class cls){
if (cls == null) return false;
String type = cls.getName();
return isBasicType(type);
} |
python | def polarity_scores(self, text):
"""
        Return a dict of sentiment scores for the input text.
        Positive values are positive valence, negative values are negative
        valence.
"""
# convert emojis to their textual descriptions
text_token_list = text.split()
text_no_emoji_lst = []
for token in text_token_list:
if token in self.emojis:
# get the textual description
description = self.emojis[token]
text_no_emoji_lst.append(description)
else:
text_no_emoji_lst.append(token)
text = " ".join(x for x in text_no_emoji_lst)
sentitext = SentiText(text)
sentiments = []
words_and_emoticons = sentitext.words_and_emoticons
for item in words_and_emoticons:
valence = 0
i = words_and_emoticons.index(item)
# check for vader_lexicon words that may be used as modifiers or negations
if item.lower() in BOOSTER_DICT:
sentiments.append(valence)
continue
if (i < len(words_and_emoticons) - 1 and item.lower() == "kind" and
words_and_emoticons[i + 1].lower() == "of"):
sentiments.append(valence)
continue
sentiments = self.sentiment_valence(valence, sentitext, item, i, sentiments)
sentiments = self._but_check(words_and_emoticons, sentiments)
valence_dict = self.score_valence(sentiments, text)
return valence_dict |
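Typical usage mirrors the upstream VADER API; a hedged sketch only, since the analyzer class name and exact scores are not verified against this fork:

# analyzer = SentimentIntensityAnalyzer()
# analyzer.polarity_scores("VADER is smart, handsome, and funny!")
# -> a dict of valence scores, e.g. {'neg': ..., 'neu': ..., 'pos': ..., 'compound': ...}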
python | def run_query(ont, aset, args):
"""
Basic querying by positive/negative class lists
"""
subjects = aset.query(args.query, args.negative)
for s in subjects:
print("{} {}".format(s, str(aset.label(s))))
if args.plot:
import plotly.plotly as py
import plotly.graph_objs as go
tups = aset.query_associations(subjects=subjects)
z, xaxis, yaxis = tuple_to_matrix(tups)
spacechar = " "
xaxis = mk_axis(xaxis, aset, args, spacechar=" ")
yaxis = mk_axis(yaxis, aset, args, spacechar=" ")
logging.info("PLOTTING: {} x {} = {}".format(xaxis, yaxis, z))
trace = go.Heatmap(z=z,
x=xaxis,
y=yaxis)
data=[trace]
py.plot(data, filename='labelled-heatmap') |
java | protected HttpTunnel newHttpTunnel(HttpRequest request, HttpResponse response, InetAddress iaddr, int port, int timeoutMS) throws IOException {
try {
Socket socket = new Socket(iaddr, port);
socket.setSoTimeout(timeoutMS);
socket.setTcpNoDelay(true);
return new HttpTunnel(socket, null, null);
}
catch (IOException e) {
log.log(Level.FINE, "Exception thrown", e);
response.sendError(HttpResponse.__400_Bad_Request);
return null;
}
} |
python | def _remove_duplicates(self, items):
"""
Remove duplicates, while keeping the order.
        (Sometimes we have duplicates, because there are several matches of the
same grammar, each yielding similar completions.)
"""
result = []
for i in items:
if i not in result:
result.append(i)
return result |
java | public void marshall(DisassociateDomainRequest disassociateDomainRequest, ProtocolMarshaller protocolMarshaller) {
if (disassociateDomainRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(disassociateDomainRequest.getFleetArn(), FLEETARN_BINDING);
protocolMarshaller.marshall(disassociateDomainRequest.getDomainName(), DOMAINNAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public static AVIMMessageIntervalBound createBound(String messageId, long timestamp, boolean closed) {
return new AVIMMessageIntervalBound(messageId, timestamp, closed);
} |
python | def qmed(self, method='best', **method_options):
"""
Return QMED estimate using best available methodology depending on what catchment attributes are available.
The preferred/best order of methods is defined by :attr:`qmed_methods`. Alternatively, a method can be supplied
e.g. `method='descriptors_1999'` to force the use of a particular method.
================= ======================= ======================================================================
`method` `method_options` notes
================= ======================= ======================================================================
`amax_records` n/a Simple median of annual maximum flow records using
`Catchment.amax_records`.
`pot_records` n/a Uses peaks-over-threshold (POT) flow records. Suitable for flow
records shorter than 14 years.
`descriptors` Synonym for `method=descriptors2008`.
`descriptors2008` `as_rural=False` FEH 2008 regression methodology using `Catchment.descriptors`. Setting
`donor_catchments=None` `as_rural=True` returns rural estimate and setting `donor_catchments`
to a specific list of :class:`Catchment` object **overrides**
automatic selection of the most suitable donor catchment. An empty
list forces no donors to be used at all.
`descriptors1999` as_rural=False FEH 1999 regression methodology.
`area` n/a Simplified FEH 1999 regression methodology using
        `Catchment.descriptors.dtm_area` only.
        `channel_width` n/a Empirical regression method using the river channel width only.
================= ======================= ======================================================================
:param method: methodology to use to estimate QMED. Default: automatically choose best method.
:type method: str
:param method_options: any optional parameters for the QMED method function, e.g. `as_rural=True`
:type method_options: kwargs
:return: QMED in m³/s
:rtype: float
"""
if method == 'best':
# Rules for gauged catchments
if self.catchment.pot_dataset:
if self.catchment.amax_records:
if self.catchment.record_length <= self.catchment.pot_dataset.record_length < 14:
use_method = 'pot_records'
elif self.catchment.record_length >= 2:
use_method = 'amax_records'
else:
use_method = None
elif self.catchment.pot_dataset.record_length >= 1:
use_method = 'pot_records'
else:
use_method = None
elif self.catchment.record_length >= 2:
use_method = 'amax_records'
else:
use_method = None # None of the gauged methods will work
if use_method:
self.results_log['method'] = use_method
return getattr(self, '_qmed_from_' + use_method)()
# Ungauged methods
for method in self.methods[1:]:
try:
# Return the first method that works
self.results_log['method'] = method
return getattr(self, '_qmed_from_' + method)(**method_options)
except (TypeError, InsufficientDataError):
pass
# In case none of them worked
return None
else:
# A specific method has been requested
try:
self.results_log['method'] = method
return getattr(self, '_qmed_from_' + method)(**method_options)
except AttributeError:
raise AttributeError("Method `{}` to estimate QMED does not exist.".format(method)) |
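A hypothetical calling pattern for the method-selection logic above (the analysis class and keyword names are assumed from the docstring, not verified):

# analysis = QmedAnalysis(catchment)
# analysis.qmed()                          # let the helper pick the best available method
# analysis.qmed(method='descriptors2008',  # or force a specific methodology
#               as_rural=True, donor_catchments=[])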
python | def create_order(email,
request,
addresses=None,
shipping_address=None,
billing_address=None,
shipping_option=None,
capture_payment=False):
"""
    Create an order from a basket and customer information
"""
basket_items, _ = get_basket_items(request)
if addresses:
# Longclaw < 0.2 used 'shipping_name', longclaw > 0.2 uses a consistent
# prefix (shipping_address_xxxx)
try:
shipping_name = addresses['shipping_name']
except KeyError:
shipping_name = addresses['shipping_address_name']
shipping_country = addresses['shipping_address_country']
if not shipping_country:
shipping_country = None
shipping_address, _ = Address.objects.get_or_create(name=shipping_name,
line_1=addresses[
'shipping_address_line1'],
city=addresses[
'shipping_address_city'],
postcode=addresses[
'shipping_address_zip'],
country=shipping_country)
shipping_address.save()
try:
billing_name = addresses['billing_name']
except KeyError:
billing_name = addresses['billing_address_name']
billing_country = addresses['shipping_address_country']
if not billing_country:
billing_country = None
billing_address, _ = Address.objects.get_or_create(name=billing_name,
line_1=addresses[
'billing_address_line1'],
city=addresses[
'billing_address_city'],
postcode=addresses[
'billing_address_zip'],
country=billing_country)
billing_address.save()
else:
shipping_country = shipping_address.country
ip_address = get_real_ip(request)
if shipping_country and shipping_option:
site_settings = Configuration.for_site(request.site)
shipping_rate = get_shipping_cost(
site_settings,
shipping_address.country.pk,
shipping_option)['rate']
else:
shipping_rate = Decimal(0)
order = Order(
email=email,
ip_address=ip_address,
shipping_address=shipping_address,
billing_address=billing_address,
shipping_rate=shipping_rate
)
order.save()
# Create the order items & compute total
total = 0
for item in basket_items:
total += item.total()
order_item = OrderItem(
product=item.variant,
quantity=item.quantity,
order=order
)
order_item.save()
if capture_payment:
desc = 'Payment from {} for order id #{}'.format(email, order.id)
try:
transaction_id = GATEWAY.create_payment(request,
total + shipping_rate,
description=desc)
order.payment_date = timezone.now()
order.transaction_id = transaction_id
# Once the order has been successfully taken, we can empty the basket
destroy_basket(request)
except PaymentError:
order.status = order.FAILURE
order.save()
return order |
java | public void setProductCodes(java.util.Collection<ProductCode> productCodes) {
if (productCodes == null) {
this.productCodes = null;
return;
}
this.productCodes = new com.amazonaws.internal.SdkInternalList<ProductCode>(productCodes);
} |
java | @Override
public java.util.concurrent.Future<GetShardIteratorResult> getShardIteratorAsync(String streamName, String shardId, String shardIteratorType,
String startingSequenceNumber) {
return getShardIteratorAsync(new GetShardIteratorRequest().withStreamName(streamName).withShardId(shardId).withShardIteratorType(shardIteratorType)
.withStartingSequenceNumber(startingSequenceNumber));
} |
java | public String convertIfcCoilTypeEnumToString(EDataType eDataType, Object instanceValue) {
return instanceValue == null ? null : instanceValue.toString();
} |
python | def wrap_content(content, settings, hard_breaks=False):
"""
    Returns *content* wrapped in an HTML structure.
If *hard_breaks* is set, line breaks are converted to `<br />` tags.
"""
settings.context['content'] = wrap_paragraphs(content, hard_breaks)
template = Template(settings.template)
try:
return template.render(**settings.context)
except KeyError as error:
msg = "missing context setting: {}".format(error)
raise ContextError(msg) |
python | def _prepare_deprecation_data(self):
"""
Cycles through the list of AppSettingDeprecation instances set on
        ``self.deprecations`` and prepopulates two new dictionary attributes:
``self._deprecated_settings``:
Uses the deprecated setting names themselves as the keys. Used to
check whether a request is for a deprecated setting.
``self._renamed_settings``:
Uses the 'replacement setting' names as keys (where supplied).
Used to allow the helper to temporarily support override settings
defined using the old name, when the values for the new setting are
requested.
"""
if not isinstance(self.deprecations, (list, tuple)):
raise IncorrectDeprecationsValueType(
"'deprecations' must be a list or tuple, not a {}."
.format(type(self.deprecations).__name__)
)
self._deprecated_settings = {}
self._replacement_settings = defaultdict(list)
for item in self.deprecations:
item.prefix = self.get_prefix()
if not self.in_defaults(item.setting_name):
raise InvalidDeprecationDefinition(
"There is an issue with one of your setting deprecation "
"definitions. '{setting_name}' could not be found in "
"{defaults_module_path}. Please ensure a default value "
"remains there until the end of the setting's deprecation "
"period.".format(
setting_name=item.setting_name,
defaults_module_path=self._defaults_module_path,
)
)
if item.setting_name in self._deprecated_settings:
raise DuplicateDeprecationError(
"The setting name for each deprecation definition must be "
"unique, but '{setting_name}' has been used more than once "
"for {helper_class}.".format(
setting_name=item.setting_name,
helper_class=self.__class__.__name__,
)
)
self._deprecated_settings[item.setting_name] = item
if item.replacement_name:
if not self.in_defaults(item.replacement_name):
raise InvalidDeprecationDefinition(
"There is an issue with one of your settings "
"deprecation definitions. '{replacement_name}' is not "
"a valid replacement for '{setting_name}', as no such "
"value can be found in {defaults_module_path}."
.format(
replacement_name=item.replacement_name,
setting_name=item.setting_name,
defaults_module_path=self._defaults_module_path,
)
)
self._replacement_settings[item.replacement_name].append(item) |
java | private int unFilledSpacesInHeaderGroup(int header) {
//If mNumColumns is equal to zero we will have a divide by 0 exception
if(mNumColumns == 0){
return 0;
}
int remainder = mDelegate.getCountForHeader(header) % mNumColumns;
return remainder == 0 ? 0 : mNumColumns - remainder;
} |
python | def __delete_internal_blob(self, key):
''' This method will insert blob data to blob table
'''
with self.get_conn() as conn:
conn.isolation_level = None
try:
c = conn.cursor()
c.execute("BEGIN")
if key is None:
c.execute("DELETE FROM cache_entries WHERE key IS NULL")
c.execute("DELETE FROM blob_entries WHERE KEY IS NULL")
else:
c.execute("DELETE FROM cache_entries WHERE key = ?", (key,))
c.execute("DELETE FROM blob_entries WHERE KEY = ?", (key,))
c.execute("COMMIT")
except:
getLogger().debug("Cannot delete")
return False
return True |
java | public static boolean handleIfMatch(final String ifMatch, final List<ETag> etags, boolean allowWeak) {
if (ifMatch == null) {
return true;
}
if (ifMatch.equals("*")) {
return true; //todo: how to tell if there is a current entity for the request
}
List<ETag> parts = parseETagList(ifMatch);
for (ETag part : parts) {
if (part.isWeak() && !allowWeak) {
continue;
}
for (ETag tag : etags) {
if (tag != null) {
if (tag.isWeak() && !allowWeak) {
continue;
}
if (tag.getTag().equals(part.getTag())) {
return true;
}
}
}
}
return false;
} |
python | def create(obj: PersistedObject, obj_type: Type[Any], arg_name: str):
"""
Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
https://github.com/nose-devs/nose/issues/725
:param obj:
:param obj_type:
:param arg_name:
:return:
"""
return MissingMandatoryAttributeFiles('Multifile object ' + str(obj) + ' cannot be built from constructor of '
'type ' + get_pretty_type_str(obj_type) +
', mandatory constructor argument \'' + arg_name + '\'was not found on '
'filesystem') |
python | def _get_environs(self, prefix: str = None) -> dict:
"""
Fetches set environment variables if such exist, via the :func:`~notifiers.utils.helpers.dict_from_environs`
Searches for `[PREFIX_NAME]_[PROVIDER_NAME]_[ARGUMENT]` for each of the arguments defined in the schema
:param prefix: The environ prefix to use. If not supplied, uses the default
:return: A dict of arguments and value retrieved from environs
"""
if not prefix:
log.debug("using default environ prefix")
prefix = DEFAULT_ENVIRON_PREFIX
return dict_from_environs(prefix, self.name, list(self.arguments.keys())) |
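A small, self-contained sketch of the `[PREFIX_NAME]_[PROVIDER_NAME]_[ARGUMENT]` naming convention described above (the prefix and provider name are assumptions for illustration):

def sketch_environ_names(prefix, provider_name, arguments):
    # Illustrative only: build the upper-cased environment variable names the helper would look up.
    return ["{}{}_{}".format(prefix, provider_name, arg).upper() for arg in arguments]

print(sketch_environ_names("NOTIFIERS_", "slack", ["webhook_url", "message"]))
# ['NOTIFIERS_SLACK_WEBHOOK_URL', 'NOTIFIERS_SLACK_MESSAGE']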
java | public static UIResults extractException(HttpServletRequest httpRequest)
throws ServletException {
UIResults results = (UIResults) httpRequest.getAttribute(FERRET_NAME);
if (results == null) {
throw new ServletException("No attribute..");
}
if (results.exception == null) {
throw new ServletException("No WaybackException..");
}
if (results.wbRequest == null) {
throw new ServletException("No WaybackRequest..");
}
if (results.uriConverter == null) {
throw new ServletException("No ResultURIConverter..");
}
return results;
} |
java | public static boolean in( char c, String str ) {
return Chr.in ( c, FastStringUtils.toCharArray(str) );
} |
java | @Override
public synchronized Task take() throws TimeoutException {
try {
Task task = queue.remove();
inprocess.add(task);
return task;
} catch (NoSuchElementException ex) {
throw new TimeoutException(ex);
}
} |
java | public static CredentialDeleter deleter(final String pathAccountSid,
final String pathCredentialListSid,
final String pathSid) {
return new CredentialDeleter(pathAccountSid, pathCredentialListSid, pathSid);
} |
python | def _ttv_compute(self, v, dims, vidx, remdims):
"""
Tensor times vector product
Parameter
---------
"""
if not isinstance(v, tuple):
raise ValueError('v must be a tuple of vectors')
ndim = self.ndim
order = list(remdims) + list(dims)
if ndim > 1:
T = self.transpose(order)
sz = array(self.shape)[order]
for i in np.arange(len(dims), 0, -1):
T = T.reshape((sz[:ndim - 1].prod(), sz[ndim - 1]))
T = T.dot(v[vidx[i - 1]])
ndim -= 1
if ndim > 0:
T = T.reshape(sz[:ndim])
return T |
python | def Concat(*args: Union[BitVec, List[BitVec]]) -> BitVec:
"""Create a concatenation expression.
:param args:
:return:
"""
# The following statement is used if a list is provided as an argument to concat
if len(args) == 1 and isinstance(args[0], list):
bvs = args[0] # type: List[BitVec]
else:
bvs = cast(List[BitVec], args)
nraw = z3.Concat([a.raw for a in bvs])
annotations = [] # type: Annotations
bitvecfunc = False
for bv in bvs:
annotations += bv.annotations
if isinstance(bv, BitVecFunc):
bitvecfunc = True
if bitvecfunc:
# Is there a better value to set func_name and input to in this case?
return BitVecFunc(
raw=nraw, func_name=None, input_=None, annotations=annotations
)
return BitVec(nraw, annotations) |
python | def _mod_aggregate(self, low, running, chunks):
'''
Execute the aggregation systems to runtime modify the low chunk
'''
agg_opt = self.functions['config.option']('state_aggregate')
if 'aggregate' in low:
agg_opt = low['aggregate']
if agg_opt is True:
agg_opt = [low['state']]
elif not isinstance(agg_opt, list):
return low
if low['state'] in agg_opt and not low.get('__agg__'):
agg_fun = '{0}.mod_aggregate'.format(low['state'])
if agg_fun in self.states:
try:
low = self.states[agg_fun](low, chunks, running)
low['__agg__'] = True
except TypeError:
log.error('Failed to execute aggregate for state %s', low['state'])
return low |
python | def bulk_get(cls, imports, api=None):
"""
Retrieve imports in bulk
:param imports: Imports to be retrieved.
:param api: Api instance.
:return: List of ImportBulkRecord objects.
"""
api = api or cls._API
import_ids = [Transform.to_import(import_) for import_ in imports]
data = {'import_ids': import_ids}
response = api.post(url=cls._URL['bulk_get'], data=data)
return ImportBulkRecord.parse_records(response=response, api=api) |
java | public void setAlternateHandlingShifted(boolean shifted) {
checkNotFrozen();
if(shifted == isAlternateHandlingShifted()) { return; }
CollationSettings ownedSettings = getOwnedSettings();
ownedSettings.setAlternateHandlingShifted(shifted);
setFastLatinOptions(ownedSettings);
} |
java | @Override
public NodeSet<OWLObjectPropertyExpression> getDisjointObjectProperties(
OWLObjectPropertyExpression pe)
throws InconsistentOntologyException, FreshEntitiesException,
ReasonerInterruptedException, TimeOutException {
throw new ReasonerInternalException(
"getDisjointObjectProperties not implemented");
} |
python | def verify_arguments(target, method_name, args, kwargs):
"""Verifies that the provided arguments match the signature of the provided method.
:param Target target: A ``Target`` object containing the object with the method to double.
:param str method_name: The name of the method to double.
:param tuple args: The positional arguments the method should be called with.
:param dict kwargs: The keyword arguments the method should be called with.
:raise: ``VerifyingDoubleError`` if the provided arguments do not match the signature.
"""
if method_name == '_doubles__new__':
return _verify_arguments_of_doubles__new__(target, args, kwargs)
attr = target.get_attr(method_name)
method = attr.object
if attr.kind in ('data', 'attribute', 'toplevel', 'class method', 'static method'):
try:
method = method.__get__(None, attr.defining_class)
except AttributeError:
method = method.__call__
elif attr.kind == 'property':
if args or kwargs:
raise VerifyingDoubleArgumentError("Properties do not accept arguments.")
return
else:
args = ['self_or_cls'] + list(args)
_verify_arguments(method, method_name, args, kwargs) |
java | @SuppressWarnings("unused") // called through reflection by RequestServer
public GridsV99 list(int version, GridsV99 s) {
final Key[] gridKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
@Override
public boolean filter(KeySnapshot.KeyInfo k) {
return Value.isSubclassOf(k._type, Grid.class);
}
}).keys();
s.grids = new GridSchemaV99[gridKeys.length];
for (int i = 0; i < gridKeys.length; i++) {
s.grids[i] = new GridSchemaV99();
s.grids[i].fillFromImpl(getFromDKV("(none)", gridKeys[i], Grid.class));
}
return s;
} |
java | public static ISonarConverter getConverterInstance() {
if (converterInstance == null) {
synchronized (DefaultSonarConverter.class) {
if (converterInstance == null)
converterInstance = new DefaultSonarConverter();
}
}
return converterInstance;
} |
java | public void setLcHeight(Integer newLcHeight) {
Integer oldLcHeight = lcHeight;
lcHeight = newLcHeight;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNPRG__LC_HEIGHT, oldLcHeight, lcHeight));
} |
java | public Object getJsonPath(String json, String jsonPath) {
if (!JsonPath.isPathDefinite(jsonPath)) {
            throw new RuntimeException(jsonPath + " returns a list of results, not a single result.");
}
return parseJson(json).read(jsonPath);
} |
python | def save_picture(self, outfolder, filename):
"""Saves a picture"""
self.set_fancy_ray()
self.png_workaround("/".join([outfolder, filename])) |
python | def register_config_changes(self, configs, meta_changes):
""" Persist config changes to the JSON state file. When a config
changes, a process manager may perform certain actions based on these
changes. This method can be called once the actions are complete.
"""
for config_file in meta_changes['remove_configs'].keys():
self._purge_config_file(config_file)
for config_file, config in configs.items():
if 'update_attribs' in config:
config['attribs'] = config.pop('update_attribs')
if 'update_instance_name' in config:
config['instance_name'] = config.pop('update_instance_name')
if 'update_services' in config or 'remove_services' in config:
remove = config.pop('remove_services', [])
services = config.pop('update_services', [])
# need to prevent old service defs from overwriting new ones
for service in config['services']:
if service not in remove and service not in services:
services.append(service)
config['services'] = services
self._register_config_file(config_file, config) |
python | def ConvertValues(default_metadata, values, token=None, options=None):
"""Converts a set of RDFValues into a set of export-friendly RDFValues.
Args:
default_metadata: export.ExportedMetadata instance with basic information
about where the values come from. This metadata will be passed to
exporters.
values: Values to convert. They should be of the same type.
token: Security token.
options: rdfvalue.ExportOptions instance that will be passed to
ExportConverters.
Returns:
Converted values. Converted values may be of different types
(unlike the source values which are all of the same type). This is due to
the fact that multiple ExportConverters may be applied to the same value
thus generating multiple converted values of different types.
Raises:
NoConverterFound: in case no suitable converters were found for the values.
"""
batch_data = [(default_metadata, obj) for obj in values]
return ConvertValuesWithMetadata(batch_data, token=token, options=options) |
python | def ascii_mode(enabled=True):
""" Disables color and switches to an ASCII character set if True.
"""
global _backups, _chars, _primary_style, _secondary_style, _ascii_mode
if not (enabled or _backups) or (enabled and _ascii_mode):
return
if enabled:
_backups = _chars.copy(), _primary_style, _secondary_style
_chars = {
"primary": {"selected": "*", "block": "#"},
"secondary": {"arrow": ">", "left-edge": "|", "right-edge": "|"},
"plain": {"unselected": "."},
}
_primary_style = ()
_secondary_style = ()
else:
_chars, _primary_style, _secondary_style = _backups
_ascii_mode = enabled |
python | def node_number(self, *, count_pnode=True) -> int:
"""Return the number of node"""
return (sum(1 for n in self.nodes())
+ (sum(1 for n in self.powernodes()) if count_pnode else 0)) |
java | private Observable<ProposedBucketConfigContext> buildRefreshFallbackSequence(List<NodeInfo> nodeInfos, String bucketName) {
Observable<ProposedBucketConfigContext> failbackSequence = null;
for (final NodeInfo nodeInfo : nodeInfos) {
if (!isValidCarrierNode(environment.sslEnabled(), nodeInfo)) {
continue;
}
if (failbackSequence == null) {
failbackSequence = refreshAgainstNode(bucketName, nodeInfo.hostname());
} else {
failbackSequence = failbackSequence.onErrorResumeNext(
refreshAgainstNode(bucketName, nodeInfo.hostname())
);
}
}
if (failbackSequence == null) {
LOGGER.debug("Could not build refresh sequence, node list is empty - ignoring attempt.");
return Observable.empty();
}
return failbackSequence;
} |