language | func_code_string |
---|---|
python | def insert_volume(self, metadata, attachments=[]):
'''Insert a new volume
Returns the ID of the added volume
`metadata` must be a dict containing metadata of the volume::
{
"_language" : "it", # language of the metadata
"key1" : "value1", # attribute
"key2" : "value2",
...
"keyN" : "valueN"
}
The only required key is `_language`
`attachments` must be a list of dicts::
{
"file" : "/prova/una/path/a/caso", # path or fp
"name" : "nome_buffo.ext", # name of the file (extension included) [optional if a path was given]
"mime" : "application/json", # mime type of the file [optional]
"notes" : "this file is awesome" # notes that will be attached to this file [optional]
}
'''
log.debug("adding new volume:\n\tdata: {}\n\tfiles: {}".format(metadata, attachments))
requiredFields = ['_language']
for requiredField in requiredFields:
if requiredField not in metadata:
raise KeyError("Required field '{}' is missing".format(requiredField))
volume = deepcopy(metadata)
attsData = []
for index, a in enumerate(attachments):
try:
attData = self._assemble_attachment(a['file'], a)
attsData.append(attData)
except Exception:
log.exception("Error while elaborating attachments array at index: {}".format(index))
raise
volume['_attachments'] = attsData
log.debug('constructed volume for insertion: {}'.format(volume))
addedVolume = self._db.add_book(body=volume)
log.debug("added new volume: '{}'".format(addedVolume['_id']))
return addedVolume['_id'] |
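A minimal usage sketch for `insert_volume` above; the `library` object, metadata keys, and attachment path are hypothetical::

    # Hypothetical caller; assumes `library` exposes insert_volume() as defined above.
    metadata = {
        "_language": "en",          # the only required key
        "title": "Example Volume",  # arbitrary attribute
    }
    attachments = [
        {"file": "/tmp/example.json", "mime": "application/json",
         "notes": "sample attachment"},
    ]
    volume_id = library.insert_volume(metadata, attachments)
    print("added volume:", volume_id)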
python | def scroll_to_bottom(self):
"""
Scroll to the very bottom of the page
TODO: add increment & delay options to scroll slowly down the whole page to let each section load in
"""
if self.driver.selenium is not None:
try:
self.driver.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
except WebDriverException:
self.driver.selenium.execute_script("window.scrollTo(0, 50000);")
except Exception:
logger.exception("Unknown error scrolling page") |
java | public void toBeBetween(double lower, double upper) {
Arguments.ensureTrue(lower < upper, "upper has to be greater than lower");
boolean isBetween = this.value >= lower && this.value <= upper;
Expectations.expectTrue(isBetween, "Expected %s to be between %s and %s", this.value, lower, upper);
} |
java | protected void initWidget(Widget widget) {
// Validate. Make sure the widget is not being set twice.
if (m_widget != null) {
throw new IllegalStateException("Composite.initWidget() may only be " + "called once.");
}
// Use the contained widget's element as the composite's element,
// effectively merging them within the DOM.
setElement((Element)widget.getElement());
adopt(widget);
// Logical attach.
m_widget = widget;
} |
java | protected Node findNextWrite() {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "findNextWrite entry: on node " + this + "With status: " + status + "and positive ratio of: " + getPriorityRatioPositive());
}
if (status == NODE_STATUS.REQUESTING_WRITE) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "findNextWrite exit: node to write next is: " + this.toStringDetails());
}
return this;
} else {
// go through all dependents in order
for (int i = 0; i < dependents.size(); i++) {
Node n = dependents.get(i);
Node nextWrite = n.findNextWrite();
if (nextWrite != null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "findNextWrite exit: next write node found. stream-id: " + nextWrite.getStreamID() + " node hc: " + nextWrite.hashCode());
}
return nextWrite;
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "findNextWrite exit: null");
}
return null;
}
} |
java | public void register(Label label, List<StackSize> stackSizes) {
sizes.put(label, stackSizes);
} |
python | async def update_state(self, msg, _context):
"""Update the status of a service."""
name = msg.get('name')
status = msg.get('new_status')
await self.service_manager.update_state(name, status) |
java | public static AuthCallsIpAccessControlListMappingDeleter deleter(final String pathAccountSid,
final String pathDomainSid,
final String pathSid) {
return new AuthCallsIpAccessControlListMappingDeleter(pathAccountSid, pathDomainSid, pathSid);
} |
java | public Activation[] getActivations() {
final List<Activation> list = new ArrayList<Activation>();
for (InternalAgendaGroup group : this.agendaGroups.values()) {
for (Match activation : group.getActivations()) {
list.add((Activation) activation);
}
}
return list.toArray( new Activation[list.size()] );
} |
python | def convert_UCERFSource(self, node):
"""
Converts the Ucerf Source node into an SES Control object
"""
dirname = os.path.dirname(self.fname) # where the source_model_file is
source_file = os.path.join(dirname, node["filename"])
if "startDate" in node.attrib and "investigationTime" in node.attrib:
# Is a time-dependent model - even if rates were originally
# poissonian
# Verify that the source time span is the same as the TOM time span
inv_time = float(node["investigationTime"])
if inv_time != self.investigation_time:
raise ValueError("Source investigation time (%s) is not "
"equal to configuration investigation time "
"(%s)" % (inv_time, self.investigation_time))
start_date = datetime.strptime(node["startDate"], "%d/%m/%Y")
else:
start_date = None
return UCERFSource(
source_file,
self.investigation_time,
start_date,
float(node["minMag"]),
npd=self.convert_npdist(node),
hdd=self.convert_hpdist(node),
aspect=~node.ruptAspectRatio,
upper_seismogenic_depth=~node.pointGeometry.upperSeismoDepth,
lower_seismogenic_depth=~node.pointGeometry.lowerSeismoDepth,
msr=valid.SCALEREL[~node.magScaleRel](),
mesh_spacing=self.rupture_mesh_spacing,
trt=node["tectonicRegion"]) |
java | public void setPoint( double x, double y ) {
if (selection >= 0) {
Coordinate coordinate = new Coordinate(x, y);
pts.set(selection, coordinate);
}
} |
python | def setup_top_concepts(rdf, mark_top_concepts):
"""Determine the top concepts of each concept scheme and mark them using
hasTopConcept/topConceptOf."""
for cs in sorted(rdf.subjects(RDF.type, SKOS.ConceptScheme)):
for conc in sorted(rdf.subjects(SKOS.inScheme, cs)):
if (conc, RDF.type, SKOS.Concept) not in rdf:
continue # not a Concept, so can't be a top concept
# check whether it's a top concept
broader = rdf.value(conc, SKOS.broader, None, any=True)
if broader is None: # yes it is a top concept!
if (cs, SKOS.hasTopConcept, conc) not in rdf and \
(conc, SKOS.topConceptOf, cs) not in rdf:
if mark_top_concepts:
logging.info(
"Marking loose concept %s "
"as top concept of scheme %s", conc, cs)
rdf.add((cs, SKOS.hasTopConcept, conc))
rdf.add((conc, SKOS.topConceptOf, cs))
else:
logging.debug(
"Not marking loose concept %s as top concept "
"of scheme %s, as mark_top_concepts is disabled",
conc, cs) |
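A small self-contained check of `setup_top_concepts`, assuming the function above is in scope (plus `import logging`) and that rdflib is installed; the URIs are made up::

    import rdflib
    from rdflib.namespace import RDF, SKOS

    rdf = rdflib.Graph()
    cs = rdflib.URIRef("http://example.org/scheme")
    conc = rdflib.URIRef("http://example.org/concepts/a")
    rdf.add((cs, RDF.type, SKOS.ConceptScheme))
    rdf.add((conc, RDF.type, SKOS.Concept))
    rdf.add((conc, SKOS.inScheme, cs))

    setup_top_concepts(rdf, mark_top_concepts=True)
    print((cs, SKOS.hasTopConcept, conc) in rdf)  # True: the loose concept was marked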
java | public Duration minusDays(long daysToSubtract) {
return (daysToSubtract == Long.MIN_VALUE ? plusDays(Long.MAX_VALUE).plusDays(1) : plusDays(-daysToSubtract));
} |
java | public static nspbr6_stats[] get(nitro_service service, options option) throws Exception{
nspbr6_stats obj = new nspbr6_stats();
nspbr6_stats[] response = (nspbr6_stats[])obj.stat_resources(service,option);
return response;
} |
java | private boolean matches(String str, Pattern... patterns) {
// Check given string against all provided patterns
for (Pattern pattern : patterns) {
// Fail overall test if any pattern fails to match
Matcher matcher = pattern.matcher(str);
if (!matcher.find())
return false;
}
// All provided patterns matched
return true;
} |
python | def help(self, message, plugin=None):
"""help: the normal help you're reading."""
# help_data = self.load("help_files")
selected_modules = help_modules = self.load("help_modules")
self.say("Sure thing, %s." % message.sender.handle)
help_text = "Here's what I know how to do:"
if plugin and plugin in help_modules:
help_text = "Here's what I know how to do about %s:" % plugin
selected_modules = dict()
selected_modules[plugin] = help_modules[plugin]
for k in sorted(selected_modules, key=lambda x: x[0]):
help_data = selected_modules[k]
if help_data:
help_text += "<br/><br/><b>%s</b>:" % k
for line in help_data:
if line:
if ":" in line:
line = " <b>%s</b>%s" % (line[:line.find(":")], line[line.find(":"):])
help_text += "<br/> %s" % line
self.say(help_text, html=True) |
java | protected Assertion validateServiceTicket(final WebApplicationService service, final String serviceTicketId) {
return serviceValidateConfigurationContext.getCentralAuthenticationService().validateServiceTicket(serviceTicketId, service);
} |
java | public WorkflowTriggerInner get(String resourceGroupName, String workflowName, String triggerName) {
return getWithServiceResponseAsync(resourceGroupName, workflowName, triggerName).toBlocking().single().body();
} |
java | public void addColumn(FastTrackColumn column)
{
FastTrackField type = column.getType();
Object[] data = column.getData();
for (int index = 0; index < data.length; index++)
{
MapRow row = getRow(index);
row.getMap().put(type, data[index]);
}
} |
python | def cli(ctx, feature_id, organism="", sequence=""):
"""Delete a feature
Output:
A standard apollo feature dictionary ({"features": [{...}]})
"""
return ctx.gi.annotations.delete_feature(feature_id, organism=organism, sequence=sequence) |
python | def _(s: Influence, cutoff: float = 0.7) -> bool:
""" Returns true if both subj and obj are grounded to the UN ontology. """
return all(map(lambda c: is_well_grounded(c, cutoff), s.agent_list())) |
python | def main():
"""Sanitizes the loaded *.ipynb."""
with open(sys.argv[1], 'r') as nbfile:
notebook = json.load(nbfile)
# remove kernelspec (venvs)
try:
del notebook['metadata']['kernelspec']
except KeyError:
pass
# remove outputs and metadata, set execution counts to None
for cell in notebook['cells']:
try:
if cell['cell_type'] == 'code':
cell['outputs'] = []
cell['execution_count'] = None
cell['metadata'] = {}
except KeyError:
pass
with open(sys.argv[1], 'w') as nbfile:
json.dump(notebook, nbfile, indent=1) |
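The same cleanup rules can be exercised in memory without touching disk; this toy notebook dict is made up::

    notebook = {
        "metadata": {"kernelspec": {"name": "venv"}},
        "cells": [{"cell_type": "code", "outputs": ["stale"],
                   "execution_count": 7, "metadata": {"scrolled": True}}],
    }
    notebook["metadata"].pop("kernelspec", None)
    for cell in notebook["cells"]:
        if cell.get("cell_type") == "code":
            cell.update(outputs=[], execution_count=None, metadata={})
    print(notebook)  # kernelspec removed; outputs, execution_count, metadata reset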
python | def search_texts(args, parser):
"""Searches texts for presence of n-grams."""
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
catalogue = utils.get_catalogue(args)
store.validate(corpus, catalogue)
ngrams = []
for ngram_file in args.ngrams:
ngrams.extend(utils.get_ngrams(ngram_file))
store.search(catalogue, ngrams, sys.stdout) |
java | Symbol findGlobalType(Env<AttrContext> env, Scope scope, Name name, RecoveryLoadClass recoveryLoadClass) {
Symbol bestSoFar = typeNotFound;
for (Symbol s : scope.getSymbolsByName(name)) {
Symbol sym = loadClass(env, s.flatName(), recoveryLoadClass);
if (bestSoFar.kind == TYP && sym.kind == TYP &&
bestSoFar != sym)
return new AmbiguityError(bestSoFar, sym);
else
bestSoFar = bestOf(bestSoFar, sym);
}
return bestSoFar;
} |
python | def _poll_once(self, timeout_ms, max_records):
"""Do one round of polling. In addition to checking for new data, this does
any needed heart-beating, auto-commits, and offset updates.
Arguments:
timeout_ms (int): The maximum time in milliseconds to block.
max_records (int): The maximum number of records to return.
Returns:
dict: Map of topic to list of records (may be empty).
"""
self._coordinator.poll()
# Fetch positions if we have partitions we're subscribed to that we
# don't know the offset for
if not self._subscription.has_all_fetch_positions():
self._update_fetch_positions(self._subscription.missing_fetch_positions())
# If data is available already, e.g. from a previous network client
# poll() call to commit, then just return it immediately
records, partial = self._fetcher.fetched_records(max_records)
if records:
# Before returning the fetched records, we can send off the
# next round of fetches and avoid block waiting for their
# responses to enable pipelining while the user is handling the
# fetched records.
if not partial:
self._fetcher.send_fetches()
return records
# Send any new fetches (won't resend pending fetches)
self._fetcher.send_fetches()
timeout_ms = min(timeout_ms, self._coordinator.time_to_next_poll() * 1000)
self._client.poll(timeout_ms=timeout_ms)
# after the long poll, we should check whether the group needs to rebalance
# prior to returning data so that the group can stabilize faster
if self._coordinator.need_rejoin():
return {}
records, _ = self._fetcher.fetched_records(max_records)
return records |
python | def sample_from_posterior(self, A: pd.DataFrame) -> None:
""" Run Bayesian inference - sample from the posterior distribution."""
self.sample_from_proposal(A)
self.set_latent_state_sequence(A)
self.update_log_prior(A)
self.update_log_likelihood()
candidate_log_joint_probability = self.log_prior + self.log_likelihood
delta_log_joint_probability = (
candidate_log_joint_probability - self.log_joint_probability
)
acceptance_probability = min(1, np.exp(delta_log_joint_probability))
if acceptance_probability > np.random.rand():
self.update_log_joint_probability()
else:
A[f"∂({self.source})/∂t"][self.target] = self.original_value
self.set_latent_state_sequence(A)
self.update_log_likelihood()
self.update_log_prior(A)
self.update_log_joint_probability() |
java | public Observable<EncryptionProtectorInner> beginCreateOrUpdateAsync(String resourceGroupName, String serverName, EncryptionProtectorInner parameters) {
return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, parameters).map(new Func1<ServiceResponse<EncryptionProtectorInner>, EncryptionProtectorInner>() {
@Override
public EncryptionProtectorInner call(ServiceResponse<EncryptionProtectorInner> response) {
return response.body();
}
});
} |
java | public static boolean check(Class<?> clz) {
if (Config.CACHE_SEEN_CLASSES_V) {
int index = hash(clz);
if (CACHE[index] == clz) {
return true;
}
CACHE[index] = clz;
}
return false;
} |
python | def ids(self):
""" Returns set with all todo IDs. """
if config().identifiers() == 'text':
ids = self._id_todo_map.keys()
else:
ids = [str(i + 1) for i in range(self.count())]
return set(ids) |
python | def with_binaries(self, *args, **kw):
"""Add binaries tagged to this artifact.
For example: ::
provides = setup_py(
name = 'my_library',
zip_safe = True
).with_binaries(
my_command = ':my_library_bin'
)
This adds a console_script entry_point for the python_binary target
pointed at by :my_library_bin. Currently only supports
python_binaries that specify entry_point explicitly instead of source.
Also can take a dictionary, e.g.
with_binaries({'my-command': ':my_library_bin'})
"""
for arg in args:
if isinstance(arg, dict):
self._binaries.update(arg)
self._binaries.update(kw)
return self |
python | def _apply_udfs(self, record, hist, udf_type):
"""
Execute user-defined functions (UDFs); the user-defined functionality
applies custom transformations to the data.
:param dict record: dictionary of values to validate
:param dict hist: existing input of history values
"""
def function_executor(func, *args):
"""
Execute a user-defined function
:param func: function object
:param args: function arguments
"""
result, result_hist = func(*args)
return result, result_hist
if udf_type in self.udfs:
cust_function_od_obj = collections.OrderedDict(
sorted(
self.udfs[udf_type].items()
)
)
for cust_function in cust_function_od_obj:
record, hist = function_executor(
cust_function_od_obj[cust_function],
record,
hist
)
return record, hist |
java | @Override
public void watch(DatabaseWatch watch,
Result<Cancel> result,
Object... args)
{
TableKelp tableKelp = _table.getTableKelp();
RowCursor minCursor = tableKelp.cursor();
RowCursor maxCursor = tableKelp.cursor();
minCursor.clear();
maxCursor.setKeyMax();
_whereKraken.fillMinCursor(minCursor, args);
_whereKraken.fillMaxCursor(maxCursor, args);
//QueryKelp whereKelp = _whereExpr.bind(args);
// XXX: binding should be with unique
//EnvKelp whereKelp = new EnvKelp(_whereKelp, args);
//tableKelp.findOne(minCursor, maxCursor, whereKelp,
// new FindDeleteResult(result));
_table.addWatch(watch, minCursor.getKey(), result);
// result.completed(null);
} |
python | def id2word(self, xs):
"""Map id(s) to word(s)
Parameters
----------
xs : int
id or a list of ids
Returns
-------
str or list
word or a list of words
"""
if isinstance(xs, list):
return [self._id2word[x] for x in xs]
return self._id2word[xs] |
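A minimal vocabulary class with the same `id2word` contract, shown here as a hypothetical sketch::

    class Vocab:
        def __init__(self, words):
            self._id2word = dict(enumerate(words))

        def id2word(self, xs):
            if isinstance(xs, list):
                return [self._id2word[x] for x in xs]
            return self._id2word[xs]

    v = Vocab(["the", "cat", "sat"])
    assert v.id2word(1) == "cat"
    assert v.id2word([0, 2]) == ["the", "sat"]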
python | def run_command(self, stream=sys.stdout, dry_run=False):
"""Runs the command for this link. This method can be overridden by
sub-classes to invoke a different command
Parameters
-----------
stream : `file`
Must have 'write' function
dry_run : bool
Print command but do not run it
"""
return run_gtapp(self.__app, stream, dry_run, **self.args) |
java | protected Apikey createFromMapInternal(@Nullable String user, long start, long duration, @Nullable Arr roles, Map<String, Object> nameAndValMap) {
return createFromMapWithFingerprint(user, start, duration, roles, nameAndValMap, randomFingerprint());
} |
java | public long readNBit(int n) throws IOException {
if (n > 64)
throw new IllegalArgumentException("Can not readByte more then 64 bit");
long val = 0;
for (int i = 0; i < n; i++) {
val <<= 1;
val |= read1Bit();
}
return val;
} |
python | def get_arthur_params_from_url(cls, url):
""" Get the arthur params given a URL for the data source """
# The org and the repository are assumed to be the last two path
# segments of the URL
parts = url.split('/')
params = {"owner": parts[-2], "repository": parts[-1]}
return params |
python | def query(self, query):
"""Q.query(query string) -> category string -- return the matched
category for any user query
"""
self.query = query
self.process_query()
matching_corpus_index = self.match_query_to_corpus()
return self.category_list[matching_corpus_index].strip() |
java | public SemanticVersion getNextVersion() {
int major = head.getMajorVersion();
int minor = head.getMinorVersion();
int patch = head.getPatchVersion();
return new SemanticVersion(major, minor, patch + 1);
} |
python | def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not isinstance(value, six.string_types):
raise tldap.exceptions.ValidationError("should be a string") |
python | async def cursor(self) -> Cursor:
"""Create an aiosqlite cursor wrapping a sqlite3 cursor object."""
return Cursor(self, await self._execute(self._conn.cursor)) |
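This wrapper presumably comes from the aiosqlite package; a short usage sketch against an in-memory database::

    import asyncio
    import aiosqlite

    async def main():
        async with aiosqlite.connect(":memory:") as db:
            cur = await db.cursor()
            await cur.execute("SELECT 1")
            print(await cur.fetchone())  # (1,)

    asyncio.run(main())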
java | @SuppressWarnings("WeakerAccess")
public ApiFuture<Instance> createInstanceAsync(CreateInstanceRequest request) {
return ApiFutures.transform(
stub.createInstanceOperationCallable().futureCall(request.toProto(projectId)),
new ApiFunction<com.google.bigtable.admin.v2.Instance, Instance>() {
@Override
public Instance apply(com.google.bigtable.admin.v2.Instance proto) {
return Instance.fromProto(proto);
}
},
MoreExecutors.directExecutor());
} |
java | protected void set(double values[][])
{
this.nRows = values.length;
this.nCols = values[0].length;
this.values = values;
for (int r = 1; r < nRows; ++r) {
nCols = Math.min(nCols, values[r].length);
}
} |
java | public void setBaselineWork(int baselineNumber, Duration value)
{
set(selectField(ResourceFieldLists.BASELINE_WORKS, baselineNumber), value);
} |
java | protected void generateMemberAppender(MemberDescription description) {
if (description.isTopElement()) {
return;
}
final TypeReference appender = description.getElementDescription().getAppenderType();
final String generatedFieldAccessor = getGeneratedMemberAccessor(description);
final StringConcatenationClient content = new StringConcatenationClient() {
@Override
protected void appendTo(TargetStringConcatenation it) {
it.append("/** Source appender of a " + getLanguageName() //$NON-NLS-1$
+ " " + description.getElementDescription().getName() + "."); //$NON-NLS-1$ //$NON-NLS-2$
it.newLine();
it.append(" */"); //$NON-NLS-1$
it.newLine();
it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
it.newLine();
it.append("public class "); //$NON-NLS-1$
it.append(appender.getSimpleName());
it.append(" extends "); //$NON-NLS-1$
it.append(getCodeElementExtractor().getAbstractAppenderImpl());
it.append(" implements "); //$NON-NLS-1$
it.append(description.getElementDescription().getBuilderInterfaceType());
it.append(" {"); //$NON-NLS-1$
it.newLineIfNotEmpty();
it.newLine();
it.append(generateAppenderMembers(appender.getSimpleName(),
description.getElementDescription().getBuilderInterfaceType(), generatedFieldAccessor));
it.append(generateMembers(description, false, true));
it.append("}"); //$NON-NLS-1$
it.newLineIfNotEmpty();
it.newLine();
}
};
final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(appender, content);
javaFile.writeTo(getSrcGen());
} |
python | def getWorkingStandingZeroPoseToRawTrackingPose(self):
"""Returns the standing origin from the working copy."""
fn = self.function_table.getWorkingStandingZeroPoseToRawTrackingPose
pmatStandingZeroPoseToRawTrackingPose = HmdMatrix34_t()
result = fn(byref(pmatStandingZeroPoseToRawTrackingPose))
return result, pmatStandingZeroPoseToRawTrackingPose |
java | public int sizeOfIn(String expr, Map<String, Object> map) {
int result;
Object val = getValue(map, expr);
if (val instanceof Map) {
result = ((Map) val).size();
} else if (val instanceof Collection) {
result = ((Collection) val).size();
} else {
throw new SlimFixtureException(false, expr + " is not a collection");
}
return result;
} |
python | def get_associated_profile_names(profile_path, result_role, org_vm, server,
include_classnames=False):
"""
Get the Associated profiles and return the string names (org:name:version)
for each profile as a list.
"""
insts = get_associated_profiles(profile_path, result_role, server)
names = []
for inst in insts:
if include_classnames:
names.append("(%s)%s" % (inst.classname,
profile_name(org_vm, inst)))
else:
names.append(profile_name(org_vm, inst))
return names |
python | def create_from_remote_file(self, group, snapshot=True, **args):
"""
Creates from remote GAF
"""
import requests
url = "http://snapshot.geneontology.org/annotations/{}.gaf.gz".format(group)
r = requests.get(url, stream=True, headers={'User-Agent': get_user_agent(modules=[requests], caller_name=__name__)})
p = GafParser()
results = p.skim(r.raw)
return self.create_from_tuples(results, **args) |
java | public ApiResponse<MoonResponse> getUniverseMoonsMoonIdWithHttpInfo(Integer moonId, String datasource,
String ifNoneMatch) throws ApiException {
com.squareup.okhttp.Call call = getUniverseMoonsMoonIdValidateBeforeCall(moonId, datasource, ifNoneMatch, null);
Type localVarReturnType = new TypeToken<MoonResponse>() {
}.getType();
return apiClient.execute(call, localVarReturnType);
} |
java | @Override
public DurationFormatterFactory setLocale(String localeName) {
if (!localeName.equals(this.localeName)) {
this.localeName = localeName;
if (builder != null) {
builder = builder.withLocale(localeName);
}
if (formatter != null) {
formatter = formatter.withLocale(localeName);
}
reset();
}
return this;
} |
java | @Override
public String getValue(String propertyName) {
String theValue = null;
String theModifiedPropertyName = null;
// First pass
theValue = super.getValue(propertyName);
// Second pass, replace Non-Alphanumeric characters
if (theValue == null) {
theModifiedPropertyName = replaceNonAlpha(propertyName);
theValue = super.getValue(theModifiedPropertyName);
}
// Third pass, convert to upper case and search
if (theValue == null && theModifiedPropertyName != null) {
theModifiedPropertyName = theModifiedPropertyName.toUpperCase();
theValue = super.getValue(theModifiedPropertyName);
}
return theValue;
} |
java | public long writeFrom(Readable readable) throws IOException {
checkNotNull(readable);
Closer closer = Closer.create();
try {
Writer out = closer.register(openStream());
long written = CharStreams.copy(readable, out);
out.flush(); // https://code.google.com/p/guava-libraries/issues/detail?id=1330
return written;
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
} |
java | public static Field[] getFields(Class<?> clazz, String... fieldNames) {
final List<Field> fields = new LinkedList<Field>();
for (Field field : getAllFields(clazz)) {
for (String fieldName : fieldNames) {
if (field.getName().equals(fieldName)) {
fields.add(field);
}
}
}
final Field[] fieldArray = fields.toArray(new Field[fields.size()]);
if (fieldArray.length == 0) {
throw new FieldNotFoundException(String.format(
"No fields matching the name(s) %s were found in the class hierarchy of %s.",
concatenateStrings(fieldNames), getType(clazz)));
}
return fieldArray;
} |
python | def _add_width_of(self, other_tc):
"""
Add the width of *other_tc* to this cell. Does nothing if either this
tc or *other_tc* does not have a specified width.
"""
if self.width and other_tc.width:
self.width += other_tc.width |
java | protected int maxDepth(Layout.Node node) {
int depth = 0;
for(int i = 0; i < node.numChildren(); i++) {
depth = Math.max(depth, maxDepth(node.getChild(i)));
}
return depth + 1;
} |
java | public void setHorizontalAlignment(final HorizontalAlignment align) {
if(cell.getCellStyle().getAlignmentEnum().equals(align)) {
// The horizontal alignment is already set to this value
return;
}
cloneStyle();
cell.getCellStyle().setAlignment(align);
} |
python | def _start_plasma_store(plasma_store_memory,
use_valgrind=False,
use_profiler=False,
stdout_file=None,
stderr_file=None,
plasma_directory=None,
huge_pages=False,
socket_name=None):
"""Start a plasma store process.
Args:
plasma_store_memory (int): The amount of memory in bytes to start the
plasma store with.
use_valgrind (bool): True if the plasma store should be started inside
of valgrind. If this is True, use_profiler must be False.
use_profiler (bool): True if the plasma store should be started inside
a profiler. If this is True, use_valgrind must be False.
stdout_file: A file handle opened for writing to redirect stdout to. If
no redirection should happen, then this should be None.
stderr_file: A file handle opened for writing to redirect stderr to. If
no redirection should happen, then this should be None.
plasma_directory: A directory where the Plasma memory mapped files will
be created.
huge_pages: a boolean flag indicating whether to start the
Object Store with hugetlbfs support. Requires plasma_directory.
socket_name (str): If provided, it will specify the socket
name used by the plasma store.
Return:
A tuple of the name of the plasma store socket and ProcessInfo for the
plasma store process.
"""
if use_valgrind and use_profiler:
raise Exception("Cannot use valgrind and profiler at the same time.")
if huge_pages and not (sys.platform == "linux"
or sys.platform == "linux2"):
raise Exception("The huge_pages argument is only supported on "
"Linux.")
if huge_pages and plasma_directory is None:
raise Exception("If huge_pages is True, then the "
"plasma_directory argument must be provided.")
if not isinstance(plasma_store_memory, int):
raise Exception("plasma_store_memory should be an integer.")
command = [
PLASMA_STORE_EXECUTABLE, "-s", socket_name, "-m",
str(plasma_store_memory)
]
if plasma_directory is not None:
command += ["-d", plasma_directory]
if huge_pages:
command += ["-h"]
process_info = start_ray_process(
command,
ray_constants.PROCESS_TYPE_PLASMA_STORE,
use_valgrind=use_valgrind,
use_valgrind_profiler=use_profiler,
stdout_file=stdout_file,
stderr_file=stderr_file)
return process_info |
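A hypothetical invocation; it assumes Ray's private helpers above (and constants such as PLASMA_STORE_EXECUTABLE) are importable, so treat it as a sketch rather than a public API::

    # Start a 1 GB object store on an explicit socket and keep the process handle.
    process_info = _start_plasma_store(
        plasma_store_memory=10**9,
        socket_name="/tmp/plasma",
    )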
java | static int readRawVarint32(final InputStream input) throws IOException
{
final int firstByte = input.read();
if (firstByte == -1)
{
throw ProtobufException.truncatedMessage();
}
if ((firstByte & 0x80) == 0)
{
return firstByte;
}
return readRawVarint32(input, firstByte);
} |
java | public static boolean isFloat(String s) {
if (isEmpty(s)) return defaultEmptyOK;
boolean seenDecimalPoint = false;
if (s.startsWith(decimalPointDelimiter)) return false;
// Search through string's characters one by one
// until we find a non-numeric character.
// When we do, return false; if we don't, return true.
for (int i = 0; i < s.length(); i++) {
// Check that current character is number.
char c = s.charAt(i);
if (c == decimalPointDelimiter.charAt(0)) {
if (!seenDecimalPoint)
seenDecimalPoint = true;
else
return false;
} else {
if (!isDigit(c)) return false;
}
}
// All characters are numbers.
return true;
} |
java | public long count(List<Predicate> predicates) {
// Criteria Builder
CriteriaBuilder criteriaBuilder = getEntityManager().getCriteriaBuilder();
// Criteria query
CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
// Build the root
Root<T> root = criteriaQuery.from(entityClass);
// Set the alias
root.alias(ROOT_ALIAS);
// Select the count of the root
criteriaQuery.select(criteriaBuilder.count(root));
// Add the predicates
addPredicates(criteriaBuilder, root, criteriaQuery, predicates);
// Build the query from the criteria
TypedQuery<Long> query = getEntityManager().createQuery(criteriaQuery);
// Return the result
return query.getSingleResult();
} |
python | def preprocess_data(Xs_raw):
'''Translate the center of mass (COM) of the data to the origin.
Return the processed data and the shift of the COM'''
n = len(Xs_raw)
Xs_raw_mean = sum(X for X in Xs_raw) / n
return [X - Xs_raw_mean for X in Xs_raw], Xs_raw_mean |
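A quick NumPy check of the COM shift, assuming `preprocess_data` above is in scope: the processed points are centered at the origin::

    import numpy as np

    Xs_raw = [np.array([1.0, 2.0]), np.array([3.0, 4.0])]
    Xs, shift = preprocess_data(Xs_raw)
    print(shift)              # [2. 3.]
    print(sum(Xs) / len(Xs))  # [0. 0.]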
java | public CertificateDeleteHeaders withLastModified(DateTime lastModified) {
if (lastModified == null) {
this.lastModified = null;
} else {
this.lastModified = new DateTimeRfc1123(lastModified);
}
return this;
} |
python | def _check_1st_line(line, **kwargs):
"""First line check.
Check that the first line has a known component name followed by a colon
and then a short description of the commit.
:param line: first line
:type line: str
:param components: list of known component names
:type components: list
:param max_first_line: maximum length of the first line
:type max_first_line: int
:return: errors as in (code, line number, *args)
:rtype: list
"""
components = kwargs.get("components", ())
max_first_line = kwargs.get("max_first_line", 50)
errors = []
lineno = 1
if len(line) > max_first_line:
errors.append(("M190", lineno, max_first_line, len(line)))
if line.endswith("."):
errors.append(("M191", lineno))
if ':' not in line:
errors.append(("M110", lineno))
else:
component, msg = line.split(':', 1)
if component not in components:
errors.append(("M111", lineno, component))
return errors |
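Assuming `_check_1st_line` above is importable, a passing first line produces no error tuples; the component names here are made up::

    errors = _check_1st_line(
        "installer: add retry logic for flaky mirrors",
        components=("installer", "docs"),
        max_first_line=50,
    )
    print(errors)  # [] -> first line passes all checks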
python | async def get_async(self, type_name, **parameters):
"""Gets entities asynchronously using the API. Shortcut for using async_call() with the 'Get' method.
:param type_name: The type of entity.
:param parameters: Additional parameters to send.
:return: The JSON result (decoded into a dict) from the server.
:raise MyGeotabException: Raises when an exception occurs on the MyGeotab server.
:raise TimeoutException: Raises when the request does not respond after some time.
"""
if parameters:
results_limit = parameters.get('resultsLimit', None)
if results_limit is not None:
del parameters['resultsLimit']
if 'search' in parameters:
parameters.update(parameters['search'])
parameters = dict(search=parameters, resultsLimit=results_limit)
return await self.call_async('Get', type_name=type_name, **parameters) |
java | public static String getIdString(Object object) {
if (object == null) {
throw new NullPointerException();
}
return String.valueOf(getId(object));
} |
python | def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
if name and name[0] != '<' and name[-1] != '>':
return os.path.basename(name) |
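Assuming `guess_filename` above is in scope (plus `import os`), file objects with a real `name` yield a basename and nameless streams fall through to None::

    import io

    with open("example.txt", "w") as f:
        print(guess_filename(f))          # 'example.txt'
    print(guess_filename(io.BytesIO()))   # None: no usable .name attribute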
python | def generate_overlay_urls(self):
"""Return dict with overlay/URL pairs for the dataset overlays."""
overlays = {}
for o in self.dataset.list_overlay_names():
url = self.generate_url(".dtool/overlays/{}.json".format(o))
overlays[o] = url
return overlays |
python | def access_token(self, request_token, request_secret):
"""Returns access_token, access_secret"""
logging.debug("Getting access token from %s:%d",
self.server, self.port)
self.access_token, self.access_secret = \
self._token("/oauth/accessToken", request_token, request_secret)
return self.access_token, self.access_secret |
java | public static int cusparseXbsrsm2_zeroPivot(
cusparseHandle handle,
bsrsm2Info info,
Pointer position)
{
return checkResult(cusparseXbsrsm2_zeroPivotNative(handle, info, position));
} |
java | private String computeSetterName(String name)
{
StringBuilder _result = new StringBuilder().append(PREFIX__SETTER);
_result.append(Character.toUpperCase(name.charAt(0))).append(name.substring(1));
return _result.toString();
} |
python | def find(self, query):
'''Passes the query to the upstream, if it exists'''
if self.upstream:
return self.upstream.find(query)
else:
return False |
java | public static boolean isQueueEntry(byte[] queueRowPrefix, KeyValue keyValue) {
return isQueueEntry(queueRowPrefix, keyValue.getBuffer(), keyValue.getRowOffset(), keyValue.getRowLength());
} |
python | def getWindowPID(self, hwnd):
""" Gets the process ID that the specified window belongs to """
pid = ctypes.c_ulong()
ctypes.windll.user32.GetWindowThreadProcessId(hwnd, ctypes.byref(pid))
return int(pid.value) |
python | def edges(self, tail_head_iter):
"""Create a bunch of edges.
Args:
tail_head_iter: Iterable of ``(tail_name, head_name)`` pairs.
"""
edge = self._edge_plain
quote = self._quote_edge
lines = (edge % (quote(t), quote(h)) for t, h in tail_head_iter)
self.body.extend(lines) |
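This method appears to be from the `graphviz` package, where it lives on Graph/Digraph; a minimal sketch of its effect::

    import graphviz

    g = graphviz.Digraph()
    g.edges([("a", "b"), ("b", "c")])
    print(g.source)  # digraph { a -> b  b -> c } rendered across lines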
python | def lemmas(self):
"""Returns the synset's lemmas/variants' literal represantions.
Returns
-------
list of Lemmas
List of its variations' literals as Lemma objects.
"""
return [lemma("%s.%s"%(self.name,variant.literal)) for variant in self._raw_synset.variants] |
java | @GwtIncompatible("Unnecessary")
private Writer fileNameToOutputWriter2(String fileName) throws IOException {
if (fileName == null) {
return null;
}
if (isInTestMode()) {
return new StringWriter();
}
return streamToOutputWriter2(filenameToOutputStream(fileName));
} |
java | @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "multiPointDomain", substitutionHeadNamespace = "http://www.opengis.net/gml", substitutionHeadName = "domainSet")
public JAXBElement<MultiPointDomainType> createMultiPointDomain(MultiPointDomainType value) {
return new JAXBElement<MultiPointDomainType>(_MultiPointDomain_QNAME, MultiPointDomainType.class, null, value);
} |
python | def index(environment, start_response, headers):
"""
Return the status of this Kronos instance + its backends.
Doesn't expect any URL parameters.
"""
response = {'service': 'kronosd',
'version': kronos.__version__,
'id': settings.node['id'],
'storage': {},
SUCCESS_FIELD: True}
# Check if each backend is alive
for name, backend in router.get_backends():
response['storage'][name] = {'alive': backend.is_alive(),
'backend': settings.storage[name]['backend']}
start_response('200 OK', headers)
return response |
python | def _build_voronoi_polygons(df):
"""
Given a GeoDataFrame of point geometries, build Voronoi simplexes for the given
points in the given space and return them.
Voronoi simplexes which are located on the edges of the graph may extend into infinity in some direction. In
other words, the set of points nearest the given point does not necessarily have to be a closed polygon. We force
these non-hermetic spaces into polygons using a subroutine.
Parameters
----------
df : GeoDataFrame instance
The `GeoDataFrame` of points being partitioned.
Returns
-------
polygons : list of shapely.geometry.Polygon objects
The Voronoi polygon output.
"""
from scipy.spatial import Voronoi
geom = np.array(df.geometry.map(lambda p: [p.x, p.y]).tolist())
vor = Voronoi(geom)
polygons = []
for idx_point, _ in enumerate(vor.points):
idx_point_region = vor.point_region[idx_point]
idxs_vertices = np.array(vor.regions[idx_point_region])
is_finite = not np.any(idxs_vertices == -1)
if is_finite:
# Easy case, the region is closed. Make a polygon out of the Voronoi ridge points.
idx_point_region = vor.point_region[idx_point]
idxs_vertices = np.array(vor.regions[idx_point_region])
region_vertices = vor.vertices[idxs_vertices]
region_poly = shapely.geometry.Polygon(region_vertices)
polygons.append(region_poly)
else:
# Hard case, the region is open. Project new edges out to the margins of the plot.
# See `scipy.spatial.voronoi_plot_2d` for the source of this calculation.
point_idx_ridges_idx = np.where((vor.ridge_points == idx_point).any(axis=1))[0]
# TODO: why does this happen?
if len(point_idx_ridges_idx) == 0:
continue
ptp_bound = vor.points.ptp(axis=0)
center = vor.points.mean(axis=0)
finite_segments = []
infinite_segments = []
pointwise_ridge_points = vor.ridge_points[point_idx_ridges_idx]
pointwise_ridge_vertices = np.asarray(vor.ridge_vertices)[point_idx_ridges_idx]
for pointidx, simplex in zip(pointwise_ridge_points, pointwise_ridge_vertices):
simplex = np.asarray(simplex)
if np.all(simplex >= 0):
finite_segments.append(vor.vertices[simplex])
else:
i = simplex[simplex >= 0][0] # finite end Voronoi vertex
t = vor.points[pointidx[1]] - vor.points[pointidx[0]] # tangent
t /= np.linalg.norm(t)
n = np.array([-t[1], t[0]]) # normal
midpoint = vor.points[pointidx].mean(axis=0)
direction = np.sign(np.dot(midpoint - center, n)) * n
far_point = vor.vertices[i] + direction * ptp_bound.max()
infinite_segments.append(np.asarray([vor.vertices[i], far_point]))
finite_segments = finite_segments if finite_segments else np.zeros(shape=(0,2,2))
ls = np.vstack([np.asarray(infinite_segments), np.asarray(finite_segments)])
# We have to trivially sort the line segments into polygonal order. The algorithm that follows is
# inefficient, being O(n^2), but "good enough" for this use-case.
ls_sorted = []
while len(ls_sorted) < len(ls):
l1 = ls[0] if len(ls_sorted) == 0 else ls_sorted[-1]
matches = []
for l2 in [l for l in ls if not (l == l1).all()]:
if np.any(l1 == l2):
matches.append(l2)
elif np.any(l1 == l2[::-1]):
l2 = l2[::-1]
matches.append(l2)
if len(ls_sorted) == 0:
ls_sorted.append(l1)
for match in matches:
# in list syntax this would be "if match not in ls_sorted"
# in numpy things are more complicated...
if not any((match == ls_sort).all() for ls_sort in ls_sorted):
ls_sorted.append(match)
break
# Build and return the final polygon.
polyline = np.vstack(ls_sorted)
geom = shapely.geometry.Polygon(polyline).convex_hull
polygons.append(geom)
return polygons |
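A toy illustration of the closed/open split the routine above handles, using only SciPy: an index of -1 in a region's vertex list marks an open (infinite) Voronoi cell::

    import numpy as np
    from scipy.spatial import Voronoi

    points = np.array([[0, 0], [1, 0], [0, 1], [1, 1], [0.5, 0.5]])
    vor = Voronoi(points)
    for idx, region_idx in enumerate(vor.point_region):
        region = vor.regions[region_idx]
        print(points[idx], "open" if -1 in region else "closed")
    # Only the center point [0.5 0.5] gets a closed cell; the corners are open.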
python | def _section_from_spec(elf_file, spec):
'''
Retrieve a section given a "spec" (either number or name).
Return None if no such section exists in the file.
'''
if isinstance(spec, int):
num = int(spec)
if num < elf_file.num_sections():
return elf_file.get_section(num)
# Not a number. Must be a name then
if isinstance(spec, str):
try:
return elf_file.get_section_by_name(spec)
except AttributeError:
return None |
java | private List<String> parseComment(List<String> content, String propertyName) {
List<String> comments = new ArrayList<>();
String commentEnd = content.get(annotationIndex - 1).trim();
if (commentEnd.equals("*/")) {
int startCommentIndex = -1;
for (int index = annotationIndex - 1; index >= 0; index--) {
String line = content.get(index).trim();
if (line.equals("/**")) {
startCommentIndex = index + 1;
break;
}
}
if (startCommentIndex == -1) {
throw new BeanCodeGenException("Unable to locate comment start at line " + annotationIndex, beanParser.getFile(), annotationIndex);
}
if (startCommentIndex < annotationIndex - 1) {
for (int i = startCommentIndex; i < annotationIndex - 1; i++) {
String commentLine = content.get(i).trim();
if (commentLine.startsWith("*")) {
commentLine = commentLine.substring(1).trim();
}
if (commentLine.startsWith("@return") == false && commentLine.startsWith("@param") == false &&
commentLine.startsWith("@throws") == false && commentLine.startsWith("@exception") == false) {
comments.add(commentLine);
}
}
String firstLine = comments.get(0);
if (firstLine.length() > 0) {
comments.set(0, firstLine.substring(0, 1).toLowerCase(Locale.ENGLISH) + firstLine.substring(1));
} else {
comments.remove(0);
}
}
} else if (commentEnd.startsWith("/**") && commentEnd.endsWith("*/")) {
int startPos = commentEnd.indexOf("/**") + 3;
int endPos = commentEnd.lastIndexOf("*/");
String comment = commentEnd.substring(startPos, endPos).trim();
if (comment.length() > 0) {
comments.add(comment.substring(0, 1).toLowerCase(Locale.ENGLISH) + comment.substring(1));
}
}
if (comments.size() == 0) {
comments.add("the " + propertyName + ".");
}
return comments;
} |
java | private boolean isTypedTimeFullyLegal() {
if (mIs24HourMode) {
// For 24-hour mode, the time is legal if the hours and minutes are each legal. Note:
// getEnteredTime() will ONLY call isTypedTimeFullyLegal() when NOT in 24hour mode.
int[] values = getEnteredTime(null);
return (values[0] >= 0 && values[1] >= 0 && values[1] < 60);
} else {
// For AM/PM mode, the time is legal if it contains an AM or PM, as those can only be
// legally added at specific times based on the tree's algorithm.
return (mTypedTimes.contains(getAmOrPmKeyCode(AM)) ||
mTypedTimes.contains(getAmOrPmKeyCode(PM)));
}
} |
java | private void initInflightMessageStore() {
BTreeIndexFactory<String, StoredPublishEvent> indexFactory = new BTreeIndexFactory<String, StoredPublishEvent>();
indexFactory.setKeyCodec(StringCodec.INSTANCE);
m_inflightStore = (SortedIndex<String, StoredPublishEvent>) m_multiIndexFactory.openOrCreate("inflight", indexFactory);
} |
java | @Override
public Access authenticate() {
try {
JSONObject user = new JSONObject();
user.put("id", mUserId);
user.put("password", mPassword);
JSONObject password = new JSONObject();
password.put("user", user);
JSONArray methods = new JSONArray();
methods.add("password");
JSONObject identity = new JSONObject();
identity.put("methods", methods);
identity.put("password", password);
JSONObject project = new JSONObject();
project.put("id", mProjectId);
JSONObject scope = new JSONObject();
scope.put("project", project);
JSONObject auth = new JSONObject();
auth.put("identity", identity);
auth.put("scope", scope);
JSONObject requestBody = new JSONObject();
requestBody.put("auth", auth);
HttpURLConnection connection =
(HttpURLConnection) new URL(mAuthUrl).openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("Accept", "application/json");
connection.setRequestProperty("Content-Type", "application/json");
OutputStream output = connection.getOutputStream();
output.write(requestBody.toString().getBytes());
HttpStatusChecker.verifyCode(STATUS_CHECKERS, connection.getResponseCode());
final String res;
try (final BufferedReader bufReader =
new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
res = bufReader.readLine();
}
JSONParser parser = new JSONParser();
JSONObject jsonResponse = (JSONObject) parser.parse(res);
String token = connection.getHeaderField("X-Subject-Token");
PasswordScopeAccess access = new PasswordScopeAccess(jsonResponse, token,
mPrefferedRegion);
connection.disconnect();
return access;
} catch (IOException | ParseException e) {
LOG.error(e.getMessage());
throw new CommandException("Unable to execute the HTTP call or to convert the HTTP Response",
e);
}
} |
python | def post(self, request, *args, **kwargs):
""" Handles POST requests. """
self.init_attachment_cache()
# Stores a boolean indicating if we are considering a preview
self.preview = 'preview' in self.request.POST
# Initializes the forms
post_form_class = self.get_post_form_class()
post_form = self.get_post_form(post_form_class)
attachment_formset_class = self.get_attachment_formset_class()
attachment_formset = self.get_attachment_formset(attachment_formset_class)
self.attachment_preview = (
self.preview if attachment_formset and attachment_formset.is_valid() else None
)
post_form_valid = post_form.is_valid()
if (
(post_form_valid and attachment_formset is None) or
(post_form_valid and attachment_formset.is_valid())
):
return self.form_valid(post_form, attachment_formset)
else:
return self.form_invalid(post_form, attachment_formset) |
java | public static URI asURI(final String algorithm, final String value) {
try {
final String scheme = DIGEST_ALGORITHM.getScheme(algorithm);
return new URI(scheme, value, null);
} catch (final URISyntaxException unlikelyException) {
LOGGER.warn("Exception creating checksum URI: {}",
unlikelyException);
throw new RepositoryRuntimeException(unlikelyException);
}
} |
python | def median(arr):
"""median of the values, must have more than 0 entries.
:param arr: list of numbers
:type arr: number[] a number array
:return: median
:rtype: float
"""
if len(arr) == 0:
sys.stderr.write("ERROR: no content in array to take average\n")
sys.exit()
if len(arr) == 1: return arr[0]
quot = len(arr) // 2
rem = len(arr) % 2
if rem != 0:
return sorted(arr)[quot]
return float(sum(sorted(arr)[quot-1:quot+1]))/float(2) |
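Assuming `median` above is in scope (plus `import sys`), odd-length input returns the middle element and even-length input averages the two middle elements::

    print(median([5]))           # 5
    print(median([3, 1, 2]))     # 2
    print(median([4, 1, 3, 2]))  # 2.5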
java | public static authenticationnegotiatepolicy_binding get(nitro_service service, String name) throws Exception{
authenticationnegotiatepolicy_binding obj = new authenticationnegotiatepolicy_binding();
obj.set_name(name);
authenticationnegotiatepolicy_binding response = (authenticationnegotiatepolicy_binding) obj.get_resource(service);
return response;
} |
java | @Override
public List<CPInstance> getCPInstances(int start, int end) {
return cpInstancePersistence.findAll(start, end);
} |
python | def load_yamlf(fpath, encoding):
"""
:param unicode fpath:
:param unicode encoding:
:rtype: dict | list
"""
with codecs.open(fpath, encoding=encoding) as f:
return yaml.safe_load(f) |
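A hypothetical round-trip with a small UTF-8 YAML file, assuming `load_yamlf` above is in scope along with its `codecs` and `yaml` imports::

    import codecs

    with codecs.open("config.yml", "w", encoding="utf-8") as f:
        f.write("name: demo\nretries: 3\n")
    print(load_yamlf("config.yml", "utf-8"))  # {'name': 'demo', 'retries': 3}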
java | @Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case XtextPackage.ABSTRACT_NEGATED_TOKEN__TERMINAL:
setTerminal((AbstractElement)newValue);
return;
}
super.eSet(featureID, newValue);
} |
java | public static Set<String> getBuildFilesForAjdtFile( String ajdtBuildDefFile, File basedir )
throws MojoExecutionException
{
Set<String> result = new LinkedHashSet<String>();
Properties ajdtBuildProperties = new Properties();
try
{
ajdtBuildProperties.load( new FileInputStream( new File( basedir, ajdtBuildDefFile ) ) );
}
catch ( FileNotFoundException e )
{
throw new MojoExecutionException( "Build properties file specified not found", e );
}
catch ( IOException e )
{
throw new MojoExecutionException( "IO Error reading build properties file specified", e );
}
result.addAll( resolveIncludeExcludeString( ajdtBuildProperties.getProperty( "src.includes" ), basedir ) );
Set<String> exludes = resolveIncludeExcludeString( ajdtBuildProperties.getProperty( "src.excludes" ), basedir );
result.removeAll( exludes );
return result;
} |
python | def get_action_side_effects(self):
"""Returns all side effects for all batches of this
Executor used by the underlying Action.
"""
result = SCons.Util.UniqueList([])
for target in self.get_action_targets():
result.extend(target.side_effects)
return result |
python | def char(self, c: str) -> None:
"""Parse the specified character.
Args:
c: One-character string.
Raises:
EndOfInput: If past the end of `self.input`.
UnexpectedInput: If the next character is different from `c`.
"""
if self.peek() == c:
self.offset += 1
else:
raise UnexpectedInput(self, f"char '{c}'") |
java | public final ByteArrayOutputStream encode() throws IOException
{
final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
buffer.write(type);
buffer.write(clientId);
buffer.write(clientDestId);
encode(buffer);
return buffer;
} |
python | def del_value(self, keys, complete=False, on_projects=False,
on_globals=False, projectname=None, base='', dtype=None,
**kwargs):
"""
Delete a value in the configuration
Parameters
----------
keys: list of str
A list of keys to be deleted. %(get_value_note)s
%(ModelOrganizer.info.common_params)s
base: str
A base string that shall be put in front of each key in `keys` to
avoid typing it all the time
"""
config = self.info(complete=complete, on_projects=on_projects,
on_globals=on_globals, projectname=projectname,
return_dict=True, insert_id=False, **kwargs)
for key in keys:
if base:
key = base + key
key, sub_config = utils.go_through_dict(key, config)
del sub_config[key] |
java | public RadarSeries setLineStyle(BasicStroke basicStroke) {
stroke = basicStroke;
if (this.lineWidth > 0.0f) {
stroke =
new BasicStroke(
lineWidth,
this.stroke.getEndCap(),
this.stroke.getLineJoin(),
this.stroke.getMiterLimit(),
this.stroke.getDashArray(),
this.stroke.getDashPhase());
}
return this;
} |
java | public JobResponseInner getJob(String resourceGroupName, String resourceName, String jobId) {
return getJobWithServiceResponseAsync(resourceGroupName, resourceName, jobId).toBlocking().single().body();
} |
python | async def send_event(self, client_id, service_name, event_name, event_info, directed_client=None):
"""Send an event to a client."""
if directed_client is not None and directed_client != client_id:
return
client_info = self.clients.get(client_id)
if client_info is None:
self._logger.warning("Attempted to send event to invalid client id: %s", client_id)
return
conn = client_info['connection']
event = dict(service=service_name)
if event_info is not None:
event['payload'] = event_info
self._logger.debug("Sending event: %s", event)
await self.server.send_event(conn, event_name, event) |