| language (string, 2 classes) | func_code_string (string, 63–466k chars) |
|---|---|
java | // Builds a Diff by applying the encoded delta to text1, using default DiffOptions.
// Propagates IllegalArgumentException when the delta is malformed for text1.
public static Diff fromDelta(String text1, String delta)
throws IllegalArgumentException {
return new Diff(changesFromDelta(text1, delta), DiffOptions.defaults());
} |
def _find_best_fit(self, pbin):
    """Return the key of the rectangle in ``self._sorted_rect`` with the best
    (minimal) fitness for the given bin.

    Arguments:
        pbin (PackingAlgorithm): Packing bin used to score each rectangle.

    Returns:
        Key of the best-fitting rectangle, or ``None`` when no rectangle
        has a valid fitness.
    """
    scored = []
    for key, rect in self._sorted_rect.items():
        fitness = pbin.fitness(rect[0], rect[1])
        if fitness is not None:
            scored.append((fitness, key))
    if not scored:
        return None
    _, best_key = min(scored, key=self.first_item)
    return best_key
java | // Flags the entity as new and persists it via the persistence layer.
// @Indexable(REINDEX) causes the search index to be refreshed after the add.
@Indexable(type = IndexableType.REINDEX)
@Override
public CommerceAvailabilityEstimate addCommerceAvailabilityEstimate(
CommerceAvailabilityEstimate commerceAvailabilityEstimate) {
commerceAvailabilityEstimate.setNew(true);
return commerceAvailabilityEstimatePersistence.update(commerceAvailabilityEstimate);
} |
java | // Maps a parser level identifier to the def object currently being built for
// that level. Returns null when the level string is not recognized.
private DefBase getDefForLevel(String level)
{
if (LEVEL_CLASS.equals(level))
{
return _curClassDef;
}
else if (LEVEL_FIELD.equals(level))
{
return _curFieldDef;
}
else if (LEVEL_REFERENCE.equals(level))
{
return _curReferenceDef;
}
else if (LEVEL_COLLECTION.equals(level))
{
return _curCollectionDef;
}
else if (LEVEL_OBJECT_CACHE.equals(level))
{
return _curObjectCacheDef;
}
else if (LEVEL_INDEX_DESC.equals(level))
{
return _curIndexDescriptorDef;
}
else if (LEVEL_TABLE.equals(level))
{
return _curTableDef;
}
else if (LEVEL_COLUMN.equals(level))
{
return _curColumnDef;
}
else if (LEVEL_FOREIGNKEY.equals(level))
{
return _curForeignkeyDef;
}
else if (LEVEL_INDEX.equals(level))
{
return _curIndexDef;
}
else if (LEVEL_PROCEDURE.equals(level))
{
return _curProcedureDef;
}
else if (LEVEL_PROCEDURE_ARGUMENT.equals(level))
{
return _curProcedureArgumentDef;
}
else
{
// Unknown level: caller must handle the null.
return null;
}
} |
def guest_unpause(self, userid):
    """Unpause a virtual machine.

    :param str userid: the id of the virtual machine to be unpaused
    :returns: None
    """
    # Wrap the vmops call so any SDK base error is logged and re-raised
    # with a description of the attempted action.
    described_action = "unpause guest '%s'" % userid
    with zvmutils.log_and_reraise_sdkbase_error(described_action):
        self._vmops.guest_unpause(userid)
def _get_stack(self, orchestration_client, stack_name):
    """Get the ID for the current deployed overcloud stack if it exists."""
    # A missing stack is an expected condition (fresh deploy), so it is
    # reported at info level and None is returned.
    try:
        existing = orchestration_client.stacks.get(stack_name)
    except HTTPNotFound:
        self.log.info("No stack found, will be doing a stack create")
        return None
    self.log.info("Stack found, will be doing a stack update")
    return existing
def distinguish(self, as_made_by='mod', sticky=False):
    """Distinguish object as made by mod, admin or special.

    Distinguished objects have a different author color. With Reddit
    Enhancement Suite it is the background color that changes.

    `sticky` argument only used for top-level Comments.

    :returns: The json response from the server.
    """
    how = 'yes' if as_made_by == 'mod' else as_made_by
    payload = {'id': self.fullname, 'how': how}
    # Stickying is only meaningful for top-level comments.
    if isinstance(self, Comment) and self.is_root:
        payload['sticky'] = sticky
    endpoint = self.reddit_session.config['distinguish']
    return self.reddit_session.request_json(endpoint, data=payload)
def t_PARBREAK(self, token):
    # PLY reads the token rule's regex from the docstring. The original used
    # a `ur''` literal, which is a SyntaxError on Python 3; a plain raw
    # string keeps the pattern identical on both interpreters.
    r'\n{2,}'
    # The matched value is all newlines, so its length is exactly the number
    # of lines consumed; keep the lexer's line counter in sync.
    token.lexer.lineno += len(token.value)
    return token
python | def add_index(self, mode, blob_id, path):
"""
Add a new entry to the current index via ``git update-index --add``.

:param mode: file mode string for the entry (e.g. '100644')
:param blob_id: hash of the blob to register
:param path: repository-relative path of the entry
:return: None
"""
self.command_exec(['update-index', '--add', '--cacheinfo', mode, blob_id, path]) |
python | def _do_search(conf):
'''
Builds connection and search arguments, performs the LDAP search and
formats the results as a dictionary appropriate for pillar use.

:param conf: ext_pillar configuration block; must provide 'filter' and may
provide connection options (server, port, tls, binddn, bindpw, anonymous)
and search options (dn, scope, lists, attrs, dict_key_attr).
:raises SaltInvocationError: when 'filter' is missing from conf.
'''
# Build LDAP connection args
connargs = {}
for name in ['server', 'port', 'tls', 'binddn', 'bindpw', 'anonymous']:
connargs[name] = _config(name, conf)
# Explicit bind credentials override anonymous binding.
if connargs['binddn'] and connargs['bindpw']:
connargs['anonymous'] = False
# Build search args
try:
_filter = conf['filter']
except KeyError:
raise SaltInvocationError('missing filter')
_dn = _config('dn', conf)
scope = _config('scope', conf)
_lists = _config('lists', conf) or []
_attrs = _config('attrs', conf) or []
_dict_key_attr = _config('dict_key_attr', conf, 'dn')
attrs = _lists + _attrs + [_dict_key_attr]
if not attrs:
attrs = None
# Perform the search
try:
result = __salt__['ldap.search'](_filter, _dn, scope, attrs,
**connargs)['results']
except IndexError:  # we got no results for this search
log.debug('LDAP search returned no results for filter %s', _filter)
result = {}
except Exception:
# Log loudly but do not abort pillar rendering because one LDAP
# source failed; an empty dict is returned instead.
log.critical(
'Failed to retrieve pillar data from LDAP:\n', exc_info=True
)
return {}
return result |
python | def send_message(host, data, timeout=None, properties=None):
"""
Send message to given `host`.

Args:
host (str): Specified host: aleph/ftp/whatever available host.
data (str): JSON data.
timeout (int, default None): How much time wait for connection.
properties (pika.BasicProperties, default None): Message properties;
when omitted, persistent-delivery JSON properties with a fresh
UUID header are used.
"""
channel = _get_channel(host, timeout)
if not properties:
properties = pika.BasicProperties(
content_type="application/json",
delivery_mode=2,  # persistent delivery
headers={"UUID": str(uuid.uuid4())}
)
# Publish to the exchange / routing key configured for this host.
parameters = settings.get_amqp_settings()[host]
channel.basic_publish(
exchange=parameters["exchange"],
routing_key=parameters["in_key"],
properties=properties,
body=data
) |
java | // Extracts from the structure the atom array used for alignment, selecting
// the atom names configured in params.
public Atom[] getAlignmentAtoms(Structure s) {
String[] atomNames = params.getUsedAtomNames();
return StructureTools.getAtomArray(s, atomNames);
} |
def add_documents(self, docs):
    """Update dictionary from a collection of documents. Each document is a list
    of tokens.

    Args:
        docs (list): documents to add.
    """
    for tokens in docs:
        # Normalize each token first, then fold the counts into the tally.
        processed = (self.process_token(tok) for tok in tokens)
        self._token_count.update(processed)
java | // Finds all overlap solutions between sourceGraph and targetGraph, projects
// them onto the source graph, and returns only the maximal ones.
public static List<IAtomContainer> getOverlaps(IAtomContainer sourceGraph, IAtomContainer targetGraph,
boolean shouldMatchBonds) throws CDKException {
List<List<CDKRMap>> rMapsList = search(sourceGraph, targetGraph, new BitSet(), new BitSet(), true, false,
shouldMatchBonds);
// projection on G1
ArrayList<IAtomContainer> graphList = projectList(rMapsList, sourceGraph, ID1);
// reduction of set of solution (isomorphism and substructure
// with different 'mappings'
return getMaximum(graphList, shouldMatchBonds);
} |
java | // EMF-generated converter: returns the instance value's string form, or null.
public String convertIfcActionSourceTypeEnumToString(EDataType eDataType, Object instanceValue) {
return instanceValue == null ? null : instanceValue.toString();
} |
python | def add_template_events_to_network(self, columns, vectors):
""" Add a vector indexed """
# Just call through to the standard function
# Temporarily point self.template_events at the 'network' bucket so the
# shared helper appends into it, then store the (possibly replaced)
# object back and clear the scratch attribute so stale state cannot
# leak into later calls.
self.template_events = self.template_event_dict['network']
self.add_template_network_events(columns, vectors)
self.template_event_dict['network'] = self.template_events
self.template_events = None |
python | def query_total_cat_recent_no_label(cat_id_arr, num=8, kind='1'):
'''
Most recent posts belonging to any of the given categories, newest first.

:param cat_id_arr: list of categories. ['0101', '0102']
:param num: maximum number of posts returned (default 8).
:param kind: post kind filter (default '1').
'''
return TabPost.select().join(
TabPost2Tag,
on=(TabPost.uid == TabPost2Tag.post_id)
).where(
(TabPost.kind == kind) &
(TabPost2Tag.tag_id << cat_id_arr)  # the "<<" operator signifies an "IN" query
).order_by(
TabPost.time_create.desc()
).limit(num) |
def send(scope, data):
    """
    Like exec(), but does not wait for a response of the remote host after
    sending the command.

    :type data: string
    :param data: The data that is sent.
    """
    connection = scope.get('__connection__')
    # Each element of `data` is forwarded with one send() call; note that
    # iterating a plain string yields single characters.
    for chunk in data:
        connection.send(chunk)
    return True
python | def generate_table(self, rows):
"""
Generates from a list of rows a PrettyTable object.

NOTE(review): the ``rows`` parameter is ignored -- the loop iterates
``self.rows`` instead; confirm whether the argument should be used.
"""
table = PrettyTable(**self.kwargs)
for row in self.rows:
# Pad short field lists out toward max_row_width with "-" placeholders.
# NOTE(review): range(1, appends) adds one fewer element than
# ``appends`` -- verify the off-by-one is intentional.
if len(row[0]) < self.max_row_width:
appends = self.max_row_width - len(row[0])
for i in range(1, appends):
row[0].append("-")
# row[1] True marks a header row; otherwise it is a data row.
if row[1] is True:
self.make_fields_unique(row[0])
table.field_names = row[0]
else:
table.add_row(row[0])
return table |
java | // Skips by actually reading into a scratch buffer, capped at 64 KiB per call,
// so fewer than n bytes may be skipped.
// NOTE(review): returns the result of read(), which may be -1 at EOF, whereas
// skip() conventionally returns 0 there -- confirm callers handle this.
@Override
public long skip(long n) throws IOException {
byte[] buf = new byte[(int)Math.min(n,64*1024)];
return read(buf,0,buf.length);
} |
def calculate_y_ticks(self, plot_height):
    """Calculate the y-axis items dependent on the plot height."""
    data_min = self.calibrated_data_min
    data_max = self.calibrated_data_max
    data_range = data_max - data_min
    result = []
    for value, label in zip(self.y_ticker.values, self.y_ticker.labels):
        if data_range != 0.0:
            # Map the tick value linearly into plot coordinates (y grows
            # downward, hence the subtraction from plot_height).
            position = plot_height - plot_height * (value - data_min) / data_range
        else:
            # Degenerate (flat) data: center the tick vertically.
            position = plot_height - plot_height * 0.5
        if 0 <= position <= plot_height:
            result.append((position, label))
    return result
java | // Resolves a localized message for the current request locale, applying args.
protected String getMessage(String key, String... args) {
return Messages.get().getBundle(m_cms.getRequestContext().getLocale()).key(key, args);
} |
java | // Collects, per MS level, the scans whose retention time falls in
// [rtStart, rtEnd]. Returns null when no MS level produced any scans.
@Override
public TreeMap<Integer, NavigableMap<Integer, IScan>> getScansByRtSpan(double rtStart,
double rtEnd) {
TreeMap<Integer, NavigableMap<Integer, IScan>> viewMap = new TreeMap<>();
boolean hasNonZeroElements = false;
for (Integer i : getMapMsLevel2index().keySet()) {
NavigableMap<Integer, IScan> scansByRtSpanAtMsLevel = getScansByRtSpanAtMsLevel(rtStart,
rtEnd, i);
if (scansByRtSpanAtMsLevel != null) {
hasNonZeroElements = true;
viewMap.put(i, scansByRtSpanAtMsLevel);
}
}
if (hasNonZeroElements) {
return viewMap;
}
return null;
} |
java | // Sends a request packet, remembering its context and response builder under
// a generated request id; optionally arms a timer that fires handleTimeout(rid).
public void sendRequest(Message request, Object context, Message.Builder responseBuilder,
Duration timeout) {
// Pack it as a no-timeout request and send it!
final REQID rid = REQID.generate();
contextMap.put(rid, context);
responseMessageMap.put(rid, responseBuilder);
// Add timeout for this request if necessary
if (timeout.getSeconds() > 0) {
registerTimerEvent(timeout, new Runnable() {
@Override
public void run() {
handleTimeout(rid);
}
});
}
OutgoingPacket opk = new OutgoingPacket(rid, request);
socketChannelHelper.sendPacket(opk);
} |
java | // Convenience overload: delegates with an empty short[] as the fallback value.
@Nullable
public static short[] optShortArray(@Nullable Bundle bundle, @Nullable String key) {
return optShortArray(bundle, key, new short[0]);
} |
python | def click(self, selector, btn=0):
"""
Click the targeted element.

:param selector: A CSS3 selector to targeted element.
:param btn: The number of mouse button.
0 - left button,
1 - middle button,
2 - right button
"""
# Substitutes the repr'd selector and button number into a JS snippet
# (doubled braces escape the literal JS braces for str.format) and
# dispatches a synthetic "click" MouseEvent on the matched element.
return self.evaluate("""
(function () {{
var element = document.querySelector({0});
var evt = document.createEvent("MouseEvents");
evt.initMouseEvent("click", true, true, window, 1, 1, 1, 1, 1,
false, false, false, false, {1}, element);
return element.dispatchEvent(evt);
}})();
""".format(repr(selector), repr(btn))) |
python | def _filter_by_m2m_schema(self, qs, lookup, sublookup, value, schema, model=None):
"""
Filters given entity queryset by an attribute which is linked to given
many-to-many schema.

NOTE(review): this returns a dict of filter kwargs rather than a
filtered queryset, and the ``qs`` and ``schema`` arguments are unused
(``schema`` is immediately shadowed below) -- confirm intent.
"""
model = model or self.model
schemata = dict((s.name, s) for s in model.get_schemata_for_model()) # TODO cache this dict, see above too
try:
schema = schemata[lookup]
except KeyError:
# TODO: smarter error message, i.e. how could this happen and what to do
raise ValueError(u'Could not find schema for lookup "%s"' % lookup)
sublookup = '__%s'%sublookup if sublookup else ''
return {
'attrs__schema': schema,
'attrs__choice%s'%sublookup: value, # TODO: can we filter by id, not name?
} |
def _find_subclass(cls, module):
    """
    Attempt to find subclass of :class:`RelationBase` in the given module.

    Note: This means strictly subclasses and not :class:`RelationBase` itself.
    This is to prevent picking up :class:`RelationBase` being imported to be
    used as the base class.
    """
    for name in dir(module):
        member = getattr(module, name)
        is_candidate = (isclass(member) and issubclass(member, cls) and
                        member is not RelationBase)
        if is_candidate:
            return member
    return None
def get_search_regex(query, ignore_case=True):
    """Returns a compiled regex pattern to search for query letters in order.

    Parameters
    ----------
    query : str
        String to search in another string (in order of character occurrence).
    ignore_case : True
        Optional value perform a case insensitive search (True by default).

    Returns
    -------
    pattern : SRE_Pattern

    Notes
    -----
    This function adds '.*' between the query characters and compiles the
    resulting regular expression.
    """
    # Spaces are dropped so "a b" behaves like "ab".
    letters = [ch for ch in query if ch != ' ']
    regex = r'({0})'.format('.*'.join(letters))
    flags = re.IGNORECASE if ignore_case else 0
    return re.compile(regex, flags)
java | // Serializes value as JSON onto the given writer via a fresh JsonWriter.
// NOTE(review): the JsonWriter is neither flushed nor closed here --
// confirm the caller flushes the underlying Writer.
public final void toJson(Writer out, T value) throws IOException {
JsonWriter writer = new JsonWriter(out);
write(writer, value);
} |
def update_buttons(self):
    """Updates the enable status of delete and reset buttons."""
    scheme = self.current_scheme
    names = self.get_option("names")
    # Drop the 'Custom' placeholder; what remains are the built-in scheme
    # names, which must not be deletable.
    if u'Custom' in names:
        names.remove(u'Custom')
    can_delete = scheme not in names
    self.delete_button.setEnabled(can_delete)
    self.reset_button.setEnabled(not can_delete)
java | // Validates the target: it must be non-null and, when it already exists,
// must be a regular file (not a directory).
private void checkFile() throws IORuntimeException {
Assert.notNull(file, "File to write content is null !");
if(this.file.exists() && false == file.isFile()){
throw new IORuntimeException("File [{}] is not a file !", this.file.getAbsoluteFile());
}
} |
def longest_table(dfs):
    """ Return this single longest DataFrame that among an array/list/tuple of DataFrames

    Useful for automagically finding the DataFrame you want when using pd.read_html() on a Wikipedia page.
    """
    # Rank (length, position) pairs; objects without __len__ count as empty.
    # The last entry after sorting is the longest table (latest one on ties).
    ranked = sorted((len(df) if hasattr(df, '__len__') else 0, i)
                    for i, df in enumerate(dfs))
    return dfs[ranked[-1][1]]
python | def _load_mapping() -> Optional[Dict[str, str]]:
"""Return a mapping of `package_name -> module_name`.

Example:
django-haystack -> haystack

Returns None when the optional ``pipreqs`` dependency is not available.
"""
if not pipreqs:
return None
# The mapping file ships inside the installed pipreqs package.
path = os.path.dirname(inspect.getfile(pipreqs))
path = os.path.join(path, 'mapping')
with open(path) as f:
# each file line is "import_name:pypi_name"
mappings = {}  # type: Dict[str, str]
for line in f:
import_name, _, pypi_name = line.strip().partition(":")
mappings[pypi_name] = import_name
return mappings |
java | // Builds the default mapping from MPXJ task fields to the corresponding
// Primavera database column names. Insertion order is preserved
// (LinkedHashMap). Note BASELINE_DURATION and DURATION both read from
// "target_drtn_hr_cnt".
public static Map<FieldType, String> getDefaultTaskFieldMap()
{
Map<FieldType, String> map = new LinkedHashMap<FieldType, String>();
map.put(TaskField.UNIQUE_ID, "task_id");
map.put(TaskField.GUID, "guid");
map.put(TaskField.NAME, "task_name");
map.put(TaskField.ACTUAL_DURATION, "act_drtn_hr_cnt");
map.put(TaskField.REMAINING_DURATION, "remain_drtn_hr_cnt");
map.put(TaskField.ACTUAL_WORK, "act_work_qty");
map.put(TaskField.REMAINING_WORK, "remain_work_qty");
map.put(TaskField.BASELINE_WORK, "target_work_qty");
map.put(TaskField.BASELINE_DURATION, "target_drtn_hr_cnt");
map.put(TaskField.DURATION, "target_drtn_hr_cnt");
map.put(TaskField.CONSTRAINT_DATE, "cstr_date");
map.put(TaskField.ACTUAL_START, "act_start_date");
map.put(TaskField.ACTUAL_FINISH, "act_end_date");
map.put(TaskField.LATE_START, "late_start_date");
map.put(TaskField.LATE_FINISH, "late_end_date");
map.put(TaskField.EARLY_START, "early_start_date");
map.put(TaskField.EARLY_FINISH, "early_end_date");
map.put(TaskField.REMAINING_EARLY_START, "restart_date");
map.put(TaskField.REMAINING_EARLY_FINISH, "reend_date");
map.put(TaskField.BASELINE_START, "target_start_date");
map.put(TaskField.BASELINE_FINISH, "target_end_date");
map.put(TaskField.CONSTRAINT_TYPE, "cstr_type");
map.put(TaskField.PRIORITY, "priority_type");
map.put(TaskField.CREATED, "create_date");
map.put(TaskField.TYPE, "duration_type");
map.put(TaskField.FREE_SLACK, "free_float_hr_cnt");
map.put(TaskField.TOTAL_SLACK, "total_float_hr_cnt");
map.put(TaskField.TEXT1, "task_code");
map.put(TaskField.TEXT2, "task_type");
map.put(TaskField.TEXT3, "status_code");
map.put(TaskField.NUMBER1, "rsrc_id");
return map;
} |
java | // Wraps the response so that, as its body is streamed to the caller, the
// bytes are also copied into the cache via cacheRequest. If caching is not
// possible (null request or body) the original response is returned. On a
// read failure or a premature close the cache entry is aborted so no
// truncated response is ever committed to the cache.
private Response cacheWritingResponse(final CacheRequest cacheRequest, Response response)
throws IOException {
// Some apps return a null body; for compatibility we treat that like a null cache request.
if (cacheRequest == null) return response;
Sink cacheBodyUnbuffered = cacheRequest.body();
if (cacheBodyUnbuffered == null) return response;
final BufferedSource source = response.body().source();
final BufferedSink cacheBody = Okio.buffer(cacheBodyUnbuffered);
Source cacheWritingSource = new Source() {
boolean cacheRequestClosed;
@Override public long read(Buffer sink, long byteCount) throws IOException {
long bytesRead;
try {
bytesRead = source.read(sink, byteCount);
} catch (IOException e) {
if (!cacheRequestClosed) {
cacheRequestClosed = true;
cacheRequest.abort(); // Failed to write a complete cache response.
}
throw e;
}
if (bytesRead == -1) {
if (!cacheRequestClosed) {
cacheRequestClosed = true;
cacheBody.close(); // The cache response is complete!
}
return -1;
}
// Copy the freshly-read bytes into the cache body as well.
sink.copyTo(cacheBody.buffer(), sink.size() - bytesRead, bytesRead);
cacheBody.emitCompleteSegments();
return bytesRead;
}
@Override public Timeout timeout() {
return source.timeout();
}
@Override public void close() throws IOException {
if (!cacheRequestClosed
&& !discard(this, ExchangeCodec.DISCARD_STREAM_TIMEOUT_MILLIS, MILLISECONDS)) {
cacheRequestClosed = true;
cacheRequest.abort();
}
source.close();
}
};
String contentType = response.header("Content-Type");
long contentLength = response.body().contentLength();
return response.newBuilder()
.body(new RealResponseBody(contentType, contentLength, Okio.buffer(cacheWritingSource)))
.build();
} |
java | // Averages per-third progress over up to tasksPerBar reports starting at
// index. Each report's progress is distributed across three buckets
// (progresses[0..2]), one third at a time.
// NOTE(review): when no report falls in range (k == 0), the final division
// yields NaN -- confirm callers never pass such an index.
private float[] getReduceAvarageProgresses(int tasksPerBar, int index
, TaskReport[] reports ) {
float[] progresses = new float[] {0,0,0};
int k=0;
for(;k < tasksPerBar && index + k < reports.length; k++) {
float progress = reports[index+k].getProgress();
// A full third contributes 1; a partial third contributes its fraction
// (progress * 3 rescales the remainder into [0, 1]).
for(int j=0; progress > 0 ; j++, progress -= oneThird) {
if(progress > oneThird)
progresses[j] += 1f;
else
progresses[j] += progress * 3 ;
}
}
for(int j=0; j<3; j++) { progresses[j] /= k;}
return progresses;
} |
def _maybeCleanSessions(self):
    """
    Clean expired sessions if it's been long enough since the last clean.
    """
    elapsed = self._clock.seconds() - self._lastClean
    if elapsed > self.sessionCleanFrequency:
        self._cleanSessions()
def get_value_for_datastore(self, model_instance):
    """Get key of reference rather than reference itself."""
    # Return the (table id, object id) pair; either component defaults to
    # None when the instance lacks the configured attribute.
    return (
        getattr(model_instance, self.table_fieldname, None),
        getattr(model_instance, self.object_fieldname, None),
    )
java | // Binds interfaceKey to a provider that chooses the implementation from the
// configuration property, falling back to defaultPropertyValue when unset.
public static <T> ScopedBindingBuilder createChoiceWithDefault(
Binder binder,
String property,
Key<T> interfaceKey,
String defaultPropertyValue
)
{
Preconditions.checkNotNull(defaultPropertyValue);
ConfiggedProvider<T> provider = new ConfiggedProvider<>(interfaceKey, property, null, defaultPropertyValue);
return binder.bind(interfaceKey).toProvider(provider);
} |
java | // Returns a histogram derived from x with every event counter divided by y.
// NOTE(review): y == 0 is not guarded -- confirm double-division semantics
// (Infinity/NaN) are acceptable to callers.
public static Histogram divide(Histogram x, double y) {
return x.modifyEventCounters((r, d) -> d / y);
} |
java | // Returns true only for depth (and depth+stencil) texture formats; every
// color format is not depth-renderable. The switch is exhaustive over the
// enum, so falling out of it indicates a new, unhandled constant.
public static boolean isDepthRenderable(final JCGLTextureFormat f)
{
switch (f) {
case TEXTURE_FORMAT_DEPTH_16_2BPP:
case TEXTURE_FORMAT_DEPTH_24_4BPP:
case TEXTURE_FORMAT_DEPTH_24_STENCIL_8_4BPP:
case TEXTURE_FORMAT_DEPTH_32F_4BPP:
return true;
case TEXTURE_FORMAT_R_16_2BPP:
case TEXTURE_FORMAT_R_16F_2BPP:
case TEXTURE_FORMAT_R_16I_2BPP:
case TEXTURE_FORMAT_R_16U_2BPP:
case TEXTURE_FORMAT_R_32F_4BPP:
case TEXTURE_FORMAT_R_32I_4BPP:
case TEXTURE_FORMAT_R_32U_4BPP:
case TEXTURE_FORMAT_R_8_1BPP:
case TEXTURE_FORMAT_R_8I_1BPP:
case TEXTURE_FORMAT_R_8U_1BPP:
case TEXTURE_FORMAT_RG_16_4BPP:
case TEXTURE_FORMAT_RG_16F_4BPP:
case TEXTURE_FORMAT_RG_16I_4BPP:
case TEXTURE_FORMAT_RG_16U_4BPP:
case TEXTURE_FORMAT_RG_32F_8BPP:
case TEXTURE_FORMAT_RG_32I_8BPP:
case TEXTURE_FORMAT_RG_32U_8BPP:
case TEXTURE_FORMAT_RG_8_2BPP:
case TEXTURE_FORMAT_RG_8I_2BPP:
case TEXTURE_FORMAT_RG_8U_2BPP:
case TEXTURE_FORMAT_RGB_16_6BPP:
case TEXTURE_FORMAT_RGB_16F_6BPP:
case TEXTURE_FORMAT_RGB_16I_6BPP:
case TEXTURE_FORMAT_RGB_16U_6BPP:
case TEXTURE_FORMAT_RGB_32F_12BPP:
case TEXTURE_FORMAT_RGB_32I_12BPP:
case TEXTURE_FORMAT_RGB_32U_12BPP:
case TEXTURE_FORMAT_RGB_8_3BPP:
case TEXTURE_FORMAT_RGB_8I_3BPP:
case TEXTURE_FORMAT_RGB_8U_3BPP:
case TEXTURE_FORMAT_RGBA_1010102_4BPP:
case TEXTURE_FORMAT_RGBA_16_8BPP:
case TEXTURE_FORMAT_RGBA_16F_8BPP:
case TEXTURE_FORMAT_RGBA_16I_8BPP:
case TEXTURE_FORMAT_RGBA_16U_8BPP:
case TEXTURE_FORMAT_RGBA_32F_16BPP:
case TEXTURE_FORMAT_RGBA_32I_16BPP:
case TEXTURE_FORMAT_RGBA_32U_16BPP:
case TEXTURE_FORMAT_RGBA_8_4BPP:
case TEXTURE_FORMAT_RGBA_8I_4BPP:
case TEXTURE_FORMAT_RGBA_8U_4BPP:
return false;
}
throw new UnreachableCodeException();
} |
java | // Marks or unmarks this revision as a deletion by toggling the special
// "_deleted" property.
@InterfaceAudience.Public
public void setIsDeletion(boolean isDeletion) {
if (isDeletion == true) {
properties.put("_deleted", true);
} else {
properties.remove("_deleted");
}
} |
def maximal_matching(G, sampler=None, **sampler_args):
    """Finds an approximate maximal matching.

    Defines a QUBO with ground states corresponding to a maximal
    matching and uses the sampler to sample from it.

    A matching is a subset of edges in which no node occurs more than
    once. A maximal matching is one in which no edges from G can be
    added without violating the matching rule.

    Parameters
    ----------
    G : NetworkX graph
        The graph on which to find a maximal matching.

    sampler
        A binary quadratic model sampler. A sampler is a process that
        samples from low energy states in models defined by an Ising
        equation or a Quadratic Unconstrained Binary Optimization
        Problem (QUBO). A sampler is expected to have a 'sample_qubo'
        and 'sample_ising' method. A sampler is expected to return an
        iterable of samples, in order of increasing energy. If no
        sampler is provided, one must be provided using the
        `set_default_sampler` function.

    sampler_args
        Additional keyword parameters are passed to the sampler.

    Returns
    -------
    matching : set
        A maximal matching of the graph.

    Notes
    -----
    Samplers by their nature may not return the optimal solution. This
    function does not attempt to confirm the quality of the returned
    sample.

    References
    ----------
    `Matching on Wikipedia <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
    `QUBO on Wikipedia <https://en.wikipedia.org/wiki/Quadratic_unconstrained_binary_optimization>`_
    Based on the formulation presented in [AL]_
    """
    # the maximum degree
    delta = max(G.degree(node) for node in G)
    # use the maximum degree to determine the infeasible gaps
    A = 1.
    # BUGFIX: for delta <= 2 the generic formula below would make B zero or
    # negative (e.g. delta == 1 gives B = -.75), inverting the maximality
    # penalty; clamp to the fixed feasible value instead.
    if delta <= 2:
        B = .75
    else:
        B = .75 * A / (delta - 2.)  # we want A > (delta - 2) * B
    # each edge in G gets a variable, so let's create those
    edge_mapping = _edge_mapping(G)
    # build the QUBO: maximality terms (magnitude B) plus matching
    # constraint terms (magnitude A), summed where they overlap
    Q = _maximal_matching_qubo(G, edge_mapping, magnitude=B)
    Qm = _matching_qubo(G, edge_mapping, magnitude=A)
    for edge, bias in Qm.items():
        Q[edge] = Q.get(edge, 0) + bias
    # use the sampler to find low energy states
    response = sampler.sample_qubo(Q, **sampler_args)
    # we want the lowest energy sample
    sample = next(iter(response))
    # the matching are the edges that are 1 in the sample
    return set(edge for edge in G.edges if sample[edge_mapping[edge]] > 0)
java | // Encodes a cache key as "<TYPE>:<KEY>" using one-letter type tags for the
// supported primitives, or "T:<class>:<value>" via a registered Transformer
// for custom types; unsupported keys raise a CacheException.
public String keyToString(Object key) {
// This string should be in the format of:
// "<TYPE>:<KEY>" for internally supported types or "T:<KEY_CLASS>:<KEY>" for custom types
// e.g.:
// "S:my string key"
// "I:75"
// "D:5.34"
// "B:f"
// "T:com.myorg.MyType:STRING_GENERATED_BY_TRANSFORMER_FOR_MY_TYPE"
// First going to check if the key is a primitive or a String. Otherwise, check if it's a transformable.
// If none of those conditions are satisfied, we'll throw a CacheException.
// Using 'X' for Shorts and 'Y' for Bytes because 'S' is used for Strings and 'B' is being used for Booleans.
if (key instanceof byte[])
return "A:" + Base64.getEncoder().encodeToString((byte[]) key); //todo [anistor] need to profile Base64 versus simple hex encoding of the raw bytes
if (key instanceof String)
return "S:" + key;
else if (key instanceof Integer)
return "I:" + key;
else if (key instanceof Boolean)
return "B:" + key;
else if (key instanceof Long)
return "L:" + key;
else if (key instanceof Float)
return "F:" + key;
else if (key instanceof Double)
return "D:" + key;
else if (key instanceof Short)
return "X:" + key;
else if (key instanceof Byte)
return "Y:" + key;
else if (key instanceof Character)
return "C:" + key;
else if (key instanceof UUID)
return "U:" + key;
else {
Transformer t = getTransformer(key.getClass());
if (t != null) {
return "T:" + key.getClass().getName() + ":" + t.toString(key);
} else {
throw log.noTransformerForKey(key.getClass().getName());
}
}
} |
java | // Recursively expands tree paths that lead to any of the given bug leaf
// nodes, collecting the full path of each matching leaf into treePaths.
public void crawlToOpen(TreePath path, ArrayList<BugLeafNode> bugLeafNodes, ArrayList<TreePath> treePaths) {
for (int i = 0; i < getChildCount(path.getLastPathComponent()); i++) {
if (!isLeaf(getChild(path.getLastPathComponent(), i))) {
// Interior node: descend only when some leaf matches this aspect.
for (BugLeafNode p : bugLeafNodes) {
if (p.matches((BugAspects) getChild(path.getLastPathComponent(), i))) {
tree.expandPath(path);
crawlToOpen(path.pathByAddingChild(getChild(path.getLastPathComponent(), i)), bugLeafNodes, treePaths);
break;
}
}
} else {
// Leaf node: record the path for every exact match.
for (BugLeafNode b : bugLeafNodes) {
if (getChild(path.getLastPathComponent(), i).equals(b)) {
tree.expandPath(path);
treePaths.add(path.pathByAddingChild(getChild(path.getLastPathComponent(), i)));
}
}
}
}
} |
java | // Loads a class either via the current classloader (when no resource is
// given but a bundle service grants access) or from the bundle that matches
// the resource and version range; returns null when the class is not found.
// NOTE(review): findBundle() returning null would NPE on bundle.loadClass,
// and printStackTrace should arguably be replaced with proper logging.
private Class<?> getClassFromBundle(Object resource, String className, String versionRange)
{
Class<?> c = null;
if (resource == null)
{
Object classAccess = this.getClassBundleService(null, className, versionRange, null, 0);
if (classAccess != null)
{
try {
c = Class.forName(className);
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
}
else
{
Bundle bundle = this.findBundle(resource, bundleContext, ClassFinderActivator.getPackageName(className, false), versionRange);
try {
c = bundle.loadClass(className);
} catch (ClassNotFoundException e) {
c = null;
}
}
return c;
} |
java | // Returns a copy of the list with every element equal to alias removed.
// A null or empty alias is a no-op; an undefined list is rejected (AS7-3476).
private ModelNode removeAliasFromList(ModelNode list, String alias) throws OperationFailedException {
// check for empty string
if (alias == null || alias.equals(""))
return list ;
// check for undefined list (AS7-3476)
if (!list.isDefined()) {
throw InfinispanMessages.MESSAGES.cannotRemoveAliasFromEmptyList(alias);
}
ModelNode newList = new ModelNode() ;
List<ModelNode> listElements = list.asList();
for (ModelNode listElement : listElements) {
if (!listElement.asString().equals(alias)) {
newList.add().set(listElement);
}
}
return newList ;
} |
java | // Fluent varargs setter: lazily creates the internal list sized to the input,
// appends each account id, and returns this for chaining.
public AddPermissionRequest withAWSAccountIds(String... aWSAccountIds) {
if (this.aWSAccountIds == null) {
setAWSAccountIds(new com.amazonaws.internal.SdkInternalList<String>(aWSAccountIds.length));
}
for (String ele : aWSAccountIds) {
this.aWSAccountIds.add(ele);
}
return this;
} |
def _headers(self):
    """Ensure the Authorization Header has a valid Access Token."""
    # Log in when no token has been obtained yet, or when the current one
    # is within 30 seconds of expiring.
    needs_login = (
        not self.access_token
        or not self.access_token_expires
        or datetime.now() > self.access_token_expires - timedelta(seconds=30)
    )
    if needs_login:
        self._basic_login()
    return {
        'Accept': HEADER_ACCEPT,
        'Authorization': 'bearer ' + self.access_token,
    }
python | def get_lambda_to_execute(self):
"""
return a function that executes the function assigned to this job.

If job.track_progress is None (the default), the returned function accepts no argument
and simply needs to be called. If job.track_progress is True, an update_progress function
is passed in that can be used by the function to provide feedback progress back to the
job scheduling system.

:return: a function that executes the original function assigned to this job.
"""
def y(update_progress_func, cancel_job_func):
"""
Call the function stored in self.func, and passing in update_progress_func
or cancel_job_func depending if self.track_progress or self.cancellable is defined,
respectively.

:param update_progress_func: The callback for when the job updates its progress.
:param cancel_job_func: The function that the function has to call occasionally to see
if the user wants to cancel the currently running job.
:return: Any
"""
func = import_stringified_func(self.func)
extrafunckwargs = {}
# Shallow-copy the stored args/kwargs so the per-run additions below
# never mutate the Job's saved state.
args, kwargs = copy.copy(self.args), copy.copy(self.kwargs)
if self.track_progress:
extrafunckwargs["update_progress"] = partial(update_progress_func, self.job_id)
if self.cancellable:
extrafunckwargs["check_for_cancel"] = partial(cancel_job_func, self.job_id)
kwargs.update(extrafunckwargs)
return func(*args, **kwargs)
return y |
python | def concat(dfs):
"""Concatenate a series of `pyam.IamDataFrame`-like objects together"""
# Reject plain strings explicitly: they are iterable but almost certainly
# a caller mistake.
if isstr(dfs) or not hasattr(dfs, '__iter__'):
msg = 'Argument must be a non-string iterable (e.g., list or tuple)'
raise TypeError(msg)
_df = None
for df in dfs:
# Coerce anything that is not already an IamDataFrame.
df = df if isinstance(df, IamDataFrame) else IamDataFrame(df)
if _df is None:
# Deep-copy the first frame so the inputs are never mutated.
_df = copy.deepcopy(df)
else:
_df.append(df, inplace=True)
return _df |
java | // Wraps raw query results into EnhanceEntity instances (attaching each
// entity's id); objects already recognized as EnhanceEntity pass through.
// NOTE(review): o.getClass().isAssignableFrom(EnhanceEntity.class) tests the
// reverse of `o instanceof EnhanceEntity` -- confirm the intended direction.
private List<EnhanceEntity> populateEnhanceEntities(EntityMetadata m, List<String> relationNames, List result)
{
List<EnhanceEntity> ls = null;
if (!result.isEmpty())
{
ls = new ArrayList<EnhanceEntity>(result.size());
for (Object o : result)
{
EnhanceEntity entity = null;
if (!o.getClass().isAssignableFrom(EnhanceEntity.class))
{
entity = new EnhanceEntity(o, PropertyAccessorHelper.getId(o, m), null);
}
else
{
entity = (EnhanceEntity) o;
}
ls.add(entity);
}
}
return ls;
} |
java | // Convenience overload: delegates with null for the second parameter
// (its semantics are defined by the two-argument toSet).
public <T> Set<T> toSet(Class<T> classOfT) {
return toSet(classOfT, null);
} |
java | // OR of two numbers, delegating entirely to NumberMath.or.
public static Number or(Number left, Number right) {
return NumberMath.or(left, right);
} |
python | def fetch_sampling_rules(self):
"""
Use X-Ray botocore client to get the centralized sampling rules
from X-Ray service. The call is proxied and signed by X-Ray Daemon.

:return: list of ``SamplingRule`` objects built from the valid rule
definitions in the service response.
"""
new_rules = []
resp = self._xray_client.get_sampling_rules()
records = resp['SamplingRuleRecords']
for record in records:
rule_def = record['SamplingRule']
# Skip definitions that fail local validation.
if self._is_rule_valid(rule_def):
rule = SamplingRule(name=rule_def['RuleName'],
priority=rule_def['Priority'],
rate=rule_def['FixedRate'],
reservoir_size=rule_def['ReservoirSize'],
host=rule_def['Host'],
service=rule_def['ServiceName'],
method=rule_def['HTTPMethod'],
path=rule_def['URLPath'],
service_type=rule_def['ServiceType'])
new_rules.append(rule)
return new_rules |
def get_queue_settings(self, project_key):
    """
    Get queue settings on project

    :param project_key: str
    :return: response of the GET request (experimental API headers are sent)
    """
    endpoint = 'rest/servicedeskapi/queues/{}'.format(project_key)
    return self.get(endpoint, headers=self.experimental_headers)
java | // Re-associates commits stored without a pull number ("orphans") by matching
// them against recently merged pull requests, then persists every commit
// that gained a pull number.
private void processOrphanCommits(GitHubRepo repo) {
long refTime = Math.min(System.currentTimeMillis() - gitHubSettings.getCommitPullSyncTime(), gitHubClient.getRepoOffsetTime(repo));
List<Commit> orphanCommits = commitRepository.findCommitsByCollectorItemIdAndTimestampAfterAndPullNumberIsNull(repo.getId(), refTime);
List<GitRequest> pulls = gitRequestRepository.findByCollectorItemIdAndMergedAtIsBetween(repo.getId(), refTime, System.currentTimeMillis());
orphanCommits = CommitPullMatcher.matchCommitToPulls(orphanCommits, pulls);
List<Commit> orphanSaveList = orphanCommits.stream().filter(c -> !StringUtils.isEmpty(c.getPullNumber())).collect(Collectors.toList());
orphanSaveList.forEach( c -> LOG.info( "Updating orphan " + c.getScmRevisionNumber() + " " +
new DateTime(c.getScmCommitTimestamp()).toString("yyyy-MM-dd hh:mm:ss.SSa") + " with pull " + c.getPullNumber()));
commitRepository.save(orphanSaveList);
} |
java | // Returns a copy of this image flipped on the requested axes by negating the
// texture extents and shifting the texture offsets accordingly.
public Image getFlippedCopy(boolean flipHorizontal, boolean flipVertical) {
init();
Image image = copy();
if (flipHorizontal) {
image.textureOffsetX = textureOffsetX + textureWidth;
image.textureWidth = -textureWidth;
}
if (flipVertical) {
image.textureOffsetY = textureOffsetY + textureHeight;
image.textureHeight = -textureHeight;
}
return image;
} |
java | // CDI extension hook: records every discovered RedisURI and ClientResources
// bean, keyed by its qualifier set; alternative beans override earlier finds.
@SuppressWarnings("unchecked")
<T> void processBean(@Observes ProcessBean<T> processBean) {
Bean<T> bean = processBean.getBean();
for (Type type : bean.getTypes()) {
if (!(type instanceof Class<?>)) {
continue;
}
// Check if the bean is an RedisURI.
if (RedisURI.class.isAssignableFrom((Class<?>) type)) {
Set<Annotation> qualifiers = LettuceSets.newHashSet(bean.getQualifiers());
if (bean.isAlternative() || !redisUris.containsKey(qualifiers)) {
LOGGER.debug(String.format("Discovered '%s' with qualifiers %s.", RedisURI.class.getName(), qualifiers));
redisUris.put(qualifiers, (Bean<RedisURI>) bean);
}
}
if (ClientResources.class.isAssignableFrom((Class<?>) type)) {
Set<Annotation> qualifiers = LettuceSets.newHashSet(bean.getQualifiers());
if (bean.isAlternative() || !clientResources.containsKey(qualifiers)) {
LOGGER.debug(String.format("Discovered '%s' with qualifiers %s.", ClientResources.class.getName(),
qualifiers));
clientResources.put(qualifiers, (Bean<ClientResources>) bean);
}
}
}
} |
python | def wrapped_help_text(wrapped_func):
"""Decorator to pass through the documentation from a wrapped function.
"""
def decorator(wrapper_func):
"""The decorator.
Parameters
----------
f : callable
The wrapped function.
"""
wrapper_func.__doc__ = ('This method wraps the following method:\n\n' +
pydoc.text.document(wrapped_func))
return wrapper_func
return decorator |
def process_data(self, data):
    """Convert an unknown data input into a geojson dictionary."""
    if isinstance(data, dict):
        self.embed = True
        return data
    if isinstance(data, str):
        if data.lower().startswith(('http:', 'ftp:', 'https:')):
            # Remote resource: fetch it, recording the link when not embedding.
            if not self.embed:
                self.embed_link = data
            return requests.get(data).json()
        if data.lstrip()[0] in '[{':  # This is a GeoJSON inline string
            self.embed = True
            return json.loads(data)
        # Otherwise treat the string as a filename.
        if not self.embed:
            self.embed_link = data
        with open(data) as handle:
            return json.loads(handle.read())
    if hasattr(data, '__geo_interface__'):
        self.embed = True
        # Reproject to WGS84 when the object supports it (e.g. GeoDataFrame).
        if hasattr(data, 'to_crs'):
            data = data.to_crs(epsg='4326')
        return json.loads(json.dumps(data.__geo_interface__))
    raise ValueError('Cannot render objects with any missing geometries'
                     ': {!r}'.format(data))
java | public static BitfinexOrderBookSymbol fromJSON(final JSONObject jsonObject) {
BitfinexCurrencyPair symbol = BitfinexCurrencyPair.fromSymbolString(jsonObject.getString("symbol"));
Precision prec = Precision.valueOf(jsonObject.getString("prec"));
Frequency freq = null;
Integer len = null;
if (prec != Precision.R0) {
freq = Frequency.valueOf(jsonObject.getString("freq"));
len = jsonObject.getInt("len");
}
return new BitfinexOrderBookSymbol(symbol, prec, freq, len);
} |
java | public static final SerIterable map(final Class<?> keyType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
final Map<Object, Object> map = new HashMap<>();
return map(keyType, valueType, valueTypeTypes, map);
} |
java | void putAll(BitArray array) {
assert data.length == array.data.length : "BitArrays must be of equal length when merging";
long bitCount = 0;
for (int i = 0; i < data.length; i++) {
data[i] |= array.data[i];
bitCount += Long.bitCount(data[i]);
}
this.bitCount = bitCount;
} |
/**
 * Determines the product version of the WebSphere Liberty runtime rooted at
 * the given installation directory.
 *
 * @param wlpInstallationDirectory root of the Liberty install (the wlp dir)
 * @return the parsed product version
 * @throws VersionParsingException if no WAS product properties are present
 *         or the version string cannot be parsed
 */
private Version getProductVersion(File wlpInstallationDirectory) throws VersionParsingException {
    // First get the properties
    Map<String, ProductInfo> productProperties = VersionUtils.getAllProductInfo(wlpInstallationDirectory);
    // Get the properties for WAS (keyed by its product id)
    ProductInfo wasProperties = productProperties.get("com.ibm.websphere.appserver");
    if (wasProperties == null) {
        throw new VersionParsingException(getMessage("compare.no.was.properties.found"));
    }
    Version version = convertVersion(wasProperties);
    return version;
}
def report_usage_to_host(host_ip, vmid):
    """Collect local resource metrics and report them to the host.

    Builds a pipe-separated usage record
    ``"<vmid> | <cpu> | <os_mem> | <task_mem> | <io>"`` and ships it to
    *host_ip* over a socket via ``start_client_socket``.

    :param host_ip: address of the host that runs the vmonere listener
    :param vmid: identifier of this VM (whitespace is stripped)
    """
    # The dead "base value" initializers and the commented-out ssh command
    # from the original were removed; behavior is unchanged.
    cpu_usage = get_cpu_usage()
    os_mem_usage = get_os_mem_usage()
    task_mem_usage = get_task_mem_usage()
    io_usage = get_io_usage()
    usage = str(vmid.strip())+' | '+str(cpu_usage)+' | '+str(os_mem_usage)+' | '+str(task_mem_usage)+' | '+str(io_usage)
    # Report usage via socket.
    start_client_socket(host_ip, usage)
/**
 * Computes the force needed to move from this object's location toward the
 * target, normalized by the larger axis distance and scaled by the input
 * vector's per-axis direction.
 *
 * NOTE(review): for Transformable targets the destination is led ahead by
 * the target's own movement (delta since old position) scaled by the
 * distance/direction ratio — confirm the intended lead-pursuit semantics,
 * and note the (int) casts deliberately truncate the lead offsets.
 */
private Force computeVector(Force vector, Localizable target)
{
    final double sx = localizable.getX();
    final double sy = localizable.getY();
    double dx = target.getX();
    double dy = target.getY();
    if (target instanceof Transformable)
    {
        final Transformable transformable = (Transformable) target;
        // Straight-line distance from us to the target.
        final double ray = UtilMath.getDistance(localizable.getX(),
                                                localizable.getY(),
                                                target.getX(),
                                                target.getY());
        // Lead the aim point by the target's last displacement, truncated to int.
        dx += (int) ((target.getX() - transformable.getOldX()) / vector.getDirectionHorizontal() * ray);
        dy += (int) ((target.getY() - transformable.getOldY()) / vector.getDirectionVertical() * ray);
    }
    // Normalize by the dominant axis so the larger component has magnitude 1.
    final double dist = Math.max(Math.abs(sx - dx), Math.abs(sy - dy));
    final double vecX = (dx - sx) / dist * vector.getDirectionHorizontal();
    final double vecY = (dy - sy) / dist * vector.getDirectionVertical();
    final Force force = new Force(vector);
    force.setDestination(vecX, vecY);
    return force;
}
def _clean_background(data):
    """Clean up background specification, remaining back compatible.

    Normalizes ``algorithm.background`` into a dict with keys ``variant``
    and/or ``cnv_reference``, converting relative file paths to absolute
    ones. The old style (a bare string) is treated as the variant file.
    Mutates and returns *data*.

    Raises ValueError on unexpected keys or input types.
    """
    allowed_keys = set(["variant", "cnv_reference"])
    val = tz.get_in(["algorithm", "background"], data)
    errors = []
    if val:
        out = {}
        # old style specification, single string for variant
        if isinstance(val, six.string_types):
            out["variant"] = _file_to_abs(val, [os.getcwd()])
        elif isinstance(val, dict):
            for k, v in val.items():
                if k in allowed_keys:
                    if isinstance(v, six.string_types):
                        out[k] = _file_to_abs(v, [os.getcwd()])
                    else:
                        # Nested dict of named files: absolutize each value in place.
                        assert isinstance(v, dict)
                        for ik, iv in v.items():
                            v[ik] = _file_to_abs(iv, [os.getcwd()])
                        out[k] = v
                else:
                    errors.append("Unexpected key: %s" % k)
        else:
            errors.append("Unexpected input: %s" % val)
        if errors:
            raise ValueError("Problematic algorithm background specification for %s:\n %s" %
                             (data["description"], "\n".join(errors)))
        # Always (re)standardize the CNV reference when a background is given.
        out["cnv_reference"] = structural.standardize_cnv_reference({"config": data,
                                                                     "description": data["description"]})
        data["algorithm"]["background"] = out
    return data
/**
 * Returns the last commerce country matching the group, shipping-allowed
 * flag and active flag in the order given by the comparator, or
 * {@code null} if none matches. Thin delegate to the persistence layer.
 */
public static CommerceCountry fetchByG_S_A_Last(long groupId,
    boolean shippingAllowed, boolean active,
    OrderByComparator<CommerceCountry> orderByComparator) {
    return getPersistence()
               .fetchByG_S_A_Last(groupId, shippingAllowed, active,
            orderByComparator);
}
java | public static String formatIPAddressForURI(InetAddress inet){
if(inet == null){
throw new IllegalArgumentException();
}
if(inet instanceof Inet4Address){
return inet.getHostAddress();
} else if (inet instanceof Inet6Address){
return '[' + formatAddress(inet) + ']';
} else {
return inet.getHostAddress();
}
} |
def list_commands(self, ctx):
    """Override for showing commands in particular order.

    Discovers the commands via the parent implementation, then returns
    them in the order produced by ``order_manually``.
    """
    commands = super(LegitGroup, self).list_commands(ctx)
    # order_manually already yields the names; the identity comprehension
    # from the original was a redundant copy.
    return list(order_manually(commands))
java | CpcSketch copy() {
final CpcSketch copy = new CpcSketch(lgK, seed);
copy.numCoupons = numCoupons;
copy.mergeFlag = mergeFlag;
copy.fiCol = fiCol;
copy.windowOffset = windowOffset;
copy.slidingWindow = (slidingWindow == null) ? null : slidingWindow.clone();
copy.pairTable = (pairTable == null) ? null : pairTable.copy();
copy.kxp = kxp;
copy.hipEstAccum = hipEstAccum;
return copy;
} |
/**
 * Queries the ledger for a transaction by id, using a shuffled selection of
 * peers that carry the LEDGER_QUERY role. Delegates to the peer-list overload.
 */
public TransactionInfo queryTransactionByID(String txID, User userContext) throws ProposalException, InvalidArgumentException {
    return queryTransactionByID(getShuffledPeers(EnumSet.of(PeerRole.LEDGER_QUERY)), txID, userContext);
}
def create_css(self, fileid=None):
    """
    Generate the final CSS string.

    :param fileid: optional source-file id; when given, only the rules
        registered under that id are rendered (an unknown id renders
        nothing). Otherwise all accumulated rules are rendered.
    :return: the rendered CSS text
    """
    if fileid:
        rules = self._rules.get(fileid) or []
    else:
        rules = self.rules
    compress = self._scss_opts.get('compress', True)
    # Rendering tokens: sc = emit trailing semicolons, sp = space after ':',
    # tb = indent unit, nl = line separator. Compressed output drops all
    # optional whitespace.
    if compress:
        sc, sp, tb, nl = False, '', '', ''
    else:
        sc, sp, tb, nl = True, ' ', ' ', '\n'
    scope = set()
    # debug_info is only meaningful for uncompressed output.
    return self._create_css(rules, scope, sc, sp, tb, nl, not compress and self._scss_opts.get('debug_info', False))
/**
 * Finds the distinguishing suffix index of a counterexample query using the
 * backward (reverse) linear analyzer. Thin delegate to
 * {@code AcexLocalSuffixFinder.findSuffixIndex}.
 */
public static <I, D> int findLinearReverse(Query<I, D> ceQuery,
                                           AccessSequenceTransformer<I> asTransformer,
                                           SuffixOutput<I, D> hypOutput,
                                           MembershipOracle<I, D> oracle) {
    return AcexLocalSuffixFinder.findSuffixIndex(AcexAnalyzers.LINEAR_BWD,
                                                 true,
                                                 ceQuery,
                                                 asTransformer,
                                                 hypOutput,
                                                 oracle);
}
/**
 * Partitions the inputs into unique, sorted partitions using the identity
 * extractor; see the two-argument overload for the actual algorithm.
 */
public static <INPUT extends Comparable<INPUT>> Iterable<List<INPUT>> toUniqueAndSortedPartitions(Collection<INPUT> inputs) {
    return toUniqueAndSortedPartitions(inputs, i -> i);
}
/**
 * Removes and returns the commerce region identified by country id and
 * region code. Thin delegate to the persistence layer.
 *
 * @throws com.liferay.commerce.exception.NoSuchRegionException if no such region exists
 */
public static CommerceRegion removeByC_C(long commerceCountryId, String code)
    throws com.liferay.commerce.exception.NoSuchRegionException {
    return getPersistence().removeByC_C(commerceCountryId, code);
}
/**
 * Extracts the default values of an Avro record schema as a field-name to
 * value map.
 *
 * Implementation trick: an empty record is serialized and then deserialized
 * with a reader schema containing only the defaulted fields, which forces
 * Avro's schema-resolution machinery to materialize each default value.
 */
public static Map<String, Object> getDefaultValueMap(Schema avroRecordSchema) {
    List<Field> defaultFields = new ArrayList<Field>();
    for (Field f : avroRecordSchema.getFields()) {
        if (f.defaultValue() != null) {
            // Need to create a new Field here or we will get
            // org.apache.avro.AvroRuntimeException: Field already used:
            // schemaVersion
            defaultFields.add(new Field(f.name(), f.schema(), f.doc(), f
                    .defaultValue(), f.order()));
        }
    }
    Schema defaultSchema = Schema.createRecord(defaultFields);
    Schema emptyRecordSchema = Schema.createRecord(new ArrayList<Field>());
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(
            emptyRecordSchema);
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(
            emptyRecordSchema, defaultSchema);
    // Round-trip an empty record so the reader fills in every default.
    GenericRecord emptyRecord = new GenericData.Record(emptyRecordSchema);
    GenericRecord defaultRecord = AvroUtils.readAvroEntity(
            AvroUtils.writeAvroEntity(emptyRecord, writer), reader);
    Map<String, Object> defaultValueMap = new HashMap<String, Object>();
    for (Field f : defaultFields) {
        defaultValueMap.put(f.name(), defaultRecord.get(f.name()));
    }
    return defaultValueMap;
}
def upload_superfile(self, remote_path, block_list, ondup=None, **kwargs):
    """Chunked upload — merge uploaded chunks into one file (superfile).

    Used together with the chunk-upload method ``upload_tmpfile`` to
    upload very large files (>2G); also usable for resumable uploads.

    :param remote_path: save path of the file in the cloud drive
        (including the file name). Must start with /apps/.

        .. warning::
            * path length is limited to 1000 characters;
            * the path may not contain any of ``\\\\ ? | " > < : *``;
            * a file or directory name may not start or end with ``.``
              or whitespace, where whitespace includes:
              ``\\r, \\n, \\t, space, \\0, \\x0B``.
    :param block_list: list of MD5 checksums of the chunk contents;
        at least 2 and at most 1024 chunks.
    :type block_list: list
    :param ondup: (optional)
        * 'overwrite': overwrite an existing file of the same name;
        * 'newcopy': keep both by renaming the new file to
          "filename_date.suffix".
    :return: Response object
    """
    params = {
        'path': remote_path,
        'ondup': ondup
    }
    data = {
        'param': json.dumps({'block_list': block_list}),
    }
    return self._request('file', 'createsuperfile', extra_params=params,
                         data=data, **kwargs)
/**
 * Builds the final tree. Resolution order matters:
 * a single stacked tree with no base is returned directly; otherwise any
 * non-empty stack is wrapped in a TreeStack over the base; otherwise the
 * base itself is returned, and a missing base is an error.
 *
 * @throws IllegalArgumentException if neither a base nor stacked trees were set
 */
public Tree<T> build() {
    Tree<T> result = base;
    if (null == base && treeStack.size() == 1) {
        result = treeStack.get(0);
    } else if (treeStack.size() > 0) {
        result = new TreeStack<T>(treeStack, base);
    }else if(null==base) {
        throw new IllegalArgumentException("base tree was not set");
    }
    return result;
}
java | public static String stringToString(String dateString, String desfmt) {
// ISO_DATE_FORMAT = "yyyyMMdd";
if (dateString.trim().length() == 8) {
return stringToString(dateString, ISO_DATE_FORMAT, desfmt);
} else if (dateString.trim().length() == 10) {
// ISO_EXPANDED_DATE_FORMAT = "yyyy-MM-dd";
return stringToString(dateString, ISO_EXPANDED_DATE_FORMAT, desfmt);
} else if (dateString.trim().length() == 19) {
// DATETIME_PATTERN = "yyyy-MM-dd hh:mm:ss";
return stringToString(dateString.substring(0, 10),
ISO_EXPANDED_DATE_FORMAT, desfmt);
} else if (dateString.trim().length() == 11) {
// CHINESE_EXPANDED_DATE_FORMAT = "yyyy年MM月dd日";
return stringToString(dateString, CHINESE_EXPANDED_DATE_FORMAT,
desfmt);
}
return null;
} |
/**
 * Creates the plugin strategy. If the system property named after
 * {@code PluginStrategy} is set, the named class is loaded reflectively
 * and instantiated with this PluginManager; on any failure (class missing,
 * wrong type, instantiation error) the method falls back to
 * {@link ClassicPluginStrategy}.
 */
protected PluginStrategy createPluginStrategy() {
    String strategyName = SystemProperties.getString(PluginStrategy.class.getName());
    if (strategyName != null) {
        try {
            Class<?> klazz = getClass().getClassLoader().loadClass(strategyName);
            // The strategy class must expose a (PluginManager) constructor.
            Object strategy = klazz.getConstructor(PluginManager.class)
                    .newInstance(this);
            if (strategy instanceof PluginStrategy) {
                LOGGER.info("Plugin strategy: " + strategyName);
                return (PluginStrategy) strategy;
            } else {
                LOGGER.warning("Plugin strategy (" + strategyName +
                        ") is not an instance of hudson.PluginStrategy");
            }
        } catch (ClassNotFoundException e) {
            LOGGER.warning("Plugin strategy class not found: "
                    + strategyName);
        } catch (Exception e) {
            LOGGER.log(WARNING, "Could not instantiate plugin strategy: "
                    + strategyName + ". Falling back to ClassicPluginStrategy", e);
        }
        LOGGER.info("Falling back to ClassicPluginStrategy");
    }
    // default and fallback
    return new ClassicPluginStrategy(this);
}
java | public void setChildren(List<PrintComponent<?>> children) {
this.children = children;
// needed for Json unmarshall !!!!
for (PrintComponent<?> child : children) {
child.setParent(this);
}
} |
/**
 * Returns the config properties of the managed connection factory selected
 * by {@code getNumOfMcf()} from the given definition.
 */
@Override
public List<ConfigPropType> getConfigProps(Definition def)
{
    return def.getMcfDefs().get(getNumOfMcf()).getMcfConfigProps();
}
/** Sets the VAST redirect type of this object (generated Axis setter). */
public void setVastRedirectType(com.google.api.ads.admanager.axis.v201811.VastRedirectType vastRedirectType) {
    this.vastRedirectType = vastRedirectType;
}
def update_slaves(self):
    """Update all `slave` |Substituter| objects.

    Propagates this object's medium->long substitution mapping into every
    slave, then recurses so the whole slave tree is refreshed.

    See method |Substituter.update_masters| for further information.
    """
    for slave in self.slaves:
        # Push our mapping into the slave's private table, then recurse.
        slave._medium2long.update(self._medium2long)
        slave.update_slaves()
/**
 * Updates the given item; caller must hold the write lock (enforced by the
 * {@code @MustBeLocked} annotation). Delegates to the two-argument overload
 * with {@code true} — NOTE(review): confirm the flag's meaning against that
 * overload before relying on it.
 */
@MustBeLocked (ELockType.WRITE)
protected final void internalUpdateItem (@Nonnull final IMPLTYPE aItem)
{
    internalUpdateItem (aItem, true);
}
def _insert_contents(self, fzpage, newcont, overlay):
    """_insert_contents(self, fzpage, newcont, overlay) -> PyObject *

    SWIG-generated thin wrapper: forwards directly to the native
    ``_fitz.Tools__insert_contents`` binding.
    """
    return _fitz.Tools__insert_contents(self, fzpage, newcont, overlay)
def getOutput(self, command, env={}, path=None,
              uid=None, gid=None, usePTY=0, childFDs=None):
    """Execute a command and get the output of the finished process.

    Runs *command* via ``self.execute`` and returns a Deferred that fires
    with the process's standard output only (stderr and the return code
    are discarded).

    NOTE(review): ``env={}`` is a shared mutable default; it is only passed
    through here, but confirm ``execute`` does not mutate it.
    """
    deferred = defer.Deferred()
    processProtocol = _SummaryProcessProtocol(deferred)
    self.execute(processProtocol, command, env,
                 path, uid, gid, usePTY, childFDs)
    # Decorator form of deferred.addCallback: unpack the
    # (stdout, stderr, returnCode) tuple and keep only stdout.
    @deferred.addCallback
    def getStdOut(tuple_):
        stdout, _stderr, _returnCode = tuple_
        return stdout
    return deferred
def load_folder_content(folder_path):
    """ load api/testcases/testsuites definitions from folder.

    Args:
        folder_path (str): api/testcases/testsuites files folder.

    Returns:
        dict: api definition mapping, keyed by file path.

            {
                "tests/api/basic.yml": [
                    {"api": {"def": "api_login", "request": {}, "validate": []}},
                    {"api": {"def": "api_logout", "request": {}, "validate": []}}
                ]
            }

    """
    # Dict comprehension replaces the original build-by-assignment loop.
    return {
        file_path: load_file(file_path)
        for file_path in load_folder_files(folder_path)
    }
/**
 * Resolves a field by (case-insensitive) name against a DOM node.
 *
 * Attribute node: returns the node itself when its local name matches.
 * Element node: returns the element's children wrapped in a NodeArray when
 * the element name matches; otherwise searches the element's attributes
 * for a matching name. Any other node type (or no match) yields null.
 */
private Object getObjectValue(Node node, String fieldName) {
    // we have to take into account the fact that fieldName will be in the lower case
    if (node != null) {
        String name = node.getLocalName();
        switch (node.getNodeType()) {
            case Node.ATTRIBUTE_NODE:
                return name.equalsIgnoreCase(fieldName) ? node : null;
            case Node.ELEMENT_NODE: {
                if (name.equalsIgnoreCase(fieldName)) {
                    return new NodeArray(node.getChildNodes());
                } else {
                    // Fall back to the element's attributes.
                    NamedNodeMap namedNodeMap = node.getAttributes();
                    for (int attributeIndex = 0; attributeIndex < namedNodeMap.getLength(); ++attributeIndex) {
                        Node attribute = namedNodeMap.item(attributeIndex);
                        if (attribute.getLocalName().equalsIgnoreCase(fieldName)) {
                            return attribute;
                        }
                    }
                    return null;
                }
            }
            default:
                return null;
        }
    }
    return null;
}
/**
 * Releases this completion key's buffers: the pooled WsByteBuffer is
 * returned to its pool first, then the raw and staging references are
 * dropped so they can be garbage collected.
 */
protected void destroy() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "CompletionKey::destroy entered for:" + this);
    }
    // Free existing ByteBuffer objects.
    if (this.rawData != null) {
        if (this.wsByteBuf != null) {
            this.wsByteBuf.release();
            this.wsByteBuf = null;
        }
        this.rawData = null;
        this.stagingByteBuffer = null;
    }
}
def decode(self, codes):
    """Given PQ-codes, reconstruct original D-dimensional vectors via :func:`PQ.decode`,
    and applying an inverse-rotation.

    Args:
        codes (np.ndarray): PQ-codes with shape=(N, M) and dtype=self.code_dtype.
            Each row is a PQ-code

    Returns:
        np.ndarray: Reconstructed vectors with shape=(N, D) and dtype=np.float32
    """
    # Because R is a rotation matrix (R^t * R = I), R^-1 should be R^t
    return self.pq.decode(codes) @ self.R.T
java | public ListVirtualNodesResult withVirtualNodes(VirtualNodeRef... virtualNodes) {
if (this.virtualNodes == null) {
setVirtualNodes(new java.util.ArrayList<VirtualNodeRef>(virtualNodes.length));
}
for (VirtualNodeRef ele : virtualNodes) {
this.virtualNodes.add(ele);
}
return this;
} |
java | public List<GridCell<P>> consumeCells() {
final List<GridCell<P>> list = new ArrayList<>(this.cells);
this.cells.clear();
return list;
} |
/**
 * Paints the separator's foreground via the SeaGlass painter, sized to the
 * component's bounds and honoring its orientation.
 */
protected void paint(SeaGlassContext context, Graphics g) {
    JSeparator separator = (JSeparator) context.getComponent();
    context.getPainter().paintSeparatorForeground(context, g, 0, 0, separator.getWidth(), separator.getHeight(),
            separator.getOrientation());
}
def init_scalable(
    X, n_clusters, random_state=None, max_iter=None, oversampling_factor=2
):
    """K-Means initialization using k-means||

    This is algorithm 2 in Scalable K-Means++ (2012).

    Parameters
    ----------
    X : dask array of samples (rows are points).
    n_clusters : int, number of centers to return.
    random_state : dask RandomState-like, used for sampling.
    max_iter : int, optional cap on the number of k-means|| rounds.
    oversampling_factor : expected number of candidates sampled per round.

    Returns
    -------
    np.ndarray of shape (n_clusters, n_features) with the initial centers.
    """
    logger.info("Initializing with k-means||")
    # Step 1: Initialize Centers — seed with the first point.
    idx = 0
    centers = da.compute(X[idx, np.newaxis])[0]
    c_idx = {idx}
    # Step 2: Initialize cost
    cost, = compute(evaluate_cost(X, centers))
    # Paper prescribes ~log(cost) rounds; zero cost means the seed already
    # covers the data exactly.
    if cost == 0:
        n_iter = 0
    else:
        n_iter = int(np.round(np.log(cost)))
    if max_iter is not None:
        n_iter = min(max_iter, n_iter)
    # Steps 3 - 6: update candidate Centers
    for i in range(n_iter):
        with _timer(
            "init iteration %2d/%2d , %2d centers" % (i + 1, n_iter, len(c_idx)),
            _logger=logger,
        ):
            new_idxs = _sample_points(X, centers, oversampling_factor, random_state)
            new_idxs = set(*compute(new_idxs))
            c_idx |= new_idxs
        # Sort before slicing, for better performance / memory
        # usage with the scheduler.
        # See https://github.com/dask/dask-ml/issues/39
        centers = X[sorted(c_idx)].compute()
    # XXX: scikit-learn doesn't have weighted k-means.
    # The paper weights each center by the number of points closest to it.
    # https://stackoverflow.com/a/37198799/1889400 claims you can scale the
    # features before clustering, but that doesn't seem right.
    # I think that replicating the *points*, proportional to the number of
    # original points closest to the candidate centers, would be a better way
    # to do that.
    if len(centers) < n_clusters:
        logger.warning("Found fewer than %d clusters in init.", n_clusters)
        # supplement with random
        need = n_clusters - len(centers)
        locs = sorted(
            random_state.choice(
                np.arange(0, len(X)), size=need, replace=False, chunks=len(X)
            )
        )
        extra = X[locs].compute()
        return np.vstack([centers, extra])
    else:
        # Step 7, 8 without weights
        # dask RandomState objects aren't valid for scikit-learn
        rng2 = (
            random_state.randint(0, np.iinfo("i4").max - 1, chunks=())
            .compute(scheduler="single-threaded")
            .item()
        )
        # Reduce the oversampled candidates down to n_clusters with a local
        # (in-memory) k-means run.
        km = sk_k_means.KMeans(n_clusters, random_state=rng2)
        km.fit(centers)
        return km.cluster_centers_
def get_events_in_both_arrays(events_one, events_two):
    """
    Calculates the events that exist in both arrays (set intersection).

    Both inputs are made C-contiguous because the C++ helper expects
    contiguous memory. The helper writes the matches into ``event_result``
    and returns the number of entries it filled; only that prefix is
    returned.
    """
    events_one = np.ascontiguousarray(events_one)  # change memory alignment for c++ library
    events_two = np.ascontiguousarray(events_two)  # change memory alignment for c++ library
    event_result = np.empty_like(events_one)
    count = analysis_functions.get_events_in_both_arrays(events_one, events_two, event_result)
    return event_result[:count]
def reply_bytes(self, request):
    """Take a `Request` and return an OP_MSG message as bytes."""
    # Message body: flag bits, then a single type-0 (body) section.
    flag_bytes = struct.pack("<I", self._flags)
    section = struct.pack("<b", 0) + bson.BSON.encode(self.doc)
    body = flag_bytes + section
    # Standard MongoDB wire header: total length, request id, responseTo, opcode.
    reply_id = random.randint(0, 1000000)
    header = struct.pack(
        "<iiii", 16 + len(body), reply_id, request.request_id, OP_MSG)
    return header + body
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.