language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public String hackInv(ARecordDeclIR type) {
if (type.getInvariant() != null) {
AFuncDeclIR invFunc = (AFuncDeclIR) type.getInvariant();
StringBuilder sb = new StringBuilder();
sb.append("inv ");
sb.append(invFunc.getFormalParams().get(0).getPattern().toString());
sb.append(" == ");
sb.append(invFunc.getName());
sb.append("(");
sb.append("&");
sb.append(invFunc.getFormalParams().get(0).getPattern().toString());
sb.append(")");
return sb.toString();
}
return "";
} |
java | private void overChoiceEntry(CmsChoiceMenuEntryWidget entryWidget) {
cancelChoiceTimer();
cleanUpSubmenus(entryWidget);
CmsChoiceMenuEntryBean entryBean = entryWidget.getEntryBean();
if (!entryBean.isLeaf()) {
addSubmenu(entryWidget);
}
} |
python | def _get_assignment_target_end(self, ast_module):
"""Returns position of 1st char after assignment traget.
If there is no assignment, -1 is returned
If there are more than one of any ( expressions or assigments)
then a ValueError is raised.
"""
if len(ast_module.body) > 1:
raise ValueError("More than one expression or assignment.")
elif len(ast_module.body) > 0 and \
type(ast_module.body[0]) is ast.Assign:
if len(ast_module.body[0].targets) != 1:
raise ValueError("More than one assignment target.")
else:
return len(ast_module.body[0].targets[0].id)
return -1 |
/**
 * Marshalls every field of {@code domainDescriptionType} into the protocol
 * marshaller using the corresponding static {@code *_BINDING} descriptor.
 *
 * @param domainDescriptionType the model object to marshall; must not be null
 * @param protocolMarshaller the marshaller that receives each field
 * @throws SdkClientException if the argument is null or any field fails to marshall
 */
public void marshall(DomainDescriptionType domainDescriptionType, ProtocolMarshaller protocolMarshaller) {
    if (domainDescriptionType == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(domainDescriptionType.getUserPoolId(), USERPOOLID_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getAWSAccountId(), AWSACCOUNTID_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getDomain(), DOMAIN_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getS3Bucket(), S3BUCKET_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getCloudFrontDistribution(), CLOUDFRONTDISTRIBUTION_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getVersion(), VERSION_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(domainDescriptionType.getCustomDomainConfig(), CUSTOMDOMAINCONFIG_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the original cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python | def _cumprod(l):
"""Cumulative product of a list.
Args:
l: a list of integers
Returns:
a list with one more element (starting with 1)
"""
ret = [1]
for item in l:
ret.append(ret[-1] * item)
return ret |
def fill_archive(self, stream=None, kind='tgz', prefix=None,
                 subrepos=False):
    """
    Fills up given stream.

    :param stream: file like object.
    :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
        Default: ``tgz``.
    :param prefix: name of root directory in archive.
        Default is repository name and changeset's raw_id joined with dash
        (``repo-tip.<KIND>``).
    :param subrepos: include subrepos in this archive.

    :raise ImproperArchiveTypeError: If given kind is wrong.
    :raise VcsError: If given stream is None
    """
    allowed_kinds = settings.ARCHIVE_SPECS.keys()
    if kind not in allowed_kinds:
        # Interpolate here: the original passed the %-style args tuple to the
        # exception constructor (logging-call style), so the message was never
        # formatted and read "...use oneof %s".
        raise ImproperArchiveTypeError(
            'Archive kind not supported, use one of %s' % allowed_kinds)
    if prefix is None:
        prefix = '%s-%s' % (self.repository.name, self.short_id)
    elif prefix.startswith('/'):
        raise VCSError("Prefix cannot start with leading slash")
    elif prefix.strip() == '':
        raise VCSError("Prefix cannot be empty")
    # Fail fast before spawning any subprocess.
    if stream is None:
        raise VCSError('You need to pass in a valid stream for filling'
                       ' with archival data')
    frmt = 'zip' if kind == 'zip' else 'tar'
    _git_path = settings.GIT_EXECUTABLE_PATH
    # NOTE(review): the command runs through a shell below and `prefix` is
    # interpolated unquoted -- potential shell injection if prefix ever comes
    # from untrusted input; consider shlex.quote / argument lists.
    cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
        frmt, prefix, self.raw_id)
    if kind == 'tgz':
        cmd += ' | gzip -9'
    elif kind == 'tbz2':
        cmd += ' | bzip2 -9'
    popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
                  cwd=self.repository.path)
    # Stream git's output into the caller's stream in 8 KiB chunks.
    buffer_size = 1024 * 8
    chunk = popen.stdout.read(buffer_size)
    while chunk:
        stream.write(chunk)
        chunk = popen.stdout.read(buffer_size)
    # Make sure all descriptors would be read
    popen.communicate()
/**
 * Creates or updates the locale-handling widgets in the toolbar: the locale
 * select box, the "delete locale" button and the "copy locale" button.
 * Does nothing when fewer than two locales are available.
 */
private void initLocaleSelect() {
    if (m_availableLocales.size() < 2) {
        return;
    }
    Map<String, String> selectOptions = new HashMap<String, String>();
    for (Entry<String, String> localeEntry : m_availableLocales.entrySet()) {
        if (m_contentLocales.contains(localeEntry.getKey())) {
            selectOptions.put(localeEntry.getKey(), localeEntry.getValue());
        } else {
            // Mark locales for which the content has no translation yet.
            selectOptions.put(localeEntry.getKey(), localeEntry.getValue() + " [-]");
        }
    }
    if (m_localeSelect == null) {
        // First call: build the select box and wire the locale switcher.
        m_localeSelect = new CmsSelectBox(selectOptions);
        m_toolbar.insertRight(m_localeSelect, 1);
        m_localeSelect.addStyleName(I_CmsLayoutBundle.INSTANCE.generalCss().inlineBlock());
        m_localeSelect.getElement().getStyle().setWidth(100, Unit.PX);
        m_localeSelect.getElement().getStyle().setVerticalAlign(VerticalAlign.MIDDLE);
        m_localeSelect.addValueChangeHandler(new ValueChangeHandler<String>() {

            public void onValueChange(ValueChangeEvent<String> event) {

                switchLocale(event.getValue());
            }
        });
    } else {
        // Subsequent calls only refresh the option list.
        m_localeSelect.setItems(selectOptions);
    }
    m_localeSelect.setFormValueAsString(m_locale);
    if (m_deleteLocaleButton == null) {
        m_deleteLocaleButton = createButton(
            Messages.get().key(Messages.GUI_TOOLBAR_DELETE_LOCALE_0),
            "opencms-icon-remove-locale");
        m_deleteLocaleButton.addClickHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {

                confirmDeleteLocale();
            }
        });
        m_toolbar.insertRight(m_deleteLocaleButton, 2);
    }
    // The last remaining content locale must not be deletable.
    if (m_contentLocales.size() > 1) {
        m_deleteLocaleButton.enable();
    } else {
        m_deleteLocaleButton.disable(Messages.get().key(Messages.GUI_TOOLBAR_CANT_DELETE_LAST_LOCALE_0));
    }
    if (m_copyLocaleButton == null) {
        m_copyLocaleButton = createButton(
            I_CmsButton.ButtonData.COPY_LOCALE_BUTTON.getTitle(),
            I_CmsButton.ButtonData.COPY_LOCALE_BUTTON.getIconClass());
        m_copyLocaleButton.addClickHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {

                openCopyLocaleDialog();
            }
        });
        m_toolbar.insertRight(m_copyLocaleButton, 3);
    }
}
def decrypt(data, _key):
    """
    ACCEPT BYTES -> UTF8 -> JSON -> {"salt":s, "length":l, "data":d}

    Decrypts AES-256-CBC data produced by the matching encrypt routine and
    returns the plaintext (decoded with the recorded encoding, if any).
    """
    # Key and iv have not been generated or provided, bail out
    if _key is None:
        Log.error("Expecting a key")
    # Parse the JSON envelope: expects salt, length, data and optionally
    # an encoding field.
    _input = get_module("mo_json").json2value(data.decode('utf8'), leaves=False, flexible=False)
    # Initialize encryption using key and iv
    key_expander_256 = key_expander.KeyExpander(256)
    expanded_key = key_expander_256.expand(_key)
    aes_cipher_256 = aes_cipher.AESCipher(expanded_key)
    aes_cbc_256 = cbc_mode.CBCMode(aes_cipher_256, 16)
    # NOTE(review): the "salt" field is used as the CBC IV here -- confirm
    # this matches the encrypt side.
    aes_cbc_256.set_iv(base642bytearray(_input.salt))
    raw = base642bytearray(_input.data)
    out_data = bytearray()
    # Decrypt one 16-byte block at a time.
    for _, e in _groupby16(raw):
        out_data.extend(aes_cbc_256.decrypt_block(e))
    # Trim to the declared plaintext length (drops CBC padding).
    if _input.encoding:
        return binary_type(out_data[:_input.length:]).decode(_input.encoding)
    else:
        return binary_type(out_data[:_input.length:])
/**
 * Parses a value prefix: a literal, a parenthesized expression, a function
 * invocation, or a named value.
 *
 * <p>NOTE: JavaCC-generated parser code -- the {@code jj_*} bookkeeping and
 * the unreachable trailing {@code throw} are produced by the generator and
 * must not be hand-edited.
 */
final public Expression ValuePrefix() throws ParseException {
    Expression ret;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case INTEGER_LITERAL:
    case FLOATING_POINT_LITERAL:
    case STRING_LITERAL:
    case TRUE:
    case FALSE:
    case NULL:
        ret = Literal();
        break;
    case LPAREN:
        jj_consume_token(LPAREN);
        ret = Expression();
        jj_consume_token(RPAREN);
        break;
    default:
        jj_la1[27] = jj_gen;
        // Unbounded syntactic lookahead decides between a function call
        // and a plain identifier.
        if (jj_2_1(2147483647)) {
            ret = FunctionInvocation();
        } else {
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case IDENTIFIER:
                ret = NamedValue();
                break;
            default:
                jj_la1[28] = jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
            }
        }
    }
    {if (true) return ret;}
    throw new Error("Missing return statement in function");
}
java | public void setOptionGroupOptionSettings(java.util.Collection<OptionGroupOptionSetting> optionGroupOptionSettings) {
if (optionGroupOptionSettings == null) {
this.optionGroupOptionSettings = null;
return;
}
this.optionGroupOptionSettings = new com.amazonaws.internal.SdkInternalList<OptionGroupOptionSetting>(optionGroupOptionSettings);
} |
java | void setEndPoints() {
float angleNum = 360f / (rayNum - 1);
for (int i = 0; i < rayNum; i++) {
final float angle = angleNum * i;
sin[i] = MathUtils.sinDeg(angle);
cos[i] = MathUtils.cosDeg(angle);
endX[i] = distance * cos[i];
endY[i] = distance * sin[i];
}
} |
def filtre(liste_base, criteres) -> groups.Collection:
    """
    Return a filtered collection, based on criteres.

    :param liste_base: Acces list
    :param criteres: Criteria { `attribut`:[valeurs,...] }
    """
    def matches(ac):
        # An access matches when, for every criterion, its value is one of
        # the allowed values.
        return all(ac[cat] in allowed for cat, allowed in criteres.items())

    return groups.Collection(filter(matches, liste_base))
def destroy_plugin(plugin_name, conn=None):
    """
    Drop the database table backing a plugin, if it exists.

    (The original docstring said "Creates a new plugin", contradicting both
    the name and the table_drop call below.)

    :param plugin_name: <str> name of the plugin (and of its table)
    :param conn: rethinkdb connection to run the query against
    :return: <dict> rethinkdb response, or {} when the plugin does not exist
    """
    results = {}
    if plugin_exists(plugin_name, conn=conn):
        results = RPX.table_drop(plugin_name).run(conn)
    return results
def matplotlibensure(func):
    """Decorator: raise ImportError (with an install hint) when matplotlib is
    not installed, otherwise call the wrapped function unchanged."""
    @wraps(func)
    def wrap(*args, **kwargs):
        # Forward **kwargs too: the original wrapper accepted only positional
        # arguments, breaking any decorated function called with keywords.
        if not MPLINSTALLED:
            raise ImportError(msg)
        return func(*args, **kwargs)
    return wrap
def edges2nodes(edges):
    """gather the nodes from the edges"""
    # Deduplicate while preserving first-seen order via dict keys.
    seen = {}
    for a, b in edges:
        seen[a] = None
        seen[b] = None
    # Sort by the string form of each node's first element, as before.
    return sorted(seen.keys(), key=lambda node: str(node[0]))
def create_commerce():
    """
    Creates commerce from environment variables ``TBK_COMMERCE_ID``, ``TBK_COMMERCE_KEY``
    or for testing purposes ``TBK_COMMERCE_TESTING``.
    """
    commerce_id = os.getenv('TBK_COMMERCE_ID')
    commerce_key = os.getenv('TBK_COMMERCE_KEY')
    testing = os.getenv('TBK_COMMERCE_TESTING') == 'True'
    if not testing:
        # Outside testing both variables are mandatory.
        for value, var in ((commerce_id, 'TBK_COMMERCE_ID'),
                           (commerce_key, 'TBK_COMMERCE_KEY')):
            if value is None:
                raise ValueError(
                    "create_commerce needs %s environment variable" % var)
    return Commerce(
        id=commerce_id if commerce_id else Commerce.TEST_COMMERCE_ID,
        key=commerce_key,
        testing=testing
    )
def end_policy_update(self):
    """
    Inform Metrics class that a policy update has completed.

    (The original docstring said "has started", contradicting the method
    name and the elapsed-time computation below.)
    """
    # Duration of the policy update itself, if a start time was recorded.
    if self.time_policy_update_start:
        self.delta_policy_update = time() - self.time_policy_update_start
    else:
        self.delta_policy_update = 0
    # Total wall-clock time elapsed since training began.
    delta_train_start = time() - self.time_training_start
    LOGGER.debug(" Policy Update Training Metrics for {}: "
                 "\n\t\tTime to update Policy: {:0.3f} s \n"
                 "\t\tTime elapsed since training: {:0.3f} s \n"
                 "\t\tTime for experience collection: {:0.3f} s \n"
                 "\t\tBuffer Length: {} \n"
                 "\t\tReturns : {:0.3f}\n"
                 .format(self.brain_name, self.delta_policy_update,
                         delta_train_start, self.delta_last_experience_collection,
                         self.last_buffer_length, self.last_mean_return))
    self._add_row(delta_train_start)
/**
 * Returns all rows of the given sheet, delegating to the underlying reader.
 *
 * @param sheetName name of the worksheet to read
 * @param heading passed through to the reader (presumably whether the first
 *     row is a heading row -- confirm against excelReader)
 * @return the raw rows of the sheet
 */
public List<Row> getAllRawExcelRows(String sheetName, boolean heading) {
    return excelReader.getAllExcelRows(sheetName, heading);
}
def list_milestones(self, project_id, find=None):
    """
    This lets you query the list of milestones for a project. You can
    either return all milestones, or only those that are late, completed,
    or upcoming.
    """
    # Build the XML request body; 'find' restricts which milestones
    # are returned.
    req = ET.Element('request')
    if find is not None:
        ET.SubElement(req, 'find').text = str(find)
    return self._request('/projects/%u/milestones/list' % project_id, req)
def get_bookmarks(self, time=None, chan=None):
    """Return the current rater's bookmarks, optionally filtered.

    Parameters
    ----------
    time : tuple of two floats, optional
        (start, end) window; only bookmarks overlapping it are returned.
    chan : str, optional
        only bookmarks whose channel string equals this value are returned.

    Returns
    -------
    list of dict
        one dict per matching bookmark with 'name', 'start', 'end' and
        'chan' (the channel is always returned as a list).

    Raises
    ------
    IndexError
        When there is no selected rater
    """
    # get bookmarks inside window
    try:
        bookmarks = self.rater.find('bookmarks')
    except AttributeError:
        # self.rater is None when no rater has been selected
        raise IndexError('You need to have at least one rater')
    mrks = []
    for m in bookmarks:
        bookmark_start = float(m.find('bookmark_start').text)
        bookmark_end = float(m.find('bookmark_end').text)
        bookmark_chan = m.find('bookmark_chan').text
        if bookmark_chan is None:  # xml doesn't store empty string
            bookmark_chan = ''
        # Interval-overlap test against the requested window.
        if time is None:
            time_cond = True
        else:
            time_cond = (time[0] <= bookmark_end and
                         time[1] >= bookmark_start)
        if chan is None:
            chan_cond = True
        else:
            chan_cond = bookmark_chan == chan
        if time_cond and chan_cond:
            one_mrk = {'name': m.find('bookmark_name').text,
                       'start': bookmark_start,
                       'end': bookmark_end,
                       'chan': bookmark_chan.split(', '),  # always a list
                       }
            mrks.append(one_mrk)
    return mrks
java | public static void main(String[] argv) throws Exception {
System.out.println("Enter an English word in plural form and press ENTER");
BufferedReader in=new BufferedReader(new InputStreamReader(System.in));
while(true) {
String w=in.readLine();
if(w.length()==0) break;
if(isPlural(w)) System.out.println("This word is plural");
if(isSingular(w)) System.out.println("This word is singular");
System.out.println("Stemmed to singular: "+stem(w));
}
} |
/**
 * Returns the annotation's default {@code value()} member, converted to the
 * requested type.
 *
 * @param type the required type of the value
 * @param <T> the value type
 * @return the converted value (never null)
 */
public @Nonnull final <T> T getRequiredValue(Class<T> type) {
    return getRequiredValue(AnnotationMetadata.VALUE_MEMBER, type);
}
java | public List<CmsModelPageConfig> getModelPages(boolean includeDisable) {
CmsADEConfigData parentData = parent();
List<CmsModelPageConfig> parentModelPages;
if ((parentData != null) && !m_data.isDiscardInheritedModelPages()) {
parentModelPages = parentData.getModelPages();
} else {
parentModelPages = Collections.emptyList();
}
List<CmsModelPageConfig> result = combineConfigurationElements(
parentModelPages,
m_data.getOwnModelPageConfig(),
includeDisable);
return result;
} |
/**
 * Builds the example UI: a warning message box, a field layout holding the
 * drop-down and submit button, an explanatory text, and the result output
 * area.
 */
private void setupUI() {
    add(new WMessageBox(WMessageBox.WARN,
        "This example is for framework testing ONLY and must not be used as an example of how to set up any UI controls"));
    WFieldLayout layout = new WFieldLayout();
    add(layout);
    // 'drop', 'submit' and 'text' are fields of the enclosing component.
    layout.addField("Select an option with spaces", drop);
    layout.addField(submit);
    add(new ExplanatoryText("In the result output space characters are replaced with '%20'."));
    add(new WHeading(HeadingLevel.H2, "Result Text"));
    add(text);
}
java | public static base_response delete(nitro_service client, dnsnsrec resource) throws Exception {
dnsnsrec deleteresource = new dnsnsrec();
deleteresource.domain = resource.domain;
deleteresource.nameserver = resource.nameserver;
return deleteresource.delete_resource(client);
} |
def _iter_grouped_shortcut(self):
    """Fast version of `_iter_grouped` that yields Variables without
    metadata
    """
    variable = self._obj.variable
    dim = self._group_dim
    # Index the underlying variable directly, one group at a time.
    for group_indices in self._group_indices:
        yield variable[{dim: group_indices}]
/**
 * Marshalls every field of {@code describeActivitiesRequest} into the
 * protocol marshaller using the corresponding static {@code *_BINDING}
 * descriptor.
 *
 * @param describeActivitiesRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller that receives each field
 * @throws SdkClientException if the argument is null or any field fails to marshall
 */
public void marshall(DescribeActivitiesRequest describeActivitiesRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeActivitiesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeActivitiesRequest.getAuthenticationToken(), AUTHENTICATIONTOKEN_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getStartTime(), STARTTIME_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getEndTime(), ENDTIME_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getOrganizationId(), ORGANIZATIONID_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getActivityTypes(), ACTIVITYTYPES_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getResourceId(), RESOURCEID_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getUserId(), USERID_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getIncludeIndirectActivities(), INCLUDEINDIRECTACTIVITIES_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getLimit(), LIMIT_BINDING);
        protocolMarshaller.marshall(describeActivitiesRequest.getMarker(), MARKER_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the original cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Re-evaluates the cached {@code unknown} flag lazily: a type previously
 * marked unknown may have been resolved since the last check.
 */
@Override
public boolean isUnknownType() {
    // If the object is unknown now, check the supertype again,
    // because it might have been resolved since the last check.
    if (unknown) {
        ObjectType implicitProto = getImplicitPrototype();
        if (implicitProto == null || implicitProto.isNativeObjectType()) {
            // No informative prototype: the type is considered known unless
            // one of the constructor's extended interfaces is still unknown.
            unknown = false;
            for (ObjectType interfaceType : getCtorExtendedInterfaces()) {
                if (interfaceType.isUnknownType()) {
                    unknown = true;
                    break;
                }
            }
        } else {
            // Inherit unknown-ness from the implicit prototype.
            unknown = implicitProto.isUnknownType();
        }
    }
    return unknown;
}
/**
 * Lazily initializes the client's executor service, metadata consumer and
 * producer cache; components that already exist are kept as-is.
 *
 * @return this client, for chaining
 * @throws Exception if any Kafka component fails to initialize
 */
public KafkaClient init() throws Exception {
    if (executorService == null) {
        // Bounded pool: between 1 and 4 threads depending on available cores.
        int numThreads = Math.min(Math.max(Runtime.getRuntime().availableProcessors(), 1), 4);
        executorService = Executors.newFixedThreadPool(numThreads);
        // Remember that we created it, so we are responsible for shutdown.
        myOwnExecutorService = true;
    } else {
        myOwnExecutorService = false;
    }
    if (metadataConsumer == null) {
        metadataConsumer = KafkaHelper.createKafkaConsumer(getKafkaBootstrapServers(),
            getMetadataConsumerGroupId(), false, true);
    }
    if (cacheProducers == null) {
        // Pre-create one producer per known producer type.
        cacheProducers = new ConcurrentHashMap<>();
        for (ProducerType type : ProducerType.ALL_TYPES) {
            cacheProducers.put(type, KafkaHelper.createKafkaProducer(type,
                kafkaBootstrapServers, producerProperties));
        }
    }
    return this;
}
/**
 * Creates a Boolean field mapping: empty input converts to {@code false},
 * any other input goes through {@link Boolean#parseBoolean}. A boolean-format
 * constraint ("error.boolean") is attached before the caller's constraints.
 *
 * @param constraints additional constraints appended to the mapping
 * @return the boolean field mapping
 */
public static Mapping<Boolean> bool(Constraint... constraints) {
    return new FieldMapping(
        InputMode.SINGLE,
        mkSimpleConverter(s ->
            isEmptyStr(s) ? false : Boolean.parseBoolean(s)
        ), new MappingMeta(MAPPING_BOOLEAN, Boolean.class)
    ).constraint(checking(Boolean::parseBoolean, "error.boolean", true))
        .constraint(constraints);
}
java | public <T> void notifyObserversOfRequestSuccess(CachedSpiceRequest<T> request) {
RequestProcessingContext requestProcessingContext = new RequestProcessingContext();
requestProcessingContext.setExecutionThread(Thread.currentThread());
post(new RequestSucceededNotifier<T>(request, spiceServiceListenerList, requestProcessingContext));
} |
def options(cls):
    """Provide a sorted list of options.

    Each option is a (value, name) pair taken from the class's own
    attributes, excluding dunder names.
    """
    pairs = []
    for name, value in cls.__dict__.items():
        if not name.startswith('__'):
            pairs.append((value, name))
    pairs.sort()
    return pairs
def _get_principal_axes_from_ndk_string(self, ndk_string, exponent):
    """
    Gets the principal axes from the ndk string and returns an instance
    of the GCMTPrincipalAxes class
    """
    # Eigenvalues are stored scaled; restore with 10 ** exponent.
    scale = 10. ** exponent

    def _axis(eig, plunge, azimuth):
        # Package one principal axis in the dict layout used downstream.
        return {'eigenvalue': scale * float(eig),
                'plunge': float(plunge),
                'azimuth': float(azimuth)}

    axes = GCMTPrincipalAxes()
    # Fixed-width fields of the NDK record, in T, B, P order.
    axes.t_axis = _axis(ndk_string[0:8], ndk_string[8:11], ndk_string[11:15])
    axes.b_axis = _axis(ndk_string[15:23], ndk_string[23:26],
                        ndk_string[26:30])
    axes.p_axis = _axis(ndk_string[30:38], ndk_string[38:41],
                        ndk_string[41:])
    return axes
def customFilter(self, filterFunc):
    '''
    customFilter - Apply a custom filter to elements and return a QueryableList of matches

    @param filterFunc <lambda/function> - A lambda/function that is passed an item, and
        returns True if the item matches (will be returned), otherwise False.

    @return - A QueryableList object of the same type, with only the matching objects returned.
    '''
    # Build a result of the same concrete type as this list.
    matches = self.__class__()
    for candidate in filter(filterFunc, self):
        matches.append(candidate)
    return matches
def conf_budget(self, budget):
    """
    Set limit on the number of conflicts.

    :param budget: maximum number of conflicts the solver may spend
        (solver-specific sentinel values, e.g. negative, presumably mean
        "no limit" -- confirm against the pysolvers binding)
    """
    # Only forward to the native binding when a glucose instance exists.
    if self.glucose:
        pysolvers.glucose41_cbudget(self.glucose, budget)
/**
 * Resolves a PMML function by name, caching both resolved {@link Function}
 * instances and loaded function classes.
 *
 * @param name the function name; may be null
 * @return the function, or null when the name is null, unknown or unresolvable
 * @throws EvaluationException if a resolved class cannot be instantiated
 */
static
public Function getFunction(String name){
    if(name == null){
        return null;
    } // End if
    // Fast path: a fully constructed Function is already registered.
    if(FunctionRegistry.functions.containsKey(name)){
        Function function = FunctionRegistry.functions.get(name);
        return function;
    }
    Class<?> functionClazz;
    if(FunctionRegistry.functionClazzes.containsKey(name)){
        functionClazz = FunctionRegistry.functionClazzes.get(name);
    } else
    {
        // Cache the lookup result -- even a null -- so failed loads are
        // not retried on every call.
        functionClazz = loadFunctionClass(name);
        FunctionRegistry.functionClazzes.put(name, functionClazz);
    } // End if
    if(functionClazz != null){
        Function function;
        try {
            function = (Function)functionClazz.newInstance();
        } catch(IllegalAccessException | InstantiationException | ExceptionInInitializerError e){
            throw new EvaluationException("Function class " + PMMLException.formatKey(functionClazz.getName()) + " could not be instantiated")
                .initCause(e);
        }
        return function;
    }
    return null;
}
def ttl(self, value):
    """Change self ttl with input value.

    :param float value: new ttl in seconds, or None to cancel expiry.
    """
    # NOTE(review): Annotation.__TIMER / Annotation.__TS are name-mangled,
    # so this code presumably lives inside the Annotation class -- confirm.
    # get timer
    timer = getattr(self, Annotation.__TIMER, None)
    # if timer is running, stop the timer
    if timer is not None:
        timer.cancel()
    # initialize timestamp
    timestamp = None
    # if value is None
    if value is None:
        # nonify timer
        timer = None
    else:  # else, renew a timer
        # get timestamp
        timestamp = time() + value
        # start a new timer that deletes this annotation when the ttl expires
        timer = Timer(value, self.__del__)
        timer.start()
    # set/update attributes
    setattr(self, Annotation.__TIMER, timer)
    setattr(self, Annotation.__TS, timestamp)
def _RemoveRegistryKeys(self, metadata_value_pairs):
    """Filter out registry keys to operate on files."""
    registry = rdf_paths.PathSpec.PathType.REGISTRY
    # Keep only pairs whose stat entry does not point at a registry path.
    return [(metadata, stat_entry)
            for metadata, stat_entry in metadata_value_pairs
            if stat_entry.pathspec.pathtype != registry]
def userBrowser(self, request, tag):
    """
    Render a TDB of local users.

    Builds a LocalUserBrowserFragment bound to this page's browser and
    returns it for rendering in place of the tag.
    """
    f = LocalUserBrowserFragment(self.browser)
    # Load the fragment's template and attach it to this page.
    f.docFactory = webtheme.getLoader(f.fragmentName)
    f.setFragmentParent(self)
    return f
/**
 * Encodes the given relative path as a session-aware URL for the given
 * scheme, delegating to the converged session.
 *
 * @param relativePath the path to encode
 * @param scheme the URL scheme (e.g. "http" or "https")
 * @return the encoded URL
 */
public String encodeURL(String relativePath, String scheme) {
    return convergedSessionDelegate.encodeURL(relativePath, scheme);
}
def recv(self, stream, crc_mode=1, retry=16, timeout=60, delay=1, quiet=0):
    '''
    Receive a stream via the XMODEM protocol.

        >>> stream = file('/etc/issue', 'wb')
        >>> print modem.recv(stream)
        2342

    Returns the number of bytes received on success or ``None`` in case of
    failure.

    NOTE(review): this module is Python 2 (``print >> sys.stderr`` below).
    '''
    # --- Handshake phase: request CRC mode first (send 'C'), then fall
    # back to plain-checksum mode (send NAK) after half the retries. ---
    # initiate protocol
    error_count = 0
    char = 0
    cancel = 0
    while True:
        # first try CRC mode, if this fails,
        # fall back to checksum mode
        if error_count >= retry:
            self.abort(timeout=timeout)
            return None
        elif crc_mode and error_count < (retry / 2):
            if not self.putc(CRC):
                time.sleep(delay)
                error_count += 1
        else:
            crc_mode = 0
            if not self.putc(NAK):
                time.sleep(delay)
                error_count += 1
        char = self.getc(1, timeout)
        if not char:
            error_count += 1
            continue
        elif char == SOH:
            #crc_mode = 0
            break
        elif char == STX:
            break
        elif char == CAN:
            # Two consecutive CANs from the sender abort the transfer.
            if cancel:
                return None
            else:
                cancel = 1
        else:
            error_count += 1
    # --- Data phase: SOH announces 128-byte packets, STX 1024-byte
    # packets, EOT ends the transfer. ---
    # read data
    error_count = 0
    income_size = 0
    packet_size = 128
    sequence = 1
    cancel = 0
    while True:
        while True:
            if char == SOH:
                packet_size = 128
                break
            elif char == STX:
                packet_size = 1024
                break
            elif char == EOT:
                # We received an EOT, so send an ACK and return the received
                # data length
                self.putc(ACK)
                return income_size
            elif char == CAN:
                # cancel at two consecutive cancels
                if cancel:
                    return None
                else:
                    cancel = 1
            else:
                if not quiet:
                    print >> sys.stderr, \
                        'recv ERROR expected SOH/EOT, got', ord(char)
                error_count += 1
                if error_count >= retry:
                    self.abort()
                    return None
        # read sequence
        error_count = 0
        cancel = 0
        # Each packet carries its sequence number and its complement.
        seq1 = ord(self.getc(1))
        seq2 = 0xff - ord(self.getc(1))
        if seq1 == sequence and seq2 == sequence:
            # sequence is ok, read packet
            # packet_size + checksum
            data = self.getc(packet_size + 1 + crc_mode, timeout)
            if crc_mode:
                # 16-bit CRC: two trailing bytes, big-endian.
                csum = (ord(data[-2]) << 8) + ord(data[-1])
                data = data[:-2]
                log.debug('CRC (%04x <> %04x)' % \
                    (csum, self.calc_crc(data)))
                valid = csum == self.calc_crc(data)
            else:
                # 8-bit arithmetic checksum: one trailing byte.
                csum = data[-1]
                data = data[:-1]
                log.debug('checksum (checksum(%02x <> %02x)' % \
                    (ord(csum), self.calc_checksum(data)))
                valid = ord(csum) == self.calc_checksum(data)
            # valid data, append chunk
            if valid:
                income_size += len(data)
                stream.write(data)
                self.putc(ACK)
                sequence = (sequence + 1) % 0x100
                char = self.getc(1, timeout)
                continue
        else:
            # consume data
            self.getc(packet_size + 1 + crc_mode)
            # NOTE(review): self.debug looks like it should be log.debug,
            # matching the other debug calls above -- confirm the class
            # actually defines a debug() method.
            self.debug('expecting sequence %d, got %d/%d' % \
                (sequence, seq1, seq2))
        # something went wrong, request retransmission
        self.putc(NAK)
java | @Override
public ExecuteChangeSetResult executeChangeSet(ExecuteChangeSetRequest request) {
request = beforeClientExecution(request);
return executeExecuteChangeSet(request);
} |
/**
 * Extracts the raw LTPA token bytes from a GSS token.
 *
 * @param codec codec used to decode the CDR-encapsulated opaque value
 * @param token_arr the incoming GSS token bytes
 * @return the decoded LTPA token bytes (never null or empty)
 * @throws SASException with minor code 2 when decoding fails or yields no bytes
 */
@Sensitive
public static byte[] decodeLTPAToken(Codec codec, @Sensitive byte[] token_arr) throws SASException {
    byte[] ltpaTokenBytes = null;
    try {
        byte[] data = readGSSTokenData(LTPAMech.LTPA_OID.substring(4), token_arr);
        if (data != null) {
            // Check if it is double-encoded WAS classic token and get the encoded ltpa token bytes
            if (isGSSToken(LTPAMech.LTPA_OID.substring(4), data)) {
                data = readGSSTokenData(LTPAMech.LTPA_OID.substring(4), data);
            }
            // Decode the CDR "Opaque" wrapper to obtain the raw token bytes.
            Any any = codec.decode_value(data, org.omg.Security.OpaqueHelper.type());
            ltpaTokenBytes = org.omg.Security.OpaqueHelper.extract(any);
        }
    } catch (Exception ex) {
        // TODO: Modify SASException to take a message?
        throw new SASException(2, ex);
    }
    if (ltpaTokenBytes == null || ltpaTokenBytes.length == 0) {
        throw new SASException(2);
    }
    return ltpaTokenBytes;
}
/**
 * Executes the currently configured search: for fresh queries resets paging
 * state and re-derives the visible segmentation, then opens (or replaces) a
 * result tab and starts the asynchronous match-fetch and count queries.
 *
 * @param replaceOldTab whether the previously selected result tab is closed
 * @param freshQuery whether paging/selection state is reset before searching
 */
public void executeSearch(boolean replaceOldTab, boolean freshQuery)
{
    if (freshQuery)
    {
        // Reset paging and match selection for a brand-new query.
        getState().getOffset().setValue(0l);
        getState().getSelectedMatches().setValue(new TreeSet<Long>());
        // get the value for the visible segmentation from the configured context
        Set<String> selectedCorpora = getState().getSelectedCorpora().getValue();
        CorpusConfig config = new CorpusConfig();
        if(selectedCorpora != null && !selectedCorpora.isEmpty())
        {
            config = ui.getCorpusConfigWithCache(selectedCorpora.iterator().next());
        }
        if(config.containsKey(SearchOptionsPanel.KEY_DEFAULT_BASE_TEXT_SEGMENTATION))
        {
            String configVal = config.getConfig(SearchOptionsPanel.KEY_DEFAULT_BASE_TEXT_SEGMENTATION);
            // An empty value or "tok" means: use the raw token layer.
            if("".equals(configVal) || "tok".equals(configVal))
            {
                configVal = null;
            }
            getState().getVisibleBaseText().setValue(configVal);
        }
        else
        {
            getState().getVisibleBaseText().setValue(getState().getContextSegmentation().getValue());
        }
    }
    // construct a query from the current properties
    DisplayedResultQuery displayedQuery = getSearchQuery();
    searchView.getControlPanel().getQueryPanel().setStatus("Searching...");
    // Abort any still-running search before starting a new one.
    cancelSearch();
    // cleanup resources
    VaadinSession session = VaadinSession.getCurrent();
    session.setAttribute(IFrameResourceMap.class, new IFrameResourceMap());
    if (session.getAttribute(MediaController.class) != null)
    {
        session.getAttribute(MediaController.class).clearMediaPlayers();
    }
    searchView.updateFragment(displayedQuery);
    // Validate corpus selection and query text before hitting the service.
    if (displayedQuery.getCorpora() == null || displayedQuery.getCorpora().
        isEmpty())
    {
        Notification.show("Please select a corpus",
            Notification.Type.WARNING_MESSAGE);
        return;
    }
    if ("".equals(displayedQuery.getQuery()))
    {
        Notification.show("Empty query", Notification.Type.WARNING_MESSAGE);
        return;
    }
    addHistoryEntry(displayedQuery);
    AsyncWebResource res = Helper.getAnnisAsyncWebResource();
    //
    // begin execute match fetching
    //
    ResultViewPanel oldPanel = searchView.getLastSelectedResultView();
    if (replaceOldTab)
    {
        // remove old panel from view
        searchView.closeTab(oldPanel);
    }
    ResultViewPanel newResultView = new ResultViewPanel(ui, ui,
        ui.getInstanceConfig(), displayedQuery);
    newResultView.getPaging().addCallback(new SpecificPagingCallback(
        ui, searchView, newResultView, displayedQuery));
    TabSheet.Tab newTab;
    List<ResultViewPanel> existingResultPanels = getResultPanels();
    // Number additional result tabs ("Query Result #2", ...).
    String caption = existingResultPanels.isEmpty()
        ? "Query Result" : "Query Result #" + (existingResultPanels.size() + 1);
    newTab = searchView.getMainTab().addTab(newResultView, caption);
    newTab.setClosable(true);
    newTab.setIcon(FontAwesome.SEARCH);
    searchView.getMainTab().setSelectedTab(newResultView);
    searchView.notifiyQueryStarted();
    // Fetch the matches in a background task.
    Background.run(new ResultFetchJob(displayedQuery,
        newResultView, ui));
    //
    // end execute match fetching
    //
    //
    // begin execute count
    //
    // start count query
    searchView.getControlPanel().getQueryPanel().setCountIndicatorEnabled(true);
    AsyncWebResource countRes = res.path("query").path("search").
        path("count").
        queryParam("q", Helper.encodeJersey(displayedQuery.getQuery()))
        .queryParam("corpora", Helper.encodeJersey(StringUtils.join(displayedQuery.getCorpora(), ",")));
    Future<MatchAndDocumentCount> futureCount = countRes.get(
        MatchAndDocumentCount.class);
    state.getExecutedTasks().put(QueryUIState.QueryType.COUNT, futureCount);
    Background.run(new CountCallback(newResultView, displayedQuery.getLimit(), ui));
    //
    // end execute count
    //
}
/**
 * Removes the value at {@code dPath} inside the JSON attribute
 * {@code attrName} and stores the re-serialized attribute, all under the
 * write lock.
 *
 * @param attrName name of the JSON attribute to modify
 * @param dPath d-path of the sub-attribute to delete
 * @return this bean, for chaining
 */
public BaseJsonBo removeSubAttr(String attrName, String dPath) {
    Lock lock = lockForWrite();
    try {
        JsonNode attr = cacheJsonObjs.get(attrName);
        JacksonUtils.deleteValue(attr, dPath);
        // NOTE(review): the trailing `false` flag's meaning is defined by
        // setAttribute elsewhere in this class -- confirm before relying on it.
        return (BaseJsonBo) setAttribute(attrName, SerializationUtils.toJsonString(attr),
            false);
    } finally {
        lock.unlock();
    }
}
def reconstruct(self, b, X=None):
    """Reconstruct representation of signal b in signal set.

    Parameters
    ----------
    b : int
        index of the signal/channel group to reconstruct
    X : array_like, optional
        coefficient array; defaults to the current solver coefficients
    """
    if X is None:
        X = self.getcoef()
    # Transform the coefficients over the spatial axes.
    Xf = sl.rfftn(X, None, self.cbpdn.cri.axisN)
    # Select the channel slice for signal b (offsets stored in self.chncs).
    slc = (slice(None),)*self.dimN + \
        (slice(self.chncs[b], self.chncs[b+1]),)
    # Sum the per-filter products over the filter axis in the DFT domain.
    Sf = np.sum(self.cbpdn.Df[slc] * Xf, axis=self.cbpdn.cri.axisM)
    return sl.irfftn(Sf, self.cbpdn.cri.Nv, self.cbpdn.cri.axisN)
def _get_site_amplification(self, sites, coeffs):
    """
    Compute fourth term of equation (1) on p. 1200:

    ``b5 * S``
    """
    # S is the rock dummy variable derived from the site collection.
    return coeffs['b5'] * self.get_site_type_dummy_variables(sites)
java | public CMAArray<CMASpaceMembership> fetchAll(String spaceId, Map<String, String> query) {
assertNotNull(spaceId, "spaceId");
if (query == null) {
return service.fetchAll(spaceId).blockingFirst();
} else {
return service.fetchAll(spaceId, query).blockingFirst();
}
} |
java | public static void write(Writer writer, Object jsonObject)
throws JsonGenerationException, IOException {
final JsonGenerator jw = JSON_FACTORY.createGenerator(writer);
jw.writeObject(jsonObject);
} |
/**
 * Fetches all aaaglobal_aaapreauthenticationpolicy_binding resources that
 * match the given filter expression.
 *
 * @param service the nitro service to query
 * @param filter filter expression (e.g. "property-name:value" pairs)
 * @return the matching bindings
 * @throws Exception if the underlying request fails
 */
public static aaaglobal_aaapreauthenticationpolicy_binding[] get_filtered(nitro_service service, String filter) throws Exception{
    aaaglobal_aaapreauthenticationpolicy_binding obj = new aaaglobal_aaapreauthenticationpolicy_binding();
    options option = new options();
    option.set_filter(filter);
    aaaglobal_aaapreauthenticationpolicy_binding[] response = (aaaglobal_aaapreauthenticationpolicy_binding[]) obj.getfiltered(service, option);
    return response;
}
/**
 * Resolves the Artifactory server to use from either a configured server ID
 * or an inline pipeline server definition.
 *
 * <p>Exactly one of the two parameters must be non-null: when both or
 * neither are given, the configuration is ambiguous and null is returned.
 *
 * @param artifactoryServerID ID of a globally configured server, or null
 * @param pipelineServer inline server definition from the pipeline, or null
 * @return the resolved server, or null when the input is ambiguous or the
 *     ID does not match any configured server
 */
public static org.jfrog.hudson.ArtifactoryServer prepareArtifactoryServer(String artifactoryServerID,
                                                                          ArtifactoryServer pipelineServer) {
    if (artifactoryServerID == null && pipelineServer == null) {
        return null;
    }
    if (artifactoryServerID != null && pipelineServer != null) {
        return null;
    }
    if (pipelineServer != null) {
        CredentialsConfig credentials = pipelineServer.createCredentialsConfig();
        // The same credentials serve as deployer and resolver credentials.
        return new org.jfrog.hudson.ArtifactoryServer(null, pipelineServer.getUrl(), credentials,
            credentials, pipelineServer.getConnection().getTimeout(), pipelineServer.isBypassProxy(), pipelineServer.getConnection().getRetry(), pipelineServer.getDeploymentThreads());
    }
    org.jfrog.hudson.ArtifactoryServer server = RepositoriesUtils.getArtifactoryServer(artifactoryServerID, RepositoriesUtils.getArtifactoryServers());
    if (server == null) {
        return null;
    }
    return server;
}
/**
 * {@inheritDoc}
 *
 * <p>After default rendering, overrides the icon for the root node and for
 * nodes whose user object is a {@code Target}: a context icon (in-scope or
 * not), or an "everything in scope" icon.
 */
@Override
public Component getTreeCellRendererComponent(JTree tree, Object value,
        boolean sel, boolean expanded, boolean leaf, int row,
        boolean hasFocus) {
    super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
    SiteNode node = null;
    Target target = null;
    if (value instanceof SiteNode) {
        node = (SiteNode) value;
        if (node.getUserObject() instanceof Target) {
            target = (Target) node.getUserObject();
        }
    }
    if (node != null) {
        if (node.isRoot()) {
            setIcon(DisplayUtils.getScaledIcon(ROOT_ICON));
        } else if (target != null) {
            if (target.getContext() != null) {
                // Context targets: distinguish in-scope contexts.
                if (target.getContext().isInScope()) {
                    setIcon(DisplayUtils.getScaledIcon(CONTEXT_IN_SCOPE_ICON));
                } else {
                    setIcon(DisplayUtils.getScaledIcon(CONTEXT_ICON));
                }
            } else if (target.isInScopeOnly()) {
                setIcon(DisplayUtils.getScaledIcon(ALL_IN_SCOPE_ICON));
            }
        }
    }
    return this;
}
/**
 * Reads messages from the given reader, delegating to {@code readMessages(is, outbox)}
 * with an outbox obtained from (or created for) the current context. The outbox is
 * closed when reading completes (try-with-resources).
 *
 * @param is the source to read messages from
 * @return the number of messages read, as reported by the delegate
 * @throws IOException if reading fails
 */
public int readMessages(Reader is)
    throws IOException
{
    //InboxAmp oldInbox = null;
    try (OutboxAmp outbox = OutboxAmp.currentOrCreate(getManager())) {
        //OutboxThreadLocal.setCurrent(_outbox);
        return readMessages(is, outbox);
    } finally {
        //OutboxThreadLocal.setCurrent(null);
    }
}
def _time_from_json(value, field):
    """Coerce ``value`` to a :class:`datetime.time`, if set or not nullable.

    Implicitly returns ``None`` when the value is null and the field is
    nullable; raises :class:`ValueError` on an unrecognized time format.
    """
    if _not_null(value, field):
        if len(value) == 8:  # HH:MM:SS
            fmt = _TIMEONLY_WO_MICROS
        elif len(value) == 15:  # HH:MM:SS.micros
            fmt = _TIMEONLY_W_MICROS
        else:
            raise ValueError("Unknown time format: {}".format(value))
        return datetime.datetime.strptime(value, fmt).time()
def get_compatible_generator_action(self, filename):
    """
    Return the **first** compatible :class:`GeneratorAction` for a given filename
    or ``None`` if none is found.

    Args:
        filename (str): The filename of the template to process.
    """
    # Lazily scan the registered actions; next() stops at the first match.
    matches = (action for action in self.__generator_actions
               if action.act_on_file(filename))
    return next(matches, None)
def blocks(self, lines):
    """Groups lines into markdown blocks.

    A new block starts whenever the parser is in the 'start' state; a fenced
    block runs until the closing ``` line, a plain block until a blank line.
    Every line (including the delimiters) is accumulated into the current block.
    """
    state = markdown.blockparser.State()
    blocks = []
    # We use three states: start, ``` and '\n'
    state.set('start')
    # index of current block
    currblock = 0
    for line in lines:
        line += '\n'
        if state.isstate('start'):
            # Entering a new block: fenced if the line opens with ```,
            # otherwise a regular block terminated by a blank line.
            if line[:3] == '```':
                state.set('```')
            else:
                state.set('\n')
            blocks.append('')
            currblock = len(blocks) - 1
        else:
            marker = line[:3]  # Will capture either '\n' or '```'
            if state.isstate(marker):
                # The line matches the current block's terminator: block ends.
                state.reset()
        blocks[currblock] += line
    return blocks
/**
 * Generates the public constructor of the pojo-descriptor implementation class:
 * it emits a super(...) call with the pojo's type and the descriptor builder,
 * declares a local property-descriptor variable, and then writes one
 * property-descriptor block per property of the pojo.
 *
 * @param sourceWriter   receives the generated source code
 * @param simpleName     simple name of the generated class
 * @param inputType      the pojo type the descriptor is generated for
 * @param pojoDescriptor descriptor listing the pojo's properties
 * @param context        the GWT generator context (used for type lookups)
 */
protected void generateConstructor(SourceWriter sourceWriter, String simpleName, JClassType inputType, PojoDescriptor<?> pojoDescriptor,
        GeneratorContext context) {
    generateSourcePublicConstructorDeclaration(sourceWriter, simpleName);
    sourceWriter.print("super(new ");
    sourceWriter.print(SimpleGenericTypeLimited.class.getSimpleName());
    sourceWriter.print("(");
    sourceWriter.print(inputType.getName());
    sourceWriter.print(".class), ");
    sourceWriter.print(AbstractPojoDescriptorBuilderLimited.class.getSimpleName());
    sourceWriter.println(".getInstance());");
    // local variable for property descriptor
    sourceWriter.print(PojoPropertyDescriptorImpl.class.getSimpleName());
    sourceWriter.println(" propertyDescriptor;");
    JClassType superType = getConfiguration().getSupportedSuperType(inputType, context.getTypeOracle());
    StatefulPropertyGenerator state = new StatefulPropertyGenerator(sourceWriter, superType);
    for (PojoPropertyDescriptor propertyDescriptor : pojoDescriptor.getPropertyDescriptors()) {
        state.generatePropertyDescriptorBlock(propertyDescriptor);
    }
    generateSourceCloseBlock(sourceWriter);
}
def load(self):
    """Loads the user's bank account details from the bank page.

    Raises:
        noBankAcct: if the page does not show an existing bank account.
    """
    pg = self.usr.getPage("http://www.neopets.com/bank.phtml")
    # Verifies account exists -- the greeting only appears for account holders.
    if not "great to see you again" in pg.content:
        logging.getLogger("neolib.user").info("Could not load user's bank. Most likely does not have an account.", {'pg': pg})
        raise noBankAcct
    self.__loadDetails(pg)
java | public void decode(DataItemListener dataItemListener) throws CborException {
Objects.requireNonNull(dataItemListener);
DataItem dataItem = decodeNext();
while (dataItem != null) {
dataItemListener.onDataItem(dataItem);
dataItem = decodeNext();
}
} |
def configure():
    """
    Configures YAML parser for Step serialization and deserialization
    Called in drain/__init__.py
    """
    # Register a representer for Step and all of its subclasses, and the
    # matching multi-constructor for the '!step' tag family.
    yaml.add_multi_representer(Step, step_multi_representer)
    yaml.add_multi_constructor('!step', step_multi_constructor)
    # Disable anchors/aliases so repeated objects are serialized in full
    # each time instead of as '*id' references.
    yaml.Dumper.ignore_aliases = lambda *args: True
def iscomplex(polynomial):
    """Returns whether the polynomial has complex coefficients

    :param polynomial: Polynomial of noncommutive variables.
    :type polynomial: :class:`sympy.core.expr.Expr`.

    :returns: bool -- whether there is a complex coefficient.
    """
    if isinstance(polynomial, (int, float)):
        return False
    if isinstance(polynomial, complex):
        return True
    polynomial = polynomial.expand()
    # Iterate over the monomials (keys of the coefficients dict) and inspect
    # each monomial's multiplicative factors for the imaginary unit.
    for monomial in polynomial.as_coefficients_dict():
        for variable in monomial.as_coeff_mul()[1]:
            # NOTE(review): only factors of the monomial are checked; a complex
            # coefficient stored purely in the dict *values* would be missed --
            # confirm sympy folds I into the monomial for the supported inputs.
            if isinstance(variable, complex) or variable == I:
                return True
    return False
/**
 * Calculates the duration between an in point and an out point as a
 * {@link TimecodeDuration} expressed in the in point's timecode base and
 * drop-frame mode.
 *
 * @param inPoint  the starting timecode
 * @param outPoint the ending timecode; converted to the in point's settings if incompatible
 * @return the duration from inPoint to outPoint
 */
public static TimecodeDuration calculateDuration(Timecode inPoint, Timecode outPoint)
{
    if (!inPoint.isCompatible(outPoint)) {
        // Re-express the out point in the in point's timecode base / drop-frame mode.
        MutableTimecode mutableTimecode = new MutableTimecode(outPoint);
        mutableTimecode.setTimecodeBase(inPoint.getTimecodeBase());
        mutableTimecode.setDropFrame(inPoint.isDropFrame());
        outPoint = new Timecode(mutableTimecode);
    }
    long frameNumber = outPoint.getFrameNumber() - inPoint.getFrameNumber();
    if (frameNumber < 0) {
        // Negative difference means the interval wraps past midnight:
        // add one day's worth of frames (24 hours * 6 ten-minute blocks per hour).
        frameNumber += (24 * 6 * inPoint.framesPerTenMinutes);
    }
    return new TimecodeDuration(inPoint.getTimecodeBase(), frameNumber, inPoint.isDropFrame());
}
def get_child(parent, child_index):
    """
    Return ``parent``'s child node at ``child_index``, or ``None`` when the
    index is out of range (negative or past the last child).
    """
    children = parent.childNodes
    if 0 <= child_index < len(children):
        return children[child_index]
    return None
/**
 * Authenticates the peer of the given SSL session using its certificate chain
 * and decorates the resulting subject's WSCredential with the identity type
 * and the chain itself for downstream CSIv2 processing.
 *
 * @param session the SSL session whose peer certificates are used; may be null,
 *                in which case null is returned
 * @return the authenticated transport subject, or null when no session is given
 * @throws SSLPeerUnverifiedException if the peer is not verified
 * @throws AuthenticationException if certificate authentication fails
 * @throws CredentialExpiredException if the credential has expired
 * @throws CredentialDestroyedException if the credential was destroyed
 */
private Subject authenticateWithCertificateChain(SSLSession session) throws SSLPeerUnverifiedException, AuthenticationException, CredentialExpiredException, CredentialDestroyedException {
    Subject transportSubject = null;
    if (session != null) {
        Certificate[] certificateChain = session.getPeerCertificates();
        transportSubject = authenticator.authenticate((X509Certificate[]) certificateChain);
        /* Here we need to get the WSCredential from the subject. We will use the subject manager for the same. */
        SubjectHelper subjectHelper = new SubjectHelper();
        WSCredential wsCredential = subjectHelper.getWSCredential(transportSubject);
        /*
         * First we tell the WSCredential that the identity token is in the form of a certificate chain. This is
         * done by setting the property "wssecurity.identity_name" to "ClientCertificate"
         */
        wsCredential.set(Constants.IDENTITY_NAME, Constants.ClientCertificate);
        /*
         * Now we need to put the certificate chain into the WScredential object. By doing this, we
         * make sure that, the authenticated certificates are indeed part of the credential. This credential
         * can be used further down the CSIv2 flow. The certificate is set as a name value pair, with the name
         * "wssecurity.identity_value"
         */
        wsCredential.set(Constants.IDENTITY_VALUE, certificateChain);
    }
    return transportSubject;
}
python | def _toOriginal(self, val):
""" Pitty attempt to convert itertools result into a real object
"""
if self._clean.isTuple():
return tuple(val)
elif self._clean.isList():
return list(val)
elif self._clean.isDict():
return dict(val)
else:
return val |
/**
 * Marshalls the given {@code ConferenceProvider} field by field using the
 * supplied protocol marshaller.
 *
 * @param conferenceProvider the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write each field binding to
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(ConferenceProvider conferenceProvider, ProtocolMarshaller protocolMarshaller) {
    if (conferenceProvider == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(conferenceProvider.getArn(), ARN_BINDING);
        protocolMarshaller.marshall(conferenceProvider.getName(), NAME_BINDING);
        protocolMarshaller.marshall(conferenceProvider.getType(), TYPE_BINDING);
        protocolMarshaller.marshall(conferenceProvider.getIPDialIn(), IPDIALIN_BINDING);
        protocolMarshaller.marshall(conferenceProvider.getPSTNDialIn(), PSTNDIALIN_BINDING);
        protocolMarshaller.marshall(conferenceProvider.getMeetingSetting(), MEETINGSETTING_BINDING);
    } catch (Exception e) {
        // Wrap any failure so callers only have to handle SdkClientException.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java | @Api
public void initializeList(Collection<SearchWidget> searchWidgets) {
LinkedHashMap<String, String> values = new LinkedHashMap<String, String>();
for (SearchWidget searchWidget : searchWidgets) {
values.put(searchWidget.getSearchWidgetId(), searchWidget.getName());
}
selectSearch.setValueMap(values);
this.searchWidgets.addAll(searchWidgets);
} |
def getMeasurement(self, measurementId, measurementStatus=None):
    """
    Gets the measurement with the given id.
    :param measurementId: the id.
    :param measurementStatus: the status of the requested measurement.
    :return: the matching measurement or none if it doesn't exist.
    """
    # Linear scan; the first measurement with a matching id wins.
    for measurement in self.getMeasurements(measurementStatus):
        if measurement.id == measurementId:
            return measurement
    return None
/**
 * Validates that the given text control is non-empty, toggling the "error"
 * style class and the background colour accordingly.
 *
 * @param tf the control to validate
 * @return true when the control is empty after trimming (i.e. invalid)
 */
private boolean validate(TextInputControl tf) {
    // Empty (after trimming) means invalid.
    boolean res = tf.getText().trim().length() == 0;
    ObservableList<String> styleClass = tf.getStyleClass();
    if (res) {
        // Only add the error class once, even across repeated validations.
        if (!styleClass.contains("error")) {
            tf.setStyle("-text-area-background: " + errorColor + ";");
            styleClass.add("error");
        }
    } else {
        // remove all occurrences:
        tf.setStyle("-text-area-background: " + defaultColor + ";");
        styleClass.removeAll(Collections.singleton("error"));
    }
    return res;
}
def cache_infos(self, queryset):
    """
    Cache the number of entries published and the last
    modification date under each tag.
    """
    self.cache = {}
    for item in queryset:
        # If the sitemap is too slow, don't hesitate to do this :
        # self.cache[item.pk] = (item.count, None)
        # Store (entry count, last update of the most recent tagged entry).
        self.cache[item.pk] = (
            item.count, TaggedItem.objects.get_by_model(
                self.entries_qs, item)[0].last_update)
def get_gated_grpc_tensors(self, matching_debug_op=None):
    """Extract all nodes with gated-gRPC debug ops attached.

    Operates on this object's cached ``self._graph_def``; uses cached results
    if available. This method is thread-safe.

    Args:
      matching_debug_op: Return tensors and nodes with only matching the
        specified debug op name (optional). If `None`, will extract only
        `DebugIdentity` debug ops.

    Returns:
      A list of (node_name, op_type, output_slot, debug_op) tuples.
    """
    with self._grpc_gated_lock:
        matching_debug_op = matching_debug_op or 'DebugIdentity'
        if matching_debug_op not in self._grpc_gated_tensors:
            # First, construct a map from node name to op type.
            node_name_to_op_type = dict(
                (node.name, node.op) for node in self._graph_def.node)
            # Second, populate the output list.
            gated = []
            for node in self._graph_def.node:
                if node.op == matching_debug_op:
                    for attr_key in node.attr:
                        if attr_key == 'gated_grpc' and node.attr[attr_key].b:
                            node_name, output_slot, _, debug_op = (
                                debug_graphs.parse_debug_node_name(node.name))
                            gated.append(
                                (node_name, node_name_to_op_type[node_name], output_slot,
                                 debug_op))
                            break
            self._grpc_gated_tensors[matching_debug_op] = gated
        return self._grpc_gated_tensors[matching_debug_op]
def page(title=None, pageid=None, auto_suggest=True, redirect=True, preload=False):
    '''
    Get a WikipediaPage object for the page with title `title` or the pageid
    `pageid` (mutually exclusive).

    Keyword arguments:

    * title - the title of the page to load
    * pageid - the numeric pageid of the page to load
    * auto_suggest - let Wikipedia find a valid page title for the query
    * redirect - allow redirection without raising RedirectError
    * preload - load content, summary, images, references, and links during initialization
    '''
    # Guard clause first: at least one identifier is required.
    if title is None and pageid is None:
        raise ValueError("Either a title or a pageid must be specified")
    # A pageid is only used when no title was given (title takes precedence).
    if title is None:
        return WikipediaPage(pageid=pageid, preload=preload)
    if auto_suggest:
        results, suggestion = search(title, results=1, suggestion=True)
        try:
            title = suggestion or results[0]
        except IndexError:
            # if there is no suggestion or search results, the page doesn't exist
            raise PageError(title)
    return WikipediaPage(title, redirect=redirect, preload=preload)
def _method_response_handler(self, response: Dict[str, Any]):
    """Handle a success-range status code by resolving the pending request.

    Parameters:
        (response): - the response as a Python dict

    Return:
        (bool): - strictly speaking, True is returned as long as no error occurred
    """
    code = response.get("CODE")
    if code in (200, 300):
        # Plain result: resolve the pending request synchronously.
        self._result_handler(response)
    else:
        # NOTE(review): the original docstring claimed this covers 200~399,
        # but only 200/300 take the synchronous path; every other code is
        # routed to the async generator-result handler -- confirm intended.
        asyncio.ensure_future(self._gen_result_handler(response))
def on_get(resc, req, resp):
    """ Get the models identified by query parameters

    We return an empty list if no models are found.
    """
    # Fire the generic and search-specific pre-request signals.
    signals.pre_req.send(resc.model)
    signals.pre_req_search.send(resc.model)
    # Delegate filtering, pagination and sorting to the store.
    models = goldman.sess.store.search(resc.rtype, **{
        'filters': req.filters,
        'pages': req.pages,
        'sorts': req.sorts,
    })
    props = to_rest_models(models, includes=req.includes)
    resp.serialize(props)
    signals.post_req.send(resc.model)
    signals.post_req_search.send(resc.model)
def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks for a match against an Entity's name or alias.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, containing the value to check the name or alias against;
                    if the parameter is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the check against the name or alias
    """
    # --- Validation: the filtered type must expose a scalar "name" field and
    # --- a list-typed "alias" field whose element type matches "name".
    filtered_field_type = filter_operation_info.field_type
    if isinstance(filtered_field_type, GraphQLUnionType):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type '
                                      u'{}'.format(filtered_field_type))
    current_type_fields = filtered_field_type.fields
    name_field = current_type_fields.get('name', None)
    alias_field = current_type_fields.get('alias', None)
    if not name_field or not alias_field:
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a '
                                      u'"name" or "alias" field.'.format(filtered_field_type))
    name_field_type = strip_non_null_from_type(name_field.type)
    alias_field_type = strip_non_null_from_type(alias_field.type)
    if not isinstance(name_field_type, GraphQLScalarType):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" '
                                      u'field is not a scalar.'.format(filtered_field_type))
    if not isinstance(alias_field_type, GraphQLList):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its '
                                      u'"alias" field is not a list.'.format(filtered_field_type))
    alias_field_inner_type = strip_non_null_from_type(alias_field_type.of_type)
    if alias_field_inner_type != name_field_type:
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the '
                                      u'"name" field and the inner type of the "alias" field '
                                      u'do not match: {} vs {}'.format(filtered_field_type,
                                                                       name_field_type,
                                                                       alias_field_inner_type))
    # --- Predicate construction: (name == arg) || (alias contains arg).
    argument_inferred_type = name_field_type
    argument_expression, non_existence_expression = _represent_argument(
        location, context, parameters[0], argument_inferred_type)
    check_against_name = expressions.BinaryComposition(
        u'=', expressions.LocalField('name'), argument_expression)
    check_against_alias = expressions.BinaryComposition(
        u'contains', expressions.LocalField('alias'), argument_expression)
    filter_predicate = expressions.BinaryComposition(
        u'||', check_against_name, check_against_alias)
    if non_existence_expression is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        filter_predicate = expressions.BinaryComposition(
            u'||', non_existence_expression, filter_predicate)
    return blocks.Filter(filter_predicate)
python | def _nvram_file(self):
"""
Path to the nvram file
"""
return os.path.join(self.working_dir, "nvram_{:05d}".format(self.application_id)) |
def showPopup( self ):
    """
    Displays a custom popup widget for this system if a checkable state \
    is setup.
    """
    # Fall back to the stock combo-box popup when not in checkable mode.
    if not self.isCheckable():
        return super(XComboBox, self).showPopup()
    if not self.isVisible():
        return
    # update the checkable widget popup
    point = self.mapToGlobal(QPoint(0, self.height() - 1))
    popup = self.checkablePopup()
    popup.setModel(self.model())
    popup.move(point)
    popup.setFixedWidth(self.width())
    # 19 px per row plus a 2 px border, capped at 400 px with a scrollbar.
    height = (self.count() * 19) + 2
    if height > 400:
        height = 400
        popup.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
    else:
        popup.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
    popup.setFixedHeight(height)
    popup.show()
    popup.raise_()
java | public static ExceptionAnnotation exception(Throwable t)
{
if (t instanceof InvocationTargetException)
{
if (GreenPepper.isDebugEnabled()) {
LOGGER.info("Caught exception in fixture execution", t);
}
return new ExceptionAnnotation( ((InvocationTargetException) t).getTargetException() );
}
else
{
if (GreenPepper.isDebugEnabled()) {
LOGGER.info("Caught exception in fixture execution", t);
}
return new ExceptionAnnotation( t );
}
} |
/**
 * Fills the pool up to {@code size} connection listeners. For each new
 * connection a permit is first acquired from the pool's request semaphore
 * (bounded by the configured blocking timeout) to avoid racing with regular
 * check-outs when the pool is nearly full; the size condition is re-checked
 * under the permit before creating a listener, and any listener created after
 * the pool filled up concurrently is immediately destroyed again.
 *
 * @param size target number of listeners; values &lt;= 0 are a no-op
 */
public void fillTo(int size)
{
    if (size <= 0)
        return;
    if (pool.getLogger().isTraceEnabled())
    {
        synchronized (this)
        {
            pool.getLogger().trace(ManagedConnectionPoolUtility.fullDetails(this,
                "fillTo(" + size + ")",
                pool.getConnectionManager().getManagedConnectionFactory(),
                pool.getConnectionManager(),
                pool, pool.getConfiguration(),
                listeners, pool.getInternalStatistics(),
                credential.getSubject(),
                credential.getConnectionRequestInfo()));
        }
    }
    else if (pool.getLogger().isDebugEnabled())
    {
        pool.getLogger().debug(ManagedConnectionPoolUtility.details(
            "fillTo(" + size + ")",
            pool.getConfiguration().getId(),
            getCount(IN_USE, listeners),
            pool.getConfiguration().getMaxSize()));
    }
    while (!pool.isFull())
    {
        // Get a permit - avoids a race when the pool is nearly full
        // Also avoids unnessary fill checking when all connections are checked out
        try
        {
            //TODO:statistics
            if (pool.getRequestSemaphore()
                .tryAcquire(pool.getConfiguration().getBlockingTimeout(), TimeUnit.MILLISECONDS))
            {
                try
                {
                    if (pool.isShutdown())
                    {
                        return;
                    }
                    // We already have enough connections
                    if (listeners.size() >= size)
                    {
                        return;
                    }
                    // Create a connection to fill the pool
                    try
                    {
                        ConnectionListener cl = pool.createConnectionListener(credential, this);
                        if (Tracer.isEnabled())
                            Tracer.createConnectionListener(pool.getConfiguration().getId(), this, cl,
                                cl.getManagedConnection(),
                                false, true, false,
                                Tracer.isRecordCallstacks() ?
                                    new Throwable("CALLSTACK") : null);
                        boolean added = false;
                        // Re-check under the permit: another thread may have filled
                        // the pool while the listener was being created.
                        if (listeners.size() < size)
                        {
                            listeners.add(cl);
                            added = true;
                        }
                        if (!added)
                        {
                            if (Tracer.isEnabled())
                                Tracer.destroyConnectionListener(pool.getConfiguration().getId(), this, cl,
                                    false, false, false, false,
                                    false, true, false,
                                    Tracer.isRecordCallstacks() ?
                                        new Throwable("CALLSTACK") : null);
                            pool.destroyConnectionListener(cl);
                            return;
                        }
                    }
                    catch (ResourceException re)
                    {
                        // Creation failed: stop filling rather than loop on errors.
                        return;
                    }
                }
                finally
                {
                    pool.getRequestSemaphore().release();
                }
            }
        }
        catch (InterruptedException ignored)
        {
            Thread.interrupted();
            //TODO:trace
        }
    }
}
def read_data(self, **kwargs):
    """
    get the data from the service

    :param kwargs: contain keyword args : trigger_id at least
    :type kwargs: dict
    :rtype: list
    """
    trigger_id = kwargs.get('trigger_id')
    # GitHub's 'since' parameter expects ISO-8601, so swap the space for 'T'.
    date_triggered = str(kwargs.get('date_triggered')).replace(' ', 'T')
    data = list()
    if self.token:
        # check if it remains more than 1 access
        # then we can create an issue
        if self.gh.ratelimit_remaining > 1:
            trigger = Github.objects.get(trigger_id=trigger_id)
            issues = self.gh.issues_on(trigger.repo, trigger.project, since=date_triggered)
            for issue in issues:
                # Convert the HTML issue body to markdown and append the footer.
                content = pypandoc.convert(issue.body, 'md', format='html')
                content += self.gh_footer(trigger, issue)
                data.append({'title': issue.title, 'content': content})
                # digester
                self.send_digest_event(trigger_id, issue.title, '')
            cache.set('th_github_' + str(trigger_id), data)
        else:
            # rate limit reach, do nothing right now
            logger.warning("Rate limit reached")
            update_result(trigger_id, msg="Rate limit reached", status=True)
    else:
        logger.critical("no token provided")
        update_result(trigger_id, msg="No token provided", status=True)
    return data
/**
 * Adds the given cell to the cell collection of the current namespace's table.
 *
 * @param c the cell to add; must not be null
 * @return true if the underlying collection changed as a result of the call
 * @throws DeepGenericException if {@code c} is null
 */
public boolean add(Cell c) {
    if (c == null) {
        throw new DeepGenericException(new IllegalArgumentException("cell parameter cannot be null"));
    }
    return getCellsByTable(nameSpace).add(c);
}
/**
 * Copies the content of the requested resource into the HTTP response.
 *
 * @param requestedPath the requested resource path
 * @param response      the servlet response to write to
 * @param contentType   the content type to set on the response
 * @return true when the content was found and copied, false otherwise
 * @throws IOException if writing to the response fails
 */
@Override
protected boolean copyRequestedContentToResponse(String requestedPath, HttpServletResponse response,
        String contentType) throws IOException {
    boolean copyDone = false;
    if (isValidRequestedPath(requestedPath)) {
        try {
            writeContent(requestedPath, null, response);
            // NOTE(review): the content type is set *after* the body is written;
            // if writeContent commits/flushes the response this header may be
            // silently ignored -- confirm the intended ordering.
            response.setContentType(contentType);
            copyDone = true;
        } catch (ResourceNotFoundException e) {
            // Nothing to do here: fall through and report copyDone == false.
        }
    }
    return copyDone;
}
/**
 * Deletes all waiter registrations for the given event and records the
 * deregistration in the event history.
 *
 * @param eventName the event whose waiters are removed
 * @throws SQLException if the delete or the history insert fails
 */
private void removeEventWait(String eventName) throws SQLException {
    String query = "delete from EVENT_WAIT_INSTANCE where EVENT_NAME=?";
    db.runUpdate(query, eventName);
    this.recordEventHistory(eventName, EventLog.SUBCAT_DEREGISTER,
        "N/A", 0L, "Deregister all existing waiters");
}
java | private static <T> List<T> unmodifiableList(List<T> list) {
if (list == null) {
return Collections.emptyList();
}
return Collections.unmodifiableList(list);
} |
/**
 * Core SSL unwrap: reads encrypted bytes from the source (when none are
 * buffered), runs {@code SSLEngine.unwrap}, and copies the plaintext into the
 * user buffers — or into an internally pooled buffer when no user buffers are
 * supplied or they overflow. Maintains the FLAG_DATA_TO_UNWRAP /
 * read-requires-write state bits, handles handshake transitions, and in the
 * finally block decides whether the read listener must be re-run.
 *
 * @param userBuffers destination buffers, or null to buffer internally
 * @param off         offset into {@code userBuffers}
 * @param len         number of buffers to use
 * @return bytes copied to the user buffers, 0 when none, or -1 on end of stream
 * @throws IOException on I/O or SSL failure (the connection is closed first)
 */
private long doUnwrap(ByteBuffer[] userBuffers, int off, int len) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED)) {
        throw new ClosedChannelException();
    }
    if (outstandingTasks > 0) {
        // Handshake tasks still running; no progress possible yet.
        return 0;
    }
    if (anyAreSet(state, FLAG_READ_REQUIRES_WRITE)) {
        doWrap(null, 0, 0);
        if (allAreClear(state, FLAG_WRITE_REQUIRES_READ)) { //unless a wrap is immediately required we just return
            return 0;
        }
    }
    boolean bytesProduced = false;
    PooledByteBuffer unwrappedData = this.unwrappedData;
    //copy any exiting data
    if (unwrappedData != null) {
        if (userBuffers != null) {
            long copied = Buffers.copy(userBuffers, off, len, unwrappedData.getBuffer());
            if (!unwrappedData.getBuffer().hasRemaining()) {
                unwrappedData.close();
                this.unwrappedData = null;
            }
            if (copied > 0) {
                readListenerInvocationCount = 0;
            }
            return copied;
        }
    }
    try {
        //we need to store how much data is in the unwrap buffer. If no progress can be made then we unset
        //the data to unwrap flag
        int dataToUnwrapLength;
        //try and read some data if we don't already have some
        if (allAreClear(state, FLAG_DATA_TO_UNWRAP)) {
            if (dataToUnwrap == null) {
                dataToUnwrap = bufferPool.allocate();
            }
            int res;
            try {
                res = source.read(dataToUnwrap.getBuffer());
            } catch (IOException | RuntimeException | Error e) {
                dataToUnwrap.close();
                dataToUnwrap = null;
                throw e;
            }
            dataToUnwrap.getBuffer().flip();
            if (res == -1) {
                dataToUnwrap.close();
                dataToUnwrap = null;
                notifyReadClosed();
                return -1;
            } else if (res == 0 && engine.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.FINISHED) {
                //its possible there was some data in the buffer from a previous unwrap that had a buffer underflow
                //if not we just close the buffer so it does not hang around
                if (!dataToUnwrap.getBuffer().hasRemaining()) {
                    dataToUnwrap.close();
                    dataToUnwrap = null;
                }
                return 0;
            }
        }
        dataToUnwrapLength = dataToUnwrap.getBuffer().remaining();
        long original = 0;
        if (userBuffers != null) {
            original = Buffers.remaining(userBuffers);
        }
        //perform the actual unwrap operation
        //if possible this is done into the the user buffers, however
        //if none are supplied or this results in a buffer overflow then we allocate our own
        SSLEngineResult result;
        boolean unwrapBufferUsed = false;
        try {
            if (userBuffers != null) {
                result = engine.unwrap(this.dataToUnwrap.getBuffer(), userBuffers, off, len);
                if (result.getStatus() == SSLEngineResult.Status.BUFFER_OVERFLOW) {
                    //not enough space in the user buffers
                    //we use our own
                    unwrappedData = bufferPool.allocate();
                    ByteBuffer[] d = new ByteBuffer[len + 1];
                    System.arraycopy(userBuffers, off, d, 0, len);
                    d[len] = unwrappedData.getBuffer();
                    result = engine.unwrap(this.dataToUnwrap.getBuffer(), d);
                    unwrapBufferUsed = true;
                }
                bytesProduced = result.bytesProduced() > 0;
            } else {
                unwrapBufferUsed = true;
                if (unwrappedData == null) {
                    unwrappedData = bufferPool.allocate();
                } else {
                    unwrappedData.getBuffer().compact();
                }
                result = engine.unwrap(this.dataToUnwrap.getBuffer(), unwrappedData.getBuffer());
                bytesProduced = result.bytesProduced() > 0;
            }
        } finally {
            if (unwrapBufferUsed) {
                unwrappedData.getBuffer().flip();
                if (!unwrappedData.getBuffer().hasRemaining()) {
                    unwrappedData.close();
                    unwrappedData = null;
                }
            }
            this.unwrappedData = unwrappedData;
        }
        if (result.getStatus() == SSLEngineResult.Status.CLOSED) {
            if (dataToUnwrap != null) {
                dataToUnwrap.close();
                dataToUnwrap = null;
            }
            notifyReadClosed();
            return -1;
        }
        if (!handleHandshakeResult(result)) {
            // Handshake in progress: remember whether the unwrap buffer still
            // holds progress-capable data before bailing out.
            if (this.dataToUnwrap.getBuffer().hasRemaining()
                    && result.getStatus() != SSLEngineResult.Status.BUFFER_UNDERFLOW
                    && dataToUnwrap.getBuffer().remaining() != dataToUnwrapLength) {
                state |= FLAG_DATA_TO_UNWRAP;
            } else {
                state &= ~FLAG_DATA_TO_UNWRAP;
            }
            return 0;
        }
        if (result.getStatus() == SSLEngineResult.Status.BUFFER_UNDERFLOW) {
            state &= ~FLAG_DATA_TO_UNWRAP;
        } else if (result.getStatus() == SSLEngineResult.Status.BUFFER_OVERFLOW) {
            UndertowLogger.REQUEST_LOGGER.sslBufferOverflow(this);
            IoUtils.safeClose(delegate);
        } else if (this.dataToUnwrap.getBuffer().hasRemaining() && dataToUnwrap.getBuffer().remaining() != dataToUnwrapLength) {
            state |= FLAG_DATA_TO_UNWRAP;
        } else {
            state &= ~FLAG_DATA_TO_UNWRAP;
        }
        if (userBuffers == null) {
            return 0;
        } else {
            long res = original - Buffers.remaining(userBuffers);
            if (res > 0) {
                //if data has been successfully returned this is not a read loop
                readListenerInvocationCount = 0;
            }
            return res;
        }
    } catch (SSLException e) {
        try {
            try {
                //we make an effort to write out the final record
                //this is best effort, there are no guarantees
                clearWriteRequiresRead();
                doWrap(null, 0, 0);
                flush();
            } catch (Exception e2) {
                UndertowLogger.REQUEST_LOGGER.debug("Failed to write out final SSL record", e2);
            }
            close();
        } catch (Throwable ex) {
            //we ignore this
            UndertowLogger.REQUEST_LOGGER.debug("Exception closing SSLConduit after exception in doUnwrap", ex);
        }
        throw e;
    } catch (RuntimeException | IOException | Error e) {
        try {
            close();
        } catch (Throwable ex) {
            //we ignore this
            UndertowLogger.REQUEST_LOGGER.debug("Exception closing SSLConduit after exception in doUnwrap", ex);
        }
        throw e;
    } finally {
        boolean requiresListenerInvocation = false; //if there is data in the buffer and reads are resumed we should re-run the listener
        //we always need to re-invoke if bytes have been produced, as the engine may have buffered some data
        if (bytesProduced || (unwrappedData != null && unwrappedData.isOpen() && unwrappedData.getBuffer().hasRemaining())) {
            requiresListenerInvocation = true;
        }
        if (dataToUnwrap != null) {
            //if there is no data in the buffer we just free it
            if (!dataToUnwrap.getBuffer().hasRemaining()) {
                dataToUnwrap.close();
                dataToUnwrap = null;
                state &= ~FLAG_DATA_TO_UNWRAP;
            } else if (allAreClear(state, FLAG_DATA_TO_UNWRAP)) {
                //if there is not enough data in the buffer we compact it to make room for more
                dataToUnwrap.getBuffer().compact();
            } else {
                //there is more data, make sure we trigger a read listener invocation
                requiresListenerInvocation = true;
            }
        }
        //if we are in the read listener handshake we don't need to invoke
        //as it is about to be invoked anyway
        if (requiresListenerInvocation && (anyAreSet(state, FLAG_READS_RESUMED) || allAreSet(state, FLAG_WRITE_REQUIRES_READ | FLAG_WRITES_RESUMED)) && !invokingReadListenerHandshake) {
            runReadListener(false);
        }
    }
}
java | public XMLNode parseXML(String root) {
if (xmlElementsMap.containsKey(root)) {
return xmlElementsMap.get(root);
}
try {
currentRoot = root;
isParsing = false;
SAXParserFactory factory = SAXParserFactory.newInstance();
SAXParser saxParser = factory.newSAXParser();
InputStream in = configuration.getBuilderXML();
saxParser.parse(in, this);
return xmlElementsMap.get(root);
} catch (Throwable t) {
t.printStackTrace();
throw new DocletAbortException(t);
}
} |
def event(self, event):
    """
    Forwards events to the corresponding instance of your event handler
    for this process.

    If you subclass L{EventSift} and reimplement this method, no event
    will be forwarded at all unless you call the superclass implementation.

    If your filtering is based on the event type, there's a much easier way
    to do it: just implement a handler for it.
    """
    eventCode = event.get_event_code()
    pid = event.get_pid()
    handler = self.forward.get(pid, None)
    if handler is None:
        # First event for this process: create a fresh handler instance.
        handler = self.cls(*self.argv, **self.argd)
        # Don't remember the handler if the process is already exiting.
        if eventCode != win32.EXIT_PROCESS_DEBUG_EVENT:
            self.forward[pid] = handler
    elif eventCode == win32.EXIT_PROCESS_DEBUG_EVENT:
        # Process is gone: drop its handler (after this final dispatch below).
        del self.forward[pid]
    return handler(event)
/**
 * Recursively converts {@code value} into an instance of {@code toClazz}:
 * bind objects become beans (property by property), optionals are mapped or
 * unwrapped, maps and collections are converted element-wise, and everything
 * else falls through to {@link #doTransform}. Any failure is rethrown as an
 * unchecked exception.
 *
 * @param value    the value to convert (may already be of the target type)
 * @param toClazz  the requested target type
 * @param registry transformer registry consulted by {@code doTransform}
 * @return the converted value
 */
static <T> T transform(Object value, Class<T> toClazz, Registry registry) {
    try {
        if (toClazz.isInstance(value)) {
            return (T) value;
        }
        else if (value instanceof BindObject) {
            // Treat the bind object as a property bag and populate a fresh bean.
            T bean = newInstance(toClazz);
            for (Map.Entry<String, Object> entry : (BindObject) value) {
                Class<?> requiredType = PropertyUtils.getPropertyType(toClazz, entry.getKey());
                Object propValue = transform(entry.getValue(), requiredType, registry);
                if (propValue != null) {
                    PropertyUtils.writeProperty(bean, entry.getKey(), propValue);
                }
            }
            return bean;
        }
        else if (value instanceof Optional) {
            Optional<?> optional = (Optional) value;
            if (toClazz == Optional.class) {
                Class<?> targetType = PropertyUtils.getGenericParamTypes(toClazz)[0];
                return (T) optional.map(v -> transform(v, targetType, registry));
            } else {
                // Unwrap and transform the contained value (or null).
                return transform(optional.orElse(null), toClazz, registry);
            }
        }
        else if (value instanceof Map) {
            Map<?, ?> values = (Map) value;
            if (Map.class.isAssignableFrom(toClazz)) {
                // Convert keys and values according to the target map's generics.
                Class<?> keyType = PropertyUtils.getGenericParamTypes(toClazz)[0];
                Class<?> valueType = PropertyUtils.getGenericParamTypes(toClazz)[1];
                values = values.entrySet().stream().map(e ->
                        entry(transform(e.getKey(), keyType, registry),
                                transform(e.getValue(), valueType, registry))
                ).collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue
                ));
                return doTransform(values, toClazz, registry);
            }
            else throw new IllegalArgumentException(
                    "INCOMPATIBLE transform: " + value.getClass().getName() + " -> " + toClazz.getName());
        }
        else if (value instanceof Collection) {
            Collection<?> values = (Collection) value;
            if (Collection.class.isAssignableFrom(toClazz) || toClazz.isArray()) {
                // Element type comes from the array component or the collection's generic.
                Class<?> elemType = toClazz.isArray() ? toClazz.getComponentType()
                        : PropertyUtils.getGenericParamTypes(toClazz)[0];
                values = values.stream().map(v -> transform(v, elemType, registry))
                        .collect(Collectors.<Object>toList());
                return doTransform(values, toClazz, registry);
            }
            else throw new IllegalArgumentException(
                    "INCOMPATIBLE transform: " + value.getClass().getName() + " -> " + toClazz.getName());
        }
        else {
            return doTransform(value, toClazz, registry);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
def new_feed(self, name: str, layer_shape: tuple):
    """
    Creates a feed (placeholder) layer. This is usually the first layer in the network.
    :param name: name of the layer
    :param layer_shape: shape of the float32 placeholder tensor
    :return:
    """
    feed_data = tf.placeholder(tf.float32, layer_shape, 'input')
    self.__network.add_layer(name, layer_output=feed_data)
/**
 * Returns the item at the given global position, delegating to the merged
 * adapter that owns the position range containing it.
 *
 * @param position the global position across all merged adapters
 * @return the item, or null when the position is out of range
 */
public Item getItem(int position) {
    //if we are out of range just return null
    if (position < 0 || position >= mGlobalSize) {
        return null;
    }
    //now get the adapter which is responsible for the given position
    int index = floorIndex(mAdapterSizes, position);
    // Translate the global position into the adapter's local position.
    return mAdapterSizes.valueAt(index).getAdapterItem(position - mAdapterSizes.keyAt(index));
}
def _writeSedimentTable(self, session, fileObject, mapTable, replaceParamFile):
    """
    Write Sediment Mapping Table Method

    This method writes the sediments special mapping table case.
    """
    # Write the sediment mapping table header
    fileObject.write('%s\n' % (mapTable.name))
    fileObject.write('NUM_SED %s\n' % (mapTable.numSed))
    # Write the value header line
    fileObject.write(
        'Sediment Description%sSpec. Grav%sPart. Dia%sOutput Filename\n' % (' ' * 22, ' ' * 3, ' ' * 5))
    # Retrieve the sediment mapping table values
    sediments = session.query(MTSediment). \
        filter(MTSediment.mapTable == mapTable). \
        order_by(MTSediment.id). \
        all()
    # Write sediments out to file
    for sediment in sediments:
        # Determine spacing for aesthetics
        space1 = 42 - len(sediment.description)
        # Pad values with zeros / Get replacement variable
        specGravString = vwp(sediment.specificGravity, replaceParamFile)
        partDiamString = vwp(sediment.particleDiameter, replaceParamFile)
        # vwp may return a non-numeric replacement placeholder; the bare
        # excepts deliberately fall back to plain string formatting then.
        try:
            specGrav = '%.6f' % specGravString
        except:
            specGrav = '%s' % specGravString
        try:
            partDiam = '%.6f' % partDiamString
        except:
            partDiam = '%s' % partDiamString
        fileObject.write('%s%s%s%s%s%s%s\n' % (
            sediment.description, ' ' * space1, specGrav, ' ' * 5, partDiam, ' ' * 6, sediment.outputFilename))
/**
 * Opens the trust store at the given path, trying the file system first and
 * falling back to the thread context classpath when the file does not exist.
 *
 * @param trustStorePath a file system path or classpath resource name
 * @return an open stream on the trust store; the caller is responsible for closing it
 * @throws FileNotFoundException when the path resolves neither on disk nor on the classpath
 */
@SuppressWarnings("resource")
private InputStream loadFile(String trustStorePath) throws FileNotFoundException {
    InputStream input;
    try {
        input = new FileInputStream(trustStorePath);
    } catch (FileNotFoundException e) {
        LOGGER.warn("File {} not found. Fallback to classpath.", trustStorePath);
        input = Thread.currentThread().getContextClassLoader().getResourceAsStream(trustStorePath);
    }
    // getResourceAsStream returns null (rather than throwing) when missing.
    if (input == null) {
        throw new FileNotFoundException("File " + trustStorePath + " does not exist");
    }
    return input;
}
def get_unique_constraint_declaration_sql(self, name, index):
    """
    Obtains DBMS specific SQL code portion needed to set a unique
    constraint declaration to be used in statements like CREATE TABLE.

    :param name: The name of the unique constraint.
    :type name: str

    :param index: The index definition
    :type index: Index

    :return: DBMS specific SQL code portion needed to set a constraint.
    :rtype: str

    :raises DBALException: if the index declares no columns
    """
    columns = index.get_quoted_columns(self)
    constraint_name = Identifier(name)
    if not columns:
        raise DBALException('Incomplete definition. "columns" required.')

    quoted_name = constraint_name.get_quoted_name(self)
    column_list = self.get_index_field_declaration_list_sql(columns)
    partial_clause = self.get_partial_index_sql(index)
    return "CONSTRAINT %s UNIQUE (%s)%s" % (quoted_name, column_list, partial_clause)
python | def OnCellSelected(self, event):
        """Cell selection event handler.

        Redirects selection of a merged cell to its merging (top-left)
        cell, executes button cells on click, and otherwise refreshes the
        cursor, entry line and attribute toolbar for the selected cell.
        """
        key = row, col, tab = event.Row, event.Col, self.grid.current_table
        # If the cell is merged, navigate to the merging cell instead
        cell_attributes = self.grid.code_array.cell_attributes
        merging_cell = cell_attributes.get_merging_cell(key)
        if merging_cell is not None and merging_cell != key:
            post_command_event(self.grid, self.grid.GotoCellMsg,
                               key=merging_cell)
            # Check if the merging cell is a button cell
            if cell_attributes[merging_cell]["button_cell"]:
                # Button cells shall be executed on click
                self.grid.EnableCellEditControl()
            return
        # If in selection mode do nothing
        # This prevents the current cell from changing
        if not self.grid.IsEditable():
            return
        # Redraw cursor
        self.grid.ForceRefresh()
        # Disable entry line if cell is locked
        self.grid.lock_entry_line(
            self.grid.code_array.cell_attributes[key]["locked"])
        # Update entry line
        self.grid.update_entry_line(key)
        # Update attribute toolbar
        self.grid.update_attribute_toolbar(key)
        # Remember the selection, then let wx continue default processing
        self.grid._last_selected_cell = key
        event.Skip() |
def delete_dagobah(self, dagobah_id):
    """ Deletes the Dagobah and all child Jobs from the database.

    Related run logs are deleted as well. Does nothing if no Dagobah
    with the given ID exists.

    :param dagobah_id: ID of the Dagobah document to delete
    """
    rec = self.dagobah_coll.find_one({'_id': dagobah_id})
    # find_one returns None when no document matches; previously this
    # crashed with AttributeError on rec.get().
    if rec is None:
        return

    # Delete every child job that carries a job_id.
    for job in rec.get('jobs', []):
        if 'job_id' in job:
            self.delete_job(job['job_id'])

    # Remove related run logs, then the Dagobah document itself.
    self.log_coll.remove({'parent_id': dagobah_id})
    self.dagobah_coll.remove({'_id': dagobah_id})
java | public static String data(byte[] data, int offset, int length) {
try {
return stream(new ByteArrayInputStream(data, offset, length), length);
} catch (IOException e) {
throw new AssertionError(e);
}
} |
java | private void setParam(ElementEnum elementEnumParam, String objectParam, LiteralOption selector)
{
this.elementEnumParam = elementEnumParam;
this.objectParam = objectParam;
this.selector = selector;
} |
java | private static void decodeEdifactSegment(BitSource bits, StringBuilder result) {
    // Decodes a run of EDIFACT-encoded values (four 6-bit characters per
    // group) from the bit source, appending each decoded char to result.
    // Returns on the EDIFACT unlatch code or when too few bits remain.
    do {
        // If there is only two or less bytes left then it will be encoded as ASCII
        if (bits.available() <= 16) {
            return;
        }
        for (int i = 0; i < 4; i++) {
            int edifactValue = bits.readBits(6);
            // Check for the unlatch character
            if (edifactValue == 0x1F) { // 011111
                // Read rest of byte, which should be 0, and stop
                // (bitsLeft == 8 means we are already byte-aligned)
                int bitsLeft = 8 - bits.getBitOffset();
                if (bitsLeft != 8) {
                    bits.readBits(bitsLeft);
                }
                return;
            }
            if ((edifactValue & 0x20) == 0) { // no 1 in the leading (6th) bit
                edifactValue |= 0x40; // Add a leading 01 to the 6 bit binary value
            }
            result.append((char) edifactValue);
        }
    } while (bits.available() > 0);
} |
def compile_flags(args):
    """
    Build a dictionary with an entry for cppflags, ldflags, and cxxflags.

    The returned mapping uses distutils-style option names and is filled
    from the command-line-defined options on ``args``. The optional
    ``opts`` attribute is copied over only when present and truthy.
    """
    options = {
        'define_macros': args.defines,
        'undef_macros': args.undefs,
        'include_dirs': args.include_dirs,
        'extra_compile_args': args.extra_flags,
        'library_dirs': args.libraries_dir,
        'extra_link_args': args.extra_flags,
    }
    # Copy optional attributes verbatim when they are set and non-empty.
    for attr in ('opts',):
        value = getattr(args, attr, None)
        if value:
            options[attr] = value
    return options
java | public static <T> Iterator<T> toUnique(Iterator<T> self, Comparator<T> comparator) {
    // Wraps the source iterator in a UniqueIterator that suppresses
    // duplicate elements, using the supplied comparator for equality.
    // NOTE(review): exact duplicate semantics (consecutive vs. global)
    // are defined by UniqueIterator — confirm there.
    return new UniqueIterator<T>(self, comparator);
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.