language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
def by_type(self, identifier_class):
    """Return all unique identifiers that are instances of a given type.

    :param identifier_class: the class (or tuple of classes) to match
        identifiers against with ``isinstance``
    :returns: a set of the matching identifier instances (the original
        docstring claimed a tuple, but a set was always returned)
    """
    # A set comprehension filters and deduplicates in one pass; the
    # original's extra ``set(...)`` copy of an already-built set was
    # redundant.
    return {identifier
            for identifier in self.source_identifiers.values()
            if isinstance(identifier, identifier_class)}
def data(self):
    """Parameters passed to the API containing the details to create a new
    alert.

    Builds a dict of alert attributes, prunes the entries left empty, and
    returns the JSON-serialised payload.

    :return: parameters to create new alert.
    :rtype: str (a JSON document -- the value goes through ``json.dumps``,
        although the original docstring said dict)
    """
    data = {}
    data["name"] = self.name
    # NOTE(review): ``self.queryd`` looks like a typo for ``self.query`` --
    # confirm against the class attributes before relying on this field.
    data["query"] = self.queryd
    data["languages"] = self.languages
    # Optional fields default to "" so they can be pruned below.
    data["countries"] = self.countries if self.countries else ""
    data["sources"] = self.sources if self.sources else ""
    data["blocked_sites"] = self.blocked_sites if self.blocked_sites else ""
    data["noise_detection"] = self.noise_detection if self.noise_detection else ""
    data["reviews_pages"] = self.reviews_pages if self.reviews_pages else ""
    # Deletes parameter if it does not have a value (iterate over a copy
    # because entries are removed during iteration).
    for key, value in list(data.items()):
        if value == '':
            del data[key]
    data = json.dumps(data)
    return data
def logv(msg, *args, **kwargs):
    """Emit a log message, but only when verbose mode is enabled."""
    # Guard clause: silently drop the message unless verbose is on.
    if not settings.VERBOSE:
        return
    log(msg, *args, **kwargs)
/**
 * Marshalls the given request into the protocol marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(UpdateRdsDbInstanceRequest updateRdsDbInstanceRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateRdsDbInstanceRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Marshall each request field with its static binding descriptor.
        protocolMarshaller.marshall(updateRdsDbInstanceRequest.getRdsDbInstanceArn(), RDSDBINSTANCEARN_BINDING);
        protocolMarshaller.marshall(updateRdsDbInstanceRequest.getDbUser(), DBUSER_BINDING);
        protocolMarshaller.marshall(updateRdsDbInstanceRequest.getDbPassword(), DBPASSWORD_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
def get_items_shuffled_metadata(self):
    """Gets the metadata for shuffling items.

    return: (osid.Metadata) - metadata for the shuffled flag
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
    # Copy the metadata template so the stored defaults are not mutated,
    # then record the currently persisted flag value alongside it.
    metadata = dict(self._mdata['items_shuffled'])
    metadata.update({'existing_boolean_values': self._my_map['itemsShuffled']})
    return Metadata(**metadata)
java | public static <T> List<T> transformThriftResult(List<ColumnOrSuperColumn> coscList,
ColumnFamilyType columnFamilyType, ThriftRow row)
{
List result = new ArrayList(coscList.size());
for (ColumnOrSuperColumn cosc : coscList)
{
result.add(transformThriftResult(cosc, columnFamilyType, row));
}
return result;
} |
/**
 * Removes the given object from the LRU list and updates the cache
 * statistics accordingly.
 */
private void decreaseCache(I_CmsLruCacheObject theCacheObject) {
    // notify the object that it was now removed from the cache
    //theCacheObject.notify();
    theCacheObject.removeFromLruCache();
    // set the list pointers to null so the object no longer links into the list
    theCacheObject.setNextLruObject(null);
    theCacheObject.setPreviousLruObject(null);
    // update the cache stats.
    m_objectCosts -= theCacheObject.getLruCacheCosts();
    m_objectCount--;
}
def observe_root_state_assignments(self, model, prop_name, info):
    """ The method relieves observed root_state models and observes newly assigned root_state models.

    :param model: the observed model owning the root_state property
    :param prop_name: name of the changed property
    :param info: change info mapping with the ``old`` and ``new`` model values
    """
    # Stop observing the previously assigned root_state model, if any.
    if info['old']:
        self.relieve_model(info['old'])
    # Start observing the newly assigned model, if any.
    if info['new']:
        self.observe_model(info['new'])
        self.logger.info("Exchange observed old root_state model with newly assigned one. sm_id: {}"
                         "".format(info['new'].state.parent.state_machine_id))
/**
 * Executes a modify operation against the given DN using the attributes
 * of the supplied LDAP entry.
 *
 * @return true if the underlying modify operation reports success
 */
public static boolean executeModifyOperation(final String currentDn, final ConnectionFactory connectionFactory, final LdapEntry entry) {
    // Convert the entry's attributes into the name -> string-values map
    // expected by the lower-level overload.
    final Map<String, Set<String>> attributes = entry.getAttributes().stream()
        .collect(Collectors.toMap(LdapAttribute::getName, ldapAttribute -> new HashSet<>(ldapAttribute.getStringValues())));
    return executeModifyOperation(currentDn, connectionFactory, attributes);
}
def read(database, table, key):
    """Does a single read operation.

    :param database: database handle supporting ``snapshot()``.
    :param table: name of the table to read from.
    :param key: value matched against the ``id`` column.
    """
    with database.snapshot() as snapshot:
        # NOTE(review): the SQL is built by string interpolation; acceptable
        # for a benchmark with trusted inputs, but switch to query
        # parameters if ``table``/``key`` can ever be untrusted.
        result = snapshot.execute_sql('SELECT u.* FROM %s u WHERE u.id="%s"' %
                                      (table, key))
        # Iterate the result to force every field of the row to be read.
        for row in result:
            key = row[0]
            for i in range(NUM_FIELD):
                field = row[i + 1]
def user_parse(data):
    """Parse information from the provider.

    Yields ``(key, value)`` pairs for: id, username, first_name, last_name.

    :param data: provider payload; user fields are read from
        ``data['oauth']['user']`` when present.
    """
    _user = data.get('oauth', {}).get('user', {})
    yield 'id', _user.get('id')
    yield 'username', _user.get('username')
    # ``display_name`` may be missing; fall back to '' so ``partition``
    # does not crash on None (bug in the original).
    first_name, _, last_name = (_user.get('display_name') or '').partition(' ')
    yield 'first_name', first_name
    yield 'last_name', last_name
def do_execute(self):
    """
    The actual execution of the actor.

    Looks up a previously stored model (classifier or clusterer) from the
    storage handler and evaluates it on the incoming dataset payload.

    :return: None if successful, otherwise error message
    :rtype: str
    """
    data = self.input.payload
    if self.storagehandler is None:
        return "No storage handler available!"
    sname = str(self.resolve_option("storage_name"))
    if sname not in self.storagehandler.storage:
        return "No storage item called '" + sname + "' present!"
    cls = self.storagehandler.storage[sname]
    if isinstance(cls, Classifier):
        # Supervised case: evaluate the classifier on the data, honouring
        # the discard_predictions and output options.
        evl = Evaluation(data)
        evl.discard_predictions = bool(self.resolve_option("discard_predictions"))
        evl.test_model(
            cls,
            data,
            self.resolve_option("output"))
    elif isinstance(cls, Clusterer):
        # Unsupervised case: evaluate the clusterer on the data.
        evl = ClusterEvaluation()
        evl.set_model(cls)
        evl.test_model(data)
    else:
        return "Unhandled class: " + classes.get_classname(cls)
    # Forward the evaluation wrapped in a token to downstream actors.
    self._output.append(Token(evl))
    return None
java | public static int getNumCores() {
//Private Class to display only CPU devices in the directory listing
class CpuFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
//Check if filename is "cpu", followed by a single digit number
if(Pattern.matches("cpu[0-9]", pathname.getName())) {
return true;
}
return false;
}
}
try {
//Get directory containing CPU info
File dir = new File("/sys/devices/system/cpu/");
//Filter to only list the devices we care about
File[] files = dir.listFiles(new CpuFilter());
if (files == null)
return Runtime.getRuntime().availableProcessors();
//Return the number of cores (virtual CPU devices)
return files.length;
} catch(Exception e) {
// The number of cores can vary with JVM status
return Runtime.getRuntime().availableProcessors();
}
} |
def _upcoming_datetime_from(self):
    """
    The datetime this event next starts in the local time zone, or None if
    it is finished.
    """
    # Search forward from "now" up to the end of day (time.max), skipping
    # cancelled occurrences and extra-info-only occurrences.
    nextDt = self.__localAfter(timezone.localtime(), dt.time.max,
                               excludeCancellations=True,
                               excludeExtraInfo=True)
    return nextDt
def load_from_docinfo(self, docinfo, delete_missing=False, raise_failure=False):
    """Populate the XMP metadata object with DocumentInfo

    Arguments:
        docinfo: a DocumentInfo, e.g pdf.docinfo
        delete_missing: if the entry is not in DocumentInfo, delete the
            equivalent from XMP
        raise_failure: if True, raise any failure to convert docinfo;
            otherwise warn and continue

    A few entries in the deprecated DocumentInfo dictionary are considered
    approximately equivalent to certain XMP records. This method copies
    those entries into the XMP metadata.
    """
    for uri, shortkey, docinfo_name, converter in self.DOCINFO_MAPPING:
        qname = QName(uri, shortkey)
        # docinfo might be a dict or pikepdf.Dictionary, so lookup keys
        # by str(Name)
        val = docinfo.get(str(docinfo_name))
        if val is None:
            # Entry absent from docinfo: optionally remove its XMP twin.
            if delete_missing and qname in self:
                del self[qname]
            continue
        try:
            val = str(val)
            if converter:
                val = converter.xmp_from_docinfo(val)
            if not val:
                # Converter produced nothing usable; skip this record.
                continue
            self[qname] = val
        except (ValueError, AttributeError) as e:
            msg = "The metadata field {} could not be copied to XMP".format(
                docinfo_name
            )
            if raise_failure:
                raise ValueError(msg) from e
            else:
                warn(msg)
def add_grid_data(self, data):
    """Append *data* to the grid data list and return its index."""
    index = len(self.grid_data)
    self.grid_data.append(data)
    return index
/**
 * Returns a collector that accumulates elements into a LinkedHashMap,
 * preserving encounter order.
 */
@SuppressWarnings("squid:S1452")
public static <T, K, U> Collector<T, ?, Map<K, U>> toLinkedMap(
        Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper) {
    // throwingMerger() makes duplicate keys fail fast instead of merging.
    return Collectors.toMap(keyMapper, valueMapper, throwingMerger(), LinkedHashMap::new);
}
/**
 * Compiles the given class node to bytecode and defines the resulting
 * class in this loader.
 */
public Class defineClass(ClassNode classNode, String file, String newCodeBase) {
    CodeSource codeSource = null;
    try {
        codeSource = new CodeSource(new URL("file", "", newCodeBase), (java.security.cert.Certificate[]) null);
    } catch (MalformedURLException e) {
        //swallow -- a null CodeSource is acceptable for the compilation unit
    }
    CompilationUnit unit = createCompilationUnit(config, codeSource);
    ClassCollector collector = createCollector(unit, classNode.getModule().getContext());
    try {
        unit.addClassNode(classNode);
        unit.setClassgenCallback(collector);
        // Run the pipeline only up to class generation; the collector
        // captures the generated class.
        unit.compile(Phases.CLASS_GENERATION);
        definePackage(collector.generatedClass.getName());
        return collector.generatedClass;
    } catch (CompilationFailedException e) {
        throw new RuntimeException(e);
    }
}
def getObjective(self, name):
    """
    Get the objective with the corresponding name.

    Args:
        name: Name of the objective to be found.

    Raises:
        TypeError: if the specified objective does not exist.
    """
    # Serialise access to the underlying implementation with the shared lock.
    return lock_and_call(
        lambda: Objective(self._impl.getObjective(name)),
        self._lock
    )
/**
 * Parses inline API signatures from the given string and reports any
 * classes referenced by the signatures that could not be resolved.
 */
public void parseSignaturesString(String signatures) throws IOException,ParseException {
    logger.info("Reading inline API signatures...");
    // TreeSet keeps the missing-class report sorted and de-duplicated.
    final Set<String> missingClasses = new TreeSet<String>();
    parseSignaturesFile(new StringReader(signatures), false, missingClasses);
    reportMissingSignatureClasses(missingClasses);
}
def chooser_menu():
    """ Master jump off point to ancillary functionality.

    Renders the tool menu, reads one choice from stdin and dispatches to
    the matching installer/scanner.  Returns the dispatched call's result,
    or True when no dispatch happened.
    """
    title = TITLE + "The" + Colors.ORANGE + " Metal" + Colors.RESET + TITLE + " Menu" + RESET
    menu = """
________________________________________________________________
""" + title + """
________________________________________________________________

( """ + TITLE + "a" + RESET + """ ) : Install RKhunter Rootkit Scanner (v1.6)
( """ + TITLE + "b" + RESET + """ ) : Install Chkrootkit Rootkit Scanner (latest)
( """ + TITLE + "c" + RESET + """ ) : Run RKhunter Scan of Local Machine
( """ + TITLE + "d" + RESET + """ ) : Run Chkrootkit Scan of Local Machine
"""
    print(menu)
    answer: str = input("\t\tEnter Choice [quit]: ") or ''
    if answer == 'a':
        return rkhunter()
    elif answer == 'b':
        print('\t\nrun chkrootkit installer\n')
        return chkrootkit.main()
    elif answer == 'c':
        # NOTE(review): unlike the other branches this only prints -- it
        # never launches the rkhunter scan; confirm whether a call is missing.
        print('\t\nrun rkhunter scan of local machine\n')
    elif answer == 'd':
        return chkrootkit_exec()
    return True
/**
 * Flushes the buffer up to the mark position, then moves the unflushed
 * tail (the bytes after the mark) to the start of a fresh packet.
 */
@Override
public void flushBufferStopAtMark() throws IOException {
    final int end = pos;
    pos = mark;
    // Flush everything before the mark.
    flushBuffer(true);
    out.flush();
    startPacket(0);
    // Copy the retained bytes after the mark into the new packet.
    System.arraycopy(buf, mark, buf, pos, end - mark);
    pos += end - mark;
    mark = -1;
    bufferContainDataAfterMark = true;
}
def nonraw_instance(receiver):
    """
    Signal-receiver decorator that replaces a raw instance with the full
    database-backed one before invoking the receiver.
    """
    @wraps(receiver)
    def wrapper(sender, instance, raw, using, **kwargs):
        target = instance
        if raw:
            # Re-fetch the complete instance from the given database.
            target = sender._default_manager.using(using).get(pk=instance.pk)
        return receiver(sender=sender, raw=raw, instance=target,
                        using=using, **kwargs)
    return wrapper
def _string_width(string, *, _IS_ASCII=_IS_ASCII):
    """Returns string's width.

    ASCII-only strings report their match end position; otherwise each
    East-Asian Wide/Fullwidth/Ambiguous character counts as two columns.
    """
    ascii_match = _IS_ASCII.match(string)
    if ascii_match:
        return ascii_match.endpos
    wide_categories = 'WFA'
    east_asian_width = unicodedata.east_asian_width
    return sum(2 if east_asian_width(char) in wide_categories else 1
               for char in string)
def stop(self):
    """Stop the communication with the shield."""
    # Inject a ConnectionClosed event through the normal receive path so
    # shutdown is handled like any other incoming message.
    with self.lock:
        self._message_received(ConnectionClosed(self._file, self))
/**
 * Creates a text-protocol incr/decr command.
 *
 * NOTE(review): the {@code exptime} parameter is accepted but never passed
 * to {@code TextIncrDecrCommand} -- confirm whether that is intentional.
 */
public final Command createIncrDecrCommand(final String key, final byte[] keyBytes,
        final long amount, long initial, int exptime, CommandType cmdType, boolean noreply) {
    return new TextIncrDecrCommand(key, keyBytes, cmdType, new CountDownLatch(1), amount, initial,
        noreply);
}
def assignmentCommand(FrequencyList_presence=0,
                      CellChannelDescription_presence=0,
                      CellChannelDescription_presence1=0,
                      MultislotAllocation_presence=0,
                      ChannelMode_presence=0, ChannelMode_presence1=0,
                      ChannelMode_presence2=0, ChannelMode_presence3=0,
                      ChannelMode_presence4=0, ChannelMode_presence5=0,
                      ChannelMode_presence6=0, ChannelMode_presence7=0,
                      ChannelDescription=0, ChannelMode2_presence=0,
                      MobileAllocation_presence=0, StartingTime_presence=0,
                      FrequencyList_presence1=0,
                      ChannelDescription2_presence=0,
                      ChannelDescription_presence=0,
                      FrequencyChannelSequence_presence=0,
                      MobileAllocation_presence1=0,
                      CipherModeSetting_presence=0,
                      VgcsTargetModeIdentication_presence=0,
                      MultiRateConfiguration_presence=0):
    """ASSIGNMENT COMMAND Section 9.1.2

    Builds the mandatory part (TP-PD, message type 0x2e, channel
    description 2, power command) and appends every optional IE whose
    ``*_presence`` flag equals 1, in specification order.
    """
    packet = (TpPd(pd=0x6) /
              MessageType(mesType=0x2e) /  # 101110
              ChannelDescription2() /
              PowerCommand())
    # (presence flag, deferred IE constructor) pairs in spec order.  Lambdas
    # preserve the original behaviour of constructing an IE only on demand.
    # NOTE(review): ChannelDescription_presence is consulted twice (IEI 0x64
    # and IEI 0x1D), exactly as in the original -- confirm against the spec
    # whether the second occurrence should use its own flag.
    optional_ies = (
        (FrequencyList_presence,
         lambda: FrequencyListHdr(ieiFL=0x05, eightBitFL=0x0)),
        (CellChannelDescription_presence,
         lambda: CellChannelDescriptionHdr(ieiCCD=0x62, eightBitCCD=0x0)),
        (MultislotAllocation_presence,
         lambda: MultislotAllocationHdr(ieiMSA=0x10, eightBitMSA=0x0)),
        (ChannelMode_presence,
         lambda: ChannelModeHdr(ieiCM=0x63, eightBitCM=0x0)),
        (ChannelMode_presence1,
         lambda: ChannelModeHdr(ieiCM=0x11, eightBitCM=0x0)),
        (ChannelMode_presence2,
         lambda: ChannelModeHdr(ieiCM=0x13, eightBitCM=0x0)),
        (ChannelMode_presence3,
         lambda: ChannelModeHdr(ieiCM=0x14, eightBitCM=0x0)),
        (ChannelMode_presence4,
         lambda: ChannelModeHdr(ieiCM=0x15, eightBitCM=0x0)),
        (ChannelMode_presence5,
         lambda: ChannelModeHdr(ieiCM=0x16, eightBitCM=0x0)),
        (ChannelMode_presence6,
         lambda: ChannelModeHdr(ieiCM=0x17, eightBitCM=0x0)),
        (ChannelMode_presence7,
         lambda: ChannelModeHdr(ieiCM=0x18, eightBitCM=0x0)),
        (ChannelDescription_presence,
         lambda: ChannelDescriptionHdr(ieiCD=0x64, eightBitCD=0x0)),
        (ChannelMode2_presence,
         lambda: ChannelMode2Hdr(ieiCM2=0x66, eightBitCM2=0x0)),
        (MobileAllocation_presence,
         lambda: MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)),
        (StartingTime_presence,
         lambda: StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)),
        (FrequencyList_presence1,
         lambda: FrequencyListHdr(ieiFL=0x19, eightBitFL=0x0)),
        (ChannelDescription2_presence,
         lambda: ChannelDescription2Hdr(ieiCD2=0x1C, eightBitCD2=0x0)),
        (ChannelDescription_presence,
         lambda: ChannelDescriptionHdr(ieiCD=0x1D, eightBitCD=0x0)),
        (FrequencyChannelSequence_presence,
         lambda: FrequencyChannelSequenceHdr(ieiFCS=0x1E, eightBitFCS=0x0)),
        (MobileAllocation_presence1,
         lambda: MobileAllocationHdr(ieiMA=0x21, eightBitMA=0x0)),
        (CipherModeSetting_presence,
         lambda: CipherModeSettingHdr(ieiCMS=0x9, eightBitCMS=0x0)),
        (VgcsTargetModeIdentication_presence,
         lambda: VgcsTargetModeIdenticationHdr(ieiVTMI=0x01, eightBitVTMI=0x0)),
        (MultiRateConfiguration_presence,
         lambda: MultiRateConfigurationHdr(ieiMRC=0x03, eightBitMRC=0x0)),
    )
    # BUG FIX: the original compared flags with ``is 1`` (object identity
    # against an int literal -- deprecated and CPython-implementation
    # dependent); ``== 1`` is the correct comparison for the documented
    # 0/1 flag values.
    for presence, make_ie in optional_ies:
        if presence == 1:
            packet = packet / make_ie()
    return packet
/**
 * Determines the workplace locale for the request from its
 * Accept-Language header, falling back to the OpenCms default locale.
 */
private static Locale getLocaleForRequest(HttpServletRequest req) {
    CmsAcceptLanguageHeaderParser parser = new CmsAcceptLanguageHeaderParser(
        req,
        OpenCms.getWorkplaceManager().getDefaultLocale());
    List<Locale> acceptedLocales = parser.getAcceptedLocales();
    List<Locale> workplaceLocales = OpenCms.getWorkplaceManager().getLocales();
    // Pick the first accepted locale that the workplace actually supports.
    Locale locale = OpenCms.getLocaleManager().getFirstMatchingLocale(acceptedLocales, workplaceLocales);
    if (locale == null) {
        // no match found - use OpenCms default locale
        locale = OpenCms.getWorkplaceManager().getDefaultLocale();
    }
    return locale;
}
def clean_directories(builddir, in_dir=True, out_dir=True):
    """Remove the in and out of the container if confirmed by the user.

    :param builddir: build directory containing the container directories.
    :param in_dir: also offer to delete ``container-in``.
    :param out_dir: also offer to delete ``container-out``.
    """
    container_in = local.path(builddir) / "container-in"
    container_out = local.path(builddir) / "container-out"
    # Each deletion is gated on an interactive confirmation.
    if in_dir and container_in.exists():
        if ui.ask("Should I delete '{0}'?".format(container_in)):
            container_in.delete()
    if out_dir and container_out.exists():
        if ui.ask("Should I delete '{0}'?".format(container_out)):
            container_out.delete()
/**
 * Parses the given object as a long in the supplied radix, returning the
 * default for null or unparsable input.
 */
public static long parseLong (@Nullable final Object aObject, @Nonnegative final int nRadix, final long nDefault)
{
    if (aObject == null)
        return nDefault;
    // Numbers short-circuit: take their long value directly (radix unused).
    if (aObject instanceof Number)
        return ((Number) aObject).longValue ();
    return parseLong (aObject.toString (), nRadix, nDefault);
}
/**
 * Loads a proto definition from the given reader into the target Proto.
 */
public static void loadFrom(Reader reader, Proto target) throws Exception
{
    loadFrom(new ANTLRReaderStream(reader), target);
}
/**
 * Finds the commerce country by group id and numeric ISO code.
 *
 * @throws com.liferay.commerce.exception.NoSuchCountryException if no
 *         matching country exists
 */
public static CommerceCountry findByG_N(long groupId, int numericISOCode)
    throws com.liferay.commerce.exception.NoSuchCountryException {
    return getPersistence().findByG_N(groupId, numericISOCode);
}
def itervalues(obj):
    """Get value iterator from dictionary for Python 2 and 3"""
    if sys.version_info.major == 3:
        return iter(obj.values())
    return obj.itervalues()
/**
 * Registers the execution as involved in this context, keyed by its id;
 * executions without an id are ignored.
 */
public void addInvolvedExecution(ExecutionEntity executionEntity) {
    if (executionEntity.getId() != null) {
        involvedExecutions.put(executionEntity.getId(),
            executionEntity);
    }
}
def get_starting_chunk(filename, length=1024):
    """
    :param filename: File to open and get the first little chunk of.
    :param length: Number of bytes to read, default 1024.
    :returns: Starting chunk of bytes, or None when the file cannot be
        read (the error is printed, as in the original).
    """
    try:
        # Binary mode: the caller receives raw bytes, no decoding.
        with open(filename, 'rb') as stream:
            return stream.read(length)
    except IOError as err:
        print(err)
/**
 * Requests a mass-print URL for the given list of tax-invoice management
 * keys.
 *
 * NOTE(review): the Korean error messages below appear mojibake-encoded in
 * this copy of the source (and their literals were line-wrapped by the
 * extraction) -- restore them from the upstream file.
 */
@Override
public String getMassPrintURL(String CorpNum, MgtKeyType KeyType,
        String[] MgtKeyList, String UserID) throws PopbillException {
    if (KeyType == null)
        throw new PopbillException(-99999999, "๊ด๋ฆฌ๋ฒํธํํ๊ฐ ์๋ ฅ๋์ง ์์์ต๋๋ค.");
    if (MgtKeyList == null || MgtKeyList.length == 0)
        throw new PopbillException(-99999999, "๊ด๋ฆฌ๋ฒํธ ๋ชฉ๋ก์ด ์๋ ฅ๋์ง ์์์ต๋๋ค.");
    String PostData = toJsonString(MgtKeyList);
    URLResponse response = httppost("/Taxinvoice/" + KeyType.name()
        + "?Print", CorpNum, PostData, UserID, URLResponse.class);
    return response.url;
}
def get_file(self, file_id):
    """
    Use this method to get basic info about a file and prepare it for downloading. For the moment, bots can download files of up to 20MB in size. On success, a File object is returned. The file can then be downloaded via the link https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is taken from the response. It is guaranteed that the link will be valid for at least 1 hour. When the link expires, a new one can be requested by calling getFile again.

    Note: This function may not preserve the original file name and MIME type. You should save the file's MIME type and name (if available) when the File object is received.

    https://core.telegram.org/bots/api#getfile

    Parameters:
    :param file_id: File identifier to get info about
    :type file_id: str|unicode

    Returns:
    :return: On success, a File object is returned
    :rtype: pytgbot.api_types.receivable.media.File
    """
    assert_type_or_raise(file_id, unicode_type, parameter_name="file_id")
    result = self.do("getFile", file_id=file_id)
    # When configured, convert the raw API response into a typed File.
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        from pytgbot.api_types.receivable.media import File
        try:
            return File.from_array(result)
        except TgApiParseException:
            logger.debug("Failed parsing as api_type File", exc_info=True)
        # end try
        # no valid parsing so far
        raise TgApiParseException("Could not parse result.")  # See debug log for details!
    # end if return_python_objects
    return result
def load_config(self, conf):
    """
    Load configurations from an rc file

    Parameters
    ----------
    conf: configparser.ConfigParser
        parsed rc-file contents (the original docstring documented a
        nonexistent ``rc`` path parameter)

    Returns
    -------
    None
    """
    # Each class reads its own section, named after the class.
    section = self.__class__.__name__
    if section not in conf.sections():
        logger.debug('Config section {} not in rc file'.format(
            self.__class__.__name__))
        return
    for key in conf[section].keys():
        # Only keys matching an existing attribute are applied.
        if not hasattr(self, key):
            logger.debug('Config key {}.{} skipped'.format(section, key))
            continue
        val = conf[section].get(key)
        # Prefer float, then boolean, else keep the raw string value.
        try:
            val = conf[section].getfloat(key)
        except ValueError:
            try:
                val = conf[section].getboolean(key)
            except ValueError:
                pass
        self.__dict__.update({key: val})
    self.check()
/**
 * Builds the "next profile" navigation list item; a plain label is used
 * when there is no next profile to link to.
 */
public Content getNavLinkNext() {
    Content li;
    if (nextProfile == null) {
        li = HtmlTree.LI(nextprofileLabel);
    } else {
        li = HtmlTree.LI(getHyperLink(pathToRoot.resolve(DocPaths.profileSummary(
            nextProfile.name)), nextprofileLabel, "", ""));
    }
    return li;
}
def _opening_bracket_index(self, text, bpair=('(', ')')):
    """Return the index of the opening bracket that matches the closing
    bracket at the end of the text (None when no match is found)."""
    open_char, close_char = bpair
    depth = 1
    # Walk backwards through the text, skipping the final closing bracket.
    for offset, char in enumerate(reversed(text[:-1])):
        if char == close_char:
            depth += 1
        elif char == open_char:
            depth -= 1
            if depth == 0:
                return len(text) - offset - 2
def parse_bytes_str(value):
    """
    Given a value return the integer number of bytes it represents.
    A trailing "MB" causes the value to be multiplied by 1024*1024.

    :param value: str (optionally with "MB" suffix) or a number
    :return: int number of bytes represented by value.
    """
    # ``isinstance`` instead of the original ``type(value) == str`` so
    # str subclasses are handled too.
    if isinstance(value, str):
        if "MB" in value:
            return int(value.replace("MB", "")) * MB_TO_BYTES
        return int(value)
    # Numbers pass through unchanged.
    return value
/**
 * Reads a job, applying the standard pre-execution request hooks first.
 */
@Override
public ReadJobResult readJob(ReadJobRequest request) {
    request = beforeClientExecution(request);
    return executeReadJob(request);
}
java | public BackupChainLog[] getBackupsLogs()
{
File[] cfs = PrivilegedFileHelper.listFiles(logsDirectory, new BackupLogsFilter());
List<BackupChainLog> logs = new ArrayList<BackupChainLog>();
for (int i = 0; i < cfs.length; i++)
{
File cf = cfs[i];
try
{
if (!isCurrentBackup(cf))
{
logs.add(new BackupChainLog(cf));
}
}
catch (BackupOperationException e)
{
LOG.warn("Log file " + PrivilegedFileHelper.getAbsolutePath(cf) + " is bussy or corrupted. Skipped. " + e,
e);
}
}
BackupChainLog[] ls = new BackupChainLog[logs.size()];
logs.toArray(ls);
return ls;
} |
/**
 * Sends the given bytes as a binary packet.
 */
public void send(byte[] buf) {
    if (log.isTraceEnabled()) {
        log.trace("send binary: {}", Arrays.toString(buf));
    }
    // send the incoming bytes wrapped in a protocol packet
    send(Packet.build(buf));
}
/**
 * Emits the private final fields backing the generated proxy: the
 * closures map and, when requested, the delegate object field.
 */
private void addDelegateFields() {
    visitField(ACC_PRIVATE + ACC_FINAL, CLOSURES_MAP_FIELD, "Ljava/util/Map;", null, null);
    if (generateDelegateField) {
        visitField(ACC_PRIVATE + ACC_FINAL, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass), null, null);
    }
}
/**
 * Stores an immutable defensive copy of the given properties map.
 */
public void setUnknownProperties(Map<String, Object> unknownProperties) {
    this.unknownProperties = Collections.unmodifiableMap(
        new HashMap<>(unknownProperties));
}
java | static Object decode(String s) {
byte[] bytes = Base64.base64ToByteArray(s);
ObjectInputStream objectInputStream;
try {
objectInputStream = new ObjectInputStream(
new ByteArrayInputStream(bytes));
return objectInputStream.readObject();
} catch (IOException | ClassNotFoundException e) {
log.error(e);
return null;
}
} |
def set_bottom_margin(self, bottom_margin):
    """
    Set the bottom margin of the menu. This will determine the number of
    console lines that appear between the bottom of the menu border and
    the menu input prompt.

    :param bottom_margin: an integer value
    :return: self, to allow call chaining
    """
    margins = self.__footer.style.margins
    margins.bottom = bottom_margin
    return self
/**
 * Retrieves all affiliations using default paging (no bounds).
 */
public List<Affiliation> getAffiliations() throws NoResponseException, XMPPErrorException,
        NotConnectedException, InterruptedException {
    return getAffiliations(null, null);
}
def remote_url(connector, env, repo, filename):
    """
    return a str containing a link to the rpm in the pulp repository

    :param connector: pulp API connector exposing ``base_url`` and ``get``.
    :param env: environment name, appended to the repo id and the URL.
    :param repo: repository name (tried as ``repo-env`` repoid first,
        then as a literal repoid).
    :param filename: rpm file name appended to the URL.
    """
    # The download host serves /pulp/repos where the API serves /pulp/api/v2.
    dl_base = connector.base_url.replace('/pulp/api/v2', '/pulp/repos')
    repoid = '%s-%s' % (repo, env)
    _r = connector.get('/repositories/%s/' % repoid)
    if not _r.status_code == Constants.PULP_GET_OK:
        # maybe the repo name is the repoid
        _r = connector.get('/repositories/%s/' % repo)
        if not _r.status_code == Constants.PULP_GET_OK:
            raise JuicerPulpError("%s was not found as a repoid. Status code %s returned by pulp" % \
                (repoid, _r.status_code))
    # Use the repository's display name as the public URL path segment.
    repo = juicer.utils.load_json_str(_r.content)['display_name']
    link = '%s/%s/%s/%s' % (dl_base, env, repo, filename)
    return link
def fit(pointlist):
    '''
    Fit a centroid to the points and report its mean squared error.

    Parameters
      pointlist
        [[x0,y0], [x1,y1], [x2,y2], ...]
        Points [None, None] are allowed but ignored.
    '''
    # Separate x and y coordinates, dropping non-numeric entries (None).
    xs = [point[0] for point in pointlist if isinstance(point[0], Number)]
    ys = [point[1] for point in pointlist if isinstance(point[1], Number)]
    # Note biased mean and variance
    mean_x = sum(xs) / float(len(xs))
    # BUG FIX: the original computed mean_y from ``xs`` (sum(xs)/len(xs)).
    mean_y = sum(ys) / float(len(ys))
    var_x = variance(xs, mean_x)
    var_y = variance(ys, mean_y)
    # Combine variances:
    #   variance_x = (dx0^2 + dx1^2 + ...) / n
    # Therefore
    #   variance_xy = ((dx0^2 + dy0^2) + (dx1^2 + dy1^2) + ...) / n
    #               = variance_x + variance_y
    var_xy = var_x + var_y
    return {
        'centroid': [mean_x, mean_y],
        'mean_squared_error': var_xy
    }
def plot_to_svg(plot, width, height, unit=''):
    """Converts a plot (list of layers) into an SVG document.

    Args:
        plot (list): list of layers that make up the plot
        width (float): the width of the resulting image
        height (float): the height of the resulting image
        unit (str): the units of the resulting image if not pixels

    Returns:
        str: A stringified XML document representing the image
    """
    # Flip the y axis: plot coordinates are y-up, SVG is y-down.
    # NOTE(review): this unpacks ``plot`` elements as (x, y) pairs while the
    # loop below treats each element as a layer passed to layer_to_path --
    # confirm the expected input structure with the callers.
    flipped_plot = [(x, -y) for x, y in plot]
    aspect_ratio = height / width
    view_box = calculate_view_box(flipped_plot, aspect_ratio=aspect_ratio)
    view_box_str = '{} {} {} {}'.format(*view_box)
    # Stroke width scales with the view box so line weight is size-independent.
    stroke_thickness = STROKE_THICKNESS * (view_box[2])
    svg = ET.Element('svg', attrib={
        'xmlns': 'http://www.w3.org/2000/svg',
        'xmlns:inkscape': 'http://www.inkscape.org/namespaces/inkscape',
        'width': '{}{}'.format(width, unit),
        'height': '{}{}'.format(height, unit),
        'viewBox': view_box_str})
    # One Inkscape layer group per plot layer, cycling through the palette.
    for i, layer in enumerate(flipped_plot):
        group = ET.SubElement(svg, 'g', attrib={
            'inkscape:label': '{}-layer'.format(i),
            'inkscape:groupmode': 'layer',
        })
        color = PLOT_COLORS[i % len(PLOT_COLORS)]
        ET.SubElement(group, 'path', attrib={
            'style': 'stroke-width: {}; stroke: {};'.format(stroke_thickness, color),
            'fill': 'none',
            'd': layer_to_path(layer)
        })
    try:
        return ET.tostring(svg, encoding='unicode')
    except LookupError:
        # Python 2.x
        return ET.tostring(svg)
def _get_nets_radb(self, *args, **kwargs):
    """
    Deprecated. This will be removed in a future release.

    Thin wrapper that emits a deprecation warning and forwards all
    arguments to ``get_nets_radb``.
    """
    # Imported locally so the warning machinery is only touched on this
    # deprecated path.
    from warnings import warn
    warn('ASNOrigin._get_nets_radb() has been deprecated and will be '
         'removed. You should now use ASNOrigin.get_nets_radb().')
    return self.get_nets_radb(*args, **kwargs)
/**
 * Resolves the template (explicit resource or file path), substitutes
 * dynamic content for the given test context and returns the result as
 * an input stream.
 */
protected InputStream getTemplateAsStream(TestContext context) {
    Resource resource;
    if (templateResource != null) {
        resource = templateResource;
    } else {
        resource = FileUtils.getFileResource(template, context);
    }
    String templateYml;
    try {
        // Replace test variables / functions inside the template text.
        templateYml = context.replaceDynamicContentInString(FileUtils.readToString(resource));
    } catch (IOException e) {
        throw new CitrusRuntimeException("Failed to read template resource", e);
    }
    return new ByteArrayInputStream(templateYml.getBytes());
}
/**
 * Reads a project by name, consulting the monitor cache before hitting
 * the project driver, and caching the freshly read result.
 */
public CmsProject readProject(CmsDbContext dbc, String name) throws CmsException {
    CmsProject project = null;
    project = m_monitor.getCachedProject(name);
    if (project == null) {
        project = getProjectDriver(dbc).readProject(dbc, name);
        m_monitor.cacheProject(project);
    }
    return project;
}
def movie(args):
    """
    %prog movie input.bed scaffolds.fasta chr1

    Visualize history of scaffold OO. The history is contained within the
    tourfile, generated by path(). For each historical scaffold OO, the program
    plots a separate PDF file. The plots can be combined to show the progression
    as a little animation. The third argument limits the plotting to a
    specific pseudomolecule, for example `chr1`.
    """
    p = OptionParser(movie.__doc__)
    p.add_option("--gapsize", default=100, type="int",
                 help="Insert gaps of size between scaffolds")
    add_allmaps_plot_options(p)
    opts, args = p.parse_args(args)

    if len(args) != 3:
        sys.exit(not p.print_help())

    inputbed, scaffoldsfasta, seqid = args
    gapsize = opts.gapsize
    pf = inputbed.rsplit(".", 1)[0]
    agpfile = pf + ".chr.agp"
    tourfile = pf + ".tour"

    fp = open(tourfile)
    sizes = Sizes(scaffoldsfasta).mapping
    # Frames are collected in a directory named "ffmpeg" for later assembly.
    ffmpeg = "ffmpeg"
    mkdir(ffmpeg)
    score = cur_score = None
    i = 1
    for header, block in read_block(fp, ">"):
        s, tag, label = header[1:].split()
        if s != seqid:
            continue
        tour = block[0].split()
        # Each tour entry is scaffold name plus a trailing orientation char.
        tour = [(x[:-1], x[-1]) for x in tour]
        if label.startswith("GA"):
            cur_score = label.split("-")[-1]
            # Skip GA iterations whose score did not change -- no new frame.
            if cur_score == score:
                i += 1
                continue
            score = cur_score

        image_name = ".".join((seqid, "{0:04d}".format(i), label, "pdf"))
        if need_update(tourfile, image_name):
            fwagp = must_open(agpfile, "w")
            order_to_agp(seqid, tour, sizes, fwagp, gapsize=gapsize,
                         gaptype="map")
            fwagp.close()
            logging.debug("{0} written to `{1}`".format(header, agpfile))
            build([inputbed, scaffoldsfasta, "--cleanup"])
            pdf_name = plot([inputbed, seqid, "--title={0}".format(label)])
            sh("mv {0} {1}".format(pdf_name, image_name))
        if label in ("INIT", "FLIP", "TSP", "FINAL"):
            # Hold milestone frames for 5 extra copies to pause the animation.
            for j in xrange(5):  # Delay for 5 frames
                image_delay = image_name.rsplit(".", 1)[0] + \
                    ".d{0}.pdf".format(j)
                sh("cp {0} {1}/{2}".format(image_name, ffmpeg, image_delay))
        else:
            sh("cp {0} {1}/".format(image_name, ffmpeg))
        i += 1

    make_movie(ffmpeg, pf)
def run():
    """Run custom scalar demo and generate event files."""
    step = tf.compat.v1.placeholder(tf.float32, shape=[])

    with tf.name_scope('loss'):
        # Specify 2 different loss values, each tagged differently.
        summary_lib.scalar('foo', tf.pow(0.9, step))
        summary_lib.scalar('bar', tf.pow(0.85, step + 2))

        # Log metric baz as well as upper and lower bounds for a margin chart.
        middle_baz_value = step + 4 * tf.random.uniform([]) - 2
        summary_lib.scalar('baz', middle_baz_value)
        summary_lib.scalar('baz_lower',
                           middle_baz_value - 6.42 - tf.random.uniform([]))
        summary_lib.scalar('baz_upper',
                           middle_baz_value + 6.42 + tf.random.uniform([]))

    with tf.name_scope('trigFunctions'):
        summary_lib.scalar('cosine', tf.cos(step))
        summary_lib.scalar('sine', tf.sin(step))
        summary_lib.scalar('tangent', tf.tan(step))

    merged_summary = tf.compat.v1.summary.merge_all()

    with tf.compat.v1.Session() as sess, tf.summary.FileWriter(LOGDIR) as writer:
        # We only need to specify the layout once (instead of per step).
        layout_summary = summary_lib.custom_scalar_pb(
            layout_pb2.Layout(category=[
                layout_pb2.Category(
                    title='losses',
                    chart=[
                        layout_pb2.Chart(
                            title='losses',
                            multiline=layout_pb2.MultilineChartContent(
                                tag=[r'loss(?!.*margin.*)'],)),
                        layout_pb2.Chart(
                            title='baz',
                            margin=layout_pb2.MarginChartContent(
                                series=[
                                    layout_pb2.MarginChartContent.Series(
                                        value='loss/baz/scalar_summary',
                                        lower='loss/baz_lower/scalar_summary',
                                        upper='loss/baz_upper/scalar_summary'
                                    ),
                                ],)),
                    ]),
                layout_pb2.Category(
                    title='trig functions',
                    chart=[
                        layout_pb2.Chart(
                            title='wave trig functions',
                            multiline=layout_pb2.MultilineChartContent(
                                tag=[
                                    r'trigFunctions/cosine', r'trigFunctions/sine'
                                ],)),
                        # The range of tangent is different. Give it its own chart.
                        layout_pb2.Chart(
                            title='tan',
                            multiline=layout_pb2.MultilineChartContent(
                                tag=[r'trigFunctions/tangent'],)),
                    ],
                    # This category we care less about. Make it initially closed.
                    closed=True),
            ]))
        writer.add_summary(layout_summary)

        # Evaluate and record the merged summaries for 42 steps.
        for i in xrange(42):
            summary = sess.run(merged_summary, feed_dict={step: i})
            writer.add_summary(summary, global_step=i)
def has_ssd(self):
    """Return True if any drive under the ArrayControllers is an SSD."""
    return any(member.physical_drives.has_ssd
               for member in self.get_members())
def list_streams(self, file_type, start=0, limit=100,
                 filter_path=None, **kwargs):
    """List the files of one media type in the application's folder.

    (Docstring translated from Chinese; the original text was also
    mojibake-encoded in this copy of the source -- restore details such as
    path-character restrictions from the upstream file.)

    :param file_type: one of ``video``, ``audio``, ``image`` or ``doc``.
    :param start: offset of the first entry to return, default 0.
    :param limit: maximum number of entries to return.
    :param filter_path: optional path prefix to filter by,
        e.g. ``/apps/album``.
    :return: Response object
    """
    params = {
        'type': file_type,
        'start': start,
        'limit': limit,
        'filter_path': filter_path,
    }
    return self._request('stream', 'list', extra_params=params,
                         **kwargs)
/**
 * Initialises the default metadata and cached configuration flags before
 * the cache starts, so early-starting components can already use them.
 */
@Inject
public void preStart() {
    // We have to do this before start, since some components may start before the actual cache and they
    // have to have access to the default metadata on some operations
    defaultMetadata = new EmbeddedMetadata.Builder()
        .lifespan(config.expiration().lifespan()).maxIdle(config.expiration().maxIdle()).build();
    transactional = config.transaction().transactionMode().isTransactional();
    batchingEnabled = config.invocationBatching().enabled();
}
def list_relations(self):
    """Yield every relation stored in the database as (src, relation, dst)."""
    rows = self._execute('select * from relations').fetchall()
    for src_id, name, dst_id in rows:
        src = self.deserialize(
            next(self._execute('select code from objects where id=?', (src_id,)))[0]
        )
        dst = self.deserialize(
            next(self._execute('select code from objects where id=?', (dst_id,)))[0]
        )
        yield src, name, dst
def ctrl_c_handler(self, signum, frame):
    """SIGINT handler: record the interrupt and best-effort save state."""
    self.ctrl_c_pressed = True
    if self._cnf.dirty_playlist:
        # Try to auto save the playlist on exit.
        # The result is deliberately not checked.
        self.saveCurrentPlaylist()
    # Try to auto save the config on exit; result ignored as well.
    self._cnf.save_config()
def _GetNumberOfSeconds(self, fat_date_time):
    """Retrieves the number of seconds from a FAT date time.

    Args:
        fat_date_time (int): FAT date time.

    Returns:
        int: number of seconds since January 1, 1980 00:00:00.

    Raises:
        ValueError: if the month, day of month, hours, minutes or seconds
            value is out of bounds.
    """
    # The lower 16 bits encode the date: bits 0-4 day of month,
    # bits 5-8 month, bits 9-15 year relative to 1980.
    day_of_month = (fat_date_time & 0x1f)
    month = ((fat_date_time >> 5) & 0x0f)
    year = (fat_date_time >> 9) & 0x7f

    # Presumably _GetDaysPerMonth raises for an out-of-bounds month,
    # which would satisfy the "month ... out of bounds" clause above --
    # TODO confirm against its implementation.
    days_per_month = self._GetDaysPerMonth(year, month)
    if day_of_month < 1 or day_of_month > days_per_month:
        raise ValueError('Day of month value out of bounds.')

    number_of_days = self._GetDayOfYear(1980 + year, month, day_of_month)
    number_of_days -= 1
    for past_year in range(0, year):
        number_of_days += self._GetNumberOfDaysInYear(past_year)

    # The upper 16 bits encode the time: bits 0-4 seconds / 2,
    # bits 5-10 minutes, bits 11-15 hours.
    fat_date_time >>= 16

    # Seconds are stored with 2-second granularity (raw 0-29 => 0-58);
    # raw values 30 and 31 would yield 60/62 and are rejected below.
    seconds = (fat_date_time & 0x1f) * 2
    minutes = (fat_date_time >> 5) & 0x3f
    hours = (fat_date_time >> 11) & 0x1f

    if hours not in range(0, 24):
        raise ValueError('Hours value out of bounds.')

    if minutes not in range(0, 60):
        raise ValueError('Minutes value out of bounds.')

    if seconds not in range(0, 60):
        raise ValueError('Seconds value out of bounds.')

    number_of_seconds = (((hours * 60) + minutes) * 60) + seconds
    number_of_seconds += number_of_days * definitions.SECONDS_PER_DAY
    return number_of_seconds
/**
 * Switches the view between the overview table and the result list,
 * updating visibility and enabled state of the toolbar controls.
 *
 * @param enabled true to show the overview, false to show the result list
 */
void enableOverviewMode(boolean enabled) {
    // Most edit/create actions are only allowed outside the Online project.
    boolean isOffline = !A_CmsUI.getCmsObject().getRequestContext().getCurrentProject().isOnlineProject();
    m_publishButton.setVisible(!enabled);
    m_publishButton.setEnabled(isOffline);
    m_infoButton.setVisible(!enabled);
    m_tableFilter.setVisible(enabled);
    m_textSearch.setVisible(!enabled);
    m_editCurrentButton.setVisible(!enabled);
    m_editCurrentButton.setEnabled(isOffline);
    m_toggleSeriesButton.setVisible(m_hasSeriesType && !enabled);
    m_resultSorter.setVisible(!enabled);
    m_localeSelect.setVisible(!enabled);
    m_isOverView = enabled;
    m_rootLayout.setMainContent(enabled ? m_overviewTable : m_resultLayout);
    m_createNewButton.setVisible(enabled);
    if (enabled) {
        if (!isOffline) {
            // Creating list configurations is not possible in the Online project.
            m_createNewButton.setEnabled(false);
            m_createNewButton.setDescription(
                CmsVaadinUtils.getMessageText(Messages.GUI_LISTMANAGER_NOT_CREATABLE_ONLINE_0));
        } else {
            // Check whether the list configuration resource type may be
            // created in the current site before enabling the button.
            CmsObject cms = A_CmsUI.getCmsObject();
            CmsADEConfigData data = OpenCms.getADEManager().lookupConfiguration(
                cms,
                cms.getRequestContext().getSiteRoot());
            CmsResourceTypeConfig typeConfig = data.getResourceType(RES_TYPE_LIST_CONFIG);
            try {
                if ((typeConfig == null)
                    || !typeConfig.checkCreatable(cms, cms.getRequestContext().getSiteRoot())) {
                    m_createNewButton.setEnabled(false);
                    m_createNewButton.setDescription(
                        CmsVaadinUtils.getMessageText(Messages.GUI_LISTMANAGER_NOT_CREATABLE_TYPE_0));
                } else {
                    m_createNewButton.setEnabled(true);
                    m_createNewButton.setDescription(
                        CmsVaadinUtils.getMessageText(Messages.GUI_LISTMANAGER_CREATE_NEW_0));
                }
            } catch (CmsException e) {
                // If the check itself fails, err on the side of disabling creation.
                m_createNewButton.setEnabled(false);
                m_createNewButton.setDescription(
                    CmsVaadinUtils.getMessageText(Messages.GUI_LISTMANAGER_NOT_CREATABLE_TYPE_0));
            }
        }
    }
}
/**
 * Parses a page from the given source, delegating to the four-argument
 * overload with the final boolean flag set to {@code false}.
 *
 * @param securityContext the security context to parse under
 * @param source the page source
 * @param name the page name
 * @return the parsed page
 * @throws FrameworkException if parsing fails
 */
public static Page parsePageFromSource(final SecurityContext securityContext, final String source, final String name) throws FrameworkException {
    return parsePageFromSource(securityContext, source, name, false);
}
/**
 * Lazily compiles and caches the JavaScript aggregator.
 *
 * <p>Uses a double-checked locking pattern around the {@code compiledScript}
 * field. NOTE(review): this is only safe if {@code compiledScript} is
 * declared {@code volatile} (or otherwise safely published) -- confirm at
 * the field declaration, which is outside this view.</p>
 */
@EnsuresNonNull("compiledScript")
private JavaScriptAggregator.ScriptAggregator getCompiledScript()
{
    // JavaScript configuration should be checked when it's actually used because someone might still want Druid
    // nodes to be able to deserialize JavaScript-based objects even though JavaScript is disabled.
    Preconditions.checkState(config.isEnabled(), "JavaScript is disabled");

    JavaScriptAggregator.ScriptAggregator syncedCompiledScript = compiledScript;
    if (syncedCompiledScript == null) {
        synchronized (config) {
            // Re-read inside the lock: another thread may have compiled it.
            syncedCompiledScript = compiledScript;
            if (syncedCompiledScript == null) {
                syncedCompiledScript = compileScript(fnAggregate, fnReset, fnCombine);
                compiledScript = syncedCompiledScript;
            }
        }
    }
    return syncedCompiledScript;
}
/**
 * Copies the full contents of {@code from} to {@code to}, decoding the
 * file with the given character set.
 *
 * @param from the source file
 * @param charset the character set used to decode the file
 * @param to the destination to append the decoded characters to
 * @throws IOException if an I/O error occurs while reading or writing
 */
public static void copy(File from, Charset charset, Appendable to) throws IOException {
    asCharSource(from, charset).copyTo(to);
}
java | public void bind(final ValidationObject validationObject, final ProxyField proxyField, final FieldReference fieldReference, final ValidationObject owner)
{
final RuleProxyField ruleProxyField = getRuleProxyField(proxyField);
for (Rule rule: fieldReference.getListeners())
{
RuleContext ruleContext;
try {
ruleContext = getRuleContext(rule,validationObject,(owner==null)?validationObject:owner);
} catch (FailsToMatchException e) {
continue;
}
ruleProxyField.addInputRule(ruleContext);
}
for (Rule rule: fieldReference.getOutputs())
{
RuleContext ruleContext;
try {
ruleContext = getRuleContext(rule,validationObject,(owner==null)?validationObject:owner);
} catch (FailsToMatchException e) {
continue;
}
ruleProxyField.addOutputRule(ruleContext);
}
} |
/**
 * Sets the supplemental IMPs.
 *
 * <p>A defensive copy of the given collection is stored; passing
 * {@code null} clears the value.</p>
 *
 * @param supplementalImps the supplemental IMPs, or {@code null} to clear
 */
public void setSupplementalImps(java.util.Collection<String> supplementalImps) {
    if (supplementalImps == null) {
        this.supplementalImps = null;
        return;
    }
    this.supplementalImps = new java.util.ArrayList<String>(supplementalImps);
}
java | public static void stringToFile(FileSystem fs, Path path, String string)
throws IOException {
OutputStream os = fs.create(path, true);
PrintWriter pw = new PrintWriter(os);
pw.append(string);
pw.close();
} |
def draw_edges(self):
    """
    Renders edges to the figure.
    """
    centre = (0, 0)
    codes = [Path.MOVETO, Path.CURVE3, Path.CURVE3]
    for i, (start, end) in enumerate(self.graph.edges()):
        # Each edge is drawn as a quadratic curve from the start node,
        # through the plot centre, to the end node.
        start_point = get_cartesian(self.plot_radius,
                                    node_theta(self.nodes, start))
        end_point = get_cartesian(self.plot_radius,
                                  node_theta(self.nodes, end))
        path = Path([start_point, centre, end_point], codes)
        patch = patches.PathPatch(
            path,
            lw=self.edge_widths[i],
            edgecolor=self.edge_colors[i],
            zorder=1,
            **self.edgeprops
        )
        self.ax.add_patch(patch)
java | private void addNamedNativeQueryMetadata(Class clazz)
{
ApplicationMetadata appMetadata = kunderaMetadata.getApplicationMetadata();
String name, query = null;
if (clazz.isAnnotationPresent(NamedQuery.class))
{
NamedQuery ann = (NamedQuery) clazz.getAnnotation(NamedQuery.class);
appMetadata.addQueryToCollection(ann.name(), ann.query(), false, clazz);
}
if (clazz.isAnnotationPresent(NamedQueries.class))
{
NamedQueries ann = (NamedQueries) clazz.getAnnotation(NamedQueries.class);
NamedQuery[] anns = ann.value();
for (NamedQuery a : anns)
{
appMetadata.addQueryToCollection(a.name(), a.query(), false, clazz);
}
}
if (clazz.isAnnotationPresent(NamedNativeQuery.class))
{
NamedNativeQuery ann = (NamedNativeQuery) clazz.getAnnotation(NamedNativeQuery.class);
appMetadata.addQueryToCollection(ann.name(), ann.query(), true, clazz);
}
if (clazz.isAnnotationPresent(NamedNativeQueries.class))
{
NamedNativeQueries ann = (NamedNativeQueries) clazz.getAnnotation(NamedNativeQueries.class);
NamedNativeQuery[] anns = ann.value();
for (NamedNativeQuery a : anns)
{
appMetadata.addQueryToCollection(a.name(), a.query(), true, clazz);
}
}
} |
/**
 * Copies the current state of the dialog widgets into the export
 * parameter bean.
 */
private void updateExportParams() {
    m_exportParams.setExportAccountData(m_includeAccount.getValue().booleanValue());
    m_exportParams.setExportAsFiles(m_asFiles.getValue().booleanValue());
    m_exportParams.setExportProjectData(m_includeProject.getValue().booleanValue());
    m_exportParams.setExportResourceData(m_includeResource.getValue().booleanValue());
    m_exportParams.setInProject(m_modified.getValue().booleanValue());
    m_exportParams.setIncludeSystemFolder(m_includeSystem.getValue().booleanValue());
    m_exportParams.setIncludeUnchangedResources(m_includeUnchanged.getValue().booleanValue());
    // Resolve the target file name relative to the WEB-INF packages folder.
    String exportFileName = OpenCms.getSystemInfo().getAbsoluteRfsPathRelativeToWebInf(
        OpenCms.getSystemInfo().getPackagesRfsPath() + File.separator + (String)m_target.getValue());
    m_exportParams.setPath(exportFileName);
    m_exportParams.setRecursive(m_recursive.getValue().booleanValue());
    m_exportParams.setResources(getResources());
    if (m_changedSince.getValue() != null) {
        m_exportParams.setContentAge(m_changedSince.getDate().getTime());
    } else {
        // No date selected: include resources regardless of age.
        m_exportParams.setContentAge(0);
    }
}
/**
 * Updates the database row corresponding to the given entity, using
 * optimistic locking when a version column is mapped.
 *
 * @param entity the entity to update; must have its primary key set
 * @return the same entity, with its version field incremented when a
 *     version column is mapped
 * @throws RowNotFoundException if no row with the entity's primary key exists
 * @throws OptimisticLockException if the row exists but its version did not
 *     match (the in-memory entity is stale)
 */
public T update(T entity) throws RowNotFoundException, OptimisticLockException {
    if (!hasPrimaryKey(entity)) {
        throw new RuntimeException(String.format("Tried to update entity of type %s without a primary key", entity
            .getClass().getSimpleName()));
    }
    UpdateCreator update = new UpdateCreator(table);
    update.whereEquals(idColumn.getColumnName(), getPrimaryKey(entity));
    if (versionColumn != null) {
        // Optimistic locking: bump the version and require the old value
        // in the WHERE clause.
        update.set(versionColumn.getColumnName() + " = " + versionColumn.getColumnName() + " + 1");
        update.whereEquals(versionColumn.getColumnName(), getVersion(entity));
    }
    for (Column column : columns) {
        if (!column.isReadOnly()) {
            update.setValue(column.getColumnName(), getFieldValueAsColumn(entity, column));
        }
    }
    int rows = new JdbcTemplate(ormConfig.getDataSource()).update(update);
    if (rows == 1) {
        if (versionColumn != null) {
            // Keep the in-memory version in sync with the incremented row.
            ReflectionUtils.setFieldValue(entity, versionColumn.getFieldName(), getVersion(entity) + 1);
        }
        return entity;
    } else if (rows > 1) {
        throw new RuntimeException(
            String.format("Updating table %s with id %s updated %d rows. There must be a mapping problem. Is column %s really the primary key?",
                table, getPrimaryKey(entity), rows, idColumn));
    } else {
        //
        // Updated zero rows. This could be because our ID is wrong, or
        // because our object is out-of date. Let's try querying just by ID.
        //
        SelectCreator selectById = new SelectCreator()
            .column("count(*)")
            .from(table)
            .whereEquals(idColumn.getColumnName(), getPrimaryKey(entity));
        rows = new JdbcTemplate(ormConfig.getDataSource()).query(selectById, new ResultSetExtractor<Integer>() {
            @Override
            public Integer extractData(ResultSet rs) throws SQLException, DataAccessException {
                rs.next();
                return rs.getInt(1);
            }
        });
        if (rows == 0) {
            throw new RowNotFoundException(table, getPrimaryKey(entity));
        } else {
            throw new OptimisticLockException(table, getPrimaryKey(entity));
        }
    }
}
/**
 * Creates a DCERPC handle for the given binding URL.
 *
 * <p>Only the named-pipe transport ({@code ncacn_np:}) is supported.</p>
 *
 * @param url the DCERPC binding URL
 * @param tc the CIFS context to use
 * @param unshared whether to use an exclusive (unshared) transport connection
 * @return the handle
 * @throws MalformedURLException if the URL cannot be parsed
 * @throws DcerpcException if the transport is not supported
 */
public static DcerpcHandle getHandle ( String url, CIFSContext tc, boolean unshared ) throws MalformedURLException, DcerpcException {
    if ( url.startsWith("ncacn_np:") ) {
        return new DcerpcPipeHandle(url, tc, unshared);
    }
    throw new DcerpcException("DCERPC transport not supported: " + url);
}
python | def _add_row(self, index):
"""
Add a new row to the DataFrame
:param index: index of the new row
:return: nothing
"""
self._index.append(index)
for c, _ in enumerate(self._columns):
self._data[c].append(None) |
/**
 * Returns the arc cosine of {@code x}.
 *
 * @param x the argument
 * @return {@code NaN} if {@code x} is NaN or outside the range [-1, 1],
 *     otherwise acos(x) evaluated in {@code x}'s math context
 */
public static BigFloat acos(BigFloat x) {
    return x.isNaN() || (!isRangeAbs1(x)) ? NaN :
        x.context.valueOf(BigDecimalMath.acos(x.value, x.context.mathContext));
}
/**
 * Returns whether the given string matches the word pattern.
 *
 * <p>The pattern is compiled lazily on first use. NOTE(review): the lazy
 * initialization is not synchronized, so concurrent first calls may
 * compile the pattern more than once; this is harmless but confirm it is
 * intended.</p>
 *
 * @param wordString the string to test
 * @return true if the whole string matches the word pattern
 */
public static boolean isWord(String wordString) {
    return Optional.ofNullable(wordPattern)
        .orElseGet(() -> wordPattern = Pattern.compile(WordPattern))
        .matcher(wordString).matches();
}
/**
 * Initializes the message processor for the given messaging engine:
 * caches the engine reference, creates the MP I/O layer and populates the
 * singleton factory maps used throughout message processing.
 *
 * @param engine the owning messaging engine
 */
@Override
public void initialize(JsMessagingEngine engine) {
    if (TraceComponent.isAnyTracingEnabled()
        && tc.isEntryEnabled())
        SibTr.entry(tc, "initialize", engine);

    this._engine = engine;

    //Venu mock mock
    //This is needed for MELockOwner in PersistentMessageStoreImpl' initialize. However the MELockOwer object not really relevant
    // as uuid check is disabled in PersistentMessageStoreImpl
    // In case of warm start, after reconstiturion _engine Messaging Engine Uuid is reset from restored Uuid
    this._engine.setMEUUID(new SIBUuid8());
    _mpio = new MPIO(this);

    _meFactories = new HashMap<String, Object>();
    try {
        _meFactories.put(SIMPConstants.JS_MBEAN_FACTORY, engine.getMBeanFactory());
    } catch (Exception e) {
        // FFDC
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.impl.MessageProcessor.initialize",
            "1:1202:1.445",
            this);
        // Wrap the failure as an SIErrorException, trace it and rethrow.
        SIErrorException finalE =
            new SIErrorException(
                nls.getFormattedMessage(
                    "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] { "com.ibm.ws.sib.processor.impl.MessageProcessor", "1:1209:1.445", e },
                    null),
                e);
        SibTr.exception(tc, finalE);
        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
            new Object[] { "com.ibm.ws.sib.processor.impl.MessageProcessor", "1:1215:1.445", SIMPUtils.getStackTrace(e) });
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "initialize", finalE);
        throw finalE;
    }

    // Obtain instances of all singleton factories to be used in mp.
    // Initialize is the only place we can get the instance and handle
    // and exceptions correctly.
    if (_factories == null) {
        _factories = new HashMap<String, Object>();
        try {
            _factories.put(SIMPConstants.JS_DESTINATION_ADDRESS_FACTORY,
                JsMainAdminComponentImpl.getSIDestinationAddressFactory());
            _factories.put(SIMPConstants.SI_DESTINATION_ADDRESS_FACTORY,
                JsMainAdminComponentImpl.getSIDestinationAddressFactory());
            _factories.put(SIMPConstants.CONTROL_MESSAGE_FACTORY,
                ControlMessageFactory.getInstance());
            _factories.put(SIMPConstants.JS_ADMIN_FACTORY, JsAdminFactory
                .getInstance());
            // Matching instance
            _factories.put(SIMPConstants.MATCHING_INSTANCE, Matching
                .getInstance());
            // SelectionCriteria
            _factories.put(SIMPConstants.JS_SELECTION_CRITERIA_FACTORY,
                JsMainAdminComponentImpl.getSelectionCriteriaFactory());
            _factories.put(SIMPConstants.JS_MESSAGE_HANDLE_FACTORY,
                JsMessageHandleFactory.getInstance());
        } catch (Exception e) {
            // FFDC
            FFDCFilter
                .processException(
                    e,
                    "com.ibm.ws.sib.processor.impl.MessageProcessor.initialize",
                    "1:1261:1.445", this);
            SIErrorException finalE = new SIErrorException(
                nls
                    .getFormattedMessage(
                        "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                        new Object[] {
                            "com.ibm.ws.sib.processor.impl.MessageProcessor",
                            "1:1268:1.445", e }, null), e);
            SibTr.exception(tc, finalE);
            SibTr
                .error(
                    tc,
                    "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] {
                        "com.ibm.ws.sib.processor.impl.MessageProcessor",
                        "1:1274:1.445",
                        SIMPUtils.getStackTrace(e) });
            if (TraceComponent.isAnyTracingEnabled()
                && tc.isEntryEnabled())
                SibTr.exit(tc, "initialize", finalE);
            throw finalE;
        }
    }

    // Determine if messages on deleted destinations should be discarded
    // _discardMsgsAfterQueueDeletion =
    // (_engine.getBus()).getBoolean(CT_SIBus.DISCARDMSGSAFTERQUEUEDELETION_NAME,
    // CT_SIBus.DISCARDMSGSAFTERQUEUEDELETION_DEFAULT);
    // Venu temp
    // this property is not configured in Libery profile and Admin has not
    // exposed this...
    // so hardcoding to false.
    _discardMsgsAfterQueueDeletion = false;

    if (TraceComponent.isAnyTracingEnabled()
        && tc.isEntryEnabled())
        SibTr.exit(tc, "initialize");
}
/**
 * Constructs a call statement {@code instanceName.memberName(args...)} on
 * a local variable of the given type.
 *
 * @param instanceType the type of the receiver variable (cloned into the IR)
 * @param instanceName the name of the receiver variable
 * @param memberName the name of the member to call
 * @param args the call arguments
 * @return the constructed call statement node
 */
public ACallObjectExpStmIR consInstanceCallStm(STypeIR instanceType,
    String instanceName, String memberName, SExpIR... args)
{
    // Build the receiver: a local identifier variable expression.
    AIdentifierVarExpIR instance = new AIdentifierVarExpIR();
    instance.setName(instanceName);
    instance.setType(instanceType.clone());
    instance.setIsLocal(true);

    // Build the call statement itself; statements have void type.
    ACallObjectExpStmIR call = new ACallObjectExpStmIR();
    call.setType(new AVoidTypeIR());
    call.setFieldName(memberName);
    call.setObj(instance);

    for (SExpIR arg : args)
    {
        call.getArgs().add(arg);
    }

    return call;
}
/**
 * Unsets the given properties on multiple GSLB site resources in one
 * bulk request.
 *
 * @param client the nitro service client
 * @param sitename the names of the sites to modify
 * @param args the property names to unset
 * @return the bulk responses, or {@code null} if {@code sitename} is null
 *     or empty
 * @throws Exception if the nitro request fails
 */
public static base_responses unset(nitro_service client, String sitename[], String args[]) throws Exception {
    base_responses result = null;
    if (sitename != null && sitename.length > 0) {
        // Build one resource stub per site name for the bulk request.
        gslbsite unsetresources[] = new gslbsite[sitename.length];
        for (int i=0;i<sitename.length;i++){
            unsetresources[i] = new gslbsite();
            unsetresources[i].sitename = sitename[i];
        }
        result = unset_bulk_request(client, unsetresources,args);
    }
    return result;
}
def is_real_floating_dtype(dtype):
    """Return ``True`` if ``dtype`` is a real floating point type.

    Complex floating point types return ``False``.
    """
    dtype = np.dtype(dtype)
    # ``np.issubsctype`` is deprecated and removed in NumPy 2.0; use
    # ``np.issubdtype`` instead. ``dtype.base`` unwraps subarray dtypes
    # to their element type (it is the dtype itself otherwise).
    return np.issubdtype(dtype.base, np.floating)
/**
 * Creates a thread-local proxy for the given JAX-RS context type: a
 * dedicated implementation for the well-known context interfaces, or a
 * dynamic {@link java.lang.reflect.Proxy} for anything else.
 *
 * @param <T> the context type
 * @param type the context class to proxy
 * @return a thread-local proxy implementing {@code type}
 */
@SuppressWarnings("unchecked")
public static <T> ThreadLocalProxy<T> createThreadLocalProxy(Class<T> type) {
    ThreadLocalProxy<?> proxy = null;
    if (UriInfo.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalUriInfo();
    } else if (HttpHeaders.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalHttpHeaders();
    } else if (ProtocolHeaders.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalProtocolHeaders();
    } else if (SecurityContext.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalSecurityContext();
    } else if (ContextResolver.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalContextResolver<>();
    } else if (Request.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalRequest();
    } else if (Providers.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalProviders();
    } else if (MessageContext.class.isAssignableFrom(type)) {
        proxy = new ThreadLocalMessageContext();
        // Liberty Change for CXF Begin
    } else if (type.getName().equals(MessageContext.class.getName())) {
        // Same class name but loaded by a different class loader: bridge
        // with a dynamic proxy built on a loader that can see both sides.
        MessageContextProxyClassLoader loader = new MessageContextProxyClassLoader(getClassLoader(Proxy.class), getClassLoader(type), getClassLoader(ThreadLocalProxy.class));
        proxy = (ThreadLocalProxy<T>) Proxy.newProxyInstance(loader,
            new Class[] { type, ThreadLocalProxy.class },
            new ProxyInvocationHandler(new ThreadLocalMessageContext()));
        // Liberty Change for CXF Begin
    }
    if (proxy == null && isServletApiContext(type.getName())) {
        proxy = createThreadLocalServletApiContext(type.getName());
    }
    if (proxy == null) {
        // Fallback: dynamic proxy for arbitrary context types, using a
        // cached proxy class loader.
        ClassLoader loader
            = proxyClassLoaderCache.getProxyClassLoader(Proxy.class.getClassLoader(),
                new Class<?>[]{Proxy.class, ThreadLocalProxy.class, type});
        if (!canSeeAllClasses(loader, new Class<?>[]{Proxy.class, ThreadLocalProxy.class, type})) {
            // Liberty change start - (LOG to Tr)
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "find a loader from ProxyClassLoader cache,"
                    + " but can't see all interfaces");
                Tr.debug(tc, "create a new one with parent " + Proxy.class.getClassLoader());
            }
            //Liberty change end
            proxyClassLoaderCache.removeStaleProxyClassLoader(type);
            proxyClassLoaderCache.getProxyClassLoader(Proxy.class.getClassLoader(),
                new Class<?>[]{Proxy.class, ThreadLocalProxy.class, type});
        }
        return (ThreadLocalProxy<T>)Proxy.newProxyInstance(loader,
            new Class[] {type, ThreadLocalProxy.class },
            new ThreadLocalInvocationHandler<T>());
    }
    return (ThreadLocalProxy<T>) proxy;
}
/**
 * Returns the handle of the attribute with the given expanded type on the
 * element identified by {@code nodeHandle}.
 *
 * <p>Attribute and namespace records are stored immediately after their
 * owner element, so this scans forward from the element until a node that
 * is neither an attribute nor a namespace is found.</p>
 *
 * @param nodeHandle the element's node handle
 * @param attType the expanded type id of the attribute to look for
 * @return the attribute's node handle, or {@code DTM.NULL} if not found
 */
protected final int getTypedAttribute(int nodeHandle, int attType)
{
    int nodeID = makeNodeIdentity(nodeHandle);
    if (nodeID == DTM.NULL)
        return DTM.NULL;
    int type = _type2(nodeID);
    if (DTM.ELEMENT_NODE == type)
    {
        int expType;
        while (true)
        {
            nodeID++;
            expType = _exptype2(nodeID);
            if (expType != DTM.NULL)
                type = m_extendedTypes[expType].getNodeType();
            else
                // End of the node records.
                return DTM.NULL;
            if (type == DTM.ATTRIBUTE_NODE)
            {
                if (expType == attType) return makeNodeHandle(nodeID);
            }
            else if (DTM.NAMESPACE_NODE != type)
            {
                // Past the attribute/namespace list: no such attribute.
                break;
            }
        }
    }
    return DTM.NULL;
}
def resolve(self, graph):
    """
    Resolve a scoped component, respecting the graph cache.
    """
    existing = graph.get(self.scoped_key)
    if existing:
        # Cache hit: reuse the previously created component.
        return existing
    created = self.create(graph)
    graph.assign(self.scoped_key, created)
    return created
/**
 * Runs the given action, either inline (when no executor service is
 * available) or via the shared executor service.
 *
 * @param action the action to run
 * @param addToQueue true to serialize the action behind the pending-action
 *     queue, false to submit it for immediate execution
 */
@Trivial
private void performAction(Runnable action, boolean addToQueue) {
    ExecutorService exec = executorService.getService();
    if (exec == null) {
        // If we can't find the executor service, we have to run it in place.
        action.run();
    } else {
        // If we can find the executor service, we'll add the action to the queue.
        // If the actionFuture is null (no pending actions) and the configFuture is null (no
        // pending configuration updates), we'll submit the actionsRunner to the executor
        // service to drain the queue
        //
        // configFuture is used to avoid bouncing the endpoint multiple times because of a
        // single configuration update.
        //
        // actionFuture is only set to a non-null value by kicking off the executor service here.
        // actionsRunner syncs on actionQueue, so we can't add any new actions while we are
        // draining the queue. When the queue is empty, actionFuture is explicitly set to null.
        //
        // Long story short, it prevents us from kicking off multiple executors which could run in
        // random order.
        if (addToQueue) {
            synchronized (actionQueue) {
                actionQueue.add(action);
                if ((actionFuture == null) && (configFuture == null)) {
                    actionFuture = exec.submit(actionsRunner);
                }
            }
        } else {
            // Schedule immediately
            exec.submit(action);
        }
    }
}
/**
 * Splits this join tree into a list of sub-trees: the tree rooted at this
 * node, followed recursively by the sub-trees rooted at its leaf nodes.
 *
 * @return the list of sub-trees, starting with this node's own sub-tree
 */
public List<JoinNode> extractSubTrees() {
    List<JoinNode> subTrees = new ArrayList<>();
    // Extract the first sub-tree starting at the root
    subTrees.add(this);
    List<JoinNode> leafNodes = new ArrayList<>();
    extractSubTree(leafNodes);
    // Continue with the leafs
    for (JoinNode leaf : leafNodes) {
        subTrees.addAll(leaf.extractSubTrees());
    }
    return subTrees;
}
/**
 * Renders a disparity image into a color {@link BufferedImage},
 * dispatching on the disparity image's pixel type.
 *
 * @param disparity the disparity image (integer types or GrayF32)
 * @param dst the output image, or null to allocate a new RGB image
 * @param minDisparity minimum disparity value
 * @param maxDisparity maximum disparity value
 * @param invalidColor RGB color used for invalid disparity values
 * @return the rendered image
 */
public static BufferedImage disparity(ImageGray disparity, BufferedImage dst,
        int minDisparity, int maxDisparity, int invalidColor) {
    if( dst == null )
        dst = new BufferedImage(disparity.getWidth(),disparity.getHeight(),BufferedImage.TYPE_INT_RGB);

    if (disparity.getDataType().isInteger()) {
        return disparity((GrayI) disparity, dst, minDisparity, maxDisparity, invalidColor);
    } else if (disparity instanceof GrayF32) {
        return disparity((GrayF32) disparity, dst, minDisparity, maxDisparity, invalidColor);
    } else {
        // Unhandled pixel type.
        throw new RuntimeException("Add support");
    }
}
/**
 * Getter for the {@code TimexValueLB} feature (auto-generated JCas
 * accessor style).
 *
 * @return the lower-bound timex value string
 */
public String getTimexValueLB() {
    // Guard: raise a descriptive error if the feature is missing from the type system.
    if (Timex3Interval_Type.featOkTst && ((Timex3Interval_Type)jcasType).casFeat_TimexValueLB == null)
        jcasType.jcas.throwFeatMissing("TimexValueLB", "de.unihd.dbs.uima.types.heideltime.Timex3Interval");
    return jcasType.ll_cas.ll_getStringValue(addr, ((Timex3Interval_Type)jcasType).casFeatCode_TimexValueLB);
}
def stop_codon_spliced_offsets(self):
    """
    Offsets from the start of the spliced mRNA transcript of the
    nucleotides in the stop codon.
    """
    offsets = [self.spliced_offset(position)
               for position in self.stop_codon_positions]
    return self._contiguous_offsets(offsets)
/**
 * Fetches DNS record statistics from the appliance.
 *
 * @param service the nitro service to query
 * @return the statistics resources
 * @throws Exception if the nitro request fails
 */
public static dnsrecords_stats[] get(nitro_service service) throws Exception{
    dnsrecords_stats obj = new dnsrecords_stats();
    dnsrecords_stats[] response = (dnsrecords_stats[])obj.stat_resources(service);
    return response;
}
/**
 * Asserts that {@code subType} can be assigned to {@code superType}.
 *
 * @param superType the type to check against; must not be null
 * @param subType the type to check; null fails the check
 * @param errorMsgTemplate message template used when the check fails
 * @param params parameters filled into the template
 * @throws IllegalArgumentException if {@code subType} is null or not
 *     assignable to {@code superType}
 */
public static void isAssignable(Class<?> superType, Class<?> subType, String errorMsgTemplate, Object... params) throws IllegalArgumentException {
    notNull(superType, "Type to check against must not be null");
    if (subType == null || !superType.isAssignableFrom(subType)) {
        throw new IllegalArgumentException(StrUtil.format(errorMsgTemplate, params));
    }
}
def modSymbolsFromLabelInfo(labelDescriptor):
    """Return the set of all modification symbols used in the labelDescriptor.

    :param labelDescriptor: :class:`LabelDescriptor` describing the label
        setup of an experiment
    :returns: set of modification symbol strings (empty symbols excluded)
    """
    symbols = set()
    for stateEntry in viewvalues(labelDescriptor.labels):
        for positionEntry in viewvalues(stateEntry['aminoAcidLabels']):
            symbols.update(
                symbol for symbol in aux.toList(positionEntry)
                if symbol != ''
            )
    return symbols
/**
 * Gets a preset by name, delegating to the request-object overload.
 *
 * @param name the preset name
 * @return the preset response
 */
public GetPresetResponse getPreset(String name) {
    GetPresetRequest request = new GetPresetRequest();
    request.setName(name);
    return getPreset(request);
}
def next_channel_from_routes(
        available_routes: List[RouteState],
        channelidentifiers_to_channels: ChannelMap,
        transfer_amount: PaymentAmount,
) -> Optional[NettingChannelState]:
    """ Returns the first channel that can be used to start the transfer.

    The routing service can race with local changes, so the recommended
    routes must be validated.
    """
    for route in available_routes:
        channel_state = channelidentifiers_to_channels.get(
            route.channel_identifier,
        )

        # The route may refer to a channel we no longer know about.
        if not channel_state:
            continue

        # Only open channels can carry new transfers.
        if channel.get_status(channel_state) != CHANNEL_STATE_OPENED:
            continue

        # Cap the number of simultaneously pending transfers per channel.
        pending = channel.get_number_of_pending_transfers(channel_state.our_state)
        if pending >= MAXIMUM_PENDING_TRANSFERS:
            continue

        # The transfer must fit into the channel's distributable capacity.
        distributable = channel.get_distributable(
            channel_state.our_state,
            channel_state.partner_state,
        )
        if transfer_amount > distributable:
            continue

        if channel.is_valid_amount(channel_state.our_state, transfer_amount):
            return channel_state

    return None
def connect(self):
    """Initialize the database connection."""
    client = self._create_client()
    self._client = client
    # Databases are exposed as attributes on the client object.
    self._db = getattr(client, self._db_name)
    self._generic_dao = GenericDAO(client, self._db_name)
/**
 * Visits a compilation unit by scanning its package declaration, imports
 * and type declarations in order, reducing the partial results.
 *
 * @param node the compilation unit to scan
 * @param p the visitor parameter
 * @return the reduced result of scanning all children
 */
@Override
public R visitCompilationUnit(CompilationUnitTree node, P p) {
    R r = scan(node.getPackage(), p);
    r = scanAndReduce(node.getImports(), p, r);
    r = scanAndReduce(node.getTypeDecls(), p, r);
    return r;
}
def dmtoind(dm, f_min, f_max, nchan0, inttime, it):
    """
    Given FDMT state, return indices to slice partial FDMT solution and sump to a given DM
    """
    # NOTE(review): this function looks unfinished. Several names used
    # below are not defined in this function or its parameters
    # (``dF``, ``iteration_num``, ``i_dT``), the ``dm`` and ``inttime``
    # parameters are never used, and ``shift`` is built but never
    # returned -- TODO confirm intended behavior before relying on this.
    # maxDT = dmtodt(dm) # need to write
    if it>0:
        correction = dF/2.
    else:
        correction = 0
    shift = []
    # NOTE(review): presumably ``iteration_num`` was meant to be ``it``.
    nchan = nchan0/2**(iteration_num)
    for i_F in range(nchan):
        # Sub-band edges for this frequency channel.
        f_start = (f_max - f_min)/float(nchan) * (i_F) + f_min
        f_end = (f_max - f_min)/float(nchan) *(i_F+1) + f_min
        f_middle = (f_end - f_start)/2. + f_start - correction
        f_middle_larger = (f_end - f_start)/2 + f_start + correction
        # Dispersion-delay split points following the 1/f^2 dispersion law.
        dT_middle = int(round(i_dT * (1./f_middle**2 - 1./f_start**2)/(1./f_end**2 - 1./f_start**2)))
        dT_middle_larger = int(round(i_dT * (1./f_middle_larger**2 - 1./f_start**2)/(1./f_end**2 - 1./f_start**2)))
        shift.append( (-dT_middle_larger, i_F) )
/**
 * Returns the display name for the given ID in the default DISPLAY locale.
 *
 * @param ID the identifier to look up
 * @return the localized display name
 */
public final static String getDisplayName(String ID) {
    return getDisplayName(ID, ULocale.getDefault(Category.DISPLAY));
}
/**
 * Initializes the validator with its annotation and execution context.
 *
 * @param annotation the {@code EntityExistValidator} annotation instance
 * @param entityManager the entity manager used for existence checks
 * @param mode the DAO mode this validator runs in
 * @param evaluationTime when the validation is evaluated
 */
@Override
public void initialize(EntityExistValidator annotation, EntityManager entityManager, DAOMode mode, DAOValidatorEvaluationTime evaluationTime) {
    // Save the parameters for later use.
    this.annotation = annotation;
    this.entityManager = entityManager;
    this.systemDAOMode = mode;
    this.systemEvaluationTime = evaluationTime;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.