language | func_code_string
---|---|
java | public static double diffNormF(DMatrixD1 a , DMatrixD1 b )
{
if( a.numRows != b.numRows || a.numCols != b.numCols ) {
throw new IllegalArgumentException("Both matrices must have the same shape.");
}
final int size = a.getNumElements();
DMatrixRMaj diff = new DMatrixRMaj(size,1);
for( int i = 0; i < size; i++ ) {
diff.set(i , b.get(i) - a.get(i));
}
return NormOps_DDRM.normF(diff);
} |
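This computes the Frobenius norm of the element-wise difference `b - a`. As a quick cross-check of the same quantity (a sketch in numpy, independent of the EJML types above):

```python
import numpy as np

a = np.array([[1.0, 2.0], [3.0, 4.0]])
b = np.array([[2.0, 4.0], [6.0, 8.0]])
# Frobenius norm of the difference: sqrt(1 + 4 + 9 + 16)
print(np.linalg.norm(b - a, 'fro'))  # 5.4772...
```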
java | public void marshall(RemoveTagsFromCertificateRequest removeTagsFromCertificateRequest, ProtocolMarshaller protocolMarshaller) {
if (removeTagsFromCertificateRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(removeTagsFromCertificateRequest.getCertificateArn(), CERTIFICATEARN_BINDING);
protocolMarshaller.marshall(removeTagsFromCertificateRequest.getTags(), TAGS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public Canvas draw (Drawable image, float x, float y) {
return draw(image, x, y, image.width(), image.height());
} |
java | public AnnotationTypeElementDoc wrap(AnnotationTypeElementDoc source) {
if (source == null || source instanceof Proxy<?> || !(source instanceof AnnotationTypeElementDocImpl)) {
return source;
}
return new AnnotationTypeElementDocWrapper((AnnotationTypeElementDocImpl) source);
} |
java | @SuppressWarnings("unchecked")
@Override
public Boolean execute() {
validateCommand();
User sender = CommandUtil.getSfsUser(user, api);
sender = sender != null ? sender : newFakeUser();
ISFSObject params = createResponseParams();
getRequestHandler().handleClientRequest(sender, params);
return Boolean.TRUE;
} |
python | def find(self, *args, **kwargs):
"""collection find method
"""
wrapper = kwargs.pop('wrapper', False)
if wrapper is True:
return self._wrapper_find(*args, **kwargs)
return self.__collect.find(*args, **kwargs) |
python | def _get_or_create_student_item(student_item_dict):
"""Gets or creates a Student Item that matches the values specified.
Attempts to get the specified Student Item. If it does not exist, the
specified parameters are validated, and a new Student Item is created.
Args:
student_item_dict (dict): The dict containing the student_id, item_id,
course_id, and item_type that uniquely defines a student item.
Returns:
StudentItem: The student item that was retrieved or created.
Raises:
SubmissionInternalError: Thrown if there was an internal error while
attempting to create or retrieve the specified student item.
SubmissionRequestError: Thrown if the given student item parameters fail
validation.
Examples:
>>> student_item_dict = dict(
...     student_id="Tim",
...     item_id="item_1",
...     course_id="course_1",
...     item_type="type_one"
... )
>>> _get_or_create_student_item(student_item_dict)
{'item_id': 'item_1', 'item_type': 'type_one', 'course_id': 'course_1', 'student_id': 'Tim'}
"""
try:
try:
return StudentItem.objects.get(**student_item_dict)
except StudentItem.DoesNotExist:
student_item_serializer = StudentItemSerializer(
data=student_item_dict
)
if not student_item_serializer.is_valid():
logger.error(
u"Invalid StudentItemSerializer: errors:{} data:{}".format(
student_item_serializer.errors,
student_item_dict
)
)
raise SubmissionRequestError(field_errors=student_item_serializer.errors)
return student_item_serializer.save()
except DatabaseError:
error_message = u"An error occurred creating student item: {}".format(
student_item_dict
)
logger.exception(error_message)
raise SubmissionInternalError(error_message) |
python | def collect_assets(result, force=False):
"""collect assets from meta file
Collecting assets only when the metafile is updated. If number of assets
are decreased, assets are reset and re-collect the assets.
"""
path_name = result.path_name
info_path = os.path.join(path_name, summary.CHAINERUI_ASSETS_METAFILE_NAME)
if not os.path.isfile(info_path):
return
start_idx = len(result.assets)
file_modified_at = datetime.datetime.fromtimestamp(os.path.getmtime(
info_path))
if start_idx > 0:
if result.assets[-1].file_modified_at == file_modified_at:
return
with open(info_path, 'r') as f:
info_list = json.load(f, object_pairs_hook=OrderedDict)
if len(info_list) < start_idx:
start_idx = 0
result.assets = []
for base_info in info_list[start_idx:]:
asset_path = base_info.pop('images', {})
asset_path.update(base_info.pop('audios', {}))
asset = Asset.create(
result_id=result.id, summary=base_info,
file_modified_at=file_modified_at)
for key, path in asset_path.items():
with open(os.path.join(path_name, path), 'rb') as f:
data = f.read()
content = Bindata(
asset_id=asset.id, name=path, tag=key, content=data)
asset.content_list.append(content)
result.assets.append(asset)
db.session.commit() |
python | def start_redis(node_ip_address,
redirect_files,
port=None,
redis_shard_ports=None,
num_redis_shards=1,
redis_max_clients=None,
redirect_worker_output=False,
password=None,
use_credis=None,
redis_max_memory=None,
include_java=False):
"""Start the Redis global state store.
Args:
node_ip_address: The IP address of the current node. This is only used
for recording the log filenames in Redis.
redirect_files: The list of (stdout, stderr) file pairs.
port (int): If provided, the primary Redis shard will be started on
this port.
redis_shard_ports: A list of the ports to use for the non-primary Redis
shards.
num_redis_shards (int): If provided, the number of Redis shards to
start, in addition to the primary one. The default value is one
shard.
redis_max_clients: If this is provided, Ray will attempt to configure
Redis with this maxclients number.
redirect_worker_output (bool): True if worker output should be
redirected to a file and false otherwise. Workers will have access
to this value when they start up.
password (str): Prevents external clients without the password
from connecting to Redis if provided.
use_credis: If True, additionally load the chain-replicated libraries
into the redis servers. Defaults to None, which means its value is
set by the presence of "RAY_USE_NEW_GCS" in os.environ.
redis_max_memory: The max amount of memory (in bytes) to allow each
redis shard to use. Once the limit is exceeded, redis will start
LRU eviction of entries. This only applies to the sharded redis
tables (task, object, and profile tables). By default, this is
capped at 10GB but can be set higher.
include_java (bool): If True, the raylet backend can also support
Java worker.
Returns:
A tuple of the address for the primary Redis shard, a list of
addresses for the remaining shards, and the processes that were
started.
"""
if len(redirect_files) != 1 + num_redis_shards:
raise ValueError("The number of redirect file pairs should be equal "
"to the number of redis shards (including the "
"primary shard) we will start.")
if redis_shard_ports is None:
redis_shard_ports = num_redis_shards * [None]
elif len(redis_shard_ports) != num_redis_shards:
raise Exception("The number of Redis shard ports does not match the "
"number of Redis shards.")
processes = []
if use_credis is None:
use_credis = ("RAY_USE_NEW_GCS" in os.environ)
if use_credis:
if password is not None:
# TODO(pschafhalter) remove this once credis supports
# authenticating Redis ports
raise Exception("Setting the `redis_password` argument is not "
"supported in credis. To run Ray with "
"password-protected Redis ports, ensure that "
"the environment variable `RAY_USE_NEW_GCS=off`.")
assert num_redis_shards == 1, (
"For now, RAY_USE_NEW_GCS supports 1 shard, and credis "
"supports 1-node chain for that shard only.")
if use_credis:
redis_executable = CREDIS_EXECUTABLE
# TODO(suquark): We need credis here because some symbols need to be
# imported from credis dynamically through dlopen when Ray is built
# with RAY_USE_NEW_GCS=on. We should remove them later for the primary
# shard.
# See src/ray/gcs/redis_module/ray_redis_module.cc
redis_modules = [CREDIS_MASTER_MODULE, REDIS_MODULE]
else:
redis_executable = REDIS_EXECUTABLE
redis_modules = [REDIS_MODULE]
redis_stdout_file, redis_stderr_file = redirect_files[0]
# Start the primary Redis shard.
port, p = _start_redis_instance(
redis_executable,
modules=redis_modules,
port=port,
password=password,
redis_max_clients=redis_max_clients,
# Below we use None to indicate no limit on the memory of the
# primary Redis shard.
redis_max_memory=None,
stdout_file=redis_stdout_file,
stderr_file=redis_stderr_file)
processes.append(p)
redis_address = address(node_ip_address, port)
# Register the number of Redis shards in the primary shard, so that clients
# know how many redis shards to expect under RedisShards.
primary_redis_client = redis.StrictRedis(
host=node_ip_address, port=port, password=password)
primary_redis_client.set("NumRedisShards", str(num_redis_shards))
# Put the redirect_worker_output bool in the Redis shard so that workers
# can access it and know whether or not to redirect their output.
primary_redis_client.set("RedirectOutput", 1
if redirect_worker_output else 0)
# Put the include_java bool in the primary redis server, so that other
# nodes can access it and know whether to enable cross-language support.
primary_redis_client.set("INCLUDE_JAVA", 1 if include_java else 0)
# Store version information in the primary Redis shard.
_put_version_info_in_redis(primary_redis_client)
# Calculate the redis memory.
system_memory = ray.utils.get_system_memory()
if redis_max_memory is None:
redis_max_memory = min(
ray_constants.DEFAULT_REDIS_MAX_MEMORY_BYTES,
max(
int(system_memory * 0.2),
ray_constants.REDIS_MINIMUM_MEMORY_BYTES))
if redis_max_memory < ray_constants.REDIS_MINIMUM_MEMORY_BYTES:
raise ValueError("Attempting to cap Redis memory usage at {} bytes, "
"but the minimum allowed is {} bytes.".format(
redis_max_memory,
ray_constants.REDIS_MINIMUM_MEMORY_BYTES))
# Start other Redis shards. Each Redis shard logs to a separate file,
# prefixed by "redis-<shard number>".
redis_shards = []
for i in range(num_redis_shards):
redis_stdout_file, redis_stderr_file = redirect_files[i + 1]
if use_credis:
redis_executable = CREDIS_EXECUTABLE
# It is important to load the credis module BEFORE the ray module,
# as the latter contains an extern declaration that the former
# supplies.
redis_modules = [CREDIS_MEMBER_MODULE, REDIS_MODULE]
else:
redis_executable = REDIS_EXECUTABLE
redis_modules = [REDIS_MODULE]
redis_shard_port, p = _start_redis_instance(
redis_executable,
modules=redis_modules,
port=redis_shard_ports[i],
password=password,
redis_max_clients=redis_max_clients,
redis_max_memory=redis_max_memory,
stdout_file=redis_stdout_file,
stderr_file=redis_stderr_file)
processes.append(p)
shard_address = address(node_ip_address, redis_shard_port)
redis_shards.append(shard_address)
# Store redis shard information in the primary redis shard.
primary_redis_client.rpush("RedisShards", shard_address)
if use_credis:
# Configure the chain state. The way it is intended to work is
# the following:
#
# PRIMARY_SHARD
#
# SHARD_1 (master replica) -> SHARD_1 (member replica)
# -> SHARD_1 (member replica)
#
# SHARD_2 (master replica) -> SHARD_2 (member replica)
# -> SHARD_2 (member replica)
# ...
#
#
# If we have credis members in future, their modules should be:
# [CREDIS_MEMBER_MODULE, REDIS_MODULE], and they will be initialized by
# execute_command("MEMBER.CONNECT_TO_MASTER", node_ip_address, port)
#
# Currently we have num_redis_shards == 1, so only one chain will be
# created, and the chain only contains master.
# TODO(suquark): Currently, this is not correct because we are
# using the master replica as the primary shard. This should be
# fixed later. I had tried to fix it but failed because of heartbeat
# issues.
primary_client = redis.StrictRedis(
host=node_ip_address, port=port, password=password)
shard_client = redis.StrictRedis(
host=node_ip_address, port=redis_shard_port, password=password)
primary_client.execute_command("MASTER.ADD", node_ip_address,
redis_shard_port)
shard_client.execute_command("MEMBER.CONNECT_TO_MASTER",
node_ip_address, port)
return redis_address, redis_shards, processes |
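A minimal call sketch under the contract above (file paths are illustrative): the function expects one (stdout, stderr) pair for the primary shard plus one per additional shard, so `num_redis_shards=2` needs three pairs.

```python
# Hypothetical redirect files: 1 primary + 2 shards => 3 pairs.
redirect_files = [
    ("/tmp/redis.out", "/tmp/redis.err"),
    ("/tmp/redis-shard-0.out", "/tmp/redis-shard-0.err"),
    ("/tmp/redis-shard-1.out", "/tmp/redis-shard-1.err"),
]
redis_address, redis_shards, processes = start_redis(
    "127.0.0.1", redirect_files, num_redis_shards=2)
```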
python | def setCursorSize(self, p):
'sets cursor box size based on diagonal corner p'
self.cursorBox = BoundingBox(self.cursorBox.xmin, self.cursorBox.ymin, p.x, p.y)
self.cursorBox.w = max(self.cursorBox.w, self.canvasCharWidth)
self.cursorBox.h = max(self.cursorBox.h, self.canvasCharHeight) |
python | def nack(self, id, subscription, transaction=None, receipt=None):
"""
Let the server know that a message was not consumed.
:param str id: the unique id of the message to nack
:param str subscription: the subscription this message is associated with
:param str transaction: include this nack in a named transaction
:param str receipt: request a receipt for this frame with the given receipt id
"""
assert id is not None, "'id' is required"
assert subscription is not None, "'subscription' is required"
headers = {HDR_MESSAGE_ID: id, HDR_SUBSCRIPTION: subscription}
if transaction:
headers[HDR_TRANSACTION] = transaction
if receipt:
headers[HDR_RECEIPT] = receipt
self.send_frame(CMD_NACK, headers) |
java | private String buildObjectKey(final Object object) {
String objectKey = null;
final Class<?> objectClass = object.getClass();
final KeyGenerator typeGenerator = objectClass.getAnnotation(KeyGenerator.class);
if (typeGenerator == null) {
objectKey = generateAggregatedKey(object);
} else {
objectKey = generateTypeKey(object, typeGenerator);
}
// If neither a type KeyGenerator nor a method generator was used, fall back to the default toString method
if (objectKey == null) {
objectKey = object.toString();
}
return objectKey;
} |
python | def _restoreItemFromArchive(self):
"""Callback for item menu."""
if self._current_item is None:
return
dp = getattr(self._current_item, '_dp', None)
if dp and dp.archived:
dp.restore_from_archive(parent=self) |
java | @FFDCIgnore(NullPointerException.class)
protected void addLibraryFile(File f) {
if (!f.exists()) {
if (tc.isWarningEnabled()) {
Tr.warning(tc, "cls.library.archive", f, new FileNotFoundException(f.getName()));
}
return;
}
// Skip files that are not archives of some sort.
if (!f.isDirectory() && !isArchive(f))
return;
//this area subject to refactor following shared lib rework..
//ideally the shared lib code will start passing us ArtifactContainers, and it
//will own the management of the ACF via DS.
//NASTY.. need to use DS to get the ACF, not OSGi backdoor ;p
BundleContext bc = FrameworkUtil.getBundle(ContainerClassLoader.class).getBundleContext();
ServiceReference<ArtifactContainerFactory> acfsr = bc.getServiceReference(ArtifactContainerFactory.class);
if (acfsr != null) {
ArtifactContainerFactory acf = bc.getService(acfsr);
if (acf != null) {
//NASTY.. using this bundle as the cache dir location for the data file..
try {
ArtifactContainer ac = acf.getContainer(bc.getBundle().getDataFile(""), f);
smartClassPath.addArtifactContainer(ac);
} catch (NullPointerException e) {
// TODO completed under task 74097
if (tc.isDebugEnabled()) {
Tr.debug(tc, "Exception while adding files to classpath", e);
}
if (tc.isInfoEnabled()) {
Tr.info(tc, "cls.library.file.forbidden", f);
}
}
}
}
} |
java | @Override
public HTCashbillJobState getJobState(String CorpNum, String JobID, String UserID)
throws PopbillException {
if ( JobID.length() != 18 )
throw new PopbillException(-99999999, "The job ID is not valid.");
return httpget("/HomeTax/Cashbill/" + JobID + "/State", CorpNum, UserID, HTCashbillJobState.class);
} |
python | def leaf_asts(self):
"""
Return an iterator over the leaf ASTs.
"""
seen = set()
ast_queue = deque([self])
while ast_queue:
ast = ast_queue.pop()
if isinstance(ast, Base) and id(ast.cache_key) not in seen:
seen.add(id(ast.cache_key))
if ast.depth == 1:
yield ast
continue
ast_queue.extend(ast.args)
java | public AnnSchema getSchema(final Class<? extends Ann> klass) {
for (AnnSchema ann : annSchemas)
if (ann.getKlass().equals(klass))
return ann;
return null;
} |
java | public static boolean isVisarganta(String str)
{
Log.logInfo(" Checking if is_visarganta:::");
String s1 = VarnaUtil.getAntyaVarna(str);
if(isVisarga(s1) )
return true;
return false;
} |
python | def who_has(self, subid):
"""Return a list of names who own subid in their id range set."""
answer = []
for name in self.__map:
if subid in self.__map[name] and name not in answer:
answer.append(name)
return answer |
java | public final AbstractItem _findById(long itemID) throws SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "_findById", Long.valueOf(itemID));
AbstractItem item = null;
AbstractItemLink itemLink = getLink(itemID);
if (null != itemLink)
{
item = itemLink.getItem();
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "_findById", item);
return item;
} |
python | def create_project(self, name, description):
"""
Create a project with the specified name and description
:param name: str: unique name for this project
:param description: str: long description of this project
:return: str: name of the project
"""
self._cache_project_list_once()
if name in [project.name for project in self.projects]:
raise DuplicateNameError("There is already a project named {}".format(name))
self.client.create_project(name, description)
self.clear_project_cache()
return name |
python | def pycache_clean(context):
"Remove __pycache__ directories"
#pylint: disable=unused-argument
dirs = set()
for root, dirnames, _ in os.walk(os.curdir):
if '__pycache__' in dirnames:
dirs.add(os.path.join(root, '__pycache__'))
print("Removing __pycache__ directories")
rmrf(dirs, verbose=False) |
java | @Pure
public static BusLayerDrawerType getPreferredLineDrawAlgorithm() {
final Preferences prefs = Preferences.userNodeForPackage(BusLayerConstants.class);
if (prefs != null) {
final String algo = prefs.get("DRAWING_ALGORITHM", null); //$NON-NLS-1$
if (algo != null && algo.length() > 0) {
try {
return BusLayerDrawerType.valueOf(algo);
} catch (Throwable exception) {
//
}
}
}
return BusLayerDrawerType.OVERLAP;
} |
python | def write_name(self, name):
"""Writes a domain name to the packet"""
try:
# Find existing instance of this name in packet
#
index = self.names[name]
except KeyError:
# No record of this name already, so write it
# out as normal, recording the location of the name
# for future pointers to it.
#
self.names[name] = self.size
parts = name.split('.')
if parts[-1] == '':
parts = parts[:-1]
for part in parts:
self.write_utf(part)
self.write_byte(0)
return
# An index was found, so write a pointer to it
#
self.write_byte((index >> 8) | 0xC0)
self.write_byte(index) |
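The two `write_byte` calls at the end emit a DNS compression pointer: a 14-bit offset with the top two bits set, per RFC 1035 section 4.1.4. A standalone sketch of the round trip:

```python
def encode_pointer(offset):
    """Encode a DNS compression pointer to a 14-bit offset (RFC 1035 4.1.4)."""
    assert 0 <= offset < 0x4000
    return bytes([(offset >> 8) | 0xC0, offset & 0xFF])

def decode_pointer(data):
    """Recover the offset from a two-byte compression pointer."""
    return ((data[0] & 0x3F) << 8) | data[1]

assert decode_pointer(encode_pointer(12)) == 12
```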
python | def _readue(self, pos):
"""Return interpretation of next bits as unsigned exponential-Golomb code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
oldpos = pos
try:
while not self[pos]:
pos += 1
except IndexError:
raise ReadError("Read off end of bitstring trying to read code.")
leadingzeros = pos - oldpos
codenum = (1 << leadingzeros) - 1
if leadingzeros > 0:
if pos + leadingzeros + 1 > self.len:
raise ReadError("Read off end of bitstring trying to read code.")
codenum += self._readuint(leadingzeros, pos + 1)
pos += leadingzeros + 1
else:
assert codenum == 0
pos += 1
return codenum, pos |
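The exp-Golomb scheme above, worked through on plain bit lists (a minimal sketch, not the bitstring class itself): count the leading zeros, skip the terminating one bit, then read that many suffix bits.

```python
def read_ue(bits, pos=0):
    """Decode one unsigned exponential-Golomb code from a list of 0/1 ints."""
    start = pos
    while bits[pos] == 0:        # count leading zeros
        pos += 1
    leading = pos - start
    pos += 1                     # skip the terminating 1 bit
    suffix = 0
    for _ in range(leading):     # read `leading` suffix bits
        suffix = (suffix << 1) | bits[pos]
        pos += 1
    return (1 << leading) - 1 + suffix, pos

assert read_ue([1]) == (0, 1)               # '1'     -> 0
assert read_ue([0, 1, 0]) == (1, 3)         # '010'   -> 1
assert read_ue([0, 0, 1, 0, 1]) == (4, 5)   # '00101' -> 4
```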
python | def pack_epub(directory, file):
"""Pack the given ``directory`` into an epub (i.e. zip) archive
given as ``file``, which can be a file-path or file-like object.
"""
with zipfile.ZipFile(file, 'w', zipfile.ZIP_DEFLATED) as zippy:
base_path = os.path.abspath(directory)
for root, dirs, filenames in os.walk(directory):
# Strip the absolute path
archive_path = os.path.relpath(root, base_path)
for filename in filenames:
filepath = os.path.join(root, filename)
archival_filepath = os.path.join(archive_path, filename)
zippy.write(filepath, archival_filepath) |
python | def load_files(self, path):
"""
Loads files in a given path and all its subdirectories
"""
if self.verbose == 2:
print("Indexing {}".format(path))
for filename in os.listdir(path):
file_path = path + "/" + filename
if os.path.isdir(file_path):
self.load_files(file_path)
elif filename.endswith(".yaml") or filename.endswith(".yml"):
self.unfold_yaml(file_path) |
java | public void stem(CoreLabel label,
Class<? extends CoreAnnotation<String>> ann) {
String lemma = lemmatize(label.word(), label.tag(), lexer, lexer.option(1));
label.set(ann, lemma);
} |
python | def secured_apps_copy(self, apps):
""" Given the http app list of a website, return what should be in the secure version """
return [[app_name, path] for app_name, path in apps if
app_name not in (self.LETSENCRYPT_VERIFY_APP_NAME,)] |
java | public WrappedByteBuffer putLongAt(int index, long v) {
_checkForWriteAt(index, 8);
_buf.putLong(index, v);
return this;
} |
python | def request_control(self, device_id, access_mode=True):
"""
Request exclusive control of device
:param device_id: id of device
:type device_id: int
:param access_mode: True=exclusive, False=shared
:type access_mode: bool
:returns: true if successful
:rtype: bool
"""
if access_mode:
if not request_control(self.corsair_sdk, device_id):
self._raise_corsair_error()
return True
else:
self.reload() |
java | private void releaseExtractionGuard(ExtractionGuard extractionLatch) {
synchronized( extractionsLock ) {
extractionLocks.remove( extractionLatch.path );
}
extractionLatch.completionLatch.countDown();
} |
java | public static boolean[] toPrimitive(Boolean[] array) {
if (array == null) {
return null;
} else if (array.length == 0) {
return EMPTY_BOOLEAN_ARRAY;
}
final boolean[] result = new boolean[array.length];
for (int i = 0; i < array.length; i++) {
result[i] = array[i];
}
return result;
} |
java | public List<CompletionItem> resolve(String filter, int startOffset, int caretOffset) {
final Set<ElementHandle<TypeElement>> result = classpath.getClassIndex().getDeclaredTypes("",
ClassIndex.NameKind.PREFIX, EnumSet.of(ClassIndex.SearchScope.SOURCE));
List<CompletionItem> ret = new ArrayList<>();
for (ElementHandle<TypeElement> te : result) {
if (te.getKind().isClass()) {
String binaryName = te.getBinaryName();
if (!StringUtils.equals(binaryName, "") && StringUtils.startsWith(binaryName, filter)) {
ret.add(new BasicCompletionItem(te.getBinaryName(), false, startOffset, caretOffset));
}
}
}
return ret;
} |
python | def read(self, size=None):
"""Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
OSError: if the read failed.
"""
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError(
'Invalid current offset: {0:d} value less than zero.'.format(
self._current_offset))
if self._decrypted_stream_size is None:
self._decrypted_stream_size = self._GetDecryptedStreamSize()
if self._decrypted_stream_size < 0:
raise IOError('Invalid decrypted stream size.')
if self._current_offset >= self._decrypted_stream_size:
return b''
if self._realign_offset:
self._AlignDecryptedDataOffset(self._current_offset)
self._realign_offset = False
if size is None:
size = self._decrypted_stream_size
if self._current_offset + size > self._decrypted_stream_size:
size = self._decrypted_stream_size - self._current_offset
decrypted_data = b''
if size == 0:
return decrypted_data
while size > self._decrypted_data_size:
decrypted_data = b''.join([
decrypted_data,
self._decrypted_data[self._decrypted_data_offset:]])
remaining_decrypted_data_size = (
self._decrypted_data_size - self._decrypted_data_offset)
self._current_offset += remaining_decrypted_data_size
size -= remaining_decrypted_data_size
if self._current_offset >= self._decrypted_stream_size:
break
read_count = self._ReadEncryptedData(self._ENCRYPTED_DATA_BUFFER_SIZE)
self._decrypted_data_offset = 0
if read_count == 0:
break
if size > 0:
slice_start_offset = self._decrypted_data_offset
slice_end_offset = slice_start_offset + size
decrypted_data = b''.join([
decrypted_data,
self._decrypted_data[slice_start_offset:slice_end_offset]])
self._decrypted_data_offset += size
self._current_offset += size
return decrypted_data |
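The while loop implements a standard buffered-read pattern: drain the decrypted bytes left in the buffer, refill via `_ReadEncryptedData`, and finally slice just the requested tail. A stripped-down sketch of the same pattern over an in-memory chunk source:

```python
class BufferedReader:
    """Minimal sketch of the drain/refill/slice read loop above."""

    def __init__(self, chunks):
        self._chunks = iter(chunks)  # stands in for _ReadEncryptedData
        self._buf = b''
        self._offset = 0

    def read(self, size):
        out = []
        # Drain and refill while the request exceeds what is buffered.
        while size > len(self._buf) - self._offset:
            out.append(self._buf[self._offset:])
            size -= len(self._buf) - self._offset
            self._buf = next(self._chunks, b'')
            self._offset = 0
            if not self._buf:  # source exhausted
                return b''.join(out)
        # Slice the remaining tail out of the buffer.
        out.append(self._buf[self._offset:self._offset + size])
        self._offset += size
        return b''.join(out)

reader = BufferedReader([b'abc', b'defg'])
assert reader.read(5) == b'abcde'
assert reader.read(10) == b'fg'
```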
java | public static String getLabel(FacesContext context, UIComponent comp) {
Object o = comp.getAttributes().get("label");
if (o == null || (o instanceof String && ((String) o).length() == 0)) {
ValueExpression vex = comp.getValueExpression("label");
if (null != vex)
return (String) vex.getValue(context.getELContext());
}
if (o == null) {
// Use the "clientId" if there was no label specified.
o = comp.getClientId(context);
}
return (String) o;
} |
java | @Override
public String generateId() {
UUID uuid = UUID.randomUUID();
StringBuilder sb = new StringBuilder();
sb.append(Long.toHexString(uuid.getMostSignificantBits()));
sb.append(Long.toHexString(uuid.getLeastSignificantBits()));
while (sb.length() < 32) {
sb.append('0');
}
return sb.toString();
} |
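One subtlety worth noting: `Long.toHexString` drops leading zeros, so the padding loop above appends zeros at the *end* rather than restoring them in place, and short ids do not round-trip to the original UUID. A Python sketch of the difference (for illustration only):

```python
import uuid

u = uuid.UUID("00000000-0000-4000-8000-000000000001")
hi = u.int >> 64
lo = u.int & ((1 << 64) - 1)

# Analogue of the Java code: hex without leading zeros, zero-padded at the end.
java_style = format(hi, "x") + format(lo, "x")
java_style += "0" * (32 - len(java_style))

# Fixed-width alternative: each 64-bit half zero-padded to 16 hex digits.
fixed = format(hi, "016x") + format(lo, "016x")

print(java_style)  # 40008000000000000001000000000000  (zeros migrated to the end)
print(fixed)       # 00000000000040008000000000000001
```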
java | @Override
public String getValue(int index) {
if (index < 0 || index >= attributesList.size()) {
return null;
}
return attributesList.get(index).value;
} |
python | def block(self, to_block):
"""
The method serves for imposing a constraint forbidding the hitting
set solver to compute a given hitting set. Each set to block is
encoded as a hard clause in the MaxSAT problem formulation, which
is then added to the underlying oracle.
:param to_block: a set to block
:type to_block: iterable(obj)
"""
# translating objects to variables
to_block = list(map(lambda obj: self.idpool.id(obj), to_block))
# a soft clause should be added for each new object
new_obj = list(filter(lambda vid: vid not in self.oracle.vmap.e2i, to_block))
# new hard clause
self.oracle.add_clause([-vid for vid in to_block])
# new soft clauses
for vid in new_obj:
self.oracle.add_clause([-vid], 1) |
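Concretely, blocking the hitting set {x1, x2} adds the hard clause (¬x1 ∨ ¬x2), so no future model can select both again. In integer-literal form (ids 1 and 2 are illustrative):

```python
to_block = [1, 2]                         # variable ids of the blocked set
hard_clause = [-vid for vid in to_block]  # [-1, -2]  ==  (not x1 or not x2)
```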
python | def add_user_to_group(self, user_name, group_name):
"""Adds a user to a group.
:param user_name: name of user to be added
:param group_name: name of group user is to be added to
:returns: True if user added
:raises: HTTPResponseError in case an HTTP error status was returned
"""
res = self._make_ocs_request(
'POST',
self.OCS_SERVICE_CLOUD,
'users/' + user_name + '/groups',
data={'groupid': group_name}
)
if res.status_code == 200:
tree = ET.fromstring(res.content)
self._check_ocs_status(tree, [100])
return True
raise HTTPResponseError(res) |
python | def id_to_piece(input, model_file=None, model_proto=None, name=None):
"""Converts vocabulary id into piece.
Args:
input: An arbitrary tensor of int32.
model_file: The sentencepiece model file path.
model_proto: The sentencepiece model serialized proto.
Either `model_file` or `model_proto` must be set.
name: The name argument that is passed to the op function.
Returns:
A tensor of string with the same shape as input.
"""
return _gen_sentencepiece_processor_op.sentencepiece_id_to_piece(
input, model_file=model_file, model_proto=model_proto, name=name) |
java | void removeAll(BaseRow sortKey) {
Collection<BaseRow> list = treeMap.get(sortKey);
if (list != null) {
currentTopNum -= list.size();
treeMap.remove(sortKey);
}
} |
java | public Map<String, Object> serialize() {
Map<String, Object> serverInfo = new HashMap<String, Object>();
serverInfo.put("totalCount", totalCount);
serverInfo.put("cursor", cursor);
serverInfo.put("serviceName", serviceName);
serverInfo.put("columnNames", columns);
serverInfo.put("version", version);
serverInfo.put("id", id);
serverInfo.put("initialData", data);
return serverInfo;
} |
java | private Schema readMap(JsonReader reader, Set<String> knownRecords) throws IOException {
return Schema.mapOf(readInnerSchema(reader, "keys", knownRecords),
readInnerSchema(reader, "values", knownRecords));
} |
python | def GetGroupUUID(group,alias=None,location=None):
"""Given a group name return the unique group ID.
:param alias: short code for a particular account. If none will use account's default alias
:param location: datacenter where group resides
:param group: group name
"""
if alias is None: alias = clc.v1.Account.GetAlias()
if location is None: location = clc.v1.Account.GetLocation()
r = Group.GetGroups(location,alias)
for row in r:
if row['Name'] == group: return(row['UUID'])
else:
if clc.args: clc.v1.output.Status("ERROR",3,"Group %s not found in account %s datacenter %s" % (group,alias,location))
raise Exception("Group not found") |
java | @SuppressWarnings("unchecked")
public static List<Node> selectNodes(Node node, String xpathQuery, Map<String, String> namespaceUris) {
XPath xpath = DocumentHelper.createXPath(xpathQuery);
xpath.setNamespaceURIs(namespaceUris);
return xpath.selectNodes(node);
} |
python | def join(self, glue=" "):
""" Javascript's join implementation
"""
j = glue.join([str(x) for x in self.obj])
return self._wrap(j) |
java | public static LocalCall<String> mkdir(String path, String mode) {
return mkdir(path, Optional.empty(), Optional.empty(), Optional.of(mode));
} |
java | private List getXmlContentResourceTypes() {
// get all available resource types and filter XML content resource types
List resTypes = OpenCms.getResourceManager().getResourceTypes();
Iterator i = resTypes.iterator();
List resTypeNames = new ArrayList(resTypes.size());
while (i.hasNext()) {
I_CmsResourceType resType = (I_CmsResourceType)i.next();
if (!(resType instanceof CmsResourceTypeXmlContent)) {
// this is not XML content resource type, skip it
continue;
}
if (!resType.getTypeName().equals(TYPE_XMLCONTENT)) {
resTypeNames.add(resType.getTypeName());
}
}
// create the selector options
List result = new ArrayList(resTypeNames.size() + 2);
// add empty "please select" option to selector
result.add(new CmsSelectWidgetOption("", true, key(Messages.GUI_XMLCONTENTREPAIR_DIALOG_RESTYPE_SELECT_0)));
// sort the resource type names alphabetically
Collections.sort(resTypeNames);
i = resTypeNames.iterator();
while (i.hasNext()) {
// add all resource type names to the selector
String resTypeName = (String)i.next();
result.add(new CmsSelectWidgetOption(resTypeName));
}
// add option for generic XML content without "own" resource types at the end
result.add(
new CmsSelectWidgetOption(
TYPE_XMLCONTENT,
false,
key(Messages.GUI_XMLCONTENTREPAIR_DIALOG_RESTYPE_GENERIC_0)));
return result;
} |
python | def ichunk(iterable, n):
""" Split an iterable into n-sized chunks. """
it = iter(iterable)
while True:
chunk = tuple(itertools.islice(it, n))
if not chunk:
return
yield chunk |
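For example, a length that does not divide evenly leaves a short final chunk (definition repeated so the demo runs standalone):

```python
import itertools

def ichunk(iterable, n):  # as defined above
    it = iter(iterable)
    while True:
        chunk = tuple(itertools.islice(it, n))
        if not chunk:
            return
        yield chunk

print(list(ichunk(range(7), 3)))  # [(0, 1, 2), (3, 4, 5), (6,)]
```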
python | def currentAbove(requestContext, seriesList, n):
"""
Takes one metric or a wildcard seriesList followed by an integer N.
Out of all metrics passed, draws only the metrics whose value is above N
at the end of the time period specified.
Example::
&target=currentAbove(server*.instance*.threads.busy,50)
Draws the servers with more than 50 busy threads.
"""
results = []
for series in seriesList:
val = safeLast(series)
if val is not None and val >= n:
results.append(series)
return results |
python | def make_encoded_file_url_and_params(file_service, share, file_dir, file_name, sas_token, safe=SAFE_CHARS):
"""
Makes the file url using the service. Converts the file directory and name into byte-strings if needed and returns
(url, dir, file) as a tuple. This is needed to account for string encoding differences between python 2 and 3.
"""
try:
file_url = file_service.make_file_url(share, file_dir, file_name, sas_token=sas_token)
except UnicodeEncodeError:
file_dir = file_dir.encode('utf-8')
file_name = file_name.encode('utf-8')
file_url = file_service.make_file_url(share, file_dir, file_name, sas_token=sas_token)
if not file_dir:
sep = file_url.find('://')
file_url = file_url[:sep + 3] + file_url[sep + 3:].replace('//', '/')
return encode_url_path(file_url, safe), file_dir, file_name |
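The final branch collapses the doubled slash that appears when `file_dir` is empty, while leaving the scheme's `://` intact. For example (the URL is illustrative):

```python
url = 'https://acct.file.core.windows.net//name.txt'
sep = url.find('://')
fixed = url[:sep + 3] + url[sep + 3:].replace('//', '/')
print(fixed)  # https://acct.file.core.windows.net/name.txt
```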
java | private void initialize() {
this.setTitle(Constant.messages.getString("cfu.manage.title"));
//this.setContentPane(getJTabbed());
this.setContentPane(getTopPanel());
this.pack();
centerFrame();
state = State.IDLE;
// Handle escape key to close the dialog
KeyStroke escape = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false);
AbstractAction escapeAction = new AbstractAction() {
private static final long serialVersionUID = 3516424501887406165L;
@Override
public void actionPerformed(ActionEvent e) {
dispatchEvent(new WindowEvent(ManageAddOnsDialog.this, WindowEvent.WINDOW_CLOSING));
}
};
getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(escape, "ESCAPE");
getRootPane().getActionMap().put("ESCAPE",escapeAction);
} |
python | def age_to_BP(age, age_unit):
"""
Convert an age value into the equivalent in time Before Present(BP) where Present is 1950
Returns
---------
ageBP : number
"""
ageBP = -1e9
if age_unit == "Years AD (+/-)" or age_unit == "Years Cal AD (+/-)":
if age < 0:
age = age+1 # to correct for there being no 0 AD
ageBP = 1950-age
elif age_unit == "Years BP" or age_unit == "Years Cal BP":
ageBP = age
elif age_unit == "ka":
ageBP = age*1000
elif age_unit == "Ma":
ageBP = age*1e6
elif age_unit == "Ga":
ageBP = age*1e9
else:
print("Age unit invalid. Age set to -1.0e9")
return ageBP |
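A few worked conversions under the conventions above (Present = 1950, and no year 0 AD, so BC years shift by one before subtraction); these calls assume the function above is in scope:

```python
age_to_BP(1000, "Years AD (+/-)")  # 1950 - 1000 -> 950
age_to_BP(-50, "Years AD (+/-)")   # -50 -> -49 (no 0 AD), 1950 - (-49) -> 1999
age_to_BP(2.5, "ka")               # 2.5 * 1000 -> 2500.0
```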
java | public static KeyPair generateKeyPair(String jcaAlgorithmName, String jcaProviderName, SignatureAlgorithm alg,
SecureRandom random) {
Assert.notNull(alg, "SignatureAlgorithm argument cannot be null.");
Assert.isTrue(alg.isEllipticCurve(), "SignatureAlgorithm argument must represent an Elliptic Curve algorithm.");
try {
KeyPairGenerator g;
if (Strings.hasText(jcaProviderName)) {
g = KeyPairGenerator.getInstance(jcaAlgorithmName, jcaProviderName);
} else {
g = KeyPairGenerator.getInstance(jcaAlgorithmName);
}
String paramSpecCurveName = EC_CURVE_NAMES.get(alg);
ECGenParameterSpec spec = new ECGenParameterSpec(paramSpecCurveName);
g.initialize(spec, random);
return g.generateKeyPair();
} catch (Exception e) {
throw new IllegalStateException("Unable to generate Elliptic Curve KeyPair: " + e.getMessage(), e);
}
} |
java | public String transformMessage(String strXML, StreamSource streamTransformer)
{
Reader reader = new StringReader(strXML);
StreamSource source = new StreamSource(reader);
StringWriter stringWriter = new StringWriter();
Result result = new StreamResult(stringWriter);
try {
TransformerFactory tFact = TransformerFactory.newInstance();
if (streamTransformer == null)
streamTransformer = this.getTransformerStream(null);
Transformer transformer = tFact.newTransformer(streamTransformer);
// result is always the StreamResult created above, so transform directly
transformer.transform(source, result);
return stringWriter.toString();
} catch (TransformerConfigurationException ex) {
ex.printStackTrace();
} catch (TransformerException ex) {
ex.printStackTrace();
}
return null;
} |
java | public static CmsDocumentDependency load(CmsObject cms, CmsPublishedResource pubRes) {
CmsDocumentDependency result = readFromContext(cms, pubRes.getRootPath());
if (result == null) {
result = new CmsDocumentDependency(pubRes);
result.readDependencies(cms);
}
return result;
} |
java | public static <T1, T2, R> BiFunction<T1, T2, R> biFunction(final BiFunction<T1, T2, R> lambda) {
return lambda;
} |
python | def get_followers(self, first_user_id=None):
"""
Fetch the list of followers.
For details, see http://mp.weixin.qq.com/wiki/index.php?title=获取关注者列表
:param first_user_id: Optional. The first OPENID to pull; if omitted, pulling starts from the beginning.
:return: The returned JSON data package.
"""
params = {"access_token": self.token}
if first_user_id:
params["next_openid"] = first_user_id
return self.get(
"https://api.weixin.qq.com/cgi-bin/user/get", params=params
) |
python | def Servers(self,cached=True):
"""Returns list of server objects, populates if necessary.
>>> clc.v2.Servers(["NY1BTDIPHYP0101","NY1BTDIWEB0101"]).Servers()
[<clc.APIv2.server.Server object at 0x1065b0d50>, <clc.APIv2.server.Server object at 0x1065b0e50>]
>>> print(_[0])
NY1BTDIPHYP0101
"""
if not hasattr(self,'_servers') or not cached:
self._servers = []
for server in self.servers_lst:
self._servers.append(Server(id=server,alias=self.alias,session=self.session))
return(self._servers) |
java | @Override
public boolean isPunctuationWord(String str) {
return chineseCommaAcceptFilter().accept(str)
|| chineseEndSentenceAcceptFilter().accept(str)
|| chineseDouHaoAcceptFilter().accept(str)
|| chineseQuoteMarkAcceptFilter().accept(str)
|| chineseParenthesisAcceptFilter().accept(str)
|| chineseColonAcceptFilter().accept(str)
|| chineseDashAcceptFilter().accept(str)
|| chineseOtherAcceptFilter().accept(str);
} |
java | @SuppressWarnings("unchecked")
@Override
public EList<IfcComplexPropertyTemplate> getPartOfComplexTemplate() {
return (EList<IfcComplexPropertyTemplate>) eGet(
Ifc4Package.Literals.IFC_PROPERTY_TEMPLATE__PART_OF_COMPLEX_TEMPLATE, true);
} |
python | def _build_full_list(self):
"""Build a full list of pages.
Examples:
>>> _SlicedPaginator(1, 7, 5)._build_full_list()
[1, 2, 3, 4, 5]
>>> _SlicedPaginator(6, 7, 5)._build_full_list()
[3, 4, 5, 6, 7]
>>> import itertools
>>> combinations = itertools.combinations(range(100), 2)
>>> combinations = filter(lambda xy: xy[0] < xy[1], combinations)
>>> for page, maxpages in combinations:
... a = _SlicedPaginator(page + 1, maxpages, 7)
... b = a._build_full_list()
>>> _SlicedPaginator(2, 5, 7)._build_full_list()
[1, 2, 3, 4, 5]
>>> _SlicedPaginator(5, 5, 7)._build_full_list()
[1, 2, 3, 4, 5]
"""
if self.npages <= self.maxpages_items:
return list(range(1, self.npages + 1))
else:
l = list(range(self.curpage - self.max_prev_items,
self.curpage + self.max_next_items + 1))
while l and l[0] < 1:
l.append(l[-1] + 1)
del l[0]
while l and l[-1] > self.npages:
l.insert(0, l[0] - 1)
del l[-1]
return l |
java | @Override
public void process( T image ) {
// initialize data structures
this.image = image;
this.stopRequested = false;
modeLocation.reset();
modeColor.reset();
modeMemberCount.reset();
interpolate.setImage(image);
pixelToMode.reshape(image.width, image.height);
quickMode.reshape(image.width, image.height);
// mark as -1 so it knows which pixels have been assigned a mode already and can skip them
ImageMiscOps.fill(pixelToMode, -1);
// mark all pixels are not being a mode
ImageMiscOps.fill(quickMode,-1);
// use mean shift to find the peak of each pixel in the image
int indexImg = 0;
for( int y = 0; y < image.height&& !stopRequested; y++ ) {
for( int x = 0; x < image.width; x++ , indexImg++) {
if( pixelToMode.data[indexImg] != -1 ) {
int peakIndex = pixelToMode.data[indexImg];
modeMemberCount.data[peakIndex]++;
continue;
}
float meanColor = interpolate.get(x, y);
findPeak(x,y, meanColor);
// convert mean-shift location into pixel index
int modeX = (int)(this.modeX +0.5f);
int modeY = (int)(this.modeY +0.5f);
int modePixelIndex = modeY*image.width + modeX;
// get index in the list of peaks
int modeIndex = quickMode.data[modePixelIndex];
// If the mode is new add it to the list
if( modeIndex < 0 ) {
modeIndex = this.modeLocation.size();
this.modeLocation.grow().set(modeX, modeY);
// Save the peak's color
modeColor.grow()[0] = meanGray;
// Mark the mode in the segment image
quickMode.data[modePixelIndex] = modeIndex;
// Set the initial count to zero. This will be incremented when it is traversed later on
modeMemberCount.add(0);
}
// add this pixel to the membership list
modeMemberCount.data[modeIndex]++;
// Add all pixels it traversed through to the membership of this mode
// This is an approximate of mean-shift
for( int i = 0; i < history.size; i++ ) {
Point2D_F32 p = history.get(i);
int px = (int)(p.x+0.5f);
int py = (int)(p.y+0.5f);
int index = pixelToMode.getIndex(px,py);
if( pixelToMode.data[index] == -1 ) {
pixelToMode.data[index] = modeIndex;
}
}
}
}
} |
python | def setValidityErrorHandler(self, err_func, warn_func, arg=None):
"""
Register error and warning handlers for Schema validation.
These will be called back as f(msg,arg)
"""
libxml2mod.xmlSchemaSetValidErrors(self._o, err_func, warn_func, arg) |
java | public String resolveReposUrl() throws IOException {
// Currently only implemented for HTTP.
try {
URL url;
// TODO: Clean this up.
try {
url = new URL(defaultRepositoryUrl);
} catch (MalformedURLException e) {
url = new File(defaultRepositoryUrl).toURI().toURL();
}
if (!url.getProtocol().equals("http")) {
log.info("Resolving repository-config not implemented for protocol {} yet", url.getProtocol());
return defaultRepositoryUrl;
}
Properties config = loadConfig();
if (config == null) {
return defaultRepositoryUrl;
}
String localIp = determinateLocalIP();
return resolveRepos(localIp, config);
} catch (Exception e) {
log.error(e.getMessage(), e);
return defaultRepositoryUrl;
}
} |
python | def invoke_ssh_shell(cls, *args, **kwargs):
"""invoke_ssh(arguments..., pty=False, echo=False)
Star a new shell on a remote server. It first calls
:meth:`Flow.connect_ssh` using all positional and keyword
arguments, then calls :meth:`SSHClient.invoke_shell` with the
pty / echo options.
Args:
arguments...: The options for the SSH connection.
pty(bool): Request a pseudo-terminal from the server.
echo(bool): Whether to echo read/written data to stdout by default.
Returns:
:class:`Flow`: A Flow instance initialised with the SSH channel.
"""
pty = kwargs.pop('pty', True)
echo = kwargs.pop('echo', False)
client = cls.connect_ssh(*args, **kwargs)
f = client.invoke_shell(pty=pty, echo=echo)
f.client = client
return f |
java | public Range<T> extend(T value) {
if (value == null)
throw new IllegalArgumentException("value must not be null");
return extend(value, value);
} |
python | def compare(node1, node2):
"""Compares two Werkzeug hg versions."""
if not os.path.isdir("a"):
print("error: comparison feature not initialized", file=sys.stderr)
sys.exit(4)
print("=" * 80)
print("WERKZEUG INTERNAL BENCHMARK -- COMPARE MODE".center(80))
print("-" * 80)
def _hg_update(repo, node):
def hg(*x):
return subprocess.call(
["hg"] + list(x), cwd=repo, stdout=null_out, stderr=null_out
)
hg("revert", "-a", "--no-backup")
client = subprocess.Popen(
["hg", "status", "--unknown", "-n", "-0"], stdout=subprocess.PIPE, cwd=repo
)
unknown = client.communicate()[0]
if unknown:
client = subprocess.Popen(
["xargs", "-0", "rm", "-f"],
cwd=repo,
stdout=null_out,
stdin=subprocess.PIPE,
)
client.communicate(unknown)
hg("pull", "../..")
hg("update", node)
if node == "tip":
diff = subprocess.Popen(
["hg", "diff"], cwd="..", stdout=subprocess.PIPE
).communicate()[0]
if diff:
client = subprocess.Popen(
["hg", "import", "--no-commit", "-"],
cwd=repo,
stdout=null_out,
stdin=subprocess.PIPE,
)
client.communicate(diff)
_hg_update("a", node1)
_hg_update("b", node2)
d1 = run("a", no_header=True)
d2 = run("b", no_header=True)
print("DIRECT COMPARISON".center(80))
print("-" * 80)
for key in sorted(d1):
delta = d1[key] - d2[key]
if abs(1 - d1[key] / d2[key]) < TOLERANCE or abs(delta) < MIN_RESOLUTION:
delta = "=="
else:
delta = "%+.4f (%+d%%)" % (delta, round(d2[key] / d1[key] * 100 - 100))
print("%36s %.4f %.4f %s" % (format_func(key), d1[key], d2[key], delta))
print("-" * 80) |
python | def main():
"""Main function"""
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--username',
required=True, help='EBox account')
parser.add_argument('-p', '--password',
required=True, help='Password')
parser.add_argument('-j', '--json', action='store_true',
default=False, help='Json output')
parser.add_argument('-t', '--timeout',
default=REQUESTS_TIMEOUT, help='Request timeout')
args = parser.parse_args()
client = EboxClient(args.username, args.password, args.timeout)
loop = asyncio.get_event_loop()
task = loop.create_task(client.fetch_data())
try:
loop.run_until_complete(task)
except PyEboxError as exp:
print(exp)
client.close_session()
return
if not client.get_data():
client.close_session()
return
if args.json:
print(json.dumps(client.get_data()))
else:
_format_output(args.username, client.get_data())
client.close_session() |
python | def load_projections(folder, indices=None):
"""Load geometry and data stored in Mayo format from folder.
Parameters
----------
folder : str
Path to the folder where the Mayo DICOM files are stored.
indices : optional
Indices of the projections to load.
Accepts advanced indexing such as slice or list of indices.
Returns
-------
geometry : ConeFlatGeometry
Geometry corresponding to the Mayo projector.
proj_data : `numpy.ndarray`
Projection data, given as the line integral of the linear attenuation
coefficient (g/cm^3). Its unit is thus g/cm^2.
"""
datasets, data_array = _read_projections(folder, indices)
# Get the angles
angles = [d.DetectorFocalCenterAngularPosition for d in datasets]
angles = -np.unwrap(angles) - np.pi  # different definition of angles
# Set minimum and maximum corners
shape = np.array([datasets[0].NumberofDetectorColumns,
datasets[0].NumberofDetectorRows])
pixel_size = np.array([datasets[0].DetectorElementTransverseSpacing,
datasets[0].DetectorElementAxialSpacing])
# Correct from center of pixel to corner of pixel
minp = -(np.array(datasets[0].DetectorCentralElement) - 0.5) * pixel_size
maxp = minp + shape * pixel_size
# Select geometry parameters
src_radius = datasets[0].DetectorFocalCenterRadialDistance
det_radius = (datasets[0].ConstantRadialDistance -
datasets[0].DetectorFocalCenterRadialDistance)
# For unknown reasons, mayo does not include the tag
# "TableFeedPerRotation", which is what we want.
# Instead we manually compute the pitch
pitch = ((datasets[-1].DetectorFocalCenterAxialPosition -
datasets[0].DetectorFocalCenterAxialPosition) /
((np.max(angles) - np.min(angles)) / (2 * np.pi)))
# Get flying focal spot data
offset_axial = np.array([d.SourceAxialPositionShift for d in datasets])
offset_angular = np.array([d.SourceAngularPositionShift for d in datasets])
offset_radial = np.array([d.SourceRadialDistanceShift for d in datasets])
# TODO(adler-j): Implement proper handling of flying focal spot.
# Currently we do not fully account for it, merely making some "first
# order corrections" to the detector position and radial offset.
# Update angles with flying focal spot (in plane direction).
# This increases the resolution of the reconstructions.
angles = angles - offset_angular
# We correct for the mean offset due to the rotated angles, we need to
# shift the detector.
offset_detector_by_angles = det_radius * np.mean(offset_angular)
minp[0] -= offset_detector_by_angles
maxp[0] -= offset_detector_by_angles
# We currently apply only the mean of the offsets
src_radius = src_radius + np.mean(offset_radial)
# Partially compensate for a movement of the source by moving the object
# instead. We need to rescale by the magnification to get the correct
# change in the detector. This approximation is only exactly valid on the
# axis of rotation.
mean_offset_along_axis_for_ffz = np.mean(offset_axial) * (
src_radius / (src_radius + det_radius))
# Create partition for detector
detector_partition = odl.uniform_partition(minp, maxp, shape)
# Convert offset to odl definitions
offset_along_axis = (mean_offset_along_axis_for_ffz +
datasets[0].DetectorFocalCenterAxialPosition -
angles[0] / (2 * np.pi) * pitch)
# Assemble geometry
angle_partition = odl.nonuniform_partition(angles)
geometry = odl.tomo.ConeFlatGeometry(angle_partition,
detector_partition,
src_radius=src_radius,
det_radius=det_radius,
pitch=pitch,
offset_along_axis=offset_along_axis)
# Create a *temporary* ray transform (we need its range)
spc = odl.uniform_discr([-1] * 3, [1] * 3, [32] * 3)
ray_trafo = odl.tomo.RayTransform(spc, geometry, interp='linear')
# convert coordinates
theta, up, vp = ray_trafo.range.grid.meshgrid
d = src_radius + det_radius
u = d * np.arctan(up / d)
v = d / np.sqrt(d**2 + up**2) * vp
# Calculate projection data in rectangular coordinates since we have no
# backend that supports cylindrical
proj_data_cylinder = ray_trafo.range.element(data_array)
interpolated_values = proj_data_cylinder.interpolation((theta, u, v))
proj_data = ray_trafo.range.element(interpolated_values)
return geometry, proj_data.asarray() |
java | public void insertParentAfter(Node node) throws ChildNodeException {
if (node != null) {
// give this node my children
node.setFirstChild(getFirstChild());
setFirstChild(null); // clear our list
addChild(node); // make our child
}
} |
java | public Surface draw (Tile tile, float x, float y) {
return draw(tile, x, y, tile.width(), tile.height());
} |
python | def realimag_files(xscript=0, yscript="d[1]+1j*d[2]", eyscript=None, exscript=None, paths=None, g=None, **kwargs):
"""
This will load a bunch of data files, generate data based on the supplied
scripts, and then plot the ydata's real and imaginary parts versus xdata.
Parameters
----------
xscript=0
Script for x data
yscript='d[1]+1j*d[2]'
Script for y data
eyscript=None
Script for y error
exscript=None
Script for x error
paths=None
List of paths to open.
g=None
Optional dictionary of globals for the scripts
See spinmob.plot.realimag.data() for additional optional arguments.
See spinmob.data.databox.execute_script() for more information about scripts.
Common additional parameters
----------------------------
filters="*.*"
Set the file filters for the dialog.
"""
return files(xscript, yscript, eyscript, exscript, plotter=realimag_databoxes, paths=paths, g=g, **kwargs) |
python | def acquire(self, timeout=0, force=False):
"""Attempt to acquire an exclusive lock on this record.
If a timeout is given (in seconds) then the acquire call will block for up to that much time attempting to
acquire the lock. If the lock can not be acquired (either because it is already set or we time out) a Locked
exception will be raised.
"""
if timeout and not (self.Queue.__collection__ and self.Queue.__capped__):
raise NotImplementedError(name(self.__class__) + ".Queue has not been prepared.")
D = self.__class__
collection = self.get_collection()
identity = self.Lock()
if force:
query = D.id == self
else:
query = D.lock == None
query |= D.lock.instance == identity.instance
query |= D.lock.time < (identity.time - identity.__period__)
query &= D.id == self
previous = collection.find_one_and_update(query, {'$set': {~D.lock: identity}}, {~D.lock: True})
if previous is None:
if timeout:
try:
self.wait(timeout)
except TimeoutError:
pass
return self.acquire()
lock = getattr(self.find_one(self, projection={~D.lock: True}), 'lock', None)
raise self.Locked("Unable to acquire lock.", lock)
if not force and ~D.lock in previous:
previous = self.Lock.from_mongo(previous.get(~D.lock))
if previous:
if previous.expires < identity.time:
previous.expired(self)
if previous.instance != identity.instance:  # Don't re-broadcast acquisition of an already-held lock.
identity.acquired(self, force)
return identity |
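The heart of the protocol is the atomic compare-and-set via `find_one_and_update`: the `$set` only lands when the lock is absent, expired, or already held by this instance. A minimal standalone sketch of that query shape (pymongo-style; collection and field names are illustrative, and `period` is assumed to be a timedelta):

```python
import datetime

def try_acquire(collection, doc_id, instance, period):
    """Return True if the lock was taken; mirrors the query built above."""
    now = datetime.datetime.utcnow()
    query = {
        "_id": doc_id,
        "$or": [
            {"lock": None},                        # free
            {"lock.instance": instance},           # already ours
            {"lock.time": {"$lt": now - period}},  # expired
        ],
    }
    update = {"$set": {"lock": {"instance": instance, "time": now}}}
    return collection.find_one_and_update(query, update) is not None
```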
java | public boolean isEncodedWith(Encoder encoder, CharSequence string) {
return getIdentityHashCodesForEncoder(encoder).contains(System.identityHashCode(string));
} |
python | def close(self, wait=False):
"""Close session, shutdown pool."""
self.session.close()
self.pool.shutdown(wait=wait) |
python | def _level_coords(self):
"""Return a mapping of all MultiIndex levels and their corresponding
coordinate name.
"""
level_coords = OrderedDict()
for name, index in self.indexes.items():
if isinstance(index, pd.MultiIndex):
level_names = index.names
(dim,) = self.variables[name].dims
level_coords.update({lname: dim for lname in level_names})
return level_coords |
java | public PreparedStatement getPreparedQuery(Query query, String storeName) {
String tableName = storeToCQLName(storeName);
return m_statementCache.getPreparedQuery(tableName, query);
} |
java | public int doAfterBody() {
if(bodyContent != null) {
_appender.append(bodyContent.getString());
bodyContent.clearBody();
}
/*
this loop exists so that the table is filled out correctly up to the specified
or guessed table dimensions. this is a little bit of a kludge; this logic should be done
in doEndTag()
*/
boolean haveNext = false;
while(!haveNext) {
_currentColumn++;
/* close the previous cell whose content was rendered the last time the tag body was executed */
closeCellTag(_appender);
/* open a new table row */
if(_currentColumn == _columns) {
_currentRow++;
_currentColumn = 0;
closeRowTag(_appender);
_appender.append("\n");
}
/* reached the end of the table as the current row is now equal to the total number of rows */
if(_currentRow == _rows)
return SKIP_BODY;
if(_currentColumn == 0)
openRowTag(_appender, _trState != null ? _trState : STATE_TR);
int itemIndex = -1;
if(_verticalRepeat)
itemIndex = _currentColumn * _rows + _currentRow;
else itemIndex = _currentRow * _columns + _currentColumn;
haveNext = ensureItem(itemIndex, _dataList);
openCellTag(_appender, computeStyleIndex(_currentRow, _currentColumn)) ;
/* render empty cell and continue filling the table */
if(!haveNext)
_htmlConstantRendering.NBSP(_appender);
/* open a new table cell and render the body once again. note, this exits the while loop above */
else return EVAL_BODY_AGAIN;
}
/* default is to skip the tag body */
return SKIP_BODY;
} |
java | public Observable<Page<FeatureResultInner>> listAsync() {
return listWithServiceResponseAsync()
.map(new Func1<ServiceResponse<Page<FeatureResultInner>>, Page<FeatureResultInner>>() {
@Override
public Page<FeatureResultInner> call(ServiceResponse<Page<FeatureResultInner>> response) {
return response.body();
}
});
} |
python | def confd_state_loaded_data_models_data_model_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring")
loaded_data_models = ET.SubElement(confd_state, "loaded-data-models")
data_model = ET.SubElement(loaded_data_models, "data-model")
name = ET.SubElement(data_model, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
python | def get_node(self, index):
"""
Returns the Node at given index.
:param index: Index.
:type index: QModelIndex
:return: Node.
:rtype: AbstractCompositeNode or GraphModelNode
"""
if not index.isValid():
return self.__root_node
return index.internalPointer() or self.__root_node |
python | def breaks(self, frame, no_remove=False):
"""Return True if there's a breakpoint at frame"""
for breakpoint in set(self.breakpoints):
if breakpoint.breaks(frame):
if breakpoint.temporary and not no_remove:
self.breakpoints.remove(breakpoint)
return True
return False |
java | protected static String getArray(List<String> paths) {
StringBuilder builder = new StringBuilder();
for (String s : paths) {
if (builder.length() == 0) {
builder.append(s);
} else {
builder.append(",").append(s);
}
}
return builder.toString();
} |
java | protected static Invocable resolveJavaMethod(
Class<?> clas, String name, Class<?>[] types,
boolean staticOnly ) throws UtilEvalError {
if ( clas == null )
throw new InterpreterError("null class");
Invocable method = BshClassManager.memberCache
.get(clas).findMethod(name, types);
checkFoundStaticMethod( method, staticOnly, clas );
return method;
} |
java | @Override
public EClass getIfcNumericMeasure() {
if (ifcNumericMeasureEClass == null) {
ifcNumericMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(847);
}
return ifcNumericMeasureEClass;
} |
python | def get_model_file(name, root=os.path.join(base.data_dir(), 'models')):
r"""Return location for the pretrained on local file system.
This function will download from online model zoo when model cannot be found or has mismatch.
The root directory will be created if it doesn't exist.
Parameters
----------
name : str
Name of the model.
root : str, default $MXNET_HOME/models
Location for keeping the model parameters.
Returns
-------
file_path
Path to the requested pretrained model file.
"""
file_name = '{name}-{short_hash}'.format(name=name,
short_hash=short_hash(name))
root = os.path.expanduser(root)
file_path = os.path.join(root, file_name+'.params')
sha1_hash = _model_sha1[name]
if os.path.exists(file_path):
if check_sha1(file_path, sha1_hash):
return file_path
else:
logging.warning('Mismatch in the content of model file detected. Downloading again.')
else:
logging.info('Model file not found. Downloading to %s.', file_path)
util.makedirs(root)
zip_file_path = os.path.join(root, file_name+'.zip')
repo_url = os.environ.get('MXNET_GLUON_REPO', apache_repo_url)
if repo_url[-1] != '/':
repo_url = repo_url + '/'
download(_url_format.format(repo_url=repo_url, file_name=file_name),
path=zip_file_path,
overwrite=True)
with zipfile.ZipFile(zip_file_path) as zf:
zf.extractall(root)
os.remove(zip_file_path)
if check_sha1(file_path, sha1_hash):
return file_path
else:
raise ValueError('Downloaded file has different hash. Please try again.') |
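A usage sketch, assuming this is the Gluon model-zoo helper and that 'resnet18_v1' is a name registered in _model_sha1 (both are assumptions here):

# Downloads and caches the parameter file on first use; later calls
# return the verified local path immediately.
params_path = get_model_file('resnet18_v1')
print(params_path)  # e.g. ~/.mxnet/models/resnet18_v1-<short_hash>.params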
python | def leaves_not_empty(self):
"""
        Return the list of non-empty leaves
in the tree rooted at this node,
in DFS order.
:rtype: list of :class:`~aeneas.tree.Tree`
"""
return [n for n in self.dfs if ((n.is_leaf) and (not n.is_empty))] |
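A small sketch of the property's behaviour with a duck-typed stand-in for the aeneas Tree API (the FakeNode class below is invented for illustration; only its dfs, is_leaf, and is_empty members mirror the real interface):

class FakeNode:
    def __init__(self, name, children=(), empty=False):
        self.name = name
        self.children = list(children)
        self.is_empty = empty

    @property
    def is_leaf(self):
        return not self.children

    @property
    def dfs(self):
        # Yield this node first, then its descendants: DFS pre-order.
        yield self
        for child in self.children:
            yield from child.dfs

root = FakeNode('root', [FakeNode('a'), FakeNode('b', empty=True)])
print([n.name for n in root.dfs if n.is_leaf and not n.is_empty])  # ['a']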
java | public static Properties getEnvVariables()
throws EnvironmentException
{
if( isUnix() )
{
return getUnixShellVariables();
}
if( isWindows() )
{
return getWindowsShellVariables();
}
String message = "Environment operations not supported on unrecognized operatings system";
UnsupportedOperationException cause = new UnsupportedOperationException( message );
throw new EnvironmentException( cause );
} |
python | def setup(app):
"""Setup."""
# add_html_theme is new in Sphinx 1.6+
if hasattr(app, 'add_html_theme'):
theme_path = get_html_theme_path()[0]
app.add_html_theme('bootstrap', os.path.join(theme_path, 'bootstrap')) |
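For context, a minimal conf.py sketch that would pick up the theme registered above (the 'bootstrap' name comes from the add_html_theme() call; the rest is standard Sphinx configuration):

# conf.py (sketch)
html_theme = 'bootstrap'
# On Sphinx < 1.6, where add_html_theme() is unavailable, the path returned
# by this package's get_html_theme_path() would be wired in manually instead:
# html_theme_path = get_html_theme_path()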
python | def get_extra_info(self, name, default=None):
"""Return transport specific data.
The following fields are available, in addition to the information
exposed by :meth:`Transport.get_extra_info`.
====================== ===============================================
Name Description
====================== ===============================================
``'ssl'`` The internal ``ssl.SSLObject`` instance used by
this transport.
``'sslctx'`` The ``ssl.SSLContext`` instance used to create
the SSL object.
====================== ===============================================
"""
if name == 'ssl':
return self._sslpipe.ssl_object
elif name == 'sslctx':
return self._sslpipe.context
else:
return super(SslTransport, self).get_extra_info(name, default) |
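A usage sketch, assuming transport is a connected instance of the class above:

ssl_object = transport.get_extra_info('ssl')      # internal ssl.SSLObject
ssl_context = transport.get_extra_info('sslctx')  # ssl.SSLContext used to create it
peer = transport.get_extra_info('peername')       # falls through to the base class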
java | public Integer getSubscriptionsCount(final QueryParams params) {
FluentCaseInsensitiveStringsMap map = doHEAD(Subscription.SUBSCRIPTION_RESOURCE, params);
return Integer.parseInt(map.getFirstValue(X_RECORDS_HEADER_NAME));
} |
java | private Observable<Indexable> processCompletedTaskAsync(final TaskGroupEntry<TaskItem> completedEntry,
final InvocationContext context) {
reportCompletion(completedEntry);
if (isRootEntry(completedEntry)) {
return Observable.empty();
} else {
return invokeReadyTasksAsync(context);
}
} |
java | public PagedList<SecretItem> listSecretVersions(final String vaultBaseUrl, final String secretName,
final Integer maxresults) {
return getSecretVersions(vaultBaseUrl, secretName, maxresults);
} |
java | protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buf) throws Exception {
        // Make sure we have received at least a task message header.
        long available = buf.readableBytes();
        // A full control message is 14 bytes (short code, long timestamp, int port);
        // the minimum length of a task message is 6 (short taskId, int length).
if (available < 6) {
// need more data
return null;
}
long startTime = 0L;
if (isServer && enableNettyMetrics) {
startTime = msgDecodeTimer.getTime();
}
try {
// Mark the current buffer position before reading task/len field
// because the whole frame might not be in the buffer yet.
// We will reset the buffer position to the marked position if
// there's not enough bytes in the buffer.
buf.markReaderIndex();
// read the short field
short code = buf.readShort();
available -= 2;
// case 1: Control message
ControlMessage ctrl_msg = ControlMessage.mkMessage(code);
if (ctrl_msg != null) {
if (available < 12) {
                // The timestamp and client port bytes were not received yet - return null.
buf.resetReaderIndex();
return null;
}
long timeStamp = buf.readLong();
int clientPort = buf.readInt();
available -= 12;
if (ctrl_msg == ControlMessage.EOB_MESSAGE) {
long interval = System.currentTimeMillis() - timeStamp;
if (interval < 0)
interval = 0;
if (enableNettyMetrics) {
AsmHistogram netTransTime = getTransmitHistogram(channel, clientPort);
if (netTransTime != null) {
netTransTime.update(interval * TimeUtils.NS_PER_US);
}
}
if (MetricUtils.metricAccurateCal)
NettyMetricInstance.networkWorkerTransmitTime.update(interval * TimeUtils.NS_PER_US);
}
NettyMetricInstance.nettyServerRecvSpeed.update(ctrl_msg.getEncodedLength());
return ctrl_msg;
}
// case 2: task Message
short type = code;
short task = buf.readShort();
available -= 2;
// Make sure that we have received at least an integer (length)
if (available < 4) {
// need more data
buf.resetReaderIndex();
return null;
}
// Read the length field.
int length = buf.readInt();
if (length <= 0) {
LOG.info("Receive one message whose TaskMessage's message length is {}", length);
return new TaskMessage(task, null);
}
        // Make sure there are enough bytes in the buffer.
available -= 4;
if (available < length) {
            // The whole payload has not been received yet - return null.
buf.resetReaderIndex();
return null;
}
// There's enough bytes in the buffer. Read it.
ChannelBuffer payload = buf.readBytes(length);
// Successfully decoded a frame.
// Return a TaskMessage object
byte[] rawBytes = payload.array();
// @@@ TESTING CODE
// LOG.info("Receive task:{}, length: {}, data:{}",
// task, length, JStormUtils.toPrintableString(rawBytes));
TaskMessage ret = new TaskMessage(type, task, rawBytes);
NettyMetricInstance.nettyServerRecvSpeed.update(rawBytes.length + 8);
return ret;
} finally {
if (isServer && enableNettyMetrics && msgDecodeTimer != null) {
msgDecodeTimer.updateTime(startTime);
}
}
} |
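The wire format the decoder above expects, sketched as Python struct packing (big-endian, matching Netty's default byte order; the field layout is read straight from the decoder, while the control code -100 and the payload are illustrative assumptions):

import struct
import time

# Task message frame: short type, short taskId, int payload length, payload.
payload = b"hello"
task_frame = struct.pack(">hhi", 1, 7, len(payload)) + payload  # 8 header + 5 payload bytes

# Control message frame: short control code, long timestamp (ms), int client port.
ctrl_frame = struct.pack(">hqi", -100, int(time.time() * 1000), 6700)  # 14 bytes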
python | def clean_comment(self):
"""
If COMMENTS_ALLOW_PROFANITIES is False, check that the comment doesn't
contain anything in PROFANITIES_LIST.
"""
comment = self.cleaned_data["text"]
if settings.COMMENTS_ALLOW_PROFANITIES is False:
bad_words = [w for w in settings.PROFANITIES_LIST if w in comment.lower()]
if bad_words:
raise forms.ValidationError(ungettext(
"Watch your mouth! The word %s is not allowed here.",
"Watch your mouth! The words %s are not allowed here.",
len(bad_words)) % get_text_list(
['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1])
for i in bad_words], ugettext('and')))
return comment |
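The masking expression in the error message keeps only the first and last character of each offending word; a standalone sketch of that piece:

words = ['darn', 'heck']
masked = ['"%s%s%s"' % (w[0], '-' * (len(w) - 2), w[-1]) for w in words]
print(masked)  # ['"d--n"', '"h--k"']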
java | public boolean remove(V value) {
// peek at what is in the map
K key = value.getKey();
Ref<V> ref = map.get(key);
// only try to remove the mapping if it matches the provided class loader
return (ref != null && ref.get() == value) ? map.remove(key, ref) : false;
} |
python | def autopilot_version_send(self, capabilities, flight_sw_version, middleware_sw_version, os_sw_version, board_version, flight_custom_version, middleware_custom_version, os_custom_version, vendor_id, product_id, uid, force_mavlink1=False):
'''
Version and capability of autopilot software
capabilities : bitmask of capabilities (see MAV_PROTOCOL_CAPABILITY enum) (uint64_t)
flight_sw_version : Firmware version number (uint32_t)
middleware_sw_version : Middleware version number (uint32_t)
os_sw_version : Operating system version number (uint32_t)
board_version : HW / board version (last 8 bytes should be silicon ID, if any) (uint32_t)
                flight_custom_version : Custom version field, commonly the first 8 bytes of the git hash. This is not a unique identifier, but should allow identifying the commit using the main version number even for very large code bases. (uint8_t)
                middleware_custom_version : Custom version field, commonly the first 8 bytes of the git hash. This is not a unique identifier, but should allow identifying the commit using the main version number even for very large code bases. (uint8_t)
                os_custom_version : Custom version field, commonly the first 8 bytes of the git hash. This is not a unique identifier, but should allow identifying the commit using the main version number even for very large code bases. (uint8_t)
vendor_id : ID of the board vendor (uint16_t)
product_id : ID of the product (uint16_t)
uid : UID if provided by hardware (uint64_t)
'''
return self.send(self.autopilot_version_encode(capabilities, flight_sw_version, middleware_sw_version, os_sw_version, board_version, flight_custom_version, middleware_custom_version, os_custom_version, vendor_id, product_id, uid), force_mavlink1=force_mavlink1) |
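A usage sketch, assuming mav is a pymavlink-style MAVLink connection exposing this method; every field value below is an illustration, not real hardware data:

mav.autopilot_version_send(
    capabilities=0,                   # no MAV_PROTOCOL_CAPABILITY bits set
    flight_sw_version=0x01020300,     # e.g. version 1.2.3
    middleware_sw_version=0,
    os_sw_version=0,
    board_version=0,
    flight_custom_version=[0] * 8,    # first 8 bytes of a git hash
    middleware_custom_version=[0] * 8,
    os_custom_version=[0] * 8,
    vendor_id=0,
    product_id=0,
    uid=0)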
java | public boolean contains(Right right) {
if (isFull) {
return true;
}
if (right.isFull) {
return false;
}
if (!containsRights(isFullSelect, selectColumnSet,
right.selectColumnSet, right.isFullSelect)) {
return false;
}
if (!containsRights(isFullInsert, insertColumnSet,
right.insertColumnSet, right.isFullInsert)) {
return false;
}
if (!containsRights(isFullUpdate, updateColumnSet,
right.updateColumnSet, right.isFullUpdate)) {
return false;
}
if (!containsRights(isFullReferences, referencesColumnSet,
right.referencesColumnSet,
right.isFullReferences)) {
return false;
}
if (!containsRights(isFullTrigger, triggerColumnSet,
right.triggerColumnSet, right.isFullTrigger)) {
return false;
}
if (!isFullDelete && right.isFullDelete) {
return false;
}
return true;
} |
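A plausible Python reading of the containsRights helper used throughout the method above; its real implementation is not part of this excerpt, so the sketch below is an assumption consistent with how it is called:

def contains_rights(is_full, column_set, other_set, other_is_full):
    # A full grant for this operation contains any grant.
    if is_full:
        return True
    # A partial grant can never contain a full one.
    if other_is_full:
        return False
    # Otherwise the other grant's columns must be a subset of ours.
    return other_set is None or (column_set is not None
                                 and column_set >= other_set)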