language (stringclasses, 2 values) | func_code_string (stringlengths, 63–466k) |
---|---|
java | public void addPoint(float x, float y) {
if (hasVertex(x,y) && (!allowDups)) {
return;
}
ArrayList tempPoints = new ArrayList();
for(int i=0;i<points.length;i++) {
tempPoints.add(new Float(points[i]));
}
tempPoints.add(new Float(x));
tempPoints.add(new Float(y));
int length = tempPoints.size();
points = new float[length];
for(int i=0;i<length;i++) {
points[i] = ((Float)tempPoints.get(i)).floatValue();
}
if(x > maxX) {
maxX = x;
}
if(y > maxY) {
maxY = y;
}
if(x < minX) {
minX = x;
}
if(y < minY) {
minY = y;
}
findCenter();
calculateRadius();
pointsDirty = true;
} |
java | protected final JSType getRestrictedWithoutNull(JSType type) {
return type == null ? null : type.visit(restrictNullVisitor);
} |
java | public void config(Config config) throws ConfigException, HibernateException
{
log.trace("config(Config)");
// Hibernate configuration class is the session factory builder
Configuration configuration = null;
// The Hibernate configuration resource defaults to the built-in value for zero-config (when the config
// parameter is null), or is read from the config parameter's "config" attribute otherwise
String configResource = config != null ? config.getAttribute("config") : DEFAULT_CONFIG;
if(configResource != null) {
log.debug("Configure Hibernate from configuration resource |%s|.", configResource);
configuration = new Configuration();
configuration.configure(configResource);
}
else {
// at this point config parameter is not null
log.debug("Configure Hibernate from j(s)-lib configuration object.");
configuration = hibernateConfiguration(config);
}
String timeout = configuration.getProperty("hibernate.transaction.timeout");
this.transactionTimeout = timeout != null ? Integer.parseInt(timeout) : 0;
String driverClassName = configuration.getProperty("hibernate.connection.driver_class");
if(driverClassName == null) {
throw new ConfigException("Missing driver class, e.g. property name='hibernate.connection.driver_class' and value 'com.mysql.jdbc.Driver'");
}
log.debug("Load database driver |%s|.", driverClassName);
Classes.forName(driverClassName);
log.debug("Create Hibernate session factory.");
sessionFactory = configuration.buildSessionFactory();
} |
python | def create_version(self, project_id_or_key, version_name, extra_request_params=None):
"""
client = BacklogClient("your_space_name", "your_api_key")
client.create_version("YOUR_PROJECT",
"VERSION_NAME",
{"description": "version description"})
"""
request_params = dict(extra_request_params or {})  # copy to avoid mutating a shared default dict
request_params["name"] = version_name
return self.do("POST", "projects/{project_id_or_key}/versions",
url_params={"project_id_or_key": project_id_or_key},
request_params=request_params,
) |
python | def upsert(self, document, cond):
"""
Update a document if it exists, insert it otherwise.
Note: this will update *all* documents matching the query.
:param document: the document to insert or the fields to update
:param cond: which document to look for
:returns: a list containing the updated document's ID
"""
updated_docs = self.update(document, cond)
if updated_docs:
return updated_docs
else:
return [self.insert(document)] |
java | public static Map<String,String> objectToMap(Object object,String... ignore){
Map<String,String> tempMap = new LinkedHashMap<String, String>();
for(Field f : getAllFields(object.getClass())){
if(!f.isAccessible()){
f.setAccessible(true);
}
boolean ig = false;
if(ignore!=null&&ignore.length>0){
for(String i : ignore){
if(i.equals(f.getName())){
ig = true;
break;
}
}
}
if(ig){
continue;
}else{
Object o = null;
try {
o = f.get(object);
} catch (IllegalArgumentException e) {
logger.error("", e);
} catch (IllegalAccessException e) {
logger.error("", e);
}
tempMap.put(f.getName(), o==null?"":o.toString());
}
}
return tempMap;
} |
python | def setup_pathing(self):
"""Format pathing for S3 deployments."""
self.s3_version_uri = self._path_formatter(self.version)
self.s3_latest_uri = self._path_formatter("LATEST")
self.s3_canary_uri = self._path_formatter("CANARY")
self.s3_alpha_uri = self._path_formatter("ALPHA")
self.s3_mirror_uri = self._path_formatter("MIRROR") |
python | def remove_hooks(target, **hooks):
"""
Remove the given hooks from the given target.
:param target: The object from which to remove hooks. If all hooks are removed from a given method, the
HookedMethod object will be removed and replaced with the original function.
:param hooks: Any keywords will be interpreted as hooks to remove. You must provide the exact hook that was applied
so that it can be identified for removal among any other hooks.
"""
for name, hook in hooks.items():
hooked = getattr(target, name)
if hook in hooked.pending:
try:
hooked.pending.remove(hook)
except ValueError as e:
raise ValueError("%s is not hooked by %s" % (target, hook)) from e
if not hooked.pending:
setattr(target, name, hooked.func) |
java | public static Condition startsWithUri(final String uri) {
return new Condition(input -> input.getUri().startsWith(uri));
} |
python | def k_nearest(self, vec, k):
"""Get the k nearest neighbors of a vector (in terms of highest inner products).
:param (np.array) vec: query vector
:param (int) k: number of top neighbors to return
:return (list[tuple[str, float]]): a list of (word, score) pairs, in descending order
"""
nbr_score_pairs = self.inner_products(vec)
return sorted(nbr_score_pairs.items(), key=lambda x: x[1], reverse=True)[:k] |
java | public static <T> Subscription includeWhen(
Collection<T> collection,
T element,
ObservableValue<Boolean> condition) {
return subscribe(condition, new Consumer<Boolean>() {
private boolean included = false;
@Override
public void accept(Boolean value) {
if(value && !included) {
included = collection.add(element);
} else if(!value && included) {
collection.remove(element);
included = false;
}
}
});
} |
python | def matches(self, filter_props):
"""Check if the filter matches the supplied properties."""
if filter_props is None:
return False
found_one = False
for key, value in filter_props.items():
if key in self.properties and value != self.properties[key]:
return False
elif key in self.properties and value == self.properties[key]:
found_one = True
return found_one |
java | @Override
public T call() throws Exception {
String contextualMethods = threadContextDescriptor.getExecutionProperties().get(WSContextService.CONTEXTUAL_METHODS);
boolean applyContext = contextualMethods == null || Arrays.asList(contextualMethods.split(",")).contains("call");
T result;
ArrayList<ThreadContext> contextAppliedToThread = applyContext ? threadContextDescriptor.taskStarting() : null;
try {
result = object.call();
} finally {
if (applyContext)
threadContextDescriptor.taskStopping(contextAppliedToThread);
}
return result;
} |
java | public static MavenCoord fromGAVPC(String coordGavpc)
{
Matcher mat = REGEX_GAVCP.matcher(coordGavpc);
if (!mat.matches())
throw new IllegalArgumentException("Wrong Maven coordinates format, must be G:A:V[:P[:C]] . " + coordGavpc);
return new MavenCoord()
.setGroupId(mat.group(1))
.setArtifactId(mat.group(2))
.setVersion(mat.group(3))
.setPackaging(mat.group(4))
.setClassifier(mat.group(5));
} |
python | def write(self, buf):
"""Write bytes to the stream."""
underflow = self._audio_stream.write(buf)
if underflow:
logging.warning('SoundDeviceStream write underflow (size: %d)',
len(buf))
return len(buf) |
python | def making_blockstr(varblock,count,colorline,element,zoomblock,filename,sidebarstring,colorkeyfields):
# starting wrapper that comes before html table code
'''
if not colorkeyfields == False:
start = """\n\tfunction addDataToMap%s(data, map) {\t\tvar skillsSelect = document.getElementById("mapStyle");\n\t\tvar selectedText = skillsSelect.options[skillsSelect.selectedIndex].text;\n\t\tvar selectedText = 'COLORKEY_' + selectedText\n\t\tvar dataLayer = L.geoJson(data, {\n\t\t\tonEachFeature: function(feature, layer) {""" % (count)
else:
'''
start = """\n\tfunction addDataToMap%s(data, map) {\n\t\tvar dataLayer = L.geoJson(data, {\n\t\t\tonEachFeature: function(feature, layer) {""" % (count)
# ending wrapper that comes after html table code
if count == 1 and colorkeyfields == False:
end = """
layer.bindPopup(popupText, {autoPan:false, maxHeight:500, maxWidth:350} ); }
});
dataLayer.addTo(map);
console.log(map.fitBounds(dataLayer.getBounds()))};\n\t};"""
else:
end = """
layer.bindPopup(popupText, {autoPan:false, maxHeight:500, maxWidth:350} ); }
});
dataLayer.addTo(map);
\n\t};\n\t}"""
'''
else:
end="""
layer.bindPopup(popupText, {autoPan:false, maxHeight:500, maxWidth:350} ); };
});
dataLayer.addTo(map);\nconsole.log(map.fitBounds(dataLayer.getBounds()));\n\t\tsetTimeout(function() {\n\t\t\t\tdataLayer.clearLayers();\n\t\t},%s);\n\t}\n}\nsetInterval(add%s,%s)""" % (time,count,time)
'''
# iterates through each varblock and returns the entire bindings javascript block
total = ''
# logic for appending check_dropdown line to zoomblock
if not zoomblock == '' and not colorkeyfields == False:
pass
# logic for replacing the datalayer add to line with the zoom text block
if not zoomblock == '':
end = end.replace('dataLayer.addTo(map);',zoomblock)
for row in varblock:
total += row
if element == 'Point':
return start + total + colorline + sidebarstring + end
else:
return start + total + '\n' + colorline + sidebarstring + end |
java | private void serializeAliasedDiscoveryConfig(
JsonObject object, String tag, AliasedDiscoveryConfig config) {
if (config != null) {
object.add(tag, new AliasedDiscoveryConfigDTO(config).toJson());
}
} |
java | @Override
public void start() throws Exception
{
try
{
reRegisterServices();
}
catch ( KeeperException e )
{
log.error("Could not register instances - will try again later", e);
}
client.getConnectionStateListenable().addListener(connectionStateListener);
} |
java | @Override
public String getAuthType() {
String type = null;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "getAuthType: " + type);
}
return type;
} |
java | public static GatewayTimeout of(int errorCode, Throwable cause) {
if (_localizedErrorMsg()) {
return of(errorCode, cause, defaultMessage(GATEWAY_TIMEOUT));
} else {
touchPayload().errorCode(errorCode).cause(cause);
return _INSTANCE;
}
} |
java | @POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Override
public String getMessageToClient(@FormParam(Constants.Message.MFC) String json) {
HttpSession httpSession = getHttpSession();
setContext(httpSession);
MessageFromClient message = MessageFromClient.createFromJson(json);
MessageToClient mtc = getMessageToClientService().createMessageToClient(message, httpSession);
return mtc.toJson();
} |
python | def bp_commands(self, frame, breakpoint_hits):
"""Call every command that was set for the current active breakpoints.
Returns True if the normal interaction function must be called,
False otherwise."""
# Handle multiple breakpoints on the same line (issue 14789)
effective_bp_list, temporaries = breakpoint_hits
silent = True
doprompt = False
atleast_one_cmd = False
for bp in effective_bp_list:
if bp in self.commands:
if not atleast_one_cmd:
atleast_one_cmd = True
self.setup(frame, None)
lastcmd_back = self.lastcmd
for line in self.commands[bp]:
self.onecmd(line)
self.lastcmd = lastcmd_back
if not self.commands_silent[bp]:
silent = False
if self.commands_doprompt[bp]:
doprompt = True
# Delete the temporary breakpoints.
tmp_to_delete = ' '.join(str(bp) for bp in temporaries)
if tmp_to_delete:
self.do_clear(tmp_to_delete)
if atleast_one_cmd:
return doprompt, silent
return None |
java | private void init() {
setLayout(new BorderLayout());
initToolBar();
contentPane = new JPanel();
contentPane.setLayout(new GridLayout(0, 1, 0, 0));
contentPane.setBackground(Color.WHITE);
add(contentPane, BorderLayout.CENTER);
} |
java | private <L extends UriBaseListener> void analyzeInternal(final String input, L listener) {
pathSegmentIndex = -1;
walker.walk(listener, prepareUri(input).value0);
} |
java | public final Tuple7<T10, T11, T12, T13, T14, T15, T16> skip9() {
return new Tuple7<>(v10, v11, v12, v13, v14, v15, v16);
} |
java | public long guessNextBCFRecordStart(long beg, long end)
throws IOException
{
// Buffer what we need to go through.
byte[] arr = new byte[
bgzf ? BGZF_MAX_BYTES_READ : UNCOMPRESSED_BYTES_NEEDED];
this.inFile.seek(beg);
int totalRead = 0;
for (int left = Math.min((int)(end - beg), arr.length); left > 0;) {
final int r = inFile.read(arr, totalRead, left);
if (r < 0)
break;
totalRead += r;
left -= r;
}
arr = Arrays.copyOf(arr, totalRead);
this.in = new ByteArraySeekableStream(arr);
final int firstBGZFEnd;
if (this.bgzf) {
firstBGZFEnd = Math.min((int)(end - beg), 0xffff);
BlockCompressedInputStream bgzfStream =
new BlockCompressedInputStream(this.in);
bgzfStream.setCheckCrcs(true);
this.cin = bgzfStream;
} else {
this.cin = this.in;
firstBGZFEnd = 0; // Actually unused
}
// cp: Compressed Position, indexes the entire BGZF input. If
// we have uncompressed BCF, this loop does nothing.
for (int cp = 0;; ++cp) {
final int cp0;
final long cp0Virt;
final int blockLen;
if (this.bgzf) {
final PosSize psz = guessNextBGZFPos(cp, firstBGZFEnd);
if (psz == null)
break;
cp0 = cp = psz.pos;
cp0Virt = (long)cp0 << 16;
try {
cinSeek(cp0Virt);
// This has to catch Throwable, because it's possible to get an
// OutOfMemoryError due to an overly large size.
} catch (Throwable e) {
// Guessed BGZF position incorrectly: try the next guess.
continue;
}
blockLen = psz.size;
} else {
cp0 = 0; // Actually unused
cp0Virt = 0;
blockLen = Math.max(arr.length, UNCOMPRESSED_BYTES_NEEDED);
}
// up: Uncompressed Position, indexes the data inside the BGZF block.
for (int up = 0;; ++up) {
final int up0 = up = guessNextBCFPos(cp0Virt, up, blockLen);
if (up0 < 0) {
// No BCF records found in the BGZF block: try the next BGZF
// block.
break;
}
// Verification time.
cinSeek(cp0Virt | up0);
final PositionalBufferedStream pbIn =
new PositionalBufferedStream(cin);
boolean decodedAny = false;
try {
if (bgzf) {
byte b = 0;
int prevCP = cp0;
while (b < BGZF_BLOCKS_NEEDED_FOR_GUESS && pbIn.peek() != -1)
{
bcfCodec.decode(pbIn);
decodedAny = true;
final int cp2 = (int)
(((BlockCompressedInputStream)cin).getFilePointer()
>>> 16);
if (cp2 != prevCP) {
// The compressed position changed so we must be in a
// new block.
assert cp2 > prevCP;
cp = cp2;
++b;
}
}
// Running out of records to verify is fine as long as we
// verified at least something. It should only happen if we
// couldn't fill the array.
if (b < BGZF_BLOCKS_NEEDED_FOR_GUESS) {
assert arr.length < BGZF_MAX_BYTES_READ;
if (!decodedAny)
continue;
}
} else {
while (pbIn.getPosition() - up0 < UNCOMPRESSED_BYTES_NEEDED
&& pbIn.peek() != -1)
{
bcfCodec.decode(pbIn);
decodedAny = true;
}
// As in the BGZF case.
if (pbIn.getPosition() - up0 < UNCOMPRESSED_BYTES_NEEDED) {
assert arr.length < UNCOMPRESSED_BYTES_NEEDED;
if (!decodedAny)
continue;
}
}
} catch (FileTruncatedException e) { continue; }
catch (OutOfMemoryError e) { continue; }
catch (RuntimeEOFException e) { continue; }
catch (TribbleException e) {
// This is the way in which BCF2Codec reports unexpected EOF.
// Unfortunately, it also reports every other kind of error with
// the same exception. It even wraps IOException in
// TribbleException!
//
// We need to catch EOF in the middle of a record, which can
// happen legitimately if the [beg,end) range is too small and
// cuts off a record. First, require decodedAny, and then, assume
// that this exception means EOF if the stream has hit EOF.
if (!(decodedAny && pbIn.peek() == -1))
continue;
}
return this.bgzf ? beg+cp0 << 16 | up0 : beg + up0;
}
if (!this.bgzf)
break;
}
return end;
} |
java | @Override
public CommerceNotificationAttachment create(
long commerceNotificationAttachmentId) {
CommerceNotificationAttachment commerceNotificationAttachment = new CommerceNotificationAttachmentImpl();
commerceNotificationAttachment.setNew(true);
commerceNotificationAttachment.setPrimaryKey(commerceNotificationAttachmentId);
String uuid = PortalUUIDUtil.generate();
commerceNotificationAttachment.setUuid(uuid);
commerceNotificationAttachment.setCompanyId(companyProvider.getCompanyId());
return commerceNotificationAttachment;
} |
java | public static ProbeSender createMulticastProbeSender(String niName) throws TransportConfigException {
Transport mcastTransport = new MulticastTransport(niName);
ProbeSender gen = new ProbeSender(mcastTransport);
return gen;
} |
java | public boolean isDefault() {
return (valueAsString != null && valueAsString.equals(defaultValueAsString)) ||
(valueAsString == null && defaultValueAsString == null);
} |
java | private void commitEntries() {
raft.checkThread();
// Sort the list of replicas, order by the last index that was replicated
// to the replica. This will allow us to determine the median index
// for all known replicated entries across all cluster members.
List<RaftMemberContext> members = raft.getCluster().getActiveMemberStates((m1, m2) ->
Long.compare(m2.getMatchIndex() != 0 ? m2.getMatchIndex() : 0L, m1.getMatchIndex() != 0 ? m1.getMatchIndex() : 0L));
// If the active members list is empty (a configuration change occurred between an append request/response)
// ensure all commit futures are completed and cleared.
if (members.isEmpty()) {
long commitIndex = raft.getLogWriter().getLastIndex();
long previousCommitIndex = raft.setCommitIndex(commitIndex);
if (commitIndex > previousCommitIndex) {
log.trace("Committed entries up to {}", commitIndex);
completeCommits(previousCommitIndex, commitIndex);
}
return;
}
// Calculate the current commit index as the median matchIndex.
long commitIndex = members.get(getQuorumIndex()).getMatchIndex();
// If the commit index has increased then update the commit index. Note that in order to ensure
// the leader completeness property holds, we verify that the commit index is greater than or equal to
// the index of the leader's no-op entry. Update the commit index and trigger commit futures.
long previousCommitIndex = raft.getCommitIndex();
if (commitIndex > 0 && commitIndex > previousCommitIndex && (leaderIndex > 0 && commitIndex >= leaderIndex)) {
log.trace("Committed entries up to {}", commitIndex);
raft.setCommitIndex(commitIndex);
completeCommits(previousCommitIndex, commitIndex);
}
} |
java | public static DefaultFileSet.Builder with(BaseFolder baseFolder, AnnotatedFile manifest, String manifestPath) {
return new Builder(baseFolder, manifest, manifestPath);
} |
java | public static <T> Set<T> getRandomSubset(Set<T> set, int count) {
Set<T> result = new HashSet<>();
HashList<T> opSet = new HashList<>(set);
while (result.size() < count) {
int next = rand.nextInt(opSet.size());
if (!result.contains(opSet.get(next))) {
result.add(opSet.get(next));
}
}
return result;
} |
python | def populate_local_cache(self):
"""Populate the local cache from DB.
Read the entries from the FW DB and call routines to populate the cache.
"""
fw_dict = self.get_all_fw_db()
for fw_id in fw_dict:
LOG.info("Populating cache for FW %s", fw_id)
fw_data = fw_dict[fw_id]
self.populate_local_cache_tenant(fw_id, fw_data) |
java | private List<Integer> getSplitPositions(String text) {
ArrayList<Integer> splitPositions = new ArrayList<>();
int position;
int currentPosition = 0;
while (true) {
int indexOfMaru = text.indexOf("。", currentPosition);
int indexOfTen = text.indexOf("、", currentPosition);
if (indexOfMaru < 0 || indexOfTen < 0) {
position = Math.max(indexOfMaru, indexOfTen);
} else {
position = Math.min(indexOfMaru, indexOfTen);
}
if (position >= 0) {
splitPositions.add(position);
currentPosition = position + 1;
} else {
break;
}
}
return splitPositions;
} |
python | def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport read callback. This is called by ST to request that data
be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, 'timed out')
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal |
java | @Deprecated
public void addSslListener(InetSocketAddress addr, String keystore,
String storPass, String keyPass) throws IOException {
if (webServer.isStarted()) {
throw new IOException("Failed to add ssl listener");
}
SslSocketConnector sslListener = new SslSocketConnector();
sslListener.setHost(addr.getAddress().getHostAddress());
sslListener.setPort(addr.getPort());
sslListener.setKeystore(keystore);
sslListener.setPassword(storPass);
sslListener.setKeyPassword(keyPass);
webServer.addConnector(sslListener);
} |
java | @Override
protected boolean validateSettings() throws IllegalArgumentException {
// Validate values which do not require user input first to avoid unnecessary
// work on the user's part.
// dates
Date startDate = getStartDate();
Date stopDate = getStopDate();
if (startDate != null && stopDate != null) {
if (startDate.after(stopDate)) {
// stop date is before the start date.
throw new IllegalArgumentException(getLocalizedString("BL_MINDATE_AFTER_MAXDATE"));
}
}
// Validate levels if specified.
Level minLevel = getMinLevel();
Level maxLevel = getMaxLevel();
if (maxLevel != null && minLevel != null) {
// We have both a max and min level. Check that the min level is
// equal to or less than the max level
if (minLevel.intValue() > maxLevel.intValue()) {
// Min is greater than Max level. Report error
throw new IllegalArgumentException(getLocalizedString("BL_MINLEVEL_GREATER_THAN_MAXLEVEL"));
}
}
// Required options
try {
setBinaryRepositoryDir(unvalidatedRepositoryDir.getCanonicalPath());
System.out.println(getLocalizedParmString("BL_REPOSITORY_DIRECTORY", new Object[] { getValidatedBinaryRepositoryDir() }));
} catch (IOException e) {
throw new IllegalArgumentException(getLocalizedParmString("BL_INVALID_REPOSITORYDIR", new Object[] { unvalidatedRepositoryDir }));
}
// in case of copy action, we have an unvalidatedTargetDir set
try {
if (unvalidatedTargetDir != null) {
String dir = unvalidatedTargetDir.getCanonicalPath();
setOutputRepositoryDir(dir);
System.out.println(getLocalizedParmString("BL_TARGET_DIRECTORY", new Object[] { dir }));
}
} catch (IOException e) {
throw new IllegalArgumentException(getLocalizedParmString("BL_INVALID_TARGETDIR", new Object[] { unvalidatedTargetDir }));
}
return false;
} |
python | def _reset(self):
'''
_reset - reset this object. Assigned to .reset after __init__ call.
'''
HTMLParser.reset(self)
self.root = None
self.doctype = None
self._inTag = [] |
java | public <T> T doWithoutTransaction(final SpecificTxAction<T, C> action) {
checkNotx();
return template.doInTransaction(new TxConfig(OTransaction.TXTYPE.NOTX), action);
} |
python | def patches(self, basemap, simplify=None, predicate=None, args_f=None, **kwargs):
"""
Return geodata as a list of Matplotlib patches
:param basemap: A mpl_toolkits.basemap.Basemap
:param simplify: Integer or None. Simplify the geometry to a tolerance, in the units of the geometry.
:param predicate: A single-argument function to select which records to include in the output.
:param args_f: A function that takes a row and returns a dict of additional args for the Patch constructor
:param kwargs: Additional args to be passed to the descartes Path constructor
:return: A list of patch objects
"""
from descartes import PolygonPatch
from shapely.wkt import loads
from shapely.ops import transform
if not predicate:
predicate = lambda row: True
def map_xform(x, y, z=None):
return basemap(x, y)
def make_patch(shape, row):
args = dict(kwargs.items())
if args_f:
args.update(args_f(row))
return PolygonPatch(transform(map_xform, shape), **args)
def yield_patches(row):
if simplify:
shape = loads(row.geometry).simplify(simplify)
else:
shape = loads(row.geometry)
if shape.geom_type == 'MultiPolygon':
for subshape in shape.geoms:
yield make_patch(subshape, row)
else:
yield make_patch(shape, row)
return [patch for row in self if predicate(row)
for patch in yield_patches(row)] |
java | public boolean pushAll(V firstValue, V... moreValues) {
check();
context.getValueStack().pushAll(firstValue, moreValues);
return true;
} |
java | public final void mLIKE() throws RecognitionException {
try {
int _type = LIKE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// hql.g:42:6: ( 'like' )
// hql.g:42:8: 'like'
{
match("like"); if (state.failed) return;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
} |
java | public static CmsResource convertRawResource(CmsObject cms, Object input) throws CmsException {
CmsResource result;
if (input instanceof CmsResource) {
// input is already a resource
result = (CmsResource)input;
} else if (input instanceof String) {
if (CmsUUID.isValidUUID((String)input)) {
// input is a UUID as String
result = cms.readResource(CmsUUID.valueOf((String)input));
} else {
// input is a path as String
result = cms.readResource(cms.getRequestContext().removeSiteRoot((String)input));
}
} else if (input instanceof CmsUUID) {
// input is a UUID
result = cms.readResource((CmsUUID)input);
} else {
// input doesn't seem to make sense; try to use it as a String
result = cms.readResource(String.valueOf(input));
}
return result;
} |
python | def mk_getkw(kw, defaults,prefer_passed=False):
'''
a helper for generating a function for reading keywords in
interface functions with a dictionary with defaults
expects the defaults dictionary to have keywords you request.
example:
defaults = dict(a='a',b=3);
def bigfunc(**kw):
getkw=mk_getkw(kw,defaults);
# I want the callers' `a', or the default if s/he doesn't
# supply it
a=getkw('a');
c = [a]*getkw('b');
return c,c[0];
Option:
prefer_passed -- use "l in kw" only, not test.
'''
def getkw(*ls):
r = [ kw[l] if test(kw,l) else defaults[l]
for l in ls ];
if len(r) == 1: return r[0];
return r;
def getkw_prefer_passed(*ls):
r = [ kw[l] if l in kw else defaults[l]
for l in ls ];
if len(r) == 1: return r[0];
return r;
return getkw if not prefer_passed else getkw_prefer_passed; |
java | private static String encode(String pathElement) {
try {
return URLEncoder.encode(pathElement, Charsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
throw Throwables.propagate(e); // Should never happen
}
} |
python | def get_speaker_info(self, refresh=False, timeout=None):
"""Get information about the Sonos speaker.
Arguments:
refresh(bool): Refresh the speaker info cache.
timeout: How long to wait for the server to send
data before giving up, as a float, or a
`(connect timeout, read timeout)` tuple
e.g. (3, 5). Default is no timeout.
Returns:
dict: Information about the Sonos speaker, such as the UID,
MAC Address, and Zone Name.
"""
if self.speaker_info and refresh is False:
return self.speaker_info
else:
response = requests.get('http://' + self.ip_address +
':1400/xml/device_description.xml',
timeout=timeout)
dom = XML.fromstring(response.content)
device = dom.find('{urn:schemas-upnp-org:device-1-0}device')
if device is not None:
self.speaker_info['zone_name'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}roomName')
# no zone icon in device_description.xml -> player icon
self.speaker_info['player_icon'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}iconList/'
'{urn:schemas-upnp-org:device-1-0}icon/'
'{urn:schemas-upnp-org:device-1-0}url'
)
self.speaker_info['uid'] = self.uid
self.speaker_info['serial_number'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}serialNum')
self.speaker_info['software_version'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}softwareVersion')
self.speaker_info['hardware_version'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}hardwareVersion')
self.speaker_info['model_number'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}modelNumber')
self.speaker_info['model_name'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}modelName')
self.speaker_info['display_version'] = device.findtext(
'{urn:schemas-upnp-org:device-1-0}displayVersion')
# no mac address - extract from serial number
mac = self.speaker_info['serial_number'].split(':')[0]
self.speaker_info['mac_address'] = mac
return self.speaker_info
return None |
python | def get_aws_secrets_from_file(credentials_file): # type: (str) -> Set[str]
"""Extract AWS secrets from configuration files.
Read an ini-style configuration file and return a set with all found AWS
secret access keys.
"""
aws_credentials_file_path = os.path.expanduser(credentials_file)
if not os.path.exists(aws_credentials_file_path):
return set()
parser = configparser.ConfigParser()
try:
parser.read(aws_credentials_file_path)
except configparser.MissingSectionHeaderError:
return set()
keys = set()
for section in parser.sections():
for var in (
'aws_secret_access_key', 'aws_security_token',
'aws_session_token',
):
try:
key = parser.get(section, var).strip()
if key:
keys.add(key)
except configparser.NoOptionError:
pass
return keys |
java | @Override
public void setParameters(Object[] params) throws CDKException {
if (params.length != 1) {
throw new CDKException("AromaticBondsCountDescriptor expects one parameter");
}
if (!(params[0] instanceof Boolean)) {
throw new CDKException("The first parameter must be of type Boolean");
}
// ok, all should be fine
checkAromaticity = (Boolean) params[0];
} |
python | def _combine_to_jointcaller(processed):
"""Add joint calling information to variants, while collapsing independent regions.
"""
by_vrn_file = collections.OrderedDict()
for data in (x[0] for x in processed):
key = (tz.get_in(("config", "algorithm", "jointcaller"), data), data["vrn_file"])
if key not in by_vrn_file:
by_vrn_file[key] = []
by_vrn_file[key].append(data)
out = []
for grouped_data in by_vrn_file.values():
cur = grouped_data[0]
out.append([cur])
return out |
java | public Observable<ServiceResponse<UUID>> createPatternAnyEntityRoleWithServiceResponseAsync(UUID appId, String versionId, UUID entityId, CreatePatternAnyEntityRoleOptionalParameter createPatternAnyEntityRoleOptionalParameter) {
if (this.client.endpoint() == null) {
throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
}
if (appId == null) {
throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
}
if (versionId == null) {
throw new IllegalArgumentException("Parameter versionId is required and cannot be null.");
}
if (entityId == null) {
throw new IllegalArgumentException("Parameter entityId is required and cannot be null.");
}
final String name = createPatternAnyEntityRoleOptionalParameter != null ? createPatternAnyEntityRoleOptionalParameter.name() : null;
return createPatternAnyEntityRoleWithServiceResponseAsync(appId, versionId, entityId, name);
} |
java | private String buildGoogMsgVarNameHelper(MsgNode msgNode) {
// NOTE: MSG_UNNAMED/MSG_EXTERNAL are special tokens recognized by the jscompiler. MSG_UNNAMED
// disables the default logic that requires all messages to be uniquely named,
// and MSG_EXTERNAL causes the jscompiler to not extract these messages.
String desiredName =
jsSrcOptions.googMsgsAreExternal()
? "MSG_EXTERNAL_" + MsgUtils.computeMsgIdForDualFormat(msgNode)
: "MSG_UNNAMED";
return translationContext.nameGenerator().generateName(desiredName);
} |
python | def set_fig_title(self, title, **kwargs):
"""Set overall figure title.
Set title for overall figure. This is not for a specific plot.
It will place the title at the top of the figure with a call to ``fig.suptitle``.
Args:
title (str): Figure title.
Keyword Arguments:
x/y (float, optional): The x/y location of the text in figure coordinates.
Defaults are 0.5 for x and 0.98 for y.
horizontalalignment/ha (str, optional): The horizontal alignment of
the text relative to (x, y). Options are 'center', 'left', or 'right'.
Default is 'center'.
verticalalignment/va (str, optional): The vertical alignment of the text
relative to (x, y). Options are 'top', 'center', 'bottom',
or 'baseline'. Default is 'top'.
fontsize/size (int, optional): The font size of the text. Default is 20.
"""
prop_default = {
'fontsize': 20,
}
for prop, default in prop_default.items():
kwargs[prop] = kwargs.get(prop, default)
self.figure.fig_title = title
self.figure.fig_title_kwargs = kwargs
return |
python | def read_xdg_config_dirs(name, extension):
"""
Read from files found in XDG-specified system-wide configuration paths,
defaulting to ``/etc/xdg``. Depends on ``XDG_CONFIG_DIRS`` environment
variable.
:param name: application or configuration set name
:param extension: file extension to look for
:return: a `.Configuration` instance with values read from XDG-specified
directories
"""
# find optional value of ${XDG_CONFIG_DIRS}
config_dirs = environ.get('XDG_CONFIG_DIRS')
if config_dirs:
# PATH-like env vars operate in decreasing precedence, reverse this path set to mimic the end result
config_dirs = reversed(config_dirs.split(path.pathsep))
else:
# XDG spec: "If $XDG_CONFIG_DIRS is either not set or empty, a value equal to /etc/xdg should be used."
config_dirs = ['/etc/xdg']
# load a file from all config dirs, default to NotConfigured
fname = '{name}.{extension}'.format(name=name, extension=extension)
return loadf(*(path.join(config_dir, fname) for config_dir in config_dirs),
default=NotConfigured) |
java | public String getTreeSite(String type) {
String result = m_treeSite.get(type);
return result;
} |
python | def os_workload_status(configs, required_interfaces, charm_func=None):
"""
Decorator to set workload status based on complete contexts
"""
def wrap(f):
@wraps(f)
def wrapped_f(*args, **kwargs):
# Run the original function first
f(*args, **kwargs)
# Set workload status now that contexts have been
# acted on
set_os_workload_status(configs, required_interfaces, charm_func)
return wrapped_f
return wrap |
python | def remove_securitygroup_rules(self, group_id, rules):
"""Remove rules from a security group.
:param int group_id: The ID of the security group
:param list rules: The list of IDs to remove
"""
return self.security_group.removeRules(rules, id=group_id) |
python | def correct(self, temp, we_t):
"""
Compute weC from weT
"""
if not PIDTempComp.in_range(temp):
return None
n_t = self.cf_t(temp)
if n_t is None:
return None
we_c = we_t * n_t
return we_c |
java | @Override
public List<SimulatorEvent> accept(SimulatorEvent event) {
if (LOG.isDebugEnabled()) {
LOG.debug("Accepted event " + event);
}
if (event instanceof HeartbeatEvent) {
return processHeartbeatEvent((HeartbeatEvent)event);
} else if (event instanceof TaskAttemptCompletionEvent) {
return processTaskAttemptCompletionEvent((TaskAttemptCompletionEvent)
event);
} else {
throw new IllegalArgumentException("Unhandled event "+event);
}
} |
python | def get_default_session(self):
"""
The default session is nothing more than the first session added
into the session handler pool. This will likely change in the future
but for now each session identifies the domain and also manages
domain switching within a single session.
:rtype: Session
"""
if self._sessions:
return self.get_session(next(iter(self._sessions)))
return self.get_session() |
java | public static <T> Stream<T> reject(Stream<T> source, Predicate<? super T> predicate) {
return source.filter(predicate.negate());
} |
java | public void printUsage(PrintWriter pw, int width, String cmdLineSyntax)
{
int argPos = cmdLineSyntax.indexOf(' ') + 1;
printWrapped(pw, width, getSyntaxPrefix().length() + argPos, getSyntaxPrefix() + cmdLineSyntax);
} |
java | public Observable<Void> syncVirtualNetworkInfoAsync(String resourceGroupName, String name) {
return syncVirtualNetworkInfoWithServiceResponseAsync(resourceGroupName, name).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
} |
java | private static String driverVersion()
{
// "Session" is arbitrary - the only thing that matters is that the class we use here is in the
// 'org.neo4j.driver' package, because that is where the jar manifest specifies the version.
// This is done as part of the build, adding a MANIFEST.MF file to the generated jarfile.
Package pkg = Session.class.getPackage();
if ( pkg != null && pkg.getImplementationVersion() != null )
{
return pkg.getImplementationVersion();
}
// If there is no version, we're not running from a jar file, but from raw compiled class files.
// This should only happen during development, so call the version 'dev'.
return "dev";
} |
java | public void registerService(Object service)
{
this.setService(service);
String serviceClass = getInterfaceClassName();
if (service != null)
serviceRegistration = context.registerService(serviceClass, this.service, properties);
} |
python | def _apply_search_backrefs(pattern, flags=0):
"""Apply the search backrefs to the search pattern."""
if isinstance(pattern, (str, bytes)):
re_verbose = bool(VERBOSE & flags)
re_unicode = None
if bool((ASCII | LOCALE) & flags):
re_unicode = False
elif bool(UNICODE & flags):
re_unicode = True
if not (flags & DEBUG):
pattern = _cached_search_compile(pattern, re_verbose, re_unicode, type(pattern))
else: # pragma: no cover
pattern = _bre_parse._SearchParser(pattern, re_verbose, re_unicode).parse()
elif isinstance(pattern, Bre):
if flags:
raise ValueError("Cannot process flags argument with a compiled pattern")
pattern = pattern._pattern
elif isinstance(pattern, (_RE_TYPE, Bre)):
if flags:
raise ValueError("Cannot process flags argument with a compiled pattern!")
else:
raise TypeError("Not a string or compiled pattern!")
return pattern |
python | def get_object_cache_keys(instance):
"""
Return the cache keys associated with an object.
"""
if not instance.pk or instance._state.adding:
return []
keys = []
for language in _get_available_languages(instance):
keys.append(get_urlfield_cache_key(instance.__class__, instance.pk, language))
return keys |
java | private CmsResource getResourceToEdit(CmsObject cms) {
CmsResource resource = null;
if (m_uuid != null) {
try {
CmsUUID uuid = new CmsUUID(m_uuid);
resource = cms.readResource(uuid, CmsResourceFilter.ignoreExpirationOffline(cms));
} catch (NumberFormatException | CmsException e) {
LOG.warn("UUID was not valid or there is no resource with the given UUID.", e);
}
}
return resource;
} |
python | def ProcessIterator(pids, process_regex_string, ignore_grr_process, error_list):
"""Yields all (psutil-) processes that match certain criteria.
Args:
pids: A list of pids. If given, only the processes with those pids are
returned.
process_regex_string: If given, only processes whose name matches the regex
are returned.
ignore_grr_process: If True, the grr process itself will not be returned.
error_list: All errors while handling processes are appended to this list.
Type is repeated ProcessMemoryError.
Yields:
psutils.Process objects matching all criteria.
"""
pids = set(pids)
if ignore_grr_process:
grr_pid = psutil.Process().pid
else:
grr_pid = -1
if process_regex_string:
process_regex = re.compile(process_regex_string)
else:
process_regex = None
if pids:
process_iterator = []
for pid in pids:
try:
process_iterator.append(psutil.Process(pid=pid))
except Exception as e: # pylint: disable=broad-except
error_list.Append(
rdf_memory.ProcessMemoryError(
process=rdf_client.Process(pid=pid), error=str(e)))
else:
process_iterator = psutil.process_iter()
for p in process_iterator:
if process_regex and not process_regex.search(p.name()):
continue
if p.pid == grr_pid:
continue
yield p |
java | public void setWp(A_CmsListDialog wp) {
m_wp = wp;
Iterator<I_CmsListDirectAction> itActs = getDirectActions().iterator();
while (itActs.hasNext()) {
I_CmsListDirectAction action = itActs.next();
action.setWp(wp);
}
Iterator<CmsListDefaultAction> itDefActs = getDefaultActions().iterator();
while (itDefActs.hasNext()) {
CmsListDefaultAction action = itDefActs.next();
action.setWp(wp);
}
} |
java | private static void createRestorerInstance() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "createRestorerInstance");
try {
Class cls = Class.forName(SI_MESSAGE_HANDLE_RESTORER_CLASS);
instance = (SIMessageHandleRestorer) cls.newInstance();
}
catch (Exception e) {
FFDCFilter.processException(e, "com.ibm.wsspi.sib.core.SIMessageHandleRestorer.createRestorerInstance", "100");
SibTr.error(tc,"UNABLE_TO_CREATE_HANDLERESTORER_CWSIB0010",e);
NoClassDefFoundError ncdfe = new NoClassDefFoundError(e.getMessage());
ncdfe.initCause(e);
throw ncdfe;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "createRestorerInstance");
} |
java | public void setText(int index, String value)
{
set(selectField(AssignmentFieldLists.CUSTOM_TEXT, index), value);
} |
python | def _filter_path(self, path):
""" 筛选出 path 目录下所有的视频文件
"""
if self._is_videofile(path):
return [path, ]
if os.path.isdir(path):
result = []
for root, dirs, files in os.walk(path):
result.extend(filter(self._is_videofile, map(
lambda f: os.path.join(root, f), files)))
return result
else:
return [] |
java | public static int read(ReadableByteChannel ch, ByteBuffer bb)
throws IOException
{
if (ch instanceof SelectableChannel) {
SelectableChannel sc = (SelectableChannel)ch;
synchronized (sc.blockingLock()) {
boolean bm = sc.isBlocking();
if (!bm)
throw new IllegalBlockingModeException();
// Android-removed.
// if (bm != block)
// sc.configureBlocking(block);
int n = ch.read(bb);
// Android-removed.
// if (bm != block)
// sc.configureBlocking(bm);
return n;
}
} else {
return ch.read(bb);
}
} |
python | def _create_marks(fig, marks=[bq.Mark], options={}, params={}):
"""
Initializes and returns marks for a figure as a list. Each mark is passed
in as a class. The plot options should be passed into options.
Any additional parameters to initialize plot components are passed into
params as a dict of { 'mark': [{ trait: value, ... }, ...] }
For example, when initializing two marks you can assign different colors to
each one:
params={
'marks': [
{'colors': [DARK_BLUE]},
{'colors': [GOLDENROD]},
]
}
If the param value is a function, it will be called with the options dict
augmented with all previously created plot elements. This permits
dependencies on plot elements:
params={ 'marks': {'scale': lambda opts: opts['x_sc'] } }
"""
params = _merge_with_defaults(params)
# Although fig provides scale_x and scale_y properties, the scales on the
# axes are the only ones that are actually used.
x_ax, y_ax = fig.axes
x_sc, y_sc = x_ax.scale, y_ax.scale
options = tz.merge(options, {'x_sc': x_sc, 'y_sc': y_sc})
marks = [
mark_cls(**_call_params(mark_params, options))
for mark_cls, mark_params in zip(marks, params['marks'])
]
return marks |
java | public void setQuery(final String metric, final Map<String, String> tags) {
this.metric = metric;
this.tags = tags;
metric_uid = tsdb.getUID(UniqueIdType.METRIC, metric);
tag_uids = Tags.resolveAll(tsdb, tags);
} |
python | def _from_dict(cls, _dict):
"""Initialize a TableReturn object from a json dictionary."""
args = {}
if 'document' in _dict:
args['document'] = DocInfo._from_dict(_dict.get('document'))
if 'model_id' in _dict:
args['model_id'] = _dict.get('model_id')
if 'model_version' in _dict:
args['model_version'] = _dict.get('model_version')
if 'tables' in _dict:
args['tables'] = [
Tables._from_dict(x) for x in (_dict.get('tables'))
]
return cls(**args) |
java | static void createSynthesizedExternVar(AbstractCompiler compiler, String varName) {
Node nameNode = IR.name(varName);
// Mark the variable as constant if it matches the coding convention
// for constant vars.
// NOTE(nicksantos): honestly, I'm not sure how much this matters.
// AFAIK, all people who use the CONST coding convention also
// compile with undeclaredVars as errors. We have some test
// cases for this configuration though, and it makes them happier.
if (compiler.getCodingConvention().isConstant(varName)) {
nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
}
Node syntheticExternVar = IR.var(nameNode);
getSynthesizedExternsRoot(compiler).addChildToBack(syntheticExternVar);
compiler.reportChangeToEnclosingScope(syntheticExternVar);
} |
java | @SuppressWarnings("unchecked")
public Query limit(int limit) {
if (limit < 0) {
throw new IllegalArgumentException(format("Limit of %d is negative.", limit));
}
if (limit > MAX_LIMIT) {
throw new IllegalArgumentException(
format("Limit of %d is greater than %d.", limit, MAX_LIMIT)
);
}
params.put(PARAMETER_LIMIT, Integer.toString(limit));
return (Query) this;
} |
python | def mount_volume(volume, device='/dev/xvdf', mountpoint='/mnt/data', fstype='ext4'):
'''
Mount an EBS volume
Args:
volume (str): EBS volume ID
device (str): default /dev/xvdf
mountpoint (str): default /mnt/data
fstype (str): default ext4
'''
_ec2().attach_volume(volume, _host_node()['id'], device)
time.sleep(1)
sudo('mkdir -p "%s"' % mountpoint)
sudo('mount -t "%s" "%s" "%s"' % (fstype, device, mountpoint)) |
java | public void removeTextSearchParameter(String key) {
try {
ParamType type = ParamType.valueOf(key);
switch (type) {
case language:
m_searchObject.setLocale(getStartLocale());
break;
case text:
m_searchObject.setQuery(null);
break;
case expired:
m_searchObject.setIncludeExpired(false);
break;
case creation:
m_searchObject.setDateCreatedEnd(-1L);
m_searchObject.setDateCreatedStart(-1L);
break;
case modification:
m_searchObject.setDateModifiedEnd(-1L);
m_searchObject.setDateModifiedStart(-1L);
break;
default:
}
m_handler.onRemoveSearchParam(type);
updateResultsTab(false);
ValueChangeEvent.fire(this, m_searchObject);
} catch (IllegalArgumentException e) {
// should not happen
}
} |
java | public Observable<InputInner> updateAsync(String resourceGroupName, String jobName, String inputName, InputInner input) {
return updateWithServiceResponseAsync(resourceGroupName, jobName, inputName, input).map(new Func1<ServiceResponseWithHeaders<InputInner, InputsUpdateHeaders>, InputInner>() {
@Override
public InputInner call(ServiceResponseWithHeaders<InputInner, InputsUpdateHeaders> response) {
return response.body();
}
});
} |
python | def open_file(self) :
"""If this FSNode is a file, open it for reading and return the file handle"""
if self.isdir() : raise Exception("FSQuery tried to open a directory as a file : %s" % self.abs)
return open(self.abs) |
java | @Nullable
public static String removeSpacesAndHyphens(@Nullable String cardNumberWithSpaces) {
if (isBlank(cardNumberWithSpaces)) {
return null;
}
return cardNumberWithSpaces.replaceAll("\\s|-", "");
} |
python | def get_section_header(self, section):
"""
Get a specific section header by index or name.
Args:
section(int or str): The index or name of the section header to return.
Returns:
:class:`~ELF.SectionHeader`: The section header.
Raises:
KeyError: The requested section header does not exist.
"""
self._ensure_section_headers_loaded()
if type(section) is int:
return self._section_headers_by_index[section]
else:
return self._section_headers_by_name[section] |
python | def ensure_index(self, key, unique=False):
"""Wrapper for pymongo.Collection.ensure_index
"""
return self.collection.ensure_index(key, unique=unique) |
java | @Override
protected void addNavDetailLink(boolean link, Content liNav) {
if (link) {
liNav.addContent(writer.getHyperLink(
SectionName.PROPERTY_DETAIL,
contents.navProperty));
} else {
liNav.addContent(contents.navProperty);
}
} |
java | public com.google.api.ads.admanager.axis.v201808.ColumnType[] getColumnTypes() {
return columnTypes;
} |
java | public <V, T extends Enum & Option> OptionsMapper env(final String name, final T option,
final Function<String, V> converter) {
register("env: " + name, option, System.getenv(name), converter);
return this;
} |
python | def find_executable(cls, name, check_syspaths=False):
"""Find an executable.
Args:
name (str): Program name.
check_syspaths (bool): If True, check the standard system paths as
well, if program was not found on current $PATH.
Returns:
str: Full filepath of executable.
"""
exe = which(name)
if not exe and check_syspaths:
paths = cls.get_syspaths()
env = os.environ.copy()
env["PATH"] = os.pathsep.join(paths)
exe = which(name, env=env)
if not exe:
raise RuntimeError("Couldn't find executable '%s'." % name)
return exe |
java | public static <T> List<Number> findIndexValues(Iterator<T> self, Number startIndex, @ClosureParams(FirstParam.FirstGenericType.class) Closure condition) {
List<Number> result = new ArrayList<Number>();
long count = 0;
long startCount = startIndex.longValue();
BooleanClosureWrapper bcw = new BooleanClosureWrapper(condition);
while (self.hasNext()) {
Object value = self.next();
if (count++ < startCount) {
continue;
}
if (bcw.call(value)) {
result.add(count - 1);
}
}
return result;
} |
python | def findbeam_Guinier(data, orig_initial, mask, rmin, rmax, maxiter=100,
extent=10, callback=None):
"""Find the beam by minimizing the width of a Gaussian centered at the
origin (i.e. maximizing the radius of gyration in a Guinier scattering).
Inputs:
data: scattering matrix
orig_initial: first guess for the origin
mask: mask matrix. Nonzero is non-masked.
rmin,rmax: distance from the origin (in pixels) of the Guinier range.
extent: approximate distance of the current and the real origin in pixels.
Too high a value makes the fitting procedure unstable. Too low a value
does not permit to move away the current origin.
callback: callback function (expects no arguments)
Outputs:
the beam coordinates
Notes:
A Gaussian centered at the origin will be fitted.
"""
orig_initial = np.array(orig_initial)
mask = 1 - mask.astype(np.uint8)
data = data.astype(np.double)
pix = np.arange(rmin * 1.0, rmax * 1.0, 1)
pix2 = pix ** 2
def targetfunc(orig, data, mask, orig_orig, callback):
I = radintpix(
data, None, orig[0] + orig_orig[0], orig[1] + orig_orig[1], mask, pix)[1]
p = np.polyfit(pix2, np.log(I), 1)[0]
if callback is not None:
callback()
return p
orig1 = scipy.optimize.fmin(targetfunc, np.array([extent, extent]),
args=(
data, mask, orig_initial - extent, callback),
maxiter=maxiter, disp=0)
return np.array(orig_initial) - extent + orig1 |
java | public boolean isLockLive(String nodeId) throws LockException
{
try
{
return isLockLive.run(nodeId);
}
catch (LockException e)
{
if (LOG.isTraceEnabled())
{
LOG.trace("An exception occurred: " + e.getMessage());
}
}
return false;
} |
java | static ClassInfoList getAllStandardClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec) {
return new ClassInfoList(ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true,
ClassType.STANDARD_CLASS), /* sortByName = */ true);
} |
java | public static Method getRequiredMethod(Class type, String name, Class... argumentTypes) {
try {
return type.getDeclaredMethod(name, argumentTypes);
} catch (NoSuchMethodException e) {
return findMethod(type, name, argumentTypes)
.orElseThrow(() -> newNoSuchMethodError(type, name, argumentTypes));
}
} |
python | def exclude_time(self, start, end, days):
"""Added an excluded time by start, end times and the days.
``start`` and ``end`` are in military integer times (e.g. - 1200 1430).
``days`` is a collection of integers or strings of fully-spelt, lowercased days
of the week.
"""
self._excluded_times.append(TimeRange(start, end, days))
return self |
java | @Override
protected void initTableInfoUniqueKeys(final Connection _con,
final String _sql,
final Map<String, TableInformation> _cache4Name)
throws SQLException
{
final String sqlStmt = new StringBuilder()
.append("select t.tablename as TABLE_NAME, c.CONSTRAINTNAME as INDEX_NAME, g.DESCRIPTOR as COLUMN_NAME")
.append(" from SYS.SYSTABLES t, SYS.SYSCONSTRAINTS c, SYS.SYSKEYS k, SYS.SYSCONGLOMERATES g ")
.append(" where t.TABLEID=c.TABLEID")
.append(" AND c.TYPE='U'")
.append(" AND c.CONSTRAINTID = k.CONSTRAINTID")
.append(" AND k.CONGLOMERATEID = g.CONGLOMERATEID")
.toString();
super.initTableInfoUniqueKeys(_con, sqlStmt, _cache4Name);
} |
python | def _UserUpdateIgnoredDirs(self, origIgnoredDirs = []):
"""
Add ignored directories to database table. Always called if the
database table is empty.
User can build a list of entries to add to the database table
(one entry at a time). Once finished they select the finish option
and all entries will be added to the table. They can reset the
list at any time before finishing.
Parameters
----------
origIgnoredDirs : list [optional : default = []]
List of original ignored directories from database table.
Returns
----------
list
List of updated ignored directories from database table.
"""
ignoredDirs = list(origIgnoredDirs)
inputDone = None
while inputDone is None:
prompt = "Enter new directory to ignore (e.g. DONE), " \
"'r' to reset directory list, " \
"'f' to finish or " \
"'x' to exit: "
response = goodlogging.Log.Input("CLEAR", prompt)
if response.lower() == 'x':
sys.exit(0)
elif response.lower() == 'f':
inputDone = 1
elif response.lower() == 'r':
ignoredDirs = []
else:
if response is not None:
ignoredDirs.append(response)
ignoredDirs = set(ignoredDirs)
origIgnoredDirs = set(origIgnoredDirs)
if ignoredDirs != origIgnoredDirs:
self._db.PurgeIgnoredDirs()
for ignoredDir in ignoredDirs:
self._db.AddIgnoredDir(ignoredDir)
return list(ignoredDirs) |
python | def from_lasio(cls, l, remap=None, funcs=None, data=True, req=None, alias=None, fname=None):
"""
Constructor. If you already have the lasio object, then this makes a
well object from it.
Args:
l (lasio object): a lasio object.
remap (dict): Optional. A dict of 'old': 'new' LAS field names.
funcs (dict): Optional. A dict of 'las field': function() for
implementing a transform before loading. Can be a lambda.
data (bool): Whether to load curves or not.
req (dict): An alias list, giving all required curves. If not
all of the aliases are present, the well is empty.
Returns:
well. The well object.
"""
# Build a dict of curves.
curve_params = {}
for field, (sect, code) in LAS_FIELDS['data'].items():
curve_params[field] = utils.lasio_get(l,
sect,
code,
remap=remap,
funcs=funcs)
# This is annoying, but I need the whole depth array to
# deal with edge cases, eg non-uniform sampling.
# Add all required curves together.
if req:
reqs = utils.flatten_list([v for k, v in alias.items() if k in req])
# Using lasio's idea of depth in metres:
if l.depth_m[0] < l.depth_m[1]:
curve_params['depth'] = l.depth_m
else:
curve_params['depth'] = np.flipud(l.depth_m)
# Make the curve dictionary.
depth_curves = ['DEPT', 'TIME']
if data and req:
curves = {c.mnemonic: Curve.from_lasio_curve(c, **curve_params)
for c in l.curves
if (c.mnemonic[:4] not in depth_curves)
and (c.mnemonic in reqs)}
elif data and not req:
curves = {c.mnemonic: Curve.from_lasio_curve(c, **curve_params)
for c in l.curves
if (c.mnemonic[:4] not in depth_curves)}
elif (not data) and req:
curves = {c.mnemonic: True
for c in l.curves
if (c.mnemonic[:4] not in depth_curves)
and (c.mnemonic in reqs)}
else:
curves = {c.mnemonic: True
for c in l.curves
if (c.mnemonic[:4] not in depth_curves)}
if req:
aliases = utils.flatten_list([c.get_alias(alias)
for m, c
in curves.items()]
)
if len(set(aliases)) < len(req):
return cls(params={})
# Build a dict of the other well data.
params = {'las': l,
'header': Header.from_lasio(l, remap=remap, funcs=funcs),
'location': Location.from_lasio(l, remap=remap, funcs=funcs),
'data': curves,
'fname': fname}
for field, (sect, code) in LAS_FIELDS['well'].items():
params[field] = utils.lasio_get(l,
sect,
code,
remap=remap,
funcs=funcs)
return cls(params) |
python | def _pkcs1imify(self, data):
"""
turn a 20-byte SHA1 hash into a blob of data as large as the key's N,
using PKCS1's \"emsa-pkcs1-v1_5\" encoding. totally bizarre.
"""
size = len(util.deflate_long(self.n, 0))
filler = max_byte * (size - len(SHA1_DIGESTINFO) - len(data) - 3)
return zero_byte + one_byte + filler + zero_byte + SHA1_DIGESTINFO + data |
java | public int[] findLongest(CharSequence query, int start)
{
if ((query == null) || (start >= query.length()))
{
return new int[]{0, -1};
}
int state = 1;
int maxLength = 0;
int lastVal = -1;
for (int i = start; i < query.length(); i++)
{
int[] res = transferValues(state, query.charAt(i));
if (res[0] == -1)
{
break;
}
state = res[0];
if (res[1] != -1)
{
maxLength = i - start + 1;
lastVal = res[1];
}
}
return new int[]{maxLength, lastVal};
} |
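
The table above pairs a `language` label (one of two string classes) with a `func_code_string` cell holding the raw function source. As a minimal sketch of how such rows might be consumed, the snippet below groups in-memory (language, code) tuples by language and reports a count per language; the two sample rows are illustrative placeholders, not entries copied from the table.

```python
from collections import defaultdict

# Placeholder rows standing in for the (language, func_code_string) pairs above.
rows = [
    ("java", "public void addPoint(float x, float y) { /* ... */ }"),
    ("python", "def upsert(self, document, cond): ..."),
]

# Group the function strings by their language label.
by_language = defaultdict(list)
for language, func_code_string in rows:
    by_language[language].append(func_code_string)

# Report how many snippets were collected per language.
for language, snippets in sorted(by_language.items()):
    print(f"{language}: {len(snippets)} snippet(s)")
```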