language | func_code_string |
---|---|
java | public static UUID parseId(final String id) throws WarcFormatException {
if (! (id.startsWith(UUID_HEAD) && id.endsWith(UUID_TAIL))) throw new WarcFormatException("'" + id + "' wrong format for " + Name.WARC_RECORD_ID.value);
final int len = id.length();
UUID uuid;
try {
uuid = UUID.fromString(id.substring(UUID_HEAD_LENGTH, len - UUID_TAIL_LENGTH));
} catch (IllegalArgumentException e) {
throw new WarcFormatException("Error parsing uuid " + id, e);
}
return uuid;
} |
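The UUID_HEAD/UUID_TAIL constants are not shown above; assuming they are the usual WARC-Record-ID delimiters `<urn:uuid:` and `>`, the same check reads as this minimal Python sketch:

```python
# Minimal sketch, assuming UUID_HEAD/UUID_TAIL are the standard
# WARC-Record-ID delimiters (not shown in the snippet above).
import uuid

UUID_HEAD = "<urn:uuid:"
UUID_TAIL = ">"

def parse_id(record_id: str) -> uuid.UUID:
    if not (record_id.startswith(UUID_HEAD) and record_id.endswith(UUID_TAIL)):
        raise ValueError("'%s' wrong format for WARC-Record-ID" % record_id)
    # strip the delimiters and let the uuid module validate the rest
    return uuid.UUID(record_id[len(UUID_HEAD):-len(UUID_TAIL)])

parse_id("<urn:uuid:9f5b8f05-3f37-4a3e-9d1c-2f0e8e0c6f7a>")
```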
python | def main():
"""
NAME
huji_sample_magic.py
DESCRIPTION
takes tab delimited Hebrew University sample file and converts to MagIC formatted tables
SYNTAX
huji_sample_magic.py [command line options]
OPTIONS
-f FILE: specify input file
-Fsa FILE: specify sample output file, default is: samples.txt
-Fsi FILE: specify site output file, default is: sites.txt
-Iso: import sample orientation info - default is to set sample_az/dip to 0,0
-ncn NCON: specify naming convention: default is #1 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM:SO-SUN]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
-loc: location name, default="unknown"
-DM: data model number (MagIC 2 or 3, default 3)
INPUT FORMAT
Input files must be tab delimited:
Samp Az Dip Dip_dir Dip
Orientation convention:
Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
e.g. field_dip is degrees from horizontal of drill direction
Magnetic declination convention:
Az is already corrected in file
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[3] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize yourself
or e-mail [email protected] for help.
OUTPUT
output saved in samples will overwrite any existing files
"""
args = sys.argv
if "-h" in args:
print(main.__doc__)
sys.exit()
#
# initialize variables
Z = 1
# get arguments from the command line
orient_file = pmag.get_named_arg("-f", reqd=True)
data_model_num = int(float(pmag.get_named_arg("-DM", 3)))
if data_model_num == 2:
samp_file = pmag.get_named_arg("-Fsa", "er_samples.txt")
site_file = pmag.get_named_arg("-Fsi", "er_sites.txt")
else:
samp_file = pmag.get_named_arg("-Fsa", "samples.txt")
site_file = pmag.get_named_arg("-Fsi", "sites.txt")
samp_con = pmag.get_named_arg("-ncn", "1")
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 3-Z where Z is an integer")
sys.exit()
else:
Z = samp_con.split("-")[1]
print(samp_con)
meths = pmag.get_named_arg("-mcd", 'FS-FD:SO-POM:SO-SUN')
location_name = pmag.get_named_arg("-loc", "unknown")
if "-Iso" in args:
ignore = 0
else:
ignore = 1
convert.huji_sample(orient_file, meths, location_name, samp_con, ignore) |
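The naming conventions [1], [2], [4-Z] and [7-Z] above all reduce to simple string slicing. A minimal sketch (not pmag's actual parser) of how each one derives the site name from a sample name:

```python
# Minimal sketch of the documented naming conventions; pmag's real
# implementation lives elsewhere and handles more cases.
def site_from_sample(sample, samp_con="1", Z=1):
    if samp_con == "1":     # XXXXY: strip one trailing character
        return sample[:-1]
    if samp_con == "2":     # XXXX-YY: split on the dash
        return sample.split("-")[0]
    if samp_con == "4":     # XXXX[YYY]: strip Z trailing characters
        return sample[:-Z]
    if samp_con == "7":     # [XXX]YYY: keep the first Z characters
        return sample[:Z]
    raise ValueError("unsupported convention: %s" % samp_con)

assert site_from_sample("TG001a", "1") == "TG001"
assert site_from_sample("TG001-2", "2") == "TG001"
assert site_from_sample("TG001a1", "4", Z=2) == "TG001"
assert site_from_sample("TG001a", "7", Z=5) == "TG001"
```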
java | @Override
public Path extract() {
if (sptEntry == null || edgeTo == null)
return this;
if (sptEntry.adjNode != edgeTo.adjNode)
throw new IllegalStateException("Locations of the 'to'- and 'from'-Edge have to be the same. " + toString() + ", fromEntry:" + sptEntry + ", toEntry:" + edgeTo);
extractSW.start();
if (switchFromAndToSPTEntry) {
SPTEntry ee = sptEntry;
sptEntry = edgeTo;
edgeTo = ee;
}
SPTEntry currEdge = sptEntry;
boolean nextEdgeValid = EdgeIterator.Edge.isValid(currEdge.edge);
int nextEdge;
while (nextEdgeValid) {
// the reverse search needs the next edge
nextEdgeValid = EdgeIterator.Edge.isValid(currEdge.parent.edge);
nextEdge = nextEdgeValid ? currEdge.parent.edge : EdgeIterator.NO_EDGE;
processEdge(currEdge.edge, currEdge.adjNode, nextEdge);
currEdge = currEdge.parent;
}
setFromNode(currEdge.adjNode);
reverseOrder();
currEdge = edgeTo;
int prevEdge = EdgeIterator.Edge.isValid(sptEntry.edge) ? sptEntry.edge : EdgeIterator.NO_EDGE;
int tmpEdge = currEdge.edge;
while (EdgeIterator.Edge.isValid(tmpEdge)) {
currEdge = currEdge.parent;
processEdge(tmpEdge, currEdge.adjNode, prevEdge);
prevEdge = tmpEdge;
tmpEdge = currEdge.edge;
}
setEndNode(currEdge.adjNode);
extractSW.stop();
return setFound(true);
} |
java | public int count(Entity where) throws SQLException {
Connection conn = null;
try {
conn = this.getConnection();
return runner.count(conn, where);
} catch (SQLException e) {
throw e;
} finally {
this.closeConnection(conn);
}
} |
java | private void verifyEditStreams() throws IOException {
// we check if the shared stream is still available
if (getFSImage().getEditLog().isSharedJournalAvailable()
&& InjectionHandler
.trueCondition(InjectionEvent.AVATARNODE_CHECKEDITSTREAMS)) {
return;
}
// for sanity check if the number of available journals
// is equal to the number of configured ones
int expectedEditStreams = NNStorageConfiguration.getNamespaceEditsDirs(
confg).size();
int actualEditStreams = this.namesystem.getFSImage().getEditLog()
.getNumberOfAvailableJournals();
if (expectedEditStreams == actualEditStreams
&& InjectionHandler
.trueCondition(InjectionEvent.AVATARNODE_CHECKEDITSTREAMS)) {
return;
}
String msg = "Failover: Cannot proceed - shared journal is not available. "
+ "Number of required edit streams: " + expectedEditStreams
+ " current number: " + actualEditStreams;
LOG.fatal(msg);
throw new IOException(msg);
} |
python | def mapper(module, entry_point,
modpath='pkg_resources', globber='root', modname='es6',
fext=JS_EXT, registry=_utils):
"""
General mapper.
Loads components from the micro registry and returns a dict mapping
generated module names to filesystem paths.
"""
modname_f = modname if callable(modname) else registry['modname'][modname]
return {
modname_f(modname_fragments): join(base, subpath)
for modname_fragments, base, subpath in modgen(
module, entry_point=entry_point,
modpath=modpath, globber=globber,
fext=fext, registry=registry)
} |
python | def agent_update(self, agent_id, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/chat/agents#update-agent"
api_path = "/api/v2/agents/{agent_id}"
api_path = api_path.format(agent_id=agent_id)
return self.call(api_path, method="PUT", data=data, **kwargs) |
python | async def create_post_request(self, method: str, params: Dict = None):
"""Call the given method over POST.
:param method: Name of the method
:param params: dict of parameters
:return: JSON object
"""
if params is None:
params = {}
headers = {"Content-Type": "application/json"}
payload = {
"method": method,
"params": [params],
"id": next(self.idgen),
"version": "1.0",
}
if self.debug > 1:
_LOGGER.debug("> POST %s with body: %s", self.guide_endpoint, payload)
async with aiohttp.ClientSession(headers=headers) as session:
res = await session.post(self.guide_endpoint, json=payload, headers=headers)
if self.debug > 1:
_LOGGER.debug("Received %s: %s" % (res.status_code, res.text))
if res.status != 200:
raise SongpalException(
"Got a non-ok (status %s) response for %s" % (res.status, method),
error=(await res.json())["error"],
)
res = await res.json()
# TODO handle exceptions from POST? This used to raise SongpalException
# on requests.RequestException (Unable to get APIs).
if "error" in res:
raise SongpalException("Got an error for %s" % method, error=res["error"])
if self.debug > 1:
_LOGGER.debug("Got %s: %s", method, pf(res))
return res |
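The payload built above is a JSON-RPC-style envelope. A hedged illustration of what goes over the wire (the method name is only an example, and the exact error shape may vary by device):

```python
payload = {
    "method": "getPlayingContentInfo",  # example method name
    "params": [{}],                     # the params dict is wrapped in a list
    "id": 1,                            # drawn from the idgen counter
    "version": "1.0",
}
# A success reply carries "result", a failure carries "error":
ok = {"id": 1, "result": [{"state": "PLAYING"}]}     # returned to the caller
err = {"id": 1, "error": [12, "illegal argument"]}   # raises SongpalException
```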
python | def _extract_email(gh):
"""Get user email from github."""
return next(
(x.email for x in gh.emails() if x.verified and x.primary), None) |
java | protected Transformer getTransformer() {
TransformerFactory transformerFactory = domXmlDataFormat.getTransformerFactory();
try {
Transformer transformer = transformerFactory.newTransformer();
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
return transformer;
}
catch (TransformerConfigurationException e) {
throw LOG.unableToCreateTransformer(e);
}
} |
java | protected void onFaultedResolution(String dependencyKey, Throwable throwable) {
if (toBeResolved == 0) {
throw new RuntimeException("invalid state - " + this.key() + ": The dependency '" + dependencyKey + "' is already reported or there is no such dependencyKey");
}
toBeResolved--;
} |
java | private static boolean endsWith(final CharSequence str, final CharSequence suffix, final boolean ignoreCase) {
if (str == null || suffix == null) {
return str == null && suffix == null;
}
if (suffix.length() > str.length()) {
return false;
}
final int strOffset = str.length() - suffix.length();
return CharSequenceUtils.regionMatches(str, ignoreCase, strOffset, suffix, 0, suffix.length());
} |
java | public CamelRouteActionBuilder create(RouteBuilder routeBuilder) {
CreateCamelRouteAction camelRouteAction = new CreateCamelRouteAction();
try {
if (!routeBuilder.getContext().equals(getCamelContext())) {
routeBuilder.configureRoutes(getCamelContext());
} else {
routeBuilder.configure();
}
camelRouteAction.setRoutes(routeBuilder.getRouteCollection().getRoutes());
} catch (Exception e) {
throw new CitrusRuntimeException("Failed to configure route definitions with camel context", e);
}
camelRouteAction.setCamelContext(getCamelContext());
action.setDelegate(camelRouteAction);
return this;
} |
python | def status(self):
'''returns information about module'''
transferred = self.download - self.prev_download
now = time.time()
interval = now - self.last_status_time
self.last_status_time = now
return("DFLogger: %(state)s Rate(%(interval)ds):%(rate).3fkB/s Block:%(block_cnt)d Missing:%(missing)d Fixed:%(fixed)d Abandoned:%(abandoned)d" %
{"interval": interval,
"rate": transfered/(interval*1000),
"block_cnt": self.block_cnt,
"missing": len(self.missing_blocks),
"fixed": self.missing_found,
"abandoned": self.abandoned,
"state": "Inactive" if self.stopped else "Active"
}) |
python | def index(in_bam, config, check_timestamp=True):
"""Index a BAM file, skipping if index present.
Centralizes BAM indexing providing ability to switch indexing approaches.
"""
assert is_bam(in_bam), "%s is not a BAM file" % in_bam
index_file = "%s.bai" % in_bam
alt_index_file = "%s.bai" % os.path.splitext(in_bam)[0]
if check_timestamp:
bai_exists = utils.file_uptodate(index_file, in_bam) or utils.file_uptodate(alt_index_file, in_bam)
else:
bai_exists = utils.file_exists(index_file) or utils.file_exists(alt_index_file)
if not bai_exists:
# Remove old index files and re-run to prevent linking into tx directory
for fname in [index_file, alt_index_file]:
utils.remove_safe(fname)
samtools = config_utils.get_program("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, index_file) as tx_index_file:
cmd = "{samtools} index -@ {num_cores} {in_bam} {tx_index_file}"
do.run(cmd.format(**locals()), "Index BAM file: %s" % os.path.basename(in_bam))
return index_file if utils.file_exists(index_file) else alt_index_file |
java | public void addMPDestinationChangeListener(MPDestinationChangeListener destinationChangeListener)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "addMPDestinationChangeListener",
new Object[]{destinationChangeListener});
_destinationChangeListeners.add(destinationChangeListener);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "addMPDestinationChangeListener");
} |
python | def p_function(self, p):
'''function : oneway function_type IDENTIFIER '(' field_seq ')' \
throws annotations '''
p[0] = ast.Function(
name=p[3],
parameters=p[5],
return_type=p[2],
exceptions=p[7],
oneway=p[1],
annotations=p[8],
lineno=p.lineno(3),
) |
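For readers unfamiliar with PLY, the `p[]` indices follow the positions of the symbols in the production. A comment-only worked mapping:

```python
# function : oneway function_type IDENTIFIER '(' field_seq ')' throws annotations
#              p[1]      p[2]        p[3]    p[4]   p[5]  p[6]  p[7]     p[8]
# So for a Thrift declaration such as
#   oneway void ping(1: string msg) throws (1: Error err)
# name comes from p[3] ("ping"), parameters from p[5], the exception
# fields from p[7], and trailing annotations from p[8].
```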
java | @Override
public boolean write(Writer writer) throws IOException {
writeBlock(writer, EmblPadding.PA_PADDING, accession);
return true;
} |
java | @NotNull
public IntStream scan(@NotNull final IntBinaryOperator accumulator) {
Objects.requireNonNull(accumulator);
return new IntStream(params, new IntScan(iterator, accumulator));
} |
java | PreparedStatement getPreparedStatementForFilter(Connection conn, String baseQuery, CmsUrlNameMappingFilter filter)
throws SQLException {
CmsPair<String, List<I_CmsPreparedStatementParameter>> conditionData = prepareUrlNameMappingConditions(filter);
String whereClause = "";
if (!conditionData.getFirst().equals("")) {
whereClause = " WHERE " + conditionData.getFirst();
}
String query = baseQuery + whereClause;
PreparedStatement stmt = m_sqlManager.getPreparedStatementForSql(conn, query);
int counter = 1;
for (I_CmsPreparedStatementParameter param : conditionData.getSecond()) {
param.insertIntoStatement(stmt, counter);
counter += 1;
}
return stmt;
} |
java | public static String encodeBase64URLSafeString(final byte[] binaryData) {
return new String(encodeBase64(binaryData, false, true), StandardCharsets.UTF_8);
} |
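The URL-safe alphabet simply swaps `+` for `-` and `/` for `_`; commons-codec's `encodeBase64(binaryData, false, true)` additionally omits the trailing `=` padding. The Python standard library shows the alphabet difference directly:

```python
import base64

data = b"\xfb\xef\xbe"          # every 6-bit group is 62 ('+')
base64.b64encode(data)          # b'++++'  (standard alphabet)
base64.urlsafe_b64encode(data)  # b'----'  ('+' -> '-', '/' -> '_')
```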
java | public static <T extends ImageGray<T>>
T convolve( T integral ,
IntegralKernel kernel,
T output ) {
if( integral instanceof GrayF32) {
return (T)IntegralImageOps.convolve((GrayF32)integral,kernel,(GrayF32)output);
} else if( integral instanceof GrayF64) {
return (T)IntegralImageOps.convolve((GrayF64)integral,kernel,(GrayF64)output);
} else if( integral instanceof GrayS32) {
return (T)IntegralImageOps.convolve((GrayS32)integral,kernel,(GrayS32)output);
} else if( integral instanceof GrayS64) {
return (T)IntegralImageOps.convolve((GrayS64)integral,kernel,(GrayS64)output);
} else {
throw new IllegalArgumentException("Unknown input type: "+integral.getClass().getSimpleName());
}
} |
java | protected String getTextLineNumber(int rowStartOffset) {
Element root = component.getDocument().getDefaultRootElement();
int index = root.getElementIndex(rowStartOffset);
Element line = root.getElement(index);
if (line.getStartOffset() == rowStartOffset)
return String.valueOf(index + 1);
else
return "";
} |
java | public DatabaseAccessConfiguration swap(final DataSource dataSource) {
DataSourcePropertyProvider provider = DataSourcePropertyProviderLoader.getProvider(dataSource);
try {
String url = (String) findGetterMethod(dataSource, provider.getURLPropertyName()).invoke(dataSource);
String username = (String) findGetterMethod(dataSource, provider.getUsernamePropertyName()).invoke(dataSource);
String password = (String) findGetterMethod(dataSource, provider.getPasswordPropertyName()).invoke(dataSource);
return new DatabaseAccessConfiguration(url, username, password);
} catch (final ReflectiveOperationException ex) {
throw new ShardingException("Cannot swap data source type: `%s`, please provide an implementation from SPI `%s`",
dataSource.getClass().getName(), DataSourcePropertyProvider.class.getName());
}
} |
java | public com.squareup.okhttp.Call getMarketsRegionIdOrdersAsync(String orderType, Integer regionId,
String datasource, String ifNoneMatch, Integer page, Integer typeId,
final ApiCallback<List<MarketOrdersResponse>> callback) throws ApiException {
com.squareup.okhttp.Call call = getMarketsRegionIdOrdersValidateBeforeCall(orderType, regionId, datasource,
ifNoneMatch, page, typeId, callback);
Type localVarReturnType = new TypeToken<List<MarketOrdersResponse>>() {
}.getType();
apiClient.executeAsync(call, localVarReturnType, callback);
return call;
} |
java | @Override
public List<CommerceOrderPayment> getCommerceOrderPayments(int start,
int end) {
return commerceOrderPaymentPersistence.findAll(start, end);
} |
java | public TimeZoneFormat setGMTOffsetDigits(String digits) {
if (isFrozen()) {
throw new UnsupportedOperationException("Attempt to modify frozen object");
}
if (digits == null) {
throw new NullPointerException("Null GMT offset digits");
}
String[] digitArray = toCodePoints(digits);
if (digitArray.length != 10) {
throw new IllegalArgumentException("Length of digits must be 10");
}
_gmtOffsetDigits = digitArray;
return this;
} |
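The length check counts code points rather than Java chars because a digit outside the Basic Multilingual Plane occupies two UTF-16 code units. A small Python illustration using the Osmanya digits (U+104A0..U+104A9):

```python
osmanya = "".join(chr(0x104A0 + i) for i in range(10))
assert len(osmanya) == 10                            # 10 code points
assert len(osmanya.encode("utf-16-le")) // 2 == 20   # but 20 UTF-16 units,
# which is what Java's String.length() reports -- hence toCodePoints().
```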
python | def copy_data(from_client, from_project, from_logstore, from_time, to_time=None,
to_client=None, to_project=None, to_logstore=None,
shard_list=None,
batch_size=None, compress=None, new_topic=None, new_source=None):
"""
Copy data from one logstore to another (which may be the same logstore or one in a different region). The time range refers to the log receive time on the server side.
"""
to_client = to_client or from_client
# increase the timeout to 2 min at least
from_client.timeout = max(from_client.timeout, 120)
to_client.timeout = max(to_client.timeout, 120)
to_project = to_project or from_project
to_logstore = to_logstore or from_logstore
to_time = to_time or "end"
cpu_count = multiprocessing.cpu_count() * 2
shards = from_client.list_shards(from_project, from_logstore).get_shards_info()
current_shards = [str(shard['shardID']) for shard in shards]
target_shards = _parse_shard_list(shard_list, current_shards)
worker_size = min(cpu_count, len(target_shards))
result = dict()
total_count = 0
with ProcessPoolExecutor(max_workers=worker_size) as pool:
futures = [pool.submit(copy_worker, from_client, from_project, from_logstore, shard,
from_time, to_time,
to_client, to_project, to_logstore,
batch_size=batch_size, compress=compress,
new_topic=new_topic, new_source=new_source)
for shard in target_shards]
for future in as_completed(futures):
partition, count = future.result()
total_count += count
if count:
result[partition] = count
return LogResponse({}, {"total_count": total_count, "shards": result}) |
java | public ApiSuccessResponse getWorkbinContent(String workbinId, GetWorkbinContentData getWorkbinContentData) throws ApiException {
ApiResponse<ApiSuccessResponse> resp = getWorkbinContentWithHttpInfo(workbinId, getWorkbinContentData);
return resp.getData();
} |
java | public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z, DoubleWritable m) {
if (x == null || y == null || z == null) {
return null;
}
Point stPt = new Point(x.get(), y.get(), z.get());
if (m != null)
stPt.setM(m.get());
return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(stPt, null));
} |
python | def genargs() -> ArgumentParser:
"""
Create a command line parser
:return: parser
"""
parser = ArgumentParser()
parser.add_argument("spec", help="JSG specification - can be file name, URI or string")
parser.add_argument("-o", "--outfile", help="Output python file - if omitted, python is not saved")
parser.add_argument("-p", "--print", help="Print python file to stdout")
parser.add_argument("-id", "--inputdir", help="Input directory with JSON files")
parser.add_argument("-i", "--json", help="URL, file name or json text", nargs='*')
return parser |
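Typical use of the parser above (file names are placeholders):

```python
opts = genargs().parse_args(["schema.jsg", "-o", "schema.py", "-i", "data.json"])
opts.spec      # 'schema.jsg'
opts.outfile   # 'schema.py'
opts.json      # ['data.json']  -- nargs='*' collects a list
```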
python | async def messages(self):
"""Iterate through RMB posts from newest to oldest.
Returns
-------
an asynchronous generator that yields :class:`Post`
"""
# Messages may be posted on the RMB while the generator is running.
oldest_id_seen = float('inf')
for offset in count(step=100):
posts_bunch = await self._get_messages(offset=offset)
for post in reversed(posts_bunch):
if post.id < oldest_id_seen:
yield post
oldest_id_seen = posts_bunch[0].id
if len(posts_bunch) < 100:
break |
python | def _resolve_intervals_2dplot(val, func_name):
"""
Helper function to replace the values of a coordinate array containing
pd.Interval with their mid-points or - for pcolormesh - boundaries which
increases length by 1.
"""
label_extra = ''
if _valid_other_type(val, [pd.Interval]):
if func_name == 'pcolormesh':
val = _interval_to_bound_points(val)
else:
val = _interval_to_mid_points(val)
label_extra = '_center'
return val, label_extra |
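A worked example of the two transforms (the `_interval_to_*` helpers are defined elsewhere in the module):

```python
import pandas as pd

vals = [pd.Interval(0, 1), pd.Interval(1, 2), pd.Interval(2, 4)]
# mid-points   -> [0.5, 1.5, 3.0]  (same length; label gains '_center')
# bound points -> [0, 1, 2, 4]     (length + 1, as pcolormesh requires)
```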
java | public Map<String, ResourceList> getAllResourcesAsMap() {
if (pathToWhitelistedResourcesCached == null) {
final Map<String, ResourceList> pathToWhitelistedResourceListMap = new HashMap<>();
for (final Resource res : getAllResources()) {
ResourceList resList = pathToWhitelistedResourceListMap.get(res.getPath());
if (resList == null) {
pathToWhitelistedResourceListMap.put(res.getPath(), resList = new ResourceList());
}
resList.add(res);
}
// Set atomically for thread safety
pathToWhitelistedResourcesCached = pathToWhitelistedResourceListMap;
}
return pathToWhitelistedResourcesCached;
} |
python | def hide_object(self, key, hide=True):
'''hide an object on the map by key'''
self.object_queue.put(SlipHideObject(key, hide)) |
python | def _EvaluateExpressions(self, frame):
"""Evaluates watched expressions into a string form.
If expression evaluation fails, the error message is used as evaluated
expression string.
Args:
frame: Python stack frame of breakpoint hit.
Returns:
Array of strings where each string corresponds to the breakpoint
expression with the same index.
"""
return [self._FormatExpression(frame, expression) for expression in
self._definition.get('expressions') or []] |
java | public
static
String getFacilityString(int syslogFacility) {
switch(syslogFacility) {
case LOG_KERN: return "kern";
case LOG_USER: return "user";
case LOG_MAIL: return "mail";
case LOG_DAEMON: return "daemon";
case LOG_AUTH: return "auth";
case LOG_SYSLOG: return "syslog";
case LOG_LPR: return "lpr";
case LOG_NEWS: return "news";
case LOG_UUCP: return "uucp";
case LOG_CRON: return "cron";
case LOG_AUTHPRIV: return "authpriv";
case LOG_FTP: return "ftp";
case LOG_LOCAL0: return "local0";
case LOG_LOCAL1: return "local1";
case LOG_LOCAL2: return "local2";
case LOG_LOCAL3: return "local3";
case LOG_LOCAL4: return "local4";
case LOG_LOCAL5: return "local5";
case LOG_LOCAL6: return "local6";
case LOG_LOCAL7: return "local7";
default: return null;
}
} |
python | def var(
self, axis=None, skipna=None, level=None, ddof=1, numeric_only=None, **kwargs
):
"""Computes variance across the DataFrame.
Args:
axis (int): The axis to take the variance on.
skipna (bool): True to skip NA values, false otherwise.
ddof (int): degrees of freedom
Returns:
The variance of the DataFrame.
"""
axis = self._get_axis_number(axis) if axis is not None else 0
if numeric_only is not None and not numeric_only:
self._validate_dtypes(numeric_only=True)
return self._reduce_dimension(
self._query_compiler.var(
axis=axis,
skipna=skipna,
level=level,
ddof=ddof,
numeric_only=numeric_only,
**kwargs
)
) |
java | private Map<String, String> executeRequestWithBody(HttpClientService csHttpClient, HttpClientInputs httpClientInputs, String body) {
httpClientInputs.setBody(body);
Map<String, String> requestResponse = csHttpClient.execute(httpClientInputs);
if (UNAUTHORIZED_STATUS_CODE.equals(requestResponse.get(STATUS_CODE))) {
throw new RuntimeException(UNAUTHORIZED_EXCEPTION_MESSAGE);
}
return requestResponse;
} |
python | def _parse_output_for_errors(data, command, **kwargs):
'''
Helper method to parse command output for error information
'''
if re.search('% Invalid', data):
raise CommandExecutionError({
'rejected_input': command,
'message': 'CLI execution error',
'code': '400',
'cli_error': data.lstrip(),
})
if kwargs.get('error_pattern') is not None:
for re_line in kwargs.get('error_pattern'):
if re.search(re_line, data):
raise CommandExecutionError({
'rejected_input': command,
'message': 'CLI execution error',
'code': '400',
'cli_error': data.lstrip(),
}) |
python | def encode_request(uuid, address, interrupt):
""" Encode request into client_message"""
client_message = ClientMessage(payload_size=calculate_size(uuid, address, interrupt))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(uuid)
AddressCodec.encode(client_message, address)
client_message.append_bool(interrupt)
client_message.update_frame_length()
return client_message |
java | @Override
public final void makeOtherEntries(final Map<String, Object> pAddParam,
final PrepaymentFrom pEntity, final IRequestData pRequestData,
final boolean pIsNew) throws Exception {
// nothing
} |
java | private void processAssignments() throws SQLException
{
List<Row> permanentAssignments = getRows("select * from permanent_schedul_allocation inner join perm_resource_skill on permanent_schedul_allocation.allocatiop_of = perm_resource_skill.perm_resource_skillid where permanent_schedul_allocation.projid=? order by permanent_schedul_allocation.permanent_schedul_allocationid", m_projectID);
m_reader.processAssignments(permanentAssignments);
} |
java | public static UrlMappingsHolder lookupUrlMappings(ServletContext servletContext) {
WebApplicationContext wac = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
return (UrlMappingsHolder)wac.getBean(UrlMappingsHolder.BEAN_ID);
} |
python | def get_response(self):
'''
Returns the response according to the submitted data and method.
'''
self.process_commmon()
self.process_data()
urlencoded_data = urllib.urlencode(self.data)
if self.METHOD == POST:
req = urllib2.Request(self.URL, urlencoded_data)
else:
req = urllib2.Request('%s?%s' %(self.URL, urlencoded_data))
if not self.data['content']:
raise PasteException("No content to paste")
self.response = urllib2.urlopen(req)
return self.response |
python | def conv2d(self, filter_size, output_channels, stride=1, padding='SAME', activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0, bn=True, stoch=False):
"""
:param filter_size: int. assumes square filter
:param output_channels: int
:param stride: int
:param padding: 'VALID' or 'SAME'
:param activation_fn: tf.nn function
:param b_value: float
:param s_value: float
:param bn: bool. apply batch normalization if True
:param stoch: bool. add unit Gaussian noise to the activations if True
"""
self.count['conv'] += 1
self._layer_count += 1
scope = 'conv_' + str(self.count['conv'])
if stoch is True:
clean = False
else:
clean = True
with tf.variable_scope(scope):
input_channels = self.input.get_shape()[3]
output_shape = [filter_size, filter_size, input_channels, output_channels]
w = self.weight_variable(name='weights', shape=output_shape)
self.input = tf.nn.conv2d(self.input, w, strides=[1, stride, stride, 1], padding=padding)
if bn is True:
self.input = self.conv_batch_norm(self.input, clean=clean, count=self._layer_count)
if stoch is True:
self.input = tf.random_normal(tf.shape(self.input)) + self.input
self._noisy_z_dict[self._layer_count] = self.input
if b_value is not None:
b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
self.input = tf.add(self.input, b)
if s_value is not None:
s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
self.input = tf.multiply(self.input, s)
if activation_fn is not None:
self.input = activation_fn(self.input)
self.print_log(scope + ' output: ' + str(self.input.get_shape())) |
java | public void matches(int expectedMatches, double seconds) {
double end = System.currentTimeMillis() + (seconds * 1000);
try {
if (expectedMatches > 0) {
elementPresent(seconds);
}
while (element.get().matchCount() != expectedMatches && System.currentTimeMillis() < end) ;
double timeTook = Math.min((seconds * 1000) - (end - System.currentTimeMillis()), seconds * 1000) / 1000;
checkMatches(expectedMatches, seconds, timeTook);
} catch (TimeoutException e) {
checkMatches(expectedMatches, seconds, seconds);
}
} |
java | public static Geometry drapePolygon(Polygon p, Geometry triangles, STRtree sTRtree) {
GeometryFactory factory = p.getFactory();
// Split the triangles into lines to perform all intersections
Geometry triangleLines = LinearComponentExtracter.getGeometry(triangles, true);
Polygon splittedP = processPolygon(p, triangleLines, factory);
CoordinateSequenceFilter drapeFilter = new DrapeFilter(sTRtree);
splittedP.apply(drapeFilter);
return splittedP;
} |
java | public synchronized void shutdownDbmsPools() {
//force shutdown
//runnable
datasourceCleaner.shutdown();
datasourceCleaner = null;
//shell for the runnable
cleanerThread.interrupt();//stop the thread
cleanerThread = null;
if (dbmsPoolTable == null) {
return;
}
Enumeration<String> allDbmsKeys = dbmsPoolTable.keys();
while (allDbmsKeys.hasMoreElements()) {
String dbmsKey = allDbmsKeys.nextElement();
PooledDataSourceProvider provider = this.getProvider(dbmsKey);
Hashtable<String, DataSource> dsTable = dbmsPoolTable.get(dbmsKey);
for (DataSource ds : dsTable.values()) {
try {
provider.closePooledDataSource(ds);
} catch (SQLException e) {
// todo logger.error("Failed to close datasource in dbms poolKey = "
// + dbmsKey);
}
}
dsTable.clear();
}
dbmsPoolTable.clear();
dbmsPoolTable = null;
} |
java | public WindowFuture offer(K key, R request, long offerTimeoutMillis, long expireTimeoutMillis, boolean callerWaitingHint) throws DuplicateKeyException, OfferTimeoutException, PendingOfferAbortedException, InterruptedException {
if (offerTimeoutMillis < 0) {
throw new IllegalArgumentException("offerTimeoutMillis must be >= 0 [actual=" + offerTimeoutMillis + "]");
}
// does this key already exist?
if (this.futures.containsKey(key)) {
throw new DuplicateKeyException("The key [" + key + "] already exists in the window");
}
long offerTimestamp = System.currentTimeMillis();
this.lock.lockInterruptibly();
try {
// does enough room exist in the "window" for another pending request?
// NOTE: wait for room up to the offerTimeoutMillis
// NOTE: multiple signals may be received that will need to be ignored
while (getFreeSize() <= 0) {
// check if there time remaining to wait
long currentOfferTime = System.currentTimeMillis() - offerTimestamp;
if (currentOfferTime >= offerTimeoutMillis) {
throw new OfferTimeoutException("Unable to accept offer within [" + offerTimeoutMillis + " ms] (window full)");
}
// check if slow waiting was canceled (terminate early)
if (this.pendingOffersAborted.get()) {
throw new PendingOfferAbortedException("Pending offer aborted (by an explicit call to abortPendingOffers())");
}
// calculate the amount of timeout remaining
long remainingOfferTime = offerTimeoutMillis - currentOfferTime;
try {
// await for a new signal for this max amount of time
this.beginPendingOffer();
this.completedCondition.await(remainingOfferTime, TimeUnit.MILLISECONDS);
} finally {
boolean abortPendingOffer = this.endPendingOffer();
if (abortPendingOffer) {
throw new PendingOfferAbortedException("Pending offer aborted (by an explicit call to abortPendingOffers())");
}
}
}
long acceptTimestamp = System.currentTimeMillis();
long expireTimestamp = (expireTimeoutMillis > 0 ? (acceptTimestamp + expireTimeoutMillis) : -1);
int callerStateHint = (callerWaitingHint ? WindowFuture.CALLER_WAITING : WindowFuture.CALLER_NOT_WAITING);
DefaultWindowFuture<K,R,P> future = new DefaultWindowFuture<K,R,P>(this, lock, completedCondition, key, request, callerStateHint, offerTimeoutMillis, (futures.size() + 1), offerTimestamp, acceptTimestamp, expireTimestamp);
this.futures.put(key, future);
return future;
} finally {
this.lock.unlock();
}
} |
java | public String selectColor(List<String> colors) {
String tr="transparent";
for (String c: colors) {
if (!(c.equals(tr))) return c;
}
return tr;
} |
python | def get_action_arguments(self, service_name, action_name):
"""
Returns a list of tuples with all known arguments for the given
service- and action-name combination. The tuples contain the
argument-name, direction and data_type.
"""
return self.services[service_name].actions[action_name].info |
python | def getuvalue(self):
"""
.. _getuvalue:
Get the unsigned value of the integer, truncating it to ``width`` bits and handling overflows.
"""
bitset = [0] * self.width
zero = [1] * self.width
for shift in range(self.width):
bitset[shift] = (self._value & (1 << shift)) >> shift
if(self._sign):
bitset = bitsetxor(zero, bitset)
value = [ bitset[shift] << shift for shift in range(self.width)]
return sum(value) |
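For comparison, the usual one-liner for reading a Python int as an unsigned width-bit value is a single mask; the method above reaches a similar goal bit-by-bit, with an XOR flip when its sign flag is set:

```python
def uvalue(value: int, width: int) -> int:
    # truncate to the low `width` bits, two's-complement style
    return value & ((1 << width) - 1)

assert uvalue(-3, 4) == 13   # ...11101 -> 0b1101
assert uvalue(19, 4) == 3    # overflow wraps: 0b10011 -> 0b0011
```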
java | @Override
public Certificates getAvailableCertificates(Integer pageNo, Integer perPage)
throws DigitalOceanException, RequestUnsuccessfulException {
validatePageNo(pageNo);
return (Certificates) perform(new ApiRequest(ApiAction.AVAILABLE_CERTIFICATES, pageNo, perPage))
.getData();
} |
python | def learn(self, state, action, reward, next_state, done):
"""
Update replay memory and learn from a batch of random experiences sampled
from the replay buffer
:return: optimization loss if enough experiences are available, None otherwise
"""
self.steps += 1
self.replay.append((state, action, reward, next_state, done))
if self.steps > self.min_steps and len(self.replay) > self.batch_size:
batch = random.sample(self.replay, self.batch_size)
return self.optimize(batch)
return None |
python | def os_info():
"""Returns os data.
"""
return {
'uname': dict(platform.uname()._asdict()),
'path': os.environ.get('PATH', '').split(':'),
'shell': os.environ.get('SHELL', '/bin/sh'),
} |
java | private boolean checkEsTin(final String ptin) {
final char[] checkArray = {'T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J',
'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E'};
final char checkSum = ptin.charAt(8);
final char calculatedCheckSum;
final int sum;
if (StringUtils.isNumeric(StringUtils.substring(ptin, 0, 8))) {
// dni
sum = Integer.parseInt(StringUtils.substring(ptin, 0, 8)) % 23;
calculatedCheckSum = checkArray[sum];
} else if (ptin.charAt(0) >= 'X' && ptin.charAt(0) <= 'Z') {
// nie: map the leading X/Y/Z to 0/1/2, then apply the same rule as a dni
sum = Integer.parseInt(ptin.charAt(0) - 'X' + StringUtils.substring(ptin, 1, 8)) % 23;
calculatedCheckSum = checkArray[sum];
} else {
// cif
final char letter = ptin.charAt(0);
final String number = StringUtils.substring(ptin, 1, 8);
int evenSum = 0;
int oddSum = 0;
for (int i = 0; i < number.length(); i++) {
int charAsNum = number.charAt(i) - '0';
// Odd positions (Even index equals to odd position. i=0 equals first position)
if (i % 2 == 0) {
// Odd positions are multiplied first.
charAsNum *= 2;
// If the multiplication is bigger than 10 we need to adjust
oddSum += charAsNum < 10 ? charAsNum : charAsNum - 9;
// Even positions
// Just sum them
} else {
evenSum += charAsNum;
}
}
final int control_digit = (10 - (evenSum + oddSum) % 10) % 10; // second modulo maps a sum ending in 0 to control digit 0
final char control_letter = "JABCDEFGHI".charAt(control_digit);
switch (letter) {
case 'A':
case 'B':
case 'E':
case 'H':
// Control must be a digit
calculatedCheckSum = (char) (control_digit + '0');
break;
case 'K':
case 'P':
case 'Q':
case 'S':
// Control must be a letter
calculatedCheckSum = control_letter;
break;
default:
// Can be either
if (control_letter == checkSum) {
calculatedCheckSum = control_letter;
} else {
calculatedCheckSum = (char) (control_digit + '0');
}
break;
}
}
return checkSum == calculatedCheckSum;
} |
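The DNI branch in miniature: the first eight digits mod 23 index into the check-letter table, and the classic textbook example is 12345678Z:

```python
CHECK = "TRWAGMYFPDXBNJZSQVHLCKE"

def dni_check_letter(digits: str) -> str:
    return CHECK[int(digits) % 23]

assert dni_check_letter("12345678") == "Z"   # 12345678 % 23 == 14 -> 'Z'
```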
java | @Override
public Set<String> getVariables() {
Set<String> variables = new HashSet<>();
if(lhs.getVariable() != null) variables.add(lhs.getVariable());
if(rhs.getVariable() != null) variables.add(rhs.getVariable());
return variables;
} |
python | def add_layer_to_canvas(layer, name):
"""Helper method to add layer to QGIS.
:param layer: The layer.
:type layer: QgsMapLayer
:param name: Layer name.
:type name: str
"""
if qgis_version() >= 21800:
layer.setName(name)
else:
layer.setLayerName(name)
QgsProject.instance().addMapLayer(layer, False) |
python | def sc_cuts_alg(self, viewer, event, msg=True):
"""Adjust cuts algorithm interactively.
"""
if self.cancut:
direction = self.get_direction(event.direction)
self._cycle_cuts_alg(viewer, msg, direction=direction)
return True |
python | def removeActor(self, a):
"""Remove ``vtkActor`` or actor index from current renderer."""
if not self.initializedPlotter:
save_int = self.interactive
self.show(interactive=0)
self.interactive = save_int
return
if self.renderer:
self.renderer.RemoveActor(a)
if hasattr(a, 'renderedAt'):
ir = self.renderers.index(self.renderer)
a.renderedAt.discard(ir)
if a in self.actors:
i = self.actors.index(a)
del self.actors[i] |
python | def pave_project(self):
"""
Usage:
containment pave_project
"""
settings.project_customization.path.mkdir()
settings.project_customization.entrypoint.write_text(
self.context.entrypoint_text
)
settings.project_customization.runfile.write_text(
self.context.run_text
)
settings.project_customization.os_packages.write_text("[]")
settings.project_customization.lang_packages.write_text("{}")
self.write_dockerfile() |
python | def arcsin_sqrt(biom_tbl):
"""
Applies the arcsine square root transform to the
given BIOM-format table
"""
arcsint = lambda data, id_, md: np.arcsin(np.sqrt(data))
tbl_relabd = relative_abd(biom_tbl)
tbl_asin = tbl_relabd.transform(arcsint, inplace=False)
return tbl_asin |
java | protected void update(long readBytes) throws IOException {
if (progressListener.isAborted()) {
throw new IOException("Reading Cancelled by ProgressListener");
}
this.bytesRead += readBytes;
int step = (int)(bytesRead / stepSize);
if(step > lastStep) {
lastStep = step;
progressListener.updateProgress(step, stepNumber);
}
} |
python | def _get_model_fields(self, field_names, declared_fields, extra_kwargs):
"""
Returns all the model fields that are being mapped to by fields
on the serializer class.
Returned as a dict of 'model field name' -> 'model field'.
Used internally by `get_uniqueness_field_options`.
"""
model = getattr(self.Meta, 'model')
model_fields = {}
for field_name in field_names:
if field_name in declared_fields:
# If the field is declared on the serializer
field = declared_fields[field_name]
source = field.source or field_name
else:
try:
source = extra_kwargs[field_name]['source']
except KeyError:
source = field_name
if '.' in source or source == '*':
# Model fields will always have a simple source mapping,
# they can't be nested attribute lookups.
continue
try:
field = model._meta.get_field(source)
if isinstance(field, DjangoModelField):
model_fields[source] = field
except FieldDoesNotExist:
pass
return model_fields |
java | public int interpolateARGB(final double xNormalized, final double yNormalized){
double xF = xNormalized * (getWidth()-1);
double yF = yNormalized * (getHeight()-1);
int x = (int)xF;
int y = (int)yF;
int c00 = getValue(x, y);
int c01 = getValue(x, (y+1 < getHeight() ? y+1:y));
int c10 = getValue((x+1 < getWidth() ? x+1:x), y);
int c11 = getValue((x+1 < getWidth() ? x+1:x), (y+1 < getHeight() ? y+1:y));
return interpolateColors(c00, c01, c10, c11, xF-x, yF-y);
} |
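`interpolateColors` is not shown, but bilinear interpolation presumably weights the four neighbours per channel like this (`mx`, `my` are the fractional offsets `xF-x` and `yF-y`):

```python
def bilinear(c00, c01, c10, c11, mx, my):
    top = c00 * (1 - mx) + c10 * mx   # along x at row y   (c10 = x+1, y)
    bot = c01 * (1 - mx) + c11 * mx   # along x at row y+1 (c01 = x, y+1)
    return top * (1 - my) + bot * my  # then along y

bilinear(0, 0, 100, 100, 0.25, 0.5)   # -> 25.0
```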
java | public static void closeChainedTasks(List<ChainedDriver<?, ?>> tasks, AbstractInvokable parent) throws Exception {
for (int i = 0; i < tasks.size(); i++) {
final ChainedDriver<?, ?> task = tasks.get(i);
task.closeTask();
if (LOG.isDebugEnabled()) {
LOG.debug(constructLogString("Finished task code", task.getTaskName(), parent));
}
}
} |
python | def render_tooltip(self, tooltip, obj):
"""Render the tooltip for this column for an object
"""
if self.tooltip_attr:
val = getattr(obj, self.tooltip_attr)
elif self.tooltip_value:
val = self.tooltip_value
else:
return False
setter = getattr(tooltip, TOOLTIP_SETTERS.get(self.tooltip_type))
if self.tooltip_type in TOOLTIP_SIZED_TYPES:
setter(val, self.tooltip_image_size)
else:
setter(val)
return True |
python | def cmdSubstitute(self, cmd, count):
""" s
"""
cursor = self._qpart.textCursor()
for _ in range(count):
cursor.movePosition(QTextCursor.Right, QTextCursor.KeepAnchor)
if cursor.selectedText():
_globalClipboard.value = cursor.selectedText()
cursor.removeSelectedText()
self._saveLastEditSimpleCmd(cmd, count)
self.switchMode(Insert) |
python | def generator_samples(tmp_dir, pb_cst):
"""Generator for the dataset samples.
If not present, download and extract the dataset.
Args:
tmp_dir: path to the directory where to download the dataset.
pb_cst: CodingPbConstants object defining paths
Yields:
A CodingPbInfo object containing the next challenge information.
"""
# Step1: Download dataset (eventually)
data_zip_path = generator_utils.maybe_download_from_drive(
directory=tmp_dir,
filename=_DATASET_FILENAME,
url=_DATASET_URL,
)
tf.logging.info("Data downloaded in: {}".format(data_zip_path))
# Step2: Extract dataset
# We could deduce _DATASET_PB_PATH from the zip file (instead of
# hardcoded path)
data_rootdir = os.path.join(tmp_dir, _DATASET_PB_PATH)
if not tf.gfile.Exists(data_rootdir):
with zipfile.ZipFile(data_zip_path, "r") as corpus_zip:
corpus_zip.extractall(tmp_dir)
# We could remove the extracted __MACOSX folder
tf.logging.info("Data extracted in: {}".format(tmp_dir))
else:
tf.logging.info("Data already extracted in: {}".format(tmp_dir))
# Step3: Extract the problems list on the extracted folder
def contains_samples(subdir, dirs, files): # pylint: disable=unused-argument
"""Check that the folder contains a problem."""
return (
_DESC_DIR_NAME in dirs and
pb_cst.code_dir_name in dirs
)
def next_sample(subdir, dirs, files): # pylint: disable=unused-argument
"""Return the filenames of the problem."""
# More could be extracted (like the expected inputs/outputs
# pairs, the problem difficulty, the names of the algorithmic techniques
# needed)
desc_file = os.path.join(subdir, _DESC_DIR_NAME, "description.txt")
code_files = []
# As the dataset is noisy, the program deduces the language from the file
# content.
code_pattern = os.path.join(subdir, pb_cst.code_dir_name, "*.txt")
for f in tf.gfile.Glob(code_pattern):
with tf.gfile.GFile(f, mode="r") as target_file:
# Hack to filter C++/Java files. In theory some python comments could
# make the file be considered as C++ but in practice the chance of
# getting a false negative is low.
content = target_file.read()
if not any(p in content for p in pb_cst.filter_patterns):
code_files.append(f)
return CodingPbInfo(
desc_file=desc_file,
code_files=code_files
)
# The dataset contains problem from two different sources (CodeChef
# and CodeForces). Due to the limited number of samples, all problems from
# both sources are merged
for w in tf.gfile.Walk(data_rootdir):
if contains_samples(*w):
yield next_sample(*w) |
java | public Command receiveCommand(String queueName, long timeout) {
try {
Message message = receiveMessage(queueName, timeout);
if(message == null){
return null;
}else{
Command command;
if(binaryMode){
command = Command.fromBytes(getBytes((BytesMessage) message));
}else {
command = Command.fromXml(((TextMessage)message).getText());
}
command.setJMSMessageID(message.getJMSMessageID());
return command;
}
} catch (Exception e) {
throw new AsyncException("Could not get command", e);
}
} |
python | def insert_arguments_into_match_query(compilation_result, arguments):
"""Insert the arguments into the compiled MATCH query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a MATCH query with inserted argument data
"""
if compilation_result.language != MATCH_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
argument_types = compilation_result.input_metadata
# The arguments are assumed to have already been validated against the query.
sanitized_arguments = {
key: _safe_match_argument(argument_types[key], value)
for key, value in six.iteritems(arguments)
}
return base_query.format(**sanitized_arguments) |
java | public void setConfigurationProperty(final String name, final Object value)
{
if (XIncProcConfiguration.ALLOW_FIXUP_BASE_URIS.equals(name))
{
if (value instanceof Boolean)
{
this.baseUrisFixup = (Boolean) value;
}
else if (value instanceof String)
{
this.baseUrisFixup = Boolean.valueOf((String) value);
}
}
if (XIncProcConfiguration.ALLOW_FIXUP_LANGUAGE.equals(name))
{
if (value instanceof Boolean)
{
this.languageFixup = (Boolean) value;
}
else if (value instanceof String)
{
this.languageFixup = Boolean.valueOf((String) value);
}
}
} |
python | def _Close(self):
"""Closes the file-like object."""
if self._database_object:
self._database_object.Close()
self._blob = None
self._current_offset = 0
self._size = 0
self._table_name = None |
java | @Override public int read() throws IOException
{
int c;
if (m_searching)
{
int index = 0;
c = -1;
while (m_searching)
{
c = m_stream.read();
if (c == -1)
{
m_searchFailed = true;
throw new IOException("Pattern not found");
}
if (c == m_pattern[index])
{
++index;
if (index == m_pattern.length)
{
m_searching = false;
c = m_stream.read();
}
}
else
{
index = 0;
}
}
}
else
{
c = m_stream.read();
}
return c;
} |
java | public static double[][] transpose(double[][] A) {
int m = A.length;
int n = A[0].length;
double[][] matrix = new double[n][m];
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
matrix[j][i] = A[i][j];
}
}
return matrix;
} |
java | private static int binarySearch(int[] array, int toIndex, int key) {
int low = 0;
int high = toIndex - 1;
while (low <= high) {
int mid = (low + high) >>> 1;
int midVal = array[mid];
if (midVal < key) {
low = mid + 1;
} else if (midVal > key) {
high = mid - 1;
} else {
return mid; // key found
}
}
return -(low + 1); // key not found.
} |
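The `-(low + 1)` convention encodes the would-be insertion point while staying distinguishable from every valid index: `[1, 3, 5]` with key 5 returns 2, while key 4 returns -3 (insert at `-(-3) - 1 = 2`). Python's bisect exposes the raw insertion point directly:

```python
import bisect
assert bisect.bisect_left([1, 3, 5], 4) == 2   # the Java above returns -3
```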
java | public final void exclusiveOrExpression() throws RecognitionException {
int exclusiveOrExpression_StartIndex = input.index();
try {
if ( state.backtracking>0 && alreadyParsedRule(input, 114) ) { return; }
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1154:5: ( andExpression ( '^' andExpression )* )
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1154:9: andExpression ( '^' andExpression )*
{
pushFollow(FOLLOW_andExpression_in_exclusiveOrExpression5158);
andExpression();
state._fsp--;
if (state.failed) return;
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1154:23: ( '^' andExpression )*
loop146:
while (true) {
int alt146=2;
int LA146_0 = input.LA(1);
if ( (LA146_0==61) ) {
alt146=1;
}
switch (alt146) {
case 1 :
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1154:25: '^' andExpression
{
match(input,61,FOLLOW_61_in_exclusiveOrExpression5162); if (state.failed) return;
pushFollow(FOLLOW_andExpression_in_exclusiveOrExpression5164);
andExpression();
state._fsp--;
if (state.failed) return;
}
break;
default :
break loop146;
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
if ( state.backtracking>0 ) { memoize(input, 114, exclusiveOrExpression_StartIndex); }
}
} |
java | private void setFileSizesAfterRestart(boolean coldStart,
long minimumPermanentStoreSize, long maximumPermanentStoreSize, boolean isPermanentStoreSizeUnlimited,
long minimumTemporaryStoreSize, long maximumTemporaryStoreSize, boolean isTemporaryStoreSizeUnlimited,
long logSize) throws ObjectManagerException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "setFileSizesAfterRestart", new Object[]{"ColdStart="+coldStart, "LogSize="+logSize,
"MinimumPermanentStoreSize="+minimumPermanentStoreSize, "MaximumPermanentStoreSize="+maximumPermanentStoreSize, "IsPermanentStoreSizeUnlimited="+isPermanentStoreSizeUnlimited,
"MinimumTemporaryStoreSize="+minimumTemporaryStoreSize, "MaximumTemporaryStoreSize="+maximumTemporaryStoreSize, "IsTemporaryStoreSizeUnlimited="+isTemporaryStoreSizeUnlimited});
try
{
// The recommendation is to set the log file size
// before the store file sizes when cold-starting
if (coldStart)
{
long currentLogSize = _objectManager.getLogFileSize();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
SibTr.debug(this, tc, "The current size of the log file is " + Long.valueOf(currentLogSize) + " bytes. The size in the configuration information of the log file is " + Long.valueOf(logSize) + " bytes.");
if (currentLogSize != logSize)
{
// Defect 326589
// Check the values provided to us to make sure we are attempting
// sensible modifications. Each check is only worth doing if the matching
// store size unlimited flag is set to false. If this is true then we are
// going to ignore the maximum store size parameters anyway.
if ((!isPermanentStoreSizeUnlimited && (logSize > maximumPermanentStoreSize)) || // Log larger than permanent store
(!isTemporaryStoreSizeUnlimited && (logSize > maximumTemporaryStoreSize))) // Log larger than temporary store
{
SibTr.warning(tc, "FILE_STORE_LOG_SIZE_CHANGE_PREVENTED_SIMS1548");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Log size not changed!");
}
else
{
_objectManager.setLogFileSize(logSize);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Log size changed");
}
}
}
// Change the permanent store file sizes.
SingleFileObjectStore store = (SingleFileObjectStore)_permanentStore;
long currentPermanentStoreUsed = store.getStoreFileUsed();
long currentPermanentStoreSize = store.getStoreFileSize();
long currentMinimumPermanentStoreSize = store.getMinimumStoreFileSize();
long currentMaximumPermanentStoreSize = store.getMaximumStoreFileSize();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
SibTr.debug(this, tc, "currentPermanentStoreUsed = " + currentPermanentStoreUsed);
SibTr.debug(this, tc, "currentPermanentStoreSize = " + currentPermanentStoreSize);
SibTr.debug(this, tc, "currentMinimumPermanentStoreSize = " + currentMinimumPermanentStoreSize);
SibTr.debug(this, tc, "currentMaximumPermanentStoreSize = " + currentMaximumPermanentStoreSize);
// Defect 342044
// Output the current size of the data in the permanent store
SibTr.debug(this, tc, "The data in the permanent store file occupies " + Long.valueOf(currentPermanentStoreUsed) + " bytes.");
// Output the current file size limits.
if (currentMaximumPermanentStoreSize != MAXIMUM_STORE_FILE_SIZE)
{
SibTr.debug(this, tc, "The current minimum reserved size of the permanent store file is " + Long.valueOf(currentMinimumPermanentStoreSize) + " bytes. The current maximum size is " + Long.valueOf(currentMaximumPermanentStoreSize) + " bytes.");
}
else
{
SibTr.debug(this, tc, "The current minimum reserved size of the permanent store file is " + Long.valueOf(currentMinimumPermanentStoreSize) + " bytes. The current maximum size is unlimited");
}
}
// Defect 326589
// Check the values provided to us to make sure we are attempting
// sensible modifications. Each check is only worth doing if the matching
// store size unlimited flag is set to false. If this is true then we are
// going to ignore the maximum store size parameters anyway.
if ((!isPermanentStoreSizeUnlimited && (minimumPermanentStoreSize > maximumPermanentStoreSize))) // Permanent store minimum larger than maximum
{
SibTr.info(tc, "FILE_STORE_PERMANENT_STORE_SIZE_CONFIGURATION_INFO_SIMS1553", new Object[] {Long.valueOf(minimumPermanentStoreSize), Long.valueOf(maximumPermanentStoreSize)});
SibTr.warning(tc, "FILE_STORE_STORE_SIZE_CHANGE_PREVENTED_SIMS1549");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Permanent store size not changed!");
}
else
{
if ((currentMinimumPermanentStoreSize != minimumPermanentStoreSize) || /* Minimum is not the same as the current minimum. */
(!isPermanentStoreSizeUnlimited && (currentMaximumPermanentStoreSize != maximumPermanentStoreSize)) || /* Maximum is not the same as current limited maximum. */
(isPermanentStoreSizeUnlimited && (currentMaximumPermanentStoreSize != MAXIMUM_STORE_FILE_SIZE))) /* Maximum is not already set to unlimited. */
{
if (!isPermanentStoreSizeUnlimited)
{
SibTr.info(tc, "FILE_STORE_PERMANENT_STORE_SIZE_CONFIGURATION_INFO_SIMS1553", new Object[] {Long.valueOf(minimumPermanentStoreSize), Long.valueOf(maximumPermanentStoreSize)});
store.setStoreFileSize(minimumPermanentStoreSize, maximumPermanentStoreSize);
}
else
{
SibTr.info(tc, "FILE_STORE_PERMANENT_STORE_SIZE_CONFIGURATION_INFO_UNLIMITED_SIMS1554", new Object[] {Long.valueOf(minimumPermanentStoreSize)});
store.setStoreFileSize(minimumPermanentStoreSize, MAXIMUM_STORE_FILE_SIZE);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Permanent Store size changed");
}
}
// Change the temporary store file sizes.
store = (SingleFileObjectStore)_temporaryStore;
long currentTemporaryStoreUsed = store.getStoreFileUsed();
long currentTemporaryStoreSize = store.getStoreFileSize();
long currentMinimumTemporaryStoreSize = store.getMinimumStoreFileSize();
long currentMaximumTemporaryStoreSize = store.getMaximumStoreFileSize();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
SibTr.debug(this, tc, "currentTemporaryStoreUsed = " + currentTemporaryStoreUsed);
SibTr.debug(this, tc, "currentTemporaryStoreSize = " + currentTemporaryStoreSize);
SibTr.debug(this, tc, "currentMinimumTemporaryStoreSize = " + currentMinimumTemporaryStoreSize);
SibTr.debug(this, tc, "currentMaximumTemporaryStoreSize = " + currentMaximumTemporaryStoreSize);
// Defect 342044
// Output the current size of the data in the temporary store
SibTr.debug(this, tc, "The data in the temporary store file occupies " + Long.valueOf(currentTemporaryStoreUsed) + " bytes.");
// Output the current file size limits.
if (currentMaximumTemporaryStoreSize != MAXIMUM_STORE_FILE_SIZE)
{
SibTr.debug(this, tc, "The current minimum reserved size of the temporary store file is " + Long.valueOf(currentMinimumTemporaryStoreSize) + " bytes. The current maximum size is " + Long.valueOf(currentMaximumTemporaryStoreSize) +" bytes.");
}
else
{
SibTr.debug(this, tc, "The current minimum reserved size of the temporary store file is " + Long.valueOf(currentMinimumTemporaryStoreSize) + " bytes. The current maximum size is unlimited.");
}
}
// Defect 326589
// Check the values provided to us to make sure we are attempting
// sensible modifications. Each check is only worth doing if the matching
// store size unlimited flag is set to false. If this is true then we are
// going to ignore the maximum store size parameters anyway.
if ((!isTemporaryStoreSizeUnlimited && (minimumTemporaryStoreSize > maximumTemporaryStoreSize))) // Temporary store minimum larger than maximum
{
SibTr.info(tc, "FILE_STORE_TEMPORARY_STORE_SIZE_CONFIGURATION_INFO_SIMS1557", new Object[] {Long.valueOf(minimumTemporaryStoreSize), Long.valueOf(maximumTemporaryStoreSize)});
SibTr.warning(tc, "FILE_STORE_STORE_SIZE_CHANGE_PREVENTED_SIMS1549");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Temporary store size not changed!");
}
else
{
if ((currentMinimumTemporaryStoreSize != minimumTemporaryStoreSize) || /* Minimum is not the same as the current minimum. */
(!isTemporaryStoreSizeUnlimited && (currentMaximumTemporaryStoreSize != maximumTemporaryStoreSize)) || /* Maximum is not the same as current limited maximum. */
(isTemporaryStoreSizeUnlimited && (currentMaximumTemporaryStoreSize != MAXIMUM_STORE_FILE_SIZE))) /* Maximum is not already set to unlimited. */
{
if (!isTemporaryStoreSizeUnlimited)
{
SibTr.info(tc, "FILE_STORE_TEMPORARY_STORE_SIZE_CONFIGURATION_INFO_SIMS1557", new Object[] {Long.valueOf(minimumTemporaryStoreSize), Long.valueOf(maximumTemporaryStoreSize)});
store.setStoreFileSize(minimumTemporaryStoreSize, maximumTemporaryStoreSize);
}
else
{
SibTr.info(tc, "FILE_STORE_TEMPORARY_STORE_SIZE_CONFIGURATION_INFO_UNLIMITED_SIMS1558", new Object[] {Long.valueOf(minimumTemporaryStoreSize)});
store.setStoreFileSize(minimumTemporaryStoreSize, MAXIMUM_STORE_FILE_SIZE);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Temporary Store size changed");
}
}
// The recommendation is to set the log file size
// after the store file sizes when warm-starting
if (!coldStart)
{
long currentLogSize = _objectManager.getLogFileSize();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
SibTr.debug(this, tc, "The current size of the log file is " + Long.valueOf(currentLogSize) + " bytes. The size in the configuration information of the log file is " + Long.valueOf(logSize) + " bytes.");
if (currentLogSize != logSize)
{
// Defect 326589
// Check the values provided to us to make sure we are attempting
// sensible modifications. Each check is only worth doing if the matching
// store size unlimited flag is set to false. If this is true then we are
// going to ignore the maximum store size parameters anyway.
if ((!isPermanentStoreSizeUnlimited && (logSize > maximumPermanentStoreSize)) || // Log larger than permanent store
(!isTemporaryStoreSizeUnlimited && (logSize > maximumTemporaryStoreSize))) // Log larger than temporary store
{
SibTr.warning(tc, "FILE_STORE_LOG_SIZE_CHANGE_PREVENTED_SIMS1548");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Log size not changed!");
}
else
{
_objectManager.setLogFileSize(logSize);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Log size changed");
}
}
}
}
catch (LogFileSizeTooSmallException lfse) // This is due to the minimum log size not being big
{ // enough for existing data.
com.ibm.ws.ffdc.FFDCFilter.processException(lfse, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.setFileSizesAfterRestart", "1:2531:1.81.1.6", this);
SibTr.warning(tc, "FILE_STORE_LOG_SIZE_CHANGE_PREVENTED_SIMS1548");
}
catch (IllegalArgumentException iae) // This is due to invalid file sizes i.e. min > max
{
com.ibm.ws.ffdc.FFDCFilter.processException(iae, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.setFileSizesAfterRestart", "1:2536:1.81.1.6", this);
SibTr.warning(tc, "FILE_STORE_STORE_SIZE_CHANGE_PREVENTED_SIMS1549");
}
catch (StoreFileSizeTooSmallException sfse) // This is due to the minimum store size not being big
{ // enough for existing data.
com.ibm.ws.ffdc.FFDCFilter.processException(sfse, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.setFileSizesAfterRestart", "1:2541:1.81.1.6", this);
SibTr.warning(tc, "FILE_STORE_STORE_SIZE_CHANGE_PREVENTED_SIMS1549");
}
catch (PermanentIOException pie) // This is due to an error setting the minimum store size
{ // on the local file system.
com.ibm.ws.ffdc.FFDCFilter.processException(pie, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.setFileSizesAfterRestart", "1:2546:1.81.1.6", this);
SibTr.warning(tc, "FILE_STORE_STORE_SIZE_CHANGE_PREVENTED_SIMS1549");
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "setFileSizesAfterRestart");
} |
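A quick distillation of the guard logic above: a store-size change needs min <= max whenever the maximum is enforced, and a log-size change needs the log to fit inside every enforced store maximum. A minimal Python sketch of the same rules (function and parameter names are illustrative, not from the original class):

def store_size_change_allowed(minimum, maximum, size_unlimited):
    # when the maximum is unlimited the configured maximum is ignored,
    # so only a limited store needs the min <= max ordering check
    return size_unlimited or minimum <= maximum

def log_size_change_allowed(log_size, max_permanent, max_temporary,
                            permanent_unlimited, temporary_unlimited):
    # the log must fit inside every store whose maximum is enforced
    if not permanent_unlimited and log_size > max_permanent:
        return False
    if not temporary_unlimited and log_size > max_temporary:
        return False
    return True

assert store_size_change_allowed(50, 100, False)
assert not log_size_change_allowed(200, 100, 300, False, False)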
python | def write_gexf(docgraph, output_file):
"""
takes a document graph, converts it into GEXF format and writes it to
a file.
"""
dg_copy = deepcopy(docgraph)
remove_root_metadata(dg_copy)
layerset2str(dg_copy)
attriblist2str(dg_copy)
nx_write_gexf(dg_copy, output_file) |
python | def get_jac(self):
""" Derives the jacobian from ``self.exprs`` and ``self.dep``. """
if self._jac is True:
if self.sparse is True:
self._jac, self._colptrs, self._rowvals = self.be.sparse_jacobian_csc(self.exprs, self.dep)
elif self.band is not None: # Banded
self._jac = self.be.banded_jacobian(self.exprs, self.dep, *self.band)
else:
f = self.be.Matrix(1, self.ny, self.exprs)
self._jac = f.jacobian(self.be.Matrix(1, self.ny, self.dep))
elif self._jac is False:
return False
return self._jac |
java | public void setGenericKeywords(java.util.Collection<String> genericKeywords) {
if (genericKeywords == null) {
this.genericKeywords = null;
return;
}
this.genericKeywords = new java.util.ArrayList<String>(genericKeywords);
} |
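This setter is the standard defensive-copy pattern: storing a fresh ArrayList insulates the object from later mutation of the caller's collection. The same idea in Python (a sketch; the class name is invented):

class Product:
    def set_generic_keywords(self, keywords):
        # copy defensively so mutating the caller's list afterwards
        # cannot change this object's state
        self.generic_keywords = None if keywords is None else list(keywords)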
python | def replace_label(self, oldLabel, newLabel):
    """ Replaces every whole-word occurrence of oldLabel with newLabel
    in the stored assembler text.
    """
    if oldLabel == newLabel:
        return
    # \b anchors restrict matches to whole tokens; re.escape guards
    # against regex metacharacters appearing in the label itself
    tmp = re.compile(r'\b' + re.escape(oldLabel) + r'\b')
    last = 0
    new_len = len(newLabel)
    while True:
        match = tmp.search(self.asm[last:])
        if not match:
            break
        txt = self.asm
        self.asm = txt[:last + match.start()] + newLabel + txt[last + match.end():]
        # resume scanning just past the label we inserted
        last += match.start() + new_len |
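The word-boundary anchors are what make this a whole-token rename rather than a substring replace. The core idea, stripped of the assembler state:

import re

def replace_whole_word(text, old, new):
    # \b keeps 'loop' from matching inside 'loop2';
    # re.escape guards against metacharacters in the label
    return re.sub(r'\b' + re.escape(old) + r'\b', new, text)

asm = "loop: djnz loop2\n jp loop"
print(replace_whole_word(asm, "loop", "start"))
# start: djnz loop2
#  jp start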
python | def handle(self, key, value):
'''
Processes a vaild stats request
@param key: The key that matched the request
@param value: The value associated with the key
'''
# break down key
elements = key.split(":")
stats = elements[1]
appid = elements[2]
uuid = value
# log we received the stats request
extras = self.get_log_dict('stats', appid, uuid=uuid)
self.logger.info('Received {s} stats request'.format(s=stats),
extra=extras)
extras = {}
if stats == 'all':
extras = self.get_all_stats()
elif stats == 'kafka-monitor':
extras = self.get_kafka_monitor_stats()
elif stats == 'redis-monitor':
extras = self.get_redis_monitor_stats()
elif stats == 'crawler':
extras = self.get_crawler_stats()
elif stats == 'spider':
extras = self.get_spider_stats()
elif stats == 'machine':
extras = self.get_machine_stats()
elif stats == 'queue':
extras = self.get_queue_stats()
elif stats == 'rest':
extras = self.get_rest_stats()
else:
self.logger.warn('Received invalid stats request: {s}'\
.format(s=stats),
extra=extras)
return
extras['stats'] = stats
extras['appid'] = appid
extras['uuid'] = uuid
extras['server_time'] = int(self.get_current_time())
if self._send_to_kafka(extras):
extras['success'] = True
self.logger.info('Sent stats to kafka', extra=extras)
else:
extras['success'] = False
self.logger.error('Failed to send stats to kafka', extra=extras) |
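The routing hinges entirely on the positional key format <prefix>:<stats type>:<appid>; a dict dispatch is a common alternative to the elif chain. A minimal sketch with invented key values (the real handler methods live on the class above):

key = "statsrequest:crawler:testapp"   # hypothetical key contents
_, stats, appid = key.split(":")
handlers = {
    'crawler': lambda: {'spiders': 2},   # stand-in for get_crawler_stats()
    'queue': lambda: {'backlog': 17},    # stand-in for get_queue_stats()
}
extras = handlers.get(stats, lambda: None)()
print(stats, appid, extras)   # crawler testapp {'spiders': 2}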
java | private void validateInput() {
// Check for null values first.
if (url == null) {
throw new NullPointerException("URL should not be null");
}
if (!url.getProtocol().matches("^https?$")) {
throw new IllegalArgumentException(
"URL protocol should be HTTP or HTTPS");
}
if (url.getRef() != null) {
throw new IllegalArgumentException(
"URL should contain no reference");
}
if (url.getQuery() != null) {
throw new IllegalArgumentException(
"URL should contain no query string");
}
if (handler == null) {
throw new NullPointerException(
"Callback handler should not be null");
}
} |
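For comparison, the same four checks expressed with Python's urllib.parse (a sketch, not part of the original library; it raises ValueError throughout where the Java version distinguishes NullPointerException from IllegalArgumentException):

from urllib.parse import urlparse

def validate_input(url, handler):
    if url is None:
        raise ValueError("URL should not be null")
    parts = urlparse(url)
    if parts.scheme not in ("http", "https"):
        raise ValueError("URL protocol should be HTTP or HTTPS")
    if parts.fragment:
        raise ValueError("URL should contain no reference")
    if parts.query:
        raise ValueError("URL should contain no query string")
    if handler is None:
        raise ValueError("Callback handler should not be null")

validate_input("https://example.com/path", handler=object())  # passes silently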
python | def embed(self, x, nfeatures=2):
"""Embed all given tensors into an nfeatures-dim space. """
list_split = 0
if isinstance(x, list):
list_split = len(x)
x = tf.concat(x, 0)
# pre-process MNIST dataflow data
x = tf.expand_dims(x, 3)
x = x * 2 - 1
# the embedding network
net = slim.layers.conv2d(x, 20, 5, scope='conv1')
net = slim.layers.max_pool2d(net, 2, scope='pool1')
net = slim.layers.conv2d(net, 50, 5, scope='conv2')
net = slim.layers.max_pool2d(net, 2, scope='pool2')
net = slim.layers.flatten(net, scope='flatten3')
net = slim.layers.fully_connected(net, 500, scope='fully_connected4')
embeddings = slim.layers.fully_connected(net, nfeatures, activation_fn=None, scope='fully_connected5')
# if "x" was a list of tensors, then split the embeddings
if list_split > 0:
embeddings = tf.split(embeddings, list_split, 0)
return embeddings |
java | public PagedList<DscNodeReportInner> listByNodeNext(final String nextPageLink) {
ServiceResponse<Page<DscNodeReportInner>> response = listByNodeNextSinglePageAsync(nextPageLink).toBlocking().single();
return new PagedList<DscNodeReportInner>(response.body()) {
@Override
public Page<DscNodeReportInner> nextPage(String nextPageLink) {
return listByNodeNextSinglePageAsync(nextPageLink).toBlocking().single().body();
}
};
} |
java | public boolean limit() {
    // get the connection
    Object connection = getConnection();
    Object result = limitRequest(connection);
    // any result other than FAIL_CODE means the request is allowed
    return FAIL_CODE != (Long) result;
} |
java | public String getLabel() {
String label = getProperty(labelProperty);
label = label == null ? node.getLabel() : label;
if (label == null) {
label = getDefaultInstanceName();
setProperty(labelProperty, label);
}
return label;
} |
java | @Override
public synchronized void close() {
if (currentStatus != StreamStatus.CLOSED) {
// Only flush the print stream: don't close it.
currentPrintStream.flush();
if (!LoggingFileUtils.tryToClose(currentPrintStream)) {
LoggingFileUtils.tryToClose(currentFileStream);
}
// Send to bit bucket again-- this holder is done (false -- do not replace)
setStreamStatus(StreamStatus.CLOSED, null, null, DummyOutputStream.psInstance);
}
} |
python | def transformer_aux_base():
"""Set of hyperparameters."""
hparams = transformer.transformer_base()
hparams.shared_embedding_and_softmax_weights = False
hparams.add_hparam("shift_values", "1,2,3,4")
return hparams |
python | def read_config(configpath=None, specpath=None, checks=None,
report_extra=False):
"""
get a (validated) config object for given config file path.
:param configpath: path to config-file or a list of lines as its content
:type configpath: str or list(str)
:param specpath: path to spec-file
:type specpath: str
:param checks: custom checks to use for validator.
see `validate docs <http://www.voidspace.org.uk/python/validate.html>`_
:type checks: dict str->callable,
:param report_extra: log if a setting is not present in the spec file
:type report_extra: boolean
:raises: :class:`~alot.settings.errors.ConfigError`
:rtype: `configobj.ConfigObj`
"""
checks = checks or {}
try:
config = ConfigObj(infile=configpath, configspec=specpath,
file_error=True, encoding='UTF8')
except ConfigObjError as e:
msg = 'Error when parsing `%s`:\n%s' % (configpath, e)
logging.error(msg)
raise ConfigError(msg)
except IOError:
raise ConfigError('Could not read %s and/or %s'
% (configpath, specpath))
except UnboundLocalError:
# this works around a bug in configobj
msg = '%s is malformed. Check for sections without parents.'
raise ConfigError(msg % configpath)
if specpath:
validator = Validator()
validator.functions.update(checks)
try:
results = config.validate(validator, preserve_errors=True)
except ConfigObjError as e:
raise ConfigError(str(e))
if results is not True:
error_msg = ''
for (section_list, key, res) in flatten_errors(config, results):
if key is not None:
if res is False:
msg = 'key "%s" in section "%s" is missing.'
msg = msg % (key, ', '.join(section_list))
else:
msg = 'key "%s" in section "%s" failed validation: %s'
msg = msg % (key, ', '.join(section_list), res)
else:
msg = 'section "%s" is missing' % '.'.join(section_list)
error_msg += msg + '\n'
raise ConfigError(error_msg)
extra_values = get_extra_values(config) if report_extra else None
if extra_values:
msg = ['Unknown values were found in `%s`. Please check for '
'typos if a specified setting does not seem to work:'
% configpath]
for sections, val in extra_values:
if sections:
msg.append('%s: %s' % ('->'.join(sections), val))
else:
msg.append(str(val))
logging.info('\n'.join(msg))
return config |
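A hedged usage sketch: configpath explicitly accepts a list of lines per the docstring, and treating specpath the same way is an assumption here; the settings themselves are invented:

spec = ["colour = string(default='blue')", "timeout = integer(min=0, default=10)"]
cfg = ["colour = red", "timeout = 5"]
# config = read_config(cfg, specpath=spec)
# config['timeout']   # -> 5, coerced and validated as an integer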
java | private void init() {
setTitle("Validation Framework Test");
setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
// Create content pane
JPanel contentPane = new JPanel(new MigLayout("fill, wrap 1"));
setContentPane(contentPane);
// Tabbed pane
JTabbedPane tabbedPane = new JTabbedPane();
contentPane.add(tabbedPane, "grow");
// Create tabs
tabbedPane.add("Constant info", createTabConstantInfo());
tabbedPane.add("Disabled", createTabDisabled());
tabbedPane.add("Single validation", createTabSingleCompWithValidation());
tabbedPane.add("Split pane", createTabSplitPane());
tabbedPane.add("Scroll pane", createTabScrollPane());
tabbedPane.add("Small panels", createTabSmallPanels());
// Set size
Dimension size = new Dimension(640, 480);
setSize(size);
// Set location
Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
setLocation((screenSize.width - size.width) / 2, (screenSize.height - size.height) / 3);
} |
java | @Handler
public void onStart(Start event) {
synchronized (this) {
if (runner != null && !runner.isInterrupted()) {
return;
}
runner = new Thread(this, Components.simpleObjectName(this));
runner.start();
}
} |
java | @Override
public final SessionDestroyedListener addingService(ServiceReference<SessionDestroyedListener> serviceReference) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Listener [" + serviceReference + "] has been added.");
}
final SessionDestroyedListener listener = context.getService(serviceReference);
handler.addListener(listener);
return listener;
} |
python | def must_contain(tag_name, tag_content, container_tag_name):
"""
Generate function, which checks if given element contains `tag_name` with
string content `tag_content` and also another tag named
`container_tag_name`.
This function can be used as parameter for .find() method in HTMLElement.
"""
def must_contain_closure(element):
# the element's first level of children must contain a <tag_name> tag
matching_tags = element.match(tag_name, absolute=True)
if not matching_tags:
return False
# and that tag's content must equal `tag_content`
if matching_tags[0].getContent() != tag_content:
return False
# and the element must also contain a <container_tag_name> tag
if container_tag_name and \
not element.match(container_tag_name, absolute=True):
return False
return True
return must_contain_closure |
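A usage sketch, assuming the HTMLElement API the docstring refers to (the HTML structure and the exact .find() signature are assumptions):

# hypothetical: find <book> elements whose first-level <name> child has
# exactly the content "Foo" and which also contain a <price> child
# pred = must_contain("name", "Foo", "price")
# matches = dom.find("book", fn=pred)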
java | public static int ofSetBits32_1(int i) {
i = i - ((i >>> 1) & 0x55555555);
i = (i & 0x33333333) + ((i >>> 2) & 0x33333333);
i = ((i + (i >>> 4)) & 0x0F0F0F0F);
return (i * (0x01010101)) >>> 24;
} |
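This is the classic SWAR population count: 1-bit fields are folded into 2-bit sums, then 4-bit sums, then per-byte sums, and the multiply by 0x01010101 adds the four byte counts into the top byte. The same arithmetic in Python, masked to 32 bits:

def popcount32(i):
    i &= 0xFFFFFFFF
    i = i - ((i >> 1) & 0x55555555)                 # 2-bit sums of bit pairs
    i = (i & 0x33333333) + ((i >> 2) & 0x33333333)  # 4-bit sums
    i = (i + (i >> 4)) & 0x0F0F0F0F                 # per-byte sums
    return ((i * 0x01010101) & 0xFFFFFFFF) >> 24    # add the four bytes

assert popcount32(0b10110010) == 4
assert popcount32(0xFFFFFFFF) == 32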
java | @Override
public List<CommerceCountry> findByG_S_A(long groupId,
boolean shippingAllowed, boolean active) {
return findByG_S_A(groupId, shippingAllowed, active, QueryUtil.ALL_POS,
QueryUtil.ALL_POS, null);
} |
python | def get_line_color(self, increment=1):
    """
    Returns the current color, then advances the palette index by the
    given increment, wrapping around the palette.
    """
    i = self.line_colors_index
    # modulo handles any increment size in one step, including wrap-around
    self.line_colors_index = (self.line_colors_index + increment) % len(self.line_colors)
    return self.line_colors[i] |
java | public void loadProfile(Object key)
{
if (!isEnablePerThreadChanges())
{
throw new MetadataException("Can not load profile with disabled per thread mode");
}
DescriptorRepository rep = (DescriptorRepository) metadataProfiles.get(key);
if (rep == null)
{
throw new MetadataException("Can not find profile for key '" + key + "'");
}
currentProfileKey.set(key);
setDescriptor(rep);
} |