language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
/**
 * Decodes as much of {@code in} as possible, either as a fixed-length block
 * (when the context carries a content length) or up to a delimiter match.
 * Returns the decoded buffer when a complete message is available; otherwise
 * saves partial state in the context for the next call.
 */
public IoBuffer decodeFully(IoBuffer in) {
    int contentLength = ctx.getContentLength();
    IoBuffer decodedBuffer = ctx.getDecodedBuffer();
    int oldLimit = in.limit();
    // Retrieve fixed length content
    if (contentLength > -1) {
        if (decodedBuffer == null) {
            decodedBuffer = IoBuffer.allocate(contentLength).setAutoExpand(
                true);
        }
        // If not enough data to complete the decoding
        if (in.remaining() < contentLength) {
            int readBytes = in.remaining();
            decodedBuffer.put(in);
            ctx.setDecodedBuffer(decodedBuffer);
            // Remember how many bytes are still missing for the next call.
            ctx.setContentLength(contentLength - readBytes);
            return null;
        }
        // Enough data: consume exactly contentLength bytes via a temporary
        // limit, then restore the caller's limit.
        int newLimit = in.position() + contentLength;
        in.limit(newLimit);
        decodedBuffer.put(in);
        decodedBuffer.flip();
        in.limit(oldLimit);
        ctx.reset();
        return decodedBuffer;
    }
    // Not a fixed length matching so try to find a delimiter match
    int oldPos = in.position();
    int matchCount = ctx.getMatchCount();
    IoBuffer delimiter = ctx.getDelimiter();
    while (in.hasRemaining()) {
        byte b = in.get();
        if (delimiter.get(matchCount) == b) {
            matchCount++;
            if (matchCount == delimiter.limit()) {
                // Found a match: hand back everything up to and including
                // the delimiter, then restore the original limit.
                int pos = in.position();
                in.position(oldPos);
                in.limit(pos);
                if (decodedBuffer == null) {
                    decodedBuffer = IoBuffer.allocate(in.remaining())
                        .setAutoExpand(true);
                }
                decodedBuffer.put(in);
                decodedBuffer.flip();
                in.limit(oldLimit);
                ctx.reset();
                return decodedBuffer;
            }
        } else {
            // Mismatch: rewind past the partially-matched delimiter bytes.
            in.position(Math.max(0, in.position() - matchCount));
            matchCount = 0;
        }
    }
    // Copy remainder from buf.
    // NOTE(review): the loop above only exits when in.hasRemaining() is
    // false, so in.remaining() is 0 here and this branch appears dead —
    // scanned-but-unmatched bytes are never buffered. If it ever did run,
    // decodedBuffer could still be null (NPE on put). Confirm intended
    // behavior before relying on cross-buffer accumulation here.
    if (in.remaining() > 0) {
        in.position(oldPos);
        decodedBuffer.put(in);
        in.position(in.limit());
    }
    // Save decoding state
    ctx.setMatchCount(matchCount);
    ctx.setDecodedBuffer(decodedBuffer);
    return decodedBuffer;
}
def patch_webbrowser():
    """
    Some custom patches on top of the python webbrowser module to fix
    user reported bugs and limitations of the module.
    """
    # https://bugs.python.org/issue31014
    # https://github.com/michael-lazar/rtv/issues/588
    def register_patch(name, klass, instance=None, update_tryorder=None, preferred=False):
        """
        Wrapper around webbrowser.register() that detects if the function was
        invoked with the legacy function signature. If so, the signature is
        fixed before passing it along to the underlying function.
        Examples:
            register(name, klass, instance, -1)
            register(name, klass, instance, update_tryorder=-1)
            register(name, klass, instance, preferred=True)
        """
        if update_tryorder is not None:
            # Legacy convention: update_tryorder == -1 meant "make preferred".
            preferred = (update_tryorder == -1)
        return webbrowser._register(name, klass, instance, preferred=preferred)

    # Install the shim only on 3.7+, where register() grew the `preferred`
    # keyword (see the bpo link above); older versions keep the stock API.
    if sys.version_info[:2] >= (3, 7):
        webbrowser._register = webbrowser.register
        webbrowser.register = register_patch

    # Add support for browsers that aren't defined in the python standard library
    webbrowser.register('surf', None, webbrowser.BackgroundBrowser('surf'))
    webbrowser.register('vimb', None, webbrowser.BackgroundBrowser('vimb'))
    webbrowser.register('qutebrowser', None, webbrowser.BackgroundBrowser('qutebrowser'))

    # Fix the opera browser, see https://github.com/michael-lazar/rtv/issues/476.
    # By default, opera will open a new tab in the current window, which is
    # what we want to do anyway.
    webbrowser.register('opera', None, webbrowser.BackgroundBrowser('opera'))

    # https://bugs.python.org/issue31348
    # Use MacOS actionscript when opening the program defined in by $BROWSER
    if sys.platform == 'darwin' and 'BROWSER' in os.environ:
        _userchoices = os.environ["BROWSER"].split(os.pathsep)
        # Reversed so the first $BROWSER entry ends up registered last and
        # therefore with the highest priority (update_tryorder=-1).
        for cmdline in reversed(_userchoices):
            if cmdline in ('safari', 'firefox', 'chrome', 'default'):
                browser = webbrowser.MacOSXOSAScript(cmdline)
                webbrowser.register(cmdline, None, browser, update_tryorder=-1)
def safe_makedirs(path):
    """Create ``path`` and any missing parents, tolerating a pre-existing
    directory.

    Re-raises the OSError when the path exists but is not a directory, or
    when creation fails for any other reason.
    """
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
        if not os.path.isdir(path):
            # Something exists at the path but it is not a directory.
            raise
java | public void acceptAlert() {
String action = "Clicking 'OK' on an alert";
String expected = "Alert is present to be clicked";
// wait for element to be present
if (!is.alertPresent()) {
waitFor.alertPresent();
}
if (!is.alertPresent()) {
reporter.fail(action, expected, "Unable to click alert as it is not present");
return; // indicates element not present
}
accept(action, expected, "alert");
} |
@Override
public Pair<Integer, Integer> pPredecessor(int partition, int token) {
    // Verify the token actually belongs to the given partition.
    Integer partForToken = m_tokensMap.get().get(token);
    if (partForToken != null && partForToken == partition) {
        // Predecessor = greatest token strictly below this one.
        Map.Entry<Integer, Integer> predecessor = m_tokensMap.get().headMap(token).lastEntry();
        if (predecessor == null) {
            // No smaller token: wrap around the ring to the greatest token.
            predecessor = m_tokensMap.get().lastEntry();
        }
        if (predecessor.getKey() != token) {
            return Pair.of(predecessor.getKey(), predecessor.getValue());
        } else {
            // The wrap-around landed back on the input token, so it is the
            // only token on the ring — there is no distinct predecessor.
            throw new RuntimeException("There is only one token on the hash ring");
        }
    } else {
        // The given token does not map to the given partition.
        throw new IllegalArgumentException("The given token " + token +
            " does not map to partition " + partition);
    }
}
def sitemap_index(request):
    """Return a sitemap index xml file for search engines."""
    sitemaps = []
    with db_connect() as db_connection:
        with db_connection.cursor() as cursor:
            # One sitemap entry per primary author: list each distinct
            # authors[1] together with its most recent revision time.
            cursor.execute("""\
SELECT authors[1], max(revised)
FROM latest_modules
WHERE portal_type NOT IN ('CompositeModule', 'SubCollection')
GROUP BY authors[1]
""")
            for author, revised in cursor.fetchall():
                # lastmod lets crawlers skip per-author sitemaps that have
                # not changed since the last crawl.
                sitemaps.append(Sitemap(url=request.route_url(
                    'sitemap', from_id=author),
                    lastmod=revised))
    si = SitemapIndex(sitemaps=sitemaps)
    # Render the index document directly into the response body.
    resp = request.response
    resp.status = '200 OK'
    resp.content_type = 'text/xml'
    resp.body = si()
    return resp
/**
 * Returns whether {@code val} matches the wildcard expression {@code exp}
 * by translating the wildcard syntax into a regular expression and
 * delegating to {@code valueMatchesRegularExpression}.
 */
public static boolean valueMatchesWildcardExpression(String val, String exp) {
    //replace [\^$.|?*+() to make regexp do wildcard match
    // NOTE(review): '?' is mapped to ".?" (zero-or-one char) rather than the
    // classic glob meaning "." (exactly one char) — confirm intentional.
    // NOTE(review): '|' appears in the comment above but is NOT escaped in
    // the arrays below, so a literal '|' in exp acts as regex alternation.
    String expCopy = StringSupport.replaceAll(
        exp,
        new String[]{"[", "\\", "^", "$", ".", "?", "*", "+", "(", ")"},
        new String[]{"\\[", "\\\\", "\\^", "\\$", "\\.",".?", ".*", "\\+", "\\(", "\\)"});
    return (valueMatchesRegularExpression(val, expCopy));
}
java | @SuppressWarnings("unchecked")
public boolean getBooleanValue() {
String stringValue = getStringValue();
checkState(
stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("false"),
"Field value is not of boolean type");
return Boolean.parseBoolean(stringValue);
} |
/**
 * Builds an interpolated time-series value for {@code name} by weighting the
 * nearest samples before and after the current timestamp.
 */
private TimeSeriesValue interpolateTSV(GroupName name) {
    // Nearest samples for this group on either side of the target timestamp.
    final Map.Entry<DateTime, TimeSeriesValue> backTSV = findName(backward, name),
        forwTSV = findName(forward, name);
    // Distances (ms) from the target timestamp to each neighbour, clamped at 0.
    final long backMillis = max(new Duration(backTSV.getKey(), getTimestamp()).getMillis(), 0),
        forwMillis = max(new Duration(getTimestamp(), forwTSV.getKey()).getMillis(), 0);
    final double totalMillis = forwMillis + backMillis;
    // Inverse-distance weights: the closer neighbour gets the larger weight.
    // NOTE(review): if both distances are 0, these become NaN (0/0) — confirm
    // callers never interpolate exactly at both neighbours' timestamp.
    final double backWeight = forwMillis / totalMillis;
    final double forwWeight = backMillis / totalMillis;
    return new InterpolatedTSV(name, backTSV.getValue().getMetrics(), forwTSV.getValue().getMetrics(), backWeight, forwWeight);
}
def get_recipe_intent_handler(request):
    """Handle a GetRecipeIntent request.

    Reads the "Ingredient" slot, remembers it in the session, and responds
    with a recipe prompt plus a display card.

    :param request: Alexa request object exposing ``slots`` and ``session``.
    :returns: a response object built by ``alexa.create_response``.
    """
    # Get variables like userId, slots, intent name etc from the 'Request' object
    ingredient = request.slots["Ingredient"]  # Gets an Ingredient Slot from the Request object.
    # FIX: compare to None with `is`, not `==` (PEP 8; avoids surprises from
    # custom __eq__ implementations on slot values).
    if ingredient is None:
        return alexa.create_response("Could not find an ingredient!")
    # All manipulations to the request's session object are automatically
    # reflected in the request returned to Amazon. E.g. this statement adds a
    # session attribute (returned with the response) storing the last seen
    # ingredient value under the 'last_ingredient' key.
    request.session['last_ingredient'] = ingredient  # Automatically returned as a sessionAttribute
    # Modifying state like this saves us from explicitly having to return
    # Session objects after every response.
    # alexa can also build cards which can be sent as part of the response.
    card = alexa.create_card(title="GetRecipeIntent activated", subtitle=None,
                             content="asked alexa to find a recipe using {}".format(ingredient))
    return alexa.create_response("Finding a recipe with the ingredient {}".format(ingredient),
                                 end_session=False, card_obj=card)
/**
 * Creates the script bindings for evaluating a MagmaScript expression
 * against the given entity.
 */
private Bindings createBindings(Entity entity, int depth) {
    Bindings bindings = new SimpleBindings();
    // Fetch the MagmaScript helpers from Nashorn's shared global scope.
    JSObject global = (JSObject) magmaBindings.get("nashorn.global");
    JSObject magmaScript = (JSObject) global.getMember(KEY_MAGMA_SCRIPT);
    JSObject dollarFunction = (JSObject) magmaScript.getMember(KEY_DOLLAR);
    JSObject bindFunction = (JSObject) dollarFunction.getMember(BIND);
    // Bind the $ function to this entity's value map so expressions
    // evaluate against the entity's attributes.
    Object boundDollar = bindFunction.call(dollarFunction, toScriptEngineValueMap(entity, depth));
    bindings.put(KEY_DOLLAR, boundDollar);
    bindings.put(KEY_NEW_VALUE, magmaScript.getMember(KEY_NEW_VALUE));
    bindings.put(KEY_IS_NULL, magmaScript.getMember(KEY_IS_NULL));
    return bindings;
}
java | public static String unicode2String(String unicode) {
StringBuffer string = new StringBuffer();
String[] hex = unicode.split("\\\\u");
for (int i = 1; i < hex.length; i++) {
int data = Integer.parseInt(hex[i], 16);
string.append((char) data);
}
return string.toString();
} |
def url(self, schemes=None):
    """Generate a random url string.

    :param schemes: a list of strings to use as schemes; one is chosen at
        random. When None, http and https urls are generated. An empty list
        yields schemeless urls such as "://domain.com".
    :returns: a random url string.
    """
    if schemes is None:
        schemes = ['http', 'https']
    # Empty scheme list -> schemeless url.
    scheme = self.random_element(schemes) if schemes else ""
    template = self.random_element(self.url_formats)
    return self.generator.parse('{}://{}'.format(scheme, template))
def eval(self, id1, id2, inst1):
    """
    Computes the result of the kernel function for two instances. If id1 == -1, eval use inst1 instead of an
    instance in the dataset.
    :param id1: the index of the first instance in the dataset
    :type id1: int
    :param id2: the index of the second instance in the dataset
    :type id2: int
    :param inst1: the instance corresponding to id1 (used if id1 == -1)
    :type inst1: Instance
    :return: the kernel value
    :rtype: float
    """
    # Unwrap the underlying Java object; the JNI call accepts None/null when
    # no explicit instance is supplied.
    jinst1 = None
    if inst1 is not None:
        jinst1 = inst1.jobject
    # JNI signature "(IILweka/core/Instance;)D": two ints plus an Instance,
    # returning a double.
    return javabridge.call(self.jobject, "eval", "(IILweka/core/Instance;)D", id1, id2, jinst1)
java | public static <T extends View> T mount(T v, Renderable r) {
Mount m = new Mount(v, r);
mounts.put(v, m);
render(v);
return v;
} |
def native(self, value, context=None):
    """Convert a foreign value to a native boolean (or None when allowed)."""
    value = super().native(value, context)
    if self.none and (value is None):
        # Absent values stay absent when the field permits None.
        return None
    try:
        value = value.lower()
    except AttributeError:
        # Not string-like: defer to Python truthiness.
        return bool(value)
    # Match the lower-cased text against the configured vocabularies.
    for result, vocabulary in ((True, self.truthy), (False, self.falsy)):
        if value in vocabulary:
            return result
    raise Concern("Unable to convert {0!r} to a boolean value.", value)
java | private static String getProperty(String driverId, String propertyName) {
String propertyValue = PROPERTIES.getProperty(DEFAULT_PREFIX + "." + driverId + "." + propertyName);
if (propertyValue != null) {
propertyValue = propertyValue.trim();
if (propertyValue.isEmpty()) {
propertyValue = null;
}
}
return propertyValue;
} |
/**
 * Inserts a visit entry into the database, using the named connection pool
 * when one is given and the context's pool otherwise. Duplicate-entry
 * failures of the insert itself are logged and ignored.
 *
 * @param dbc the current database context
 * @param poolName optional pool name; may be empty/null
 * @param visit the visit to persist
 * @throws CmsDbSqlException if obtaining the connection/statement fails
 */
protected void addVisit(CmsDbContext dbc, String poolName, CmsVisitEntry visit) throws CmsDbSqlException {
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        // Use the named pool when one is given, otherwise the context's pool.
        if (CmsStringUtil.isNotEmpty(poolName)) {
            conn = m_sqlManager.getConnection(poolName);
        } else {
            conn = m_sqlManager.getConnection(dbc);
        }
        stmt = m_sqlManager.getPreparedStatement(conn, "C_VISIT_CREATE_3");
        stmt.setString(1, visit.getUserId().toString());
        stmt.setLong(2, visit.getDate());
        // Structure id is optional.
        stmt.setString(3, visit.getStructureId() == null ? null : visit.getStructureId().toString());
        try {
            stmt.executeUpdate();
        } catch (SQLException e) {
            // ignore, most likely a duplicate entry
            LOG.debug(
                Messages.get().container(Messages.ERR_GENERIC_SQL_1, CmsDbSqlException.getErrorQuery(stmt)).key(),
                e);
        }
    } catch (SQLException e) {
        // Connection/statement setup failed: surface as a db exception.
        throw new CmsDbSqlException(
            Messages.get().container(Messages.ERR_GENERIC_SQL_1, CmsDbSqlException.getErrorQuery(stmt)),
            e);
    } finally {
        try {
            m_sqlManager.closeAll(dbc, conn, stmt, null);
        } catch (Throwable t) {
            // this could happen during shutdown
            LOG.debug(t.getLocalizedMessage(), t);
        }
    }
}
def symlink(parser, cmd, args):
    """
    Set up symlinks for (a subset of) the pwny apps.
    """
    parser.add_argument(
        'apps',
        nargs=argparse.REMAINDER,
        help='Which apps to create symlinks for.'
    )
    args = parser.parse_args(args)
    base_dir, pwny_main = os.path.split(sys.argv[0])
    for app_name, config in MAIN_FUNCTIONS.items():
        # Only symlinkable apps; when an explicit app list was given, only
        # those named in it.
        wanted = config['symlink'] and (not args.apps or app_name in args.apps)
        if not wanted:
            continue
        dest = os.path.join(base_dir, app_name)
        if os.path.exists(dest):
            print('Not creating symlink %s (file already exists)' % dest)
        else:
            print('Creating symlink %s' % dest)
            os.symlink(pwny_main, dest)
/**
 * Returns the ProducerTemplate for the given Camel context URI, creating
 * and caching it on first use. Initialization polls until Camel reports
 * started, failing after INITIALIZE_TIMEOUT seconds.
 *
 * NOTE(review): the method name typo ("Templete") is part of the public
 * API and cannot be changed without breaking callers.
 *
 * @param contextUri the Camel application context URI
 * @return the (possibly cached) ProducerTemplate
 * @throws Exception if Camel initialization times out
 */
public static ProducerTemplate generateTemplete(String contextUri) throws Exception
{
    // Intern the URI so all callers for the same context synchronize on the
    // same canonical String instance.
    // NOTE(review): locking on an interned String is fragile — any other
    // code can lock the same instance; consider a dedicated lock object map.
    String initKey = contextUri.intern();
    synchronized (initKey)
    {
        ProducerTemplate template = templates.get(initKey);
        // Already initialized: return the cached ProducerTemplate.
        if (template != null)
        {
            return template;
        }
        // Perform the initialization.
        Main main = new Main();
        main.setApplicationContextUri(contextUri);
        main.enableHangupSupport();
        // run Camel
        CamelInitializeThread initializeThread = new CamelInitializeThread(main);
        initializeThread.start();
        int timeCounter = 0;
        // Poll once per second until Camel reports started, up to the timeout.
        while (!main.isStarted())
        {
            try
            {
                TimeUnit.SECONDS.sleep(1);
            }
            catch (InterruptedException ex)
            {
                // Interrupts are intentionally ignored; the timeout below
                // still bounds the total wait.
                if (logger.isDebugEnabled())
                {
                    logger.debug("Occur interrupt. Ignore interrupt.", ex);
                }
            }
            timeCounter++;
            if (INITIALIZE_TIMEOUT < timeCounter)
            {
                logger.error("Timed out to camel initialization. Stop to application start.");
                throw new Exception("Camel initialization time out.");
            }
        }
        // Obtain the ProducerTemplate.
        template = main.getCamelTemplate();
        // Cache the initialized ProducerTemplate for subsequent callers.
        templates.put(initKey, template);
        return template;
    }
}
/**
 * Computes the substitution entries that can be lifted above the join:
 * entries whose variable is defined on the left are excluded, and — when a
 * right-provenance variable is present — the remaining right-side values
 * are guarded so they only apply when the right side actually matched.
 */
private ImmutableSubstitution<ImmutableTerm> computeLiftableSubstitution(
        ImmutableSubstitution<? extends ImmutableTerm> selectedSubstitution,
        Optional<Variable> rightProvenanceVariable, ImmutableSet<Variable> leftVariables) {
    ImmutableMap<Variable, ImmutableTerm> newMap;
    if (rightProvenanceVariable.isPresent()) {
        // Guard right-specific values with the provenance variable.
        newMap = selectedSubstitution.getImmutableMap().entrySet().stream()
            .filter(e -> !leftVariables.contains(e.getKey()))
            .collect(ImmutableCollectors.toMap(
                Map.Entry::getKey,
                e -> transformRightSubstitutionValue(e.getValue(), leftVariables,
                    rightProvenanceVariable.get())));
    }
    else {
        // No provenance variable: lift the right-side entries unchanged.
        newMap = selectedSubstitution.getImmutableMap().entrySet().stream()
            .filter(e -> !leftVariables.contains(e.getKey()))
            .collect(ImmutableCollectors.toMap(
                Map.Entry::getKey,
                e -> (ImmutableTerm) e.getValue()));
    }
    return substitutionFactory.getSubstitution(newMap);
}
def validate(reference_labels, estimated_labels):
    """Checks that the input annotations to a comparison function look like
    valid chord labels.
    Parameters
    ----------
    reference_labels : list, len=n
        Reference chord labels to score against.
    estimated_labels : list, len=n
        Estimated chord labels to score against.
    """
    n_ref = len(reference_labels)
    n_est = len(estimated_labels)
    # Comparison requires the two annotation lists to be aligned.
    if n_ref != n_est:
        raise ValueError(
            "Chord comparison received different length lists: "
            "len(reference)=%d\tlen(estimates)=%d" % (n_ref, n_est))
    # Every label in both lists must parse as a chord.
    for label_list in (reference_labels, estimated_labels):
        for chord_label in label_list:
            validate_chord_label(chord_label)
    # When either label list is empty, warn the user.
    if not reference_labels:
        warnings.warn('Reference labels are empty')
    if not estimated_labels:
        warnings.warn('Estimated labels are empty')
java | private void setSwatchDescription(int rowNumber, int index, int rowElements, boolean selected,
View swatch) {
int accessibilityIndex;
if (rowNumber % 2 == 0) {
// We're in a regular-ordered row
accessibilityIndex = index;
} else {
// We're in a backwards-ordered row.
int rowMax = ((rowNumber + 1) * mNumColumns);
accessibilityIndex = rowMax - rowElements;
}
String description;
if (selected) {
description = String.format(mDescriptionSelected, accessibilityIndex);
} else {
description = String.format(mDescription, accessibilityIndex);
}
swatch.setContentDescription(description);
} |
java | public void addRecord(String key,
boolean b)
throws TarMalformatException, IOException {
addRecord(key, Boolean.toString(b));
} |
java | public void setupKeys()
{
KeyAreaInfo keyArea = null;
keyArea = new KeyAreaInfo(this, Constants.UNIQUE, ID_KEY);
keyArea.addKeyField(ID, Constants.ASCENDING);
keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, CLASS_INFO_ID_KEY);
keyArea.addKeyField(CLASS_INFO_ID, Constants.ASCENDING);
} |
def get_zone():
    """Request a new zone from the AcraServer HTTP API.

    :returns: a ``(zone_id, public_key)`` tuple, with the public key
        base64-decoded from the JSON payload.
    """
    response = urlopen('{}/getNewZone'.format(ACRA_CONNECTOR_API_ADDRESS))
    zone_data = json.loads(response.read().decode('utf-8'))
    return zone_data['id'], b64decode(zone_data['public_key'])
def cmd_startstop(options):
    """Start or Stop the specified instance.
    Finds instances that match args and instance-state expected by the
    command. Then, the target instance is determined, the action is
    performed on the instance, and the return information is displayed.
    Args:
        options (object): contains args and data from parser.
    """
    # Only instances in the opposite state can be toggled.
    statelu = {"start": "stopped", "stop": "running"}
    options.inst_state = statelu[options.command]
    debg.dprint("toggle set state: ", options.inst_state)
    (i_info, param_str) = gather_data(options)
    (tar_inst, tar_idx) = determine_inst(i_info, param_str, options.command)
    response = awsc.startstop(tar_inst, options.command)
    # The AWS response nests state info under a command-specific key.
    responselu = {"start": "StartingInstances", "stop": "StoppingInstances"}
    filt = responselu[options.command]
    resp = {}
    state_term = ('CurrentState', 'PreviousState')
    # Extract the current and previous state names from the response.
    for i, j in enumerate(state_term):
        resp[i] = response["{0}".format(filt)][0]["{0}".format(j)]['Name']
    # C_STAT/C_NORM are terminal color codes keyed by state name.
    print("Current State: {}{}{} - Previous State: {}{}{}\n".
          format(C_STAT[resp[0]], resp[0], C_NORM,
                 C_STAT[resp[1]], resp[1], C_NORM))
java | @Override
public boolean contains(IAtom atom) {
for (int i = 0; i < getAtomCount(); i++) {
if (atoms[i].equals(atom)) return true;
}
return false;
} |
/**
 * Marshalls the given request's fields through the protocol marshaller.
 *
 * @param createDataSourceFromRDSRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each field
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(CreateDataSourceFromRDSRequest createDataSourceFromRDSRequest, ProtocolMarshaller protocolMarshaller) {
    if (createDataSourceFromRDSRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each field under its pre-computed binding.
        protocolMarshaller.marshall(createDataSourceFromRDSRequest.getDataSourceId(), DATASOURCEID_BINDING);
        protocolMarshaller.marshall(createDataSourceFromRDSRequest.getDataSourceName(), DATASOURCENAME_BINDING);
        protocolMarshaller.marshall(createDataSourceFromRDSRequest.getRDSData(), RDSDATA_BINDING);
        protocolMarshaller.marshall(createDataSourceFromRDSRequest.getRoleARN(), ROLEARN_BINDING);
        protocolMarshaller.marshall(createDataSourceFromRDSRequest.getComputeStatistics(), COMPUTESTATISTICS_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Inserts {@code newNode} into the trie under the suffix of {@code key}
 * starting at {@code beginIndex}, creating intermediate (non-terminal)
 * nodes for every character before the last.
 */
private void addNode(final TrieNode<V> node, final CharSequence key,
        final int beginIndex, final TrieNode<V> newNode) {
    final int lastKeyIndex = key.length() - 1;
    TrieNode<V> currentNode = node;
    int i = beginIndex;
    // Build a chain of intermediate nodes for all but the last character.
    for (; i < lastKeyIndex; i++) {
        final TrieNode<V> nextNode = new TrieNode<V>(false);
        currentNode.children.put(key.charAt(i), nextNode);
        currentNode = nextNode;
    }
    // The last character maps directly to the supplied node.
    // NOTE(review): assumes beginIndex <= lastKeyIndex; an empty key would
    // make charAt(i) throw — confirm callers guarantee a non-empty suffix.
    currentNode.children.put(key.charAt(i), newNode);
}
/**
 * Stores a baseline cost value in the slot selected by the baseline number.
 *
 * @param baselineNumber index into the BASELINE_COSTS field list
 * @param value cost value to store
 */
public void setBaselineCost(int baselineNumber, Number value)
{
    set(selectField(AssignmentFieldLists.BASELINE_COSTS, baselineNumber), value);
}
def _resp_exception(self, resp):
    """Inspect an upload response and react to its status code.

    Retryable conditions raise ``exceptions.SystemProblem`` (after any
    required side effects such as re-authentication or rate-limit waits);
    everything else is logged.

    :param resp: requests-style response object; may be None on transport
                 failure.
    """
    # FIX: guard clauses must come first — the message list below reads
    # resp.url / resp.reason / resp.request / resp.status_code, so the
    # original post-hoc None/hasattr checks could never be reached (an
    # AttributeError fired while building the message instead).
    if resp is None:
        raise exceptions.SystemProblem(
            ['No response information. Turbolift will retry...'])
    if not hasattr(resp, 'status_code'):
        raise exceptions.SystemProblem(
            ['No Status to check. Turbolift will retry...'])
    message = [
        'Url: [ %s ] Reason: [ %s ] Request: [ %s ] Status Code: [ %s ]. ',
        resp.url,
        resp.reason,
        resp.request,
        resp.status_code
    ]
    if resp.status_code == 401:
        message[0] += (
            'Turbolift experienced an Authentication issue. Turbolift'
            ' will retry...'
        )
        # Refresh credentials before signalling the retry.
        self.job_args.update(auth.authenticate(self.job_args))
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 404:
        message[0] += 'Item not found.'
        LOG.debug(*message)
    elif resp.status_code == 409:
        # NOTE(review): this branch builds a message but neither raises nor
        # logs, so a 409 is effectively silent — confirm that is intended.
        message[0] += (
            'Request Conflict. Turbolift is abandoning this...'
        )
    elif resp.status_code == 413:
        return_headers = resp.headers
        retry_after = return_headers.get('retry_after', 10)
        # Honor the server's back-off hint before retrying.
        cloud_utils.stupid_hack(wait=retry_after)
        message[0] += (
            'The System encountered an API limitation and will'
            ' continue in [ %s ] Seconds' % retry_after
        )
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 502:
        message[0] += (
            'Failure making Connection. Turbolift will retry...'
        )
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 503:
        cloud_utils.stupid_hack(wait=10)
        message[0] += 'SWIFT-API FAILURE'
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 504:
        cloud_utils.stupid_hack(wait=10)
        message[0] += 'Gateway Failure.'
        raise exceptions.SystemProblem(message)
    elif resp.status_code >= 300:
        message[0] += 'General exception.'
        raise exceptions.SystemProblem(message)
    else:
        # 2xx: nothing to do beyond a debug trace.
        LOG.debug(*message)
def needs_update(self, cache_key):
    """Check whether the cached item for ``cache_key`` is stale.

    :param cache_key: A CacheKey object (as returned by CacheKeyGenerator.key_for().
    :returns: True if the cached version of the item is out of date.
    """
    if not self.cacheable(cache_key):
        # Uncacheable keys can never be up to date.
        return True
    stored = self._read_sha(cache_key)
    return stored != cache_key.hash
def make_optimize_tensor(self, model, session=None, var_list=None, **kwargs):
    """
    Make SciPy optimization tensor.
    The `make_optimize_tensor` method builds optimization tensor and initializes
    all necessary variables created by optimizer.
    :param model: GPflow model.
    :param session: Tensorflow session.
    :param var_list: List of variables for training.
    :param kwargs: Scipy optional optimization parameters,
        - `maxiter`, maximal number of iterations to perform.
        - `disp`, if True, prints convergence messages.
    :return: Tensorflow operation.
    """
    session = model.enquire_session(session)
    with session.as_default():
        var_list = self._gen_var_list(model, var_list)
        # Merge caller-supplied SciPy options over the stored defaults
        # without mutating the instance-level kwargs dict.
        optimizer_kwargs = self._optimizer_kwargs.copy()
        options = optimizer_kwargs.get('options', {})
        options.update(kwargs)
        optimizer_kwargs.update(dict(options=options))
        objective = model.objective
        optimizer = external_optimizer.ScipyOptimizerInterface(
            objective, var_list=var_list, **optimizer_kwargs)
        # Ensure any variables the optimizer created are initialized.
        model.initialize(session=session)
        return optimizer
def soap_action(self, service, action, payloadbody):
    """Do a soap request.

    Wraps ``payloadbody`` in the SOAP envelope and POSTs it to
    ``self.url + service`` via pycurl.

    :returns: the parsed XML root element on success, False on any
        transport, HTTP, or XML-parsing failure.
    """
    # Content-Length must be the byte length, so encode before measuring.
    payload = self.soapenvelope.format(body=payloadbody).encode('utf-8')
    headers = ['SOAPAction: ' + action,
               'Content-Type: application/soap+xml; charset=UTF-8',
               'Content-Length: ' + str(len(payload))]
    try:
        curl = pycurl.Curl()
        # NOTE(review): hard-coded TLSv1.0 + single cipher — presumably what
        # the target controller requires; confirm against its documentation.
        curl.setopt(pycurl.SSL_CIPHER_LIST, "AES256-SHA")
        curl.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_0)
        # self.curl.setopt(pycurl.CAINFO,'ihc.crt')
        # Certificate verification is disabled (self-signed device cert).
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        curl.setopt(pycurl.POST, 1)
        curl.setopt(pycurl.HEADERFUNCTION, IHCCurlConnection._write_header)
        curl.setopt(pycurl.HTTPHEADER, headers)
        # Request body is streamed from an in-memory buffer...
        inbuffer = BytesIO(payload)
        curl.setopt(pycurl.READDATA, inbuffer)
        # ...and the response is collected into another.
        buffer = BytesIO()
        curl.setopt(pycurl.WRITEDATA, buffer)
        curl.setopt(pycurl.URL, self.url + service)
        curl.setopt(pycurl.COOKIE, IHCCurlConnection.cookies)
        # curl.setopt(pycurl.VERBOSE,1)
        curl.perform()
        body = buffer.getvalue().decode('utf-8')
        code = curl.getinfo(pycurl.HTTP_CODE)
        curl.close()
    except Exception as exp:
        # Best-effort API: any transport failure is reported as False.
        return False
    if code != 200:
        return False
    try:
        xdoc = xml.etree.ElementTree.fromstring(body)
    except xml.etree.ElementTree.ParseError:
        return False
    return xdoc
java | @Pure
protected AStarNode<ST, PT> translateCandidate(PT endPoint, AStarNode<ST, PT> node) {
if (endPoint.equals(node.getGraphPoint())) {
return null;
}
return node;
} |
java | public boolean containsMapping(Object key, Object value) {
Set<V> s = map.get(key);
return s != null && s.contains(value);
} |
python | def _check_device(self, requested_device, map_device):
"""Compare the requested device with the map device and
return the map device if it differs from the requested device
along with a warning.
"""
type_1 = torch.device(requested_device)
type_2 = torch.device(map_device)
if type_1 != type_2:
warnings.warn(
'Setting self.device = {} since the requested device ({}) '
'is not available.'.format(map_device, requested_device),
DeviceWarning)
return map_device
# return requested_device instead of map_device even though we
# checked for *type* equality as we might have 'cuda:0' vs. 'cuda:1'.
return requested_device |
def is_file_url(url):
    """Returns true if the given url is a file url"""
    from .misc import to_text
    if not url:
        return False
    candidate = url
    if not isinstance(candidate, six.string_types):
        # Link-like objects expose their address via a ``url`` attribute.
        try:
            candidate = getattr(candidate, "url")
        except AttributeError:
            raise ValueError("Cannot parse url from unknown type: {0!r}".format(candidate))
    candidate = to_text(candidate, encoding="utf-8")
    scheme = urllib_parse.urlparse(candidate.lower()).scheme
    return scheme == "file"
java | public CacheKeyTO getCacheKey(Object target, String methodName, Object[] arguments, String keyExpression,
String hfieldExpression, Object result, boolean hasRetVal) {
String key = null;
String hfield = null;
if (null != keyExpression && keyExpression.trim().length() > 0) {
try {
key = scriptParser.getDefinedCacheKey(keyExpression, target, arguments, result, hasRetVal);
if (null != hfieldExpression && hfieldExpression.trim().length() > 0) {
hfield = scriptParser.getDefinedCacheKey(hfieldExpression, target, arguments, result, hasRetVal);
}
} catch (Exception ex) {
log.error(ex.getMessage(), ex);
}
} else {
key = CacheUtil.getDefaultCacheKey(target.getClass().getName(), methodName, arguments);
}
if (null == key || key.trim().isEmpty()) {
throw new IllegalArgumentException("cache key for " + target.getClass().getName() + "." + methodName + " is empty");
}
return new CacheKeyTO(config.getNamespace(), key, hfield);
} |
java | private String readString(byte stringTag, String stringName,
String enc) throws IOException {
if (buffer.read() != stringTag)
throw new IOException("DER input not a " +
stringName + " string");
int length = getLength(buffer);
byte[] retval = new byte[length];
if ((length != 0) && (buffer.read(retval) != length))
throw new IOException("short read of DER " +
stringName + " string");
return new String(retval, enc);
} |
java | public static <T extends Tree> Matcher<T> isArrayType() {
return new Matcher<T>() {
@Override
public boolean matches(Tree t, VisitorState state) {
Type type = getType(t);
return type != null && state.getTypes().isArray(type);
}
};
} |
/**
 * Creates or replaces the streaming job, blocking until the service call
 * completes and returning the resulting job resource.
 *
 * @param resourceGroupName resource group containing the job
 * @param jobName name of the streaming job
 * @param streamingJob job definition to create or replace
 * @return the created/replaced streaming job
 */
public StreamingJobInner beginCreateOrReplace(String resourceGroupName, String jobName, StreamingJobInner streamingJob) {
    return beginCreateOrReplaceWithServiceResponseAsync(resourceGroupName, jobName, streamingJob).toBlocking().single().body();
}
/**
 * Merges the stage/event lists of {@code source} into {@code target}.
 * For stages both nodes share, the source's event wins (it is newer);
 * stages only present on the source are appended.
 */
private void merge(NodeTask target, NodeTask source) {
    List<StageType> stages = target.getStage();
    List<TaskEvent> events = target.getEvent();
    List<StageType> mergeStates = new ArrayList<StageType>();
    List<TaskEvent> mergeEvents = new ArrayList<TaskEvent>();
    // Merge the intersection of the two nodes' data, keeping target's order.
    for (int i = 0; i < stages.size(); i++) {
        StageType stage = stages.get(i);
        TaskEvent event = events.get(i);
        mergeStates.add(stage);
        // Find the TaskEvent for this stage on the source node and prefer
        // it (the latest value) when present.
        TaskEvent sourceEvent = getMatchStage(source, stage);
        if (sourceEvent == null) {
            mergeEvents.add(event);
        } else {
            mergeEvents.add(sourceEvent);
        }
    }
    // Append the difference: stages that exist only on the source node.
    List<StageType> sourceStages = source.getStage();
    List<TaskEvent> sourceEvents = source.getEvent();
    for (int i = 0; i < sourceStages.size(); i++) {
        StageType stage = sourceStages.get(i);
        TaskEvent event = sourceEvents.get(i);
        if (mergeStates.contains(stage)) {
            continue;
        }
        mergeStates.add(stage);
        mergeEvents.add(event);
    }
    // Write the merged data back to the target node.
    target.setEvent(mergeEvents);
    target.setStage(mergeStates);
    target.setShutdown(source.isShutdown()); // propagate the shutdown flag
}
/**
 * AWS EC2 "Create Volume" action: builds a query-API request from the
 * given inputs and executes it, returning the standard result map
 * (returnCode / returnResult and, on failure, exception).
 */
@Action(name = "Create Volume",
        outputs = {
            @Output(Outputs.RETURN_CODE),
            @Output(Outputs.RETURN_RESULT),
            @Output(Outputs.EXCEPTION)
        },
        responses = {
            @Response(text = Outputs.SUCCESS, field = Outputs.RETURN_CODE, value = Outputs.SUCCESS_RETURN_CODE,
                matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.RESOLVED),
            @Response(text = Outputs.FAILURE, field = Outputs.RETURN_CODE, value = Outputs.FAILURE_RETURN_CODE,
                matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.ERROR)
        }
)
public Map<String, String> execute(@Param(value = ENDPOINT) String endpoint,
                                   @Param(value = IDENTITY, required = true) String identity,
                                   @Param(value = CREDENTIAL, required = true, encrypted = true) String credential,
                                   @Param(value = PROXY_HOST) String proxyHost,
                                   @Param(value = PROXY_PORT) String proxyPort,
                                   @Param(value = PROXY_USERNAME) String proxyUsername,
                                   @Param(value = PROXY_PASSWORD, encrypted = true) String proxyPassword,
                                   @Param(value = HEADERS) String headers,
                                   @Param(value = QUERY_PARAMS) String queryParams,
                                   @Param(value = VERSION) String version,
                                   @Param(value = AVAILABILITY_ZONE, required = true) String availabilityZone,
                                   @Param(value = KMS_KEY_ID) String kmsKeyId,
                                   @Param(value = VOLUME_TYPE) String volumeType,
                                   @Param(value = ENCRYPTED) String encrypted,
                                   @Param(value = IOPS) String iops,
                                   @Param(value = SIZE) String size,
                                   @Param(value = SNAPSHOT_ID) String snapshotId) {
    try {
        // Default to the volumes API version when none is supplied.
        version = getDefaultStringInput(version, VOLUMES_DEFAULT_API_VERSION);
        // Connection/authentication/proxy settings shared by EC2 calls.
        final CommonInputs commonInputs = new CommonInputs.Builder()
            .withEndpoint(endpoint, EC2_API, EMPTY)
            .withIdentity(identity)
            .withCredential(credential)
            .withProxyHost(proxyHost)
            .withProxyPort(proxyPort)
            .withProxyUsername(proxyUsername)
            .withProxyPassword(proxyPassword)
            .withHeaders(headers)
            .withQueryParams(queryParams)
            .withVersion(version)
            .withAction(CREATE_VOLUME)
            .withApiService(EC2_API)
            .withRequestUri(EMPTY)
            .withRequestPayload(EMPTY)
            .withHttpClientMethod(HTTP_CLIENT_METHOD_GET)
            .build();
        // Placement and volume-type inputs.
        final CustomInputs customInputs = new CustomInputs.Builder()
            .withAvailabilityZone(availabilityZone)
            .withKmsKeyId(kmsKeyId)
            .withVolumeType(volumeType)
            .build();
        // Volume sizing/encryption/snapshot inputs.
        final VolumeInputs volumeInputs = new VolumeInputs.Builder()
            .withEncrypted(encrypted)
            .withIops(iops)
            .withSize(size)
            .withSnapshotId(snapshotId)
            .build();
        return new QueryApiExecutor().execute(commonInputs, customInputs, volumeInputs);
    } catch (Exception e) {
        // All failures are converted to the standard failure result map.
        return ExceptionProcessor.getExceptionResult(e);
    }
}
/**
 * Updates an existing comment via PUT /comment/{commentId} with a JSON body.
 *
 * @param commentId id of the comment to update
 * @param comment the new comment content
 */
public void updateComment(int commentId, CommentUpdate comment) {
    getResourceFactory().getApiResource("/comment/" + commentId)
        .entity(comment, MediaType.APPLICATION_JSON_TYPE).put();
}
def reconstruct_headers(self, response):
    """Rebuild ``response.headers`` as a plain dictionary.

    Args:
        response: A scrapy response object

    Returns: A dictionary that mirrors the "response.headers" dictionary
    that is normally within a response object

    Raises: None

    Reason: Originally, there was a bug where json.dumps() did not
    properly serialize the headers; building a fresh dict of plain lists
    circumvents the known issue.
    """
    return {
        key: list(response.headers.getlist(key))
        for key in list(response.headers.keys())
    }
def send(self, request, **kwargs):
    # type: (ClientRequest, Any) -> ClientResponse
    """Send request object according to configuration.
    Allowed kwargs are:
    - session : will override the driver session and use yours. Should NOT be done unless really required.
    - anything else is sent straight to requests.
    :param ClientRequest request: The request object to be sent.
    """
    # It's not recommended to provide its own session, and is mostly
    # to enable some legacy code to plug correctly
    session = kwargs.pop('session', self.session)
    try:
        response = session.request(
            request.method,
            request.url,
            **kwargs)
    except requests.RequestException as err:
        # Re-raise as ClientRequestError while keeping the original traceback.
        msg = "Error occurred in request."
        raise_with_traceback(ClientRequestError, msg, err)
    return RequestsClientResponse(request, response)
def df(self):
    """
    Makes a pandas DataFrame containing Curve data for all the wells
    in the Project. The DataFrame has a dual index of well UWI and
    curve Depths. Requires `pandas`.
    Args:
        No arguments.
    Returns:
        `pandas.DataFrame`.
    """
    # Imported lazily so the package works without pandas installed.
    import pandas as pd
    frames = [well.df(uwi=True) for well in self]
    return pd.concat(frames)
python | def multiline_regex_suggestor(regex, substitution=None, ignore_case=False):
    """
    Return a suggestor function which, given a list of lines, generates patches
    to substitute matches of the given regex with (if provided) the given
    substitution.
    @param regex Either a regex object or a string describing a regex.
    @param substitution Either None (meaning that we should flag the matches
                        without suggesting an alternative), or a string (using
                        \1 notation to backreference match groups) or a
                        function (that takes a match object as input).
    """
    # Compile string patterns with DOTALL so '.' can span newlines, letting
    # a single pattern match across several source lines.
    if isinstance(regex, str):
        if ignore_case is False:
            regex = re.compile(regex, re.DOTALL)
        else:
            regex = re.compile(regex, re.DOTALL | re.IGNORECASE)
    # Normalize `substitution` into a callable taking a match object.
    if isinstance(substitution, str):
        def substitution_func(match):
            return match.expand(substitution)
    else:
        substitution_func = substitution
    def suggestor(lines):
        # `pos` is a character offset into the joined text, not a line index.
        pos = 0
        while True:
            # NOTE(review): re-joining `lines` on every iteration is O(n^2)
            # in the file size; left as-is to preserve behavior.
            match = regex.search(''.join(lines), pos)
            if not match:
                break
            # Translate character offsets back to (row, column) coordinates.
            start_row, start_col = _index_to_row_col(lines, match.start())
            end_row, end_col = _index_to_row_col(lines, match.end() - 1)
            if substitution is None:
                # Flag-only mode: emit a patch with no replacement text.
                new_lines = None
            else:
                # TODO: ugh, this is hacky. Clearly I need to rewrite
                # this to use
                # character-level patches, rather than line-level patches.
                new_lines = substitution_func(match)
                if new_lines is not None:
                    # Re-attach the untouched prefix of the first matched line
                    # and suffix of the last matched line around the replacement.
                    new_lines = ''.join((
                        lines[start_row][:start_col],
                        new_lines,
                        lines[end_row][end_col + 1:]
                    ))
            yield Patch(
                start_line_number=start_row,
                end_line_number=end_row + 1,
                new_lines=new_lines
            )
            # Advance at least one character past the match start so that
            # zero-width or unchanged matches cannot loop forever.
            delta = 1 if new_lines is None else min(1, len(new_lines))
            pos = match.start() + delta
    return suggestor |
def get_version(dunder_file):
    """Returns a version string for the current package, derived
    either from git or from a .version file.

    This function is expected to run in two contexts. In a development
    context, where .git/ exists, the version is pulled from git tags.
    Using the BuildPyCommand and SDistCommand classes for cmdclass in
    setup.py will write a .version file into any dist.

    In an installed context, the .version file written at dist build
    time is the source of version information.
    """
    package_dir = abspath(expanduser(dirname(dunder_file)))
    try:
        # Prefer the .version file; fall back to git tags when it is absent.
        version = _get_version_from_version_file(package_dir)
        if not version:
            version = _get_version_from_git_tag(package_dir)
        return version
    except CalledProcessError as e:
        log.warn(repr(e))
    except Exception as e:
        log.exception(e)
    return None
def concat(self, *dss, **kwargs):
    """
    Concatenate dataswim instances and set the result as
    the main dataframe.

    :param dss: dataswim instances to concatenate
    :type dss: Ds
    :param kwargs: keyword arguments for ``pd.concat``
    """
    try:
        # Fold each instance's dataframe onto an initially empty frame,
        # pairwise, exactly as before.
        combined = pd.DataFrame()
        for instance in dss:
            combined = pd.concat([combined, instance.df], **kwargs)
        self.df = combined
    except Exception as e:
        self.err(e, "Can not concatenate data")
java | @Override
public synchronized ChainGroupData removeChainGroup(String groupName) throws ChainGroupException {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Removing chain group, " + groupName);
}
if (null == groupName) {
throw new ChainGroupException("Input group name is null");
}
ChainGroupData groupData = this.chainGroups.remove(groupName);
if (null == groupData) {
throw new ChainGroupException("Null group name");
}
return groupData;
} |
python | def _cp_embeds_into(cp1, cp2):
"""Check that any state in ComplexPattern2 is matched in ComplexPattern1.
"""
# Check that any state in cp2 is matched in cp1
# If the thing we're matching to is just a monomer pattern, that makes
# things easier--we just need to find the corresponding monomer pattern
# in cp1
if cp1 is None or cp2 is None:
return False
cp1 = as_complex_pattern(cp1)
cp2 = as_complex_pattern(cp2)
if len(cp2.monomer_patterns) == 1:
mp2 = cp2.monomer_patterns[0]
# Iterate over the monomer patterns in cp1 and see if there is one
# that has the same name
for mp1 in cp1.monomer_patterns:
if _mp_embeds_into(mp1, mp2):
return True
return False |
def encode(self, x):
    """
    Return the encoding ``y(x)`` of the input array ``x``: a stable
    configuration (local energy minimum) of the hidden units while the
    visible units are clamped to ``x``.

    Note that NO learning takes place.
    """
    return self.find_energy_minimum(self.energy, x)
java | public ICalendar first() throws IOException {
StreamReader reader = constructReader();
if (index != null) {
reader.setScribeIndex(index);
}
try {
ICalendar ical = reader.readNext();
if (warnings != null) {
warnings.add(reader.getWarnings());
}
return ical;
} finally {
if (closeWhenDone()) {
reader.close();
}
}
} |
python | def configure_job():
    """Construct jobSpec for ML Engine job."""
    # See documentation:
    # https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#traininginput
    # Base training input: run t2t_trainer with the current flags on a
    # CUSTOM scale tier whose master machine type depends on GPU count.
    training_input = {
        "pythonModule": "tensor2tensor.bin.t2t_trainer",
        "args": flags_as_args(),
        "region": text_encoder.native_to_unicode(default_region()),
        "runtimeVersion": RUNTIME_VERSION,
        "pythonVersion": "3.5" if sys.version_info.major == 3 else "2.7",
        "jobDir": FLAGS.output_dir,
        "scaleTier": "CUSTOM",
        "masterType": FLAGS.cloud_mlengine_master_type or get_default_master_type(
            num_gpus=FLAGS.worker_gpu)
    }
    # TPU jobs use a standard master plus a single cloud_tpu worker.
    if FLAGS.use_tpu:
        training_input["masterType"] = (FLAGS.cloud_mlengine_master_type or
                                        "standard")
        training_input["workerType"] = "cloud_tpu"
        training_input["workerCount"] = 1
    # Optional hyperparameter tuning section.
    if FLAGS.hparams_range:
        tf.logging.info("Configuring hyperparameter tuning.")
        training_input["hyperparameters"] = configure_autotune(
            FLAGS.hparams_range,
            FLAGS.autotune_objective,
            FLAGS.autotune_maximize,
            FLAGS.autotune_max_trials,
            FLAGS.autotune_parallel_trials,
        )
    # Job id embeds model/problem and a timestamp so repeated submissions
    # do not collide.
    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    job_spec = {
        "jobId": "%s_%s_t2t_%s" % (FLAGS.model, FLAGS.problem, timestamp),
        "labels": {
            "model": FLAGS.model,
            "problem": FLAGS.problem,
            "hparams": FLAGS.hparams_set
        },
        "trainingInput": training_input,
    }
    return job_spec |
java | /**
 * Returns the largest value contained in this structure.
 * Fails fast via assertNonEmpty() rather than returning a sentinel.
 */
public int last() {
    assertNonEmpty();
    short lastKey = keys[size - 1];
    Container container = values[size - 1];
    // Pack the 16-bit key into the high half and the container's largest
    // value into the low half.  NOTE(review): assumes container.last()
    // yields a value in [0, 0xFFFF] -- confirm against Container.last().
    return lastKey << 16 | container.last();
} |
java | public Observable<DatabaseVulnerabilityAssessmentInner> getAsync(String resourceGroupName, String serverName, String databaseName) {
return getWithServiceResponseAsync(resourceGroupName, serverName, databaseName).map(new Func1<ServiceResponse<DatabaseVulnerabilityAssessmentInner>, DatabaseVulnerabilityAssessmentInner>() {
@Override
public DatabaseVulnerabilityAssessmentInner call(ServiceResponse<DatabaseVulnerabilityAssessmentInner> response) {
return response.body();
}
});
} |
def select_date(self, rows: List[Row], column: DateColumn) -> Date:
    """
    Select function takes a row as a list and a column name and returns
    the date in that column.

    Returns the first Date found in the column, or Date(-1, -1, -1)
    when no cell in the column holds a Date.
    """
    for row in rows:
        cell_value = row.values[column.name]
        if isinstance(cell_value, Date):
            return cell_value
    return Date(-1, -1, -1)
python | def is_subsumed_by(x, y):
    """
    Returns true if y subsumes x (for example P(x) subsumes P(A) as it is more
    abstract)
    """
    # Variables occurring in x; the check below is restricted to these.
    varsX = __split_expression(x)[1]
    theta = unify(x, y)
    if theta is problem.FAILURE:
        return False
    # y subsumes x when unification binds each of x's variables only to
    # variables, i.e. none of them had to be specialized to a constant.
    return all(__is_variable(theta[var]) for var in theta.keys()
               if var in varsX)
java | /**
 * Runs the given function inside a JDBC transaction and returns its result.
 *
 * <p>Auto-commit is disabled for the duration of the call and restored
 * afterwards (the connection may come from a pool); the transaction commits
 * on success and rolls back on any throwable.  All failures are surfaced as
 * ApplicationException with code BACKEND_ERROR, preserving the cause.
 *
 * @param function work to execute with the transactional connection
 * @param <R> result type produced by the function
 * @return the value produced by {@code function}
 */
protected <R> R getWithTransaction(TransactionalFunction<R> function) {
    Instant start = Instant.now();
    LazyToString callingMethod = getCallingMethod();
    logger.trace("{} : starting transaction", callingMethod);
    try(Connection tx = dataSource.getConnection()) {
        boolean previousAutoCommitMode = tx.getAutoCommit();
        tx.setAutoCommit(false);
        try {
            R result = function.apply(tx);
            tx.commit();
            return result;
        } catch (Throwable th) {
            // Roll back on ANY throwable (including Errors) before wrapping.
            tx.rollback();
            throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, th.getMessage(), th);
        } finally {
            // Restore the connection's original auto-commit mode.
            tx.setAutoCommit(previousAutoCommitMode);
        }
    } catch (SQLException ex) {
        throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, ex.getMessage(), ex);
    } finally {
        logger.trace("{} : took {}ms", callingMethod, Duration.between(start, Instant.now()).toMillis());
    }
} |
python | def _set_request_referer_metric(self, request):
"""
Add metric 'request_referer' for http referer.
"""
if 'HTTP_REFERER' in request.META and request.META['HTTP_REFERER']:
monitoring.set_custom_metric('request_referer', request.META['HTTP_REFERER']) |
java | public String max(List<String> s) {
String max = "";
for (String p : s) {
if (p.length() > max.length()) {
max = p;
}
}
return max;
} |
java | /**
 * Opens (or creates) a store with the given name inside this environment.
 *
 * <p>When {@code metaInfo} is null the store does not yet exist: either a
 * temporary empty store is returned (read-only txn with readonly-empty-stores
 * enabled) or a new store is created inside a read-write transaction.
 * Otherwise the existing store's meta information is validated against the
 * requested config before the store is materialized.
 *
 * @param name     store name
 * @param config   requested store configuration (may be recalculated)
 * @param txn      transaction the store is opened against
 * @param metaInfo persisted meta information, or null if the store is new
 * @return the opened store
 */
@SuppressWarnings({"AssignmentToMethodParameter"})
@NotNull
StoreImpl openStoreImpl(@NotNull final String name,
                        @NotNull StoreConfig config,
                        @NotNull final TransactionBase txn,
                        @Nullable TreeMetaInfo metaInfo) {
    checkIfTransactionCreatedAgainstThis(txn);
    if (config.useExisting) { // this parameter requires to recalculate
        if (metaInfo == null) {
            throw new ExodusException("Can't restore meta information for store " + name);
        } else {
            config = TreeMetaInfo.toConfig(metaInfo);
        }
    }
    final StoreImpl result;
    if (metaInfo == null) {
        // Store does not exist yet.
        if (txn.isReadonly() && ec.getEnvReadonlyEmptyStores()) {
            return createTemporaryEmptyStore(name);
        }
        final int structureId = allocateStructureId();
        metaInfo = TreeMetaInfo.load(this, config.duplicates, config.prefixing, structureId);
        result = createStore(name, metaInfo);
        final ReadWriteTransaction tx = throwIfReadonly(txn, "Can't create a store in read-only transaction");
        tx.getMutableTree(result);
        tx.storeCreated(result);
    } else {
        // Store exists: the persisted flags must agree with the request.
        final boolean hasDuplicates = metaInfo.hasDuplicates();
        if (hasDuplicates != config.duplicates) {
            throw new ExodusException("Attempt to open store '" + name + "' with duplicates = " +
                    config.duplicates + " while it was created with duplicates =" + hasDuplicates);
        }
        if (metaInfo.isKeyPrefixing() != config.prefixing) {
            if (!config.prefixing) {
                throw new ExodusException("Attempt to open store '" + name +
                        "' with prefixing = false while it was created with prefixing = true");
            }
            // if we're trying to open existing store with prefixing which actually wasn't created as store
            // with prefixing due to lack of the PatriciaTree feature, then open store with existing config
            metaInfo = TreeMetaInfo.load(this, hasDuplicates, false, metaInfo.getStructureId());
        }
        result = createStore(name, metaInfo);
    }
    return result;
} |
java | /**
 * Routes a throwable to the most specific {@code report()} overload.
 *
 * <p>Branch order is load-bearing: CompilationFailedException and
 * GroovyExceptionInterface are tested before the more general
 * GroovyRuntimeException and Exception, so reordering these checks would
 * change which overload handles a given throwable.
 *
 * @param object the throwable to report
 * @param child  whether this is a nested (child) error
 */
protected void dispatch(Throwable object, boolean child) {
    if (object instanceof CompilationFailedException) {
        report((CompilationFailedException) object, child);
    } else if (object instanceof GroovyExceptionInterface) {
        report((GroovyExceptionInterface) object, child);
    } else if (object instanceof GroovyRuntimeException) {
        report((GroovyRuntimeException) object, child);
    } else if (object instanceof Exception) {
        report((Exception) object, child);
    } else {
        report(object, child);
    }
} |
python | def NamedTemporaryFile(
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    delete=True,
    wrapper_class_override=None,
):
    """Create and return a temporary file.

    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode' -- the mode argument to io.open (default "w+b").
    'buffering' -- the buffer size argument to io.open (default -1).
    'encoding' -- the encoding argument to io.open (default None)
    'newline' -- the newline argument to io.open (default None)
    'delete' -- whether the file is deleted on close (default True).
    'wrapper_class_override' -- optional wrapper class used instead of
    _TemporaryFileWrapper for the returned object.

    The file is created as mkstemp() would do it.

    Returns an object with a file-like interface; the name of the file
    is accessible as its 'name' attribute. The file will be automatically
    deleted when it is closed unless the 'delete' argument is set to False.
    """
    prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
    flags = _bin_openflags
    # Setting O_TEMPORARY in the flags causes the OS to delete
    # the file when it is closed. This is only supported by Windows.
    if not wrapper_class_override:
        wrapper_class_override = _TemporaryFileWrapper
    if os.name == "nt" and delete:
        flags |= os.O_TEMPORARY
    # Older Pythons (< 3.5) have a four-argument _mkstemp_inner.
    if sys.version_info < (3, 5):
        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
    else:
        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
    try:
        file = io.open(fd, mode, buffering=buffering, newline=newline, encoding=encoding)
        # A dynamic subclass is built so the wrapper override participates in
        # the MRO alongside `object`.
        if wrapper_class_override is not None:
            return type(str("_TempFileWrapper"), (wrapper_class_override, object), {})(
                file, name, delete
            )
        else:
            return _TemporaryFileWrapper(file, name, delete)
    except BaseException:
        # NOTE(review): if io.open succeeded, `file` already owns fd, so the
        # os.close(fd) below may double-close -- confirm intended cleanup.
        os.unlink(name)
        os.close(fd)
        raise |
java | public IotHubDescriptionInner createOrUpdate(String resourceGroupName, String resourceName, IotHubDescriptionInner iotHubDescription, String ifMatch) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, resourceName, iotHubDescription, ifMatch).toBlocking().last().body();
} |
def exec_command(
        client, container, command, interactive=True, stdout=None, stderr=None, stdin=None):
    """
    Run provided command via exec API in provided container.

    This is just a wrapper for PseudoTerminal(client, container).exec_command()
    """
    # Create the exec instance first, then attach a pseudo-terminal to it.
    exec_id = exec_create(client, container, command, interactive=interactive)
    operation = ExecOperation(
        client,
        exec_id,
        interactive=interactive,
        stdout=stdout,
        stderr=stderr,
        stdin=stdin,
    )
    PseudoTerminal(client, operation).start()
def measure_time(func_to_measure):
    """
    Decorator that measures the wall-clock execution time of the wrapped
    function and prints it to the console.

    :param func_to_measure: function to be decorated
    :return: a wrapper that behaves like the original function but logs timing
    """
    import functools

    # functools.wraps preserves the wrapped function's name/docstring.
    @functools.wraps(func_to_measure)
    def wrap(*args, **kwargs):
        start_time = time()
        return_value = func_to_measure(*args, **kwargs)
        finish_time = time()
        # Fixes: use __name__ (``func_name`` is Python 2 only) and print()
        # so the decorator works on both Python 2 and 3.
        log = "%s took %0.4f seconds. start_time = %0.4f - finish_time = %0.4f\n" % (
            func_to_measure.__name__,
            finish_time - start_time,
            start_time,
            finish_time)
        print(log)
        return return_value
    return wrap
java | public static PageFlowController getCurrentPageFlow( HttpServletRequest request, ServletContext servletContext )
{
ActionResolver cur = getCurrentActionResolver( request, servletContext );
if (cur != null && cur.isPageFlow()) {
PageFlowController pfc = (PageFlowController) cur;
pfc.reinitializeIfNecessary(request, null, servletContext);
return pfc;
}
return null;
} |
def cmd_rot(self, deg=None, ch=None):
    """rot deg=num_deg ch=chname

    Rotate the image for the given viewer/channel by the given
    number of degrees.  If no value is given, reports the current
    rotation instead.
    """
    viewer = self.get_viewer(ch)
    if viewer is None:
        self.log("No current viewer/channel.")
    elif deg is None:
        self.log("%f deg" % (viewer.get_rotation()))
    else:
        viewer.rotate(deg)
python | def _call_vecfield_p(self, vf, out):
    """Implement ``self(vf, out)`` for exponent 1 < p < ``inf``.

    Writes the pointwise (optionally weighted) p-norm of the vector field
    ``vf`` into ``out``: out = (sum_i w_i * |vf[i]|**p) ** (1/p).
    """
    # Optimization for 1 component - just absolute value (maybe weighted)
    if len(self.domain) == 1:
        vf[0].ufuncs.absolute(out=out)
        if self.is_weighted:
            # Single-component weight folded in after the final 1/p root,
            # hence the exponent 1/p here.
            out *= self.weights[0] ** (1 / self.exponent)
        return
    # Initialize out, avoiding one copy
    self._abs_pow_ufunc(vf[0], out=out, p=self.exponent)
    if self.is_weighted:
        out *= self.weights[0]
    # Accumulate |f_i|**p (times weight) for the remaining components into
    # `out`, reusing a single temporary element.
    tmp = self.range.element()
    for fi, wi in zip(vf[1:], self.weights[1:]):
        self._abs_pow_ufunc(fi, out=tmp, p=self.exponent)
        if self.is_weighted:
            tmp *= wi
        out += tmp
    # Final 1/p power applied in place.
    self._abs_pow_ufunc(out, out=out, p=(1 / self.exponent))
java | public Observable<P2SVpnServerConfigurationInner> beginCreateOrUpdateAsync(String resourceGroupName, String virtualWanName, String p2SVpnServerConfigurationName, P2SVpnServerConfigurationInner p2SVpnServerConfigurationParameters) {
return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, virtualWanName, p2SVpnServerConfigurationName, p2SVpnServerConfigurationParameters).map(new Func1<ServiceResponse<P2SVpnServerConfigurationInner>, P2SVpnServerConfigurationInner>() {
@Override
public P2SVpnServerConfigurationInner call(ServiceResponse<P2SVpnServerConfigurationInner> response) {
return response.body();
}
});
} |
java | public void removeElementsWithNoRelationships() {
Set<RelationshipView> relationships = getRelationships();
Set<String> elementIds = new HashSet<>();
relationships.forEach(rv -> elementIds.add(rv.getRelationship().getSourceId()));
relationships.forEach(rv -> elementIds.add(rv.getRelationship().getDestinationId()));
for (ElementView elementView : getElements()) {
if (!elementIds.contains(elementView.getId())) {
removeElement(elementView.getElement());
}
}
} |
java | private int getIntForType(JSType type) {
// Templatized types don't exist at runtime, so collapse to raw type
if (type != null && type.isGenericObjectType()) {
type = type.toMaybeObjectType().getRawType();
}
if (intForType.containsKey(type)) {
return intForType.get(type).intValue();
}
int newInt = intForType.size() + 1;
intForType.put(type, newInt);
return newInt;
} |
java | public List<ConnectionParams> resolveAll(String correlationId, String key) {
List<ConnectionParams> connections = new ArrayList<ConnectionParams>();
synchronized (_lock) {
for (DiscoveryItem item : _items) {
if (item.key == key && item.connection != null)
connections.add(item.connection);
}
}
return connections;
} |
def grade(adjective, suffix=COMPARATIVE):
    """ Returns the comparative or superlative form of the given (inflected) adjective.
    """
    stem = predicative(adjective)
    ending = suffix
    # groß => großt, schön => schönst
    if ending == SUPERLATIVE and stem.endswith(("s", u"ß")):
        ending = ending[1:]
    # große => großere, schönes => schöneres
    n = len(stem)
    return adjective[:n] + ending + adjective[n:]
java | /**
 * Installs a bundle from the given URL into the framework.
 *
 * @param bundleUrl url of the bundle to install
 * @return id of the newly installed bundle
 * @throws BundleException if the installation fails
 */
public long installBundle( final String bundleUrl )
    throws BundleException
{
    LOG.info( "Install bundle from URL [" + bundleUrl + "]" );
    return m_bundleContext.installBundle( bundleUrl ).getBundleId();
} |
java | /**
 * Serializes this node: the ROOT marker followed by the two delegates.
 * The write order (marker, mDel, mStrucDel) defines the on-disk layout and
 * must match the corresponding deserialization code.
 *
 * @param pOutput sink to write to
 * @throws TTIOException wrapping any underlying IOException
 */
@Override
public void serialize(final DataOutput pOutput) throws TTIOException {
    try {
        pOutput.writeInt(IConstants.ROOT);
        mDel.serialize(pOutput);
        mStrucDel.serialize(pOutput);
    } catch (final IOException exc) {
        throw new TTIOException(exc);
    }
} |
java | /**
 * Parses a failure string into a Failure object: the title precedes the
 * first '(', the code sits between the first '(' and first ')', and the
 * optional details follow the first '-'.
 *
 * NOTE(review): assumes '(' and ')' are present -- a missing bracket makes
 * indexOf return -1 and substring throw.  The dash is located anywhere in
 * the input, so a '-' occurring inside the title would shift `details`;
 * confirm the expected input format.
 *
 * @param failure raw failure text to parse
 * @return parsed Failure instance
 */
public static Failure parse(String failure) {
    Failure result = new Failure();
    int dash = failure.indexOf('-');
    int leftBracet = failure.indexOf('(');
    int rightBracet = failure.indexOf(')');
    result.title = failure.substring(0, leftBracet).trim();
    result.code = failure.substring(leftBracet + 1, rightBracet).trim();
    if (dash > 0) {
        result.details = failure.substring(dash + 1).trim();
    }
    return result;
} |
java | protected void refreshItem(CacheItem item, Cache cache) throws Exception {
CacheLoader loader = item.getLoader();
Object[] loaderParams = item.getLoaderParams();
if (loader == null) {
throw new InternalCacheEngineException("No cache loader for " + getScopeAndKeyString(item));
}
if (logger.isDebugEnabled()) {
logger.debug("Refreshing " + getScopeAndKeyString(item));
}
Object newValue = loader.load(loaderParams);
if (newValue != null) {
cache.put(item.getScope(), item.getKey(), newValue, item.getTicksToExpire(), item.getTicksToRefresh(),
item.getLoader(), item.getLoaderParams());
} else {
// If newValue returned is null, remove the item from the cache
cache.remove(item.getScope(), item.getKey());
}
} |
java | public List<NamedStoredProcedureQuery<OrmDescriptor>> getAllNamedStoredProcedureQuery()
{
List<NamedStoredProcedureQuery<OrmDescriptor>> list = new ArrayList<NamedStoredProcedureQuery<OrmDescriptor>>();
List<Node> nodeList = model.get("named-stored-procedure-query");
for(Node node: nodeList)
{
NamedStoredProcedureQuery<OrmDescriptor> type = new NamedStoredProcedureQueryImpl<OrmDescriptor>(this, "named-stored-procedure-query", model, node);
list.add(type);
}
return list;
} |
python | def _validate_date_like_dtype(dtype):
"""
Check whether the dtype is a date-like dtype. Raises an error if invalid.
Parameters
----------
dtype : dtype, type
The dtype to check.
Raises
------
TypeError : The dtype could not be casted to a date-like dtype.
ValueError : The dtype is an illegal date-like dtype (e.g. the
the frequency provided is too specific)
"""
try:
typ = np.datetime_data(dtype)[0]
except ValueError as e:
raise TypeError('{error}'.format(error=e))
if typ != 'generic' and typ != 'ns':
msg = '{name!r} is too specific of a frequency, try passing {type!r}'
raise ValueError(msg.format(name=dtype.name, type=dtype.type.__name__)) |
java | private static void assertAllLoopablePasses(List<PassFactory> passes) {
for (PassFactory pass : passes) {
checkState(!pass.isOneTimePass());
}
} |
java | public void setBundleStartLevel( long bundleId, int startLevel )
throws RemoteException, BundleException
{
try
{
final StartLevel startLevelService = getService( StartLevel.class, 0 );
startLevelService.setBundleStartLevel( m_bundleContext.getBundle( bundleId ), startLevel );
}
catch( NoSuchServiceException e )
{
throw new BundleException( "Cannot get the start level service to set bundle start level" );
}
} |
python | def setErrorHandler(self, errorhandler):
    """
    Sets a new error handler.

    Args:
        errorhandler: The object handling AMPL errors and warnings.
    """
    # Adapter that forwards to the user handler while translating native
    # amplpython exceptions into AMPLException.  Exceptions raised by the
    # user handler cannot propagate through the C++ layer, so they are
    # stashed in `last_exception` and re-raised later via check().
    class ErrorHandlerWrapper(ErrorHandler):
        def __init__(self, errorhandler):
            self.errorhandler = errorhandler
            self.last_exception = None
        def error(self, exception):
            if isinstance(exception, amplpython.AMPLException):
                exception = AMPLException(exception)
            try:
                self.errorhandler.error(exception)
            except Exception as e:
                self.last_exception = e
        def warning(self, exception):
            if isinstance(exception, amplpython.AMPLException):
                exception = AMPLException(exception)
            try:
                self.errorhandler.warning(exception)
            except Exception as e:
                self.last_exception = e
        def check(self):
            # Re-raise (once) any exception captured during a callback.
            if self.last_exception is not None:
                e, self.last_exception = self.last_exception, None
                raise e
    errorhandler_wrapper = ErrorHandlerWrapper(errorhandler)
    # Thin native-facing shim; the native API requires an amplpython
    # ErrorHandler subclass, so the wrapper is bridged through it.
    class InnerErrorHandler(amplpython.ErrorHandler):
        def error(self, exception):
            errorhandler_wrapper.error(exception)
        def warning(self, exception):
            errorhandler_wrapper.warning(exception)
    # References are kept on self so the handlers outlive this call
    # (the native side does not own them).
    self._errorhandler = errorhandler
    self._errorhandler_inner = InnerErrorHandler()
    self._errorhandler_wrapper = errorhandler_wrapper
    lock_and_call(
        lambda: self._impl.setErrorHandler(self._errorhandler_inner),
        self._lock
    ) |
def meta_description(request):
    """
    {% meta_description request %}

    Returns the page's meta description, optionally suffixed with the
    site-wide ``META_DESCRIPTION`` setting.
    """
    # Bug fix: catch only the attribute lookup failure instead of a bare
    # ``except:``, which also swallowed KeyboardInterrupt/SystemExit.
    try:
        fragments = request._feincms_fragments
    except AttributeError:
        fragments = {}
    if fragments.get("_meta_description"):
        return fragments.get("_meta_description")
    # Bug fix: the old getattr() default re-evaluated the same attribute,
    # so it never actually provided a fallback; access it directly.
    page_description = request.leonardo_page.meta_description
    site_desc = getattr(settings, 'META_DESCRIPTION', '')
    if site_desc != '':
        # append site-wide description
        return page_description + ' - ' + site_desc
    return page_description
def get_method(self, name, descriptor):
    """
    Get the method by name and descriptor,
    or create a new one if the requested method does not exists.

    :param name: method name
    :param descriptor: method descriptor, for example `'(I)V'`
    :return: :class:`ExternalMethod`
    """
    key = "%s%s" % (name, descriptor)
    try:
        return self.methods[key]
    except KeyError:
        method = ExternalMethod(self.name, name, descriptor)
        self.methods[key] = method
        return method
java | public static authenticationradiuspolicy_vpnvserver_binding[] get(nitro_service service, String name) throws Exception{
authenticationradiuspolicy_vpnvserver_binding obj = new authenticationradiuspolicy_vpnvserver_binding();
obj.set_name(name);
authenticationradiuspolicy_vpnvserver_binding response[] = (authenticationradiuspolicy_vpnvserver_binding[]) obj.get_resources(service);
return response;
} |
java | public <S, T> FromUnmarshaller<S, T> findUnmarshaller(ConverterKey<S,T> key) {
Converter<T,S> converter = findConverter(key.invert());
if (converter == null) {
return null;
}
if (FromUnmarshallerConverter.class.isAssignableFrom(converter.getClass())) {
return ((FromUnmarshallerConverter<S, T>)converter).getUnmarshaller();
} else {
return new ConverterFromUnmarshaller<S, T>(converter);
}
} |
java | /**
 * Converts the given variables to CONSTANT type in place.
 *
 * <p>Each variable's current array becomes its constant value.  Cached
 * sessions and the gradient function are invalidated, and any training
 * state (trainable-parameter list, updater state/views/map) referring to
 * the converted variables is rebuilt without them.  ARRAY-type variables
 * cannot be converted; a no-op if every variable is already a constant.
 *
 * @param variables variables to convert; must have arrays set
 */
public void convertToConstants(List<SDVariable> variables){
    if(variables.size() == 0)
        return;
    boolean allConst = true;
    for(SDVariable variable : variables) {
        if (variable.getVariableType() != VariableType.CONSTANT) {
            allConst = false;
            Preconditions.checkState(variable.getVariableType() != VariableType.ARRAY, "Cannot convert variable of type ARRAY to a constant: %s", variable);
        }
    }
    if(allConst){
        return; //No op
    }
    //Remove all sessions in case they have any cached arrays/state
    sessions.clear();
    //If gradient function has been defined, remove it (so it will be recreated later)
    sameDiffFunctionInstances.remove("grad");
    // Move each variable's array into the constant store and purge any
    // per-thread placeholder entries under the same name.
    for(SDVariable variable : variables ) {
        String n = variable.getVarName();
        INDArray arr = variable.getArr();
        Preconditions.checkNotNull(arr, "Could not get array for variable %s: if this is a placeholder, use SDVariable.setArray before converting", variable);
        constantArrays.put(n, new DeviceLocalNDArray(arr));
        variablesArrays.remove(n);
        if(!placeholdersPerThread.isEmpty()){
            for(Map<String,INDArray> m : placeholdersPerThread.values()){
                m.remove(n);
            }
        }
        variable.setVariableType(VariableType.CONSTANT);
    }
    if(trainingConfig != null){
        Set<String> toRemove = new HashSet<>();
        boolean anyTrainableParmsModified = false;
        List<String> origTrainableParams = trainingConfig.getTrainableParams();
        for(SDVariable v : variables){
            toRemove.add(v.getVarName());
            if(!anyTrainableParmsModified && origTrainableParams.contains(v.getVarName())){
                anyTrainableParmsModified = true;
            }
        }
        //Remove updater state for this variable: updaterState, updaterViews, updaterMap
        if(anyTrainableParmsModified) {
            List<String> newTrainableParams = new ArrayList<>();
            for (String s : origTrainableParams) {
                if (!toRemove.contains(s)) {
                    newTrainableParams.add(s);
                }
            }
            trainingConfig.setTrainableParams(newTrainableParams);
        }
        if(initializedTraining){
            // Rebuild the flat updater-state array from the per-parameter
            // views that survive the conversion.
            List<INDArray> newUpdaterState = new ArrayList<>();
            for (String s : origTrainableParams) {
                INDArray stateArr = updaterViews.get(s);
                if (!toRemove.contains(s)) {
                    newUpdaterState.add(stateArr);
                }
            }
            updaterState = newUpdaterState.isEmpty() ? null : Nd4j.concat(0, newUpdaterState.toArray(new INDArray[newUpdaterState.size()]));
            //Now, update updaterViews map:
            // Re-slice the concatenated state into per-parameter views and
            // re-instantiate each parameter's updater on its view.
            long viewSoFar = 0;
            updaterViews = new HashMap<>();
            updaterMap = new HashMap<>();
            for(String s : trainingConfig.getTrainableParams()) {
                long thisSize = trainingConfig.getUpdater().stateSize(this.variables.get(s).getVariable().getArr().length());
                INDArray view = (updaterState == null || thisSize == 0 ? null :
                        updaterState.get(NDArrayIndex.interval(0, 1), NDArrayIndex.interval(viewSoFar, viewSoFar + thisSize)));
                updaterViews.put(s, view);
                updaterMap.put(s, trainingConfig.getUpdater().instantiate(view, false));
                viewSoFar += thisSize;
            }
        }
    }
} |
def symmetric_difference(self, other):
    """
    Return a tree with elements only in self or other but not
    both.
    """
    if not isinstance(other, set):
        other = set(other)
    # set.symmetric_difference is exactly (A - B) | (B - A).
    return IntervalTree(set(self).symmetric_difference(other))
java | /**
 * Writes this processor's state in POF format after the superclass state.
 * The property indices (10, 11, 12) are part of the wire format and must
 * not change, and must match the corresponding readExternal implementation.
 *
 * @param writer POF writer to serialize into
 * @throws IOException if writing fails
 */
public void writeExternal(PofWriter writer)
    throws IOException {
    super.writeExternal(writer);
    writer.writeBinary(10, toBinary(value));
    writer.writeBoolean(11, fAllowInsert);
    writer.writeBoolean(12, fReturn);
} |
java | public Map<ModelField,Set<Command>> process(ModelFactory modelFactory, Erector erector, Object model) throws PolicyException {
Map<ModelField,Set<Command>> modelFieldCommands = new HashMap<ModelField,Set<Command>>();
for ( ModelField modelField : erector.getModelFields() ) {
logger.debug( " {} {}", getTarget(), modelField);
if ( modelField.getName().equals( field ) ) {
Set<Command> commands = modelFieldCommands.get( modelField );
if ( commands == null ) {
commands = new HashSet<Command>();
}
commands.add( Command.SKIP_REFERENCE_INJECTION );
modelFieldCommands.put( modelField, commands );
}
}
return modelFieldCommands;
} |
def process_read_batch(self, batch):
    """Process a single, partitioned read.

    :type batch: mapping
    :param batch:
        one of the mappings returned from an earlier call to
        :meth:`generate_read_batches`.

    :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
    :returns: a result set instance which can be used to consume rows.
    """
    # Deep-copy so the caller's batch mapping is never mutated.
    read_args = copy.deepcopy(batch["read"])
    read_args["keyset"] = KeySet._from_dict(read_args.pop("keyset"))
    return self._get_snapshot().read(partition=batch["partition"], **read_args)
java | private static String getResourceSuffix(Locale locale)
{
String suffix = "_" + locale.getLanguage();
String country = locale.getCountry();
if (country.equals("TW"))
suffix += "_" + country;
return suffix;
} |
def get_rich_menu(self, rich_menu_id, timeout=None):
    """Call get rich menu API.

    https://developers.line.me/en/docs/messaging-api/reference/#get-rich-menu

    :param str rich_menu_id: ID of the rich menu
    :param timeout: (optional) How long to wait for the server
        to send data before giving up, as a float,
        or a (connect timeout, read timeout) float tuple.
        Default is self.http_client.timeout
    :type timeout: float | tuple(float, float)
    :rtype: :py:class:`linebot.models.responses.RichMenuResponse`
    :return: RichMenuResponse instance
    """
    endpoint = '/v2/bot/richmenu/{rich_menu_id}'.format(rich_menu_id=rich_menu_id)
    response = self._get(endpoint, timeout=timeout)
    return RichMenuResponse.new_from_json_dict(response.json)
java | public static <T extends Object> T[] splice (T[] values, int offset)
{
int length = (values == null) ? 0 : values.length - offset;
return splice(values, offset, length);
} |
java | public static Vec compose(TransfVec origVec, int[][] transfMap, String[] domain, boolean keepOrig) {
// Do a mapping from INT -> ENUM -> this vector ENUM
int[][] domMap = Utils.compose(new int[][] {origVec._values, origVec._indexes }, transfMap);
Vec result = origVec.masterVec().makeTransf(domMap[0], domMap[1], domain);;
if (!keepOrig) DKV.remove(origVec._key);
return result;
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.