language (string; 2 classes) | func_code_string (string; 63 to 466k characters) |
---|---|
java | @Pure
public BusStop[] busStopsArray() {
return Collections.unmodifiableList(this.busStops).toArray(new BusStop[this.busStops.size()]);
} |
python | def _ParseNoHeaderSingleLine(self, parser_mediator, structure):
"""Parse an isolated header line and store appropriate attributes.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
structure (pyparsing.ParseResults): structure of tokens derived from
a line of a text file.
"""
if not self._last_event_data:
logger.debug('SkyDrive, found isolated line with no previous events')
return
event_data = SkyDriveOldLogEventData()
event_data.offset = self._last_event_data.offset
event_data.text = structure.text
event = time_events.DateTimeValuesEvent(
self._last_date_time, definitions.TIME_DESCRIPTION_ADDED)
parser_mediator.ProduceEventWithEventData(event, event_data)
# TODO: consider a possible refactoring for the non-header lines.
self._last_date_time = None
self._last_event_data = None |
python | def make(self, pnum):
"""
Make a PID file and populate with PID number.
"""
try:
# Create the PID file
self.mkfile(self.pid_file, pnum)
except Exception as e:
self.die('Failed to generate PID file: {}'.format(str(e))) |
java | public static UUID getNext(UUID uuid) {
checkArgument(uuid.version() == 1, "Not a time UUID");
UUID max = maximumUuid();
long lsb = uuid.getLeastSignificantBits();
if (lsb < max.getLeastSignificantBits()) {
return new UUID(uuid.getMostSignificantBits(), lsb + 1);
}
long timestamp = uuid.timestamp();
if (timestamp < max.timestamp()) {
return new UUID(getMostSignificantBits(timestamp + 1), minimumUuid().getLeastSignificantBits());
}
return null; // No next exists since uuid == maximumUuid()
} |
java | public static void postAction (Component source, String command)
{
// slip things onto the event queue for later
ActionEvent event = new ActionEvent(source, 0, command);
EventQueue.invokeLater(new ActionInvoker(event));
} |
java | @Override
public <Y> Expression<Y> coalesce(Expression<? extends Y> arg0, Y arg1)
{
// TODO Auto-generated method stub
return null;
} |
java | @SuppressWarnings("unchecked")
public void clear()
{
for (@DoNotSub int i = 0, size = values.length; i < size; i++)
{
final Object value = values[i];
if (null != value)
{
values[i] = null;
this.size--;
evictionConsumer.accept((V)value);
}
}
} |
python | def hide_filter_change_update_delay_holder_filter_change_update_delay_filter_delay_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_filter_change_update_delay_holder = ET.SubElement(config, "hide-filter-change-update-delay-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
filter_change_update_delay = ET.SubElement(hide_filter_change_update_delay_holder, "filter-change-update-delay")
filter_delay_value = ET.SubElement(filter_change_update_delay, "filter-delay-value")
filter_delay_value.text = kwargs.pop('filter_delay_value')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
java | private JsonObject toJsonObject() {
JsonObjectBuilder factory = Json.createObjectBuilder();
if (profile != null) {
factory.add("profile", profile);
}
if (!hosts.isEmpty()) {
JsonArrayBuilder ab = Json.createArrayBuilder();
for (String host : hosts) {
ab.add(host);
}
factory.add("hosts", ab.build());
}
if (label != null) {
factory.add("label", label);
}
if (caName != null) {
factory.add(HFCAClient.FABRIC_CA_REQPROP, caName);
}
factory.add("certificate_request", csr);
if (attrreqs != null) {
JsonArrayBuilder ab = Json.createArrayBuilder();
for (AttrReq attrReq : attrreqs.values()) {
JsonObjectBuilder i = Json.createObjectBuilder();
i.add("name", attrReq.name);
if (attrReq.optional != null) {
i.add("optional", attrReq.optional);
}
ab.add(i);
}
factory.add("attr_reqs", ab.build());
}
return factory.build();
} |
java | @Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final RelativeLayout rl = new RelativeLayout(this);
this.mMapView = new MapView(this);
this.mMapView.setTilesScaledToDpi(true);
rl.addView(this.mMapView, new RelativeLayout.LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
/* Itemized Overlay */
{
/* Create a static ItemizedOverlay showing some Markers on some cities. */
final ArrayList<OverlayItem> items = new ArrayList<>();
items.add(new OverlayItem("Hannover", "SampleDescription", new GeoPoint(52.370816, 9.735936)));
items.add(new OverlayItem("Berlin", "SampleDescription", new GeoPoint(52.518333, 13.408333)));
items.add(new OverlayItem("Washington", "SampleDescription", new GeoPoint(38.895000, -77.036667)));
items.add(new OverlayItem("San Francisco", "SampleDescription", new GeoPoint(37.779300, -122.419200)));
items.add(new OverlayItem("Tolaga Bay", "SampleDescription", new GeoPoint(-38.371000, 178.298000)));
/* OnTapListener for the Markers, shows a simple Toast. */
this.mMyLocationOverlay = new ItemizedIconOverlay<>(items,
new ItemizedIconOverlay.OnItemGestureListener<OverlayItem>() {
@Override
public boolean onItemSingleTapUp(final int index, final OverlayItem item) {
Toast.makeText(
SampleWithMinimapItemizedoverlay.this,
"Item '" + item.getTitle() + "' (index=" + index
+ ") got single tapped up", Toast.LENGTH_LONG).show();
return true; // We 'handled' this event.
}
@Override
public boolean onItemLongPress(final int index, final OverlayItem item) {
Toast.makeText(
SampleWithMinimapItemizedoverlay.this,
"Item '" + item.getTitle() + "' (index=" + index
+ ") got long pressed", Toast.LENGTH_LONG).show();
return true;
}
}, getApplicationContext());
this.mMapView.getOverlays().add(this.mMyLocationOverlay);
}
/* MiniMap */
{
final MinimapOverlay miniMapOverlay = new MinimapOverlay(this,
mMapView.getTileRequestCompleteHandler());
this.mMapView.getOverlays().add(miniMapOverlay);
}
/* list of items currently displayed */
{
final MapEventsReceiver mReceive = new MapEventsReceiver() {
@Override
public boolean singleTapConfirmedHelper(GeoPoint p) {
return false;
}
@Override
public boolean longPressHelper(final GeoPoint p) {
final List<OverlayItem> displayed = mMyLocationOverlay.getDisplayedItems();
final StringBuilder buffer = new StringBuilder();
String sep = "";
for (final OverlayItem item : displayed) {
buffer.append(sep).append('\'').append(item.getTitle()).append('\'');
sep = ", ";
}
Toast.makeText(
SampleWithMinimapItemizedoverlay.this,
"Currently displayed: " + buffer.toString(), Toast.LENGTH_LONG).show();
return true;
}
};
mMapView.getOverlays().add(new MapEventsOverlay(mReceive));
final RotationGestureOverlay rotationGestureOverlay = new RotationGestureOverlay(mMapView);
rotationGestureOverlay.setEnabled(true);
mMapView.getOverlays().add(rotationGestureOverlay);
}
this.setContentView(rl);
// Default location and zoom level
IMapController mapController = mMapView.getController();
mapController.setZoom(5.);
GeoPoint startPoint = new GeoPoint(50.936255, 6.957779);
mapController.setCenter(startPoint);
} |
java | private void seekToChunkStart()
{
if (getOnDiskFilePointer() != chunkOffset)
{
try
{
out.seek(chunkOffset);
}
catch (IOException e)
{
throw new FSReadError(e, getPath());
}
}
} |
java | public void doPost (HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String alertId = request.getParameter("alertId");
String tel = request.getParameter("tel");
String alertText = request.getParameter("alertText");
if(alertText == null || alertText.length() < 1) {
// Get the content of the request as the text to parse
byte[] content = new byte[request.getContentLength()];
request.getInputStream().read(content,0, request.getContentLength());
alertText = new String(content);
}
if(logger.isInfoEnabled()) {
logger.info("Got an alert : \n alertID : " + alertId + " \n tel : " + tel + " \n text : " +alertText);
}
//
try {
Properties jndiProps = new Properties();
Context initCtx = new InitialContext(jndiProps);
// Commented out since the preferred way is through SMS Servlets
// SleeConnectionFactory factory = (SleeConnectionFactory) initCtx.lookup("java:/MobicentsConnectionFactory");
//
// SleeConnection conn1 = factory.getConnection();
// ExternalActivityHandle handle = conn1.createActivityHandle();
//
// EventTypeID requestType = conn1.getEventTypeID(
// EVENT_TYPE,
// "org.mobicents", "1.0");
// SmsAlertingCustomEvent smsAlertingCustomEvent = new SmsAlertingCustomEvent(alertId, tel, alertText);
//
// conn1.fireEvent(smsAlertingCustomEvent, requestType, handle, null);
// conn1.close();
} catch (Exception e) {
logger.error("unexpected exception while firing the event " + EVENT_TYPE + " into jslee", e);
}
sendHttpResponse(response, OK_BODY);
} |
java | public List<java.lang.Object> getStartPageOrEndPageOrMedlinePgn() {
if (startPageOrEndPageOrMedlinePgn == null) {
startPageOrEndPageOrMedlinePgn = new ArrayList<java.lang.Object>();
}
return this.startPageOrEndPageOrMedlinePgn;
} |
java | @Override
public PipelineEventReader<XMLEventReader, XMLEvent> getEventReader(
HttpServletRequest request, HttpServletResponse response) {
// Get the reader from the parent and add the attribute incorporating wrapper
final PipelineEventReader<XMLEventReader, XMLEvent> cachingEventReader =
this.wrappedComponent.getEventReader(request, response);
final XMLEventReader eventReader =
new AttributeIncorporatingXMLEventReader(
request, response, cachingEventReader.getEventReader());
final Map<String, String> outputProperties = cachingEventReader.getOutputProperties();
return new PipelineEventReaderImpl<XMLEventReader, XMLEvent>(eventReader, outputProperties);
} |
python | def teleport(start_index, end_index, ancilla_index):
"""Teleport a qubit from start to end using an ancilla qubit
"""
program = make_bell_pair(end_index, ancilla_index)
ro = program.declare('ro', memory_size=3)
# do the teleportation
program.inst(CNOT(start_index, ancilla_index))
program.inst(H(start_index))
# measure the results and store them in classical registers [0] and [1]
program.measure(start_index, ro[0])
program.measure(ancilla_index, ro[1])
program.if_then(ro[1], X(2))
program.if_then(ro[0], Z(2))
program.measure(end_index, ro[2])
print(program)
return program |
java | @Override
public List<BundlePath> getItemPathList(Map<String, String> variants) {
return getItemPathList(bundlePathMapping.getItemPathList(), variants);
} |
python | def _add_sj_index_commands(fq1, ref_file, gtf_file):
"""
newer versions of STAR can generate splice junction databases on the fly
this is preferable since we can tailor it to the read lengths
"""
if _has_sj_index(ref_file):
return ""
else:
rlength = fastq.estimate_maximum_read_length(fq1)
cmd = " --sjdbGTFfile %s " % gtf_file
cmd += " --sjdbOverhang %s " % str(rlength - 1)
return cmd |
java | static <S extends Sequence<C>, C extends Compound> List<ProfilePair<S, C>>
runProfileAligners(List<ProfileProfileAligner<S, C>> aligners) {
int n = 1, all = aligners.size();
List<Future<ProfilePair<S, C>>> futures = new ArrayList<Future<ProfilePair<S, C>>>();
for (ProfileProfileAligner<S, C> aligner : aligners) {
futures.add(ConcurrencyTools.submit(new CallableProfileProfileAligner<S, C>(aligner),
String.format("Aligning pair %d of %d", n++, all)));
}
return getListFromFutures(futures);
} |
python | def _substitute_default(s, new_value):
"""Replaces the default value in a parameter docstring by a new value.
The docstring must conform to the numpydoc style and have the form
"something (keyname=<value-to-replace>)"
If no matching pattern is found or ``new_value`` is None, return
the input untouched.
Examples
--------
>>> _substitute_default('int (default=128)', 256)
'int (default=256)'
>>> _substitute_default('nonlin (default = ReLU())', nn.Hardtanh(1, 2))
'nonlin (default = Hardtanh(min_val=1, max_val=2))'
"""
if new_value is None:
return s
# BB: ideally, I would like to replace the 'default*' group
# directly but I haven't found a way to do this
i, j = _get_span(s, pattern=P_DEFAULTS)
if (i, j) == (-1, -1):
return s
return '{}{}{}'.format(s[:i], new_value, s[j:]) |
python | def reset(self):
""" Resets the readonly variables.
"""
self.p_lmbda = 0.0
self.q_lmbda = 0.0
self.mu_vmin = 0.0
self.mu_vmax = 0.0 |
python | def cast_datetime(ts, fmt=None):
"""cast timestamp to datetime or date str"""
dt = datetime.datetime.fromtimestamp(ts)
if fmt:
return dt.strftime(fmt)
return dt |
java | public static byte[] encodeMultiple(final byte[] input) {
if (input == null) {
throw new NullPointerException("input");
}
final byte[] output = new byte[input.length << 1]; // * 2
encodeMultiple(input, 0, output, 0, input.length);
return output;
} |
python | def get_related(self, content_id, min_strength=None):
'''Get positive relation labels for ``content_id``.
If ``min_strength`` is set, will restrict results to labels
with a ``rel_strength`` greater or equal to the provided
``RelationStrength`` value. Note: ``min_strength`` should be of
type ``RelationStrength``.
'''
def is_related(label):
if min_strength is not None:
return label.rel_strength >= min_strength
else:
return label.rel_strength.is_positive
labels = self.everything(content_id=content_id)
return ifilter(is_related, labels) |
java | public static void localRunnerNotification(JobConf conf, JobStatus status) {
JobEndStatusInfo notification = createNotification(conf, status);
if (notification != null) {
while (notification.configureForRetry()) {
try {
int code = httpNotification(notification.getUri());
if (code != 200) {
throw new IOException("Invalid response status code: " + code);
}
else {
break;
}
}
catch (IOException ioex) {
LOG.error("Notification error [" + notification.getUri() + "]", ioex);
}
catch (Exception ex) {
LOG.error("Notification error [" + notification.getUri() + "]", ex);
}
try {
synchronized (Thread.currentThread()) {
Thread.currentThread().sleep(notification.getRetryInterval());
}
}
catch (InterruptedException iex) {
LOG.error("Notification retry error [" + notification + "]", iex);
}
}
}
} |
java | public void addChild(String label, CmsAppHierarchyPanel child) {
Panel panel = new Panel();
panel.setCaption(label);
panel.setContent(child);
addComponent(panel);
} |
java | @SneakyThrows
public CouchDbConsentDecision copyDetailsFrom(final ConsentDecision other) {
setAttributes(other.getAttributes());
setPrincipal(other.getPrincipal());
setCreatedDate(other.getCreatedDate());
setId(other.getId());
setOptions(other.getOptions());
setReminder(other.getReminder());
setReminderTimeUnit(other.getReminderTimeUnit());
setService(other.getService());
return this;
} |
java | public static void proxyUserKillAllSpawnedHadoopJobs(final String logFilePath, Props jobProps,
File tokenFile, final Logger log) {
Properties properties = new Properties();
properties.putAll(jobProps.getFlattened());
try {
if (HadoopSecureWrapperUtils.shouldProxy(properties)) {
UserGroupInformation proxyUser =
HadoopSecureWrapperUtils.setupProxyUser(properties,
tokenFile.getAbsolutePath(), log);
proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
HadoopJobUtils.killAllSpawnedHadoopJobs(logFilePath, log);
return null;
}
});
} else {
HadoopJobUtils.killAllSpawnedHadoopJobs(logFilePath, log);
}
} catch (Throwable t) {
log.warn("something happened while trying to kill all spawned jobs", t);
}
} |
python | def _insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering):
"""_insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering) -> PyObject *"""
return _fitz.Page__insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering) |
python | def _ValidateArguments(arg_dict, type_check_dict):
"""Validate dictionary of arguments and return list of errors messages."""
messages = []
for arg_name, arg_value in arg_dict.items():
if arg_name in type_check_dict:
type_check = type_check_dict[arg_name]
res = _ValidateValue(arg_value, type_check)
if not res:
message = ("Invalid value '%s' for argument %s. Expected %s" %
(arg_value, arg_name, _FormatTypeCheck(type_check)))
messages.append(message)
return messages |
java | @Override
public void preProcess(DataSet toPreProcess) {
for (DataSetPreProcessor preProcessor : preProcessors) {
preProcessor.preProcess(toPreProcess);
}
} |
python | def str2dict_values(str_in):
'''
Extracts the values from a string that represents a dict and returns them
sorted by key.
Args:
str_in (string) that contains python dict
Returns:
(list) with values or None if no valid dict was found
Raises:
-
'''
tmp_dict = str2dict(str_in)
if tmp_dict is None:
return None
return [tmp_dict[key] for key in sorted(k for k in tmp_dict)] |
java | public <R> void visit(String key, Visitor<T, R> visitor) {
if (root != null) {
visit(key, visitor, null, root);
}
} |
java | private void writeMethod(Definition def, Writer out, int indent) throws IOException
{
if (def.getMcfDefs().get(getNumOfMcf()).isDefineMethodInConnection())
{
if (def.getMcfDefs().get(getNumOfMcf()).getMethods().size() > 0)
{
for (MethodForConnection method : def.getMcfDefs().get(getNumOfMcf()).getMethods())
{
writeMethodSignature(out, indent, method);
writeLeftCurlyBracket(out, indent);
writeLogging(def, out, indent + 1, "trace", method.getMethodName());
if (!method.getReturnType().equals("void"))
{
writeEol(out);
if (BasicType.isPrimitiveType(method.getReturnType()))
{
writeWithIndent(out, indent + 1, "return " + BasicType.defaultValue(method.getReturnType()) + ";");
}
else
{
writeWithIndent(out, indent + 1, "return null;");
}
}
writeRightCurlyBracket(out, indent);
}
}
}
else
{
writeSimpleMethodSignature(out, indent, " * Call me", "void callMe()");
writeLeftCurlyBracket(out, indent);
writeLogging(def, out, indent + 1, "trace", "callMe");
writeRightCurlyBracket(out, indent);
}
} |
java | private boolean processBlockSignature(ReadBuffer readBuffer) {
if (this.mapFileHeader.getMapFileInfo().debugFile) {
// get and check the block signature
String signatureBlock = readBuffer.readUTF8EncodedString(SIGNATURE_LENGTH_BLOCK);
if (!signatureBlock.startsWith("###TileStart")) {
LOGGER.warning("invalid block signature: " + signatureBlock);
return false;
}
}
return true;
} |
python | def p_parens_expr(p):
"""
expr : LPAREN expr RPAREN
"""
p[0] = node.expr(op="parens", args=node.expr_list([p[2]])) |
python | def _render(roster_file, **kwargs):
"""
Render the roster file
"""
renderers = salt.loader.render(__opts__, {})
domain = __opts__.get('roster_domain', '')
try:
result = salt.template.compile_template(roster_file,
renderers,
__opts__['renderer'],
__opts__['renderer_blacklist'],
__opts__['renderer_whitelist'],
mask_value='passw*',
**kwargs)
result.setdefault('host', '{}.{}'.format(os.path.basename(roster_file), domain))
return result
except: # pylint: disable=W0702
log.warning('Unable to render roster file "%s".', roster_file, exc_info=True)
return {} |
python | def edf_totdev(N, m, alpha):
""" Equivalent degrees of freedom for Total Deviation
FIXME: what is the right behavior for alpha outside 0,-1,-2?
NIST SP1065 page 41, Table 7
"""
alpha = int(alpha)
if alpha in [0, -1, -2]:
# alpha 0 WFM
# alpha -1 FFM
# alpha -2 RWFM
NIST_SP1065_table7 = [(1.50, 0.0), (1.17, 0.22), (0.93, 0.36)]
(b, c) = NIST_SP1065_table7[int(abs(alpha))]
return b*(float(N)/float(m))-c
else:
return edf_simple(N, m, alpha) |
java | public synchronized void deploy(final DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException {
final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit();
final DeploymentUnit parent = deploymentUnit.getParent();
final DeploymentUnit topLevelDeployment = parent == null ? deploymentUnit : parent;
final VirtualFile topLevelRoot = topLevelDeployment.getAttachment(Attachments.DEPLOYMENT_ROOT).getRoot();
final ExternalModuleService externalModuleService = topLevelDeployment.getAttachment(Attachments.EXTERNAL_MODULE_SERVICE);
final ResourceRoot deploymentRoot = deploymentUnit.getAttachment(Attachments.DEPLOYMENT_ROOT);
//These are resource roots that are already accessible by default
//such as ear/lib jars and web-inf/lib jars
final Set<VirtualFile> existingAccessibleRoots = new HashSet<VirtualFile>();
final Map<VirtualFile, ResourceRoot> subDeployments = new HashMap<VirtualFile, ResourceRoot>();
for (ResourceRoot root : DeploymentUtils.allResourceRoots(topLevelDeployment)) {
if (SubDeploymentMarker.isSubDeployment(root)) {
subDeployments.put(root.getRoot(), root);
} else if (ModuleRootMarker.isModuleRoot(root)) {
//top level module roots are already accessible, as they are either
//ear/lib jars, or jars that are already part of the deployment
existingAccessibleRoots.add(root.getRoot());
}
}
final ArrayDeque<RootEntry> resourceRoots = new ArrayDeque<RootEntry>();
if (deploymentUnit.getParent() != null) {
//top level deployments already had their existing roots processed above
for (ResourceRoot root : DeploymentUtils.allResourceRoots(deploymentUnit)) {
if (ModuleRootMarker.isModuleRoot(root)) {
//if this is a sub deployment of an ear we need to make sure we don't
//re-add existing module roots as class path entries
//this will mainly be WEB-INF/(lib|classes) entries
existingAccessibleRoots.add(root.getRoot());
}
}
}
for (ResourceRoot root : DeploymentUtils.allResourceRoots(deploymentUnit)) {
//add this to the list of roots to be processed
resourceRoots.add(new RootEntry(deploymentUnit, root));
}
// build a map of the additional module locations
// note that if a resource root has been added to two different additional modules
// and is then referenced via a Class-Path entry the behaviour is undefined
final Map<VirtualFile, AdditionalModuleSpecification> additionalModules = new HashMap<VirtualFile, AdditionalModuleSpecification>();
for (AdditionalModuleSpecification module : topLevelDeployment.getAttachmentList(Attachments.ADDITIONAL_MODULES)) {
for (ResourceRoot additionalModuleResourceRoot : module.getResourceRoots()) {
additionalModules.put(additionalModuleResourceRoot.getRoot(), module);
}
}
//additional resource roots may be added to the queue as class path entries are processed
while (!resourceRoots.isEmpty()) {
final RootEntry entry = resourceRoots.pop();
final ResourceRoot resourceRoot = entry.resourceRoot;
final Attachable target = entry.target;
//if this is a top level deployment we do not want to process sub deployments
if (SubDeploymentMarker.isSubDeployment(resourceRoot) && resourceRoot != deploymentRoot) {
continue;
}
final String[] items = getClassPathEntries(resourceRoot);
for (final String item : items) {
if (item.isEmpty() || item.equals(".")) { //a class path of . causes problems and is unnecessary, see WFLY-2725
continue;
}
//first try and resolve relative to the manifest resource root
final VirtualFile classPathFile = resourceRoot.getRoot().getParent().getChild(item);
//then resolve relative to the deployment root
final VirtualFile topLevelClassPathFile = deploymentRoot.getRoot().getParent().getChild(item);
if (item.startsWith("/")) {
if (externalModuleService.isValid(item)) {
final ModuleIdentifier moduleIdentifier = externalModuleService.addExternalModule(item);
target.addToAttachmentList(Attachments.CLASS_PATH_ENTRIES, moduleIdentifier);
ServerLogger.DEPLOYMENT_LOGGER.debugf("Resource %s added as external jar %s", classPathFile, resourceRoot.getRoot());
} else {
ServerLogger.DEPLOYMENT_LOGGER.classPathEntryNotValid(item, resourceRoot.getRoot().getPathName());
}
} else {
if (classPathFile.exists()) {
//we need to check that this class path item actually lies within the deployment
boolean found = false;
VirtualFile file = classPathFile.getParent();
while (file != null) {
if (file.equals(topLevelRoot)) {
found = true;
}
file = file.getParent();
}
if (!found) {
ServerLogger.DEPLOYMENT_LOGGER.classPathEntryNotValid(item, resourceRoot.getRoot().getPathName());
} else {
handlingExistingClassPathEntry(resourceRoots, topLevelDeployment, topLevelRoot, subDeployments, additionalModules, existingAccessibleRoots, resourceRoot, target, classPathFile);
}
} else if (topLevelClassPathFile.exists()) {
boolean found = false;
VirtualFile file = topLevelClassPathFile.getParent();
while (file != null) {
if (file.equals(topLevelRoot)) {
found = true;
}
file = file.getParent();
}
if (!found) {
ServerLogger.DEPLOYMENT_LOGGER.classPathEntryNotValid(item, resourceRoot.getRoot().getPathName());
} else {
handlingExistingClassPathEntry(resourceRoots, topLevelDeployment, topLevelRoot, subDeployments, additionalModules, existingAccessibleRoots, resourceRoot, target, topLevelClassPathFile);
}
} else {
ServerLogger.DEPLOYMENT_LOGGER.classPathEntryNotValid(item, resourceRoot.getRoot().getPathName());
}
}
}
}
} |
python | def filename_from_webname(self, webname):
"""
A 1<->1 map is maintained between a URL pointing to a file and
the name of the file in the file system.
As an example if the base_url is 'https://example.com' and a jwks_uri
is 'https://example.com/jwks_uri.json' then the filename of the
corresponding file on the local filesystem would be 'jwks_uri'.
Relative to the directory from which the RP instance is run.
:param webname: The published URL
:return: local filename
"""
if not webname.startswith(self.base_url):
raise ValueError("Webname doesn't match base_url")
_name = webname[len(self.base_url):]
if _name.startswith('/'):
return _name[1:]
else:
return _name |
python | def update_group_properties(self, group_id, group_properties, headers=None, endpoint_url=None):
"""
Update a group's properties with values provided in "group_properties" dictionary
:param str group_id: group ID
:param dict group_properties: group properties to update with a new values
:param dict headers: custom request headers (if isn't set default values are used)
:param str endpoint_url: where to send the request (if isn't set default value is used)
:return: Response
"""
endpoint_url = endpoint_url or self._endpoint_url
url = endpoint_url + '/groups/' + group_id + '/properties'
headers = headers or self._default_headers()
payload = group_properties
response = requests.put(url, headers=headers, json=payload)
return response |
python | def summary(self):
"""Summary statistics describing the fit.
Set alpha property in the object before calling.
Returns
-------
df : DataFrame
Contains columns coef, np.exp(coef), se(coef), z, p, lower, upper"""
ci = 1 - self.alpha
with np.errstate(invalid="ignore", divide="ignore"):
df = pd.DataFrame(index=self.hazards_.index)
df["coef"] = self.hazards_
df["exp(coef)"] = np.exp(self.hazards_)
df["se(coef)"] = self.standard_errors_
df["z"] = self._compute_z_values()
df["p"] = self._compute_p_values()
df["-log2(p)"] = -np.log2(df["p"])
df["lower %g" % ci] = self.confidence_intervals_["lower-bound"]
df["upper %g" % ci] = self.confidence_intervals_["upper-bound"]
return df |
java | static String enclosing(String identifier) {
int hash = identifier.lastIndexOf('#');
if (hash != -1) return identifier.substring(0, hash);
int from = identifier.endsWith(".*") ? identifier.length() - 3 : identifier.length() - 1;
int dot = identifier.lastIndexOf('.', from);
if (dot != -1) return identifier.substring(0, dot) + ".*";
return !identifier.equals("*") ? "*" : null;
} |
java | public Response batchCorrectLocation(Float lat, Float lon, Integer accuracy, String placeId, String woeId) throws JinxException {
JinxUtils.validateParams(lat, lon, accuracy);
if (JinxUtils.isNullOrEmpty(placeId)) {
JinxUtils.validateParams(woeId);
}
Map<String, String> params = new TreeMap<>();
params.put("method", "flickr.photos.geo.batchCorrectLocation");
params.put("lat", lat.toString());
params.put("lon", lon.toString());
params.put("accuracy", accuracy.toString());
if (!JinxUtils.isNullOrEmpty(placeId)) {
params.put("place_id", placeId);
}
if (!JinxUtils.isNullOrEmpty(woeId)) {
params.put("woe_id", woeId);
}
return jinx.flickrPost(params, Response.class);
} |
python | def head(self, path, query=None, data=None, redirects=True):
"""
HEAD request wrapper for :func:`request()`
"""
return self.request('HEAD', path, query, None, redirects) |
java | @Override
public GetAliasResult getAlias(GetAliasRequest request) {
request = beforeClientExecution(request);
return executeGetAlias(request);
} |
java | public GlobalCluster withGlobalClusterMembers(GlobalClusterMember... globalClusterMembers) {
if (this.globalClusterMembers == null) {
setGlobalClusterMembers(new com.amazonaws.internal.SdkInternalList<GlobalClusterMember>(globalClusterMembers.length));
}
for (GlobalClusterMember ele : globalClusterMembers) {
this.globalClusterMembers.add(ele);
}
return this;
} |
java | public static DeviceType getDeviceType(@NonNull final Context context) {
Condition.INSTANCE.ensureNotNull(context, "The context may not be null");
return DeviceType.fromValue(context.getString(R.string.device_type));
} |
python | def get_published_or_draft(self):
"""
Return the published item, if it exists, otherwise, for privileged
users, return the draft version.
"""
if self.is_published:
return self
elif self.publishing_linked_id:
return self.publishing_linked
if is_draft_request_context():
return self.get_draft()
# There is no public version, and there is no privilege to view the
# draft version
return None |
java | @VisibleForTesting
public static <T> Iterable<T> getCandidatesViaServiceLoader(Class<T> klass, ClassLoader cl) {
Iterable<T> i = ServiceLoader.load(klass, cl);
// Attempt to load using the context class loader and ServiceLoader.
// This allows frameworks like http://aries.apache.org/modules/spi-fly.html to plug in.
if (!i.iterator().hasNext()) {
i = ServiceLoader.load(klass);
}
return i;
} |
java | public ItemData getItemData(final NodeData parentData, final QPathEntry name, ItemType itemType)
throws RepositoryException
{
final WorkspaceStorageConnection con = dataContainer.openConnection();
try
{
return con.getItemData(parentData, name, itemType);
}
finally
{
con.close();
}
} |
java | @Override
public void removeNotificationListener(ObjectName name, ObjectName listener) throws InstanceNotFoundException, ListenerNotFoundException, IOException {
checkConnection();
getNotificationRegistry().removeNotificationListener(name, listener);
} |
java | public static /*@pure@*/ double mean(double[] vector) {
double sum = 0;
if (vector.length == 0) {
return 0;
}
for (int i = 0; i < vector.length; i++) {
sum += vector[i];
}
return sum / (double) vector.length;
} |
python | def checkversion(version, REFVERSION=FOLIAVERSION):
"""Checks FoLiA version, returns 1 if the document is newer than the library, -1 if it is older, 0 if it is equal"""
try:
for refversion, docversion in zip([int(x) for x in REFVERSION.split('.')], [int(x) for x in version.split('.')]):
if docversion > refversion:
return 1 #doc is newer than library
elif docversion < refversion:
return -1 #doc is older than library
return 0 #versions are equal
except ValueError:
raise ValueError("Unable to parse document FoLiA version, invalid syntax") |
python | def add_figure(self, key, url, **kwargs):
"""Add a figure.
Args:
key (string): document key
url (string): document url
Keyword Args:
caption (string): simple description
label (string):
material (string):
original_url (string): original url
filename (string): current url
Returns: None
"""
figure = self._check_metadata_for_file(key=key, url=url, **kwargs)
for dict_key in (
'caption',
'label',
'material',
'filename',
'url',
'original_url',
):
if kwargs.get(dict_key) is not None:
figure[dict_key] = kwargs[dict_key]
if key_already_there(figure, self.record.get('figures', ())):
raise ValueError(
'There\'s already a figure with the key %s.'
% figure['key']
)
self._append_to('figures', figure)
self.add_document |
java | BigInteger setMultiChoiceCount() {
if (fields != null)
for (int i = 0; i < fields.length; i++)
multiChoiceCount = multiChoiceCount.multiply(fields[i].setMultiChoiceCount());
return multiChoiceCount;
} |
python | def set_variables(self, data):
"""
Set variables for the network.
Parameters
----------
data: dict
dict for variable in the form of example as shown.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_variables({'a': {'TYPE': 'discrete', 'XPOS': '13495',
... 'YPOS': '10465', 'DESCRIPTION': '(a) Metastatic Cancer',
... 'STATES': ['Present', 'Absent']},
... 'b': {'TYPE': 'discrete', 'XPOS': '11290',
... 'YPOS': '11965', 'DESCRIPTION': '(b) Serum Calcium Increase',
... 'STATES': ['Present', 'Absent']}})
"""
variables = etree.SubElement(self.bnmodel, "VARIABLES")
for var in sorted(data):
variable = etree.SubElement(variables, 'VAR', attrib={'NAME': var, 'TYPE': data[var]['TYPE'],
'XPOS': data[var]['XPOS'], 'YPOS': data[var]['YPOS']})
etree.SubElement(variable, 'DESCRIPTION', attrib={'DESCRIPTION': data[var]['DESCRIPTION']})
for state in data[var]['STATES']:
etree.SubElement(variable, 'STATENAME').text = state |
java | private void removeDeletable()
{
String fileName = "deletable";
try
{
if (indexDir.fileExists(fileName))
{
indexDir.deleteFile(fileName);
}
}
catch (IOException e)
{
LOG.warn("Unable to remove file 'deletable'.", e);
}
} |
python | def _format_list(result):
"""Format list responses into a table."""
if not result:
return result
if isinstance(result[0], dict):
return _format_list_objects(result)
table = Table(['value'])
for item in result:
table.add_row([iter_to_table(item)])
return table |
python | async def finish_authentication(self, username, password):
"""Finish authentication process.
A username (generated by new_credentials) and the PIN code shown on
screen must be provided.
"""
# Step 1
self.srp.step1(username, password)
data = await self._send_plist(
'step1', method='pin', user=username)
resp = plistlib.loads(data)
# Step 2
pub_key, key_proof = self.srp.step2(resp['pk'], resp['salt'])
await self._send_plist(
'step2',
pk=binascii.unhexlify(pub_key),
proof=binascii.unhexlify(key_proof))
# Step 3
epk, tag = self.srp.step3()
await self._send_plist('step3', epk=epk, authTag=tag)
return True |
java | private float[] generateParticleVelocities()
{
float velocities[] = new float[mEmitRate * 3];
for ( int i = 0; i < mEmitRate * 3; i +=3 )
{
Vector3f nexVel = getNextVelocity();
velocities[i] = nexVel.x;
velocities[i+1] = nexVel.y;
velocities[i+2] = nexVel.z;
}
return velocities;
} |
python | def checkVersion(self):
"""Check if the server use the same version of our protocol"""
r = self.doQuery('version')
if r.status_code == 200: # Query ok ?
data = r.json()
if data['result'] == 'Ok' and data['version'] == self.PI_API_VERSION and data['protocol'] == self.PI_API_NAME:
return True
return False |
java | @SuppressWarnings("UnstableApiUsage")
private void computeProjectDependencies(IdeaModule module) {
ARTIFACT_DEPS_OF_PRJ.computeIfAbsent(module.getName(), moduleName -> {
Map<String, Set<ArtifactSpec>> dependencies = new HashMap<>();
module.getDependencies().forEach(dep -> {
if (dep instanceof IdeaModuleDependency) {
// Add the dependency to the list.
String name = ((IdeaModuleDependency) dep).getTargetModuleName();
PRJ_DEPS_OF_PRJ.computeIfAbsent(moduleName, key -> new HashSet<>()).add(name);
} else if (dep instanceof ExternalDependency) {
ExternalDependency extDep = (ExternalDependency) dep;
GradleModuleVersion gav = extDep.getGradleModuleVersion();
ArtifactSpec spec = new ArtifactSpec("compile", gav.getGroup(), gav.getName(), gav.getVersion(),
"jar", null, extDep.getFile());
String depScope = dep.getScope().getScope();
dependencies.computeIfAbsent(depScope, s -> new HashSet<>()).add(spec);
}
});
return dependencies;
});
} |
java | private CmsRectangle getNativeCropRegion() {
CmsCroppingParamBean crop = m_croppingProvider.get();
CmsImageInfoBean info = m_imageInfoProvider.get();
if ((crop == null) || !crop.isCropped()) {
return CmsRectangle.fromLeftTopWidthHeight(0, 0, info.getWidth(), info.getHeight());
} else {
return CmsRectangle.fromLeftTopWidthHeight(
crop.getCropX(),
crop.getCropY(),
crop.getCropWidth(),
crop.getCropHeight());
}
} |
python | def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix") |
java | public void buildFieldComments(XMLNode node, Content fieldDocTree) {
if (!configuration.nocomment) {
writer.addComments(currentElement, fieldDocTree);
}
} |
java | public void endElement(String uri, String name, String qName)
{
boolean isDebug = logger.isDebugEnabled();
try
{
switch (getLiteralId(qName))
{
case MAPPING_REPOSITORY:
{
currentAttributeContainer = null;
break;
}
case CLASS_DESCRIPTOR:
{
currentAttributeContainer = null;
break;
}
case JDBC_CONNECTION_DESCRIPTOR:
{
logger.debug(" < " + tags.getTagById(JDBC_CONNECTION_DESCRIPTOR));
m_CurrentJCD = null;
currentAttributeContainer = null;
break;
}
case CONNECTION_POOL:
{
logger.debug(" < " + tags.getTagById(CONNECTION_POOL));
currentAttributeContainer = m_CurrentJCD;
break;
}
case SEQUENCE_MANAGER:
{
if (isDebug) logger.debug(" < " + tags.getTagById(SEQUENCE_MANAGER));
// set to null at the end of the tag!!
this.currentSequenceDescriptor = null;
currentAttributeContainer = m_CurrentJCD;
break;
}
case OBJECT_CACHE:
{
if(currentAttributeContainer != null)
{
if (isDebug) logger.debug(" < " + tags.getTagById(OBJECT_CACHE));
// set to null or previous element level at the end of the tag!!
currentAttributeContainer = m_CurrentJCD;
}
break;
}
case ATTRIBUTE:
{
if(currentAttributeContainer != null)
{
if (isDebug) logger.debug(" < " + tags.getTagById(ATTRIBUTE));
}
break;
}
default :
{
// noop
}
}
}
catch (Exception ex)
{
logger.error(ex);
throw new PersistenceBrokerException(ex);
}
} |
python | def fs_r(self, percent=0.9, N=None):
"""Get the row factor scores (dimensionality-reduced representation),
choosing how many factors to retain, directly or based on the explained
variance.
'percent': The minimum variance that the retained factors are required
to explain (default: 90% = 0.9)
'N': The number of factors to retain. Overrides 'percent'.
If the rank is less than N, N is ignored.
"""
if not 0 <= percent <= 1:
raise ValueError("Percent should be a real number between 0 and 1.")
if N:
if not isinstance(N, (int, int64)) or N <= 0:
raise ValueError("N should be a positive integer.")
N = min(N, self.rank)
self.k = 1 + flatnonzero(cumsum(self.L) >= sum(self.L)*percent)[0]
# S = zeros((self._numitems, self.k))
# the sign of the square root can be either way; singular value vs. eigenvalue
# fill_diagonal(S, -sqrt(self.E) if self.cor else self.s)
num2ret = N if N else self.k
s = -sqrt(self.L) if self.cor else self.s
S = diagsvd(s[:num2ret], self._numitems, num2ret)
self.F = self.D_r.dot(self.P).dot(S)
return self.F |
java | @XmlElementDecl(namespace = "http://www.w3.org/1998/Math/MathML", name = "notin")
public JAXBElement<NotinType> createNotin(NotinType value) {
return new JAXBElement<NotinType>(_Notin_QNAME, NotinType.class, null, value);
} |
java | static void requestOverlay(Source source) {
Intent intent = new Intent(source.getContext(), BridgeActivity.class);
intent.putExtra(KEY_TYPE, BridgeRequest.TYPE_OVERLAY);
source.startActivity(intent);
} |
python | def corenlp2naf(xml_bytes, annotators):
"""
Parse CoreNLP XML output and return a NAF object
"""
naf = KafNafParser(type="NAF")
try:
doc = Document(xml_bytes)
except:
log.exception("Error on parsing xml")
raise
terms = {} # (xml_sentid, xml_tokenid) : term
for sent in doc.sentences:
for t in sent.tokens:
wf = naf.create_wf(t.word, sent.id, t.character_offset_begin)
term = naf.create_term(t.lemma, POSMAP[t.pos], t.pos, [wf])
terms[sent.id, t.id] = term
if t.ner not in (None, 'O'):
naf.create_entity(t.ner, [term.get_id()])
if sent.collapsed_ccprocessed_dependencies:
dependencies = True
for dep in sent.collapsed_ccprocessed_dependencies.links:
if dep.type != 'root':
child = terms[sent.id, dep.dependent.idx]
parent = terms[sent.id, dep.governor.idx]
comment = "{t}({o}, {s})".format(s=child.get_lemma(), t=dep.type, o=parent.get_lemma())
naf.create_dependency(child.get_id(), parent.get_id(), dep.type, comment=comment)
if doc.coreferences:
for coref in doc.coreferences:
cterms = set()
for m in coref.mentions:
cterms |= {terms[m.sentence.id, t.id].get_id() for t in m.tokens}
naf.create_coreference("term", cterms)
for annotator in annotators:
if annotator in LAYERMAP:
naf.create_linguistic_processor(LAYERMAP[annotator], "CoreNLP {annotator}".format(**locals()),
get_corenlp_version())
s = BytesIO()
naf.dump(s)
return s.getvalue() |
python | def clean(feed: "Feed") -> "Feed":
"""
Apply
#. :func:`clean_ids`
#. :func:`clean_times`
#. :func:`clean_route_short_names`
#. :func:`drop_zombies`
to the given "Feed" in that order.
Return the resulting "Feed".
"""
feed = feed.copy()
ops = [
"clean_ids",
"clean_times",
"clean_route_short_names",
"drop_zombies",
]
for op in ops:
feed = globals()[op](feed)
return feed |
java | public List<IoDevice> getAvailableIoDevices() {
List<IoDevice> returnList = new ArrayList<IoDevice>();
for (PriorityIoDeviceTuple compatibleIoDeviceTuple : mCompatibleDevices) {
IoDevice compatibleIoDevice = compatibleIoDeviceTuple.getIoDevice();
if (compatibleIoDevice.equals(mIODevice)) {
returnList.add(mIODevice);
} else {
IoDevice ioDevice = mCursorManager.getIoDevice(compatibleIoDevice);
if (ioDevice != null) {
returnList.add(ioDevice);
}
}
}
return returnList;
} |
python | def update_file(dk_api, kitchen, recipe_name, message, files_to_update_param):
"""
Returns a string.
:param dk_api: -- api object
:param kitchen: string
:param recipe_name: string -- recipe name
:param message: string -- commit message
:param files_to_update_param: string -- file system directory where the recipe file lives
:rtype: string
"""
rc = DKReturnCode()
if kitchen is None or recipe_name is None or message is None or files_to_update_param is None:
s = 'ERROR: DKCloudCommandRunner bad input parameters'
rc.set(rc.DK_FAIL, s)
return rc
# Take a simple string or an array
if isinstance(files_to_update_param, basestring):
files_to_update = [files_to_update_param]
else:
files_to_update = files_to_update_param
msg = ''
for file_to_update in files_to_update:
try:
with open(file_to_update, 'r') as f:
file_contents = f.read()
except IOError as e:
if len(msg) != 0:
msg += '\n'
msg += '%s' % (str(e))
rc.set(rc.DK_FAIL, msg)
return rc
except ValueError as e:
if len(msg) != 0:
msg += '\n'
msg += 'ERROR: %s' % e.message
rc.set(rc.DK_FAIL, msg)
return rc
rc = dk_api.update_file(kitchen, recipe_name, message, file_to_update, file_contents)
if not rc.ok():
if len(msg) != 0:
msg += '\n'
msg += 'DKCloudCommand.update_file for %s failed\n\tmessage: %s' % (file_to_update, rc.get_message())
rc.set_message(msg)
return rc
else:
if len(msg) != 0:
msg += '\n'
msg += 'DKCloudCommand.update_file for %s succeeded' % file_to_update
rc.set_message(msg)
return rc |
java | static boolean containsMatch(String actual, String regex) {
return Pattern.compile(regex).matcher(actual).find();
} |
python | def min(self):
"""
The minimum integer value of a value-set. It is only defined when there is exactly one region.
:return: An integer that represents the minimum integer value of this value-set.
:rtype: int
"""
if len(self.regions) != 1:
raise ClaripyVSAOperationError("'min()' only works on single-region value-sets.")
return self.get_si(next(iter(self.regions))).min |
python | def list(self, request, *args, **kwargs):
"""
To list all services without regard to its type, run **GET** against */api/services/* as an authenticated user.
To list services of specific type issue **GET** to specific endpoint from a list above as a customer owner.
Individual endpoint used for every service type.
To create a service, issue a **POST** to specific endpoint from a list above as a customer owner.
Individual endpoint used for every service type.
You can create service based on shared service settings. Example:
.. code-block:: http
POST /api/digitalocean/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4
Host: example.com
{
"name": "Common DigitalOcean",
"customer": "http://example.com/api/customers/1040561ca9e046d2b74268600c7e1105/",
"settings": "http://example.com/api/service-settings/93ba615d6111466ebe3f792669059cb4/"
}
Or provide your own credentials. Example:
.. code-block:: http
POST /api/oracle/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4
Host: example.com
{
"name": "My Oracle",
"customer": "http://example.com/api/customers/1040561ca9e046d2b74268600c7e1105/",
"backend_url": "https://oracle.example.com:7802/em",
"username": "admin",
"password": "secret"
}
"""
return super(BaseServiceViewSet, self).list(request, *args, **kwargs) |
java | public TimeZoneRule[] getTimeZoneRules(long start) {
TimeZoneRule[] all = getTimeZoneRules();
TimeZoneTransition tzt = getPreviousTransition(start, true);
if (tzt == null) {
// No need to filter out rules only applicable to time before the start
return all;
}
BitSet isProcessed = new BitSet(all.length);
List<TimeZoneRule> filteredRules = new LinkedList<TimeZoneRule>();
// Create initial rule
TimeZoneRule initial = new InitialTimeZoneRule(tzt.getTo().getName(),
tzt.getTo().getRawOffset(), tzt.getTo().getDSTSavings());
filteredRules.add(initial);
isProcessed.set(0);
// Mark rules which do not need to be processed
for (int i = 1; i < all.length; i++) {
Date d = all[i].getNextStart(start, initial.getRawOffset(),
initial.getDSTSavings(), false);
if (d == null) {
isProcessed.set(i);
}
}
long time = start;
boolean bFinalStd = false, bFinalDst = false;
while(!bFinalStd || !bFinalDst) {
tzt = getNextTransition(time, false);
if (tzt == null) {
break;
}
time = tzt.getTime();
TimeZoneRule toRule = tzt.getTo();
int ruleIdx = 1;
for (; ruleIdx < all.length; ruleIdx++) {
if (all[ruleIdx].equals(toRule)) {
break;
}
}
if (ruleIdx >= all.length) {
throw new IllegalStateException("The rule was not found");
}
if (isProcessed.get(ruleIdx)) {
continue;
}
if (toRule instanceof TimeArrayTimeZoneRule) {
TimeArrayTimeZoneRule tar = (TimeArrayTimeZoneRule)toRule;
// Get the previous raw offset and DST savings before the very first start time
long t = start;
while(true) {
tzt = getNextTransition(t, false);
if (tzt == null) {
break;
}
if (tzt.getTo().equals(tar)) {
break;
}
t = tzt.getTime();
}
if (tzt != null) {
// Check if the entire start times to be added
Date firstStart = tar.getFirstStart(tzt.getFrom().getRawOffset(),
tzt.getFrom().getDSTSavings());
if (firstStart.getTime() > start) {
// Just add the rule as is
filteredRules.add(tar);
} else {
// Collect transitions after the start time
long[] times = tar.getStartTimes();
int timeType = tar.getTimeType();
int idx;
for (idx = 0; idx < times.length; idx++) {
t = times[idx];
if (timeType == DateTimeRule.STANDARD_TIME) {
t -= tzt.getFrom().getRawOffset();
}
if (timeType == DateTimeRule.WALL_TIME) {
t -= tzt.getFrom().getDSTSavings();
}
if (t > start) {
break;
}
}
int asize = times.length - idx;
if (asize > 0) {
long[] newtimes = new long[asize];
System.arraycopy(times, idx, newtimes, 0, asize);
TimeArrayTimeZoneRule newtar = new TimeArrayTimeZoneRule(
tar.getName(), tar.getRawOffset(), tar.getDSTSavings(),
newtimes, tar.getTimeType());
filteredRules.add(newtar);
}
}
}
} else if (toRule instanceof AnnualTimeZoneRule) {
AnnualTimeZoneRule ar = (AnnualTimeZoneRule)toRule;
Date firstStart = ar.getFirstStart(tzt.getFrom().getRawOffset(),
tzt.getFrom().getDSTSavings());
if (firstStart.getTime() == tzt.getTime()) {
// Just add the rule as is
filteredRules.add(ar);
} else {
// Calculate the transition year
int[] dfields = new int[6];
Grego.timeToFields(tzt.getTime(), dfields);
// Recreate the rule
AnnualTimeZoneRule newar = new AnnualTimeZoneRule(ar.getName(),
ar.getRawOffset(), ar.getDSTSavings(),
ar.getRule(), dfields[0], ar.getEndYear());
filteredRules.add(newar);
}
// Check if this is a final rule
if (ar.getEndYear() == AnnualTimeZoneRule.MAX_YEAR) {
// After both final standard and dst rule are processed,
// exit this while loop.
if (ar.getDSTSavings() == 0) {
bFinalStd = true;
} else {
bFinalDst = true;
}
}
}
isProcessed.set(ruleIdx);
}
TimeZoneRule[] rules = filteredRules.toArray(new TimeZoneRule[filteredRules.size()]);
return rules;
} |
python | def create(entropy_coefficient, value_coefficient, max_grad_norm, discount_factor, gae_lambda=1.0):
""" Vel factory function """
return A2CPolicyGradient(
entropy_coefficient,
value_coefficient,
max_grad_norm,
discount_factor,
gae_lambda
) |
java | protected Path downloadPlugin(String id, String version) throws PluginException {
try {
PluginRelease release = findReleaseForPlugin(id, version);
Path downloaded = getFileDownloader(id).downloadFile(new URL(release.url));
getFileVerifier(id).verify(new FileVerifier.Context(id, release), downloaded);
return downloaded;
} catch (IOException e) {
throw new PluginException(e, "Error during download of plugin {}", id);
}
} |
java | public void simpleLogout(HttpServletRequest req, HttpServletResponse res) {
createSubjectAndPushItOnThreadAsNeeded(req, res);
AuthenticationResult authResult = new AuthenticationResult(AuthResult.SUCCESS, subjectManager.getCallerSubject());
authResult.setAuditCredType(req.getAuthType());
authResult.setAuditOutcome(AuditEvent.OUTCOME_SUCCESS);
Audit.audit(Audit.EventID.SECURITY_API_AUTHN_TERMINATE_01, req, authResult, Integer.valueOf(res.getStatus()));
removeEntryFromAuthCacheForUser(req, res);
invalidateSession(req);
ssoCookieHelper.removeSSOCookieFromResponse(res);
ssoCookieHelper.createLogoutCookies(req, res);
subjectManager.clearSubjects();
} |
python | def _continuous_colormap(hue, cmap, vmin, vmax):
"""
Creates a continuous colormap.
Parameters
----------
hue : iterable
The data column whose entries are being discretely colorized. Note that although top-level plotter ``hue``
parameters ingest many argument signatures, not just iterables, they are all preprocessed to standardized
iterables before this method is called.
cmap : ``matplotlib.cm`` instance
The `matplotlib` colormap instance which will be used to colorize the geometries.
vmin : float
A strict floor on the value associated with the "bottom" of the colormap spectrum. Data column entries whose
value is below this level will all be colored by the same threshold value. The value for this variable is
meant to be inherited from the top-level variable of the same name.
vmax : float
A strict ceiling on the value associated with the "top" of the colormap spectrum. Data column entries whose
value is above this level will all be colored by the same threshold value. The value for this variable is
meant to be inherited from the top-level variable of the same name.
Returns
-------
cmap : ``mpl.cm.ScalarMappable`` instance
A normalized scalar version of the input ``cmap`` which has been fitted to the data and inputs.
"""
mn = min(hue) if vmin is None else vmin
mx = max(hue) if vmax is None else vmax
norm = mpl.colors.Normalize(vmin=mn, vmax=mx)
return mpl.cm.ScalarMappable(norm=norm, cmap=cmap) |
python | def absolute(requestContext, seriesList):
"""
Takes one metric or a wildcard seriesList and applies the mathematical abs
function to each datapoint transforming it to its absolute value.
Example::
&target=absolute(Server.instance01.threads.busy)
&target=absolute(Server.instance*.threads.busy)
"""
for series in seriesList:
series.name = "absolute(%s)" % (series.name)
series.pathExpression = series.name
for i, value in enumerate(series):
series[i] = safeAbs(value)
return seriesList |
python | def fromJSON(cls, jdata):
"""
Generates a new column from the given json data. This should
be already loaded into a Python dictionary, not a JSON string.
:param jdata | <dict>
:return <orb.Column> || None
"""
cls_type = jdata.get('type')
col_cls = cls.byName(cls_type)
if not col_cls:
raise orb.errors.ColumnTypeNotFound(cls_type)
else:
col = col_cls()
col.loadJSON(jdata)
return col |
java | private void addConstructorsForBeanWithPrivateConstructors(ClassFile proxyClassType) {
ClassMethod ctor = proxyClassType.addMethod(AccessFlag.PUBLIC, INIT_METHOD_NAME, BytecodeUtils.VOID_CLASS_DESCRIPTOR, LJAVA_LANG_BYTE);
CodeAttribute b = ctor.getCodeAttribute();
b.aload(0);
b.aconstNull();
b.aconstNull();
b.invokespecial(proxyClassType.getName(), INIT_METHOD_NAME, "(" + LJAVA_LANG_BYTE + LJAVA_LANG_BYTE + ")" + BytecodeUtils.VOID_CLASS_DESCRIPTOR);
b.returnInstruction();
ctor = proxyClassType.addMethod(AccessFlag.PUBLIC, INIT_METHOD_NAME, BytecodeUtils.VOID_CLASS_DESCRIPTOR, LJAVA_LANG_BYTE, LJAVA_LANG_BYTE);
b = ctor.getCodeAttribute();
b.aload(0);
b.aconstNull();
b.invokespecial(proxyClassType.getName(), INIT_METHOD_NAME, "(" + LJAVA_LANG_BYTE + ")" + BytecodeUtils.VOID_CLASS_DESCRIPTOR);
b.returnInstruction();
} |
java | private void get(final StringBuilder sql) {
sql.append("SELECT * FROM ").append(getName()).append(" WHERE ").append(JdbcRepositories.getDefaultKeyName()).append(" = ?");
} |
java | public static protocolhttp_stats get(nitro_service service, options option) throws Exception{
protocolhttp_stats obj = new protocolhttp_stats();
protocolhttp_stats[] response = (protocolhttp_stats[])obj.stat_resources(service,option);
return response[0];
} |
java | public static void i(Object source, String format, Object... args) {
log(source, Log.INFO, format, args);
} |
python | def MakeExponentialPmf(lam, high, n=200):
"""Makes a PMF discrete approx to an exponential distribution.
lam: parameter lambda in events per unit time
high: upper bound
n: number of values in the Pmf
returns: normalized Pmf
"""
pmf = Pmf()
for x in numpy.linspace(0, high, n):
p = EvalExponentialPdf(x, lam)
pmf.Set(x, p)
pmf.Normalize()
return pmf |
java | public PauseAppStreamResponse pauseAppStream(String app, String stream) {
PauseAppStreamRequest pauseAppStreamRequest = new PauseAppStreamRequest();
pauseAppStreamRequest.setApp(app);
pauseAppStreamRequest.setStream(stream);
return pauseAppStream(pauseAppStreamRequest);
} |
java | @Nonnull
public final LTriBoolFunction<R> build() {
final LTriBoolFunction<R> eventuallyFinal = this.eventually;
LTriBoolFunction<R> retval;
final Case<LLogicalTernaryOperator, LTriBoolFunction<R>>[] casesArray = cases.toArray(new Case[cases.size()]);
retval = LTriBoolFunction.<R> triBoolFunc((a1, a2, a3) -> {
try {
for (Case<LLogicalTernaryOperator, LTriBoolFunction<R>> aCase : casesArray) {
if (aCase.casePredicate().apply(a1, a2, a3)) {
return aCase.caseFunction().apply(a1, a2, a3);
}
}
return eventuallyFinal.apply(a1, a2, a3);
} catch (Error e) { // NOSONAR
throw e;
} catch (Throwable e) { // NOSONAR
throw Handler.handleOrPropagate(e, handling);
}
});
if (consumer != null) {
consumer.accept(retval);
}
return retval;
} |
java | public void setIdentifier(String newIdentifier) {
String oldIdentifier = identifier;
identifier = newIdentifier;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, DroolsPackage.GLOBAL_TYPE__IDENTIFIER, oldIdentifier, identifier));
} |
java | public NodeIterator selectNodeIterator(
Node contextNode, String str, Node namespaceNode)
throws TransformerException
{
// Execute the XPath, and have it return the result
XObject list = eval(contextNode, str, namespaceNode);
// Have the XObject return its result as a NodeSetDTM.
return list.nodeset();
} |
python | def add(self, operator):
'''Add an operator to the Slicer
Parameters
----------
operator : Scope (TaskTransformer or FeatureExtractor)
The new operator to add
'''
if not isinstance(operator, Scope):
raise ParameterError('Operator {} must be a TaskTransformer '
'or FeatureExtractor'.format(operator))
for key in operator.fields:
self._time[key] = []
# We add 1 to the dimension here to account for batching
for tdim, idx in enumerate(operator.fields[key].shape, 1):
if idx is None:
self._time[key].append(tdim) |
java | public JavaFile createPojo(final String name,
final JCTree.JCLiteral literal,
final List<VariableAndValue> usedVariables) throws PackageNameException {
String eventName;
String packageName;
if (name != null) {
//get event name and package name from qualified name
final String[] split = name.split("\\.");
eventName = split[split.length-1];
checkStringIsValidName(eventName);
final StringBuffer stringBuffer = new StringBuffer();
for (int i = 0; i < split.length - 1; i++) {
if (i != 0) {
stringBuffer.append(".");
}
stringBuffer.append(split[i]);
checkStringIsValidName(split[i]);
}
packageName = stringBuffer.toString();
//check that packageName does not contain java keyword
}
else {
eventName = "Event" + hash(literal.getValue().toString());
packageName = generatedEventsPackage;
}
final TypeSpec.Builder classBuilder = TypeSpec.classBuilder(eventName)
.addModifiers(Modifier.PUBLIC)
.superclass(TypeName.get(LoggingEvent.class));
final MethodSpec.Builder constructorBuilder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC);
addCommonLoggingEventFieldsToConstructor(constructorBuilder);
for (VariableAndValue variableAndValue : usedVariables) {
addPojoField(classBuilder, constructorBuilder, variableAndValue.getVariable().getName().toString(), TypeName.get(variableAndValue.getVariable().getType()));
}
final TypeSpec build = classBuilder.addMethod(constructorBuilder.build()).build();
return JavaFile.builder(packageName, build).build();
} |
python | def python_version_matchers():
"""Return set of string representations of current python version"""
version = sys.version_info
patterns = [
"{0}",
"{0}{1}",
"{0}.{1}",
]
matchers = [
pattern.format(*version)
for pattern in patterns
] + [None]
return set(matchers) |
java | public void readFields(DataInput in) throws IOException {
name = UTF8.readString(in);
storageID = UTF8.readString(in);
// the infoPort read could be negative, if the port is a large number (more
// than 15 bits in storage size, but less than 16 bits).
// So chop off the first two bytes (and hence the signed bits) before
// setting the field.
this.infoPort = in.readShort() & 0x0000ffff;
} |
java | public void createLinks(ServiceReference<S> declarationBinderRef) {
for (D declaration : linkerManagement.getMatchedDeclaration()) {
if (linkerManagement.canBeLinked(declaration, declarationBinderRef)) {
linkerManagement.link(declaration, declarationBinderRef);
}
}
} |
java | private static void omitEdge(int[] edges, int[] idx, int[] sizes, int omit) {
for(int i = 0; i < idx.length; i++) {
idx[i] = i;
}
Arrays.fill(sizes, 1);
for(int i = 0, j = 0, e = edges.length - 1; j < e; i++, j += 2) {
if(i == omit) {
continue;
}
int ea = edges[j + 1], eb = edges[j];
if(eb < ea) { // Swap
int tmp = eb;
eb = ea;
ea = tmp;
}
final int pa = follow(ea, idx), pb = follow(eb, idx);
assert (pa != pb) : "Must be disjoint - MST inconsistent.";
sizes[idx[pa]] += sizes[idx[pb]];
idx[pb] = idx[pa];
}
} |
java | protected Features getFeaturesFromRequestEncoded(HttpServletRequest request, Features defaultFeatures) throws IOException {
final String methodName = "getFeaturesFromRequestEncoded"; //$NON-NLS-1$
boolean traceLogging = log.isLoggable(Level.FINER);
if (traceLogging) {
log.entering(sourceClass, methodName, new Object[]{request});
}
if (depsInitialized == null) {
if (traceLogging) {
log.finer("No initialization semphore"); //$NON-NLS-1$
log.exiting(sourceClass, methodName);
}
return null;
}
String encoded = request.getParameter(ENCODED_FEATURE_MAP_REQPARAM);
if (encoded == null) {
if (traceLogging) {
log.finer(ENCODED_FEATURE_MAP_REQPARAM + " param not specified in request"); //$NON-NLS-1$
log.exiting(sourceClass, methodName);
}
return null;
}
if (traceLogging) {
log.finer(ENCODED_FEATURE_MAP_REQPARAM + " param = " + encoded); //$NON-NLS-1$
}
byte[] decoded = Base64.decodeBase64(encoded);
int len = dependentFeatures.size();
ByteArrayOutputStream bos = new ByteArrayOutputStream(len);
// Validate the input - first two bytes specify length of feature list on the client
if (len != (decoded[0]&0xFF)+((decoded[1]&0xFF)<< 8) || decoded.length != len/5 + (len%5==0?0:1) + 2) {
if (log.isLoggable(Level.FINER)) {
log.finer("Invalid encoded feature list. Expected feature list length = " + len); //$NON-NLS-1$
}
throw new BadRequestException("Invalid encoded feature list"); //$NON-NLS-1$
}
// Now decode the trit map
for (int i = 2; i < decoded.length; i++) {
int q = decoded[i] & 0xFF;
for (int j = 0; j < 5 && (i-2)*5+j < len; j++) {
bos.write(q % 3);
q = q / 3;
}
}
Features result = new Features(defaultFeatures);
int i = 0;
for (byte b : bos.toByteArray()) {
if (b < 2) {
result.put(dependentFeatures.get(i), b == 1);
}
i++;
}
if (traceLogging) {
log.exiting(sourceClass, methodName, result);
}
return result;
} |
python | def reset_catalog():
'''
.. versionadded:: 2016.3.0
Reset the Software Update Catalog to the default.
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' softwareupdates.reset_catalog
'''
# This command always returns an error code, though it completes
# successfully. Success will be determined by making sure get_catalog
# returns 'Default'
cmd = ['softwareupdate', '--clear-catalog']
try:
salt.utils.mac_utils.execute_return_success(cmd)
except CommandExecutionError as exc:
pass
return get_catalog() == 'Default' |