language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java
|
/**
 * Formats a time using the locale-specific style requested.
 *
 * @param self      the time to format
 * @param timeStyle the localized formatter style to apply
 * @return the formatted time string
 */
public static String format(final LocalTime self, FormatStyle timeStyle) {
    final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedTime(timeStyle);
    return formatter.format(self);
}
|
java
|
/**
 * Sets the repeat count of every request/response pairing for the given
 * profile and client to -1 (unlimited repeats).
 *
 * @param profileId   id of the profile whose pairings are updated
 * @param client_uuid client UUID scoping the update
 */
public void makeAllRepeatUnlimited(int profileId, String client_uuid) {
    // try-with-resources closes both the connection and the statement even on
    // error, replacing the old finally block with its swallowed close failure.
    try (Connection sqlConnection = sqlService.getConnection();
         PreparedStatement statement = sqlConnection.prepareStatement(
             "UPDATE " + Constants.DB_TABLE_REQUEST_RESPONSE +
             " SET " + Constants.REQUEST_RESPONSE_REPEAT_NUMBER + " = ?" +
             " WHERE " + Constants.GENERIC_PROFILE_ID + " = ?" +
             " AND " + Constants.GENERIC_CLIENT_UUID + " = ?")) {
        statement.setInt(1, -1); // -1 encodes "repeat forever"
        statement.setInt(2, profileId);
        statement.setString(3, client_uuid);
        statement.executeUpdate();
    } catch (SQLException e) {
        // Preserve original best-effort behavior: log and continue.
        e.printStackTrace();
    }
}
|
python
|
def setActivities(self, *args, **kwargs):
    """Adds the activities for this group to an 'activities' field.

    Activities are MambuActivity objects, sorted by activity timestamp.
    Returns the number of requests done to Mambu.
    """
    def _timestamp(act):
        """Sort key: the activity's timestamp, or None when absent."""
        try:
            return act['activity']['timestamp']
        except KeyError:
            return None

    try:
        activities = self.mambuactivitiesclass(
            groupId=self['encodedKey'], *args, **kwargs)
    except AttributeError:
        # The activities class is resolved lazily on first use.
        from .mambuactivity import MambuActivities
        self.mambuactivitiesclass = MambuActivities
        activities = self.mambuactivitiesclass(
            groupId=self['encodedKey'], *args, **kwargs)

    activities.attrs = sorted(activities.attrs, key=_timestamp)
    self['activities'] = activities
    return 1
|
python
|
def send_exit_status(self, status):
    """
    Send the exit status of an executed command to the client.  (This
    really only makes sense in server mode.)  Many clients expect some
    sort of status code back from an executed command once it completes.

    @param status: the exit code of the process
    @type status: int

    @since: 1.2
    """
    # The channel will often no longer be open on the far side; that's fine.
    msg = Message()
    msg.add_byte(chr(MSG_CHANNEL_REQUEST))
    msg.add_int(self.remote_chanid)
    msg.add_string('exit-status')
    msg.add_boolean(False)  # no reply requested
    msg.add_int(status)
    self.transport._send_user_message(msg)
|
java
|
/**
 * Renders a DOT graph read from {@code r} inside a dialog with a File menu
 * (save PNG/DOT, close) and ESC-to-close handling.
 *
 * @param r     reader supplying the DOT source
 * @param modal whether the dialog blocks the caller until closed
 */
public static void renderDOT(Reader r, boolean modal) {
    final DOTComponent cmp = createDOTComponent(r);
    if (cmp == null) {
        return;
    }
    final JDialog frame = new JDialog((Dialog) null, modal);
    JScrollPane scrollPane = new JScrollPane(cmp);
    frame.setContentPane(scrollPane);
    frame.setMaximumSize(new Dimension(MAX_WIDTH, MAX_HEIGHT));
    JMenu menu = new JMenu("File");
    menu.add(cmp.getSavePngAction());
    menu.add(cmp.getSaveDotAction());
    menu.addSeparator();
    menu.add(new AbstractAction("Close") {
        private static final long serialVersionUID = -1L;
        @Override
        public void actionPerformed(ActionEvent e) {
            frame.dispatchEvent(new WindowEvent(frame, WindowEvent.WINDOW_CLOSING));
        }
    });
    JMenuBar menuBar = new JMenuBar();
    menuBar.add(menu);
    frame.setJMenuBar(menuBar);
    frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
    frame.addKeyListener(new KeyAdapter() {
        @Override
        public void keyTyped(KeyEvent e) {
            if (e.getKeyChar() == KeyEvent.VK_ESCAPE) {
                frame.dispatchEvent(new WindowEvent(frame, WindowEvent.WINDOW_CLOSING));
            }
        }
    });
    // pack() only after the menu bar is attached so the packed size includes it.
    frame.pack();
    // setVisible(true) must come last: for a modal dialog it blocks, and in
    // the old ordering the key listener was installed only after the dialog
    // had already been closed.
    frame.setVisible(true);
}
|
python
|
def ConsultarLocalidades(self, cod_provincia, sep="||"):
    """Query the localities enabled for the given province."""
    respuesta = self.client.consultarLocalidadesPorProvincia(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
        solicitud={'codProvincia': cod_provincia},
    )['respuesta']
    self.__analizar_errores(respuesta)
    localidades = respuesta.get('localidad', [])
    if sep is None:
        # Map each locality code to its description.
        return {it['codigo']: it['descripcion'] for it in localidades}
    # One "SEP code SEP description SEP" string per locality.
    return ["%s %s %s %s %s" % (sep, it['codigo'], sep, it['descripcion'], sep)
            for it in localidades]
|
python
|
def set_desktop_for_window(self, window, desktop):
    """
    Move a window to another desktop.
    Uses _NET_WM_DESKTOP of the EWMH spec.

    :param window: the window to move
    :param desktop: the desktop destination for the window
    """
    _libxdo.xdo_set_desktop_for_window(self._xdo, window, desktop)
|
java
|
/**
 * Creates a new gallery via flickr.galleries.create.
 *
 * @param title          gallery title (validated as required)
 * @param description    gallery description (validated as required)
 * @param primaryPhotoId optional id of the gallery's primary photo
 * @param fullResult     when true, the "full_result" flag is sent
 * @return info describing the created gallery
 * @throws JinxException on validation or API failure
 */
public GalleryInfo create(String title, String description, String primaryPhotoId, boolean fullResult) throws JinxException {
    JinxUtils.validateParams(title, description);
    Map<String, String> arguments = new TreeMap<>();
    arguments.put("method", "flickr.galleries.create");
    arguments.put("title", title);
    arguments.put("description", description);
    if (!JinxUtils.isNullOrEmpty(primaryPhotoId)) {
        arguments.put("primary_photo_id", primaryPhotoId);
    }
    if (fullResult) {
        arguments.put("full_result", "1");
    }
    return jinx.flickrPost(arguments, GalleryInfo.class);
}
|
java
|
/**
 * Removes every commerce price list whose UUID matches.
 *
 * @param uuid the uuid of the price lists to delete
 */
@Override
public void removeByUuid(String uuid) {
    // Fetch all matches (no pagination, no ordering) and delete each one.
    for (CommercePriceList priceList :
            findByUuid(uuid, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
        remove(priceList);
    }
}
|
java
|
/**
 * Converts a polyline to a line string by delegating to the point-list
 * overload.
 *
 * @param polyline the polyline whose points are converted
 * @param hasZ     flag forwarded to the point-list overload (Z values)
 * @param hasM     flag forwarded to the point-list overload (M values)
 * @return the converted line string
 */
public LineString toLineString(Polyline polyline, boolean hasZ, boolean hasM) {
    return toLineString(polyline.getPoints(), hasZ, hasM);
}
|
java
|
// Declarative Services bind method, called for each EndpointActivationService
// that appears (dynamic policy, multiple cardinality).
@Reference(name = REFERENCE_ENDPOINT_ACTIVATION_SERVICES,
           service = EndpointActivationService.class,
           policy = ReferencePolicy.DYNAMIC,
           cardinality = ReferenceCardinality.MULTIPLE)
protected synchronized void addEndPointActivationService(ServiceReference<EndpointActivationService> reference) {
    // The activation-spec config id identifies which bookkeeping record this
    // service belongs to.
    String activationSvcId = (String) reference.getProperty(ACT_SPEC_CFG_ID);
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "activationSvcId : " + activationSvcId);
    }
    EndpointActivationServiceInfo easInfo = createEndpointActivationServiceInfo(activationSvcId);
    // Deactivate any endpoints that were using the old service.
    if (easInfo.service != null) {
        deactivateEndpoints(easInfo.endpointFactories);
    }
    // Activate any endpoints with the new service.
    easInfo.setReference(reference);
    activateDeferredEndpoints(easInfo.endpointFactories);
}
|
java
|
/**
 * Returns the rels before and after the given rel within the set of rels
 * matching the CP rule id, in the given order.
 *
 * @param CPRuleUserSegmentRelId the primary key of the current rel
 * @param CPRuleId               the CP rule id to filter by
 * @param orderByComparator      the comparator ordering the set
 * @return the previous and next rels
 * @throws com.liferay.commerce.product.exception.NoSuchCPRuleUserSegmentRelException
 *         if no rel with the primary key could be found
 */
public static CPRuleUserSegmentRel[] findByCPRuleId_PrevAndNext(
        long CPRuleUserSegmentRelId, long CPRuleId,
        OrderByComparator<CPRuleUserSegmentRel> orderByComparator)
        throws com.liferay.commerce.product.exception.NoSuchCPRuleUserSegmentRelException {
    return getPersistence().findByCPRuleId_PrevAndNext(
        CPRuleUserSegmentRelId, CPRuleId, orderByComparator);
}
|
java
|
/**
 * Builds a cluster configuration from one local replica plus any number of
 * remote replicas.
 *
 * @param local   the local replica (required)
 * @param remotes zero or more remote replicas
 * @return the assembled cluster configuration
 */
public static @Nonnull ClusterConfig from(@Nonnull HttpReplica local, HttpReplica... remotes) {
    final HttpClusterConfig config = new HttpClusterConfig(local, remotes);
    return config;
}
|
java
|
/**
 * Merges two property sets: every entry of {@code props1} is kept, and an
 * entry of {@code props2} is added only when its key is absent from
 * {@code props1} and its value is non-empty.
 *
 * @param props1 properties that always win
 * @param props2 properties merged in only where props1 has no entry
 * @return a new Properties instance; neither input is modified
 */
public static Properties mergeAndSkipExisting(Properties props1, Properties props2) {
    Properties merged = new Properties();
    merged.putAll(props1);
    for (String key : props2.stringPropertyNames()) {
        String value = props2.getProperty(key); // hoisted: was looked up twice
        // Plain null/empty check replaces the Guava Strings.isNullOrEmpty call.
        if (!props1.containsKey(key) && value != null && !value.isEmpty()) {
            merged.put(key, value);
        }
    }
    return merged;
}
|
python
|
def scaleBy(self, value, origin=None):
    """
    Scale the object.

    >>> obj.scaleBy(2.0)
    >>> obj.scaleBy((0.5, 2.0), origin=(500, 500))

    **value** must be an iterable containing two
    :ref:`type-int-float` values defining the x and y
    values to scale the object by. **origin** defines the
    point at which the scale should originate. It must be
    a :ref:`type-coordinate` or ``None``. The default is
    ``(0, 0)``.
    """
    value = normalizers.normalizeTransformationScale(value)
    if origin is None:
        origin = (0, 0)
    origin = normalizers.normalizeCoordinateTuple(origin)
    self._scaleBy(value, origin=origin)
|
python
|
def render_to_string(self, template, context=None, def_name=None, subdir='templates'):
    '''App-specific render function: renders a template from the *current app*, attached to the request for convenience.'''
    adapter = self.get_template_loader(subdir).get_template(template)
    return adapter.render(context=context, request=self.request, def_name=def_name)
|
java
|
/**
 * Returns the index of every row, i.e. the array {0, 1, ..., rowCount()-1}.
 *
 * @return a new array holding each row index in order
 */
private int[] rows() {
    // Hoisted: rowCount() was previously re-evaluated on every loop
    // iteration, and a mid-loop change could desynchronize loop and array.
    final int count = rowCount();
    int[] rowIndexes = new int[count];
    for (int i = 0; i < count; i++) {
        rowIndexes[i] = i;
    }
    return rowIndexes;
}
|
java
|
/**
 * Nests this request inside {@code req} and recomputes the merged attribute
 * view: child attributes shadow identically-named parent attributes.
 */
void enclose(TaskRequest req) {
    parent = req;
    if (this.parent == null) {
        // No parent: the merged view is simply our own attributes.
        this.mergedAttributes = this.attributes;
    } else {
        final HashMap<String, Object> merged =
            new HashMap<String, Object>(this.parent.getAttributes());
        merged.putAll(this.attributes);
        this.mergedAttributes = merged;
    }
}
|
java
|
/**
 * Returns true when the token has been held long enough that a refresh is
 * due, i.e. its age is within REFRESH_EPSILON of the expiry window.
 *
 * @return true if the token should be refreshed
 */
public boolean needsRefresh() {
    this.refreshLock.readLock().lock();
    try {
        long tokenDuration = System.currentTimeMillis() - this.lastRefresh;
        return tokenDuration >= this.expires - REFRESH_EPSILON;
    } finally {
        // try/finally guarantees the read lock is released even if reading
        // the shared state throws (the old code could leak the lock).
        this.refreshLock.readLock().unlock();
    }
}
|
java
|
/**
 * Returns the context bound to the current thread, lazily creating and
 * caching one on first access.
 *
 * @return the thread-local RPC context, never null
 */
public static RpcInternalContext getContext() {
    RpcInternalContext ctx = LOCAL.get();
    if (ctx != null) {
        return ctx;
    }
    ctx = new RpcInternalContext();
    LOCAL.set(ctx);
    return ctx;
}
|
java
|
/**
 * Converts NV21 encoded image data into a 3-band interleaved F32 image.
 *
 * @param data   NV21 encoded pixel data
 * @param width  image width in pixels
 * @param height image height in pixels
 * @param output destination image, or null to allocate a new one
 * @return the converted image (the reshaped {@code output} when non-null)
 */
public static InterleavedF32 nv21ToInterleaved( byte[] data , int width , int height ,
                                                InterleavedF32 output ) {
    if (output != null) {
        output.reshape(width, height, 3);
    } else {
        output = new InterleavedF32(width, height, 3);
    }
    // Dispatch to the multi-threaded implementation when concurrency is on.
    if (BoofConcurrency.USE_CONCURRENT) {
        ImplConvertNV21_MT.nv21ToInterleaved_F32(data, output);
    } else {
        ImplConvertNV21.nv21ToInterleaved_F32(data, output);
    }
    return output;
}
|
python
|
def update_metadata_from_rmd_options(name, value, metadata):
    """
    Update metadata using the _BOOLEAN_OPTIONS_DICTIONARY mapping.

    :param name: option name
    :param value: option value
    :param metadata: metadata dict, updated in place
    :return: True when the option was recognised and applied
    """
    for jupyter_option, rmd_option, reverse in _BOOLEAN_OPTIONS_DICTIONARY:
        if name != rmd_option:
            continue
        try:
            metadata[jupyter_option] = _py_logical_values(value) != reverse
            return True
        except RLogicalValueError:
            # Value is not an R logical literal; keep scanning the mapping.
            pass
    return False
|
java
|
/**
 * Removes the object with the given storage id from the Solr index and
 * commits the deletion.
 *
 * @param oid storage id of the object to delete
 * @throws IndexerException when the Solr delete or commit fails
 */
@Override
public void remove(String oid) throws IndexerException {
    log.debug("Deleting " + oid + " from index");
    // Escape backslashes and double quotes so an oid containing '"' cannot
    // break out of the quoted term (malformed or injected query).
    String safeOid = oid.replace("\\", "\\\\").replace("\"", "\\\"");
    try {
        solr.deleteByQuery("storage_id:\"" + safeOid + "\"");
        solr.commit();
    } catch (SolrServerException sse) {
        throw new IndexerException(sse);
    } catch (IOException ioe) {
        throw new IndexerException(ioe);
    }
}
|
java
|
/**
 * Routes console/monitoring URLs to the console page; every other request
 * goes through the monitored filter chain.
 */
public final void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
    final HttpServletRequest request = (HttpServletRequest) servletRequest;
    final HttpServletResponse response = (HttpServletResponse) servletResponse;
    final String localPath = request.getRequestURI().substring(request.getContextPath().length());
    final boolean consoleRequest = consolePath != null
            && (localPath.equals(printTreePath) || localPath.startsWith(consolePath));
    if (consoleRequest) {
        consolePage(request, response, localPath);
        return;
    }
    doFilterWithMonitoring(filterChain, request, response);
}
|
python
|
def transform_annotation(self, ann, duration):
    '''Apply the vector transformation.

    Parameters
    ----------
    ann : jams.Annotation
        The input annotation
    duration : number > 0
        The duration of the track

    Returns
    -------
    data : dict
        data['vector'] : np.ndarray, shape=(dimension,)

    Raises
    ------
    DataError
        If the input dimension does not match
    '''
    _, values = ann.to_interval_values()
    vector = np.asarray(values[0], dtype=self.dtype)
    if len(vector) != self.dimension:
        # Fixed: '{0}'/'{1}' are positional argument references; the old
        # '{:0}'/'{:1}' were (accidental) format *specs*, not references.
        raise DataError('vector dimension({0}) '
                        '!= self.dimension({1})'
                        .format(len(vector), self.dimension))
    return {'vector': vector}
|
python
|
def logging_syslog_server_secure(self, **kwargs):
    """Auto Generated Code"""
    config = ET.Element("config")
    logging = ET.SubElement(config, "logging",
                            xmlns="urn:brocade.com:mgmt:brocade-ras")
    syslog_server = ET.SubElement(logging, "syslog-server")
    # Key leaves identifying the syslog server entry.
    ET.SubElement(syslog_server, "syslogip").text = kwargs.pop('syslogip')
    ET.SubElement(syslog_server, "use-vrf").text = kwargs.pop('use_vrf')
    # Empty presence element marking the server as "secure".
    ET.SubElement(syslog_server, "secure")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
python
|
def title(self, value=None):
    """Get or set the document's title from/in the metadata.

    With no argument the current title is returned; with a value the title
    is stored first and then returned through the same lookup path.
    """
    native = (self.metadatatype == "native")
    if value is not None:
        # Native metadata lives in the metadata mapping; otherwise the
        # title is kept on a private attribute.
        if native:
            self.metadata['title'] = value
        else:
            self._title = value
    if native:
        if 'title' in self.metadata:
            return self.metadata['title']
        return None
    return self._title
|
java
|
/**
 * Picks the next background compaction task: the first of the maximal tasks,
 * or null when compaction is disabled or nothing is pending.
 */
public synchronized AbstractCompactionTask getNextBackgroundTask(int gcBefore)
{
    if (!isEnabled())
        return null;
    Collection<AbstractCompactionTask> candidates = getMaximalTask(gcBefore);
    if (candidates == null || candidates.isEmpty())
        return null;
    return candidates.iterator().next();
}
|
python
|
def within_duration(events, time, limits):
    """Keep only the events whose duration lies within the given limits.

    Parameters
    ----------
    events : ndarray (dtype='int')
        N x M matrix with start sample first and end samples last on M
    time : ndarray (dtype='float')
        vector with time points
    limits : tuple of float
        low and high limit for spindle duration; either bound may be None
        to skip that check

    Returns
    -------
    ndarray (dtype='int')
        the rows of ``events`` whose duration is inside the limits
    """
    keep_low = ones(events.shape[0], dtype=bool)
    keep_high = ones(events.shape[0], dtype=bool)
    if limits[0] is not None or limits[1] is not None:
        # Duration = time at (end sample - 1) minus time at start sample.
        durations = time[events[:, -1] - 1] - time[events[:, 0]]
        if limits[0] is not None:
            keep_low = durations >= limits[0]
        if limits[1] is not None:
            keep_high = durations <= limits[1]
    return events[keep_low & keep_high, :]
|
java
|
/**
 * Loads the mean and standard-deviation arrays from the given binary files.
 *
 * @param mean file holding the serialized mean array
 * @param std  file holding the serialized standard-deviation array
 * @throws IOException when either file cannot be read
 */
public void load(File mean, File std) throws IOException {
    this.mean = Nd4j.readBinary(mean);
    this.std = Nd4j.readBinary(std);
}
|
python
|
def register_factory(self, bundle_context, factory):
    # type: (BundleContext, type) -> bool
    """
    Registers a manually created factory, using decorators programmatically.

    :param bundle_context: The factory bundle context
    :param factory: A manipulated class
    :return: True if the factory has been registered
    :raise ValueError: Invalid parameter, or factory already registered
    :raise TypeError: Invalid factory type (not a manipulated class)
    """
    if bundle_context is None or factory is None:
        # Invalid parameter: do nothing.
        raise ValueError("Invalid parameter")

    context = _set_factory_context(factory, bundle_context)
    if not context:
        raise TypeError("Not a manipulated class (no context found)")

    self._register_factory(context.name, factory, False)
    return True
|
java
|
/**
 * Returns a stream over the buffer's elements between its current position
 * and its limit. Uses absolute {@code get(int)}, so the buffer's own
 * position is not advanced.
 *
 * @param buf the buffer to stream
 * @return a stream of the buffer's remaining double values
 */
public static DoubleStreamEx of(java.nio.DoubleBuffer buf) {
    return IntStreamEx.range(buf.position(), buf.limit()).mapToDouble(buf::get);
}
|
java
|
/**
 * Computes the pseudo-inverse of {@code arr} via commons-math QR
 * decomposition.
 *
 * @param arr     the matrix to invert
 * @param inPlace when true, {@code arr} is also overwritten with the result
 * @return the pseudo-inverse
 * @throws IllegalArgumentException when the matrix is singular
 */
public static INDArray pinvert(INDArray arr, boolean inPlace) {
    // TODO : do it natively instead of relying on commons-maths
    RealMatrix realMatrix = CheckUtil.convertToApacheMatrix(arr);
    QRDecomposition decomposition = new QRDecomposition(realMatrix, 0);
    DecompositionSolver solver = decomposition.getSolver();
    if (!solver.isNonSingular()) {
        // Fixed message: the old text said "must be singular matrix", which
        // inverted the actual requirement.
        throw new IllegalArgumentException("invalid array: must be a non-singular matrix");
    }
    RealMatrix pinvRM = solver.getInverse();
    INDArray pseudoInverse = CheckUtil.convertFromApacheMatrix(pinvRM, arr.dataType());
    if (inPlace)
        arr.assign(pseudoInverse);
    return pseudoInverse;
}
|
python
|
def _get_nr_bins(count):
"""depending on the number of data points, compute a best guess for an
optimal number of bins
https://en.wikipedia.org/wiki/Histogram#Number_of_bins_and_width
"""
if count <= 30:
# use the square-root choice, used by Excel and Co
k = np.ceil(np.sqrt(count))
else:
# use Sturges' formula
k = np.ceil(np.log2(count)) + 1
return int(k)
|
java
|
/**
 * Begins a run-command operation on the given virtual machine, emitting the
 * result body once the operation has begun.
 *
 * @param resourceGroupName the resource group containing the VM
 * @param vmName            the name of the virtual machine
 * @param parameters        the run-command request payload
 * @return an observable emitting the run command result
 */
public Observable<RunCommandResultInner> beginRunCommandAsync(String resourceGroupName, String vmName, RunCommandInput parameters) {
    return beginRunCommandWithServiceResponseAsync(resourceGroupName, vmName, parameters).map(new Func1<ServiceResponse<RunCommandResultInner>, RunCommandResultInner>() {
        @Override
        public RunCommandResultInner call(ServiceResponse<RunCommandResultInner> response) {
            // Unwrap the service-response envelope to just its body.
            return response.body();
        }
    });
}
|
java
|
/**
 * Formats the given date using the SHORT date/time format.
 *
 * @param date the date to format
 * @return the formatted date/time string
 */
public String getDateTime(Date date) {
    return CmsDateTimeUtil.getDateTime(date, CmsDateTimeUtil.Format.SHORT);
}
|
python
|
def partition(f, xs):
    """
    Works similar to filter, except it returns a two-item tuple where the
    first item is the sequence of items that passed the filter and the
    second is a sequence of items that didn't pass the filter.

    Both returned containers have the same type as ``xs``.
    """
    container = type(xs)
    # The old implementation tested membership against the lazy ``filter``
    # iterator, which is exhausted under Python 3 (wrong results), excluded
    # failing duplicates of passing items, and was O(n^2).  Splitting by the
    # predicate fixes all three.
    passed = [x for x in xs if f(x)]
    failed = [x for x in xs if not f(x)]
    return container(passed), container(failed)
|
java
|
/**
 * Returns the node's name property, falling back to its UUID when no name
 * has been set.
 */
@Override
public final String getName() {
    final String name = getProperty(AbstractNode.name);
    return name != null ? name : getUuid();
}
|
java
|
/**
 * Convenience overload that compares {@code a} and {@code b}, passing
 * {@code null} for the two optional trailing arguments of the
 * four-argument {@code compare}.
 *
 * @param a the first document
 * @param b the second document
 * @return the comparison report
 */
public Report compare(Xml a, Xml b) {
    return compare(a, b, null, null);
}
|
java
|
/**
 * Pairs keys and values positionally into a map; surplus elements of the
 * longer collection are ignored.
 *
 * @param keys   collection supplying the keys, in iteration order
 * @param values collection supplying the values, in iteration order
 * @param <K>    key type
 * @param <V>    value type
 * @return a new HashMap, or null when either input is null or empty
 *         (null return kept for backward compatibility)
 */
public static <K, V> Map<K, V> zip(Collection<K> keys, Collection<V> values) {
    if (keys == null || keys.isEmpty() || values == null || values.isEmpty()) {
        return null;
    }
    final int size = Math.min(keys.size(), values.size());
    final Map<K, V> map = new HashMap<K, V>((int) (size / 0.75));
    // Iterate both collections directly instead of first copying each into
    // an ArrayList just to index it (saves two O(n) copies).
    final java.util.Iterator<K> keyIterator = keys.iterator();
    final java.util.Iterator<V> valueIterator = values.iterator();
    for (int i = 0; i < size; i++) {
        map.put(keyIterator.next(), valueIterator.next());
    }
    return map;
}
|
java
|
/**
 * Executes the activity: when the process is not yet synchronized, registers
 * the wait events and, if an event has already arrived, resumes with its
 * details and completion code.
 *
 * @throws ActivityException propagated from event registration or resume
 */
public void execute() throws ActivityException{
    isSynchronized = checkIfSynchronized();
    if (!isSynchronized) {
        // A non-null return means the awaited event was already received.
        EventWaitInstance received = registerWaitEvents(false, true);
        if (received!=null)
            resume(getExternalEventInstanceDetails(received.getMessageDocumentId()), received.getCompletionCode());
    }
}
|
python
|
def get_dynamodb_type(self, val):
    """
    Take a scalar Python value and return a string representing
    the corresponding Amazon DynamoDB type. If the value passed in is
    not a supported type, raise a TypeError.
    """
    if is_num(val):
        return 'N'
    if is_str(val):
        return 'S'
    if isinstance(val, (set, frozenset)):
        # A set is a number-set or string-set only when every member matches.
        if False not in map(is_num, val):
            return 'NS'
        if False not in map(is_str, val):
            return 'SS'
    raise TypeError('Unsupported type "%s" for value "%s"' % (type(val), val))
|
python
|
def get(self, name=None):
    """
    Returns commands, which can be filtered by name.

    Delegates to the application's command registry, scoped to this plugin.

    :param name: name of the command
    :type name: str
    :return: None, single command or dict of commands
    """
    return self.app.commands.get(name, self.plugin)
|
python
|
def read_tmy3(filename=None, coerce_year=None, recolumn=True):
    '''
    Read a TMY3 file in to a pandas dataframe.

    Note that values contained in the metadata dictionary are unchanged
    from the TMY3 file (i.e. units are retained). In the case of any
    discrepancies between this documentation and the TMY3 User's Manual
    [1], the TMY3 User's Manual takes precedence.

    The TMY3 files were updated in Jan. 2015. This function requires the
    use of the updated files.

    Parameters
    ----------
    filename : None or string, default None
        If None, attempts to use a Tkinter file browser. A string can be
        a relative file path, absolute file path, or url.
    coerce_year : None or int, default None
        If supplied, the year of the data will be set to this value.
    recolumn : bool, default True
        If True, apply standard names to TMY3 columns. Typically this
        results in stripping the units from the column name.

    Returns
    -------
    Tuple of the form (data, metadata).
    data : DataFrame
        A pandas dataframe with the columns described in the table
        below. For more detailed descriptions of each component, please
        consult the TMY3 User's Manual ([1]), especially tables 1-1
        through 1-6.
    metadata : dict
        The site metadata available in the file.

    Notes
    -----
    The returned structures have the following fields.

    ===============   ======  ===================
    key               format  description
    ===============   ======  ===================
    altitude          Float   site elevation
    latitude          Float   site latitude
    longitude         Float   site longitude
    Name              String  site name
    State             String  state
    TZ                Float   UTC offset
    USAF              Int     USAF identifier
    ===============   ======  ===================

    =============================  ======================================================================================================================================================
    TMYData field                  description
    =============================  ======================================================================================================================================================
    TMYData.Index                  A pandas datetime index. NOTE, the index is currently timezone unaware, and times are set to local standard time (daylight savings is not included)
    TMYData.ETR                    Extraterrestrial horizontal radiation recv'd during 60 minutes prior to timestamp, Wh/m^2
    TMYData.ETRN                   Extraterrestrial normal radiation recv'd during 60 minutes prior to timestamp, Wh/m^2
    TMYData.GHI                    Direct and diffuse horizontal radiation recv'd during 60 minutes prior to timestamp, Wh/m^2
    TMYData.GHISource              See [1], Table 1-4
    TMYData.GHIUncertainty         Uncertainty based on random and bias error estimates see [2]
    TMYData.DNI                    Amount of direct normal radiation (modeled) recv'd during 60 minutes prior to timestamp, Wh/m^2
    TMYData.DNISource              See [1], Table 1-4
    TMYData.DNIUncertainty         Uncertainty based on random and bias error estimates see [2]
    TMYData.DHI                    Amount of diffuse horizontal radiation recv'd during 60 minutes prior to timestamp, Wh/m^2
    TMYData.DHISource              See [1], Table 1-4
    TMYData.DHIUncertainty         Uncertainty based on random and bias error estimates see [2]
    TMYData.GHillum                Avg. total horizontal illuminance recv'd during the 60 minutes prior to timestamp, lx
    TMYData.GHillumSource          See [1], Table 1-4
    TMYData.GHillumUncertainty     Uncertainty based on random and bias error estimates see [2]
    TMYData.DNillum                Avg. direct normal illuminance recv'd during the 60 minutes prior to timestamp, lx
    TMYData.DNillumSource          See [1], Table 1-4
    TMYData.DNillumUncertainty     Uncertainty based on random and bias error estimates see [2]
    TMYData.DHillum                Avg. horizontal diffuse illuminance recv'd during the 60 minutes prior to timestamp, lx
    TMYData.DHillumSource          See [1], Table 1-4
    TMYData.DHillumUncertainty     Uncertainty based on random and bias error estimates see [2]
    TMYData.Zenithlum              Avg. luminance at the sky's zenith during the 60 minutes prior to timestamp, cd/m^2
    TMYData.ZenithlumSource        See [1], Table 1-4
    TMYData.ZenithlumUncertainty   Uncertainty based on random and bias error estimates see [1] section 2.10
    TMYData.TotCld                 Amount of sky dome covered by clouds or obscuring phenomena at time stamp, tenths of sky
    TMYData.TotCldSource           See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.TotCldUnertainty       See [1], Table 1-6
    TMYData.OpqCld                 Amount of sky dome covered by clouds or obscuring phenomena that prevent observing the sky at time stamp, tenths of sky
    TMYData.OpqCldSource           See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.OpqCldUncertainty      See [1], Table 1-6
    TMYData.DryBulb                Dry bulb temperature at the time indicated, deg C
    TMYData.DryBulbSource          See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.DryBulbUncertainty     See [1], Table 1-6
    TMYData.DewPoint               Dew-point temperature at the time indicated, deg C
    TMYData.DewPointSource         See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.DewPointUncertainty    See [1], Table 1-6
    TMYData.RHum                   Relative humidity at the time indicated, percent
    TMYData.RHumSource             See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.RHumUncertainty        See [1], Table 1-6
    TMYData.Pressure               Station pressure at the time indicated, 1 mbar
    TMYData.PressureSource         See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.PressureUncertainty    See [1], Table 1-6
    TMYData.Wdir                   Wind direction at time indicated, degrees from north (360 = north; 0 = undefined,calm)
    TMYData.WdirSource             See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.WdirUncertainty        See [1], Table 1-6
    TMYData.Wspd                   Wind speed at the time indicated, meter/second
    TMYData.WspdSource             See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.WspdUncertainty        See [1], Table 1-6
    TMYData.Hvis                   Distance to discernable remote objects at time indicated (7777=unlimited), meter
    TMYData.HvisSource             See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.HvisUncertainty        See [1], Table 1-6
    TMYData.CeilHgt                Height of cloud base above local terrain (7777=unlimited), meter
    TMYData.CeilHgtSource          See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.CeilHgtUncertainty     See [1], Table 1-6
    TMYData.Pwat                   Total precipitable water contained in a column of unit cross section from earth to top of atmosphere, cm
    TMYData.PwatSource             See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.PwatUncertainty        See [1], Table 1-6
    TMYData.AOD                    The broadband aerosol optical depth per unit of air mass due to extinction by aerosol component of atmosphere, unitless
    TMYData.AODSource              See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.AODUncertainty         See [1], Table 1-6
    TMYData.Alb                    The ratio of reflected solar irradiance to global horizontal irradiance, unitless
    TMYData.AlbSource              See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.AlbUncertainty         See [1], Table 1-6
    TMYData.Lprecipdepth           The amount of liquid precipitation observed at indicated time for the period indicated in the liquid precipitation quantity field, millimeter
    TMYData.Lprecipquantity        The period of accumulation for the liquid precipitation depth field, hour
    TMYData.LprecipSource          See [1], Table 1-5, 8760x1 cell array of strings
    TMYData.LprecipUncertainty     See [1], Table 1-6
    TMYData.PresWth                Present weather code, see [2].
    TMYData.PresWthSource          Present weather code source, see [2].
    TMYData.PresWthUncertainty     Present weather code uncertainty, see [2].
    =============================  ======================================================================================================================================================

    References
    ----------
    [1] Wilcox, S and Marion, W. "Users Manual for TMY3 Data Sets".
    NREL/TP-581-43156, Revised May 2008.

    [2] Wilcox, S. (2007). National Solar Radiation Database 1991 2005
    Update: Users Manual. 472 pp.; NREL Report No. TP-581-41364.
    '''
    if filename is None:
        try:
            filename = _interactive_load()
        except ImportError:
            raise ImportError('Interactive load failed. Tkinter not supported '
                              'on this system. Try installing X-Quartz and '
                              'reloading')

    head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude']

    if filename.startswith('http'):
        request = Request(filename, headers={'User-Agent': (
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) '
            'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 '
            'Safari/537.36')})
        response = urlopen(request)
        csvdata = io.StringIO(response.read().decode(errors='ignore'))
    else:
        # assume it's accessible via the file system
        csvdata = open(filename, 'r')

    try:
        # read in file metadata, advance buffer to second line
        firstline = csvdata.readline()
        if 'Request Rejected' in firstline:
            raise IOError('Remote server rejected TMY file request')

        meta = dict(zip(head, firstline.rstrip('\n').split(",")))
        # convert metadata strings to numeric types
        meta['altitude'] = float(meta['altitude'])
        meta['latitude'] = float(meta['latitude'])
        meta['longitude'] = float(meta['longitude'])
        meta['TZ'] = float(meta['TZ'])
        meta['USAF'] = int(meta['USAF'])

        # use pandas to read the csv file/stringio buffer
        # header is actually the second line in file, but tell pandas to look
        # for header information on the 1st line (0 indexing) because we've
        # already advanced past the true first line with the readline call
        # above.
        data = pd.read_csv(
            csvdata, header=0,
            parse_dates={'datetime': ['Date (MM/DD/YYYY)', 'Time (HH:MM)']},
            date_parser=lambda *x: _parsedate(*x, year=coerce_year),
            index_col='datetime')
    finally:
        # Fixed: the file handle (or StringIO buffer) was previously leaked;
        # always close it once pandas has consumed it.
        csvdata.close()

    if recolumn:
        data = _recolumn(data)  # rename to standard column names

    data = data.tz_localize(int(meta['TZ'] * 3600))

    return data, meta
|
java
|
/**
 * Removes a single cache entry from the value set of the given dependency.
 * When that removal empties the set, the dependency mapping itself is
 * dropped and the corresponding persisted value set is deleted.
 *
 * @param dependency the dependency id (dep id or template id) being updated
 * @param entry      the cache entry to remove from the dependency's set
 * @return a pooled Result; bExist is set to EXIST when the dependency was found
 */
public Result removeEntry(Object dependency, Object entry) {
    Result result = this.htod.getFromResultPool();
    ValueSet valueSet = (ValueSet) dependencyToEntryTable.get(dependency);
    if (valueSet == null) {
        // Unknown dependency: return the pooled result unchanged.
        return result;
    }
    result.bExist = HTODDynacache.EXIST;
    valueSet.remove(entry);
    dependencyNotUpdatedTable.remove(dependency);
    if (valueSet.isEmpty()) {
        dependencyToEntryTable.remove(dependency);
        // Delete the persisted value set under the matching data kind.
        if (this.type == DEP_ID_TABLE) {
            result.returnCode = this.htod.delValueSet(HTODDynacache.DEP_ID_DATA, dependency);
        } else {
            result.returnCode = this.htod.delValueSet(HTODDynacache.TEMPLATE_ID_DATA, dependency);
        }
    }
    return result;
}
|
python
|
def __update_molar_mass(self, compound='', element=''):
    """Re-calculate the molar mass of the element given due to stoichiometric changes

    Parameters:
    ==========
    compound: string (default is '') name of compound
    element: string (default is '') name of element
    """
    molar_mass = 0.0
    isotopes = self.stack[compound][element]['isotopes']
    list_ratio = isotopes['isotopic_ratio']
    list_mass = isotopes['mass']['value']
    # Molar mass = sum of (isotopic ratio * isotope mass) over all isotopes.
    # Fixed: np.float was removed in NumPy >= 1.24; the builtin float is the
    # documented replacement and is numerically equivalent here.
    for _ratio, _mass in zip(list_ratio, list_mass):
        molar_mass += float(_ratio) * float(_mass)
    self.stack[compound][element]['molar_mass']['value'] = molar_mass
|
java
|
/**
 * Builds the command's result Observable. Ignorable errors are rewrapped as
 * HystrixBadRequestException — both when they surface inside the observable
 * chain and when the action throws synchronously — so Hystrix treats them
 * as bad requests rather than command failures.
 */
@Override
protected Observable construct() {
    Observable result;
    try {
        Observable observable = toObservable(commandActions.getCommandAction().execute(executionType));
        result = observable
            .onErrorResumeNext(new Func1<Throwable, Observable>() {
                @Override
                public Observable call(Throwable throwable) {
                    if (isIgnorable(throwable)) {
                        // Rewrap, preserving both the message and the cause.
                        return Observable.error(new HystrixBadRequestException(throwable.getMessage(), throwable));
                    }
                    return Observable.error(throwable);
                }
            });
        flushCache();
    } catch (CommandActionExecutionException throwable) {
        // Synchronous failure path: unwrap the cause and apply the same
        // ignorable-error policy.
        Throwable cause = throwable.getCause();
        if (isIgnorable(cause)) {
            throw new HystrixBadRequestException(cause.getMessage(), cause);
        }
        throw throwable;
    }
    return result;
}
|
java
|
/**
 * Looks up a repository type by its name.
 *
 * @param repositoryTypeName the unique name of the repository type
 * @return the matching repository type, eagerly initialized
 */
public RepositoryType getTypeByName(String repositoryTypeName)
{
    final Criteria criteria = sessionService.getSession().createCriteria(RepositoryType.class);
    criteria.add(Property.forName("name").eq(repositoryTypeName));
    final RepositoryType result = (RepositoryType) criteria.uniqueResult();
    HibernateLazyInitializer.init(result);
    return result;
}
|
java
|
/**
 * Post-processes the incoming result chain after statement execution:
 * collects SQL warnings into the warning chain, records any error and
 * generated-data results, and materializes a JDBCResultSet when the head
 * result carries data.
 *
 * @throws SQLException declared for callers; not thrown directly here
 */
void performPostExecute() throws SQLException {
    resultOut.clearLobResults();
    generatedResult = null;
    if (resultIn == null) {
        return;
    }
    Result current = resultIn;
    // Walk the chained results, unlinking each one as we advance.
    while (current.getChainedResult() != null) {
        current = current.getUnlinkChainedResult();
        if (current.getType() == ResultConstants.WARNING) {
            SQLWarning w = Util.sqlWarning(current);
            // First warning becomes the root; later ones are appended.
            if (rootWarning == null) {
                rootWarning = w;
            } else {
                rootWarning.setNextWarning(w);
            }
        } else if (current.getType() == ResultConstants.ERROR) {
            errorResult = current;
        } else if (current.getType() == ResultConstants.DATA) {
            generatedResult = current;
        }
    }
    if (resultIn.isData()) {
        currentResultSet = new JDBCResultSet(connection.sessionProxy,
            this, resultIn, resultIn.metaData,
            connection.connProperties);
    }
}
|
python
|
def _delete_account(self, account, username):
    """ Called when account is deleted. With username override. """
    ds_user = self.get_user(username)
    if ds_user is None:
        return
    # NOTE(review): exit status 8 from grmuser is tolerated here —
    # presumably "user already absent"; confirm against grmuser docs.
    self._call(["grmuser", "-u", username], ignore_errors=[8])
|
java
|
/**
 * Notifies the embedded H2O config (when present) of the current cloud
 * size; when the notify_local argument is set, this node's address:port is
 * written to that file first.
 *
 * @param ip         this node's IP
 * @param port       this node's port
 * @param leaderIp   the cloud leader's IP
 * @param leaderPort the cloud leader's port
 * @param size       the number of nodes in the cloud
 */
public static void notifyAboutCloudSize(InetAddress ip, int port, InetAddress leaderIp, int leaderPort, int size) {
    if (ARGS.notify_local != null && !ARGS.notify_local.trim().isEmpty()) {
        final File notifyFile = new File(ARGS.notify_local);
        final File parentDir = notifyFile.getParentFile();
        if (parentDir != null && !parentDir.isDirectory()) {
            if (!parentDir.mkdirs()) {
                Log.err("Cannot make parent dir for notify file.");
                H2O.exit(-1);
            }
        }
        // Writes "host:port" using SELF_ADDRESS/API_PORT — note these are
        // the globals, not the ip/port parameters.
        try(BufferedWriter output = new BufferedWriter(new FileWriter(notifyFile))) {
            output.write(SELF_ADDRESS.getHostAddress());
            output.write(':');
            output.write(Integer.toString(API_PORT));
            output.flush();
        } catch ( IOException e ) {
            e.printStackTrace();
        }
    }
    if (embeddedH2OConfig == null) { return; }
    embeddedH2OConfig.notifyAboutCloudSize(ip, port, leaderIp, leaderPort, size);
}
|
java
|
/**
 * Converts a {@link Date} into Excel's numeric date/time serial value.
 *
 * @param value         the date to convert (must not be null)
 * @param startDate1904 true when the workbook uses the 1904 date system
 * @return the Excel serial number for the date
 */
public static double convertExcelNumber(final Date value, final boolean startDate1904) {
    ArgUtils.notNull(value, "value");
    /*
     * Convert to Excel's time representation.
     * In Excel's date format the fractional part encodes the time of day,
     * so the milliseconds making up a full day must be accounted for.
     */
    long utcDay = value.getTime();
    BigDecimal numValue = new BigDecimal(utcDay);
    numValue = numValue.divide(new BigDecimal(SECONDS_IN_DAYS * 1000), 17, BigDecimal.ROUND_HALF_UP);
    if(startDate1904) {
        // Workbook uses the 1904 date system.
        numValue = numValue.subtract(new BigDecimal(OFFSET_DAYS_1904));
    } else {
        // Workbook uses the 1900 date system.
        numValue = numValue.subtract(new BigDecimal(OFFSET_DAYS_1900));
        if(numValue.compareTo(new BigDecimal(NON_LEAP_DAY - 1)) >= 0) {
            // Adjust around NON_LEAP_DAY (Excel's 1900 leap-year quirk).
            numValue = numValue.add(new BigDecimal(1));
        }
    }
    return numValue.doubleValue();
}
|
python
|
def cmd(send, msg, args):
    """Summons a user
    Syntax: {command} <nick>
    """
    # Summoning is a public ritual only.
    if args['type'] == 'privmsg':
        send("Note-passing should be done in public.")
        return
    targets = msg.split()
    if not targets:
        send("Who shall I summon?")
        return
    if len(targets) > 1:
        send("Sorry, I can only perform the summoning ritual for one person at a time")
        return
    nick = targets[0]
    # Record the summons as a note from "The Dark Gods".
    row = Notes(note="You have been summoned!", submitter="The Dark Gods",
                nick=nick, time=datetime.now())
    args['db'].add(row)
    send("%s has been summoned!" % nick)
|
java
|
@Override
protected void channelRead0(ChannelHandlerContext ctx, FullBinaryMemcacheResponse msg) throws Exception {
if (msg.getOpcode() == SASL_LIST_MECHS_OPCODE) {
handleListMechsResponse(ctx, msg);
} else if (msg.getOpcode() == SASL_AUTH_OPCODE) {
handleAuthResponse(ctx, msg);
} else if (msg.getOpcode() == SASL_STEP_OPCODE) {
checkIsAuthed(msg);
}
}
|
java
|
/**
 * Tears down the connection: disposes the connector (notifying all
 * connection listeners) and destroys the executor filter, if present.
 * Both fields are nulled before cleanup so the teardown runs only once.
 */
void finishConnection() {
    final NioSocketConnector activeConnector = this.connector;
    if (activeConnector != null) {
        this.connector = null;
        activeConnector.dispose();
        // Listeners observe the connection only after it is fully disposed.
        for (ConnectionListener listener : this.connectionListeners) {
            listener.connectionEnded(this);
        }
    }
    final ExecutorFilter activeExecutors = this.executors;
    if (activeExecutors != null) {
        this.executors = null;
        activeExecutors.destroy();
    }
}
|
java
|
/**
 * Verify the signature of the authentication context by unwrapping the
 * signable object from the pair and delegating to the overload that
 * accepts it directly.
 */
protected void verifyAuthenticationContextSignature(final Pair<? extends SignableSAMLObject, MessageContext> authenticationContext,
                                                    final HttpServletRequest request, final RequestAbstractType authnRequest,
                                                    final SamlRegisteredServiceServiceProviderMetadataFacade adaptor) throws Exception {
    verifyAuthenticationContextSignature(authenticationContext.getValue(), request, authnRequest, adaptor);
}
|
java
|
/**
 * Returns the "depth" at which the given annotation is found for a method:
 * 1 when the method itself is annotated, plus 1 for every interface or
 * superclass hop needed to find an annotated declaration; -1 when the
 * annotation is not present anywhere in the hierarchy (or on invalid input).
 *
 * @param m the method to inspect (may be null)
 * @param annotationClass the annotation to look for (may be null)
 * @return the annotation depth, or -1 if not found
 */
private static int getAnnotationDepth(final Method m, final Class<? extends Annotation> annotationClass) {
    // if we have invalid data the result is -1
    if (m == null || annotationClass == null) {
        return -1;
    }
    if (m.isAnnotationPresent(annotationClass)) {
        return 1;
    }
    // if we've already reached the Object class, return -1;
    final Class<?> c = m.getDeclaringClass();
    if (c.getSuperclass() == null) {
        return -1;
    }
    // check directly implemented interfaces for the method being checked
    for (final Class<?> i : c.getInterfaces()) {
        try {
            final Method im = i.getMethod(m.getName(), m.getParameterTypes());
            final int d = getAnnotationDepth(im, annotationClass);
            if (d > 0) {
                // since the annotation was on the interface, add 1
                return d + 1;
            }
        } catch (final SecurityException | NoSuchMethodException ignored) {
            // Interface does not declare (or refuses access to) the method;
            // keep searching the remaining interfaces.
        }
    }
    try {
        final int d = getAnnotationDepth(
                c.getSuperclass().getMethod(m.getName(), m.getParameterTypes()),
                annotationClass);
        if (d > 0) {
            // since the annotation was on the superclass, add 1
            return d + 1;
        }
        return -1;
    } catch (final SecurityException | NoSuchMethodException ex) {
        return -1;
    }
}
|
python
|
def load(self, path: str) -> ClassFile:
    """Return the :class:`ClassFile` stored at `path`.

    The LRU cache is consulted first; on a miss the file at
    `path` + ``.class`` is opened and parsed.

    :param path: Fully-qualified path to a ClassFile.
    """
    try:
        # Cache hit: pop now, re-insert below to refresh LRU ordering.
        klass = self.class_cache.pop(path)
    except KeyError:
        # Cache miss: read and parse the class from disk.
        with self.open(f'{path}.class') as source:
            klass = self.klass(source)
    # (Re)bind the loader even for cached entries, and re-insert the key so
    # the OrderedDict ordering reflects most-recent use.
    klass.classloader = self
    self.class_cache[path] = klass
    # When a bound is configured, evict least-recently-used entries that
    # exceed it.
    if self.max_cache > 0:
        overflow = max(len(self.class_cache) - self.max_cache, 0)
        for _ in repeat(None, overflow):
            self.class_cache.popitem(last=False)
    return klass
|
python
|
def fail_unknown_dialect(compiler: "SQLCompiler", task: str) -> None:
    """
    Raise :exc:`NotImplementedError` for a *task* that has no implementation
    for the dialect of *compiler*, with a helpful error message.
    """
    message = (
        f"Don't know how to {task} on dialect {compiler.dialect!r}. "
        "(Check also: if you printed the SQL before it was bound to an "
        "engine, you will be trying to use a dialect like StrSQLCompiler, "
        "which could be a reason for failure.)"
    )
    raise NotImplementedError(message)
|
java
|
/**
 * Builds an {@code MQProperties} instance from the canal configuration.
 *
 * Every setting is optional: a property is applied only when present and
 * non-empty. Numeric/boolean settings are parsed with the primitive
 * {@code parseXxx} methods (avoids needless boxing of {@code valueOf}).
 *
 * @param properties the raw canal configuration properties
 * @return the populated MQ properties
 */
private static MQProperties buildMQProperties(Properties properties) {
    MQProperties mqProperties = new MQProperties();
    String servers = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_SERVERS);
    if (!StringUtils.isEmpty(servers)) {
        mqProperties.setServers(servers);
    }
    String retries = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_RETRIES);
    if (!StringUtils.isEmpty(retries)) {
        mqProperties.setRetries(Integer.parseInt(retries));
    }
    String batchSize = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_BATCHSIZE);
    if (!StringUtils.isEmpty(batchSize)) {
        mqProperties.setBatchSize(Integer.parseInt(batchSize));
    }
    String lingerMs = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_LINGERMS);
    if (!StringUtils.isEmpty(lingerMs)) {
        mqProperties.setLingerMs(Integer.parseInt(lingerMs));
    }
    String maxRequestSize = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_MAXREQUESTSIZE);
    if (!StringUtils.isEmpty(maxRequestSize)) {
        mqProperties.setMaxRequestSize(Integer.parseInt(maxRequestSize));
    }
    String bufferMemory = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_BUFFERMEMORY);
    if (!StringUtils.isEmpty(bufferMemory)) {
        mqProperties.setBufferMemory(Long.parseLong(bufferMemory));
    }
    String canalBatchSize = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_CANALBATCHSIZE);
    if (!StringUtils.isEmpty(canalBatchSize)) {
        mqProperties.setCanalBatchSize(Integer.parseInt(canalBatchSize));
    }
    String canalGetTimeout = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_CANALGETTIMEOUT);
    if (!StringUtils.isEmpty(canalGetTimeout)) {
        mqProperties.setCanalGetTimeout(Long.parseLong(canalGetTimeout));
    }
    String flatMessage = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_FLATMESSAGE);
    if (!StringUtils.isEmpty(flatMessage)) {
        mqProperties.setFlatMessage(Boolean.parseBoolean(flatMessage));
    }
    String compressionType = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_COMPRESSION_TYPE);
    if (!StringUtils.isEmpty(compressionType)) {
        mqProperties.setCompressionType(compressionType);
    }
    String acks = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_ACKS);
    if (!StringUtils.isEmpty(acks)) {
        mqProperties.setAcks(acks);
    }
    String aliyunAccessKey = CanalController.getProperty(properties, CanalConstants.CANAL_ALIYUN_ACCESSKEY);
    if (!StringUtils.isEmpty(aliyunAccessKey)) {
        mqProperties.setAliyunAccessKey(aliyunAccessKey);
    }
    String aliyunSecretKey = CanalController.getProperty(properties, CanalConstants.CANAL_ALIYUN_SECRETKEY);
    if (!StringUtils.isEmpty(aliyunSecretKey)) {
        mqProperties.setAliyunSecretKey(aliyunSecretKey);
    }
    String transaction = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_TRANSACTION);
    if (!StringUtils.isEmpty(transaction)) {
        mqProperties.setTransaction(Boolean.parseBoolean(transaction));
    }
    String producerGroup = CanalController.getProperty(properties, CanalConstants.CANAL_MQ_PRODUCERGROUP);
    if (!StringUtils.isEmpty(producerGroup)) {
        mqProperties.setProducerGroup(producerGroup);
    }
    // Copy through any key under CanalConstants.CANAL_MQ_PROPERTIES verbatim,
    // keyed by the suffix after the prefix (and its separator character).
    for (Object key : properties.keySet()) {
        key = StringUtils.trim(key.toString());
        if (((String) key).startsWith(CanalConstants.CANAL_MQ_PROPERTIES)) {
            String value = CanalController.getProperty(properties, (String) key);
            String subKey = ((String) key).substring(CanalConstants.CANAL_MQ_PROPERTIES.length() + 1);
            mqProperties.getProperties().put(subKey, value);
        }
    }
    return mqProperties;
}
|
java
|
/**
 * Sets the center point of this object.
 *
 * @param cx the x coordinate of the new center
 * @param cy the y coordinate of the new center
 * @param cz the z coordinate of the new center
 */
@Override
public void setCenter(double cx, double cy, double cz) {
    this.center.set(cx, cy, cz);
}
|
java
|
/**
 * Reads a single line from standard input.
 *
 * Reads until a newline or end-of-stream; carriage returns are dropped so
 * both LF and CRLF line endings yield the same result.
 *
 * @return the line read, without any line terminator
 * @throws IOException if reading from stdin fails
 */
public static String readlineFromStdIn() throws IOException {
    final StringBuilder line = new StringBuilder();
    while (true) {
        final int c = System.in.read();
        if (c == '\n' || c == -1) {
            break;
        }
        if (c != '\r') {
            line.append((char) c);
        }
    }
    return line.toString();
}
|
python
|
def enable_debug(self):
    """Turn on the debug configuration option when the CLI foreground
    argument was supplied.
    """
    if not self.args.foreground:
        return
    self.config.application[config.DEBUG] = True
|
java
|
/**
 * Stores a value in the cell at the given coordinates.
 *
 * Null is wrapped as-is, NaN doubles are normalized, primitive arrays are
 * stored as their {@link Arrays#toString} text, values in "string" columns
 * are stored via {@code toString()}, and everything else is wrapped raw.
 *
 * @param row row index
 * @param col column index
 * @param o   value to store (may be null)
 */
public void set(final int row, final int col, final Object o) {
    if (o == null)
        cellValues[row][col] = new IcedWrapper(null);
    else if (o instanceof Double && Double.isNaN((double)o))
        cellValues[row][col] = new IcedWrapper(Double.NaN);
    else if (o instanceof int[])
        cellValues[row][col] = new IcedWrapper(Arrays.toString((int[])o));
    else if (o instanceof long[])
        cellValues[row][col] = new IcedWrapper(Arrays.toString((long[])o));
    else if (o instanceof float[])
        cellValues[row][col] = new IcedWrapper(Arrays.toString((float[])o));
    else if (o instanceof double[])
        cellValues[row][col] = new IcedWrapper(Arrays.toString((double[])o));
    else if ("string".equals(colTypes[col]))
        // Compare with equals(), not ==: the previous reference comparison
        // only worked for interned literals.
        cellValues[row][col] = new IcedWrapper(o.toString());
    else
        cellValues[row][col] = new IcedWrapper(o);
}
/**
 * Print table to String, using 2 spaces for padding between columns.
 * Equivalent to {@code toString(2, true)} (full table, no truncation).
 * @return String containing the ASCII version of the table
 */
public String toString() {
    return toString(2, true);
}
/**
 * Print table to String, using user-given padding.
 * Equivalent to {@code toString(pad, true)} (full table, no truncation).
 * @param pad number of spaces for padding between columns
 * @return String containing the ASCII version of the table
 */
public String toString(final int pad) {
    return toString(pad, true);
}
// Maximum number of data rows printed before the middle is truncated
// (must be even; half shown at the top, half at the bottom).
private static int PRINTOUT_ROW_LIMIT = 20;

// Whether to omit this row from a truncated printout: keep the first and
// last PRINTOUT_ROW_LIMIT/2 rows and skip everything in between.
private boolean skip(int row) {
    assert(PRINTOUT_ROW_LIMIT % 2 == 0);
    if (getRowDim() <= PRINTOUT_ROW_LIMIT) return false;
    if (row <= PRINTOUT_ROW_LIMIT/2) return false;
    if (row >= getRowDim()-PRINTOUT_ROW_LIMIT/2) return false;
    return true;
}
/**
 * Print table to String, using user-given padding
 * @param pad number of spaces for padding between columns
 * @param full whether to print the full table (otherwise top 5 and bottom 5 rows only)
 * @return String containing the ASCII version of the table
 */
public String toString(final int pad, boolean full) {
    if (pad < 0)
        throw new IllegalArgumentException("pad must be a non-negative integer");

    final int rowDim = getRowDim();
    final int colDim = getColDim();

    // When truncating, print at most PRINTOUT_ROW_LIMIT rows plus one
    // separator row in the middle.
    final int actualRowDim = full ? rowDim : Math.min(PRINTOUT_ROW_LIMIT+1, rowDim);

    // cellStrings[0][*] holds column headers; cellStrings[*][0] row headers.
    final String[][] cellStrings = new String[actualRowDim + 1][colDim + 1];
    for (String[] row: cellStrings)
        Arrays.fill(row, "");

    cellStrings[0][0] = colHeaderForRowHeaders != null ? colHeaderForRowHeaders : "";
    int row = 0;
    for (int r = 0; r < rowDim; ++r) {
        if (!full && skip(r)) continue;
        cellStrings[row+1][0] = rowHeaders[r];
        row++;
    }
    for (int c = 0; c < colDim; ++c)
        cellStrings[0][c+1] = colHeaders[c];

    // Render each cell using its column's format string; blanks for empty
    // cells and "NaN" for NaN doubles.
    for (int c = 0; c < colDim; ++c) {
        final String formatString = colFormats[c];
        row = 0;
        for (int r = 0; r < rowDim; ++r) {
            if (!full && skip(r)) continue;
            Object o = get(r,c);
            if ((o == null) || o instanceof Double && isEmpty((double)o)){
                cellStrings[row + 1][c + 1] = "";
                row++;
                continue;
            } else if (o instanceof Double && Double.isNaN((double)o)) {
                cellStrings[row + 1][c + 1] = "NaN";
                row++;
                continue;
            }
            try {
                if (o instanceof Double) cellStrings[row + 1][c + 1] = String.format(formatString, (Double) o);
                else if (o instanceof Float) cellStrings[row + 1][c + 1] = String.format(formatString, (Float) o);
                else if (o instanceof Integer) cellStrings[row + 1][c + 1] = String.format(formatString, (Integer) o);
                else if (o instanceof Long) cellStrings[row + 1][c + 1] = String.format(formatString, (Long) o);
                else if (o instanceof String) cellStrings[row + 1][c + 1] = (String)o;
                else cellStrings[row + 1][c + 1] = String.format(formatString, cellValues[r][c]);
            } catch(Throwable t) {
                // Fall back to toString() when the format string does not apply.
                cellStrings[row + 1][c + 1] = o.toString();
            }
            row++;
        }
    }

    // Column widths: widest rendered cell per column (including headers).
    final int[] colLen = new int[colDim + 1];
    for (int c = 0; c <= colDim; ++c) {
        for (int r = 0; r <= actualRowDim; ++r) {
            colLen[c] = Math.max(colLen[c], cellStrings[r][c].length());
        }
    }

    final StringBuilder sb = new StringBuilder();
    if (tableHeader.length() > 0) {
        sb.append(tableHeader);
    }
    if (tableDescription.length() > 0) {
        sb.append(" (").append(tableDescription).append(")");
    }
    sb.append(":\n");
    for (int r = 0; r <= actualRowDim; ++r) {
        int len = colLen[0];
        if (actualRowDim != rowDim && r - 1 == PRINTOUT_ROW_LIMIT/2) {
            assert(!full);
            // Separator row marking the omitted middle of a truncated table.
            sb.append("---");
        } else {
            if (len > 0)
                sb.append(String.format("%" + colLen[0] + "s", cellStrings[r][0]));
            for (int c = 1; c <= colDim; ++c) {
                len = colLen[c];
                if (len > 0)
                    sb.append(String.format("%" + (len + pad) + "s", cellStrings[r][c].equals("null") ? "" : cellStrings[r][c]));
            }
        }
        sb.append("\n");
    }
    return sb.toString();
}
|
java
|
/**
 * Registers a listener for changes to a specific property. A per-property
 * child {@link PropertyChangeSupport} is created lazily on first use.
 *
 * @param propertyName the property to listen on
 * @param listener the listener to register
 */
public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) {
    if (children == null) {
        children = new HashMap();
    }
    PropertyChangeSupport support = (PropertyChangeSupport) children.get(propertyName);
    if (support == null) {
        support = new PropertyChangeSupport(source);
        children.put(propertyName, support);
    }
    support.addPropertyChangeListener(listener);
}
|
java
|
/**
 * Sets the data type of this custom field.
 *
 * @param dataType the new data type value
 */
public void setDataType(com.google.api.ads.admanager.axis.v201902.CustomFieldDataType dataType) {
    this.dataType = dataType;
}
|
java
|
/**
 * Returns a copy of the string with every character that needs quoting
 * replaced by its quoted form (via the {@code quote(StringBuilder, ...)}
 * overload); all other characters are copied through unchanged.
 *
 * @param string the string to quote
 * @return the quoted string
 */
public static String quote(final String string) {
    final StringBuilder out = new StringBuilder();
    final int length = string.length();
    int copiedUpTo = 0;
    for (int i = 0; i < length; i++) {
        if (needsQuoting(string.charAt(i))) {
            // Flush the unquoted run, then let the overload emit the quoted
            // form and tell us where to resume.
            out.append(string, copiedUpTo, i);
            i = quote(out, string, i);
            copiedUpTo = i;
        }
    }
    out.append(string, copiedUpTo, length);
    return out.toString();
}
|
python
|
def get_all_sizes_checksums():
    """Returns dict associating URL to (size, sha256)."""
    merged = {}
    for checksums_path in _checksum_paths().values():
        path_data = _get_sizes_checksums(checksums_path)
        # Reject conflicting registrations of the same URL before merging.
        for url, size_checksum in path_data.items():
            if url in merged and merged[url] != size_checksum:
                raise AssertionError(
                    'URL %s is registered with 2+ distinct size/checksum tuples.' % url)
        merged.update(path_data)
    return merged
|
python
|
def create_shot(self, sequence):
    """Prompt the user with a shot creator dialog and return the result.

    :param sequence: the sequence for the shot
    :type sequence: :class:`jukeboxcore.djadapter.models.Sequence`
    :returns: The created shot or None
    :rtype: None | :class:`jukeboxcore.djadapter.models.Shot`
    :raises: None
    """
    creator = ShotCreatorDialog(sequence=sequence, parent=self)
    # Blocks until the dialog is dismissed; the dialog stores the result.
    creator.exec_()
    return creator.shot
|
python
|
def sum(arrays, masks=None, dtype=None, out=None,
        zeros=None, scales=None):
    """Combine arrays by addition, with masks and offsets.

    Arrays and masks are a list of array objects. All input arrays
    have the same shape. If present, the masks have the same shape
    also.

    The function returns an array with one more dimension than the
    inputs and with size (3, shape). out[0] contains the sum,
    out[1] the variance and out[2] the number of points used.

    Note: this function shadows the ``sum`` builtin within this module.

    :param arrays: a list of arrays
    :param masks: a list of mask arrays, True values are masked
    :param dtype: data type of the output
    :param out: optional output, with one more axis than the input arrays
    :param zeros: per-array offsets passed to generic_combine (presumably
        subtracted before combining — TODO confirm)
    :param scales: per-array scale factors passed to generic_combine
        (TODO confirm semantics)
    :return: sum, variance of the sum and number of points stored

    Example:
        >>> import numpy
        >>> image = numpy.array([[1., 3.], [1., -1.4]])
        >>> inputs = [image, image + 1]
        >>> sum(inputs)
        array([[[ 1.5,  3.5],
                [ 1.5, -0.9]],
        <BLANKLINE>
               [[ 0.5,  0.5],
                [ 0.5,  0.5]],
        <BLANKLINE>
               [[ 2. ,  2. ],
                [ 2. ,  2. ]]])
    """
    # Delegate the actual combination to the shared driver using the
    # sum combination method.
    return generic_combine(intl_combine.sum_method(), arrays, masks=masks,
                           dtype=dtype, out=out,
                           zeros=zeros, scales=scales)
|
java
|
/**
 * Renders the named template with the given bindings. Absolute paths are
 * resolved on the file system; relative paths on the classpath.
 *
 * @param path directory of the template (absolute or classpath-relative)
 * @param templateFileName name of the template file
 * @param bindingMap variables bound into the template
 * @return the rendered text
 */
public static String render(String path, String templateFileName, Map<String, Object> bindingMap) {
    return render(
            FileUtil.isAbsolutePath(path)
                    ? getFileTemplate(path, templateFileName)
                    : getClassPathTemplate(path, templateFileName),
            bindingMap);
}
|
python
|
def pypi_link(pkg_filename):
    """
    Given the filename, including md5 fragment, construct the
    dependency link for PyPI.

    :param pkg_filename: sdist filename, e.g. ``"foo-1.0.tar.gz"``
    :return: the files.pythonhosted.org URL for the package file
    """
    root = 'https://files.pythonhosted.org/packages/source'
    # The project name is everything before the first hyphen; the previously
    # unpacked separator/rest values were unused.
    name = pkg_filename.partition('-')[0]
    return '/'.join((root, name[0], name, pkg_filename))
|
python
|
def queuify_logger(logger, queue_handler, queue_listener):
    """Route a logger's output through a queue handler, moving its current
    handlers onto the given queue listener.

    This is useful when you want to use a default logging config but then
    optionally add a logger's handlers to a queue during runtime.

    Args:
        logger (mixed): Logger instance or string name of logger to queue-ify
            handlers.
        queue_handler (QueueHandler): Instance of a ``QueueHandler``.
        queue_listener (QueueListener): Instance of a ``QueueListener``.
    """
    if isinstance(logger, str):
        logger = logging.getLogger(logger)

    # Move over only the handlers the listener is not already servicing.
    pending = [h for h in logger.handlers if h not in queue_listener.handlers]
    if pending:
        # QueueListener stores its handlers as a tuple; extend it.
        queue_listener.handlers = queue_listener.handlers + tuple(pending)

    # The queue handler becomes the logger's sole handler.
    del logger.handlers[:]
    logger.addHandler(queue_handler)
|
java
|
/**
 * Moves a path into the trash location, preserving its full path under the
 * trash root. A timestamp suffix is appended when an identically-named
 * entry already exists in the trash.
 *
 * @param path the path to trash (resolved against the working directory
 *             when relative)
 * @return whether the underlying rename succeeded
 * @throws IOException on file system errors
 */
@Override
public boolean moveToTrash(Path path) throws IOException {
    final Path resolved = path.isAbsolute() ? path : new Path(this.fs.getWorkingDirectory(), path);
    Path trashTarget = PathUtils.mergePaths(this.trashLocation, resolved);
    final Path trashParent = trashTarget.getParent();
    if (!this.fs.exists(trashParent)) {
        this.fs.mkdirs(trashParent);
    } else if (this.fs.exists(trashTarget)) {
        // Do not clobber an earlier trashed copy of the same path.
        trashTarget = trashTarget.suffix("_" + System.currentTimeMillis());
    }
    return this.fs.rename(resolved, trashTarget);
}
|
python
|
def sort_segment_points(Aps, Bps):
    """Takes two line segments and sorts all their points,
    so that they form a continuous path

    Args:
        Aps: Array of tracktotrip.Point
        Bps: Array of tracktotrip.Point
    Returns:
        Array with points ordered
    """
    mid = []
    # j: index of the first B point not yet merged into the result.
    j = 0
    mid.append(Aps[0])
    for i in range(len(Aps)-1):
        # Distance to the next A point bounds how close a B point must be
        # to be inserted between the two A points.
        dist = distance_tt_point(Aps[i], Aps[i+1])
        for m in range(j, len(Bps)):
            distm = distance_tt_point(Aps[i], Bps[m])
            if dist > distm:
                # Candidate B point is closer than the next A point; insert
                # it only when it lies in the direction of travel.
                direction = dot(normalize(line(Aps[i].gen2arr(), Aps[i+1].gen2arr())), normalize(Bps[m].gen2arr()))
                if direction > 0:
                    j = m + 1
                    mid.append(Bps[m])
                # NOTE(review): at most one B point is considered per A
                # segment (break after the first close candidate).
                break
        mid.append(Aps[i+1])
    # Append any B points left over after the interleaving pass.
    for m in range(j, len(Bps)):
        mid.append(Bps[m])
    return mid
|
java
|
/**
 * Returns a copy of this date-time with the specified number of
 * nanoseconds subtracted.
 *
 * @param nanos the nanoseconds to subtract, may be negative
 * @return the resulting date-time
 */
public ZonedDateTime minusNanos(long nanos) {
    // Long.MIN_VALUE has no positive counterpart, so split the negation
    // into two additions.
    if (nanos == Long.MIN_VALUE) {
        return plusNanos(Long.MAX_VALUE).plusNanos(1);
    }
    return plusNanos(-nanos);
}
|
python
|
def kinetic(alpha1,lmn1,A,alpha2,lmn2,B):
    """
    The full form of the kinetic energy integral.

    alpha1, alpha2: exponents of the two primitives.
    lmn1, lmn2:     (l, m, n) angular-momentum tuples.
    A, B:           centers of the two primitives.

    >>> isclose(kinetic(1,(0,0,0),array((0,0,0),'d'),1,(0,0,0),array((0,0,0),'d')),2.953052)
    True
    """
    l1,m1,n1 = lmn1
    l2,m2,n2 = lmn2
    # term0: the plain overlap scaled by the second exponent and total
    # angular momentum of the second primitive.
    term0 = alpha2*(2*(l2+m2+n2)+3)*\
            overlap(alpha1,(l1,m1,n1),A,\
                    alpha2,(l2,m2,n2),B)
    # term1: overlaps with the second primitive's angular momentum raised
    # by two along each axis.
    term1 = -2*pow(alpha2,2)*\
            (overlap(alpha1,(l1,m1,n1),A,
                     alpha2,(l2+2,m2,n2),B)
             + overlap(alpha1,(l1,m1,n1),A,
                       alpha2,(l2,m2+2,n2),B)
             + overlap(alpha1,(l1,m1,n1),A,
                       alpha2,(l2,m2,n2+2),B))
    # term2: overlaps with the angular momentum lowered by two along each
    # axis, weighted by l(l-1) etc. (zero when the component is < 2).
    term2 = -0.5*(l2*(l2-1)*overlap(alpha1,(l1,m1,n1),A,
                                    alpha2,(l2-2,m2,n2),B) +
                  m2*(m2-1)*overlap(alpha1,(l1,m1,n1),A,
                                    alpha2,(l2,m2-2,n2),B) +
                  n2*(n2-1)*overlap(alpha1,(l1,m1,n1),A,
                                    alpha2,(l2,m2,n2-2),B))
    return term0+term1+term2
|
java
|
/**
 * Adds an item detail definition to this list, binding the detail to this
 * list's id before registering it by its own id.
 *
 * @param itemDetail the item detail definition to add
 */
public void addItemDetails(CmsListItemDetails itemDetail) {
    itemDetail.setListId(getListId());
    m_itemDetails.addIdentifiableObject(itemDetail.getId(), itemDetail);
}
|
python
|
def post_register_hook(self, verbosity=1):
    """Pull Docker images needed by processes after registering."""
    if getattr(settings, 'FLOW_DOCKER_DONT_PULL', False):
        # Pulling is explicitly disabled in settings.
        return
    call_command('list_docker_images', pull=True, verbosity=verbosity)
|
python
|
def showMenu(self, point):
    """
    Displays the context menu for this filter widget and handles the
    chosen action.
    """
    menu = QMenu(self)
    edit_action = menu.addAction('Edit quick filter...')
    # Blocks until the user picks an action (or dismisses the menu).
    triggered = menu.exec_(self.mapToGlobal(point))
    if triggered == edit_action:
        text, accepted = XTextEdit.getText(self.window(),
                                           'Edit Format',
                                           'Format:',
                                           self.filterFormat(),
                                           wrapped=False)
        if accepted:
            self.setFilterFormat(text)
|
java
|
/**
 * Marshalls the given request into the protocol representation.
 *
 * @param createCampaignRequest the request to marshall (must not be null)
 * @param protocolMarshaller the marshaller to write the fields to
 * @throws SdkClientException when the request is null or marshalling fails
 */
public void marshall(CreateCampaignRequest createCampaignRequest, ProtocolMarshaller protocolMarshaller) {
    if (createCampaignRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(createCampaignRequest.getApplicationId(), APPLICATIONID_BINDING);
        protocolMarshaller.marshall(createCampaignRequest.getWriteCampaignRequest(), WRITECAMPAIGNREQUEST_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Builds the metadata describing non-generated content for the given macro
 * parameter: the serialized content type (the parameter's display type when
 * declared by the descriptor, otherwise the default parameter type) plus
 * the parameter name.
 *
 * @param parameterName the name of the macro parameter
 * @return the populated metadata
 */
@Unstable
protected MetaData getNonGeneratedContentMetaData(String parameterName)
{
    // Start from the default and override it only when the descriptor
    // declares a display type for this parameter.
    Type contentType = DefaultParameterDescriptor.DEFAULT_PARAMETER_TYPE;
    if (this.macroDescriptor != null
        && this.macroDescriptor.getParameterDescriptorMap() != null
        && this.macroDescriptor.getParameterDescriptorMap().containsKey(parameterName)) {
        contentType = this.macroDescriptor.getParameterDescriptorMap().get(parameterName).getDisplayType();
    }

    MetaData metaData = new MetaData();
    metaData.addMetaData(MetaData.NON_GENERATED_CONTENT, ReflectionUtils.serializeType(contentType));
    metaData.addMetaData(MetaData.PARAMETER_NAME, parameterName);
    return metaData;
}
|
java
|
/**
 * Validates this resource's fields for the given operation.
 *
 * @param operationType the operation being validated
 * @throws Exception when any field violates its constraints
 */
protected void validate(String operationType) throws Exception
{
    super.validate(operationType);

    // image_name: required, length 1..128.
    MPSString image_name_validator = new MPSString();
    image_name_validator.setConstraintIsReq(MPSConstants.GENERIC_CONSTRAINT, true);
    image_name_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 128);
    image_name_validator.setConstraintMinStrLen(MPSConstants.GENERIC_CONSTRAINT, 1);
    image_name_validator.validate(operationType, image_name, "\"image_name\"");

    // doc_file: length 1..128 (no required constraint is set).
    MPSString doc_file_validator = new MPSString();
    doc_file_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 128);
    doc_file_validator.setConstraintMinStrLen(MPSConstants.GENERIC_CONSTRAINT, 1);
    doc_file_validator.validate(operationType, doc_file, "\"doc_file\"");

    // ns_ip_address_arr: each element validated as a required IP address.
    MPSIPAddress ns_ip_address_arr_validator = new MPSIPAddress();
    ns_ip_address_arr_validator.setConstraintIsReq(MPSConstants.GENERIC_CONSTRAINT, true);
    if(ns_ip_address_arr != null)
    {
        for(int i=0; i<ns_ip_address_arr.length; i++)
        {
            ns_ip_address_arr_validator.validate(operationType, ns_ip_address_arr[i], "ns_ip_address_arr["+i+"]");
        }
    }

    // act_id: basic string validation with no extra constraints.
    MPSString act_id_validator = new MPSString();
    act_id_validator.validate(operationType, act_id, "\"act_id\"");
}
|
java
|
/**
 * Marshalls the given request into the protocol representation.
 *
 * @param detachSecurityProfileRequest the request to marshall (must not be null)
 * @param protocolMarshaller the marshaller to write the fields to
 * @throws SdkClientException when the request is null or marshalling fails
 */
public void marshall(DetachSecurityProfileRequest detachSecurityProfileRequest, ProtocolMarshaller protocolMarshaller) {
    if (detachSecurityProfileRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(detachSecurityProfileRequest.getSecurityProfileName(), SECURITYPROFILENAME_BINDING);
        protocolMarshaller.marshall(detachSecurityProfileRequest.getSecurityProfileTargetArn(), SECURITYPROFILETARGETARN_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def _command(self, event, command, *args, **kwargs):
"""
Context state controller.
Check whether the transition is possible or not, it executes it and
triggers the Hooks with the pre_* and post_* events.
@param event: (str) event generated by the command.
@param command: (virDomain.method) state transition to impose.
@raise: RuntimeError.
"""
self._assert_transition(event)
self.trigger('pre_%s' % event, **kwargs)
self._execute_command(command, *args)
self.trigger('post_%s' % event, **kwargs)
|
java
|
/**
 * Returns all triples with labels applied, sorted by subject, then
 * predicate, then object.
 *
 * @return the sorted list of labelled triples
 */
public List<LabelledTriple> getTriples() {
    return triples.stream().map(this::labelTriple)
            .sorted(sortSubjects.thenComparing(sortPredicates).thenComparing(sortObjects)).collect(toList());
}
|
python
|
def watch_one_event(event_filter, callback, timeout_callback, timeout, args,
                    start_time=None):
    """
    Start to watch one event.

    Polls `event_filter` every 0.1 seconds until it yields an entry, then
    invokes ``callback(event, *args)`` once and returns. When `timeout`
    (seconds) elapses first, invokes ``timeout_callback(*args)`` if given,
    otherwise ``callback(None, *args)``.

    :param event_filter: object exposing ``get_all_entries()``
    :param callback: called with the first event, or ``None`` on timeout
    :param timeout_callback: optional callable invoked on timeout
    :param timeout: seconds to wait; a falsy value waits forever
    :param args: extra positional arguments for the callbacks (may be None)
    :param start_time: epoch seconds the wait started; defaults to now
    :return: None
    """
    if timeout and not start_time:
        start_time = int(datetime.now().timestamp())

    if not args:
        args = []

    while True:
        try:
            events = event_filter.get_all_entries()
            if events:
                callback(events[0], *args)
                return
        except Exception as err:
            # Was `except (ValueError, Exception)`: the tuple was redundant
            # since Exception already covers ValueError.
            # ignore error, but log it
            logger.debug(f'Got error grabbing keeper events: {str(err)}')

        time.sleep(0.1)
        if timeout:
            elapsed = int(datetime.now().timestamp()) - start_time
            if elapsed > timeout:
                if timeout_callback:
                    timeout_callback(*args)
                else:
                    callback(None, *args)
                break
|
python
|
def GetMACBRepresentationFromDescriptions(self, timestamp_descriptions):
    """Determines the MACB representation from the timestamp descriptions.

    MACB representation is a shorthand for representing one or more of
    modification, access, change, birth timestamp descriptions as the letters
    "MACB" or a "." if the corresponding timestamp is not set.

    Note that this is an output format shorthand and does not guarantee that
    the timestamps represent the same occurrence.

    Args:
      timestamp_descriptions (list[str]): timestamp descriptions, which are
          defined in definitions.TIME_DESCRIPTIONS.

    Returns:
      str: MACB representation.
    """
    # Each position accepts either the legacy short name or the canonical
    # description constant.
    letter_checks = (
        ('M', 'mtime', definitions.TIME_DESCRIPTION_MODIFICATION),
        ('A', 'atime', definitions.TIME_DESCRIPTION_LAST_ACCESS),
        ('C', 'ctime', definitions.TIME_DESCRIPTION_CHANGE),
        ('B', 'crtime', definitions.TIME_DESCRIPTION_CREATION))

    letters = []
    for letter, legacy_name, description in letter_checks:
        if (legacy_name in timestamp_descriptions or
            description in timestamp_descriptions):
            letters.append(letter)
        else:
            letters.append('.')

    return ''.join(letters)
|
java
|
@Override
public int compareTo(final RDFItem o) {
    // Order by id when both sides have one.
    if (id != null && o != null && o.id != null) {
        return id.compareTo(o.id);
    }
    // NOTE(review): returning -1 whenever either id (or the argument) is
    // null makes the ordering asymmetric — a.compareTo(b) and
    // b.compareTo(a) can both be -1 — and differs from the Comparable
    // contract, which expects compareTo(null) to throw
    // NullPointerException. Confirm whether callers rely on this before
    // changing it.
    return -1;
}
|
python
|
def apply_transformation(self, structure, return_ranked_list=False):
    """
    Args:
        structure (Structure): Input structure to dope
        return_ranked_list (bool | int): Whether to return a ranked list of
            structures; an int limits how many are returned.

    Returns:
        [{"structure": Structure, "energy": float}]
    """
    comp = structure.composition
    logger.info("Composition: %s" % comp)

    # Ensure the structure is oxidation-state decorated; decorate it via
    # bond-valence analysis when any species lacks an oxi_state.
    for sp in comp:
        try:
            sp.oxi_state
        except AttributeError:
            analyzer = BVAnalyzer()
            structure = analyzer.get_oxi_state_decorated_structure(
                structure)
            comp = structure.composition
            break

    ox = self.dopant.oxi_state
    radius = self.dopant.ionic_radius

    # Prefer isovalent substitution sites with a similar ionic radius.
    compatible_species = [
        sp for sp in comp if sp.oxi_state == ox and
        abs(sp.ionic_radius / radius - 1) < self.ionic_radius_tol]

    if (not compatible_species) and self.alio_tol:
        # We only consider aliovalent doping if there are no compatible
        # isovalent species.
        compatible_species = [
            sp for sp in comp
            if abs(sp.oxi_state - ox) <= self.alio_tol and
            abs(sp.ionic_radius / radius - 1) < self.ionic_radius_tol and
            sp.oxi_state * ox >= 0]

    if self.allowed_doping_species is not None:
        # Only keep allowed doping species.
        compatible_species = [
            sp for sp in compatible_species
            if sp in [get_el_sp(s) for s in self.allowed_doping_species]]

    logger.info("Compatible species: %s" % compatible_species)

    # Scale each lattice vector so the supercell reaches min_length.
    lengths = structure.lattice.abc
    scaling = [max(1, int(round(math.ceil(self.min_length / x))))
               for x in lengths]
    logger.info("Lengths are %s" % str(lengths))
    logger.info("Scaling = %s" % str(scaling))

    all_structures = []
    t = EnumerateStructureTransformation(**self.kwargs)

    for sp in compatible_species:
        supercell = structure * scaling
        nsp = supercell.composition[sp]
        if sp.oxi_state == ox:
            # Isovalent: simple one-for-one substitution.
            supercell.replace_species({sp: {sp: (nsp - 1) / nsp,
                                            self.dopant: 1 / nsp}})
            logger.info("Doping %s for %s at level %.3f" % (
                sp, self.dopant, 1 / nsp))
        elif self.codopant:
            # Charge-balance with a codopant of the mirrored oxidation state.
            codopant = _find_codopant(sp, 2 * sp.oxi_state - ox)
            supercell.replace_species({sp: {sp: (nsp - 2) / nsp,
                                            self.dopant: 1 / nsp,
                                            codopant: 1 / nsp}})
            logger.info("Doping %s for %s + %s at level %.3f" % (
                sp, self.dopant, codopant, 1 / nsp))
        elif abs(sp.oxi_state) < abs(ox):
            # Strategy: replace the target species with a
            # combination of dopant and vacancy.
            # We will choose the lowest oxidation state species as a
            # vacancy compensation species as it is likely to be lower in
            # energy
            sp_to_remove = min([s for s in comp if s.oxi_state * ox > 0],
                               key=lambda ss: abs(ss.oxi_state))

            if sp_to_remove == sp:
                common_charge = lcm(int(abs(sp.oxi_state)), int(abs(ox)))
                ndopant = common_charge / abs(ox)
                nsp_to_remove = common_charge / abs(sp.oxi_state)
                logger.info("Doping %d %s with %d %s." %
                            (nsp_to_remove, sp, ndopant, self.dopant))
                supercell.replace_species(
                    {sp: {sp: (nsp - nsp_to_remove) / nsp,
                          self.dopant: ndopant / nsp}})
            else:
                ox_diff = int(abs(round(sp.oxi_state - ox)))
                vac_ox = int(abs(sp_to_remove.oxi_state))
                common_charge = lcm(vac_ox, ox_diff)
                ndopant = common_charge / ox_diff
                nx_to_remove = common_charge / vac_ox
                nx = supercell.composition[sp_to_remove]
                logger.info("Doping %d %s with %s and removing %d %s." %
                            (ndopant, sp, self.dopant,
                             nx_to_remove, sp_to_remove))
                supercell.replace_species(
                    {sp: {sp: (nsp - ndopant) / nsp,
                          self.dopant: ndopant / nsp},
                     sp_to_remove: {
                         sp_to_remove: (nx - nx_to_remove) / nx}})
        elif abs(sp.oxi_state) > abs(ox):
            # Strategy: replace the target species with dopant and also
            # remove some opposite charged species for charge neutrality
            if ox > 0:
                sp_to_remove = max(supercell.composition.keys(),
                                   key=lambda el: el.X)
            else:
                sp_to_remove = min(supercell.composition.keys(),
                                   key=lambda el: el.X)
            # Confirm species are of opposite oxidation states.
            assert sp_to_remove.oxi_state * sp.oxi_state < 0

            ox_diff = int(abs(round(sp.oxi_state - ox)))
            anion_ox = int(abs(sp_to_remove.oxi_state))
            nx = supercell.composition[sp_to_remove]
            common_charge = lcm(anion_ox, ox_diff)
            ndopant = common_charge / ox_diff
            nx_to_remove = common_charge / anion_ox
            logger.info("Doping %d %s with %s and removing %d %s." %
                        (ndopant, sp, self.dopant,
                         nx_to_remove, sp_to_remove))
            supercell.replace_species(
                {sp: {sp: (nsp - ndopant) / nsp,
                      self.dopant: ndopant / nsp},
                 sp_to_remove: {sp_to_remove: (nx - nx_to_remove) / nx}})

        # Enumerate the distinct orderings of the partially-substituted cell.
        ss = t.apply_transformation(
            supercell, return_ranked_list=self.max_structures_per_enum)
        logger.info("%s distinct structures" % len(ss))
        all_structures.extend(ss)

    logger.info("Total %s doped structures" % len(all_structures))
    if return_ranked_list:
        return all_structures[:return_ranked_list]

    return all_structures[0]["structure"]
|
python
|
def after(self, time, function, args = []):
    """Call `function` after `time` milliseconds.

    The scheduled callback id is recorded in ``self._callback`` keyed by
    the function, alongside a False flag.

    NOTE(review): the mutable default ``args=[]`` is shared across calls —
    safe only as long as callers never mutate it; confirm. Also note the
    parameter ``time`` shadows any ``time`` module import.
    """
    callback_id = self.tk.after(time, self._call_wrapper, time, function, *args)
    self._callback[function] = [callback_id, False]
|
java
|
/**
 * Sends a mail message over SMTP.
 *
 * Builds a MIME message from {@code message}, logs its headers, connects
 * to the configured SMTP host and sends it. The transport is now closed in
 * a finally block, so it is no longer leaked when connect/send fails.
 *
 * @param message the mail message to send (must not be null)
 * @return {@link MailSendResult#ok()} on success, {@code fail()} otherwise
 */
@Override
public MailSendResult send(MailMessage message) {
    Assert.notNull(message, "Missing mail message!");

    Properties props = new Properties();
    props.put("mail.transport.protocol", "smtp");
    props.put("mail.smtp.host", smtpHost);
    props.put("mail.smtp.port", smtpPort);
    Session session = getSession(props, smtpUsername, smtpPassword);

    Transport transport = null;
    try {
        // build mime message
        Message msg = message.getMessage(session);

        // Log all headers for diagnostics.
        Enumeration enumer = msg.getAllHeaders();
        while (enumer.hasMoreElements()) {
            Header header = (Header) enumer.nextElement();
            log.info(header.getName() + ": " + header.getValue());
        }

        log.info("Getting transport...");
        transport = session.getTransport("smtp");

        log.info("Connecting to SMTP server: " + smtpHost + ":" + smtpPort);
        transport.connect();
        transport.sendMessage(msg, msg.getAllRecipients());
    }
    catch (Exception e) {
        log.error(e.getMessage(), e);
        return MailSendResult.fail();
    }
    finally {
        // Close the transport even when sending failed (was leaked before).
        if (transport != null) {
            try {
                log.info("Closing transport...");
                transport.close();
            }
            catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }
    return MailSendResult.ok();
}
|
java
|
/**
 * Finds an entity by its key via an HTTP GET against CouchDB.
 *
 * Builds the CouchDB document id from the entity's table name and the
 * (possibly embedded) key, fetches the JSON document and maps it back to
 * an entity instance.
 *
 * @param entityClass the entity class to load
 * @param key the primary key (a JsonElement key is unwrapped to a string)
 * @return the entity, or null when the document lacks the id column
 *         (treated as deleted)
 * @throws KunderaException wrapping any lookup/IO failure
 */
@Override
public Object find(Class entityClass, Object key)
{
    HttpResponse response = null;
    EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass);

    MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
        entityMetadata.getPersistenceUnit());
    try
    {
        // Unwrap JSON-typed keys into their raw string value.
        if (key instanceof JsonElement)
        {
            key = ((JsonElement) key).getAsString();
        }
        // Document _id = table name + key (embedded ids are serialized by
        // the object mapper).
        String _id = null;
        if (metaModel.isEmbeddable(entityMetadata.getIdAttribute().getBindableJavaType()))
        {
            Field field = (Field) entityMetadata.getIdAttribute().getJavaMember();
            EmbeddableType embeddableType = metaModel.embeddable(entityMetadata.getIdAttribute()
                .getBindableJavaType());
            _id = CouchDBObjectMapper.get_Id(field, key, embeddableType, entityMetadata.getTableName());
        }
        else
        {
            _id = entityMetadata.getTableName() + PropertyAccessorHelper.getString(key);
        }

        // GET <protocol>://<host>:<port>/<schema>/<_id>
        URI uri = new URI(CouchDBConstants.PROTOCOL, null, httpHost.getHostName(), httpHost.getPort(),
            CouchDBConstants.URL_SEPARATOR + entityMetadata.getSchema().toLowerCase()
                + CouchDBConstants.URL_SEPARATOR + _id, null, null);
        HttpGet get = new HttpGet(uri);
        get.addHeader("Accept", "application/json");
        response = httpClient.execute(httpHost, get, CouchDBUtils.getContext(httpHost));

        InputStream content = response.getEntity().getContent();
        Reader reader = new InputStreamReader(content);
        JsonObject jsonObject = gson.fromJson(reader, JsonObject.class);

        // Check for deleted object. if object is deleted then return null.
        if (jsonObject.get(((AbstractAttribute) entityMetadata.getIdAttribute()).getJPAColumnName()) == null)
        {
            return null;
        }
        return CouchDBObjectMapper.getEntityFromJson(entityClass, entityMetadata, jsonObject,
            entityMetadata.getRelationNames(), kunderaMetadata);
    }
    catch (Exception e)
    {
        log.error("Error while finding object by key {}, Caused by {}.", key, e);
        throw new KunderaException(e);
    }
    finally
    {
        // Always release the HTTP response content.
        closeContent(response);
    }
}
|
java
|
/**
 * Loads every known zone and meta-zone name set into the shared names trie.
 */
private void addAllNamesIntoTrie() {
    // Register each name set under its time-zone id...
    _tzNamesMap.forEach((tzId, names) -> names.addAsTimeZoneIntoTrie(tzId, _namesTrie));
    // ...and each meta-zone name set under its meta-zone id.
    _mzNamesMap.forEach((mzId, names) -> names.addAsMetaZoneIntoTrie(mzId, _namesTrie));
}
|
python
|
def fetch_api_key_from_uname(username, password, server_url):
    """
    Retrieves an API key from the One Codex webpage given the username and password.

    Returns the API key string, or None when login fails, the CSRF token
    cannot be found on the login page, or the response is not the expected
    JSON payload.
    """
    # TODO: Hit programmatic endpoint to fetch JWT key, not API key
    with requests.Session() as session:
        # get the login page normally
        text = session.get(server_url + "login").text
        # retrieve the CSRF token out of it; bail out instead of raising an
        # opaque AttributeError when the page layout changes (the original
        # called .group(1) on a possibly-None match)
        match = re.search('type="hidden" value="([^"]+)"', text)
        if match is None:
            return None
        # and resubmit using the username/password *and* the CSRF
        login_data = {
            "email": username,
            "password": password,
            "csrf_token": match.group(1),
            "next": "/api/get_token",
        }
        page = session.post(server_url + "login", data=login_data)
        try:
            key = page.json()["key"]
        except (ValueError, KeyError):  # ValueError includes simplejson.decoder.JSONDecodeError
            key = None
    return key
|
java
|
  /**
   * Builds the Solr response section for a single span statistics component.
   *
   * @param span the span component holding the collector, stats configuration
   *             and optional sub-functions
   * @param encode when {@code true}, serialize the result for shard merging;
   *               when {@code false}, expose the raw result and rewrite keys.
   *               Must be non-null (auto-unboxed) — NOTE(review): confirm
   *               callers never pass null.
   * @return the populated response map for this span
   * @throws IOException if closing a function collector or encoding fails
   */
  @SuppressWarnings("unchecked")
  private SimpleOrderedMap<Object> createSpan(ComponentSpan span,
      Boolean encode) throws IOException {
    SimpleOrderedMap<Object> mtasSpanResponse = new SimpleOrderedMap<>();
    mtasSpanResponse.add("key", span.key);
    // Map the span's collector to a per-function-key result table so the
    // span result can carry its function results alongside its own stats.
    HashMap<MtasDataCollector<?, ?>, HashMap<String, MtasSolrMtasResult>> functionData = new HashMap<>();
    HashMap<String, MtasSolrMtasResult> functionDataItem = new HashMap<>();
    functionData.put(span.dataCollector, functionDataItem);
    if (span.functions != null) {
      for (SubComponentFunction function : span.functions) {
        // Close each function collector before wrapping it in a result.
        function.dataCollector.close();
        functionDataItem.put(function.key, new MtasSolrMtasResult(
            function.dataCollector, new String[] { function.dataType },
            new String[] { function.statsType },
            new SortedSet[] { function.statsItems }, new List[] { null },
            new String[] { null }, new String[] { null }, new Integer[] { 0 },
            new Integer[] { Integer.MAX_VALUE }, null));
      }
    }
    MtasSolrMtasResult data = new MtasSolrMtasResult(span.dataCollector,
        span.dataType, span.statsType, span.statsItems, null, functionData);
    if (encode) {
      // Shard response: ship the encoded result for later merging.
      mtasSpanResponse.add("_encoded_data", MtasSolrResultUtil.encode(data));
    } else {
      // Final response: expose the collector result directly and rewrite keys.
      mtasSpanResponse.add(span.dataCollector.getCollectorType(), data);
      MtasSolrResultUtil.rewrite(mtasSpanResponse, searchComponent);
    }
    return mtasSpanResponse;
  }
|
python
|
def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
    """Produce a list of files corresponding to format_str located at data_path.

    This routine is invoked by pysat and is not intended for direct use by the
    end user. Multiple data levels may be supported via the 'tag' and 'sat_id'
    input strings.

    Parameters
    ----------
    tag : string ('')
        Tag name used to identify a particular data set to be loaded.
        This input is nominally provided by pysat itself.
    sat_id : string ('')
        Satellite ID used to identify a particular data set to be loaded.
        This input is nominally provided by pysat itself.
    data_path : string
        Full path to directory containing files to be loaded. This
        is provided by pysat. The user may specify their own data path
        at Instrument instantiation and it will appear here.
    format_str : string (None)
        String template used to parse the datasets' filenames. If a user
        supplies a template string at Instrument instantiation
        then it will appear here, otherwise defaults to None.

    Returns
    -------
    pandas.Series
        Series of filename strings, including the path, indexed by datetime.

    Examples
    --------
    ::

        If a filename is SPORT_L2_IVM_2019-01-01_v01r0000.NC then the template
        is 'SPORT_L2_IVM_{year:04d}-{month:02d}-{day:02d}_v{version:02d}r{revision:04d}.NC'

    Note
    ----
    The returned Series should not have any duplicate datetimes. If there are
    multiple versions of a file, the most recent version should be kept and the
    rest discarded. This routine uses the pysat.Files.from_os constructor, thus
    the returned files are up to pysat specifications.

    Normally the format_str for each supported tag and sat_id is defined within
    this routine. However, as this is a generic routine, those definitions
    can't be made here. This method could be used in an instrument-specific
    module where the list_files routine in the new package defines the
    format_str based upon inputs, then calls this routine passing both
    data_path and format_str.

    Alternately, the list_files routine in nasa_cdaweb_methods may also be used
    and has more built-in functionality. Supported tags and format strings may
    be defined within the new instrument module and passed as arguments to
    nasa_cdaweb_methods.list_files.

    For an example of using this routine, see pysat/instrument/cnofs_ivm.py or
    cnofs_vefi, cnofs_plp, omni_hro, timed_see, etc.
    """
    # Delegate all filename parsing/version de-duplication to pysat's
    # OS-backed Files constructor.
    return pysat.Files.from_os(data_path=data_path, format_str=format_str)
|
java
|
/**
 * Sets the MongoDB version to use for the embedded instance.
 *
 * @param version the desired MongoDB version; must not be {@code null}
 * @return this builder, for chaining
 * @throws NullPointerException if {@code version} is {@code null}
 */
public EmbeddedMongoDB withVersion(Version.Main version) {
    // requireNonNull returns its argument, so validate and assign in one step.
    this.version = Objects.requireNonNull(version, "version can not be null");
    return this;
}
|
java
|
/**
 * Returns a new {@link DateTime} produced by shifting {@code date} by
 * {@code offset} days (negative values shift into the past).
 *
 * @param date   the base date
 * @param offset number of days to add (may be negative)
 * @return the shifted date-time
 */
public static DateTime offsetDay(Date date, int offset) {
    final DateField unit = DateField.DAY_OF_YEAR;
    return offset(date, unit, offset);
}
|
java
|
/**
 * Creates a child directory named {@code name} under {@code parent} and
 * returns an entry describing it.
 *
 * @param parent the parent directory entry
 * @param name   the name of the directory to create
 * @return the entry for the (possibly pre-existing) child directory
 * @throws UnsupportedOperationException if this store is read-only
 * @throws IllegalStateException if the directory could not be created
 */
public DirectoryEntry mkdir( DirectoryEntry parent, String name )
    {
        if ( readOnly )
        {
            throw new UnsupportedOperationException();
        }
        File file = new File( toFile( parent ), name );
        // mkdirs() returns false both on failure and when the directory
        // already exists; the original ignored the result and could hand
        // back an entry for a directory that was never created.
        if ( !file.mkdirs() && !file.isDirectory() )
        {
            throw new IllegalStateException( "Failed to create directory: " + file );
        }
        return new DefaultDirectoryEntry( this, parent, name );
    }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.