language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public IfcElectricTimeControlTypeEnum createIfcElectricTimeControlTypeEnumFromString(EDataType eDataType,
String initialValue) {
// Resolve the enum literal; EMF's generated get() returns null for unknown literals.
IfcElectricTimeControlTypeEnum result = IfcElectricTimeControlTypeEnum.get(initialValue);
if (result == null)
// Fail fast so deserialization surfaces the invalid literal and its data type.
throw new IllegalArgumentException(
"The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
return result;
} |
java | public EClass getIfcTextFontName() {
// Lazily resolve the EClass from the registered package on first access.
if (ifcTextFontNameEClass == null) {
// NOTE(review): 747 is a generated classifier index — confirm it matches
// the generated Ifc2x3tc1 package model ordering.
ifcTextFontNameEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(747);
}
return ifcTextFontNameEClass;
} |
python | def read_bom(data):
"""Read the byte order mark in the text, if present, and
return the encoding represented by the BOM and the BOM.
If no BOM can be detected, (None, None) is returned.
"""
# common case is no BOM, so this is fast
if data and data[0] in _FIRST_CHARS:
for bom, encoding in _BOM_TABLE:
if data.startswith(bom):
return encoding, bom
return None, None |
python | def convert_odt_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts an OpenOffice ODT file to text.
Pass either a filename or a binary object.
"""
# We can't use exactly the same method as for DOCX files, using docx:
# sometimes that works, but sometimes it falls over with:
# KeyError: "There is no item named 'word/document.xml' in the archive"
with get_filelikeobject(filename, blob) as fp:
z = zipfile.ZipFile(fp)
tree = ElementTree.fromstring(z.read('content.xml'))
# ... may raise zipfile.BadZipfile
textlist = [] # type: List[str]
for element in tree.iter():
if element.text:
textlist.append(element.text.strip())
return '\n\n'.join(textlist) |
java | public boolean process(ContentEvent event) {
// distinguish between ClusteringContentEvent and ClusteringEvaluationContentEvent
if (event instanceof ClusteringContentEvent) {
ClusteringContentEvent cce = (ClusteringContentEvent) event;
// Every clustering event is forwarded downstream unchanged.
outputStream.put(event);
if (cce.isSample()) {
// Sampled instances additionally feed the evaluation stream;
// numInstances doubles as a running point id (post-incremented).
evaluationStream.put(new ClusteringEvaluationContentEvent(null, new DataPoint(cce.getInstance(), numInstances++), cce.isLastEvent()));
}
} else if (event instanceof ClusteringEvaluationContentEvent) {
// Evaluation events bypass the output stream entirely.
evaluationStream.put(event);
}
return true;
} |
java | public static Boolean evaluateAsBoolean(Node node, String xPathExpression, NamespaceContext nsContext) {
// Convenience wrapper: evaluate the XPath against the node and coerce the
// result to Boolean via XPathConstants.BOOLEAN.
return (Boolean) evaluateExpression(node, xPathExpression, nsContext, XPathConstants.BOOLEAN);
} |
python | def extract_bytes(mv):
"""Retrieve bytes out of memoryview/buffer or bytes."""
if isinstance(mv, memoryview):
return mv.tobytes() if six.PY3 else bytes(mv)
if isinstance(mv, bytes):
return mv
raise ValueError |
java | public PaymillList<Payment> list( Payment.Filter filter, Payment.Order order, Integer count, Integer offset ) {
// Delegate to the shared REST helper.  NOTE(review): presumably null
// filter/order/count/offset mean "unconstrained" — confirm in RestfulUtils.
return RestfulUtils.list( PaymentService.PATH, filter, order, count, offset, Payment.class, super.httpClient );
} |
java | public static String getErrorResult(Process process, Charset charset) {
InputStream in = null;
try {
// Drain the process's stderr fully, decoded with the supplied charset.
in = process.getErrorStream();
return IoUtil.read(in, charset);
} finally {
// Always release the stream and the process, even if reading failed.
IoUtil.close(in);
destroy(process);
}
} |
java | @Override
public void visitInnerClasses(InnerClasses obj) {
super.visitInnerClasses(obj);
// Visit each inner-class record individually so the visitor sees every entry.
InnerClass[] inner_classes = obj.getInnerClasses();
for (InnerClass inner_class : inner_classes) {
inner_class.accept(this);
}
} |
java | public void setupKeys()
{
// Define the record's key areas: one unique primary key plus three
// non-unique secondary indexes, all fields ascending.
KeyAreaInfo keyArea = null;
keyArea = new KeyAreaInfo(this, Constants.UNIQUE, ID_KEY);
keyArea.addKeyField(ID, Constants.ASCENDING);
// Secondary index on start date/time alone.
keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, START_DATE_TIME_KEY);
keyArea.addKeyField(START_DATE_TIME, Constants.ASCENDING);
// Composite index: anniversary master id, then start date/time.
keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, ANNIV_MASTER_ID_KEY);
keyArea.addKeyField(ANNIV_MASTER_ID, Constants.ASCENDING);
keyArea.addKeyField(START_DATE_TIME, Constants.ASCENDING);
// Composite index: calendar category id, then start date/time.
keyArea = new KeyAreaInfo(this, Constants.NOT_UNIQUE, CALENDAR_CATEGORY_ID_KEY);
keyArea.addKeyField(CALENDAR_CATEGORY_ID, Constants.ASCENDING);
keyArea.addKeyField(START_DATE_TIME, Constants.ASCENDING);
} |
java | @Override
public void eUnset(int featureID) {
// Reset each structural feature to its default value; unknown feature ids
// fall through to the superclass.
switch (featureID) {
case XtextPackage.NAMED_ARGUMENT__PARAMETER:
setParameter((Parameter)null);
return;
case XtextPackage.NAMED_ARGUMENT__VALUE:
setValue((Condition)null);
return;
case XtextPackage.NAMED_ARGUMENT__CALLED_BY_NAME:
setCalledByName(CALLED_BY_NAME_EDEFAULT);
return;
}
super.eUnset(featureID);
} |
python | def add(self, properties):
"""
Add a faked NIC resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'nic',
if not specified.
* Either 'network-adapter-port-uri' (for backing ROCE adapters) or
'virtual-switch-uri'(for backing OSA or Hipersockets adapters) is
required to be specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
This method also updates the 'nic-uris' property in the parent
faked Partition resource, by adding the URI for the faked NIC
resource.
This method also updates the 'connected-vnic-uris' property in the
virtual switch referenced by 'virtual-switch-uri' property,
and sets it to the URI of the faked NIC resource.
Returns:
:class:`zhmcclient_mock.FakedNic`: The faked NIC resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
"""
new_nic = super(FakedNicManager, self).add(properties)
partition = self.parent
# For OSA-backed NICs, reflect the new NIC in the virtual switch
if 'virtual-switch-uri' in new_nic.properties:
vswitch_uri = new_nic.properties['virtual-switch-uri']
# Even though the URI handler when calling this method ensures that
# the vswitch exists, this method can be called by the user as
# well, so we have to handle the possibility that it does not
# exist:
try:
vswitch = self.hmc.lookup_by_uri(vswitch_uri)
except KeyError:
raise InputError("The virtual switch specified in the "
"'virtual-switch-uri' property does not "
"exist: {!r}".format(vswitch_uri))
connected_uris = vswitch.properties['connected-vnic-uris']
if new_nic.uri not in connected_uris:
connected_uris.append(new_nic.uri)
# Create a default device-number if not specified
# NOTE(review): indentation was lost in this dump — per the docstring this
# default should apply to ALL NICs, not only vswitch-backed ones; confirm
# the block is not nested under the 'virtual-switch-uri' branch.
if 'device-number' not in new_nic.properties:
devno = partition.devno_alloc()
new_nic.properties['device-number'] = devno
# Reflect the new NIC in the partition
assert 'nic-uris' in partition.properties
partition.properties['nic-uris'].append(new_nic.uri)
return new_nic |
python | def find_all_by_parameters(self, task_name, session=None, **task_params):
"""
Find tasks with the given task_name and the same parameters as the kwargs.
"""
with self._session(session) as session:
query = session.query(TaskRecord).join(TaskEvent).filter(TaskRecord.name == task_name)
# One aliased join per parameter so each (name, value) pair is matched
# independently against the TaskParameter rows.
for (k, v) in six.iteritems(task_params):
alias = sqlalchemy.orm.aliased(TaskParameter)
query = query.join(alias).filter(alias.name == k, alias.value == v)
tasks = query.order_by(TaskEvent.ts)
# NOTE(review): this is a generator; rows are yielded while the session
# context is still open — callers must consume it before the session is
# closed/reused. Confirm intended lifetime.
for task in tasks:
# Sanity check
assert all(k in task.parameters and v == str(task.parameters[k].value) for (k, v) in six.iteritems(task_params))
yield task |
python | def angle_solver(AA, timeseries, N_max, sign, symNx = 2, throw_out_modes=False):
""" Constructs the matrix A and the vector b from a timeseries of toy
action-angles AA to solve for the vector x = (theta_0,theta_1,theta_2,omega_1,
omega_2,omega_3, dSdx..., dSdy..., dSdz...) where x contains all derivatives
of the Fourier components of the generating function with |n| < N_max """
# First unroll angles
angs = unroll_angles(AA.T[3:].T,sign)
# Same considerations as above
symNz = 2
NNx = range(-N_max, N_max+1, symNx)
NNy = range(-N_max, N_max+1, symNz)
NNz = range(-N_max, N_max+1, symNz)
# Half-space of integer vectors inside the |n| <= N_max sphere (zero vector
# excluded); the opposite half-space is implied by symmetry of the sine terms.
n_vectors = np.array([[i,j,k] for (i,j,k) in product(NNx,NNy,NNz)
if(not(i==0 and j==0 and k==0) # exclude zero vector
and (k>0 # northern hemisphere
or (k==0 and j>0) # half of x-y plane
or (k==0 and j==0 and i>0)) # half of x axis
and np.sqrt(i*i+j*j+k*k)<=N_max # inside sphere
)])
if(throw_out_modes):
n_vectors = np.delete(n_vectors,check_each_direction(n_vectors,angs),axis=0)
nv = len(n_vectors)
# Unknowns: 3 angle offsets + 3 frequencies + 3*nv Fourier derivatives.
n = 3*nv+6
b = np.zeros(shape=(n, ))
a = np.zeros(shape=(n,n))
# Normal-equation blocks for the linear (theta_0, omega) part.
a[:3,:3]=len(AA)*np.identity(3)
a[:3,3:6]=np.sum(timeseries)*np.identity(3)
a[3:6,:3]=a[:3,3:6]
a[3:6,3:6]=np.sum(timeseries*timeseries)*np.identity(3)
# Accumulate the sine cross-terms sample by sample (i = angles, j = time).
for i,j in zip(angs,timeseries):
a[6:6+nv,0]+=-2.*np.sin(np.dot(n_vectors,i))
a[6:6+nv,3]+=-2.*j*np.sin(np.dot(n_vectors,i))
a[6:6+nv,6:6+nv]+=4.*np.outer(np.sin(np.dot(n_vectors,i)),np.sin(np.dot(n_vectors,i)))
b[:3]+=i
b[3:6]+=j*i
b[6:6+nv]+=-2.*i[0]*np.sin(np.dot(n_vectors,i))
b[6+nv:6+2*nv]+=-2.*i[1]*np.sin(np.dot(n_vectors,i))
b[6+2*nv:6+3*nv]+=-2.*i[2]*np.sin(np.dot(n_vectors,i))
# The three angle components share identical sine blocks; copy them over.
a[6+nv:6+2*nv,1]=a[6:6+nv,0]
a[6+2*nv:6+3*nv,2]=a[6:6+nv,0]
a[6+nv:6+2*nv,4]=a[6:6+nv,3]
a[6+2*nv:6+3*nv,5]=a[6:6+nv,3]
a[6+nv:6+2*nv,6+nv:6+2*nv]=a[6:6+nv,6:6+nv]
a[6+2*nv:6+3*nv,6+2*nv:6+3*nv]=a[6:6+nv,6:6+nv]
# Symmetrize the first six rows against the corresponding columns.
a[:6,:]=a[:,:6].T
return np.array(solve(a,b)) |
java | Field dereference(Field self, String field) {
// Strip the extension-field bracket syntax: "[foo.bar]" -> "foo.bar".
if (field.startsWith("[") && field.endsWith("]")) {
field = field.substring(1, field.length() - 1);
}
Type type = protoTypeNames.get(self.type().toString());
if (type instanceof MessageType) {
MessageType messageType = (MessageType) type;
// Prefer a declared field, then fall back to registered extensions.
Field messageField = messageType.field(field);
if (messageField != null) return messageField;
Map<String, Field> typeExtensions = messageType.extensionFieldsMap();
Field extensionField = resolve(field, typeExtensions);
if (extensionField != null) return extensionField;
}
return null; // Unable to traverse this field path.
} |
java | public T withSerialConsistencyLevel(ConsistencyLevel serialConsistencyLevel) {
// Fluent setter: record the serial consistency level on the shared options
// and return the concrete builder type for chaining.
getOptions().setSerialCL(Optional.of(serialConsistencyLevel));
return getThis();
} |
python | def known(self, object):
""" get the type specified in the object's metadata """
try:
md = object.__metadata__
known = md.sxtype
return known
except:
pass |
java | public String formatDurationUnrounded(Date then)
{
// Convert the timestamp into an approximate Duration, then delegate to the
// Duration-based overload.
Duration duration = approximateDuration(then);
return formatDurationUnrounded(duration);
} |
java | public static TimeBasedKeys create( int bitsUsedInCounter ) {
CheckArg.isPositive(bitsUsedInCounter, "bitsUsedInCounter");
// NOTE(review): the leading-zero count of the current epoch millis bounds
// how many low bits remain for the counter without overflowing a long —
// this bound shrinks as time advances; confirm that is the intent.
int maxAvailableBitsToShift = Long.numberOfLeadingZeros(System.currentTimeMillis());
CheckArg.isLessThan(bitsUsedInCounter, maxAvailableBitsToShift, "bitsUsedInCounter");
return new TimeBasedKeys((short)bitsUsedInCounter);
} |
java | public static List<String> getManagableOUs(CmsObject cms) {
List<String> ous = new ArrayList<String>();
try {
// Collect the OUs in which the current user holds the account-manager role.
for (CmsRole role : OpenCms.getRoleManager().getRolesOfUser(
cms,
cms.getRequestContext().getCurrentUser().getName(),
"",
true,
false,
true)) {
if (role.getRoleName().equals(CmsRole.ACCOUNT_MANAGER.getRoleName())) {
// The root OU ("") is placed first; others keep discovery order.
if (role.getOuFqn().equals("")) {
ous.add(0, role.getOuFqn());
} else {
ous.add(role.getOuFqn());
}
}
}
} catch (CmsException e) {
// NOTE(review): exception is swallowed and a partial/empty list is
// returned — confirm this best-effort behavior is intended.
}
return ous;
} |
python | def partymode(self):
"""Put all the speakers in the network in the same group, a.k.a Party
Mode.
This blog shows the initial research responsible for this:
http://blog.travelmarx.com/2010/06/exploring-sonos-via-upnp.html
The trick seems to be (only tested on a two-speaker setup) to tell each
speaker which to join. There's probably a bit more to it if multiple
groups have been defined.
"""
# Tell every other visible zone to join this one
# pylint: disable = expression-not-assigned
[zone.join(self) for zone in self.visible_zones if zone is not self] |
java | public Project.Resources.Resource.Rates.Rate createProjectResourcesResourceRatesRate()
{
// JAXB-style object factory method for the deeply nested Rate element type.
return new Project.Resources.Resource.Rates.Rate();
} |
java | public ConstraintValidatorFactory getConstraintValidatorFactoryOverride(Configuration<?> config) {
ValidationReleasable<ConstraintValidatorFactory> releasable = null;
String cvfClassName = config.getBootstrapConfiguration().getConstraintValidatorFactoryClassName();
// If the validation.xml ConstraintValidatorFactory is null AND the CDI feature is enabled
// we can try to create our own implementation of it.
if (cvfClassName == null && releasableFactory != null) {
releasable = releasableFactory.createConstraintValidatorFactory();
}
if (releasable != null) {
// Track the releasable so it can be cleaned up later; the tracking list
// is created lazily on first use.
if (releasables == null) {
releasables = new LinkedList<ValidationReleasable<?>>();
}
releasables.add(releasable);
return releasable.getInstance();
}
// null tells the caller to fall back to the default factory.
return null;
} |
java | public static boolean isIgnorableInternalName(String className) {
// True for array descriptors and for classes in JVM/vendor/test-infrastructure
// namespaces that callers never want to analyze.  The redundant explicit
// offset argument to startsWith(prefix, 0) was dropped (0 is the default).
return (className.startsWith("[")
|| className.startsWith(JAVA_VM)
|| className.startsWith(JAVAX_VM)
|| className.startsWith(JDK_VM)
|| className.startsWith(SUN_VM)
|| className.startsWith(COM_SUN_VM)
|| className.startsWith(ORG_XML_SAX_VM)
|| className.startsWith(ORG_IETF_JGSS_VM)
|| className.startsWith(ORG_OMG_VM)
|| className.startsWith(ORG_W3C_VM)
|| className.startsWith(XTS_VM)
|| className.startsWith(ORG_MOCKITO_VM)
|| className.startsWith(ORG_JACOCO_AGENT_VM));
} |
python | def as_dict(self, depth=0):
"""Return a dictionary containing only the attributes which map to
an instance's database columns.
:param int depth: Maximum depth to recurse subobjects
:rtype: dict
"""
result_dict = {}
for column in self.__table__.columns.keys():
result_dict[column] = getattr(self, column, None)
# Decimal is not JSON-serializable; stringify it.
if isinstance(result_dict[column], Decimal):
result_dict[column] = str(result_dict[column])
result_dict['links'] = self.links()
# Expand each foreign key either inline (depth > 0) or as a URL reference.
for foreign_key in self.__table__.foreign_keys:
column_name = foreign_key.column.name
column_value = getattr(self, column_name, None)
if column_value:
table = foreign_key.column.table.name
with app.app_context():
endpoint = current_app.class_references[table]
session = db.session()
resource = session.query(endpoint).get(column_value)
if depth > 0:
# Recurse into the related resource with one less level.
result_dict.update({
'rel': endpoint.__name__,
endpoint.__name__.lower(): resource.as_dict(depth - 1)
})
else:
result_dict[
endpoint.__name__.lower() + '_url'] = '/{}/{}'.format(
endpoint.__name__, column_value)
result_dict['self'] = self.resource_uri()
return result_dict |
python | def image(random=random, width=800, height=600, https=False, *args, **kwargs):
"""
Generate the address of a placeholder image.
>>> mock_random.seed(0)
>>> image(random=mock_random)
'http://dummyimage.com/800x600/292929/e3e3e3&text=mighty poop'
>>> image(random=mock_random, width=60, height=60)
'http://placekitten.com/60/60'
>>> image(random=mock_random, width=1920, height=1080)
'http://dummyimage.com/1920x1080/292929/e3e3e3&text=To get to Westeros, you need to go to Britchestown, then drive west.'
>>> image(random=mock_random, https=True, width=1920, height=1080)
'https://dummyimage.com/1920x1080/292929/e3e3e3&text=East Mysteryhall is in Westeros.'
"""
# Pick a caption generator by image size: noun < thing < sentence.
target_fn = noun
if width+height > 300:
target_fn = thing
if width+height > 2000:
target_fn = sentence
s = ""
if https:
s = "s"
# Randomly alternate between the captioned dummyimage service and placekitten.
if random.choice([True, False]):
return "http{s}://dummyimage.com/{width}x{height}/292929/e3e3e3&text={text}".format(
s=s,
width=width,
height=height,
text=target_fn(random=random))
else:
return "http{s}://placekitten.com/{width}/{height}".format(s=s, width=width, height=height) |
java | public QueryContext with( Problems problems ) {
// Copy-on-write: return a new QueryContext identical to this one except
// for the supplied Problems collector.
return new QueryContext(context, repositoryCache, workspaceNames, overriddenNodeCachesByWorkspaceName, schemata,
indexDefns, nodeTypes, bufferManager, hints, problems, variables);
} |
python | def expand_query(config, kwds):
"""
Expand `kwds` based on `config.search.query_expander`.
:type config: .config.Configuration
:type kwds: dict
:rtype: dict
:return: Return `kwds`, modified in place.
"""
pattern = []
for query in kwds.pop('pattern', []):
expansion = config.search.alias.get(query)
if expansion is None:
# Not an alias: keep the raw query term.
pattern.append(query)
else:
# Alias: re-parse its expansion as CLI arguments and merge the
# parsed options into kwds (lists extend, scalars overwrite).
parser = SafeArgumentParser()
search_add_arguments(parser)
ns = parser.parse_args(expansion)
for (key, value) in vars(ns).items():
if isinstance(value, (list, tuple)):
if not kwds.get(key):
kwds[key] = value
else:
kwds[key].extend(value)
else:
kwds[key] = value
kwds['pattern'] = pattern
return config.search.kwds_adapter(kwds) |
java | public static Map<String, Integer> getGeometryTypes(Connection connection, TableLocation location)
throws SQLException {
// Maps each geometry column of the table to its SFS geometry type code.
Map<String, Integer> map = new HashMap<>();
boolean isH2 = JDBCUtilities.isH2DataBase(connection.getMetaData());
// FIX: close the ResultSet deterministically; the original leaked it.
try (ResultSet geomResultSet = getGeometryColumnsView(connection, location.getCatalog(), location.getSchema(),
location.getTable())) {
while (geomResultSet.next()) {
String fieldName = geomResultSet.getString("F_GEOMETRY_COLUMN");
int type;
if (isH2) {
type = geomResultSet.getInt("GEOMETRY_TYPE");
} else {
// NOTE(review): Map#get returns null for an unknown type name,
// which would NPE on unboxing — confirm the geometry-columns view
// only yields known type strings.
type = GEOM_TYPE_TO_SFS_CODE.get(geomResultSet.getString("type").toLowerCase());
}
map.put(fieldName, type);
}
}
return map;
} |
java | public final CompletableFuture<Integer> send(Object message, boolean last) {
// Convenience overload: delegates with the first flag fixed to false.
// NOTE(review): confirm the meaning of that flag at the three-argument
// overload before relying on it.
return send(false, message, last);
} |
java | public Object getObject(int index)
{
// Deserialize the object stored at `index`; returns null when no stream
// exists.  Both streams are closed via try-with-resources.
try (InputStream is = openInputStream(index)) {
if (is == null) {
return null;
}
try (InH3 in = serializer().in(is)) {
return in.readObject();
}
} catch (IOException e) {
throw new RuntimeException(e);
} catch (Exception e) {
// FIX: dropped the stray printStackTrace() call (and double semicolon);
// precise rethrow preserves the original exception and the contract.
throw e;
}
} |
java | @Override
public int getCacheIdsHashcodeInPushPullTable(boolean debug) {
final String methodName = "getCacheIdsHashcodeInPushPullTable()";
if (this.featureSupport.isReplicationSupported()) {
// TODO write code to support getCacheIdsHashcodeInPushPullTable function
if (tc.isDebugEnabled()) {
Tr.debug(tc, methodName + " cacheName=" + cacheName + " ERROR because it is not implemented yet");
}
} else {
// Replication is not available for this cache provider: log the error.
Tr.error(tc, "DYNA1065E", new Object[] { methodName, cacheName, this.cacheProviderName });
}
// Always 0 until the replication path is implemented.
return 0;
} |
python | def getAllData(self, temp = True, accel = True, gyro = True):
"""!
Get all the available data.
@param temp: True - Allow to return Temperature data
@param accel: True - Allow to return Accelerometer data
@param gyro: True - Allow to return Gyroscope data
@return a dictionary data
@retval {} Did not read any data
@retval {"temp":32.3,"accel":{"x":0.45634,"y":0.2124,"z":1.334},"gyro":{"x":0.45634,"y":0.2124,"z":1.334}} Returned all data
"""
allData = {}
if temp:
allData["temp"] = self.getTemp()
if accel:
allData["accel"] = self.getAccelData( raw = False )
if gyro:
allData["gyro"] = self.getGyroData()
return allData |
java | @Bean
public BraveModule braveModule() {
// Assemble a Brave tracer for service "exampleApp": inheritable span state
// plus a logging reporter, exposed as a server-side module bean.
String serviceName = "exampleApp";
Endpoint localEndpoint = Endpoint.builder().serviceName(serviceName).build();
InheritableServerClientAndLocalSpanState spanState = new InheritableServerClientAndLocalSpanState(localEndpoint);
Brave.Builder builder = new Brave.Builder(spanState);
builder = builder.reporter(new LoggingReporter());
Brave brave = builder.build();
return BraveModule.newServerModule(brave);
} |
java | public Response updateItems( HttpServletRequest request,
String repositoryName,
String workspaceName,
String requestContent ) throws JSONException, RepositoryException {
JSONObject requestBody = stringToJSONObject(requestContent);
// An empty body is a no-op, not an error.
if (requestBody.length() == 0) {
return Response.ok().build();
}
Session session = getSession(request, repositoryName, workspaceName);
// Group the requested changes by node path so updates run in sorted order.
TreeMap<String, JSONObject> nodesByPath = createNodesByPathMap(requestBody);
List<RestItem> result = updateMultipleNodes(request, session, nodesByPath);
return createOkResponse(result);
} |
java | public void removeStickyFooterItemAtPosition(int position) {
// Remove the item when the position is in range.
// NOTE(review): the footer view is rebuilt even when nothing was removed —
// confirm that is intended.
if (mDrawerBuilder.mStickyDrawerItems != null && mDrawerBuilder.mStickyDrawerItems.size() > position) {
mDrawerBuilder.mStickyDrawerItems.remove(position);
}
DrawerUtils.rebuildStickyFooterView(mDrawerBuilder);
} |
python | def sanitize_html(value, valid_tags=VALID_TAGS, strip=True):
"""
Strips unwanted markup out of HTML.
"""
return bleach.clean(value, tags=list(VALID_TAGS.keys()), attributes=VALID_TAGS, strip=strip) |
python | def save_file(self, filename, text):
"""Save the given text under the given control filename and the
current path."""
if not filename.endswith('.py'):
filename += '.py'
path = os.path.join(self.currentpath, filename)
with open(path, 'w', encoding="utf-8") as file_:
file_.write(text) |
java | protected void bindClass(PersistentEntity domainClass, PersistentClass persistentClass, InFlightMetadataCollector mappings) {
// set lazy loading for now
persistentClass.setLazy(true);
final String entityName = domainClass.getName();
// Register the entity under its fully-qualified name; the JPA name is the
// unqualified form, and the proxy interface/class reuse the FQN.
persistentClass.setEntityName(entityName);
persistentClass.setJpaEntityName(unqualify(entityName));
persistentClass.setProxyInterfaceName(entityName);
persistentClass.setClassName(entityName);
// set dynamic insert to false
persistentClass.setDynamicInsert(false);
// set dynamic update to false
persistentClass.setDynamicUpdate(false);
// set select before update to false
persistentClass.setSelectBeforeUpdate(false);
// add import to mappings
String en = persistentClass.getEntityName();
if (mappings.getMetadataBuildingOptions().getMappingDefaults().isAutoImportEnabled() && en.indexOf('.') > 0) {
String unqualified = unqualify(en);
mappings.addImport(unqualified, en);
}
} |
python | def get_shard_stats(self):
"""
:return: get stats for this mongodb shard
"""
# Synchronous HTTP GET; the auth token is taken from the bound client.
# NOTE(review): no timeout and no status-code check — a failed request
# surfaces as a JSON/KeyError from the ['data']['stats'] access.
return requests.get(self._stats_url, params={'include_stats': True},
headers={'X-Auth-Token': self._client.auth._token}
).json()['data']['stats'] |
java | public static boolean isAvailable() {
// Probe the local RMI registry for the hosts-edit service.  The lookup
// (including the cast) throws when the service is absent or incompatible,
// which maps to `false`.  The unreachable `throws Exception` clause on the
// original signature was removed (every exception is caught here).
try {
Registry myRegistry = LocateRegistry.getRegistry("127.0.0.1", port);
com.groupon.odo.proxylib.hostsedit.rmi.Message impl = (com.groupon.odo.proxylib.hostsedit.rmi.Message) myRegistry.lookup(SERVICE_NAME);
return impl != null;
} catch (Exception e) {
return false;
}
} |
python | async def check_user(self, request, func=None, location=None, **kwargs):
"""Check for user is logged and pass the given func.
:param func: user checker function, defaults to default_user_checker
:param location: where to redirect if user is not logged in.
May be either string (URL) or function which accepts request as argument
and returns string URL.
"""
user = await self.load_user(request)
func = func or self.cfg.default_user_checker
if not func(user):
location = location or self.cfg.login_url
# Resolve the redirect target: call factories until a plain value
# remains, then await any coroutine results.
while callable(location):
location = location(request)
while asyncio.iscoroutine(location):
location = await location
# Not logged in (or rejected): redirect via HTTP 302.
raise HTTPFound(location, **kwargs)
return user |
java | public void setFpgaImageIds(java.util.Collection<String> fpgaImageIds) {
// Null clears the list; otherwise defensively copy into the SDK list type.
if (fpgaImageIds == null) {
this.fpgaImageIds = null;
return;
}
this.fpgaImageIds = new com.amazonaws.internal.SdkInternalList<String>(fpgaImageIds);
} |
java | public static <L, R> @NonNull Pair<L, R> of(final @Nullable L left, final @Nullable R right) {
// Static factory; both components may be null.
return new Pair<>(left, right);
} |
python | def write_gtiff_file(f_name, n_rows, n_cols, data, geotransform, srs, nodata_value,
gdal_type=GDT_Float32):
"""Output Raster to GeoTiff format file.
Args:
f_name: output gtiff file name.
n_rows: Row count.
n_cols: Col count.
data: 2D array data.
geotransform: geographic transformation.
srs: coordinate system.
nodata_value: nodata value.
gdal_type (:obj:`pygeoc.raster.GDALDataType`): output raster data type,
GDT_Float32 as default.
"""
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(f_name)))
driver = gdal_GetDriverByName(str('GTiff'))
try:
ds = driver.Create(f_name, n_cols, n_rows, 1, gdal_type)
except Exception:
print('Cannot create output file %s' % f_name)
return
ds.SetGeoTransform(geotransform)
try:
ds.SetProjection(srs.ExportToWkt())
except AttributeError or Exception:
ds.SetProjection(srs)
ds.GetRasterBand(1).SetNoDataValue(nodata_value)
# if data contains numpy.nan, then replaced by nodata_value
if isinstance(data, numpy.ndarray) and data.dtype in [numpy.dtype('int'),
numpy.dtype('float')]:
data = numpy.where(numpy.isnan(data), nodata_value, data)
ds.GetRasterBand(1).WriteArray(data)
ds = None |
java | public void setListeners(Collection<IWebSocketDataListener> listeners) {
log.trace("setListeners: {}", listeners);
// NOTE(review): despite the "set" name this APPENDS to the existing
// listeners rather than replacing them — confirm callers expect that.
this.listeners.addAll(listeners);
} |
python | def flatten_list(lobj):
"""
Recursively flattens a list.
:param lobj: List to flatten
:type lobj: list
:rtype: list
For example:
>>> import pmisc
>>> pmisc.flatten_list([1, [2, 3, [4, 5, 6]], 7])
[1, 2, 3, 4, 5, 6, 7]
"""
ret = []
for item in lobj:
if isinstance(item, list):
for sub_item in flatten_list(item):
ret.append(sub_item)
else:
ret.append(item)
return ret |
java | public void marshall(DeleteProjectRequest deleteProjectRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteProjectRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
// Only the project name is serialized for a delete request.
protocolMarshaller.marshall(deleteProjectRequest.getName(), NAME_BINDING);
} catch (Exception e) {
// Wrap and rethrow, preserving the original exception as cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def get_new_messages(self, domain):
"""
Returns new valid messages after operation.
@type domain: str
@rtype: dict
@raise ValueError: when the domain is not registered in self.domains
"""
if domain not in self.domains:
raise ValueError('Invalid domain: {0}'.format(domain))
# Lazily (re)process the domain when its 'new' bucket is missing.
if domain not in self.messages or 'new' not in self.messages[domain]:
self._process_domain(domain)
return self.messages[domain]['new'] |
java | protected List<SchemaDescriptor> scanConnection(String url, String user, String password, String infoLevelName, String bundledDriverName,
Properties properties, Store store) throws IOException {
LOGGER.info("Scanning schema '{}'", url);
// Crawl the database metadata, then persist one descriptor per schema.
Catalog catalog = getCatalog(url, user, password, infoLevelName, bundledDriverName, properties);
return createSchemas(catalog, store);
} |
python | def idle_all_workers(self):
'''Set the global mode to :attr:`IDLE` and wait for workers to stop.
This can wait arbitrarily long before returning. The worst
case in "normal" usage involves waiting five minutes for a
"lost" job to expire; a well-behaved but very-long-running job
can extend its own lease further, and this function will not
return until that job finishes (if ever).
.. deprecated:: 0.4.5
There isn't an obvious use case for this function, and its
"maybe wait forever for something out of my control" nature
makes it hard to use in real code. Polling all of the work
specs and their :meth:`num_pending` in application code if
you really needed this operation would have the same
semantics and database load.
'''
self.set_mode(self.IDLE)
# Poll once per second until no work spec has pending units.
while 1:
num_pending = dict()
for work_spec_name in self.registry.pull(NICE_LEVELS).keys():
num_pending[work_spec_name] = self.num_pending(work_spec_name)
if sum(num_pending.values()) == 0:
break
logger.warn('waiting for pending work_units: %r', num_pending)
time.sleep(1) |
python | def check_lengths(*arrays):
"""
tool to ensure input and output data have the same number of samples
Parameters
----------
*arrays : iterable of arrays to be checked
Returns
-------
None
"""
lengths = [len(array) for array in arrays]
if len(np.unique(lengths)) > 1:
raise ValueError('Inconsistent data lengths: {}'.format(lengths)) |
java | public List<String> getVariableNames() {
// Project each template variable to its name, preserving order.
return variables.asList().stream() //
.map(TemplateVariable::getName) //
.collect(Collectors.toList());
} |
python | def _get_data_segments(channels, start, end, connection):
"""Get available data segments for the given channels
"""
allsegs = io_nds2.get_availability(channels, start, end,
connection=connection)
# Intersect availability across all channels so only spans covered by
# every channel survive.
return allsegs.intersection(allsegs.keys()) |
python | def alterar(self, id_perm, id_permission, read, write, id_group):
"""Change Administrative Permission from by the identifier.
:param id_perm: Identifier of the Administrative Permission. Integer value and greater than zero.
:param id_permission: Identifier of the Permission. Integer value and greater than zero.
:param read: Read. 0 or 1
:param write: Write. 0 or 1
:param id_group: Identifier of the Group of User. Integer value and greater than zero.
:return: None
:raise InvalidParameterError: The identifier of Administrative Permission, identifier of Permission, identifier of Group of User, read or write is null and invalid.
:raise ValorIndicacaoPermissaoInvalidoError: The value of read or write is null and invalid.
:raise PermissaoAdministrativaNaoExisteError: Administrative Permission not registered.
:raise GrupoUsuarioNaoExisteError: Group of User not registered.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
# NOTE(review): only id_perm is validated client-side — the remaining
# parameters are presumably validated by the API (see raises above).
if not is_valid_int_param(id_perm):
raise InvalidParameterError(
u'The identifier of Administrative Permission is invalid or was not informed.')
url = 'aperms/' + str(id_perm) + '/'
perms_map = dict()
perms_map['id_perm'] = id_perm
perms_map['id_permission'] = id_permission
perms_map['read'] = read
perms_map['write'] = write
perms_map['id_group'] = id_group
# Submit as XML over PUT and translate the response/raise API errors.
code, xml = self.submit(
{'administrative_permission': perms_map}, 'PUT', url)
return self.response(code, xml) |
python | def color_cycle(colors=None):
"""An infinite iterator of the given (or default) colors
"""
if colors:
return itertools.cycle(colors)
try:
# matplotlib >= 1.5 exposes colors through the 'axes.prop_cycle' cycler.
return itertools.cycle(p["color"] for p in rcParams["axes.prop_cycle"])
except KeyError: # matplotlib < 1.5
return itertools.cycle(rcParams["axes.color_cycle"]) |
java | @Override
public BlockingMessage createBLO() {
// Build a blocking message from the shared static code tables in _BLO_HOLDER.
BlockingMessageImpl blo = new BlockingMessageImpl(_BLO_HOLDER.mandatoryCodes, _BLO_HOLDER.mandatoryVariableCodes,
_BLO_HOLDER.optionalCodes, _BLO_HOLDER.mandatoryCodeToIndex, _BLO_HOLDER.mandatoryVariableCodeToIndex,
_BLO_HOLDER.optionalCodeToIndex);
return blo;
} |
java | public synchronized Object lastKey(Transaction transaction)
throws ObjectManagerException
{
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.entry(this,
cclass,
"lastKey",
new Object[] { transaction });
// NOTE(review): lastEntry() returning null (empty map) would raise a
// NullPointerException on getKey() — confirm whether callers guarantee
// non-emptiness or whether a NoSuchElementException was intended.
Entry entry = lastEntry(transaction);
Object returnKey = entry.getKey();
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.exit(this,
cclass,
"lastKey",
new Object[] { returnKey });
return returnKey;
} |
python | def write_cell(self, x, y, value, style=None):
"""
writing style and value in the cell of x and y position
"""
# A style given as a string is compiled via xlwt's easyxf mini-language.
if isinstance(style, str):
style = self.xlwt.easyxf(style)
# NOTE(review): xlwt's Sheet.write takes (row, col); confirm x is the row
# index and y the column index as used here.
if style:
self._sheet.write(x, y, label=value, style=style)
else:
self._sheet.write(x, y, label=value) |
java | public void moveItem(int start, int end){
// Translate positions in the filtered view back to indexes in the backing
// list before mutating it.
M startItem = filteredItems.get(start);
M endItem = filteredItems.get(end);
int realStart = items.indexOf(startItem);
int realEnd = items.indexOf(endItem);
// NOTE(review): swap EXCHANGES the two items rather than performing a list
// "move"; confirm this matches notifyItemMoved semantics for non-adjacent
// positions.
Collections.swap(items, realStart, realEnd);
applyFilter();
onItemMoved(startItem, realStart, realEnd);
notifyItemMoved(realStart, realEnd);
} |
java | @Override
public ConsumableKey attachToDurableSubscription(
LocalConsumerPoint consumerPoint,
ConsumerDispatcherState subState)
throws SIDurableSubscriptionMismatchException, SIDurableSubscriptionNotFoundException, SIDestinationLockedException, SISelectorSyntaxException, SIDiscriminatorSyntaxException, SINotPossibleInCurrentConfigurationException, SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(
tc,
"attachToDurableSubscription",
new Object[] { consumerPoint, subState });
// Delegate the actual attach to the pub/sub realization; this override
// only contributes entry/exit tracing.
ConsumableKey result =
_pubSubRealization.attachToDurableSubscription(
consumerPoint,
subState);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "attachToDurableSubscription", result);
return result;
} |
java | public static ShardedJedisPool newShardedJedisPool(JedisPoolConfig poolConfig,
String hostsAndPorts, int timeoutMs) {
// Convenience overload: the null argument selects no password for the shards.
return newShardedJedisPool(poolConfig, hostsAndPorts, null, timeoutMs);
} |
python | def _map(self, data_item):
"Map ``data_item`` separately in each thread."
delegate = self.delegate
logger.debug(f'mapping: {data_item}')
# Skip items that already exist unless clobbering was requested.
if self.clobber or not self.exists(data_item.id):
logger.debug(f'exist: {data_item.id}: {self.exists(data_item.id)}')
delegate.dump(data_item.id, data_item) |
java | public boolean add(WeightedDirectedTypedEdge<T> e) {
// Handles only edges incident to rootVertex; out-edges and in-edges are
// mirror cases.  An existing edge with the same endpoints/type but a
// different weight is replaced; an identical edge is rejected.
if (e.from() == rootVertex) {
Set<WeightedDirectedTypedEdge<T>> edges = outEdges.get(e.to());
if (edges.contains(e))
return false;
// We can't rely on the edge's equality method since that uses the
// edge weight, so we'll have to iterate over the edges to see if we
// have one that matches the type, and if so replace it with the
// different weight
if (!edges.isEmpty()) {
Iterator<WeightedDirectedTypedEdge<T>> iter = edges.iterator();
WeightedDirectedTypedEdge<T> existing = null;
while (iter.hasNext()) {
WeightedDirectedTypedEdge<T> n = iter.next();
if (n.to() == e.to()
&& n.edgeType().equals(e.edgeType())) {
existing = n;
break;
}
}
if (existing == null) {
outEdges.put(e.to(), e);
return true;
}
// Check if has the same weight;
else if (e.weight() != existing.weight()) {
outEdges.remove(e.to(), existing);
outEdges.put(e.to(), e);
return true;
}
else
return false;
}
else
return outEdges.put(e.to(), e);
}
else if (e.to() == rootVertex) {
Set<WeightedDirectedTypedEdge<T>> edges = inEdges.get(e.from());
if (edges.contains(e))
return false;
// We can't rely on the edge's equality method since that uses the
// edge weight, so we'll have to iterate over the edges to see if we
// have one that matches the type, and if so replace it with the
// different weight
if (!edges.isEmpty()) {
Iterator<WeightedDirectedTypedEdge<T>> iter = edges.iterator();
WeightedDirectedTypedEdge<T> existing = null;
while (iter.hasNext()) {
WeightedDirectedTypedEdge<T> n = iter.next();
if (n.from() == e.from()
&& n.edgeType().equals(e.edgeType())) {
existing = n;
break;
}
}
if (existing == null) {
inEdges.put(e.from(), e);
return true;
}
// Check if has the same weight;
else if (e.weight() != existing.weight()) {
inEdges.remove(e.from(), existing);
inEdges.put(e.from(), e);
return true;
}
return false;
}
else
return inEdges.put(e.from(), e);
}
// Edge does not touch rootVertex: not stored here.
return false;
} |
python | def _restore_file(path, delete_backup=True):
"""
Restore a file if it exists and remove the backup
"""
backup_base = '/var/local/woven-backup'
backup_path = ''.join([backup_base,path])
if exists(backup_path):
if delete_backup:
sudo('mv -f %s %s'% (backup_path,path))
else:
sudo('cp -f %s %s'% (backup_path,path)) |
python | def _convert(self, format):
"""Return a new Image instance with the given format.
Returns self if the format is already the same.
"""
if self.format == format:
return self
else:
image = Image(self.pil_image)
image._format = format
return image |
java | public Login login(TokenResult token, String username, String password)
        throws Exception {
    // Convenience overload: delegates with no extra argument (null).
    return login(token, username, password, null);
} |
python | def normalize_list(text):
"""
Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the']
"""
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces |
python | def getCandScoresMap(self, profile):
    """
    Returns a dictionary that associates integer representations of each candidate with their
    Bucklin score.

    The Bucklin score of a candidate is the smallest position t such that at
    least half the voters rank the candidate within the first t positions.

    :ivar Profile profile: A Profile object that represents an election profile.
    """
    # Currently, we expect the profile to contain complete ordering over candidates.
    elecType = profile.getElecType()
    if elecType != "soc" and elecType != "toc":
        # NOTE(review): this exits the whole process on bad input; raising an
        # exception would be friendlier to library callers — confirm intent.
        print("ERROR: unsupported profile type")
        exit()
    bucklinScores = dict()
    rankMaps = profile.getRankMaps()
    preferenceCounts = profile.getPreferenceCounts()
    for cand in profile.candMap.keys():
        # We keep track of the number of times a candidate is ranked in the first t positions.
        numTimesRanked = 0
        # We increase t in increments of 1 until we find t such that the candidate is ranked in the
        # first t positions in at least half the votes.
        for t in range(1, profile.numCands + 1):
            # Add the ballots ranking cand exactly at position t; the running
            # total therefore counts positions 1..t cumulatively.
            for i in range(0, len(rankMaps)):
                if (rankMaps[i][cand] == t):
                    numTimesRanked += preferenceCounts[i]
            if numTimesRanked >= math.ceil(float(profile.numVoters) / 2):
                bucklinScores[cand] = t
                break
    return bucklinScores |
java | public Set<BugPattern> getReportedBugPatterns() {
    // Parse the comma-separated "reports" attribute into the corresponding
    // BugPattern objects; unknown pattern types are silently skipped.
    Set<BugPattern> patterns = new TreeSet<>();
    StringTokenizer tokenizer = new StringTokenizer(reports, ",");
    while (tokenizer.hasMoreTokens()) {
        String type = tokenizer.nextToken();
        BugPattern pattern = DetectorFactoryCollection.instance().lookupBugPattern(type);
        if (pattern != null) {
            patterns.add(pattern);
        }
    }
    return patterns;
} |
java | public static <S, SS extends S, T, TT extends T> Tuple2<S, T> of(SS s, TT t)
{
    // Static factory: widens the components to the supertypes S and T,
    // letting callers pass any subtypes SS and TT without explicit casts.
    return new Tuple2<S, T>(s, t);
} |
python | def Message(self, text):
    """Inform about what we are doing right now, e.g.
    'Checking for SOMETHING ... '
    """
    self.Display(text)
    # Mark the check result as (potentially) cacheable and remember that the
    # result line for this message has not been shown yet.
    self.sconf.cached = 1
    self.did_show_result = 0
python | def close(self):
    """
    Collects the result from the workers and closes the thread pool.
    """
    # NOTE(review): terminate() immediately after close() stops workers
    # without waiting for queued tasks to finish; if pending results must be
    # collected, close() followed by join() alone would be expected —
    # confirm the early-termination behavior is intended.
    self.pool.close()
    self.pool.terminate()
    self.pool.join()
java | public static final Function<String,Short> toShort(final RoundingMode roundingMode, final DecimalPoint decimalPoint) {
    // Factory for a String-to-Short conversion function configured with the
    // given rounding mode and decimal-point convention.
    return new ToShort(roundingMode, decimalPoint);
} |
java | public String get(Context context, String url) {
    // Look up the cached response body for the given URL, or null when there
    // is no cache entry. The URL is bound via selectionArgs rather than
    // concatenated into the selection string, which avoids SQL injection and
    // handles URLs containing quote characters.
    final Cursor cursor = context.getContentResolver().query(
            getUri(OfflinerDBHelper.TABLE_CACHE),
            OfflinerDBHelper.PARAMS_CACHE,
            OfflinerDBHelper.REQUEST_URL + " = ?",
            new String[] { url },
            null);
    String result = null;
    if (cursor != null) {
        if (cursor.getCount() != 0) {
            cursor.moveToFirst();
            result = cursor.getString(cursor.getColumnIndex(OfflinerDBHelper.REQUEST_RESULT));
        }
        // Always release the cursor, whether or not a row was found.
        cursor.close();
    }
    return result;
} |
java | @Override
public long dynamicQueryCount(DynamicQuery dynamicQuery,
        Projection projection) {
    // Delegates the projected row count to the persistence layer.
    return cpDefinitionGroupedEntryPersistence.countWithDynamicQuery(dynamicQuery,
        projection);
} |
java | public ApiResponse<Float> postCharactersCharacterIdCspaWithHttpInfo(Integer characterId, List<Integer> requestBody,
        String datasource, String token) throws ApiException {
    // Validate parameters and build the HTTP call (no progress callbacks),
    // then execute it synchronously, deserializing the response body as Float.
    com.squareup.okhttp.Call call = postCharactersCharacterIdCspaValidateBeforeCall(characterId, requestBody,
            datasource, token, null);
    Type localVarReturnType = new TypeToken<Float>() {
    }.getType();
    return apiClient.execute(call, localVarReturnType);
} |
java | public void close()
{
    // Idempotent shutdown: the closed flag is flipped under closeLock so
    // only the first caller proceeds past this point.
    synchronized (closeLock)
    {
        if (closed)
            return;
        closed = true;
    }
    synchronized (this)
    {
        // Destroy every pooled object, then empty both bookkeeping
        // collections while holding the pool monitor.
        Iterator<T> allObjects = all.iterator();
        while (allObjects.hasNext())
        {
            T poolObject = allObjects.next();
            internalDestroyPoolObject(poolObject);
        }
        all.clear();
        available.clear();
        // Unlock all waiting threads
        notifyAll();
    }
} |
python | def set_security_groups(mounttargetid,
                        securitygroup,
                        keyid=None,
                        key=None,
                        profile=None,
                        region=None,
                        **kwargs):
    '''
    Modifies the set of security groups in effect for a mount target

    mounttargetid
        (string) - ID of the mount target whose security groups will be modified

    securitygroup
        (list[string]) - list of no more than 5 VPC security group IDs.

    CLI Example:

    .. code-block:: bash

        salt 'my-minion' boto_efs.set_security_groups my-mount-target-id my-sec-group
    '''
    # Build a boto3 EFS client from the provided credentials/profile/region
    # and replace the mount target's security groups wholesale.
    client = _get_conn(key=key, keyid=keyid, profile=profile, region=region)
    client.modify_mount_target_security_groups(MountTargetId=mounttargetid,
                                               SecurityGroups=securitygroup)
java | private void overrideAbstractMethods() throws IOException
{
    // Implements every abstract superclass method annotated as a grammar
    // entry point (@Terminal, @Rule or @Rules) by generating a stub that
    // either converts-and-returns its single argument or simply returns.
    for (final ExecutableElement method : El.getEffectiveMethods(superClass))
    {
        if (method.getModifiers().contains(Modifier.ABSTRACT))
        {
            if (
                method.getAnnotation(Terminal.class) != null ||
                method.getAnnotation(Rule.class) != null ||
                method.getAnnotation(Rules.class) != null )
            {
                implementedAbstractMethods.add(method);
                MethodCompiler mc = new MethodCompiler()
                {
                    @Override
                    protected void implement() throws IOException
                    {
                        TypeMirror returnType = method.getReturnType();
                        List<? extends VariableElement> params = method.getParameters();
                        if (returnType.getKind() != TypeKind.VOID && params.size() == 1)
                        {
                            // Non-void with one parameter: convert the
                            // argument to the declared return type and
                            // return it.
                            nameArgument(ARG, 1);
                            try
                            {
                                convert(ARG, returnType);
                            }
                            catch (IllegalConversionException ex)
                            {
                                throw new IOException("bad conversion with "+method, ex);
                            }
                            treturn();
                        }
                        else
                        {
                            if (returnType.getKind() == TypeKind.VOID && params.size() == 0)
                            {
                                // Void, no-argument methods become empty stubs.
                                treturn();
                            }
                            else
                            {
                                // Any other shape cannot be auto-implemented.
                                throw new IllegalArgumentException("cannot implement abstract method "+method);
                            }
                        }
                    }
                };
                subClass.overrideMethod(mc, method, Modifier.PROTECTED);
            }
        }
    }
} |
java | public ServiceFuture<List<SecretItem>> getSecretVersionsAsync(final String vaultBaseUrl, final String secretName, final Integer maxresults, final ListOperationCallback<SecretItem> serviceCallback) {
    // Pages through all versions of the secret: fetch the first page, then
    // follow nextPageLink for each subsequent page, reporting every page to
    // the supplied callback.
    return AzureServiceFuture.fromPageResponse(
        getSecretVersionsSinglePageAsync(vaultBaseUrl, secretName, maxresults),
        new Func1<String, Observable<ServiceResponse<Page<SecretItem>>>>() {
            @Override
            public Observable<ServiceResponse<Page<SecretItem>>> call(String nextPageLink) {
                return getSecretVersionsNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
} |
java | public Response setTags(String photoId, List<String> tags) throws JinxException {
    // Replace all tags on a photo via flickr.photos.setTags. Tags containing
    // spaces are quoted so Flickr treats each as a single tag; a null or
    // empty list clears the photo's tags.
    JinxUtils.validateParams(photoId);
    Map<String, String> params = new TreeMap<>();
    params.put("method", "flickr.photos.setTags");
    params.put("photo_id", photoId);
    String tagParam = "";
    if (tags != null && tags.size() > 0) {
        StringBuilder builder = new StringBuilder();
        for (String tag : tags) {
            if (tag.contains(" ")) {
                builder.append('"').append(tag).append('"');
            } else {
                builder.append(tag);
            }
            builder.append(' ');
        }
        // Drop the trailing separator space.
        builder.deleteCharAt(builder.length() - 1);
        tagParam = builder.toString();
    }
    params.put("tags", tagParam);
    return this.jinx.flickrPost(params, Response.class);
} |
python | def StartTiming(self, profile_name):
"""Starts timing CPU time.
Args:
profile_name (str): name of the profile to sample.
"""
if profile_name not in self._profile_measurements:
self._profile_measurements[profile_name] = CPUTimeMeasurement()
self._profile_measurements[profile_name].SampleStart() |
python | def _register_key(fingerprint, gpg):
    """Registers key in config"""
    # Scan the private keyring for a key whose fingerprint matches and record
    # it in the module-level config.
    for private_key in gpg.list_keys(True):
        try:
            if str(fingerprint) == private_key['fingerprint']:
                # NOTE(review): repr() stores the value with surrounding
                # quotes — confirm the config reader expects that form.
                config["gpg_key_fingerprint"] = \
                    repr(private_key['fingerprint'])
        except KeyError:
            # Keyring entries without a fingerprint field are skipped.
            pass
python | def _pyxb_from_perm_dict(self, perm_dict):
"""Return an AccessPolicy PyXB representation of ``perm_dict``
- If ``norm_perm_list`` is empty, None is returned. The schema does not allow
AccessPolicy to be empty, but in SystemMetadata, it can be left out
altogether. So returning None instead of an empty AccessPolicy allows the
result to be inserted directly into a SystemMetadata PyXB object.
"""
norm_perm_list = self._norm_perm_list_from_perm_dict(perm_dict)
return self._pyxb_from_norm_perm_list(norm_perm_list) |
python | def swapoff(name):
'''
Deactivate a named swap mount
.. versionchanged:: 2016.3.2
CLI Example:
.. code-block:: bash
salt '*' mount.swapoff /root/swapfile
'''
on_ = swaps()
if name in on_:
if __grains__['kernel'] == 'SunOS':
if __grains__['virtual'] != 'zone':
__salt__['cmd.run']('swap -a {0}'.format(name), python_shell=False)
else:
return False
elif __grains__['os'] != 'OpenBSD':
__salt__['cmd.run']('swapoff {0}'.format(name), python_shell=False)
else:
__salt__['cmd.run']('swapctl -d {0}'.format(name),
python_shell=False)
on_ = swaps()
if name in on_:
return False
return True
return None |
java | @Override
public void serializeInstance(SerializationStreamWriter streamWriter, OWLLiteralImpl instance) throws SerializationException {
    // GWT custom field serializer hook; delegates to the shared serialize().
    serialize(streamWriter, instance);
} |
python | def get_top_headlines(self, q=None, sources=None, language='en', country=None, category=None, page_size=None,
page=None):
"""
Returns live top and breaking headlines for a country, specific category in a country, single source, or multiple sources..
Optional parameters:
(str) q - return headlines w/ specific keyword or phrase. For example:
'bitcoin', 'trump', 'tesla', 'ethereum', etc.
(str) sources - return headlines of news sources! some Valid values are:
'bbc-news', 'the-verge', 'abc-news', 'crypto coins news',
'ary news','associated press','wired','aftenposten','australian financial review','axios',
'bbc news','bild','blasting news','bloomberg','business insider','engadget','google news',
'hacker news','info money,'recode','techcrunch','techradar','the next web','the verge' etc.
(str) language - The 2-letter ISO-639-1 code of the language you want to get headlines for. Valid values are:
'ar','de','en','es','fr','he','it','nl','no','pt','ru','se','ud','zh'
(str) country - The 2-letter ISO 3166-1 code of the country you want to get headlines! Valid values are:
'ae','ar','at','au','be','bg','br','ca','ch','cn','co','cu','cz','de','eg','fr','gb','gr',
'hk','hu','id','ie','il','in','it','jp','kr','lt','lv','ma','mx','my','ng','nl','no','nz',
'ph','pl','pt','ro','rs','ru','sa','se','sg','si','sk','th','tr','tw','ua','us'
(str) category - The category you want to get headlines for! Valid values are:
'business','entertainment','general','health','science','sports','technology'
(int) page_size - The number of results to return per page (request). 20 is the default, 100 is the maximum.
(int) page - Use this to page through the results if the total results found is greater than the page size.
"""
# Define Payload
payload = {}
# Keyword/Phrase
if q is not None:
if type(q) == str:
payload['q'] = q
else:
raise TypeError('keyword/phrase q param should be a of type str')
# Sources
if (sources is not None) and ((country is not None) or (category is not None)):
raise ValueError('cannot mix country/category param with sources param.')
# Sources
if sources is not None:
if type(sources) == str:
payload['sources'] = sources
else:
raise TypeError('sources param should be of type str')
# Language
if language is not None:
if type(language) == str:
if language in const.languages:
payload['language'] = language
else:
raise ValueError('invalid language')
else:
raise TypeError('language param should be of type str')
# Country
if country is not None:
if type(country) == str:
if country in const.countries:
payload['country'] = country
else:
raise ValueError('invalid country')
else:
raise TypeError('country param should be of type str')
# Category
if category is not None:
if type(category) == str:
if category in const.categories:
payload['category'] = category
else:
raise ValueError('invalid category')
else:
raise TypeError('category param should be of type str')
# Page Size
if page_size is not None:
if type(page_size) == int:
if 0 <= page_size <= 100:
payload['pageSize'] = page_size
else:
raise ValueError('page_size param should be an int between 1 and 100')
else:
raise TypeError('page_size param should be an int')
# Page
if page is not None:
if type(page) == int:
if page > 0:
payload['page'] = page
else:
raise ValueError('page param should be an int greater than 0')
else:
raise TypeError('page param should be an int')
# Send Request
r = requests.get(const.TOP_HEADLINES_URL, auth=self.auth, timeout=30, params=payload)
# Check Status of Request
if r.status_code != requests.codes.ok:
raise NewsAPIException(r.json())
return r.json() |
java | public <T> T get(Class<T> type) {
    // Looks up the instance registered under the class's fully-qualified name.
    return get(type.getName(), type);
} |
java | public Connection getConnection() throws SQLException {
    // Opens a new connection to the configured host/port/database with no
    // credentials and default properties; protocol-level failures are
    // translated into SQLExceptions.
    try {
        return new DrizzleConnection(new MySQLProtocol(hostname, port, database, null, null, new Properties()),
            new DrizzleQueryFactory());
    } catch (QueryException e) {
        throw SQLExceptionMapper.get(e);
    }
} |
python | def request(self, path, data=None, method='GET'):
    """ Convenience Facebook request function.

    Utility function to request resources via the graph API, with the
    format expected by Facebook.
    """
    # NOTE(review): the access token is interpolated without URL-encoding;
    # tokens containing reserved characters would need quoting — confirm.
    url = '%s%s?access_token=%s' % (
        'https://graph.facebook.com',
        path,
        self['oauth_token'])
    req = Request(url, data=data)
    # Force the HTTP verb regardless of whether a body is present.
    req.get_method = lambda: method
    return loads(urlopen(req).read())
python | def sizes(self, fileids=None) -> Generator[int, None, None]:
    """
    Yields the size on disk, in bytes, of each file in ``fileids`` (all
    fileids when none are given). This function is used to detect oddly
    large files in the corpus.
    """
    if not fileids:
        fileids = self.fileids()
    # Create a generator, getting every path and computing filesize
    for path in self.abspaths(fileids):
        yield os.path.getsize(path)
java | public final static void appendMailto(final StringBuilder out, final String in, final int start, final int end)
{
    // Obfuscates a mailto string: each character is emitted either literally
    // or as a randomly chosen decimal/hexadecimal HTML entity, making the
    // address harder to harvest. Characters special to HTML (and '@') are
    // always entity-encoded.
    for (int i = start; i < end; i++)
    {
        final char c;
        final int r = rnd();
        switch (c = in.charAt(i))
        {
        case '&':
        case '<':
        case '>':
        case '"':
        case '\'':
        case '@':
            // Always encode; choose decimal vs. hex form at random.
            if (r < 512)
            {
                appendDecEntity(out, c);
            }
            else
            {
                appendHexEntity(out, c);
            }
            break;
        default:
            // Ordinary characters: occasionally left literal, otherwise
            // encoded as a decimal or hex entity.
            if (r < 32)
            {
                out.append(c);
            }
            else if (r < 520)
            {
                appendDecEntity(out, c);
            }
            else
            {
                appendHexEntity(out, c);
            }
            break;
        }
    }
} |
java | protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception
{
    // Converts a bulk br_enable JSON response into resource objects. A
    // non-zero errorcode raises nitro_exception, clearing the session first
    // when the server reports it no longer exists.
    br_enable_responses result = (br_enable_responses) service.get_payload_formatter().string_to_resource(br_enable_responses.class, response);
    if(result.errorcode != 0)
    {
        if (result.errorcode == SESSION_NOT_EXISTS)
            service.clear_session();
        throw new nitro_exception(result.message, result.errorcode, (base_response [])result.br_enable_response_array);
    }
    // Unwrap the single br_enable contained in each per-item response.
    br_enable[] result_br_enable = new br_enable[result.br_enable_response_array.length];
    for(int i = 0; i < result.br_enable_response_array.length; i++)
    {
        result_br_enable[i] = result.br_enable_response_array[i].br_enable[0];
    }
    return result_br_enable;
} |
python | def overlays_at(self, key):
"""
Key may be a slice or a point.
"""
if isinstance(key, slice):
s, e, _ = key.indices(len(self.text))
else:
s = e = key
return [o for o in self.overlays if o.start in Rng(s, e)] |
python | def names(cls):
"""A list of all emoji names without file extension."""
if not cls._files:
for f in os.listdir(cls._image_path):
if(not f.startswith('.') and
os.path.isfile(os.path.join(cls._image_path, f))):
cls._files.append(os.path.splitext(f)[0])
return cls._files |
java | public void onFailure() throws InterruptedException {
    // Linear back-off: each failure lengthens the wait, interpolated between
    // MIN_DELAY and MAX_DELAY by the clamped failure count.
    int val = currentFailureCount.incrementAndGet();
    if (val > 50) {
        // NOTE(review): the clamp triggers above the literal 50 but clamps
        // to MAX_FAILURE_COUNT — confirm the two are meant to be the same.
        currentFailureCount.compareAndSet(val, MAX_FAILURE_COUNT);
        val = MAX_FAILURE_COUNT;
    }
    int delay = MIN_DELAY + ((MAX_DELAY - MIN_DELAY) / MAX_FAILURE_COUNT) * val;
    synchronized (this) {
        Logger.d(TAG, "onFailure: wait " + delay + " ms");
        wait(delay);
    }
} |
python | def try_recv(self):
    """Return None immediately if nothing is waiting"""
    try:
        # Non-blocking read of the 4-byte little-endian length prefix.
        lenstr = self.sock.recv(4, socket.MSG_DONTWAIT)
    except socket.error:
        return None
    if len(lenstr) < 4:
        # NOTE(review): a short (partial) read also lands here and is treated
        # as a closed socket — confirm partial prefixes cannot occur.
        raise EOFError("Socket closed")
    length = struct.unpack("<I", lenstr)[0]
    return self._get_next_obj(length)
python | def check_support_cyclic_msg(cls, hw_info_ex):
"""
Checks whether the module supports automatically transmission of cyclic CAN messages.
:param HardwareInfoEx hw_info_ex:
Extended hardware information structure (see method :meth:`get_hardware_info`).
:return: True when the module does support cyclic CAN messages, otherwise False.
:rtype: bool
"""
return cls.check_is_systec(hw_info_ex) and \
cls.check_version_is_equal_or_higher(hw_info_ex.m_dwFwVersionEx, 3, 6) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.