Dataset columns:
  language: string (2 classes)
  func_code_string: string (length 63 to 466k)
python
def get_nn_info(self, structure, n):
    """
    Get all near-neighbor sites and weights (orders) of bonds for a
    given atom.

    :param structure: input Molecule.
    :param n: index of site for which to determine near neighbors.
    :return: [dict] representing a neighboring site and the type of bond
        present between site n and the neighboring site.
    """
    from pymatgen.io.babel import BabelMolAdaptor

    obmol = BabelMolAdaptor(structure).openbabel_mol

    siw = []

    # Get only the atom of interest
    site_atom = [a for i, a in enumerate(ob.OBMolAtomDFSIter(obmol))
                 if [a.GetX(), a.GetY(), a.GetZ()] == list(structure[n].coords)][0]
    for neighbor in ob.OBAtomAtomIter(site_atom):
        coords = [neighbor.GetX(), neighbor.GetY(), neighbor.GetZ()]
        site = [a for a in structure if list(a.coords) == coords][0]
        index = structure.index(site)
        bond = site_atom.GetBond(neighbor)
        if self.order:
            obmol.PerceiveBondOrders()
            weight = bond.GetBondOrder()
        else:
            weight = bond.GetLength()
        siw.append({"site": site,
                    "image": (0, 0, 0),
                    "weight": weight,
                    "site_index": index})
    return siw
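Note that the function above matches Open Babel atoms to pymatgen sites by exact floating-point coordinate equality, which is fragile. A minimal sketch of a tolerance-based comparison that could replace those equality checks (the helper name same_coords is hypothetical, not part of the original code):

import numpy as np

def same_coords(a, b, tol=1e-8):
    """Compare two 3-vectors of coordinates within an absolute tolerance."""
    return np.allclose(a, b, atol=tol)

# e.g. instead of: [a.GetX(), a.GetY(), a.GetZ()] == list(structure[n].coords)
# use:             same_coords([a.GetX(), a.GetY(), a.GetZ()], structure[n].coords)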
python
def keyring_auth(self, username=None):
    """
    Uses the keyring module to retrieve the user's password or api_key.
    """
    if not keyring:
        # Module not installed
        raise exc.KeyringModuleNotInstalled("The 'keyring' Python module "
                "is not installed on this system.")
    if username is None:
        username = pyrax.get_setting("keyring_username")
    if not username:
        raise exc.KeyringUsernameMissing("No username specified for "
                "keyring authentication.")
    password = keyring.get_password("pyrax", username)
    if password is None:
        raise exc.KeyringPasswordNotFound("No password was found for the "
                "username '%s'." % username)
    style = self._creds_style or self._default_creds_style
    # Keyring username may be different than the credentials. Use the
    # existing username, if present; otherwise, use the supplied username.
    username = self.username or username
    if style == "apikey":
        return self.authenticate(username=username, api_key=password)
    else:
        return self.authenticate(username=username, password=password)
python
def find_tendril(cls, proto, addr):
    """
    Finds the tendril corresponding to the protocol and address
    tuple.  Returns the Tendril object, or raises KeyError if the
    tendril is not tracked.  The address tuple is the tuple of the
    local address and the remote address for the tendril.
    """
    # First, normalize the proto
    proto = proto.lower()

    # Now, find and return the tendril
    return cls._tendrils[proto][addr]
python
def upload(server, session, base_file, charset='UTF-8'):
    """Push a layer to a Geonode instance.

    :param server: The Geonode server URL.
    :type server: basestring

    :param base_file: The base file layer to upload such as a shp, geojson, ...
    :type base_file: basestring

    :param charset: The encoding to use. Default to UTF-8.
    :type charset: basestring
    """
    file_ext = os.path.splitext(base_file)[1]
    is_geojson = file_ext in ['.geojson', '.json']
    original_sibling_files, _ = siblings_files(base_file)
    if is_geojson:
        # base_file = os.path.splitext(base_file)[0]
        # create temp shapefile
        convert_geojson_to_shapefile(base_file)
        base_file = os.path.splitext(base_file)[0] + '.shp'

    upload_url = urljoin(server, upload_url_prefix)
    result = session.get(upload_url)

    # Get the upload CSRF token
    expression = re.compile(r'csrf_token(\s*)=(\s*)"([a-zA-Z0-9]*?)",')
    match = expression.search(result.text)
    csrf_token = match.groups()[2]

    # Start the data dict
    payload = {
        'charset': charset,
        'permissions': (
            '{"users":{"AnonymousUser":'
            '["view_resourcebase","download_resourcebase"]},"groups":{}}'
        )
    }
    headers = dict()
    headers['referer'] = upload_url
    headers['X-CSRFToken'] = csrf_token

    files, mime = siblings_files(base_file)
    if len(files) < 1:
        raise RuntimeError(tr('The base layer is not recognised.'))

    name_file = split(base_file)[1]
    multiple_files = [
        ('base_file', (name_file, open(base_file, 'rb'), mime)),
    ]
    for sibling, mime in list(files.items()):
        if sibling != base_file:
            name_param = splitext(sibling)[1][1:]
            name_file = split(sibling)[1]
            open_file = (name_file, open(sibling, 'rb'), mime)
            definition = ('{}_file'.format(name_param), open_file)
            multiple_files.append(definition)

    # For debug
    upload_request = requests.Request(
        'POST',
        upload_url,
        data=payload,
        files=multiple_files,
        headers=headers)
    prepared_request = session.prepare_request(upload_request)
    # For debug
    # pretty_print_post(prepared_request)
    result = session.send(prepared_request)

    # Clean up shapefile and its sibling friends
    if is_geojson:
        for filename in files.keys():
            if filename not in original_sibling_files:
                try:
                    os.remove(filename)
                except OSError:
                    pass

    if result.ok:
        result = json.loads(result.content)
        full_url = server + result['url']
        result['full_url'] = full_url
        return result
    else:
        message = (tr(
            'Failed to upload layer. Got HTTP Status Code {status_code} and '
            'the reason is {reason}').format(
            status_code=result.status_code,
            reason=result.reason))
        raise GeoNodeLayerUploadError(message)
java
public synchronized String makeEscapedCompactString() {
    StringBuffer buffer = new StringBuffer();
    for (Group group : this) {
        buffer.append(group.makeEscapedCompactString());
    }
    return buffer.toString();
}
python
def _split_raw_signature(sig):
    """
    Split raw signature into components

    :param sig: The signature
    :return: A 2-tuple
    """
    c_length = len(sig) // 2
    r = int_from_bytes(sig[:c_length], byteorder='big')
    s = int_from_bytes(sig[c_length:], byteorder='big')
    return r, s
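A raw ECDSA signature is simply the r and s integers concatenated as equal-length big-endian byte strings, which is what the helper above unpacks. A minimal self-contained sketch of the same split using only the standard library (no int_from_bytes dependency):

def split_raw_signature(sig):
    """Split a raw r||s signature into its two big-endian integers."""
    half = len(sig) // 2
    r = int.from_bytes(sig[:half], byteorder='big')
    s = int.from_bytes(sig[half:], byteorder='big')
    return r, s

# e.g. a 64-byte raw P-256 signature yields two 32-byte integers
r, s = split_raw_signature(bytes(range(64)))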
python
def plotline(plt, alpha, taus, style, label=""):
    """ plot a line with the slope alpha """
    y = [pow(tt, alpha) for tt in taus]
    plt.loglog(taus, y, style, label=label)
python
def AddFiles(self, hash_id_metadatas):
    """Adds multiple files to the file store.

    Args:
      hash_id_metadatas: A dictionary mapping hash ids to file metadata (a
        tuple of hash client path and blob references).
    """
    for hash_id, metadata in iteritems(hash_id_metadatas):
        self.AddFile(hash_id, metadata)
java
public java.lang.String getFooterClass() {
    return (java.lang.String) getStateHelper().eval(PropertyKeys.footerClass);
}
python
def frequency_model(self):
    """ build a letter frequency model for Tamil letters from a corpus """
    # use a generator in corpus
    for next_letter in self.corpus.next_tamil_letter():
        # update frequency from corpus
        self.letter[next_letter] = self.letter[next_letter] + 1
java
public Matrix4d rotationTowards(Vector3dc dir, Vector3dc up) {
    return rotationTowards(dir.x(), dir.y(), dir.z(), up.x(), up.y(), up.z());
}
python
def run(configobj=None):
    """ TEAL interface for the `acs2d` function. """
    acs2d(configobj['input'],
          exec_path=configobj['exec_path'],
          time_stamps=configobj['time_stamps'],
          verbose=configobj['verbose'],
          quiet=configobj['quiet'])
java
public InputMapTemplate<S, E> ifIgnored(BiConsumer<? super S, ? super E> postIgnore) {
    return postResult(Result.IGNORE, postIgnore);
}
java
@SuppressWarnings({"WeakerAccess", "unused"}) public Picture renderViewToPicture(String viewId, int widthInPixels, int heightInPixels) { RenderOptions renderOptions = new RenderOptions(); renderOptions.view(viewId) .viewPort(0f, 0f, (float) widthInPixels, (float) heightInPixels); Picture picture = new Picture(); Canvas canvas = picture.beginRecording(widthInPixels, heightInPixels); SVGAndroidRenderer renderer = new SVGAndroidRenderer(canvas, this.renderDPI); renderer.renderDocument(this, renderOptions); picture.endRecording(); return picture; }
python
def C_Reader_Harris_Gallagher_wet_venturi_tube(mg, ml, rhog, rhol, D, Do,
                                               H=1):
    r'''Calculates the coefficient of discharge of the wet gas venturi tube
    based on the geometry of the tube, mass flow rates of liquid and vapor
    through the tube, the density of the liquid and gas phases, and an
    adjustable coefficient `H`.

    .. math::
        C = 1 - 0.0463\exp(-0.05Fr_{gas, th}) \cdot \min\left(1,
        \sqrt{\frac{X}{0.016}}\right)

    .. math::
        Fr_{gas, th} = \frac{Fr_{\text{gas, densiometric}}}{\beta^{2.5}}

    .. math::
        \phi = \sqrt{1 + C_{Ch} X + X^2}

    .. math::
        C_{Ch} = \left(\frac{\rho_l}{\rho_{1,g}}\right)^n +
        \left(\frac{\rho_{1, g}}{\rho_{l}}\right)^n

    .. math::
        n = \max\left[0.583 - 0.18\beta^2 - 0.578\exp\left(\frac{-0.8
        Fr_{\text{gas, densiometric}}}{H}\right), 0.392 - 0.18\beta^2\right]

    .. math::
        X = \left(\frac{m_l}{m_g}\right) \sqrt{\frac{\rho_{1,g}}{\rho_l}}

    .. math::
        {Fr_{\text{gas, densiometric}}} = \frac{v_{gas}}{\sqrt{gD}}
        \sqrt{\frac{\rho_{1,g}}{\rho_l - \rho_{1,g}}}
        = \frac{4m_g}{\rho_{1,g} \pi D^2 \sqrt{gD}}
        \sqrt{\frac{\rho_{1,g}}{\rho_l - \rho_{1,g}}}

    Parameters
    ----------
    mg : float
        Mass flow rate of gas through the venturi tube, [kg/s]
    ml : float
        Mass flow rate of liquid through the venturi tube, [kg/s]
    rhog : float
        Density of gas at `P1`, [kg/m^3]
    rhol : float
        Density of liquid at `P1`, [kg/m^3]
    D : float
        Upstream internal pipe diameter, [m]
    Do : float
        Diameter of venturi tube at flow conditions, [m]
    H : float, optional
        A surface-tension effect coefficient used to adjust for different
        fluids, (1 for a hydrocarbon liquid, 1.35 for water, 0.79 for water
        in steam) [-]

    Returns
    -------
    C : float
        Coefficient of discharge of the wet gas venturi tube flow meter
        (includes flow rate of gas ONLY), [-]

    Notes
    -----
    This model has more error than single phase differential pressure meters.
    The model was first published in [1]_, and became ISO 11583 later.

    The limits of this correlation according to [2]_ are as follows:

    .. math::
        0.4 \le \beta \le 0.75

    .. math::
        0 < X \le 0.3

    .. math::
        Fr_{gas, th} > 3

    .. math::
        \frac{\rho_g}{\rho_l} > 0.02

    .. math::
        D \ge 50 \text{ mm}

    Examples
    --------
    >>> C_Reader_Harris_Gallagher_wet_venturi_tube(mg=5.31926, ml=5.31926/2,
    ... rhog=50.0, rhol=800., D=.1, Do=.06, H=1)
    0.9754210845876333

    References
    ----------
    .. [1] Reader-harris, Michael, and Tuv Nel. An Improved Model for
       Venturi-Tube Over-Reading in Wet Gas, 2009.
    .. [2] ISO/TR 11583:2012 Measurement of Wet Gas Flow by Means of Pressure
       Differential Devices Inserted in Circular Cross-Section Conduits.
    '''
    V = 4*mg/(rhog*pi*D**2)
    Frg = Froude_densimetric(V, L=D, rho1=rhol, rho2=rhog, heavy=False)
    beta = Do/D
    beta2 = beta*beta
    Fr_gas_th = Frg*beta**-2.5

    n = max(0.583 - 0.18*beta2 - 0.578*exp(-0.8*Frg/H), 0.392 - 0.18*beta2)
    C_Ch = (rhol/rhog)**n + (rhog/rhol)**n
    X = ml/mg*(rhog/rhol)**0.5
    OF = (1.0 + C_Ch*X + X*X)**0.5

    C = 1.0 - 0.0463*exp(-0.05*Fr_gas_th)*min(1.0, (X/0.016)**0.5)
    return C
java
public void saveWithTrainingConfig(OutputStream outputStream) throws IOException {
    if (this.trainingConfig == null) {
        throw new IllegalStateException("No training configuration found!");
    }

    saveWithTrainingConfig(this.trainingConfig, outputStream);
}
python
def filter_service_by_servicegroup_name(group):
    """Filter for service
    Filter on group

    :param group: group to filter
    :type group: str
    :return: Filter
    :rtype: bool
    """
    def inner_filter(items):
        """Inner filter for service. Accept if group in service.servicegroups"""
        service = items["service"]
        if service is None:
            return False
        return group in [items["servicegroups"][g].servicegroup_name
                         for g in service.servicegroups]

    return inner_filter
java
public String getPath() {
    StringBuilder result = new StringBuilder().append('$');
    for (int i = 0, size = stackSize; i < size; i++) {
        switch (stack[i]) {
            case JsonScope.EMPTY_ARRAY:
            case JsonScope.NONEMPTY_ARRAY:
                result.append('[').append(pathIndices[i]).append(']');
                break;

            case JsonScope.EMPTY_OBJECT:
            case JsonScope.DANGLING_NAME:
            case JsonScope.NONEMPTY_OBJECT:
                result.append('.');
                if (pathNames[i] != null) {
                    result.append(pathNames[i]);
                }
                break;

            case JsonScope.NONEMPTY_DOCUMENT:
            case JsonScope.EMPTY_DOCUMENT:
            case JsonScope.CLOSED:
                break;
        }
    }
    return result.toString();
}
java
public ServiceFuture<List<AppServiceCertificateResourceInner>> listCertificatesNextAsync(final String nextPageLink, final ServiceFuture<List<AppServiceCertificateResourceInner>> serviceFuture, final ListOperationCallback<AppServiceCertificateResourceInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listCertificatesNextSinglePageAsync(nextPageLink),
        new Func1<String, Observable<ServiceResponse<Page<AppServiceCertificateResourceInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<AppServiceCertificateResourceInner>>> call(String nextPageLink) {
                return listCertificatesNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
java
public static base_response update(nitro_service client, clusternode resource) throws Exception {
    clusternode updateresource = new clusternode();
    updateresource.nodeid = resource.nodeid;
    updateresource.state = resource.state;
    updateresource.backplane = resource.backplane;
    updateresource.priority = resource.priority;
    return updateresource.update_resource(client);
}
python
def server_mechanisms(self):
    """List of available :class:`ServerMechanism` objects."""
    return [mech for mech in self.mechs.values()
            if isinstance(mech, ServerMechanism)]
python
def set_data_points(self, points):
    """
    Input `points` must be in data coordinates, will be converted
    to the coordinate space of the object and stored.
    """
    self.points = np.asarray(self.crdmap.data_to(points))
java
public void clearFields() {
    this.requiredFields = NO_FIELDS;
    this.optionalFields = NO_FIELDS;
    this.textFields = Collections.emptyMap();
    removeAll();
    validate();
}
python
def name(self):
    """str: name of the file entry, which does not include the full path.

    Raises:
      BackEndError: if pytsk3 returns a non UTF-8 formatted name.
    """
    if self._name is None:
        # If pytsk3.FS_Info.open() was used file.info has an attribute name
        # (pytsk3.TSK_FS_FILE) that contains the name string. Otherwise the
        # name from the path specification is used.
        if getattr(self._tsk_file.info, 'name', None) is not None:
            name = getattr(self._tsk_file.info.name, 'name', None)

            try:
                # pytsk3 returns an UTF-8 encoded byte string.
                self._name = name.decode('utf8')
            except UnicodeError:
                raise errors.BackEndError(
                    'pytsk3 returned a non UTF-8 formatted name.')

        else:
            location = getattr(self.path_spec, 'location', None)
            if location:
                self._name = self._file_system.BasenamePath(location)

    return self._name
java
@Override
protected String createDialogHtml(String dialog) {
    StringBuffer result = new StringBuffer(512);

    result.append(createWidgetTableStart());
    // show error header once if there were validation errors
    result.append(createWidgetErrorHeader());

    if (dialog.equals(PAGES[0])) {
        // create the widgets for the first dialog page
        result.append(dialogBlockStart(key(Messages.GUI_LIST_FIELD_ACTION_DELETE_NAME_0)));
        result.append(createWidgetTableStart());
        result.append(key(Messages.GUI_LIST_FIELD_ACTION_DELETE_CONF_1, new Object[] {m_field.getName()}));
        result.append(createWidgetTableEnd());
        result.append(dialogBlockEnd());
    }

    result.append(createWidgetTableEnd());

    // See CmsWidgetDialog.dialogButtonsCustom(): if no widgets are defined that are non-display-only widgets,
    // no dialog buttons (Ok, Cancel) will be visible....
    result.append(dialogButtons(new int[] {BUTTON_OK, BUTTON_CANCEL}, new String[2]));

    return result.toString();
}
python
def prebuild_arch(self, arch):
    """Make the build and target directories"""
    path = self.get_build_dir(arch.arch)
    if not exists(path):
        info("creating {}".format(path))
        shprint(sh.mkdir, '-p', path)
python
def list(self, platformIdentifier, configuration, libOverrides={}):
    """
    Returns the list of supported UE4-bundled third-party libraries
    """
    modules = self._getThirdPartyLibs(platformIdentifier, configuration)
    return sorted([m['Name'] for m in modules] +
                  [key for key in libOverrides])
python
def eval_master_func(opts):
    '''
    Evaluate master function if master type is 'func'
    and save its result in opts['master']
    '''
    if '__master_func_evaluated' not in opts:
        # split module and function and try loading the module
        mod_fun = opts['master']
        mod, fun = mod_fun.split('.')
        try:
            master_mod = salt.loader.raw_mod(opts, mod, fun)
            if not master_mod:
                raise KeyError
            # we take whatever the module returns as master address
            opts['master'] = master_mod[mod_fun]()
            # Check for valid types
            if not isinstance(opts['master'], (six.string_types, list)):
                raise TypeError
            opts['__master_func_evaluated'] = True
        except KeyError:
            log.error('Failed to load module %s', mod_fun)
            sys.exit(salt.defaults.exitcodes.EX_GENERIC)
        except TypeError:
            log.error('%s returned from %s is not a string', opts['master'], mod_fun)
            sys.exit(salt.defaults.exitcodes.EX_GENERIC)
        log.info('Evaluated master from module: %s', mod_fun)
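For context, the function above expects opts['master'] to name a module.function pair whose call returns the master address. A minimal sketch of what such a loader module might look like (the module contents and return value here are hypothetical, not from the Salt source):

# hypothetical contents of a custom loader module, e.g. my_master.py
def determine_master():
    """Return the master address as a string (or a list of addresses)."""
    # a real deployment might query DNS, a config service, etc.
    return 'salt-master.example.com'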
java
public boolean isCurrentIgnoreSpace(char c) {
    if (!hasNext()) return false;
    int start = getPos();
    removeSpace();
    boolean is = isCurrent(c);
    setPos(start);
    return is;
}
java
public synchronized AcceleratedScreen getAcceleratedScreen(int[] attributes)
        throws GLException, UnsatisfiedLinkError {
    if (accScreen == null) {
        accScreen = new AcceleratedScreen(attributes);
    }
    return accScreen;
}
java
public void setProfileTaskRange(boolean isMap, String newValue) {
    // parse the value to make sure it is legal
    new Configuration.IntegerRanges(newValue);
    set((isMap ? "mapred.task.profile.maps" : "mapred.task.profile.reduces"), newValue);
}
java
public static ApnsSigningKey loadFromPkcs8File(final File pkcs8File, final String teamId, final String keyId)
        throws IOException, NoSuchAlgorithmException, InvalidKeyException {
    try (final FileInputStream fileInputStream = new FileInputStream(pkcs8File)) {
        return ApnsSigningKey.loadFromInputStream(fileInputStream, teamId, keyId);
    }
}
java
private List<BaseInvoker> getInvokersForPointcut(Pointcut thePointcut) {
    List<BaseInvoker> invokers;

    synchronized (myRegistryMutex) {
        List<BaseInvoker> globalInvokers = myGlobalInvokers.get(thePointcut);
        List<BaseInvoker> anonymousInvokers = myAnonymousInvokers.get(thePointcut);
        List<BaseInvoker> threadLocalInvokers = null;
        if (myThreadlocalInvokersEnabled) {
            ListMultimap<Pointcut, BaseInvoker> pointcutToInvokers = myThreadlocalInvokers.get();
            if (pointcutToInvokers != null) {
                threadLocalInvokers = pointcutToInvokers.get(thePointcut);
            }
        }
        invokers = union(globalInvokers, anonymousInvokers, threadLocalInvokers);
    }

    return invokers;
}
java
public synchronized void gc() {
    if (!configuration.isUseGc()) {
        LOG.trace("GC deactivated, no beans will be removed!");
        return;
    }
    LOG.trace("Garbage collection started! GC will remove {} beans!", removeOnGC.size());
    onRemoveCallback.onReject(removeOnGC.keySet());
    for (Map.Entry<Instance, Object> entry : removeOnGC.entrySet()) {
        Instance removedInstance = entry.getKey();
        for (Property property : removedInstance.getProperties()) {
            propertyToParent.remove(property);
        }
        for (ObservableList list : removedInstance.getLists()) {
            listToParent.remove(list);
        }
        allInstances.remove(entry.getValue());
    }
    removedBeansCount = removedBeansCount + removeOnGC.size();
    removeOnGC.clear();
    gcCalls = gcCalls + 1;
    LOG.trace("Garbage collection done! GC currently manages {} referenced beans!", allInstances.size());
}
java
@Override
public java.util.concurrent.Future<ListQueueTagsResult> listQueueTagsAsync(String queueUrl) {
    return listQueueTagsAsync(new ListQueueTagsRequest().withQueueUrl(queueUrl));
}
java
@SuppressWarnings("unchecked") private static LogAdapterFactory createAdapterFactory() { List<LogAdapterFactory> factories = Iterators.asList(ServiceLoader.load(LogAdapterFactory.class)); if (factories.isEmpty()) { /* * Always fall back to the JDK, even if our service loading mechanism is hosed for some reason. */ factories = Arrays.asList((LogAdapterFactory) new JDKLogAdapterFactory()); } /* * Sort factories by priority. */ Collections.sort(factories, new WeightedComparator()); /* * Use the factory with the highest priority. */ return factories.get(0); }
python
def move_application(self, app_id, queue):
    """Move an application to a different queue.

    Parameters
    ----------
    app_id : str
        The id of the application to move.
    queue : str
        The queue to move the application to.
    """
    self._call('moveApplication', proto.MoveRequest(id=app_id, queue=queue))
python
def _sign_input(cls, input_, message, key_pairs):
    """Signs a single Input.

    Note:
        This method works only for the following Cryptoconditions
        currently:
            - Ed25519Fulfillment
            - ThresholdSha256.

    Args:
        input_ (:class:`~bigchaindb.common.transaction.Input`) The Input
            to be signed.
        message (str): The message to be signed
        key_pairs (dict): The keys to sign the Transaction with.
    """
    if isinstance(input_.fulfillment, Ed25519Sha256):
        return cls._sign_simple_signature_fulfillment(input_, message,
                                                      key_pairs)
    elif isinstance(input_.fulfillment, ThresholdSha256):
        return cls._sign_threshold_signature_fulfillment(input_, message,
                                                         key_pairs)
    else:
        raise ValueError("Fulfillment couldn't be matched to "
                         'Cryptocondition fulfillment type.')
python
def get_order_specification_visitor(name, registry=None):
    """
    Returns the class registered as the order specification
    visitor utility under the given name (one of the
    :const:`everest.querying.base.EXPRESSION_KINDS` constants).

    :returns: class implementing
        :class:`everest.interfaces.IOrderSpecificationVisitor`
    """
    if registry is None:
        registry = get_current_registry()
    return registry.getUtility(IOrderSpecificationVisitor, name=name)
python
def create_from_xml(root, extdir=None):
    """Create a Source object from an XML node.

    Parameters
    ----------
    root : `~xml.etree.ElementTree.Element`
        XML node containing the source.

    extdir : str
        Path to the extended source archive.
    """
    src_type = root.attrib['type']
    spec = utils.load_xml_elements(root, 'spectrum')
    spectral_pars = utils.load_xml_elements(root, 'spectrum/parameter')
    spectral_type = spec['type']
    spectral_pars = cast_pars_dict(spectral_pars)

    spat = {}
    spatial_pars = {}
    nested_sources = []

    if src_type == 'CompositeSource':
        spatial_type = 'CompositeSource'
        source_library = root.findall('source_library')[0]
        for node in source_library.findall('source'):
            nested_sources += [Source.create_from_xml(node, extdir=extdir)]
    else:
        spat = utils.load_xml_elements(root, 'spatialModel')
        spatial_pars = utils.load_xml_elements(root, 'spatialModel/parameter')
        spatial_pars = cast_pars_dict(spatial_pars)
        spatial_type = spat['type']

    xml_dict = copy.deepcopy(root.attrib)
    src_dict = {'catalog': xml_dict}

    src_dict['Source_Name'] = xml_dict['name']
    src_dict['SpectrumType'] = spectral_type
    src_dict['SpatialType'] = spatial_type
    src_dict['SourceType'] = src_type
    src_dict['Spatial_Filename'] = None
    src_dict['Spectrum_Filename'] = None
    if 'file' in spat:
        src_dict['Spatial_Filename'] = utils.xmlpath_to_path(spat['file'])
        if not os.path.isfile(src_dict['Spatial_Filename']) \
                and extdir is not None:
            src_dict['Spatial_Filename'] = \
                os.path.join(extdir, 'Templates',
                             src_dict['Spatial_Filename'])

    if 'file' in spec:
        src_dict['Spectrum_Filename'] = utils.xmlpath_to_path(spec['file'])

    if src_type == 'PointSource':
        src_dict['SpatialModel'] = 'PointSource'
    elif src_type == 'CompositeSource':
        src_dict['SpatialModel'] = 'CompositeSource'
    elif spatial_type == 'SpatialMap':
        src_dict['SpatialModel'] = 'SpatialMap'
    else:
        src_dict['SpatialModel'] = spatial_type

    if src_type == 'PointSource' or \
            spatial_type in ['SpatialMap', 'RadialGaussian', 'RadialDisk']:

        if 'RA' in xml_dict:
            src_dict['RAJ2000'] = float(xml_dict['RA'])
            src_dict['DEJ2000'] = float(xml_dict['DEC'])
        elif 'RA' in spatial_pars:
            src_dict['RAJ2000'] = float(spatial_pars['RA']['value'])
            src_dict['DEJ2000'] = float(spatial_pars['DEC']['value'])
        else:
            try:
                skydir = wcs_utils.get_map_skydir(os.path.expandvars(
                    src_dict['Spatial_Filename']))
            except Exception:
                skydir = hpx_utils.get_map_skydir(os.path.expandvars(
                    src_dict['Spatial_Filename']))
            src_dict['RAJ2000'] = skydir.ra.deg
            src_dict['DEJ2000'] = skydir.dec.deg

        radec = np.array([src_dict['RAJ2000'], src_dict['DEJ2000']])

        src_dict['spectral_pars'] = spectral_pars
        src_dict['spatial_pars'] = spatial_pars
        return Source(src_dict['Source_Name'], src_dict, radec=radec)
    elif src_type == 'DiffuseSource' and spatial_type == 'ConstantValue':
        return IsoSource(src_dict['Source_Name'],
                         {'Spectrum_Filename': spec['file'],
                          'spectral_pars': spectral_pars,
                          'spatial_pars': spatial_pars})
    elif src_type == 'DiffuseSource' and spatial_type == 'MapCubeFunction':
        return MapCubeSource(src_dict['Source_Name'],
                             {'Spatial_Filename': spat['file'],
                              'SpectrumType': spectral_type,
                              'spectral_pars': spectral_pars,
                              'spatial_pars': spatial_pars})
    elif src_type == 'CompositeSource':
        return CompositeSource(src_dict['Source_Name'],
                               {'SpectrumType': spectral_type,
                                'nested_sources': nested_sources})
    else:
        raise Exception('Unrecognized type for source: %s %s' %
                        (src_dict['Source_Name'], src_type))
python
def ggsave(name, plot, data=None, *args, **kwargs):
    """Save a GGStatements object to destination name

    @param name output file name.  if None, don't run R command
    @param kwargs keyword args to pass to ggsave.  The following are special
            keywords for the python save method

        data: a python data object (list, dict, DataFrame) used to populate
            the `data` variable in R
        libs: list of library names to load in addition to ggplot2
        prefix: string containing R code to run before any ggplot commands
            (including data loading)
        postfix: string containing R code to run after data is loaded
            (e.g., if you want to rename variable names)
        quiet: if Truthy, don't print out R program string
    """
    # constants
    kwdefaults = {
        'width': 10,
        'height': 8,
        'scale': 1
    }
    keys_to_rm = ["prefix", "quiet", "postfix", 'libs']
    varname = 'p'

    # process arguments
    prefix = kwargs.get('prefix', '')
    postfix = kwargs.get('postfix', '')
    libs = kwargs.get('libs', [])
    libs = '\n'.join(["library(%s)" % lib for lib in libs])
    quiet = kwargs.get("quiet", False)
    kwargs = {k: v for k, v in kwargs.iteritems()
              if v is not None and k not in keys_to_rm}
    kwdefaults.update(kwargs)
    kwargs = kwdefaults

    # figure out how to load data in the R environment
    if data is None:
        data = plot.data

    if data is None:
        # Don't load anything, the data source is already present in R
        data_src = ''
    elif isinstance(data, basestring) and 'RPostgreSQL' in data:
        # Hack to allow through data_sql results
        data_src = data
    elif isinstance(data, GGData):
        data_src = str(data)
    else:
        # format the python data object
        data_src = str(data_py(data))

    prog = "%(header)s\n%(libs)s\n%(prefix)s\n%(data)s\n%(postfix)s\n%(varname)s = %(prog)s" % {
        'header': "library(ggplot2)",
        'libs': libs,
        'data': data_src,
        'prefix': prefix,
        'postfix': postfix,
        'varname': varname,
        'prog': plot.r
    }

    if name:
        stmt = GGStatement("ggsave", esc(name), varname, *args, **kwargs)
        prog = "%s\n%s" % (prog, stmt.r)

    if not quiet:
        print prog
        print

    if name:
        execute_r(prog, quiet)
    return prog
java
public static String readStringFromFile(String path, SparkContext sc) throws IOException {
    return readStringFromFile(path, sc.hadoopConfiguration());
}
python
def herp_derp_interp(place):
    """simple interpolation of GFS forecast"""
    lat, lon = place
    # begin=2014-02-14T00%3A00%3A00&end=2018-02-22T00%3A00%3A00
    fmt = '%Y-%m-%dT00:00:00'
    fmt = '%Y-%m-%dT%H:%M:00'
    begin = (datetime.datetime.now()-datetime.timedelta(hours=12)).strftime(fmt)
    # end = (datetime.datetime.now()+datetime.timedelta(hours=48)).strftime(fmt)
    url = "http://graphical.weather.gov/xml/SOAP_server/ndfdXMLclient.php?" + \
        "whichClient=NDFDgen&lat=%s&lon=%s&" % (lat, lon) + \
        "Unit=e&temp=temp&wspd=wspd&sky=sky&wx=wx&rh=rh&" + \
        "product=time-series&begin=%s&end=2018-02-22T00:00:00" % begin + \
        "&Submit=Submit"
    res = urllib2.urlopen(url).read()
    root = ET.fromstring(res)
    time_series = [_cast_float(i.text) for i in
                   root.findall('./data/time-layout')[0].iterfind('start-valid-time')]
    # knots to mph
    wind_speed = [eval(i.text)*1.15 for i in
                  root.findall('./data/parameters/wind-speed')[0].iterfind('value')]
    cloud_cover = [eval(i.text)/100.0 for i in
                   root.findall('./data/parameters/cloud-amount')[0].iterfind('value')]
    temperature = [eval(i.text) for i in
                   root.findall('./data/parameters/temperature')[0].iterfind('value')]
    ws_interp = interp1d(time_series, wind_speed, kind='cubic')
    cc_interp = interp1d(time_series, cloud_cover, kind='cubic')
    t_interp = interp1d(time_series, temperature, kind='cubic')
    start_date = datetime.datetime.utcfromtimestamp(time_series[0])
    series = []
    for i in range(48):
        try:
            temp_dict = {}
            forecast_dt = start_date + datetime.timedelta(hours=i)
            temp_dict['utc_datetime'] = forecast_dt
            temp_dict['windSpeed'] = ws_interp(_cast_float(forecast_dt)).item()
            temp_dict['temperature'] = t_interp(_cast_float(forecast_dt)).item()
            temp_dict['cloudCover'] = cc_interp(_cast_float(forecast_dt)).item()
            series.append(temp_dict)
        except:
            pass
    return series
java
public ExecutionContext transform(Consumer<ExecutionContextBuilder> builderConsumer) {
    ExecutionContextBuilder builder = ExecutionContextBuilder.newExecutionContextBuilder(this);
    builderConsumer.accept(builder);
    return builder.build();
}
python
def main(args):
    """
    Main function - launches the program
    """
    if args:
        if not args.outputRepository:
            HOME_DIR = os.path.expanduser('~')  # Utility's base directory
            BASE_DIR = os.path.abspath(os.path.dirname(__file__))
            DOWNLOAD_DIR = HOME_DIR + '/landsat'
            ZIP_DIR = DOWNLOAD_DIR + '/zip'
        else:
            ZIP_DIR = args.outputRepository

        if args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
            except TypeError:
                exit("Your date format is incorrect. Please try again!", 1)

            s = Search()
            clipper = Clipper()

            if args.search_subs == 'shapefile':
                clipper.shapefile(args.path)
            elif args.search_subs == 'query':
                clipper.query(args.name)

            result = s.search(args.limit, args.start, args.end, clipper)

            try:
                if result['status'] == 'SUCCESS':
                    if result['total'] > 200:
                        exit('Too many results. Please narrow your search '
                             'or limit your query with -l options')
                    else:
                        if args.outputRepository:
                            with open(ZIP_DIR + '/result.geojson', 'w') as outfile:
                                json.dump(result['results'], outfile)
                            print("The geojson file has been created here: %s" % ZIP_DIR)
                        else:
                            print("The IDs that matched the request are:")
                            for i in result['ID']:
                                print(i)

                    if args.download:
                        gs = GsHelper(ZIP_DIR)
                        if (args.password) and (args.user):
                            print('Starting the download:')
                            for item in result['downloads']:
                                login = args.user
                                mdp = args.password
                                gs.single_download(login, mdp, item['download'],
                                                   item['id'], ZIP_DIR)
                                print("%s has been downloaded ... continuing downloading" % item['id'])
                            print("%s images were downloaded" % result['total'])
                            exit("The downloaded images are located here: %s" % ZIP_DIR)
                        else:
                            exit("Please give a login and a password for theia downloading")
                    else:
                        exit("")
                elif result['status'] == 'error':
                    exit(result['message'])
            except KeyError:
                exit('Too Many API queries. You can only query DevSeed\'s '
                     'API 5 times per minute', 1)

        elif args.subs == 'download':
            gs = GsHelper(ZIP_DIR)
            print('Starting the download:')
            if (args.password) and (args.user):
                for scene in args.scenes:
                    login = args.user
                    mdp = args.password
                    download = 'http://spirit.cnes.fr/resto/Landsat/' + scene + '/$download'
                    testD = gs.checkifDownloadExist(login, mdp, download, scene)
                    if testD:
                        gs.single_download(login, mdp, download, scene, ZIP_DIR)
                    else:
                        exit("SceneID has not been found or wrong user/password given!")
                exit("The downloaded images are located here: %s" % gs.zip_dir)
            else:
                exit("Please give a login and a password for theia downloading")
java
public static String elasticSearchTimeFormatToISO8601(String time) {
    try {
        DateTime dt = DateTime.parse(time, ES_DATE_FORMAT_FORMATTER);
        return getISO8601String(dt);
    } catch (IllegalArgumentException e) {
        return time;
    }
}
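The same parse-or-pass-through pattern translated to Python, as a minimal sketch; the '%Y-%m-%d %H:%M:%S.%f' input format is an assumption about what ES_DATE_FORMAT_FORMATTER accepts, since that formatter is defined elsewhere:

from datetime import datetime, timezone

def es_time_to_iso8601(ts):
    """Parse an Elasticsearch-style timestamp; fall back to the raw value."""
    try:
        dt = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
        return dt.replace(tzinfo=timezone.utc).isoformat()
    except ValueError:
        return ts  # unparseable input is returned unchanged, like the Java code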
python
def forward(self, speed=1):
    """
    Drive the motor forwards.

    :param float speed:
        The speed at which the motor should turn. Can be any value between
        0 (stopped) and the default 1 (maximum speed).
    """
    if isinstance(self.enable_device, DigitalOutputDevice):
        if speed not in (0, 1):
            raise ValueError(
                'forward speed must be 0 or 1 with non-PWM Motors')
    self.enable_device.off()
    self.phase_device.off()
    self.enable_device.value = speed
python
def _pages_to_generate(self):
    '''Return list of slugs that correspond to pages to generate.'''
    # right now it gets all the files. In theory, it should only
    # get what's changed... but the program is not doing that yet.
    all_pages = self.get_page_names()

    # keep only those whose status is published
    ptg = []
    for slug in all_pages:
        p = s2page.Page(self, slug, isslug=True)
        if p.published:
            ptg.append({'slug': p.slug,
                        'title': p.title,
                        'date': p.creation_date})

    # sort the ptg array in reverse chronological order of its entries.
    sptg = sorted(ptg, key=lambda x: x['date'], reverse=True)
    res = [pinfo['slug'] for pinfo in sptg]
    return res
python
def _get_command(classes):
    """Associates each command class with command depending on setup.cfg
    """
    commands = {}
    setup_file = os.path.join(
        os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')),
        'setup.cfg')
    for line in open(setup_file, 'r'):
        for cl in classes:
            if cl in line:
                commands[cl] = line.split(' = ')[0].strip().replace('_', ' ')
    return commands
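The parser above assumes setup.cfg entry-point lines of the form "name = package.module:ClassName". A stand-alone demonstration of the matching logic on a hypothetical line (all names here are illustrative, not from the original project):

# hypothetical setup.cfg entry-point line the loop would match
line = 'my_sub_command = mypackage.cli:MyCommandClass'
classes = ['MyCommandClass']

commands = {}
for cl in classes:
    if cl in line:
        # '_' -> ' ' turns the entry-point name into a CLI command phrase
        commands[cl] = line.split(' = ')[0].strip().replace('_', ' ')

print(commands)  # {'MyCommandClass': 'my sub command'}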
python
def kill(name, signal):
    '''
    sends a kill signal to process 1 of the container <name>

    :param signal: numeric signal
    '''
    if not exists(name):
        raise ContainerNotExists("The container (%s) does not exist!" % name)
    cmd = ['lxc-kill', '--name=%s' % name, signal]
    subprocess.check_call(cmd)
python
def log_create(self, instance, **kwargs):
    """
    Helper method to create a new log entry. This method automatically populates
    some fields when no explicit value is given.

    :param instance: The model instance to log a change for.
    :type instance: Model
    :param kwargs: Field overrides for the :py:class:`LogEntry` object.
    :return: The new log entry or `None` if there were no changes.
    :rtype: LogEntry
    """
    changes = kwargs.get('changes', None)
    pk = self._get_pk_value(instance)

    if changes is not None:
        kwargs.setdefault('content_type', ContentType.objects.get_for_model(instance))
        kwargs.setdefault('object_pk', pk)
        kwargs.setdefault('object_repr', smart_text(instance))

        if isinstance(pk, integer_types):
            kwargs.setdefault('object_id', pk)

        get_additional_data = getattr(instance, 'get_additional_data', None)
        if callable(get_additional_data):
            kwargs.setdefault('additional_data', get_additional_data())

        # Delete log entries with the same pk as a newly created model.
        # This should only be necessary when a pk is used twice.
        if kwargs.get('action', None) is LogEntry.Action.CREATE:
            if kwargs.get('object_id', None) is not None and self.filter(
                    content_type=kwargs.get('content_type'),
                    object_id=kwargs.get('object_id')).exists():
                self.filter(content_type=kwargs.get('content_type'),
                            object_id=kwargs.get('object_id')).delete()
            else:
                self.filter(content_type=kwargs.get('content_type'),
                            object_pk=kwargs.get('object_pk', '')).delete()

        # save LogEntry to the same database the instance is using
        db = instance._state.db
        return self.create(**kwargs) if db is None or db == '' else self.using(db).create(**kwargs)
    return None
java
static String convertURLToString(URL url) {
    if (URISchemeType.FILE.isURL(url)) {
        final StringBuilder externalForm = new StringBuilder();
        externalForm.append(url.getPath());
        final String ref = url.getRef();
        if (!Strings.isEmpty(ref)) {
            externalForm.append("#").append(ref); //$NON-NLS-1$
        }
        return externalForm.toString();
    }
    return url.toExternalForm();
}
python
def form_uri(item_content, item_class):
    """Form the URI for a music service element.

    :param item_content: The content dict of the item
    :type item_content: dict
    :param item_class: The class of the item
    :type item_class: Sub-class of
        :py:class:`soco.data_structures.MusicServiceItem`
    """
    extension = None
    if 'mime_type' in item_content:
        extension = MIME_TYPE_TO_EXTENSION[item_content['mime_type']]
    out = URIS.get(item_class)
    if out:
        out = out.format(extension=extension, **item_content)
    return out
java
private Object getComponentProperty(_PropertyDescriptorHolder propertyDescriptor) {
    Method readMethod = propertyDescriptor.getReadMethod();
    if (readMethod == null) {
        throw new IllegalArgumentException("Component property " + propertyDescriptor.getName()
                + " is not readable");
    }
    try {
        return readMethod.invoke(_component, EMPTY_ARGS);
    } catch (Exception e) {
        FacesContext facesContext = _component.getFacesContext();
        throw new FacesException("Could not get property " + propertyDescriptor.getName()
                + " of component " + _component.getClientId(facesContext), e);
    }
}
java
public static IntegerBinding getExponent(final ObservableFloatValue f) {
    return createIntegerBinding(() -> Math.getExponent(f.get()), f);
}
java
public static DataSource createDataSource(final Map<String, DataSource> dataSourceMap, final File yamlFile) throws SQLException, IOException {
    YamlRootShardingConfiguration config = YamlEngine.unmarshal(yamlFile, YamlRootShardingConfiguration.class);
    return ShardingDataSourceFactory.createDataSource(dataSourceMap,
            new ShardingRuleConfigurationYamlSwapper().swap(config.getShardingRule()), config.getProps());
}
python
def _cellmagic(cls, options, obj, strict=False):
    "Deprecated, not expected to be used by any current code"
    options, failure = cls._process_magic(options, strict)
    if failure:
        return obj
    if not isinstance(obj, Dimensioned):
        return obj
    else:
        return StoreOptions.set_options(obj, options)
python
def find_transport_reactions(model):
    """
    Return a list of all transport reactions.

    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.

    Notes
    -----
    A transport reaction is defined as follows:
    1. It contains metabolites from at least 2 compartments and
    2. at least 1 metabolite undergoes no chemical reaction, i.e.,
       the formula and/or annotation stays the same on both sides of the
       equation.

    A notable exception is transport via PTS, which also contains the
    following restriction:
    3. The transported metabolite(s) are transported into a compartment
       through the exchange of a phosphate group.

    An example of transport via PTS would be
    pep(c) + glucose(e) -> glucose-6-phosphate(c) + pyr(c)

    Reactions similar to transport via PTS (referred to as "modified
    transport reactions") follow a similar pattern:
    A(x) + B-R(y) -> A-R(y) + B(y)

    Such modified transport reactions can be detected, but only when a
    formula field exists for all metabolites in a particular reaction.
    If this is not the case, transport reactions are identified through
    annotations, which cannot detect modified transport reactions.
    """
    transport_reactions = []
    transport_rxn_candidates = set(model.reactions) - set(model.boundary) \
        - set(find_biomass_reaction(model))
    transport_rxn_candidates = set(
        [rxn for rxn in transport_rxn_candidates if len(rxn.compartments) >= 2]
    )

    # Add all labeled transport reactions
    sbo_matches = set([rxn for rxn in transport_rxn_candidates if
                       rxn.annotation is not None and
                       'sbo' in rxn.annotation and
                       rxn.annotation['sbo'] in TRANSPORT_RXN_SBO_TERMS])

    if len(sbo_matches) > 0:
        transport_reactions += list(sbo_matches)

    # Find unlabeled transport reactions via formula or annotation checks
    for rxn in transport_rxn_candidates:
        # Check if metabolites have formula field
        rxn_mets = set([met.formula for met in rxn.metabolites])

        if (None not in rxn_mets) and (len(rxn_mets) != 0):
            if is_transport_reaction_formulae(rxn):
                transport_reactions.append(rxn)
        elif is_transport_reaction_annotations(rxn):
            transport_reactions.append(rxn)

    return set(transport_reactions)
python
def param_extract(args, short_form, long_form, default=None):
    """
    Quick extraction of a parameter from the command line argument list.

    In some cases we need to parse a few arguments before the official
    arg-parser starts.

    Returns parameter value, or None if not present.
    """
    val = default
    for i, a in enumerate(args):
        # Long form may use "--xyz=foo", so need to split on '=', but it
        # doesn't necessarily do that, can also be "--xyz foo".
        elems = a.split("=", 1)
        if elems[0] in [short_form, long_form]:
            # At least make sure that an actual name was specified
            if len(elems) == 1:
                if i + 1 < len(args) and not args[i + 1].startswith("-"):
                    val = args[i + 1]
                else:
                    val = ""  # Invalid value was specified
            else:
                val = elems[1]
            break

    return val
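A quick demonstration of the extraction behavior on a hypothetical argument list (the flag names are illustrative only):

argv = ['--verbose', '--config=/etc/app.cfg', '-p', '8080']

print(param_extract(argv, '-c', '--config'))  # '/etc/app.cfg' (split on '=')
print(param_extract(argv, '-p', '--port'))    # '8080' (value in next token)
print(param_extract(argv, '-x', '--missing', default='fallback'))  # 'fallback'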
java
private void readMetadata() throws SQLException {
    if (valid) {
        return;
    }
    String[] metaInfos = queryMetaInfos(isFunction);
    String paramList = metaInfos[0];
    String functionReturn = metaInfos[1];

    parseParamList(isFunction, paramList);

    // parse type of the return value (for functions)
    if (isFunction) {
        parseFunctionReturnParam(functionReturn);
    }
}
java
public int read() throws IOException {
    if (_peek >= 0) {
        int peek = _peek;
        _peek = -1;
        return peek;
    }

    InputStream is = _is;
    int ch1 = is.read();

    if (ch1 < 0x80) {
        return ch1;
    } else if ((ch1 & 0xe0) == 0xc0) {
        int ch2 = is.read();

        if (ch2 < 0) {
            return error("unexpected end of file in utf8 character");
        } else if ((ch2 & 0xc0) != 0x80) {
            return error("utf-8 character conversion error for '{0}' because second byte is invalid at "
                    + String.format("0x%02x 0x%02x", ch1, ch2));
        }

        return ((ch1 & 0x1f) << 6) + (ch2 & 0x3f);
    } else if ((ch1 & 0xf0) == 0xe0) {
        int ch2 = is.read();
        int ch3 = is.read();

        if (ch2 < 0)
            return error("unexpected end of file in utf8 character");
        else if ((ch2 & 0xc0) != 0x80) {
            return error("illegal utf8 encoding at "
                    + "\\x" + Integer.toHexString(ch1)
                    + "\\x" + Integer.toHexString(ch2)
                    + "\\x" + Integer.toHexString(ch3));
        }

        if (ch3 < 0)
            return error("unexpected end of file in utf8 character");
        else if ((ch3 & 0xc0) != 0x80)
            return error("illegal utf8 encoding at "
                    + "\\x" + Integer.toHexString(ch1)
                    + "\\x" + Integer.toHexString(ch2)
                    + "\\x" + Integer.toHexString(ch3));

        int ch = ((ch1 & 0x1f) << 12) + ((ch2 & 0x3f) << 6) + (ch3 & 0x3f);

        if (ch == 0xfeff) {
            // If byte-order-mark, read next character
            // server/1m00
            return read();
        } else {
            return ch;
        }
    } else if ((ch1 & 0xf0) == 0xf0) {
        int ch2 = is.read();
        int ch3 = is.read();
        int ch4 = is.read();

        if (ch2 < 0)
            return error("unexpected end of file in utf8 character");
        else if ((ch2 & 0xc0) != 0x80)
            return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch2));

        if (ch3 < 0)
            return error("unexpected end of file in utf8 character");
        else if ((ch3 & 0xc0) != 0x80)
            return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch3));

        if (ch4 < 0)
            return error("unexpected end of file in utf8 character");
        else if ((ch4 & 0xc0) != 0x80)
            return error("illegal utf8 encoding at 0x" + Integer.toHexString(ch4));

        int ch = (((ch1 & 0xf) << 18)
                + ((ch2 & 0x3f) << 12)
                + ((ch3 & 0x3f) << 6)
                + ((ch4 & 0x3f)));

        _peek = 0xdc00 + (ch & 0x3ff);

        return 0xd800 + ((ch - 0x10000) / 0x400);
    } else {
        return error("illegal utf8 encoding at (0x" + Integer.toHexString(ch1) + ")");
    }
}
python
def __store_config(self, args, kwargs):
    """ Assign args to kwargs and store configuration. """
    signature = (
        'schema',
        'ignore_none_values',
        'allow_unknown',
        'require_all',
        'purge_unknown',
        'purge_readonly',
    )
    for i, p in enumerate(signature[: len(args)]):
        if p in kwargs:
            raise TypeError("__init__ got multiple values for argument "
                            "'%s'" % p)
        else:
            kwargs[p] = args[i]
    self._config = kwargs
    """ This dictionary holds the configuration arguments that were used to
        initialize the :class:`Validator` instance except the
        ``error_handler``. """
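A stand-alone illustration of the positional-to-keyword folding technique used above, with duplicate detection (the signature names and function name here are hypothetical):

def store_config(args, kwargs, signature=('schema', 'allow_unknown')):
    """Fold positional args into kwargs, rejecting duplicates."""
    for name, value in zip(signature, args):
        if name in kwargs:
            raise TypeError("got multiple values for argument '%s'" % name)
        kwargs[name] = value
    return kwargs

print(store_config(({'field': 'rule'},), {'allow_unknown': True}))
# {'allow_unknown': True, 'schema': {'field': 'rule'}}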
java
public void configInterceptor(Interceptors me) {
    // add exception interceptor
    me.add(new ExceptionInterceptor());
    if (this.getHttpPostMethod()) {
        me.add(new POST());
    }
    // config others
    configMoreInterceptors(me);
}
python
def _write_docstring_parameters(self, routine):
    """
    Writes the parameters part of the docstring for the wrapper method of a
    stored routine.

    :param dict routine: The metadata of the stored routine.
    """
    if routine['pydoc']['parameters']:
        self._write_line('')

        for param in routine['pydoc']['parameters']:
            lines = param['description'].split(os.linesep)
            self._write_line(':param {0} {1}: {2}'.format(param['python_type'],
                                                          param['parameter_name'],
                                                          lines[0]))
            del lines[0]

            tmp = ':param {0} {1}:'.format(param['python_type'], param['parameter_name'])
            indent = ' ' * len(tmp)
            for line in lines:
                self._write_line('{0} {1}'.format(indent, line))

            self._write_line('{0} {1}'.format(indent, param['data_type_descriptor']))
python
def batch_run_many(player, positions, batch_size=100):
    """Used to avoid a memory overflow issue when running the network
    on too many positions. TODO: This should be a member function of
    player.network?"""
    prob_list = []
    value_list = []
    for idx in range(0, len(positions), batch_size):
        probs, values = player.network.run_many(positions[idx:idx + batch_size])
        prob_list.append(probs)
        value_list.append(values)
    return np.concatenate(prob_list, axis=0), np.concatenate(value_list, axis=0)
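A minimal self-contained demonstration of the same chunk-and-concatenate pattern, with a toy stand-in for the network call (all names here are hypothetical):

import numpy as np

def toy_run_many(batch):
    """Stand-in for a network call: one probability row per position."""
    return np.ones((len(batch), 3)), np.zeros(len(batch))

positions = list(range(250))
prob_list, value_list = [], []
for idx in range(0, len(positions), 100):  # 3 chunks: 100, 100, 50
    probs, values = toy_run_many(positions[idx:idx + 100])
    prob_list.append(probs)
    value_list.append(values)

probs = np.concatenate(prob_list, axis=0)    # shape (250, 3)
values = np.concatenate(value_list, axis=0)  # shape (250,)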
python
def set_collections_acl(self):
    """ Calculate and set ACL valid for requested collections.

    DENY_ALL is added to the ACL to make sure no access rules are
    inherited.
    """
    acl = [(Allow, 'g:admin', ALL_PERMISSIONS)]
    collections = self.get_collections()
    resources = self.get_resources(collections)
    aces = self._get_least_permissions_aces(resources)
    if aces is not None:
        for ace in aces:
            acl.append(ace)
    acl.append(DENY_ALL)
    self.__acl__ = tuple(acl)
python
def load(self, context):
    """Returns the plugin, if possible.

    Args:
      context: The TBContext flags.

    Returns:
      An InteractiveInferencePlugin instance or None if it couldn't be loaded.
    """
    try:
        # pylint: disable=g-import-not-at-top,unused-import
        import tensorflow
    except ImportError:
        return
    # pylint: disable=line-too-long,g-import-not-at-top
    from tensorboard.plugins.interactive_inference.interactive_inference_plugin import InteractiveInferencePlugin
    return InteractiveInferencePlugin(context)
java
public static Filter toTimestampRangeFilter(long bigtableStartTimestamp, long bigtableEndTimestamp) {
    return FILTERS.timestamp().range().of(bigtableStartTimestamp, bigtableEndTimestamp);
}
python
def generate_sample_cdk_tsc_module(env_root, module_dir=None):
    """Generate skeleton CDK TS sample module."""
    if module_dir is None:
        module_dir = os.path.join(env_root, 'sampleapp.cdk')
    generate_sample_module(module_dir)
    for i in ['.npmignore', 'cdk.json', 'package.json', 'runway.module.yml',
              'tsconfig.json', 'README.md']:
        shutil.copyfile(
            os.path.join(ROOT, 'templates', 'cdk-tsc', i),
            os.path.join(module_dir, i),
        )
    for i in [['bin', 'sample.ts'], ['lib', 'sample-stack.ts']]:
        os.mkdir(os.path.join(module_dir, i[0]))
        shutil.copyfile(
            os.path.join(ROOT, 'templates', 'cdk-tsc', i[0], i[1]),
            os.path.join(module_dir, i[0], i[1]),
        )
    with open(os.path.join(module_dir, '.gitignore'), 'w') as stream:
        stream.write('*.js\n')
        stream.write('*.d.ts\n')
        stream.write('node_modules\n')
    LOGGER.info("Sample CDK module created at %s", module_dir)
    LOGGER.info('To finish its setup, change to the %s directory and execute '
                '"npm install" to generate its lockfile.', module_dir)
java
public static void convertBaseClasses(ImportSet imports) {
    // replace primitive types with OOO types (required for unboxing)
    imports.replace("byte", "com.threerings.util.Byte");
    imports.replace("boolean", "com.threerings.util.langBoolean");
    imports.replace("[B", "flash.utils.ByteArray");
    imports.replace("float", "com.threerings.util.Float");
    imports.replace("long", "com.threerings.util.Long");

    if (imports.removeAll("[*") > 0) {
        imports.add("com.threerings.io.TypedArray");
    }

    // convert java primitive boxes to their ooo counterparts
    imports.replace(Integer.class, "com.threerings.util.Integer");

    // convert some java.util types to their ooo counterparts
    imports.replace(Map.class, "com.threerings.util.Map");

    // get rid of java.lang stuff and any remaining primitives
    imports.removeGlobals();

    // get rid of remaining arrays
    imports.removeArrays();
}
java
public ClientContact instantiateForInsert() {
    ClientContact entity = new ClientContact();
    entity.setCategory(new Category(512973));
    entity.setClientCorporation(new ClientCorporation(1));
    entity.setEmail("unknown");
    entity.setNumEmployees(1);
    entity.setIsDeleted(Boolean.FALSE);
    entity.setPreferredContact("Email");
    entity.setStatus("Client");
    entity.setUsername(new DateTime().toString());
    entity.setPassword("secret");
    entity.setType("Unknown");
    return entity;
}
java
public boolean equalAny(final TokenType... tokenTypes) {
    for (TokenType each : tokenTypes) {
        if (each == lexer.getCurrentToken().getType()) {
            return true;
        }
    }
    return false;
}
java
private static boolean isKnownLeafClassLoader(final ClassLoader classLoader) {
    if (classLoader == null) {
        return false;
    }
    if (!isKnownClassLoaderAccessibleFrom(classClassLoader, classLoader)) {
        // We cannot access the class class loader from the specified class loader, so this is not a leaf
        return false;
    }
    // Now we know there is a way to reach the class class loader from the argument class loader, so we should
    // base our results on whether there is a way to reach the system class loader from the class class loader.
    return systemClassLoaderAccessibleFromClassClassLoader;
}
python
def setLoggingFromOptions(options):
    """Sets the logging from a dictionary of name/value options.
    """
    # We can now set up the logging info.
    if options.logLevel is not None:
        setLogLevel(options.logLevel)  # Use log level, unless flags are set..

    if options.logOff:
        setLogLevel("OFF")
    elif options.logInfo:
        setLogLevel("INFO")
    elif options.logDebug:
        setLogLevel("DEBUG")

    logger.info("Logging set at level: %s" % logLevelString)

    if options.logFile is not None:
        addLoggingFileHandler(options.logFile, options.logRotating)
        logger.info("Logging to file: %s" % options.logFile)
python
def _to_roman(num):
    """Convert integer to roman numerals."""
    roman_numeral_map = (
        ('M', 1000), ('CM', 900), ('D', 500), ('CD', 400),
        ('C', 100), ('XC', 90), ('L', 50), ('XL', 40),
        ('X', 10), ('IX', 9), ('V', 5), ('IV', 4), ('I', 1)
    )
    if not (0 < num < 5000):
        log(WARN, 'Number out of range for roman (must be 1..4999)')
        return str(num)

    result = ''
    for numeral, integer in roman_numeral_map:
        while num >= integer:
            result += numeral
            num -= integer
    return result
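The greedy subtraction loop always terminates because every value from 1 to 4999 can be decomposed by the table, largest denomination first. A quick self-contained check of the conversion, dropping the logging dependency:

def to_roman(num):
    table = (('M', 1000), ('CM', 900), ('D', 500), ('CD', 400),
             ('C', 100), ('XC', 90), ('L', 50), ('XL', 40),
             ('X', 10), ('IX', 9), ('V', 5), ('IV', 4), ('I', 1))
    result = ''
    for numeral, value in table:
        while num >= value:
            result += numeral
            num -= value
    return result

assert to_roman(1994) == 'MCMXCIV'
assert to_roman(4999) == 'MMMMCMXCIX'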
python
def ListChildPathInfos(self, client_id, path_type, components, timestamp=None):
    """Lists path info records that correspond to children of given path.

    Args:
      client_id: An identifier string for a client.
      path_type: A type of a path to retrieve path information for.
      components: A tuple of path components of a path to retrieve child path
        information for.
      timestamp: If set, lists only descendants that existed at that timestamp.

    Returns:
      A list of `rdf_objects.PathInfo` instances sorted by path components.
    """
    return self.ListDescendentPathInfos(
        client_id, path_type, components, max_depth=1, timestamp=timestamp)
python
def finalize_oauth(self, access_token, access_token_secret):
    """ Called internally once auth process is complete. """
    self.access_token = access_token
    self.access_token_secret = access_token_secret

    # Final OAuth object
    self.oauth = OAuth1(
        self.consumer_key,
        client_secret=self.consumer_secret,
        resource_owner_key=self.access_token,
        resource_owner_secret=self.access_token_secret)
python
def clear(self):
    """Clear ErrorMessage.
    """
    self.problems = []
    self.details = []
    self.suggestions = []
    self.tracebacks = []
java
public Schemata getSchemataForSession( JcrSession session ) {
    assert session != null;
    // If the session does not override any namespace mappings used in this schemata ...
    if (!overridesNamespaceMappings(session)) {
        // Then we can just use this schemata instance ...
        return this;
    }

    // Otherwise, the session has some custom namespace mappings, so we need to return a session-specific instance...
    return new SessionSchemata(session);
}
java
Stream<String> writeMeter(Meter m) {
    List<Field> fields = new ArrayList<>();
    for (Measurement measurement : m.measure()) {
        double value = measurement.getValue();
        if (!Double.isFinite(value)) {
            continue;
        }
        String fieldKey = measurement.getStatistic().getTagValueRepresentation()
                .replaceAll("(.)(\\p{Upper})", "$1_$2").toLowerCase();
        fields.add(new Field(fieldKey, value));
    }
    if (fields.isEmpty()) {
        return Stream.empty();
    }
    return Stream.of(influxLineProtocol(m.getId(), "unknown", fields.stream()));
}
java
public static void logDiffException(final Logger logger, final DiffException e) {
    logger.logException(Level.ERROR, "DiffException", e);
}
java
@SuppressWarnings("unchecked") // safe covariant cast public static ImmutableSet<OpenOption> getOptionsForInputStream(OpenOption... options) { boolean nofollowLinks = false; for (OpenOption option : options) { if (checkNotNull(option) != READ) { if (option == LinkOption.NOFOLLOW_LINKS) { nofollowLinks = true; } else { throw new UnsupportedOperationException("'" + option + "' not allowed"); } } } // just return the link options for finding the file, nothing else is needed return (ImmutableSet<OpenOption>) (ImmutableSet<?>) (nofollowLinks ? NOFOLLOW_LINKS : FOLLOW_LINKS); }
java
private static void parseChildShapes(ArrayList<Shape> shapes, JSONObject modelJSON, Shape current) throws JSONException {
    if (modelJSON.has("childShapes")) {
        ArrayList<Shape> childShapes = new ArrayList<Shape>();

        JSONArray childShapeObject = modelJSON.getJSONArray("childShapes");
        for (int i = 0; i < childShapeObject.length(); i++) {
            childShapes.add(getShapeWithId(
                    childShapeObject.getJSONObject(i).getString("resourceId"), shapes));
        }

        if (childShapes.size() > 0) {
            for (Shape each : childShapes) {
                each.setParent(current);
            }
            current.setChildShapes(childShapes);
        }
    }
}
java
public static ResourceList<Message> list(final BandwidthClient client, final int page, final int size) throws Exception {
    final String messageUri = client.getUserResourceUri(BandwidthConstants.MESSAGES_URI_PATH);
    final ResourceList<Message> messages = new ResourceList<Message>(page, size, messageUri, Message.class);
    messages.setClient(client);
    messages.initialize();
    return messages;
}
java
public static ApruveResponse<Payment> get(String paymentRequestId, String paymentId) {
    return ApruveClient.getInstance().get(
            getPaymentsPath(paymentRequestId) + paymentId, Payment.class);
}
java
public String getObjectType(int groupID, int objectID) {
    if (groupID >= 0 && groupID < objectGroups.size()) {
        ObjectGroup grp = (ObjectGroup) objectGroups.get(groupID);
        if (objectID >= 0 && objectID < grp.objects.size()) {
            GroupObject object = (GroupObject) grp.objects.get(objectID);
            return object.type;
        }
    }
    return null;
}
python
def in_list(list_to_search, string_to_search):
    """
    Verify if the list contains the item.

    :param list_to_search: The list.
    :type list_to_search: str
    :param string_to_search: The value of item.
    :type string_to_search: str

    :return: True if the list contains the item, or False if it does not.
    :rtype: bool
    """
    if (bool(list_to_search)) and (bool(string_to_search)):
        elements = re.split('[ \n\t\r]+', list_to_search)
        for element in elements:
            if element == string_to_search:
                return True
    return False
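Here the "list" is really a whitespace-separated string that gets split on spaces, tabs, and newlines before a whole-token match. A behavior-equivalent condensed sketch with a usage example:

import re

def in_list(list_to_search, string_to_search):
    if list_to_search and string_to_search:
        return string_to_search in re.split('[ \n\t\r]+', list_to_search)
    return False

print(in_list('eth0 eth1\teth2\nlo', 'eth2'))  # True
print(in_list('eth0 eth1', 'eth'))             # False (whole-token match only)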
python
def deploy(project_name):
    """Assemble the middleware pipeline"""
    request_log = requestlog.RequestLog
    header_addon = HeaderControl
    fault_wrapper = FaultWrapper
    application = handler.SdkHandler()

    # currently we have 3 middleware
    for middleware in (header_addon, fault_wrapper, request_log,):
        if middleware:
            application = middleware(application)
    return application
java
public boolean containsFunction(String ns, String name) {
    for (int i = 0; i < this.libraries.length; i++) {
        if (this.libraries[i].containsFunction(ns, name)) {
            return true;
        }
    }
    return false;
}
python
def fingerprint_relaxation(T, p0, obs, tau=1, k=None, ncv=None):
    r"""Dynamical fingerprint for relaxation experiment.

    The dynamical fingerprint is given by the implied time-scale
    spectrum together with the corresponding amplitudes.

    Parameters
    ----------
    T : (M, M) ndarray or scipy.sparse matrix
        Transition matrix
    p0 : (M,) ndarray
        Starting distribution, represented as vector on state space
    obs : (M,) ndarray
        Observable, represented as vector on state space
    k : int (optional)
        Number of time-scales and amplitudes to compute
    tau : int (optional)
        Lag time of given transition matrix, for correct time-scales
    ncv : int (optional)
        The number of Lanczos vectors generated, `ncv` must be greater than k;
        it is recommended that ncv > 2*k

    Returns
    -------
    timescales : (N,) ndarray
        Time-scales of the transition matrix
    amplitudes : (N,) ndarray
        Amplitudes for the relaxation experiment

    See also
    --------
    relaxation, fingerprint_correlation

    References
    ----------
    .. [1] Noe, F, S Doose, I Daidone, M Loellmann, M Sauer, J D
        Chodera and J Smith. 2010. Dynamical fingerprints for probing
        individual relaxation processes in biomolecular dynamics with
        simulations and kinetic experiments. PNAS 108 (12): 4822-4827.

    Notes
    -----
    Fingerprints are a combination of time-scale and amplitude spectrum for
    an equilibrium correlation or a non-equilibrium relaxation experiment.

    **Relaxation**

    A relaxation experiment looks at the time dependent expectation
    value of an observable for a system out of equilibrium

    .. math::
        \mathbb{E}_{w_{0}}[a(x, t)] = \sum_x w_0(x) a(x, t)
        = \sum_x w_0(x) \sum_y p^t(x, y) a(y).

    The fingerprint amplitudes :math:`\gamma_i` are given by

    .. math::
        \gamma_i = \langle w_0, r_i\rangle \langle l_i, a \rangle.

    And the fingerprint time scales :math:`t_i` are given by

    .. math::
        t_i = -\frac{\tau}{\log \lvert \lambda_i \rvert}.

    Examples
    --------
    >>> import numpy as np
    >>> from msmtools.analysis import fingerprint_relaxation

    >>> T = np.array([[0.9, 0.1, 0.0], [0.5, 0.0, 0.5], [0.0, 0.1, 0.9]])
    >>> p0 = np.array([1.0, 0.0, 0.0])
    >>> a = np.array([1.0, 0.0, 0.0])
    >>> ts, amp = fingerprint_relaxation(T, p0, a)

    >>> ts
    array([        inf,  9.49122158,  0.43429448])

    >>> amp
    array([ 0.45454545,  0.5       ,  0.04545455])

    """
    # check if square matrix and remember size
    T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind='numeric')
    n = T.shape[0]
    # will not do fingerprint analysis for nonreversible matrices
    if not is_reversible(T):
        raise ValueError('Fingerprint calculation is not supported for '
                         'nonreversible transition matrices. ')
    p0 = _types.ensure_ndarray(p0, ndim=1, size=n, kind='numeric')
    obs = _types.ensure_ndarray(obs, ndim=1, size=n, kind='numeric')
    # go
    if _issparse(T):
        return sparse.fingerprints.fingerprint_relaxation(T, p0, obs, tau=tau, k=k, ncv=ncv)
    else:
        return dense.fingerprints.fingerprint_relaxation(T, p0, obs, tau=tau, k=k)
python
def align_unaligned_seqs(seqs_fp, moltype=DNA, params=None, accurate=False):
    """Aligns unaligned sequences

    Parameters
    ----------
    seqs_fp : string
        file path of the input fasta file
    moltype : {skbio.DNA, skbio.RNA, skbio.Protein}
    params : dict-like type
        Passes additional parameter settings through to the application.
        Default is None.
    accurate : boolean
        If True, perform a more accurate alignment at the cost of
        performance. Default is False.

    Returns
    -------
    Alignment object
        The aligned sequences.

    See Also
    --------
    skbio.Alignment
    skbio.DNA
    skbio.RNA
    skbio.Protein
    """
    # Create Mafft app.
    app = Mafft(InputHandler='_input_as_path', params=params)
    # Turn on correct sequence type
    app.Parameters[MOLTYPE_MAP[moltype]].on()
    # Do not report progress
    app.Parameters['--quiet'].on()
    # More accurate alignment, sacrificing performance.
    if accurate:
        app.Parameters['--globalpair'].on()
        app.Parameters['--maxiterate'].Value = 1000
    # Run the application on the input file path
    res = app(seqs_fp)
    # Parse the alignment from the application's stdout
    alignment = Alignment.read(res['StdOut'], constructor=moltype)
    # Clean up temporary files
    res.cleanUp()
    return alignment
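A hedged usage sketch: this assumes the mafft binary is installed and on the PATH, and 'seqs.fna' is a hypothetical path to an unaligned FASTA file.

from skbio import DNA

# Fast default alignment:
aln = align_unaligned_seqs('seqs.fna', moltype=DNA)
print(aln)

# Slower but more accurate alignment (--globalpair plus up to 1000
# iterations, i.e. MAFFT's G-INS-i strategy):
aln = align_unaligned_seqs('seqs.fna', moltype=DNA, accurate=True)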
java
public int activeGroupCount() {
    int ngroupsSnapshot;
    ThreadGroup[] groupsSnapshot;
    // Snapshot the child groups under the lock, then recurse outside it
    // so the monitor is not held while walking the whole subtree.
    synchronized (this) {
        if (destroyed) {
            return 0;
        }
        ngroupsSnapshot = ngroups;
        if (groups != null) {
            groupsSnapshot = Arrays.copyOf(groups, ngroupsSnapshot);
        } else {
            // ngroups is 0 whenever groups is null, so the loop below is a no-op.
            groupsSnapshot = null;
        }
    }
    int n = ngroupsSnapshot;
    for (int i = 0 ; i < ngroupsSnapshot ; i++) {
        n += groupsSnapshot[i].activeGroupCount();
    }
    return n;
}
python
def servertoken(self, serverURL, referer):
    """Returns the cached server token, regenerating it when it is
    missing, expired, or was issued for a different server URL."""
    if self._server_token is None or self._server_token_expires_on is None or \
       datetime.datetime.now() >= self._server_token_expires_on or \
       self._server_url != serverURL:
        self._server_url = serverURL
        result = self._generateForServerTokenSecurity(serverURL=serverURL,
                                                      token=self.token,
                                                      tokenUrl=self._token_url,
                                                      referer=referer)
        if 'error' in result:
            self._valid = False
            self._message = result
        else:
            self._valid = True
            self._message = "Server Token Generated"
    return self._server_token
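The expiry check above is an instance of a generic cache-and-refresh pattern. A minimal self-contained sketch with illustrative names (not the real security handler):

import datetime

class TokenCache:
    def __init__(self, fetch, ttl_seconds=3600):
        self._fetch = fetch                 # callable returning a fresh token
        self._ttl = datetime.timedelta(seconds=ttl_seconds)
        self._token = None
        self._expires_on = None

    def get(self):
        now = datetime.datetime.now()
        # Refresh when no token exists yet, or the old one has expired.
        if self._token is None or self._expires_on is None or now >= self._expires_on:
            self._token = self._fetch()
            self._expires_on = now + self._ttl
        return self._token

cache = TokenCache(fetch=lambda: "token-%f" % datetime.datetime.now().timestamp())
print(cache.get())  # fetches a fresh token
print(cache.get())  # returns the cached token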
python
def apply_motion_tracks(self, tracks, accuracy=0.004):
    """
    Apply a batch of motion tracks (sequences of press/move/release events)
    to the device, after discretizing each track into individual motion steps.

    Args:
        tracks (:py:obj:`list`): list of :py:class:`poco.utils.track.MotionTrack` objects
        accuracy (:py:obj:`float`): motion accuracy for each motion step, in the
            normalized coordinate system
    """
    if not tracks:
        raise ValueError('Please provide at least one track. Got {}'.format(repr(tracks)))

    tb = MotionTrackBatch(tracks)
    return self.agent.input.applyMotionEvents(tb.discretize(accuracy))
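A usage sketch based on poco's documented track-building pattern (treat the exact MotionTrack chaining API, and the instantiated `poco` driver, as assumptions here):

from poco.utils.track import MotionTrack

# Press at the centre, hold, drag to the lower right, hold, then release.
# Coordinates are normalized to the screen ([0, 1] on each axis).
track = MotionTrack().start([0.5, 0.5]).hold(1).move([0.7, 0.7]).hold(1)
poco.apply_motion_tracks([track])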
python
def inverse_transform(self, maps):
    """This function transforms from luminosity distance to chirp distance,
    given the chirp mass.

    Parameters
    ----------
    maps : a mapping object
        A dict-like object providing the ``distance`` and ``mchirp``
        parameters.

    Examples
    --------
    Convert a dict of numpy.array:

    >>> import numpy as np
    >>> from pycbc import transforms
    >>> t = transforms.ChirpDistanceToDistance()
    >>> t.inverse_transform({'distance': np.array([40.]), 'mchirp': np.array([1.2])})
    {'distance': array([ 40.]), 'chirp_distance': array([ 40.52073522]), 'mchirp': array([ 1.2])}

    Returns
    -------
    out : dict
        A dict with key as parameter name and value as numpy.array or float
        of transformed values.
    """
    out = {}
    out[parameters.chirp_distance] = \
        conversions.chirp_distance(maps[parameters.distance],
                                   maps[parameters.mchirp],
                                   ref_mass=self.ref_mass)
    return self.format_output(maps, out)
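The conversion itself is a simple power-law rescaling of the luminosity distance. A quick check that reproduces the docstring example above, assuming the standard chirp-distance convention with a 1.4 solar-mass reference (which is what `conversions.chirp_distance` is taken to implement):

import numpy as np

def chirp_distance_sketch(distance, mchirp, ref_mass=1.4):
    # d_chirp = d * (2**(-1/5) * M_ref / M_chirp)**(5/6)
    return distance * (2.**(-1. / 5) * ref_mass / mchirp) ** (5. / 6)

print(chirp_distance_sketch(np.array([40.]), np.array([1.2])))
# -> [40.52073522], matching the docstring example above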
java
/** Reads the next four bytes and assembles them into an int, big-endian. */
public int getInt() {
    int v = 0;
    // Mask each byte with 0xFF to undo sign extension before shifting.
    v |= ((getByte() & 0xFF) << 24);  // most significant byte first
    v |= ((getByte() & 0xFF) << 16);
    v |= ((getByte() & 0xFF) << 8);
    v |= (getByte() & 0xFF);
    return v;
}
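The same big-endian assembly written in Python and cross-checked against the standard library (the masks above exist because Java bytes are signed; Python's bytes are already unsigned, so the `& 0xFF` is shown only for symmetry):

data = bytes([0x12, 0xAB, 0x00, 0xFF])

v = 0
for b in data:                 # most significant byte first
    v = (v << 8) | (b & 0xFF)

assert v == int.from_bytes(data, byteorder='big')
print(hex(v))  # 0x12ab00ff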
java
/**
 * Creates a new SqlRunner for the given data source and dialect.
 *
 * @deprecated use {@link SqlRunner#create(DataSource, Dialect)} instead.
 */
@Deprecated
public static SqlRunner newSqlRunner(DataSource ds, Dialect dialect) {
    return SqlRunner.create(ds, dialect);
}
python
def ed(simulated_array, observed_array, replace_nan=None, replace_inf=None,
       remove_neg=False, remove_zero=False):
    """Compute the Euclidean distance between predicted and observed values in vector space.

    .. image:: /pictures/ED.png

    **Range** 0 ≤ ED < inf, smaller is better.

    **Notes** Also sometimes referred to as the L2-norm (of the difference
    between the simulated and observed arrays).

    Parameters
    ----------
    simulated_array: one dimensional ndarray
        An array of simulated data from the time series.

    observed_array: one dimensional ndarray
        An array of observed data from the time series.

    replace_nan: float, optional
        If given, indicates which value to replace NaN values with in the two arrays. If None, when
        a NaN value is found at the i-th position in the observed OR simulated array, the i-th value
        of the observed and simulated array are removed before the computation.

    replace_inf: float, optional
        If given, indicates which value to replace Inf values with in the two arrays. If None, when
        an inf value is found at the i-th position in the observed OR simulated array, the i-th
        value of the observed and simulated array are removed before the computation.

    remove_neg: boolean, optional
        If True, when a negative value is found at the i-th position in the observed OR simulated
        array, the i-th value of the observed AND simulated array are removed before the
        computation.

    remove_zero: boolean, optional
        If True, when a zero value is found at the i-th position in the observed OR simulated
        array, the i-th value of the observed AND simulated array are removed before the
        computation.

    Examples
    --------
    >>> import HydroErr as he
    >>> import numpy as np

    >>> sim = np.array([5, 7, 9, 2, 4.5, 6.7])
    >>> obs = np.array([4.7, 6, 10, 2.5, 4, 7])
    >>> he.ed(sim, obs)
    1.63707055437449

    Returns
    -------
    float
        The Euclidean distance error value.

    References
    ----------
    - Kennard, M. J., Mackay, S. J., Pusey, B. J., Olden, J. D., & Marsh, N. (2010). Quantifying
      uncertainty in estimation of hydrologic metrics for ecohydrological studies. River Research
      and Applications, 26(2), 137-156.
    """
    # Checking and cleaning the data
    simulated_array, observed_array = treat_values(
        simulated_array,
        observed_array,
        replace_nan=replace_nan,
        replace_inf=replace_inf,
        remove_neg=remove_neg,
        remove_zero=remove_zero
    )

    return np.linalg.norm(observed_array - simulated_array)
java
/** Creates the document at the given reference with the supplied fields. */
@Nonnull
public T create(
    @Nonnull DocumentReference documentReference,
    @Nonnull Map<String, Object> fields) {
  return performCreate(documentReference, fields);
}
python
import numpy as np
from PIL import Image


def load_mnist(size: int = None,
               border: int = _MNIST_BORDER,
               blank_corners: bool = False,
               nums: List[int] = None) \
        -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """Download and rescale the MNIST database of handwritten digits.

    MNIST is a dataset of 60,000 28x28 grayscale images of handwritten
    digits, along with a test set of 10,000 images. We use Keras to
    download and access the dataset. The first invocation of this method
    may take a while as the dataset has to be downloaded and cached.

    If size is None, then we return the original MNIST data. For rescaled
    MNIST, we chop off the border, downsample to the desired size with
    Lanczos resampling, and then (optionally) zero out the corner pixels.
    If nums is given, only images whose labels appear in that list are
    kept, so the first dimension of the returned arrays shrinks
    accordingly.

    Returns (x_train, y_train, x_test, y_test)

        x_train ndarray of shape (60000, size, size)
        y_train ndarray of shape (60000,)
        x_test  ndarray of shape (10000, size, size)
        y_test  ndarray of shape (10000,)
    """
    # JIT import since keras startup is slow
    from keras.datasets import mnist

    def _filter_mnist(x: np.ndarray, y: np.ndarray, nums: List[int] = None) \
            -> Tuple[np.ndarray, np.ndarray]:
        # Keep only the images whose label appears in nums.
        xt = []
        yt = []
        items = len(y)
        for n in range(items):
            if nums is not None and y[n] in nums:
                xt.append(x[n])
                yt.append(y[n])
        xt = np.stack(xt)
        yt = np.stack(yt)

        return xt, yt

    def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray:
        N = imgarray.shape[0]

        # Chop off border
        imgarray = imgarray[:, border:-border, border:-border]

        rescaled = np.zeros(shape=(N, size, size), dtype=float)
        for n in range(0, N):
            img = Image.fromarray(imgarray[n])
            img = img.resize((size, size), Image.LANCZOS)
            rsc = np.asarray(img).reshape((size, size))
            # Scale so the brightest pixel maps to 255; using 256 would
            # wrap the maximum to 0 on the uint8 cast below.
            rsc = 255. * rsc / rsc.max()
            rescaled[n] = rsc

        return rescaled.astype(dtype=np.uint8)

    def _blank_corners(imgarray: np.ndarray) -> None:
        # Zero out a triangular region in each of the four corners, in place.
        sz = imgarray.shape[1]
        corner = (sz // 2) - 1
        for x in range(0, corner):
            for y in range(0, corner - x):
                imgarray[:, x, y] = 0
                imgarray[:, -(1 + x), y] = 0
                imgarray[:, -(1 + x), -(1 + y)] = 0
                imgarray[:, x, -(1 + y)] = 0

    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    if nums:
        x_train, y_train = _filter_mnist(x_train, y_train, nums)
        x_test, y_test = _filter_mnist(x_test, y_test, nums)

    if size:
        x_train = _rescale(x_train, size)
        x_test = _rescale(x_test, size)

    if blank_corners:
        _blank_corners(x_train)
        _blank_corners(x_test)

    return x_train, y_train, x_test, y_test
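A hedged usage sketch (requires Keras with a working backend; the 12665 below is the count of 0 and 1 labels in the MNIST training set, 5923 + 6742):

# Original 28x28 data:
x_train, y_train, x_test, y_test = load_mnist()
print(x_train.shape, y_train.shape)   # (60000, 28, 28) (60000,)

# 8x8 images of zeros and ones only, with blanked corners:
x_train, y_train, x_test, y_test = load_mnist(size=8,
                                              blank_corners=True,
                                              nums=[0, 1])
print(x_train.shape)                  # (12665, 8, 8)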