language: stringclasses (2 values)
func_code_string: stringlengths (63 – 466k)
python
def depthtospace(attrs, inputs, proto_obj):
    """Rearranges data from depth into blocks of spatial data."""
    new_attrs = translation_utils._fix_attribute_names(attrs,
                                                       {'blocksize': 'block_size'})
    return "depth_to_space", new_attrs, inputs
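For illustration, a minimal sketch of what an attribute-renaming helper like translation_utils._fix_attribute_names plausibly does (the dict-renaming behavior here is an assumption, not the actual implementation):

# Hypothetical stand-in: rename dict keys according to a mapping,
# leaving unmapped keys untouched.
def _fix_attribute_names(attrs, change_map):
    return {change_map.get(k, k): v for k, v in attrs.items()}

print(_fix_attribute_names({'blocksize': 2}, {'blocksize': 'block_size'}))
# {'block_size': 2}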
python
def download_channel_image_file(self, channel_name, plate_name, well_name,
                                well_pos_y, well_pos_x, cycle_index, tpoint,
                                zplane, correct, align, directory):
    '''Downloads a channel image and writes it to a `PNG` file on disk.

    Parameters
    ----------
    channel_name: str
        name of the channel
    plate_name: str
        name of the plate
    well_name: str
        name of the well
    well_pos_y: int
        zero-based y coordinate of the acquisition site within the well
    well_pos_x: int
        zero-based x coordinate of the acquisition site within the well
    cycle_index: str
        zero-based cycle index
    tpoint: int
        zero-based time point index
    zplane: int
        zero-based z-plane index
    correct: bool
        whether image should be corrected for illumination artifacts
    align: bool
        whether image should be aligned to the other cycles
    directory: str
        absolute path to the directory on disk where the file should be saved

    Note
    ----
    Image gets automatically aligned between cycles.

    See also
    --------
    :meth:`tmclient.api.TmClient.download_channel_image`
    '''
    response = self._download_channel_image(
        channel_name, plate_name, well_name, well_pos_y, well_pos_x,
        cycle_index=cycle_index, tpoint=tpoint, zplane=zplane,
        correct=correct, align=align
    )
    data = response.content
    filename = self._extract_filename_from_headers(response.headers)
    self._write_file(directory, os.path.basename(filename), data)
python
def get_archives(self, title_id, language_code):
    """Get the archive list from a given `title_id` and `language_code`.

    :param int title_id: title id.
    :param int language_code: language code.
    :return: the archives.
    :rtype: list of :class:`LegendasTVArchive`
    """
    logger.info('Getting archives for title %d and language %d', title_id, language_code)
    archives = []
    page = 1
    while True:
        # get the archive page
        url = self.server_url + 'util/carrega_legendas_busca_filme/{title}/{language}/-/{page}'.format(
            title=title_id, language=language_code, page=page)
        r = self.session.get(url)
        r.raise_for_status()

        # parse the results
        soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
        for archive_soup in soup.select('div.list_element > article > div'):
            # create archive
            archive = LegendasTVArchive(archive_soup.a['href'].split('/')[2],
                                        archive_soup.a.text,
                                        'pack' in archive_soup['class'],
                                        'destaque' in archive_soup['class'],
                                        self.server_url + archive_soup.a['href'][1:])

            # extract text containing downloads, rating and timestamp
            data_text = archive_soup.find('p', class_='data').text

            # match downloads
            archive.downloads = int(downloads_re.search(data_text).group('downloads'))

            # match rating
            match = rating_re.search(data_text)
            if match:
                archive.rating = int(match.group('rating'))

            # match timestamp and validate it
            time_data = {k: int(v) for k, v in timestamp_re.search(data_text).groupdict().items()}
            archive.timestamp = pytz.timezone('America/Sao_Paulo').localize(datetime(**time_data))
            if archive.timestamp > datetime.utcnow().replace(tzinfo=pytz.utc):
                raise ProviderError('Archive timestamp is in the future')

            # add archive
            archives.append(archive)

        # stop on last page
        if soup.find('a', attrs={'class': 'load_more'}, string='carregar mais') is None:
            break

        # increment page count
        page += 1

    logger.debug('Found %d archives', len(archives))

    return archives
java
public INode[] getExistingPathINodes(String path) {
    byte[][] components = getPathComponents(path);
    INode[] inodes = new INode[components.length];
    this.getExistingPathINodes(components, inodes);
    return inodes;
}
java
protected final boolean matchString(int state, char expected[]) throws Exception {
    int length = expected.length;
    while (state < length && position < limit) {
        if (input[position] != expected[state])
            throw expected(codePoint(),
                new String(new int[]{ Character.codePointAt(expected, state) }, 0, 1));
        state++;
        position++;
    }
    if (state == length)
        return true;
    else {
        if (marker == EOF)
            throw expected(EOF, new String(expected, state, length - state));
        exiting(RULE_DYNAMIC_STRING_MATCH, state);
        return false;
    }
}
python
def write_single_response(self, response_obj):
    """
    Writes a json rpc response ``{"result": result, "error": error, "id": id}``.
    If the ``id`` is ``None``, the response will not contain an ``id`` field.
    The response is sent to the client as an ``application/json`` response.
    Only one call per response is allowed.

    :param response_obj: A Json rpc response object
    :return:
    """
    if not isinstance(response_obj, JsonRpcResponse):
        raise ValueError(
            "Expected JsonRpcResponse, but got {} instead".format(type(response_obj).__name__))

    if not self.response_is_sent:
        self.set_status(200)
        self.set_header("Content-Type", "application/json")
        self.finish(response_obj.to_string())
        self.response_is_sent = True
java
@SuppressWarnings({ "unchecked" })
public <VV> EntryStream<K, VV> selectValues(Class<VV> clazz) {
    return (EntryStream<K, VV>) filter(e -> clazz.isInstance(e.getValue()));
}
java
public SAXParseException[] getXMLWarnings() {
    if (mWarnings == null) {
        return null;
    }
    return (SAXParseException[]) mWarnings.toArray(new SAXParseException[mWarnings.size()]);
}
python
def _get_block_header(self, block_hash, num):
    """Get block header by block header hash & number.

    :param block_hash: the block header hash
    :param num: the block number
    :return: the decoded :class:`BlockHeader`
    """
    header_key = header_prefix + num + block_hash
    block_header_data = self.db.get(header_key)
    header = rlp.decode(block_header_data, sedes=BlockHeader)
    return header
python
def from_jd(jd: float, fmt: str = 'jd') -> datetime:
    """
    Converts a Julian Date to a datetime object.
    Algorithm is from Fliegel and van Flandern (1968)

    Parameters
    ----------
    jd: float
        Julian Date as type specified in the string fmt
    fmt: str

    Returns
    -------
    dt: datetime
    """
    jd, jdf = __from_format(jd, fmt)

    l = jd + 68569
    n = 4 * l // 146097
    l = l - (146097 * n + 3) // 4
    i = 4000 * (l + 1) // 1461001
    l = l - 1461 * i // 4 + 31
    j = 80 * l // 2447
    k = l - 2447 * j // 80
    l = j // 11
    j = j + 2 - 12 * l
    i = 100 * (n - 49) + i + l

    year = int(i)
    month = int(j)
    day = int(k)

    # in microseconds
    frac_component = int(jdf * (1e6 * 24 * 3600))

    hours = int(frac_component // (1e6 * 3600))
    frac_component -= hours * 1e6 * 3600

    minutes = int(frac_component // (1e6 * 60))
    frac_component -= minutes * 1e6 * 60

    seconds = int(frac_component // 1e6)
    frac_component -= seconds * 1e6

    frac_component = int(frac_component)

    dt = datetime(year=year, month=month, day=day,
                  hour=hours, minute=minutes, second=seconds,
                  microsecond=frac_component)
    return dt
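A standalone check of the Fliegel & van Flandern integer-date arithmetic used above (the time-of-day handling depends on __from_format and is omitted here):

# Date part of the algorithm only; JD 2451545 is the J2000 epoch day.
def jd_to_ymd(jd: int):
    l = jd + 68569
    n = 4 * l // 146097
    l = l - (146097 * n + 3) // 4
    i = 4000 * (l + 1) // 1461001
    l = l - 1461 * i // 4 + 31
    j = 80 * l // 2447
    k = l - 2447 * j // 80
    l = j // 11
    j = j + 2 - 12 * l
    return 100 * (n - 49) + i + l, j, k

print(jd_to_ymd(2451545))  # (2000, 1, 1)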
python
def decode_solution(self, encoded_solution):
    """Return solution from an encoded representation."""
    return self._decode_function(encoded_solution, *self._decode_args,
                                 **self._decode_kwargs)
java
protected String getArgumentValue(String arg, String[] args, String defaultValue) {
    String argName = getArgName(arg);
    for (int i = 1; i < args.length; i++) {
        String currentArgName = getArgName(args[i]);
        if (currentArgName.equalsIgnoreCase(argName)) {
            // return what's to the left of '=' if there is one
            return getValue(args[i]);
        }
    }
    return defaultValue;
}
java
public List<CmsGroup> getGroups(
    CmsDbContext dbc,
    CmsOrganizationalUnit orgUnit,
    boolean includeSubOus,
    boolean readRoles)
throws CmsException {

    return getUserDriver(dbc).getGroups(dbc, orgUnit, includeSubOus, readRoles);
}
python
def _known_stale(self):
    """
    The commit is known to be from a file (and therefore stale) if a
    SHA is supplied by git archive and doesn't match the parsed commit.
    """
    if self._output_from_file() is None:
        commit = None
    else:
        commit = self.commit

    known_stale = (self.archive_commit is not None
                   and not self.archive_commit.startswith('$Format')
                   and self.archive_commit != commit)
    if known_stale:
        self._commit_count = None
    return known_stale
python
def CreateBitmap(self, artid, client, size):
    """Adds custom images to ArtProvider"""
    if artid in self.extra_icons:
        return wx.Bitmap(self.extra_icons[artid], wx.BITMAP_TYPE_ANY)
    else:
        return wx.ArtProvider.GetBitmap(artid, client, size)
java
private Deferred<ChannelBuffer> serialize() throws Exception {
    final long start = System.currentTimeMillis();

    // buffers and an array list to store the deferreds
    final ChannelBuffer response = ChannelBuffers.dynamicBuffer();
    final OutputStream output_stream = new ChannelBufferOutputStream(response);
    final JsonGenerator json = JSON.getFactory().createGenerator(output_stream);
    json.writeStartObject();
    json.writeFieldName("outputs");
    json.writeStartArray();

    // We want the serializer to execute serially so we need to create a callback
    // chain so that when one DPsResolver is finished, it triggers the next to
    // start serializing.
    final Deferred<Object> cb_chain = new Deferred<Object>();

    // default to the expressions if there, or fall back to the metrics
    final List<Output> outputs;
    if (query.getOutputs() == null || query.getOutputs().isEmpty()) {
        if (query.getExpressions() != null && !query.getExpressions().isEmpty()) {
            outputs = new ArrayList<Output>(query.getExpressions().size());
            for (final Expression exp : query.getExpressions()) {
                outputs.add(Output.Builder().setId(exp.getId()).build());
            }
        } else if (query.getMetrics() != null && !query.getMetrics().isEmpty()) {
            outputs = new ArrayList<Output>(query.getMetrics().size());
            for (final Metric metric : query.getMetrics()) {
                outputs.add(Output.Builder().setId(metric.getId()).build());
            }
        } else {
            throw new IllegalArgumentException(
                "How did we get here?? No metrics or expressions??");
        }
    } else {
        outputs = query.getOutputs();
    }

    for (final Output output : outputs) {
        if (expressions != null) {
            final ExpressionIterator it = expressions.get(output.getId());
            if (it != null) {
                cb_chain.addCallback(
                    new SerializeExpressionIterator(tsdb, json, output, it, ts_query));
                continue;
            }
        }
        if (query.getMetrics() != null && !query.getMetrics().isEmpty()) {
            final TSSubQuery sub = sub_queries.get(output.getId());
            if (sub != null) {
                final TimeSyncedIterator it = new TimeSyncedIterator(output.getId(),
                    sub.getFilterTagKs(), sub_query_results.get(output.getId()));
                cb_chain.addCallback(new SerializeSubIterator(tsdb, json, output, it));
                continue;
            }
        } else {
            LOG.warn("Couldn't find a variable matching: " + output.getId()
                + " in query " + query);
        }
    }

    /** Final callback to close out the JSON array and return our results */
    class FinalCB implements Callback<ChannelBuffer, Object> {
        public ChannelBuffer call(final Object obj) throws Exception {
            json.writeEndArray();
            // ts_query.getQueryStats().setTimeSerialization(
            //     DateTime.currentTimeMillis() - start);
            ts_query.getQueryStats().markSerializationSuccessful();

            // dump overall stats as an extra object in the array
            // if (true) {
            //     final QueryStats stats = ts_query.getQueryStats();
            //     json.writeFieldName("statsSummary");
            //     json.writeStartObject();
            //     //json.writeStringField("hostname", TSDB.getHostname());
            //     //json.writeNumberField("runningQueries", stats.getNumRunningQueries());
            //     json.writeNumberField("datapoints", stats.getAggregatedSize());
            //     json.writeNumberField("rawDatapoints", stats.getSize());
            //     //json.writeNumberField("rowsFetched", stats.getRowsFetched());
            //     json.writeNumberField("aggregationTime", stats.getTimeAggregation());
            //     json.writeNumberField("serializationTime", stats.getTimeSerialization());
            //     json.writeNumberField("storageTime", stats.getTimeStorage());
            //     json.writeNumberField("timeTotal",
            //         ((double) stats.getTimeTotal() / (double) 1000000));
            //     json.writeEndObject();
            // }

            // dump the original query
            if (true) {
                json.writeFieldName("query");
                json.writeObject(QueryExecutor.this.query);
            }

            // IMPORTANT: make sure to close the JSON object and the generator
            json.writeEndObject();
            json.close();
            return response;
        }
    }

    // trigger the callback chain here
    cb_chain.callback(null);
    return cb_chain.addCallback(new FinalCB());
}
java
@XmlElementDecl(namespace = "http://www.opengis.net/citygml/waterbody/1.0", name = "WaterGroundSurface",
        substitutionHeadNamespace = "http://www.opengis.net/citygml/waterbody/1.0",
        substitutionHeadName = "_WaterBoundarySurface")
public JAXBElement<WaterGroundSurfaceType> createWaterGroundSurface(WaterGroundSurfaceType value) {
    return new JAXBElement<WaterGroundSurfaceType>(_WaterGroundSurface_QNAME, WaterGroundSurfaceType.class, null, value);
}
java
public void execute() throws MojoExecutionException {
    if (watchFilter == null) {
        removeFromWatching();
    }
    NPM.npm(this, name, version).execute(binary != null ? binary : name, arguments);
}
java
public static Object evaluateAsObject(Node node, String xPathExpression,
        NamespaceContext nsContext, QName resultType) {
    return evaluateExpression(node, xPathExpression, nsContext, resultType);
}
python
def attr_fill_null(args):
    """
    Assign the null sentinel value for all entities which do not have a value
    for the given attributes.

    see gs://broad-institute-gdac/GDAC_FC_NULL for more details
    """
    NULL_SENTINEL = "gs://broad-institute-gdac/GDAC_FC_NULL"
    attrs = args.attributes

    if not attrs:
        print("Error: provide at least one attribute to set")
        return 1

    if 'participant' in attrs or 'samples' in attrs:
        print("Error: can't assign null to samples or participant")
        return 1

    # Set entity attributes
    if args.entity_type is not None:
        print("Collecting entity data...")
        # Get existing attributes
        entities = _entity_paginator(args.project, args.workspace,
                                     args.entity_type,
                                     page_size=1000, filter_terms=None,
                                     sort_direction="asc")

        # samples need participant_id as well
        # TODO: This may need more fixing for other types
        orig_attrs = list(attrs)
        if args.entity_type == "sample":
            attrs.insert(0, "participant_id")

        header = "entity:" + args.entity_type + "_id\t" + "\t".join(attrs)

        # Book keep the number of updates for each attribute
        attr_update_counts = {a: 0 for a in orig_attrs}

        # construct new entity data by inserting null sentinel, and counting
        # the number of updates
        entity_data = []
        for entity_dict in entities:
            name = entity_dict['name']
            etype = entity_dict['entityType']
            e_attrs = entity_dict['attributes']
            line = name
            altered = False
            for attr in attrs:
                if attr == "participant_id":
                    line += "\t" + e_attrs['participant']['entityName']
                    continue  # This attribute is never updated by fill_null
                if attr not in e_attrs:
                    altered = True
                    attr_update_counts[attr] += 1
                line += "\t" + str(e_attrs.get(attr, NULL_SENTINEL))
            # Improve performance by only updating records that have changed
            if altered:
                entity_data.append(line)

        # Check to see if all entities are being set to null for any attributes
        # This is usually a mistake, so warn the user
        num_entities = len(entities)
        prompt = "Continue? [Y\\n]: "
        for attr in orig_attrs:
            if num_entities == attr_update_counts[attr]:
                message = "WARNING: no {0}s with attribute '{1}'\n".format(
                    args.entity_type, attr
                )
                if not args.yes and not _confirm_prompt(message, prompt):
                    return

        # check to see if no sentinels are necessary
        if not any(c != 0 for c in itervalues(attr_update_counts)):
            print("No null sentinels required, exiting...")
            return 0

        if args.to_loadfile:
            print("Saving loadfile to " + args.to_loadfile)
            with open(args.to_loadfile, "w") as f:
                f.write(header + '\n')
                f.write("\n".join(entity_data))
            return 0

        updates_table = "     count attribute\n"
        for attr in sorted(attr_update_counts):
            count = attr_update_counts[attr]
            updates_table += "{0:>10} {1}\n".format(count, attr)

        message = "WARNING: This will insert null sentinels for " \
                  "these attributes:\n" + updates_table
        if not args.yes and not _confirm_prompt(message):
            return 0

        # Chunk the entities into batches of 500, and upload to FC
        print("Batching " + str(len(entity_data)) + " updates to Firecloud...")
        chunk_len = 500
        total = int(len(entity_data) / chunk_len) + 1
        batch = 0
        for i in range(0, len(entity_data), chunk_len):
            batch += 1
            print("Updating samples {0}-{1}, batch {2}/{3}".format(
                i + 1, min(i + chunk_len, len(entity_data)), batch, total
            ))
            this_data = header + '\n' + '\n'.join(entity_data[i:i + chunk_len])

            # Now push the entity data back to firecloud
            r = fapi.upload_entities(args.project, args.workspace, this_data)
            fapi._check_response_code(r, 200)

        return 0
    else:
        # TODO: set workspace attributes
        print("attr_fill_null requires an entity type")
        return 1
python
def create_restore_point(self, name=None):
    '''Create a configuration restore point.

    Parameters
    ----------
    name : str
        Name of the restore point. If not given, a name is generated from
        the current UTC timestamp.
    '''
    if name is None:
        # iter(int, 1) never terminates (int() always returns 0, never 1),
        # so this loops until an unused timestamp-based name is found
        for i in iter(int, 1):
            name = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f') + '_' + str(i)
            try:
                self.config_state[name]
            except KeyError:
                break
    if name in self.config_state:
        raise ValueError('Restore point %s already exists' % name)
    self.config_state[name] = (copy.deepcopy(self.global_registers),
                               copy.deepcopy(self.pixel_registers))
    return name
java
protected IPortletRenderExecutionWorker startPortletRenderInternal(
        IPortletWindowId portletWindowId,
        HttpServletRequest request,
        HttpServletResponse response) {
    // first check to see if there is a Throwable in the session for this IPortletWindowId
    final Map<IPortletWindowId, Exception> portletFailureMap = getPortletErrorMap(request);
    final Exception cause = portletFailureMap.remove(portletWindowId);

    final IPortletRenderExecutionWorker portletRenderExecutionWorker;
    if (null != cause) {
        // previous action failed, dispatch to errorPortlet immediately
        portletRenderExecutionWorker =
                this.portletWorkerFactory.createFailureWorker(
                        request, response, portletWindowId, cause);
    } else {
        IPortletWindow portletWindow =
                portletWindowRegistry.getPortletWindow(request, portletWindowId);
        IPortletDefinition portletDef =
                portletWindow.getPortletEntity().getPortletDefinition();
        if (portletDef.getLifecycleState().equals(PortletLifecycleState.MAINTENANCE)) {
            // Prevent the portlet from rendering; replace with a helpful
            // "Out of Service" message
            portletRenderExecutionWorker =
                    this.portletWorkerFactory.createFailureWorker(
                            request, response, portletWindowId, new MaintenanceModeException());
        } else {
            // Happy path
            portletRenderExecutionWorker =
                    this.portletWorkerFactory.createRenderWorker(
                            request, response, portletWindowId);
        }
    }
    portletRenderExecutionWorker.submit();

    final Map<IPortletWindowId, IPortletRenderExecutionWorker> portletRenderingMap =
            this.getPortletRenderingMap(request);
    portletRenderingMap.put(portletWindowId, portletRenderExecutionWorker);

    return portletRenderExecutionWorker;
}
python
def get_site_symmetries(wyckoff):
    """List up site symmetries

    The data structure is as follows:

        wyckoff[0]['wyckoff'][0]['site_symmetry']

    Note
    ----
    Maximum length of string is 6.
    """
    ssyms = []
    for w in wyckoff:
        ssyms += ["\"%-6s\"" % w_s['site_symmetry'] for w_s in w['wyckoff']]
    damp_array_site_symmetries(ssyms)
python
def _new(self, src_path, dry_run=False, remove_uploaded=False):
    'Code to upload'
    # are we getting a symbolic link?
    if os.path.islink(src_path):
        sourcefile = os.path.normpath(os.path.join(self.topdir, os.readlink(src_path)))
        if not os.path.exists(sourcefile):  # broken symlink
            log.error("broken symlink %s->%s", src_path, sourcefile)
            raise IOError("broken symlink %s->%s" % (src_path, sourcefile))
        jottapath = self.get_jottapath(src_path, filename=os.path.basename(sourcefile))
    elif os.path.splitext(src_path)[1].lower() == '.lnk':  # windows .lnk
        sourcefile = os.path.normpath(readlnk(src_path))
        if not os.path.exists(sourcefile):  # broken link
            log.error("broken fat32lnk %s->%s", src_path, sourcefile)
            raise IOError("broken fat32lnk %s->%s" % (src_path, sourcefile))
        jottapath = self.get_jottapath(src_path, filename=os.path.basename(sourcefile))
    else:
        sourcefile = src_path
        if not os.path.exists(sourcefile):  # file does not exist
            log.error("file does not exist: %s", sourcefile)
            raise IOError("file does not exist: %s" % sourcefile)
        jottapath = self.get_jottapath(src_path)
    log.info('Uploading file %s to %s', sourcefile, jottapath)
    if not dry_run:
        if not jottacloud.new(sourcefile, jottapath, self.jfs):
            log.error('Uploading file %s failed', sourcefile)
            raise IOError('Uploading file %s failed' % sourcefile)
    if remove_uploaded:
        log.info('Removing file after upload: %s', src_path)
        if not dry_run:
            os.remove(src_path)
java
public static int requireNonNegative(int i, String message) {
    Objects.requireNonNull(message, "message must not be null");
    if (i < 0) {
        throw new IllegalArgumentException(message);
    }
    return i;
}
java
public static <T> void forEach(Iterable<T> iterable, Procedure<? super T> procedure) {
    if (iterable instanceof InternalIterable) {
        ((InternalIterable<T>) iterable).forEach(procedure);
    } else if (iterable instanceof ArrayList) {
        ArrayListIterate.forEach((ArrayList<T>) iterable, procedure);
    } else if (iterable instanceof List) {
        ListIterate.forEach((List<T>) iterable, procedure);
    } else if (iterable != null) {
        IterableIterate.forEach(iterable, procedure);
    } else {
        throw new IllegalArgumentException("Cannot perform a forEach on null");
    }
}
python
def get_server_capabilities(self):
    """Get hardware properties which can be used for scheduling

    :return: a dictionary of server capabilities.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedError, if the command is
             not supported on the server.
    """
    capabilities = self._call_method('get_server_capabilities')
    # TODO(nisha): Assumption is that Redfish always see the pci_device
    # member name field populated similarly to IPMI.
    # If redfish is not able to get nic_capacity, we can fall back to
    # IPMI way of retrieving nic_capacity in the future. As of now
    # the IPMI is not tested on Gen10, hence assuming that
    # Redfish will always be able to give the data.
    if ('Gen10' not in self.model):
        major_minor = (
            self._call_method('get_ilo_firmware_version_as_major_minor'))

        # NOTE(vmud213): Even if it is None, pass it on to get_nic_capacity
        # as we still want to try getting nic capacity through ipmitool
        # irrespective of what firmware we are using.
        nic_capacity = ipmi.get_nic_capacity(self.ipmi_host_info, major_minor)
        if nic_capacity:
            capabilities.update({'nic_capacity': nic_capacity})

    if capabilities:
        return capabilities
python
def roles_required(*roles):
    """Decorator which specifies that a user must have all the specified roles.
    Example::

        @app.route('/dashboard')
        @roles_required('admin', 'editor')
        def dashboard():
            return 'Dashboard'

    The current user must have both the `admin` role and `editor` role in order
    to view the page.

    :param roles: The required roles.
    """
    def wrapper(fn):
        @wraps(fn)
        def decorated_view(*args, **kwargs):
            perms = [Permission(RoleNeed(role)) for role in roles]
            for perm in perms:
                if not perm.can():
                    if _security._unauthorized_callback:
                        return _security._unauthorized_callback()
                    else:
                        return _get_unauthorized_view()
            return fn(*args, **kwargs)
        return decorated_view
    return wrapper
python
def reverse(
        self,
        query,
        exactly_one=DEFAULT_SENTINEL,
        timeout=DEFAULT_SENTINEL,
        kind=None,
):
    """
    Return an address by location point.

    :param query: The coordinates for which you wish to obtain the
        closest human-readable addresses.
    :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
        longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.

    :param bool exactly_one: Return one result or a list of results, if
        available.

        .. versionchanged:: 1.14.0
           Default value for ``exactly_one`` was ``False``, which differs
           from the conventional default across geopy. Please always pass
           this argument explicitly, otherwise you would get a warning.
           In geopy 2.0 the default value will become ``True``.

    :param int timeout: Time, in seconds, to wait for the geocoding service
        to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
        exception. Set this only if you wish to override, on this call
        only, the value set during the geocoder's initialization.

    :param str kind: Type of toponym. Allowed values: `house`, `street`,
        `metro`, `district`, `locality`.

        .. versionadded:: 1.14.0

    :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if
        ``exactly_one=False``.
    """
    if exactly_one is DEFAULT_SENTINEL:
        warnings.warn('%s.reverse: default value for `exactly_one` '
                      'argument will become True in geopy 2.0. '
                      'Specify `exactly_one=False` as the argument '
                      'explicitly to get rid of this warning.' % type(self).__name__,
                      DeprecationWarning, stacklevel=2)
        exactly_one = False

    try:
        point = self._coerce_point_to_string(query, "%(lon)s,%(lat)s")
    except ValueError:
        raise ValueError("Must be a coordinate pair or Point")
    params = {
        'geocode': point,
        'format': 'json'
    }
    if self.api_key:
        params['apikey'] = self.api_key
    if self.lang:
        params['lang'] = self.lang
    if kind:
        params['kind'] = kind
    url = "?".join((self.api, urlencode(params)))
    logger.debug("%s.reverse: %s", self.__class__.__name__, url)
    return self._parse_json(
        self._call_geocoder(url, timeout=timeout),
        exactly_one,
    )
python
def _get_package_data():
    """
    Import a set of important packages and return relevant data about them in
    a dict. Imports are done in here to avoid potential for circular imports
    and other problems, and to make iteration simpler.
    """
    moddata = []
    modlist = (
        "click",
        "configobj",
        "cryptography",
        "globus_cli",
        "globus_sdk",
        "jmespath",
        "requests",
        "six",
    )
    if verbosity() < 2:
        modlist = ("globus_cli", "globus_sdk", "requests")

    for mod in modlist:
        cur = [mod]
        try:
            loaded_mod = __import__(mod)
        except ImportError:
            loaded_mod = None

        for attr in ("__version__", "__file__", "__path__"):
            # if loading failed, be sure to pad with error messages
            if loaded_mod is None:
                cur.append("[import failed]")
                continue

            try:
                attrval = getattr(loaded_mod, attr)
            except AttributeError:
                attrval = ""
            cur.append(attrval)
        moddata.append(cur)

    return moddata
python
def cumulative_value(self, slip, mmax, mag_value, bbar, dbar, beta):
    '''
    Returns the rate of events with M > mag_value

    :param float slip:
        Slip rate in mm/yr
    :param float mmax:
        Maximum magnitude
    :param float mag_value:
        Magnitude value
    :param float bbar:
        \bar{b} parameter (effectively = b * log(10.))
    :param float dbar:
        \bar{d} parameter
    :param float beta:
        Beta value of formula defined in Eq. 20 of Anderson & Luco (1983)
    '''
    delta_m = mmax - mag_value
    a_3 = self._get_a3_value(bbar, dbar, slip / 10., beta, mmax)
    central_term = np.exp(bbar * delta_m) - 1.0 - (bbar * delta_m)
    return a_3 * central_term * (delta_m > 0.0)
java
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case AfplibPackage.BII__IMO_NAME:
            return IMO_NAME_EDEFAULT == null ? imoName != null : !IMO_NAME_EDEFAULT.equals(imoName);
    }
    return super.eIsSet(featureID);
}
python
def upload(self, resource_id, data):
    """Update the request URI to upload a document to this resource.

    Args:
        resource_id (integer): The group id.
        data (any): The raw data to upload.
    """
    self.body = data
    self.content_type = 'application/octet-stream'
    self.resource_id(str(resource_id))
    self._request_uri = '{}/upload'.format(self._request_uri)
java
private static String getJsPackage(FileDescriptor file) {
    String protoPackage = file.getPackage();
    if (!protoPackage.isEmpty()) {
        return "proto." + protoPackage;
    }
    return "proto";
}
java
@Override
public void exec(Result<Object> result, Object... args) {
    TableKelp tableKelp = _table.getTableKelp();

    RowCursor minCursor = tableKelp.cursor();
    RowCursor maxCursor = tableKelp.cursor();

    minCursor.clear();
    maxCursor.setKeyMax();

    _keyExpr.fillMinCursor(minCursor, args);
    _keyExpr.fillMaxCursor(maxCursor, args);

    //QueryKelp whereKelp = _whereExpr.bind(args);
    // XXX: binding should be with unique
    EnvKelp whereKelp = new EnvKelp(_whereKelp, args);

    //tableKelp.findOne(minCursor, maxCursor, whereKelp,
    //                  result.from((cursor,r)->remove(cursor,r)));
    BackupKelp backup = _table.getBackupCallback();

    if (isStaticNode()) {
        tableKelp.remove(minCursor, backup, (Result) result);
    } else {
        tableKelp.removeRange(minCursor, maxCursor, whereKelp, backup, (Result) result);
    }

    // result.completed(null);
}
java
public Vector<Object> toValuesVector() {
    final Vector<Object> values = new Vector<Object>();
    for (final JKTableColumnValue value : this.columnsValues) {
        values.add(value);
    }
    return values;
}
java
public boolean completed() {
    if (engine == null) {
        return false;
    }
    if (length.isEnabled()) {
        if (timeout > 0) {
            return false;
        }
        return completed;
    }
    if (emitCount.isEnabled()) {
        if (leftToEmit > 0) {
            return false;
        }
        return completed;
    }
    if (wrapUp) {
        return completed;
    }
    return false;
}
java
public List<DataSourceType<WebFragmentDescriptor>> getAllDataSource() {
    List<DataSourceType<WebFragmentDescriptor>> list = new ArrayList<DataSourceType<WebFragmentDescriptor>>();
    List<Node> nodeList = model.get("data-source");
    for (Node node : nodeList) {
        DataSourceType<WebFragmentDescriptor> type =
            new DataSourceTypeImpl<WebFragmentDescriptor>(this, "data-source", model, node);
        list.add(type);
    }
    return list;
}
python
def _tumor_normal_stats(rec, somatic_info, vcf_rec):
    """Retrieve depth and frequency of tumor and normal samples.
    """
    out = {"normal": {"alt": None, "depth": None, "freq": None},
           "tumor": {"alt": 0, "depth": 0, "freq": None}}
    if hasattr(vcf_rec, "samples"):
        samples = [(s, {}) for s in vcf_rec.samples]
        for fkey in ["AD", "AO", "RO", "AF", "DP"]:
            try:
                for i, v in enumerate(rec.format(fkey)):
                    samples[i][1][fkey] = v
            except KeyError:
                pass
    # Handle INFO only inputs
    elif len(rec.samples) == 0:
        samples = [(somatic_info.tumor_name, None)]
    else:
        samples = rec.samples.items()
    for name, sample in samples:
        alt, depth, freq = sample_alt_and_depth(rec, sample)
        if depth is not None and freq is not None:
            if name == somatic_info.normal_name:
                key = "normal"
            elif name == somatic_info.tumor_name:
                key = "tumor"
            out[key]["freq"] = freq
            out[key]["depth"] = depth
            out[key]["alt"] = alt
    return out
python
def _log_multivariate_normal_density_full(X, means, covars, min_covar=1.e-7):
    """Log probability for full covariance matrices."""
    n_samples, n_dim = X.shape
    nmix = len(means)
    log_prob = np.empty((n_samples, nmix))
    for c, (mu, cv) in enumerate(zip(means, covars)):
        try:
            cv_chol = linalg.cholesky(cv, lower=True)
        except linalg.LinAlgError:
            # The model is most probably stuck in a component with too
            # few observations, we need to reinitialize this components
            try:
                cv_chol = linalg.cholesky(cv + min_covar * np.eye(n_dim),
                                          lower=True)
            except linalg.LinAlgError:
                raise ValueError("'covars' must be symmetric, "
                                 "positive-definite")

        cv_log_det = 2 * np.sum(np.log(np.diagonal(cv_chol)))
        cv_sol = linalg.solve_triangular(cv_chol, (X - mu).T, lower=True).T
        log_prob[:, c] = - .5 * (np.sum(cv_sol ** 2, axis=1) +
                                 n_dim * np.log(2 * np.pi) + cv_log_det)

    return log_prob
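A quick numerical check of the Cholesky-based density above (this assumes the function is in scope and that `linalg` refers to scipy.linalg): the result should agree with scipy.stats.multivariate_normal.logpdf.

import numpy as np
from scipy import linalg
from scipy.stats import multivariate_normal

rng = np.random.RandomState(0)
X = rng.randn(5, 3)
mu = rng.randn(3)
A = rng.randn(3, 3)
cov = A @ A.T + 3 * np.eye(3)  # symmetric positive-definite

ours = _log_multivariate_normal_density_full(X, np.array([mu]), np.array([cov]))[:, 0]
ref = multivariate_normal(mean=mu, cov=cov).logpdf(X)
print(np.allclose(ours, ref))  # True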
java
public void setInitializer(/* @Nullable */ JvmField field,
        /* @Nullable */ StringConcatenationClient strategy) {
    if (field == null || strategy == null)
        return;
    removeExistingBody(field);
    setCompilationStrategy(field, strategy);
}
python
def localization_feature(app):
    """
    Localization feature
    This will initialize support for translations and localization of
    values such as numbers, money, dates and formatting timezones.
    """
    # apply app default to babel
    app.config['BABEL_DEFAULT_LOCALE'] = app.config['DEFAULT_LOCALE']
    app.config['BABEL_DEFAULT_TIMEZONE'] = app.config['DEFAULT_TIMEZONE']

    # init babel
    babel = Babel()
    babel.init_app(app)
python
def new_from_list(cls, items, **kwargs):
    """Populates the ListView with a string list.

    Args:
        items (list): list of strings to fill the widget with.
    """
    obj = cls(**kwargs)
    for item in items:
        obj.append(ListItem(item))
    return obj
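Typical use, assuming a remi-style widget toolkit where ListView exposes this as a classmethod and ListItem comes from the same module:

# Each string becomes one ListItem child of the new ListView.
listview = ListView.new_from_list(['apples', 'pears', 'plums'])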
java
public void connect() throws DBException {
    try {
        LOGGER.debug("Initializing MongoDB client");
        mongoClient = new MongoClient(this.host, this.port);
    } catch (UnknownHostException e) {
        throw new DBException(e.toString());
    }
}
java
public void fireDepthThresholdReachedEvent(ControlAdapter cAdapter, boolean reachedHigh, long numMsgs) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "fireDepthThresholdReachedEvent",
            new Object[] { cAdapter, new Boolean(reachedHigh), new Long(numMsgs), messagingEngineUuid });

    // Retrieve appropriate information
    String meName = destinationHandler.getMessageProcessor().getMessagingEngineName();

    // The message for the Notification
    String message = null;

    // Build the properties for the Notification
    Properties props = new Properties();

    String evType = null;
    if (destinationHandler.isLink()) {
        String busName = ((LinkHandler) destinationHandler).getBusName();
        evType = SibNotificationConstants.TYPE_SIB_LINK_DEPTH_THRESHOLD_REACHED;
        props.put(SibNotificationConstants.KEY_FOREIGN_BUS_NAME, busName);
        props.put(SibNotificationConstants.KEY_LINK_NAME, destinationHandler.getName());
        props.put(SibNotificationConstants.KEY_LINK_UUID, destinationHandler.getUuid().toString());
        // Build the message for the Notification
        if (reachedHigh) {
            message = nls.getFormattedMessage("NOTIFY_SIBLINK_DEPTH_THRESHOLD_REACHED_CWSIP0559",
                new Object[] { destinationHandler.getName(), busName, meName }, null);
            // Write the message to the log if requested (510343)
            if (mp.getCustomProperties().getOutputLinkThresholdEventsToLog())
                SibTr.info(tc, "NOTIFY_SIBLINK_DEPTH_THRESHOLD_REACHED_CWSIP0559",
                    new Object[] { destinationHandler.getName(), busName, meName });
        } else {
            message = nls.getFormattedMessage("NOTIFY_SIBLINK_DEPTH_THRESHOLD_REACHED_CWSIP0560",
                new Object[] { destinationHandler.getName(), busName, meName }, null);
            // Write the message to the log if requested (510343)
            if (mp.getCustomProperties().getOutputLinkThresholdEventsToLog())
                SibTr.info(tc, "NOTIFY_SIBLINK_DEPTH_THRESHOLD_REACHED_CWSIP0560",
                    new Object[] { destinationHandler.getName(), busName, meName });
        }
    } else {
        evType = SibNotificationConstants.TYPE_SIB_REMOTE_MESSAGEPOINT_DEPTH_THRESHOLD_REACHED;
        props.put(SibNotificationConstants.KEY_DESTINATION_NAME, destinationHandler.getName());
        props.put(SibNotificationConstants.KEY_DESTINATION_UUID, destinationHandler.getUuid().toString());
        // Build the message for the Notification
        if (reachedHigh) {
            message = nls.getFormattedMessage("NOTIFY_REMOTE_DEPTH_THRESHOLD_REACHED_CWSIP0555",
                new Object[] { meName, destinationHandler.getName(), messagingEngineUuid.toString() }, null);
            // Write the message to the log if requested (510343)
            if (mp.getCustomProperties().getOutputDestinationThresholdEventsToLog())
                SibTr.info(tc, "NOTIFY_REMOTE_DEPTH_THRESHOLD_REACHED_CWSIP0555",
                    new Object[] { meName, destinationHandler.getName(), messagingEngineUuid.toString() });
        } else {
            message = nls.getFormattedMessage("NOTIFY_REMOTE_DEPTH_THRESHOLD_REACHED_CWSIP0556",
                new Object[] { meName, destinationHandler.getName(), messagingEngineUuid.toString() }, null);
            // Write the message to the log if requested (510343)
            if (mp.getCustomProperties().getOutputDestinationThresholdEventsToLog())
                SibTr.info(tc, "NOTIFY_REMOTE_DEPTH_THRESHOLD_REACHED_CWSIP0556",
                    new Object[] { meName, destinationHandler.getName(), messagingEngineUuid.toString() });
        }
    }

    if (_isEventNotificationEnabled) {
        props.put(SibNotificationConstants.KEY_REMOTE_MESSAGING_ENGINE_UUID, messagingEngineUuid.toString());
        if (reachedHigh)
            props.put(SibNotificationConstants.KEY_DEPTH_THRESHOLD_REACHED,
                SibNotificationConstants.DEPTH_THRESHOLD_REACHED_HIGH);
        else
            props.put(SibNotificationConstants.KEY_DEPTH_THRESHOLD_REACHED,
                SibNotificationConstants.DEPTH_THRESHOLD_REACHED_LOW);
        // Number of Messages
        props.put(SibNotificationConstants.KEY_MESSAGES, String.valueOf(numMsgs));
        if (cAdapter != null) {
            // Now create the Event object to pass to the control adapter
            MPRuntimeEvent MPevent = new MPRuntimeEvent(evType, message, props);
            // Fire the event
            if (tc.isDebugEnabled())
                SibTr.debug(tc, "fireDepthThresholdReachedEvent",
                    "Drive runtimeEventOccurred against Control adapter: " + cAdapter);
            cAdapter.runtimeEventOccurred(MPevent);
        } else {
            if (tc.isDebugEnabled())
                SibTr.debug(tc, "fireDepthThresholdReachedEvent", "Control adapter is null, cannot fire event");
        }
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "fireDepthThresholdReachedEvent");
}
python
def reg_to_lex(conditions, wildcards):
    """Transform a regular expression into a LEPL object.

    Replace the wildcards in the conditions by LEPL elements, like
    xM will be replaced by Any() & 'M'.
    In case of multiple same wildcards (like xMx), aliases are created
    to allow the regexp to compile, like Any() > 'x_0' & 'M' & Any() > 'x_1',
    and we check that the matched values for all aliases like x_0, x_1
    are the same.
    """
    aliases = defaultdict(set)
    n_conds = []  # All conditions
    for i, _ in enumerate(conditions):
        n_cond = []
        for char in conditions[i]:
            if char in wildcards:
                alias = '%s_%s' % (char, len(aliases[char]))
                aliases[char].add(alias)
                n_cond.append(make_token(alias, reg=wildcards[char]))
            else:
                n_cond.append(~Literal(char))
        n_cond.append(Eos())
        n_conds.append(reduce(operator.and_, n_cond) > make_dict)
    return tuple(n_conds), aliases
java
protected void postLayoutChild(final int dataIndex) {
    if (!mContainer.isDynamic()) {
        boolean visibleInLayout = !mViewPort.isClippingEnabled() || inViewPort(dataIndex);
        ViewPortVisibility visibility = visibleInLayout ?
                ViewPortVisibility.FULLY_VISIBLE : ViewPortVisibility.INVISIBLE;
        Log.d(Log.SUBSYSTEM.LAYOUT, TAG,
                "onLayout: child with dataId [%d] viewportVisibility = %s",
                dataIndex, visibility);

        Widget childWidget = mContainer.get(dataIndex);
        if (childWidget != null) {
            childWidget.setViewPortVisibility(visibility);
        }
    }
}
python
def preprocess_async(train_dataset, output_dir, eval_dataset=None, checkpoint=None, cloud=None):
    """Preprocess data. Produce output that can be used by training efficiently.

    Args:
        train_dataset: training data source to preprocess. Can be CsvDataset or
            BigQueryDataSet. If eval_dataset is None, the pipeline will randomly split
            train_dataset into train/eval set with 7:3 ratio.
        output_dir: The output directory to use. Preprocessing will create a sub directory
            under it for each run, and also update "latest" file which points to the latest
            preprocessed directory. Users are responsible for cleanup. Can be local or GCS path.
        eval_dataset: evaluation data source to preprocess. Can be CsvDataset or
            BigQueryDataSet. If specified, it will be used for evaluation during training,
            and train_dataset will be completely used for training.
        checkpoint: the Inception checkpoint to use. If None, a default checkpoint is used.
        cloud: a DataFlow pipeline option dictionary such as {'num_workers': 3}. If anything
            but not None, it will run in cloud. Otherwise, it runs locally.

    Returns:
        A google.datalab.utils.Job object that can be used to query state from or wait.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        if cloud is None:
            return _local.Local.preprocess(train_dataset, output_dir, eval_dataset, checkpoint)

        if not isinstance(cloud, dict):
            cloud = {}
        return _cloud.Cloud.preprocess(train_dataset, output_dir, eval_dataset, checkpoint, cloud)
python
def from_wei(number: int, unit: str) -> Union[int, decimal.Decimal]:
    """
    Takes a number of wei and converts it to any other ether unit.
    """
    if unit.lower() not in units:
        raise ValueError(
            "Unknown unit. Must be one of {0}".format("/".join(units.keys()))
        )

    if number == 0:
        return 0

    if number < MIN_WEI or number > MAX_WEI:
        raise ValueError("value must be between 1 and 2**256 - 1")

    unit_value = units[unit.lower()]

    with localcontext() as ctx:
        ctx.prec = 999
        d_number = decimal.Decimal(value=number, context=ctx)
        result_value = d_number / unit_value

    return result_value
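Illustrative use, assuming the module-level `units` table follows the usual ether-unit conventions (1 ether = 10**18 wei, 1 gwei = 10**9 wei):

# Converting wei to larger units yields exact Decimals.
print(from_wei(1000000000000000000, 'ether'))  # Decimal('1')
print(from_wei(123456789, 'gwei'))             # Decimal('0.123456789')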
python
def on_rabbitmq_close(self, reply_code, reply_text):
    """Called when the connection to RabbitMQ is closed.

    :param int reply_code: The code for the disconnect
    :param str reply_text: The disconnect reason
    """
    global rabbitmq_connection
    LOGGER.warning('RabbitMQ has disconnected (%s): %s', reply_code, reply_text)
    rabbitmq_connection = None
    self._set_rabbitmq_channel(None)
    self._connect_to_rabbitmq()
python
def batch_mutate(self, mutation_map, consistency_level):
    """
    Mutate many columns or super columns for many row keys. See also: Mutation.

    mutation_map maps key to column family to a list of Mutation objects to
    take place at that scope.

    Parameters:
     - mutation_map
     - consistency_level
    """
    self._seqid += 1
    d = self._reqs[self._seqid] = defer.Deferred()
    self.send_batch_mutate(mutation_map, consistency_level)
    return d
java
public static appfwlearningsettings get(nitro_service service, String profilename) throws Exception {
    appfwlearningsettings obj = new appfwlearningsettings();
    obj.set_profilename(profilename);
    appfwlearningsettings response = (appfwlearningsettings) obj.get_resource(service);
    return response;
}
python
def get_function_class(function_name):
    """
    Return the type for the requested function

    :param function_name: the function to return
    :return: the type for that function (i.e., this is a class, not an instance)
    """
    if function_name in _known_functions:
        return _known_functions[function_name]
    else:
        raise UnknownFunction("Function %s is not known. Known functions are: %s" %
                              (function_name, ",".join(_known_functions.keys())))
python
def get_template_path(self, meta=None, **kwargs):
    """
    Formats template_name_path_pattern with kwargs given.
    """
    if 'template_name_suffix' not in kwargs or kwargs.get('template_name_suffix') is None:
        kwargs['template_name_suffix'] = self.get_template_name_suffix()
    return self.template_name_path_pattern.format(**kwargs)
python
def path_parts(path):
    """Split path into container, object.

    :param path: Path to resource (including container).
    :type path: `string`
    :return: Container, storage object tuple.
    :rtype: `tuple` of `string`, `string`
    """
    path = path if path is not None else ''
    container_path = object_path = ''
    parts = path_list(path)

    if len(parts) >= 1:
        container_path = parts[0]
    if len(parts) > 1:
        object_path = path_join(*parts[1:])

    return container_path, object_path
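Example behavior, assuming the module's path_list splits on '/' and path_join re-joins with '/' (both helpers come from the surrounding module, so this is an inference from their names):

print(path_parts('container/dir/obj.txt'))  # ('container', 'dir/obj.txt')
print(path_parts('container'))              # ('container', '')
print(path_parts(None))                     # ('', '')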
java
public void setBackground(Color color) {
    predraw();
    GL.glClearColor(color.r, color.g, color.b, color.a);
    postdraw();
}
java
public void cancel() {
    Exchange exchangeToCancel;
    RealConnection connectionToCancel;
    synchronized (connectionPool) {
        canceled = true;
        exchangeToCancel = exchange;
        connectionToCancel = exchangeFinder != null && exchangeFinder.connectingConnection() != null
            ? exchangeFinder.connectingConnection()
            : connection;
    }
    if (exchangeToCancel != null) {
        exchangeToCancel.cancel();
    } else if (connectionToCancel != null) {
        connectionToCancel.cancel();
    }
}
java
protected PrivateKey getSigningPrivateKey() throws Exception {
    val samlIdp = casProperties.getAuthn().getSamlIdp();
    val signingKey = samlIdPMetadataLocator.getSigningKey();
    val privateKeyFactoryBean = new PrivateKeyFactoryBean();
    privateKeyFactoryBean.setLocation(signingKey);
    privateKeyFactoryBean.setAlgorithm(samlIdp.getMetadata().getPrivateKeyAlgName());
    privateKeyFactoryBean.setSingleton(false);
    LOGGER.debug("Locating signature signing key from [{}]", signingKey);
    return privateKeyFactoryBean.getObject();
}
java
public WaiterState accepts(AmazonServiceException exception) throws AmazonServiceException {
    for (WaiterAcceptor<Output> acceptor : acceptors) {
        if (acceptor.matches(exception)) {
            return acceptor.getState();
        }
    }
    throw exception;
}
java
public void fire(StepEvent event) {
    Step step = stepStorage.getLast();
    event.process(step);
    notifier.fire(event);
}
python
def calculate_sets(rules):
    """Calculate FOLLOW sets.

    Adapted from: http://lara.epfl.ch/w/cc09:algorithm_for_first_and_follow_sets"""
    symbols = {sym for rule in rules for sym in rule.expansion} | {rule.origin for rule in rules}

    # foreach grammar rule X ::= Y(1) ... Y(k)
    # if k=0 or {Y(1),...,Y(k)} subset of NULLABLE then
    #   NULLABLE = NULLABLE union {X}
    # for i = 1 to k
    #   if i=1 or {Y(1),...,Y(i-1)} subset of NULLABLE then
    #     FIRST(X) = FIRST(X) union FIRST(Y(i))
    #   for j = i+1 to k
    #     if i=k or {Y(i+1),...Y(k)} subset of NULLABLE then
    #       FOLLOW(Y(i)) = FOLLOW(Y(i)) union FOLLOW(X)
    #     if i+1=j or {Y(i+1),...,Y(j-1)} subset of NULLABLE then
    #       FOLLOW(Y(i)) = FOLLOW(Y(i)) union FIRST(Y(j))
    # until none of NULLABLE,FIRST,FOLLOW changed in last iteration

    NULLABLE = set()
    FIRST = {}
    FOLLOW = {}
    for sym in symbols:
        FIRST[sym] = {sym} if sym.is_term else set()
        FOLLOW[sym] = set()

    # Calculate NULLABLE and FIRST
    changed = True
    while changed:
        changed = False

        for rule in rules:
            if set(rule.expansion) <= NULLABLE:
                if update_set(NULLABLE, {rule.origin}):
                    changed = True

            for i, sym in enumerate(rule.expansion):
                if set(rule.expansion[:i]) <= NULLABLE:
                    if update_set(FIRST[rule.origin], FIRST[sym]):
                        changed = True

    # Calculate FOLLOW
    changed = True
    while changed:
        changed = False

        for rule in rules:
            for i, sym in enumerate(rule.expansion):
                if i == len(rule.expansion) - 1 or set(rule.expansion[i + 1:]) <= NULLABLE:
                    if update_set(FOLLOW[sym], FOLLOW[rule.origin]):
                        changed = True

                for j in range(i + 1, len(rule.expansion)):
                    if set(rule.expansion[i + 1:j]) <= NULLABLE:
                        if update_set(FOLLOW[sym], FIRST[rule.expansion[j]]):
                            changed = True

    return FIRST, FOLLOW, NULLABLE
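A minimal driver for the routine above. The update_set helper and the Rule/symbol shapes are assumptions inferred from how calculate_sets uses them: symbols need an is_term flag, rules need origin/expansion attributes, and update_set must add elements and report whether anything changed.

from collections import namedtuple

def update_set(target, addition):
    # Union in place; report whether the set grew.
    before = len(target)
    target |= addition
    return len(target) != before

Sym = namedtuple('Sym', 'name is_term')
Rule = namedtuple('Rule', 'origin expansion')

S, A = Sym('S', False), Sym('A', False)
a, b = Sym('a', True), Sym('b', True)

# Toy grammar:  S -> A b ;  A -> a ;  A -> (empty)
rules = [Rule(S, (A, b)), Rule(A, (a,)), Rule(A, ())]
FIRST, FOLLOW, NULLABLE = calculate_sets(rules)
print(A in NULLABLE)               # True
print({s.name for s in FIRST[S]})  # {'a', 'b'} -- 'b' because A is nullable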
java
public static String toBundleName(String bundleBaseName, Locale locale) {
    String baseName = bundleBaseName.replace('.', '/');
    if (locale == null) {
        return baseName;
    }
    String language = locale.getLanguage();
    String country = locale.getCountry();
    String variant = locale.getVariant();
    if (StringUtils.isEmpty(language) && StringUtils.isEmpty(country) && StringUtils.isEmpty(variant)) {
        return baseName;
    }
    StringBuilder sb = new StringBuilder(baseName);
    sb.append('_');
    if (StringUtils.isNotEmpty(variant)) {
        sb.append(language).append('_').append(country).append('_').append(variant);
    } else if (StringUtils.isNotEmpty(country)) {
        sb.append(language).append('_').append(country);
    } else {
        sb.append(language);
    }
    return sb.toString();
}
java
public void handle(RequestContext context, AuthenticationException e)
        throws SecurityProviderException, IOException {
    saveRequest(context);

    String loginFormUrl = getLoginFormUrl();
    if (StringUtils.isNotEmpty(loginFormUrl)) {
        RedirectUtils.redirect(context.getRequest(), context.getResponse(), loginFormUrl);
    } else {
        sendError(e, context);
    }
}
python
def _updateall(self, query, vars, returning=False):
    """ Update, with optional return. """
    cursor = self.get_db().cursor()
    self._log(cursor, query, vars)
    cursor.execute(query, vars)
    self.get_db().commit()
    return cursor.fetchall() if returning else None
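A sketch of how such a helper might be called; the table, columns, and psycopg2-style %s placeholders are made up for illustration:

rows = db._updateall(
    "UPDATE items SET price = %s WHERE category = %s RETURNING id",
    (9.99, 'books'),
    returning=True,
)
# rows -> e.g. [(1,), (7,)]; with returning=False the call returns None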
java
public static XorPeerAddressAttribute createXorPeerAddressAttribute(
        TransportAddress address, byte[] tranID) {
    XorPeerAddressAttribute attribute = new XorPeerAddressAttribute();
    // TODO shouldn't we be XORing the address before setting it?
    attribute.setAddress(address, tranID);
    return attribute;
}
python
def swo_disable(self, port_mask):
    """Disables ITM & Stimulus ports.

    Args:
        self (JLink): the ``JLink`` instance
        port_mask (int): mask specifying which ports to disable

    Returns:
        ``None``

    Raises:
        JLinkException: on error
    """
    res = self._dll.JLINKARM_SWO_DisableTarget(port_mask)
    if res != 0:
        raise errors.JLinkException(res)
    return None
java
public void marshall(ReplicaSettingsDescription replicaSettingsDescription, ProtocolMarshaller protocolMarshaller) {
    if (replicaSettingsDescription == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(replicaSettingsDescription.getRegionName(), REGIONNAME_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaStatus(), REPLICASTATUS_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaBillingModeSummary(), REPLICABILLINGMODESUMMARY_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaProvisionedReadCapacityUnits(), REPLICAPROVISIONEDREADCAPACITYUNITS_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaProvisionedReadCapacityAutoScalingSettings(),
            REPLICAPROVISIONEDREADCAPACITYAUTOSCALINGSETTINGS_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaProvisionedWriteCapacityUnits(), REPLICAPROVISIONEDWRITECAPACITYUNITS_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaProvisionedWriteCapacityAutoScalingSettings(),
            REPLICAPROVISIONEDWRITECAPACITYAUTOSCALINGSETTINGS_BINDING);
        protocolMarshaller.marshall(replicaSettingsDescription.getReplicaGlobalSecondaryIndexSettings(), REPLICAGLOBALSECONDARYINDEXSETTINGS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
@Override
public void selectRange(int startParagraphIndex, int startColPosition,
        int endParagraphIndex, int endColPosition) {
    selectRange(textPosition(startParagraphIndex, startColPosition),
                textPosition(endParagraphIndex, endColPosition));
}
python
def package_node(root=None, name=None):
    '''package node aims to package a (present working) node for a user into
    a container. This assumes that the node is a single partition.

    :param root: the root of the node to package, default is /
    :param name: the name for the image. If not specified, will use the
                 machine's hostname (platform.node())
    '''
    if name is None:
        name = platform.node()
    if root is None:
        root = "/"
    tmpdir = tempfile.mkdtemp()
    image = "%s/%s.tgz" % (tmpdir, name)
    print("Preparing to package root %s into %s" % (root, name))
    cmd = ["tar", "--one-file-system", "-czvSf", image, root, "--exclude", image]
    output = run_command(cmd)
    return image
python
def write(self, src, dest=None):
    """Schedules a write of the file at ``src`` to the ``dest`` path in this jar.

    If the ``src`` is a file, then ``dest`` must be specified.

    If the ``src`` is a directory then by default all descendant files will be
    added to the jar as entries carrying their relative path.  If ``dest`` is
    specified it will be prefixed to each descendant's relative path to form its
    jar entry path.

    :param string src: the path to the pre-existing source file or directory
    :param string dest: the path the source file or directory should have in this jar
    """
    if not src or not isinstance(src, string_types):
        raise ValueError('The src path must be a non-empty string, got {} of type {}.'.format(
            src, type(src)))
    if dest and not isinstance(dest, string_types):
        raise ValueError('The dest entry path must be a non-empty string, got {} of type {}.'.format(
            dest, type(dest)))
    if not os.path.isdir(src) and not dest:
        raise self.Error('Source file {} must have a jar destination specified'.format(src))

    self._add_entry(self.FileSystemEntry(src, dest))
python
def newAttemptForUser(self, user):
    """
    Create an L{_PasswordResetAttempt} for the user whose username is C{user}

    @param user: C{unicode} username
    """
    # we could query for other attempts by the same
    # user within some timeframe and raise an exception,
    # if we wanted
    return _PasswordResetAttempt(store=self.store,
                                 username=user,
                                 timestamp=extime.Time(),
                                 key=self._makeKey(user))
java
public synchronized void rollback()
    throws SIIncorrectCallException, SIResourceException,
           SIConnectionLostException, SIErrorException {
    if (tc.isEntryEnabled()) SibTr.entry(this, tc, "rollback");

    if (!valid) {
        throw new SIIncorrectCallException(
            nls.getFormattedMessage("TRANSACTION_COMPLETE_SICO1022", null, null)
        );
    }

    // Mark this as invalid
    valid = false;

    CommsByteBuffer request = getCommsByteBuffer();
    request.putShort(getConnectionObjectID());
    request.putInt(getTransactionId());

    CommsByteBuffer reply = null;
    try {
        // Pass on call to server
        reply = jfapExchange(request,
                             JFapChannelConstants.SEG_ROLLBACK_TRANSACTION,
                             lowestPriority,
                             true);

        // Inform any associated consumers of the rollback, in case there are
        // redelivery ordering considerations. We do this regardless of the
        // success of the rollback, but need to do it after the rollback has
        // been sent to the ME.
        informConsumersOfRollback();

        short err = reply.getCommandCompletionCode(JFapChannelConstants.SEG_ROLLBACK_TRANSACTION_R);
        if (err != CommsConstants.SI_NO_EXCEPTION) {
            checkFor_SIIncorrectCallException(reply, err);
            checkFor_SIResourceException(reply, err);
            checkFor_SIConnectionLostException(reply, err);
            checkFor_SIErrorException(reply, err);
            defaultChecker(reply, err);
        }
    } catch (SIConnectionDroppedException e) {
        // No FFDC Code needed
        // PK60857: The connection broke on us, but the ME side will eventually timeout
        // and assume rollback. So we can safely consume this exception. We previously
        // threw an exception here but that leaked a connection from the pool because
        // the tx never completed.
        if (tc.isDebugEnabled()) SibTr.debug(this, tc, "Connection failure during rollback.");
    } finally {
        if (reply != null) reply.release();
    }

    if (tc.isEntryEnabled()) SibTr.exit(this, tc, "rollback");
}
python
def isnil(self):
    """
    Get whether the element is I{nil} as defined by having
    an I{xsi:nil="true"} attribute.

    @return: True if I{nil}, else False
    @rtype: boolean
    """
    nilattr = self.getAttribute("nil", ns=Namespace.xsins)
    return nilattr is not None and (nilattr.getValue().lower() == "true")
python
def end_container(self, cancel=None):
    """Finishes and registers the currently-active container, unless 'cancel' is True."""
    if not self._containers:
        return
    container = self._containers.pop()
    if len(self._containers) >= 1:
        parent = self._containers[-1]
    else:
        parent = self._doc.text
    if not cancel:
        parent.addElement(container)
python
def read_http_header(sock):
    """Read HTTP header from socket, return header and rest of data."""
    buf = []
    hdr_end = '\r\n\r\n'
    while True:
        chunk = sock.recv(bufsize)
        if not chunk:
            # connection closed before the header terminator arrived;
            # without this check the loop would spin forever on empty reads
            raise EOFError('connection closed while reading HTTP header')
        buf.append(chunk.decode('utf-8'))
        data = ''.join(buf)
        i = data.find(hdr_end)
        if i == -1:
            continue
        return data[:i], data[i + len(hdr_end):]
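A quick check with a stub socket (bufsize is assumed to be a module-level constant; here the whole request arrives in one recv call):

bufsize = 4096

class FakeSock:
    """Minimal stand-in for a socket that serves a fixed byte payload."""
    def __init__(self, payload):
        self._payload = payload
    def recv(self, n):
        chunk, self._payload = self._payload[:n], self._payload[n:]
        return chunk

hdr, rest = read_http_header(FakeSock(b'GET / HTTP/1.1\r\nHost: x\r\n\r\nbody'))
print(repr(hdr))   # 'GET / HTTP/1.1\r\nHost: x'
print(repr(rest))  # 'body'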
python
def plot_sediment_rate(self, ax=None):
    """Plot sediment accumulation rate prior and posterior distributions"""
    if ax is None:
        ax = plt.gca()

    y_prior, x_prior = self.prior_sediment_rate()
    ax.plot(x_prior, y_prior, label='Prior')

    y_posterior = self.mcmcfit.sediment_rate
    density = scipy.stats.gaussian_kde(y_posterior.flat)
    density.covariance_factor = lambda: 0.25
    density._compute_covariance()
    ax.plot(x_prior, density(x_prior), label='Posterior')

    acc_shape = self.mcmcsetup.mcmc_kws['acc_shape']
    acc_mean = self.mcmcsetup.mcmc_kws['acc_mean']
    annotstr_template = 'acc_shape: {0}\nacc_mean: {1}'
    annotstr = annotstr_template.format(acc_shape, acc_mean)
    ax.annotate(annotstr, xy=(0.9, 0.9), xycoords='axes fraction',
                horizontalalignment='right', verticalalignment='top')

    ax.set_ylabel('Density')
    ax.set_xlabel('Acc. rate (yr/cm)')
    ax.grid(True)
    return ax
python
def quat_conjugate(quaternion):
    """Return conjugate of quaternion.

    >>> q0 = random_quaternion()
    >>> q1 = quat_conjugate(q0)
    >>> q1[3] == q0[3] and all(q1[:3] == -q0[:3])
    True
    """
    return np.array(
        (-quaternion[0], -quaternion[1], -quaternion[2], quaternion[3]),
        dtype=np.float32,
    )
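Example (this function uses the w-last convention, quaternion = (x, y, z, w)); conjugating twice returns the original quaternion:

import numpy as np

q = np.array([0.5, -0.5, 0.5, 0.5], dtype=np.float32)
print(quat_conjugate(q))                  # [-0.5  0.5 -0.5  0.5]
print(quat_conjugate(quat_conjugate(q)))  # [ 0.5 -0.5  0.5  0.5]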
java
protected Server configureJetty(final int port) {
    final Server server = new Server();
    final ServerConnector connector = new ServerConnector(server);
    final ServletContextHandler sch = getServletContextHandler();

    // TODO: make all of this configurable
    connector.setIdleTimeout((int) TimeUnit.HOURS.toMillis(1));
    connector.setSoLingerTime(-1);
    connector.setPort(port);
    server.addConnector(connector);

    // set the injector as an attribute in the context
    sch.setAttribute("guice-injector", getInjector());

    // prevent the JSESSIONID from getting set via a URL argument
    sch.setInitParameter("org.eclipse.jetty.servlet.SessionIdPathParameterName", "none");

    // if we're using a database, then install the filter
    if (!settings.getDatabaseSettings().getNotUsed()) {
        // setup a FilterHolder for the Guice Persistence
        final FilterHolder persistFilter =
            new FilterHolder(getInjector().getInstance(PersistFilter.class));

        // add the filter to the context
        sch.addFilter(persistFilter, "/*", DISPATCHER_TYPES);
    }

    // configure a FilterHolder for Guice
    final FilterHolder filterHolder = new FilterHolder(GuiceFilter.class);
    sch.addFilter(filterHolder, "/*", DISPATCHER_TYPES);
    sch.addServlet(DefaultServlet.class, "/*");

    //server.setDumpAfterStart(true);
    server.setHandler(sch);

    return server;
}
java
public String logMessage(String strTrxIDIn, BaseMessage trxMessage, String strMessageInfoType,
        String strMessageProcessType, String strMessageStatus, String strMessageDescription,
        String strMessageMimeType)
{
    String strTrxID = strTrxIDIn;
    int iUserID = -1;
    String strContactType = null;
    String strContact = null;
    int iMessageReferenceID = -1;
    Task taskParent = this.getTask();
    Record recMessageLog = this.getRecord(MessageLog.MESSAGE_LOG_FILE);
    if (recMessageLog == null)
        recMessageLog = Record.makeRecordFromClassName(MessageLogModel.THICK_CLASS, this);
    try {
        if (strTrxID != null)
        {
            recMessageLog.getField(MessageLogModel.ID).setString(strTrxID);
            recMessageLog.setKeyArea(MessageLogModel.ID);
            if (recMessageLog.seek(null))
                recMessageLog.edit();
            else
                strTrxID = null;
        }
        if (strTrxID == null)
            recMessageLog.addNew();
        ReferenceField fldReference = (ReferenceField)recMessageLog.getField(MessageLogModel.MESSAGE_INFO_TYPE_ID);
        int iMessageInfoTypeID = fldReference.getIDFromCode(strMessageInfoType);
        fldReference.setValue(iMessageInfoTypeID);  // Message type
        fldReference = (ReferenceField)recMessageLog.getField(MessageLogModel.MESSAGE_TYPE_ID);
        int iMessageProcessTypeID = fldReference.getIDFromCode(strMessageProcessType);
        fldReference.setValue(iMessageProcessTypeID);   // Message type
        fldReference = (ReferenceField)recMessageLog.getField(MessageLogModel.MESSAGE_STATUS_ID);
        String objNativeMessage = null;
        if (objNativeMessage == null)
            if (trxMessage != null)
                if (trxMessage.getExternalMessage() != null)
                    if (!MessageStatusModel.ERROR.equals(strMessageStatus))
                        objNativeMessage = trxMessage.getExternalMessage().toString();
        if (objNativeMessage == null)
            if (strMessageStatus == null)
                strMessageStatus = MessageStatusModel.TRX_ID_HOLD;
        if (strMessageStatus == null)
            strMessageStatus = MessageStatusModel.UNKNOWN;
        int iMessageTimeout = 0;
        if (trxMessage != null)
            if (MessageStatusModel.SENT.equalsIgnoreCase(strMessageStatus))
            {
                if ((trxMessage.getMessageHeader().get(TrxMessageHeader.MESSAGE_RESPONSE_ID) == null)
                        && (trxMessage.getMessageHeader().get(TrxMessageHeader.MESSAGE_RESPONSE_CODE) == null)
                        && (trxMessage.getMessageHeader().get(TrxMessageHeader.MESSAGE_RESPONSE_CLASS) == null))
                    strMessageStatus = MessageStatusModel.SENTOK;   // If I'm not expecting a response, status is sent okay.
                else if (trxMessage.getMessageHeader().get(TrxMessageHeader.MESSAGE_TIMEOUT) != null)
                {
                    try {
                        iMessageTimeout = Integer.parseInt((String)trxMessage.getMessageHeader().get(TrxMessageHeader.MESSAGE_TIMEOUT));
                    } catch (NumberFormatException ex) {
                        iMessageTimeout = 0;
                    }
                }
            }
        recMessageLog.getField(MessageLogModel.TIMEOUT_SECONDS).setValue(iMessageTimeout);
        if ((fldReference.isNull())
                || (!MessageStatusModel.ERROR.equalsIgnoreCase(fldReference.getReference().getField(MessageStatusModel.CODE).toString())))
        {   // Can't change error status to something else
            int iMessageStatusID = fldReference.getIDFromCode(strMessageStatus);
            fldReference.setValue(iMessageStatusID);    // Message type
        }
        String strMessageTransport = this.getMessageTransportType();
        fldReference = (ReferenceField)recMessageLog.getField(MessageLogModel.MESSAGE_TRANSPORT_ID);
        int iMessageTransportID = fldReference.getIDFromCode(strMessageTransport);
        fldReference.setValue(iMessageTransportID); // Message type
        String strReferenceType = null;
        int iMessageProcessInfoID = -1;
        if (!MessageStatusModel.ERROR.equals(strMessageStatus))
            if ((trxMessage != null) && (trxMessage.getMessageHeader() != null))
            {
                TrxMessageHeader trxMessageHeader = (TrxMessageHeader)trxMessage.getMessageHeader();
                if (strMessageDescription == null)
                    strMessageDescription = Utility.convertObjectToString(trxMessageHeader.get(TrxMessageHeader.DESCRIPTION));
                strContactType = Utility.convertObjectToString(trxMessageHeader.get(TrxMessageHeader.CONTACT_TYPE));
                if (strContactType != null)
                    if (strContactType.length() > 0)
                        if (!Utility.isNumeric(strContactType))
                        {   // Convert the contact type from the record name to the ID.
                            Record recContactType = ((ReferenceField)recMessageLog.getField(MessageLogModel.CONTACT_TYPE_ID)).getReferenceRecord();
                            recContactType.setKeyArea(ContactTypeModel.CODE_KEY);
                            recContactType.getField(ContactTypeModel.CODE).setString(strContactType);
                            try {
                                if (recContactType.seek(null))
                                {   // Success
                                    strContactType = recContactType.getField(ContactTypeModel.ID).toString();
                                }
                            } catch (DBException ex) {
                                ex.printStackTrace();
                            }
                        }
                strContact = Utility.convertObjectToString(trxMessageHeader.get(TrxMessageHeader.CONTACT_ID));
                Object objUserID = trxMessageHeader.get(DBParams.USER_ID);
                if (objUserID == null)
                    objUserID = trxMessageHeader.get(DBParams.USER_NAME);
                if (objUserID instanceof Integer)
                    iUserID = ((Integer)objUserID).intValue();
                else if (objUserID instanceof String)
                {
                    if (Utility.isNumeric((String)objUserID))
                        iUserID = Integer.parseInt((String)objUserID);
                    else
                    {
                        Record recUserInfo = Record.makeRecordFromClassName(UserInfoModel.THICK_CLASS, this);
                        if (((UserInfoModel)recUserInfo).getUserInfo((String)objUserID, false))
                            iUserID = (int)recUserInfo.getField(UserInfoModel.ID).getValue();
                        recUserInfo.free();
                    }
                }
                Object objMessageReferenceID = trxMessageHeader.get(TrxMessageHeader.REFERENCE_ID);
                if (objMessageReferenceID == null)
                    objMessageReferenceID = trxMessage.get(TrxMessageHeader.REFERENCE_ID);
                if (objMessageReferenceID instanceof Integer)
                    iMessageReferenceID = ((Integer)objMessageReferenceID).intValue();
                else if (objMessageReferenceID instanceof String)
                {
                    if (Utility.isNumeric((String)objMessageReferenceID))
                        iMessageReferenceID = Integer.parseInt((String)objMessageReferenceID);
                }
                if (trxMessageHeader.get(TrxMessageHeader.REFERENCE_TYPE) != null)
                    strReferenceType = (String)trxMessageHeader.get(TrxMessageHeader.REFERENCE_TYPE);
                if (strReferenceType == null)
                    strReferenceType = (String)trxMessage.get(TrxMessageHeader.REFERENCE_TYPE);
                if (trxMessageHeader.get(TrxMessageHeader.MESSAGE_PROCESS_INFO_ID) != null)
                {
                    Object objMessageProcessInfoID = trxMessageHeader.get(TrxMessageHeader.MESSAGE_PROCESS_INFO_ID);
                    if (objMessageProcessInfoID instanceof Integer)
                        iMessageProcessInfoID = ((Integer)objMessageProcessInfoID).intValue();
                    else if (objMessageProcessInfoID instanceof String)
                    {
                        if (Utility.isNumeric((String)objMessageProcessInfoID))
                            iMessageProcessInfoID = Integer.parseInt((String)objMessageProcessInfoID);
                    }
                }
            }
        if (iUserID != -1)
            recMessageLog.getField(MessageLogModel.USER_ID).setValue(iUserID);
        else if (taskParent != null)
            recMessageLog.getField(MessageLogModel.USER_ID).setString(((BaseApplication)taskParent.getApplication()).getUserID());
        if (strContactType != null)
            recMessageLog.getField(MessageLogModel.CONTACT_TYPE_ID).setString(strContactType);
        if (strContact != null)
            recMessageLog.getField(MessageLogModel.CONTACT_ID).setString(strContact);
        if (strMessageDescription != null)
            recMessageLog.getField(MessageLogModel.DESCRIPTION).setString(strMessageDescription);
        recMessageLog.getField(MessageLogModel.MESSAGE_TIME).setValue(DateTimeField.currentTime());
        if (iMessageProcessInfoID != -1)
            recMessageLog.getField(MessageLogModel.MESSAGE_PROCESS_INFO_ID).setValue(iMessageProcessInfoID);
        if (strReferenceType != null)
            recMessageLog.getField(MessageLogModel.REFERENCE_TYPE).setString(strReferenceType);
        if (iMessageReferenceID != -1)
            recMessageLog.getField(MessageLogModel.REFERENCE_ID).setValue(iMessageReferenceID);
        if (strMessageInfoType != null)
            recMessageLog.getField(MessageLogModel.MESSAGE_DATA_TYPE).setString(strMessageInfoType);
        if (!MessageStatusModel.ERROR.equals(strMessageStatus))
            if (trxMessage != null)
            {
                recMessageLog.getField(MessageLogModel.MESSAGE_CLASS_NAME).setString(trxMessage.getClass().getName());
                if (trxMessage.getMessageHeader() != null)
                {
                    recMessageLog.getField(MessageLogModel.MESSAGE_HEADER_CLASS_NAME).setString(trxMessage.getMessageHeader().getClass().getName());
                    recMessageLog.getField(MessageLogModel.MESSAGE_QUEUE_NAME).setString(trxMessage.getMessageHeader().getQueueName());
                    recMessageLog.getField(MessageLogModel.MESSAGE_QUEUE_TYPE).setString(trxMessage.getMessageHeader().getQueueType());
                    Map<String,Object> propHeaderInfo = ((TrxMessageHeader)trxMessage.getMessageHeader()).getMessageInfoMap();
                    if (propHeaderInfo != null)
                        ((PropertiesField)recMessageLog.getField(MessageLogModel.MESSAGE_INFO_PROPERTIES)).setProperties(propHeaderInfo);
                    Map<String,Object> propMessageHeader = ((TrxMessageHeader)trxMessage.getMessageHeader()).getMessageHeaderMap();
                    if (propMessageHeader != null)
                        ((PropertiesField)recMessageLog.getField(MessageLogModel.MESSAGE_HEADER_PROPERTIES)).setProperties(propMessageHeader);
                    Map<String,Object> propHeaderTransport = ((TrxMessageHeader)trxMessage.getMessageHeader()).getMessageTransportMap();
                    if (propHeaderTransport != null)
                        ((PropertiesField)recMessageLog.getField(MessageLogModel.MESSAGE_TRANSPORT_PROPERTIES)).setProperties(propHeaderTransport);
                }
                String strMessage = trxMessage.getXML(false);
                recMessageLog.getField(MessageLogModel.XML_MESSAGE_DATA).setString(strMessage);
                if (trxMessage.getMessageDataDesc(null) != null)
                    recMessageLog.getField(MessageLogModel.MESSAGE_DATA_CLASS_NAME).setString(trxMessage.getMessageDataDesc(null).getClass().getName());
                if (trxMessage.getExternalMessage() != null)
                    recMessageLog.getField(MessageLogModel.EXTERNAL_MESSAGE_CLASS_NAME).setString(trxMessage.getExternalMessage().getClass().getName());
            }
        if (objNativeMessage != null)
            recMessageLog.getField(MessageLogModel.MESSAGE_DATA).setString(objNativeMessage.toString());
        if (MessageStatusModel.ERROR.equals(strMessageStatus))
            recMessageLog.getField(MessageLogModel.ERROR_TEXT).setString(strMessageDescription);
        if (strTrxID == null)
        {
            recMessageLog.add();
            strTrxID = recMessageLog.getLastModified(DBConstants.BOOKMARK_HANDLE).toString();
        }
        else
            recMessageLog.set();
    } catch (DBException ex) {
        ex.printStackTrace();
        strTrxID = null;
    } finally {
        // No, leave it here!
        // recMessageLog.free();
    }
    if ((MessageStatusModel.TRX_ID_HOLD.equals(strMessageStatus)) || (strTrxIDIn == null))
        if (trxMessage != null)
            if (trxMessage.getMessageHeader() instanceof TrxMessageHeader)
                ((TrxMessageHeader)trxMessage.getMessageHeader()).put(TrxMessageHeader.LOG_TRX_ID, strTrxID);
    return strTrxID;
}
python
def digest_auth(realm, auth_func):
    """A decorator used to protect methods with HTTP Digest authentication.
    """
    def digest_auth_decorator(func):
        def func_replacement(self, *args, **kwargs):
            # 'self' here is the RequestHandler object, which is inheriting
            # from DigestAuthMixin to get 'get_authenticated_user'
            if self.get_authenticated_user(auth_func, realm):
                return func(self, *args, **kwargs)
        return func_replacement
    return digest_auth_decorator
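A minimal usage sketch, assuming a Tornado handler that mixes in a `DigestAuthMixin` supplying `get_authenticated_user`; the realm string, handler class, and credential lookup below are illustrative assumptions, not part of the original snippet.

# Hypothetical usage: DigestAuthMixin, MainHandler, and the password table
# are assumptions for illustration only.
import tornado.web

def get_credentials(username):
    # Return the stored secret for this user, or None if unknown.
    passwords = {'alice': 'secret'}
    return passwords.get(username)

class MainHandler(DigestAuthMixin, tornado.web.RequestHandler):
    @digest_auth('Protected Area', get_credentials)
    def get(self):
        self.write('hello, authenticated user')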
python
def _write_udf_descs(self, descs, outfp, progress):
    # type: (PyCdlib._UDFDescriptors, BinaryIO, PyCdlib._Progress) -> None
    '''
    An internal method to write out a UDF Descriptor sequence.

    Parameters:
     descs - The UDF Descriptors object to write out.
     outfp - The output file descriptor to use for writing.
     progress - The _Progress object to use for updating progress.
    Returns:
     Nothing.
    '''
    log_block_size = self.pvd.logical_block_size()

    # Every descriptor is written the same way: seek to its extent,
    # serialize it, and update the progress counter.
    for desc in (descs.pvd, descs.impl_use, descs.partition,
                 descs.logical_volume, descs.unallocated_space,
                 descs.terminator):
        outfp.seek(desc.extent_location() * log_block_size)
        rec = desc.record()
        self._outfp_write_with_check(outfp, rec)
        progress.call(len(rec))
java
public Collection values() {
    if (mValues == null) {
        mValues = new AbstractCollection() {
            public Iterator iterator() {
                return getHashIterator(IdentityMap.VALUES);
            }

            public int size() {
                return mCount;
            }

            public boolean contains(Object o) {
                return containsValue(o);
            }

            public void clear() {
                SoftHashMap.this.clear();
            }

            public String toString() {
                return IdentityMap.toString(this);
            }
        };
    }
    return mValues;
}
java
private static GradientDrawable getStandardBackground(int color) {
    final GradientDrawable gradientDrawable = new GradientDrawable();
    gradientDrawable.setCornerRadius(BackgroundUtils.convertToDIP(4));
    gradientDrawable.setColor(color);
    return gradientDrawable;
}
java
public static String escapeCssStringMinimal(final String text) {
    return escapeCssString(text,
            CssStringEscapeType.BACKSLASH_ESCAPES_DEFAULT_TO_COMPACT_HEXA,
            CssStringEscapeLevel.LEVEL_1_BASIC_ESCAPE_SET);
}
python
def filter_image(im_name, out_base, step_size=None, box_size=None, twopass=False, cores=None,
                 mask=True, compressed=False, nslice=None):
    """
    Create a background and noise image from an input image.
    Resulting images are written to `{out_base}_bkg.fits` and `{out_base}_rms.fits`.

    Parameters
    ----------
    im_name : str or HDUList
        Image to filter. Either a string filename or an astropy.io.fits.HDUList.
    out_base : str
        The output filename base. Will be modified to make _bkg and _rms files.
    step_size : (int,int)
        Tuple of the x,y step size in pixels.
    box_size : (int,int)
        The size of the box in pixels.
    twopass : bool
        Perform a second pass calculation to ensure that the noise is not
        contaminated by the background. Default = False.
    cores : int
        Number of CPU cores to use. Default = all available.
    nslice : int
        The image will be divided into this many horizontal stripes for
        processing. Default = None = equal to cores.
    mask : bool
        Mask the output array to contain np.nan wherever the input array is
        nan or not finite. Default = True.
    compressed : bool
        Return a compressed version of the background/noise images.
        Default = False.

    Returns
    -------
    None
    """
    header = fits.getheader(im_name)
    shape = (header['NAXIS2'], header['NAXIS1'])

    if step_size is None:
        if 'BMAJ' in header and 'BMIN' in header:
            beam_size = np.sqrt(abs(header['BMAJ'] * header['BMIN']))
            if 'CDELT1' in header:
                pix_scale = np.sqrt(abs(header['CDELT1'] * header['CDELT2']))
            elif 'CD1_1' in header:
                pix_scale = np.sqrt(abs(header['CD1_1'] * header['CD2_2']))
                if 'CD1_2' in header and 'CD2_1' in header:
                    if header['CD1_2'] != 0 or header['CD2_1'] != 0:
                        logging.warning("CD1_2 and/or CD2_1 are non-zero and I don't know what to do with them")
                        logging.warning("Ignoring them")
            else:
                logging.warning("Cannot determine pixel scale, assuming 4 pixels per beam")
                pix_scale = beam_size / 4.
            # default to 4x the synthesized beam width
            step_size = int(np.ceil(4 * beam_size / pix_scale))
        else:
            logging.info("BMAJ and/or BMIN not in fits header.")
            logging.info("Assuming 4 pix/beam, so we have step_size = 16 pixels")
            step_size = 16
        step_size = (step_size, step_size)

    if box_size is None:
        # default to 6x the step size so we have ~ 30 beams
        box_size = (step_size[0] * 6, step_size[1] * 6)

    if compressed:
        if not step_size[0] == step_size[1]:
            step_size = (min(step_size), min(step_size))
            logging.info("Changing grid to be {0} so we can compress the output".format(step_size))

    logging.info("using grid_size {0}, box_size {1}".format(step_size, box_size))
    logging.info("on data shape {0}".format(shape))
    bkg, rms = filter_mc_sharemem(im_name, step_size=step_size, box_size=box_size,
                                  cores=cores, shape=shape, nslice=nslice, domask=mask)
    logging.info("done")

    bkg_out = '_'.join([os.path.expanduser(out_base), 'bkg.fits'])
    rms_out = '_'.join([os.path.expanduser(out_base), 'rms.fits'])

    # add a comment to the fits header
    header['HISTORY'] = 'BANE {0}-({1})'.format(__version__, __date__)

    # compress
    if compressed:
        hdu = fits.PrimaryHDU(bkg)
        hdu.header = copy.deepcopy(header)
        hdulist = fits.HDUList([hdu])
        compress(hdulist, step_size[0], bkg_out)
        hdulist[0].header = copy.deepcopy(header)
        hdulist[0].data = rms
        compress(hdulist, step_size[0], rms_out)
        return

    write_fits(bkg, header, bkg_out)
    write_fits(rms, header, rms_out)
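A short usage sketch under the assumption that this function is importable from the surrounding BANE module; the input filename is a placeholder.

# Hypothetical invocation: 'image.fits' is a placeholder input file and the
# enclosing module is assumed to be on the import path.
filter_image('image.fits', out_base='image',
             twopass=True,  # second pass decouples noise from background
             cores=4)       # limit to four worker processes
# Writes image_bkg.fits and image_rms.fits alongside the input image.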
java
@SuppressWarnings("unchecked")
public <V extends View> V findView(View containerView, int id) {
    return (V)containerView.findViewById(id);
}
java
public void removeOutputPlugin(OutputPluginModel outputPlugin) {
    Future future = futureHashMap.remove(outputPlugin.getID());
    if (future != null) {
        future.cancel(true);
    }
    outputPlugins.remove(outputPlugin);
}
java
public @Nullable Resource getResource(@NotNull Resource baseResource) {
    return getResource((Predicate<Resource>)null, baseResource);
}
python
def from_current(cls):
    """Create an application client from within a running container.

    Useful for connecting to the application master from a running
    container in an application.
    """
    if properties.application_id is None:
        raise context.ValueError("Not running inside a container")
    return cls(properties.appmaster_address,
               properties.application_id,
               security=Security.from_default())
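A usage sketch, assuming this classmethod belongs to skein's `ApplicationClient` and that the code runs inside a container launched by the application (so the skein properties are populated).

# Hypothetical usage inside a running YARN container.
from skein import ApplicationClient

app = ApplicationClient.from_current()
# 'app' can now talk to the application master at its advertised address.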
python
def _parse_entry_record(self, lines):
    """Parse a single entry record from a list of lines."""
    dn = None
    entry = OrderedDict()

    for line in lines:
        attr_type, attr_value = self._parse_attr(line)
        if attr_type == 'dn':
            self._check_dn(dn, attr_value)
            dn = attr_value
        elif attr_type == 'version' and dn is None:
            pass  # version = 1
        else:
            if dn is None:
                self._error('First line of record does not start '
                            'with "dn:": %s' % attr_type)
            if attr_value is not None and \
                    attr_type.lower() not in self._ignored_attr_types:
                if attr_type in entry:
                    entry[attr_type].append(attr_value)
                else:
                    entry[attr_type] = [attr_value]

    return dn, entry
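A sketch of the LDIF record shape this method consumes; `parser` stands for a hypothetical instance of the enclosing parser class, and the entry data is illustrative.

# Hypothetical driver: the enclosing class and its _parse_attr helper are
# assumed; only the input/output shape is shown.
lines = [
    'dn: cn=alice,dc=example,dc=com',
    'objectClass: person',
    'cn: alice',
]
dn, entry = parser._parse_entry_record(lines)
# dn    -> 'cn=alice,dc=example,dc=com'
# entry -> OrderedDict([('objectClass', ['person']), ('cn', ['alice'])])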
python
def Morrison(Re):
    r'''Calculates drag coefficient of a smooth sphere using the method in
    [1]_ as described in [2]_.

    .. math::
        C_D = \frac{24}{Re} + \frac{2.6Re/5}{1 + \left(\frac{Re}{5}\right)^{1.52}}
        + \frac{0.411\left(\frac{Re}{263000}\right)^{-7.94}}{1
        + \left(\frac{Re}{263000}\right)^{-8}} + \frac{Re^{0.8}}{461000}

    Parameters
    ----------
    Re : float
        Particle Reynolds number of the sphere using the surrounding fluid
        density and viscosity, [-]

    Returns
    -------
    Cd : float
        Drag coefficient [-]

    Notes
    -----
    Range is Re <= 1E6.

    Examples
    --------
    >>> Morrison(200.)
    0.767731559965325

    References
    ----------
    .. [1] Morrison, Faith A. An Introduction to Fluid Mechanics. Cambridge
       University Press, 2013.
    .. [2] Barati, Reza, Seyed Ali Akbar Salehi Neyshabouri, and Goodarz
       Ahmadi. "Development of Empirical Models with High Accuracy for
       Estimation of Drag Coefficient of Flow around a Smooth Sphere: An
       Evolutionary Approach." Powder Technology 257 (May 2014): 11-19.
       doi:10.1016/j.powtec.2014.02.045.
    '''
    Cd = (24./Re + 2.6*Re/5./(1 + (Re/5.)**1.52)
          + 0.411*(Re/263000.)**-7.94/(1 + (Re/263000.)**-8)
          + Re**0.8/461000.)
    return Cd
java
public static Metadata from(UICommandMetadata origin, Class<? extends UICommand> type) {
    Assert.notNull(origin, "Parent UICommand must not be null.");
    Assert.notNull(type, "UICommand type must not be null.");

    Metadata metadata = new Metadata(type);
    metadata.docLocation(origin.getDocLocation())
            .name(origin.getName())
            .description(origin.getDescription())
            .category(origin.getCategory());
    return metadata;
}
java
public static BufferedImage getResized(final BufferedImage originalImage, final Method scalingMethod,
        final Mode resizeMode, final String formatName, final int targetWidth, final int targetHeight)
        throws IOException {
    return read(resize(originalImage, scalingMethod, resizeMode, formatName, targetWidth, targetHeight));
}
python
def verifyChainFromCAPath(self, capath, untrusted_file=None):
    """
    Does the same job as .verifyChainFromCAFile() but using the list
    of anchors in capath directory. The directory should (only) contain
    certificates files in PEM format. As for .verifyChainFromCAFile(),
    a list of untrusted certificates can be passed as a file
    (concatenation of the certificates in PEM format).
    """
    try:
        anchors = []
        for cafile in os.listdir(capath):
            anchors.append(Cert(open(os.path.join(capath, cafile), "rb").read()))  # noqa: E501
    except Exception:
        raise Exception("capath provided is not a valid cert path")

    untrusted = None
    if untrusted_file:
        try:
            f = open(untrusted_file, "rb")
            untrusted_certs = f.read()
            f.close()
        except Exception:
            raise Exception("Could not read from untrusted_file")
        untrusted = [Cert(c) for c in split_pem(untrusted_certs)]

    return self.verifyChain(anchors, untrusted)
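A usage sketch, assuming `Cert` is the scapy-style certificate class this method is defined on; the file paths are placeholders.

# Hypothetical usage: 'server.pem' and '/etc/ssl/certs' are placeholders,
# and Cert is assumed to be the class providing verifyChainFromCAPath.
with open('server.pem', 'rb') as f:
    cert = Cert(f.read())
chain = cert.verifyChainFromCAPath('/etc/ssl/certs',
                                   untrusted_file='intermediates.pem')
if chain:
    print('certificate chains back to a trusted anchor')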
java
public static final void setActiveSession(Session session) {
    synchronized (Session.STATIC_LOCK) {
        if (session != Session.activeSession) {
            Session oldSession = Session.activeSession;

            if (oldSession != null) {
                oldSession.close();
            }

            Session.activeSession = session;

            if (oldSession != null) {
                postActiveSessionAction(Session.ACTION_ACTIVE_SESSION_UNSET);
            }

            if (session != null) {
                postActiveSessionAction(Session.ACTION_ACTIVE_SESSION_SET);

                if (session.isOpened()) {
                    postActiveSessionAction(Session.ACTION_ACTIVE_SESSION_OPENED);
                }
            }
        }
    }
}
python
def reassign_label(self, label, new_label, relabel=False):
    """
    Reassign a label number to a new number.

    If ``new_label`` is already present in the segmentation image, then
    it will be combined with the input ``label`` number.

    Parameters
    ----------
    label : int
        The label number to reassign.

    new_label : int
        The newly assigned label number.

    relabel : bool, optional
        If `True`, then the segmentation image will be relabeled such
        that the labels are in consecutive order starting from 1.

    Examples
    --------
    >>> from photutils import SegmentationImage
    >>> segm = SegmentationImage([[1, 1, 0, 0, 4, 4],
    ...                           [0, 0, 0, 0, 0, 4],
    ...                           [0, 0, 3, 3, 0, 0],
    ...                           [7, 0, 0, 0, 0, 5],
    ...                           [7, 7, 0, 5, 5, 5],
    ...                           [7, 7, 0, 0, 5, 5]])
    >>> segm.reassign_label(label=1, new_label=2)
    >>> segm.data
    array([[2, 2, 0, 0, 4, 4],
           [0, 0, 0, 0, 0, 4],
           [0, 0, 3, 3, 0, 0],
           [7, 0, 0, 0, 0, 5],
           [7, 7, 0, 5, 5, 5],
           [7, 7, 0, 0, 5, 5]])

    >>> segm = SegmentationImage([[1, 1, 0, 0, 4, 4],
    ...                           [0, 0, 0, 0, 0, 4],
    ...                           [0, 0, 3, 3, 0, 0],
    ...                           [7, 0, 0, 0, 0, 5],
    ...                           [7, 7, 0, 5, 5, 5],
    ...                           [7, 7, 0, 0, 5, 5]])
    >>> segm.reassign_label(label=1, new_label=4)
    >>> segm.data
    array([[4, 4, 0, 0, 4, 4],
           [0, 0, 0, 0, 0, 4],
           [0, 0, 3, 3, 0, 0],
           [7, 0, 0, 0, 0, 5],
           [7, 7, 0, 5, 5, 5],
           [7, 7, 0, 0, 5, 5]])

    >>> segm = SegmentationImage([[1, 1, 0, 0, 4, 4],
    ...                           [0, 0, 0, 0, 0, 4],
    ...                           [0, 0, 3, 3, 0, 0],
    ...                           [7, 0, 0, 0, 0, 5],
    ...                           [7, 7, 0, 5, 5, 5],
    ...                           [7, 7, 0, 0, 5, 5]])
    >>> segm.reassign_label(label=1, new_label=4, relabel=True)
    >>> segm.data
    array([[2, 2, 0, 0, 2, 2],
           [0, 0, 0, 0, 0, 2],
           [0, 0, 1, 1, 0, 0],
           [4, 0, 0, 0, 0, 3],
           [4, 4, 0, 3, 3, 3],
           [4, 4, 0, 0, 3, 3]])
    """
    self.reassign_labels(label, new_label, relabel=relabel)
java
private static boolean load(String name, boolean mustSucceed, boolean useJavaLib) {
    if (s_loadedLibs.contains(name)) {
        return true;
    }
    if (! VoltDB.getLoadLibVOLTDB()) {
        return false;
    }
    test64bit();

    StringBuilder msgBuilder = new StringBuilder("Loading VoltDB native library ");
    String fullLibName = name;
    try {
        String versionString = VoltDB.instance().getEELibraryVersionString();
        // This fallback is for test code only.
        if (versionString == null) {
            versionString = VoltDB.instance().getVersionString();
        }
        assert(versionString != null);
        fullLibName = name + "-" + versionString;
        msgBuilder.append(fullLibName);

        File libFile = null;
        if (useJavaLib) {
            msgBuilder.append(" from the system library location. ");
        } else {
            libFile = getNativeLibraryFile(fullLibName);
            msgBuilder.append(" from file ");
            msgBuilder.append(libFile.getAbsolutePath());
            msgBuilder.append(". ");
        }
        msgBuilder.append("A confirmation message will follow if the loading is successful.");
        s_hostLog.info(msgBuilder.toString());

        if (useJavaLib) {
            System.loadLibrary(fullLibName);
        } else {
            System.load(libFile.getAbsolutePath());
        }
        s_loadedLibs.add(name);
        s_hostLog.info("Successfully loaded VoltDB native library " + fullLibName + ".");
        return true;
    } catch (Throwable e) {
        if (s_hostLog.isDebugEnabled()) {
            s_hostLog.debug("Error loading VoltDB JNI shared library", e);
        }
        if (useJavaLib) {
            s_hostLog.info("Retry loading from file.");
            return load(name, mustSucceed, false);
        }
        if (mustSucceed) {
            msgBuilder.setLength(0);
            msgBuilder.append("Failed to load shared library ").append(fullLibName).append(": ");
            msgBuilder.append(e.getMessage()).append('\n');
            msgBuilder.append("Library path: ").append(System.getProperty("java.library.path")).append('\n');
            msgBuilder.append("The library may have failed to load because it cannot be found in your ");
            msgBuilder.append("load library path, or because it is not compatible with the current platform.\n");
            msgBuilder.append("VoltDB provides builds on our website for 64-bit OS X systems >= 10.6, ");
            msgBuilder.append("and 64-bit Linux systems with kernels >= 2.6.18.");
            if (e instanceof UnsatisfiedLinkError) {
                msgBuilder.append("\nOr the library may have failed to load because java.io.tmpdir should be set ");
                msgBuilder.append("to a different directory. Use VOLTDB_OPTS='-Djava.io.tmpdir=<dirpath>' to set it.");
            }
            VoltDB.crashLocalVoltDB(msgBuilder.toString(), false, null);
        } else {
            s_hostLog.info("Failed to load shared library " + fullLibName
                    + "\nLibrary path: " + System.getProperty("java.library.path"));
        }
        return false;
    }
}
java
protected HashMap<String, INode> createHash(IContext context) {
    HashMap<String, INode> result = new HashMap<String, INode>();

    int nodeCount = 0;
    for (INode node : context.getNodesList()) {
        result.put(getNodePathToRoot(node), node);
        nodeCount++;
    }

    if (log.isEnabledFor(Level.INFO)) {
        log.info("Created hash for " + nodeCount + " nodes...");
    }
    return result;
}
python
def read_json(fh, byteorder, dtype, count, offsetsize):
    """Read JSON tag data from file and return as object."""
    data = fh.read(count)
    try:
        return json.loads(unicode(stripnull(data), 'utf-8'))
    except ValueError:
        log.warning('read_json: invalid JSON')
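A small invocation sketch with an in-memory buffer standing in for the open TIFF file handle; it assumes the module's `stripnull`/`unicode` helpers are in scope, and the payload is illustrative.

import io

# Hypothetical call: byteorder, dtype, and offsetsize are unused by this
# particular tag reader, so placeholder values are passed.
payload = b'{"shape": [2, 3]}\x00'
obj = read_json(io.BytesIO(payload), '<', None, len(payload), 8)
# obj -> {'shape': [2, 3]}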
python
def draw_header(self, stream, header):
    """Draw header with underline"""
    stream.writeln('=' * (len(header) + 4))
    stream.writeln('| ' + header + ' |')
    stream.writeln('=' * (len(header) + 4))
    stream.writeln()
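A tiny output sketch: `reporter` stands for a hypothetical instance of the class defining `draw_header`, and the stream stub only needs the `writeln` method the snippet itself uses.

# Minimal stand-in for the result stream (assumption for illustration).
class EchoStream:
    def writeln(self, text=''):
        print(text)

reporter.draw_header(EchoStream(), 'Results')
# ===========
# | Results |
# ===========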