Dataset schema:
  language: string (2 values: python, java)
  func_code_string: string (length 63 to 466k characters)
python
def find_sdk_dir(self):
    """Try to find the MS SDK from the registry.

    Return None if failed or the directory does not exist.
    """
    if not SCons.Util.can_read_reg:
        debug('find_sdk_dir(): can not read registry')
        return None
    hkey = self.HKEY_FMT % self.hkey_data
    debug('find_sdk_dir(): checking registry:{}'.format(hkey))
    try:
        sdk_dir = common.read_reg(hkey)
    except SCons.Util.WinError as e:
        debug('find_sdk_dir(): no SDK registry key {}'.format(repr(hkey)))
        return None
    debug('find_sdk_dir(): Trying SDK Dir: {}'.format(sdk_dir))
    if not os.path.exists(sdk_dir):
        debug('find_sdk_dir(): {} not on file system'.format(sdk_dir))
        return None
    ftc = os.path.join(sdk_dir, self.sanity_check_file)
    if not os.path.exists(ftc):
        debug("find_sdk_dir(): sanity check {} not found".format(ftc))
        return None
    return sdk_dir
java
public String addMapping(String prefix, String uri) {
    String[] strs = mNsStrings;
    int phash = prefix.hashCode();

    for (int ix = mScopeStart, end = mScopeEnd; ix < end; ix += 2) {
        String thisP = strs[ix];
        if (thisP == prefix ||
            (thisP.hashCode() == phash && thisP.equals(prefix))) {
            // Overriding an existing mapping
            String old = strs[ix+1];
            strs[ix+1] = uri;
            return old;
        }
    }

    // no previous binding, let's just add it at the end
    if (mScopeEnd >= strs.length) {
        // let's just double the array sizes...
        strs = DataUtil.growArrayBy(strs, strs.length);
        mNsStrings = strs;
    }
    strs[mScopeEnd++] = prefix;
    strs[mScopeEnd++] = uri;
    return null;
}
java
private ScoreWeight getDashboardScoreFromWidgets(List<ScoreWeight> widgetScores) {
    if (null == widgetScores) {
        return getDashboardScore(new ScoreTypeValue(Constants.ZERO_SCORE), widgetScores);
    }
    try {
        // Calculate score
        return getDashboardScore(
            ScoreCalculationUtils.calculateComponentScoreTypeValue(widgetScores, PropagateType.dashboard),
            widgetScores);
    } catch (PropagateScoreException ex) {
        ScoreWeight scoreDashboard = new ScoreWeight();
        scoreDashboard.setScore(ex.getScore());
        scoreDashboard.setMessage(ex.getMessage());
        scoreDashboard.setState(ex.getState());
        scoreDashboard.setChildren(widgetScores);
        return scoreDashboard;
    }
}
java
public static String getMimeTypeByPath(String suffix) {
    while (suffix.startsWith(".") && suffix.length() > 0) {
        suffix = suffix.substring(1);
    }
    String mimeType = null;
    while ((mimeType = findInMap(suffix)) == null) {
        int pos = suffix.indexOf('.');
        if (pos == -1) {
            break;
        }
        suffix = suffix.substring(pos + 1);
    }
    return mimeType;
}
python
@contextmanager  # added: the docstring's `with privkey.unlock(...)` usage requires
                 # contextlib.contextmanager on this generator; the decorator appears
                 # to have been stripped during extraction
def unlock(self, passphrase):
    """
    Context manager method for unlocking passphrase-protected private keys.
    Has no effect if the key is not both private and passphrase-protected.

    When the context managed block is exited, the unprotected private key
    material is removed.

    Example::

        privkey = PGPKey()
        privkey.parse(keytext)

        assert privkey.is_protected
        assert privkey.is_unlocked is False
        # privkey.sign("some text") <- this would raise an exception

        with privkey.unlock("TheCorrectPassphrase"):
            # privkey is now unlocked
            assert privkey.is_unlocked
            # so you can do things with it
            sig = privkey.sign("some text")

        # privkey is no longer unlocked
        assert privkey.is_unlocked is False

    Emits a :py:obj:`~warnings.UserWarning` if the key is public or not
    passphrase protected.

    :param str passphrase: The passphrase to be used to unlock this key.
    :raises: :py:exc:`~pgpy.errors.PGPDecryptionError` if the passphrase is incorrect
    """
    if self.is_public:
        # we can't unprotect public keys because only private key material is ever protected
        warnings.warn("Public keys cannot be passphrase-protected", stacklevel=3)
        yield self
        return

    if not self.is_protected:
        # we can't unprotect private keys that are not protected, because there is no ciphertext to decrypt
        warnings.warn("This key is not protected with a passphrase", stacklevel=3)
        yield self
        return

    try:
        for sk in itertools.chain([self], self.subkeys.values()):
            sk._key.unprotect(passphrase)
        del passphrase
        yield self
    finally:
        # clean up here by deleting the previously decrypted secret key material
        for sk in itertools.chain([self], self.subkeys.values()):
            sk._key.keymaterial.clear()
java
public static systemdatasource[] get(nitro_service service, systemdatasource_args args) throws Exception {
    systemdatasource obj = new systemdatasource();
    options option = new options();
    option.set_args(nitro_util.object_to_string_withoutquotes(args));
    systemdatasource[] response = (systemdatasource[]) obj.get_resources(service, option);
    return response;
}
java
public static base_responses unset(nitro_service client, String variable[], String args[]) throws Exception {
    base_responses result = null;
    if (variable != null && variable.length > 0) {
        filterhtmlinjectionvariable unsetresources[] = new filterhtmlinjectionvariable[variable.length];
        for (int i = 0; i < variable.length; i++) {
            unsetresources[i] = new filterhtmlinjectionvariable();
            unsetresources[i].variable = variable[i];
        }
        result = unset_bulk_request(client, unsetresources, args);
    }
    return result;
}
python
def xstep(self):
    r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{x}`."""

    b = self.AHSf + self.rho*np.sum(
        np.conj(self.Gf)*sl.rfftn(self.Y-self.U, axes=self.axes),
        axis=self.Y.ndim-1)
    self.Xf = b / (self.AHAf + self.rho*self.GHGf)
    self.X = sl.irfftn(self.Xf, self.axsz, axes=self.axes)

    if self.opt['LinSolveCheck']:
        ax = (self.AHAf + self.rho*self.GHGf)*self.Xf
        self.xrrs = sl.rrs(ax, b)
    else:
        self.xrrs = None
python
def to_dict(self):
    """Return description of self in dict format.

    This is useful for serializing to something like json later.
    """
    jdict = {
        'user': self.user,
        'summary': self.summary,
        'body': self.body,
        'markup': self.markup,
        'url': self.url,
        'timestamp': self.timestamp
    }
    return jdict
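A minimal usage sketch (the Comment class and its attribute values are assumptions, not from the source; any object defining these six attributes would serialize the same way):

class Comment:
    def __init__(self):
        self.user = 'alice'
        self.summary = 'first post'
        self.body = 'Hello, world.'
        self.markup = 'markdown'
        self.url = 'https://example.com/comments/1'
        self.timestamp = 1700000000
    to_dict = to_dict  # reuse the function above as a method

import json
print(json.dumps(Comment().to_dict()))  # every value is JSON-serializable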
java
public <N> CompletableFutureQueue<T, N> thenCombineAsyncFromArray(Function<R, N>[] nextTransformations) {
    CompletableFutureQueue cfq = this;
    for (Function<R, N> nextTransformation : nextTransformations) {
        cfq = cfq.thenCombineAsync(nextTransformation);
    }
    return cfq;
}
java
@Override
public boolean unregisterService(URI serviceUri) throws SalException {
    // Check the URI is correct and belongs to the server
    if (serviceUri == null ||
            !UriUtil.isResourceLocalToServer(serviceUri, this.getIserveUri())) {
        return false;
    }

    if (this.serviceManager.deleteService(serviceUri)) {
        // delete documents
        Set<URI> docs = this.serviceManager.listDocumentsForService(serviceUri);
        for (URI doc : docs) {
            this.docManager.deleteDocument(doc);
        }
        // Some documents may not have been deleted properly. TODO: handle properly this case
        return true;
    }
    return false;
}
java
private HttpServletRequest fixupRequest(HttpServletRequest request) {
    if (isAzureAppService(request)) {
        final StringBuffer requestUrl = request.getRequestURL();
        if (requestUrl.indexOf("http://") == 0) {
            final String forwardedProto = request.getHeader("x-forwarded-proto");
            if (StringUtils.equals("https", forwardedProto)) {
                // Modify the RequestURL in-place to convert http:// to https://
                // N.B. this modifies the underlying value so in the future
                // request.getRequestURL will continue to return this value
                requestUrl.insert(4, 's');
                assert (requestUrl.indexOf("https://") == 0);
                return new AzureAppServiceHttpsServletRequest(request);
            }
        }
        return request;
    } else {
        // Don't modify the request whatsoever
        return request;
    }
}
python
def filename(value):
    '''
    Remove everything that would affect paths in the filename

    :param value:
    :return:
    '''
    # NOTE: the original pattern '[^a-zA-Z0-9.-_ ]' accidentally made '.-_' a
    # character *range* (ASCII 46-95, which includes '/', ':', ';', '?' and
    # more), so those characters survived the substitution. Moving '-' to the
    # end keeps only the intended letters, digits, dots, underscores, spaces
    # and dashes.
    return re.sub('[^a-zA-Z0-9._ -]', '', os.path.basename(InputSanitizer.trim(value)))
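A quick check of what the corrected pattern keeps and strips (assuming InputSanitizer.trim simply removes surrounding whitespace; outputs are what re.sub produces on the basename):

print(filename(' ../../etc/pass:wd*?.txt '))  # -> 'passwd.txt' (':', '*', '?' removed)
print(filename('report v2_final-draft.pdf'))  # -> 'report v2_final-draft.pdf' (unchanged)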
python
def add_remote_provider(self, provider, client, path):
    """Adds a remote configuration source.

    Remote Providers are searched in the order they are added.
    provider is a string value, "etcd", "consul" and "zookeeper" are
    currently supported.
    client is a client object
    path is the path in the k/v store to retrieve configuration
    To retrieve a config file called myapp.json from /configs/myapp.json
    you should set path to /configs and set config name (set_config_name)
    to "myapp"
    """
    if provider not in constants.SUPPORTED_REMOTE_PROVIDERS:
        raise errors.UnsupportedRemoteProviderError(provider)

    host = ""
    if provider == "etcd":
        host = "{0}://{1}:{2}".format(client.protocol, client.host, client.port)
    elif provider == "consul":
        host = "{0}://{1}:{2}".format(
            client.http.scheme, client.http.host, client.http.port)
    elif provider == "zookeeper":
        host = ",".join(
            str("{0}:{1}".format(h[0], h[1])) for h in client.hosts)

    log.info("Adding {0}:{1} to remote provider list".format(provider, host))
    rp = remote.RemoteProvider(provider, client, path, self)
    if not self._provider_path_exists(rp):
        self._remote_providers.append(rp)
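A hedged usage sketch (the Viper-style config object and the python-etcd client attributes are assumptions inferred from the branches above):

import etcd  # assumed: a python-etcd style client exposing .protocol/.host/.port

client = etcd.Client(host='127.0.0.1', port=2379)
v = Viper()                 # hypothetical config object exposing the method above
v.set_config_name('myapp')  # so /configs/myapp.<ext> can be located
v.add_remote_provider('etcd', client, '/configs')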
python
def get_polygon_constraints(self, range_polygones=range(3, 5), print_out=False):
    """
    :param range_polygones: list of numbers of polygons to test.

    :return A, b: the constraints on the theta-vector of the form A*theta = b
    """
    rows_A = []
    rows_b = []
    for m in range_polygones:
        if print_out:
            print('checking {}-polygons'.format(m))
        polygons = self.get_convex_polygons(m)
        row_A, row_b = self.get_polygon_constraints_m(polygons, print_out)
        rows_A.append(row_A)
        rows_b.append(row_b)
    self.A = np.vstack(rows_A)
    self.b = np.hstack(rows_b)
    return self.A, self.b
java
protected void validate(String operationType) throws Exception {
    super.validate(operationType);
    MPSLong exporter_id_validator = new MPSLong();
    exporter_id_validator.validate(operationType, exporter_id, "\"exporter_id\"");
    MPSLong priority_validator = new MPSLong();
    priority_validator.validate(operationType, priority, "\"priority\"");
    MPSLong timestamp_validator = new MPSLong();
    timestamp_validator.validate(operationType, timestamp, "\"timestamp\"");
    MPSString hostname_validator = new MPSString();
    hostname_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    hostname_validator.validate(operationType, hostname, "\"hostname\"");
    MPSString process_name_validator = new MPSString();
    process_name_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    process_name_validator.validate(operationType, process_name, "\"process_name\"");
    MPSString module_validator = new MPSString();
    module_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    module_validator.validate(operationType, module, "\"module\"");
    MPSString type_validator = new MPSString();
    type_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    type_validator.validate(operationType, type, "\"type\"");
    MPSString syslog_msg_validator = new MPSString();
    syslog_msg_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 2000);
    syslog_msg_validator.validate(operationType, syslog_msg, "\"syslog_msg\"");
    MPSLong sequence_no_validator = new MPSLong();
    sequence_no_validator.validate(operationType, sequence_no, "\"sequence_no\"");
    MPSLong datarecord_rx_time_validator = new MPSLong();
    datarecord_rx_time_validator.validate(operationType, datarecord_rx_time, "\"datarecord_rx_time\"");
    MPSBoolean decoded_validator = new MPSBoolean();
    decoded_validator.validate(operationType, decoded, "\"decoded\"");
    MPSString group_name_validator = new MPSString();
    group_name_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    group_name_validator.validate(operationType, group_name, "\"group_name\"");
    MPSString sessionId_validator = new MPSString();
    sessionId_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    sessionId_validator.validate(operationType, sessionId, "\"sessionId\"");
    MPSString username_validator = new MPSString();
    username_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    username_validator.validate(operationType, username, "\"username\"");
    MPSString clientip_validator = new MPSString();
    clientip_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    clientip_validator.validate(operationType, clientip, "\"clientip\"");
    MPSString vserverip_validator = new MPSString();
    vserverip_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    vserverip_validator.validate(operationType, vserverip, "\"vserverip\"");
    MPSString vserverport_validator = new MPSString();
    vserverport_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    vserverport_validator.validate(operationType, vserverport, "\"vserverport\"");
    MPSString nat_ip_validator = new MPSString();
    nat_ip_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    nat_ip_validator.validate(operationType, nat_ip, "\"nat_ip\"");
    MPSString sourceip_validator = new MPSString();
    sourceip_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    sourceip_validator.validate(operationType, sourceip, "\"sourceip\"");
    MPSString sourceport_validator = new MPSString();
    sourceport_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    sourceport_validator.validate(operationType, sourceport, "\"sourceport\"");
    MPSString destinationip_validator = new MPSString();
    destinationip_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    destinationip_validator.validate(operationType, destinationip, "\"destinationip\"");
    MPSString destinationport_validator = new MPSString();
    destinationport_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    destinationport_validator.validate(operationType, destinationport, "\"destinationport\"");
    MPSString starttime_validator = new MPSString();
    starttime_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    starttime_validator.validate(operationType, starttime, "\"starttime\"");
    MPSString endtime_validator = new MPSString();
    endtime_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    endtime_validator.validate(operationType, endtime, "\"endtime\"");
    MPSString duration_validator = new MPSString();
    duration_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    duration_validator.validate(operationType, duration, "\"duration\"");
    MPSString totalBytesSend_validator = new MPSString();
    totalBytesSend_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalBytesSend_validator.validate(operationType, totalBytesSend, "\"totalBytesSend\"");
    MPSString totalBytesRecv_validator = new MPSString();
    totalBytesRecv_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalBytesRecv_validator.validate(operationType, totalBytesRecv, "\"totalBytesRecv\"");
    MPSString totalCompressedBytesSend_validator = new MPSString();
    totalCompressedBytesSend_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalCompressedBytesSend_validator.validate(operationType, totalCompressedBytesSend, "\"totalCompressedBytesSend\"");
    MPSString totalCompressedBytesRecv_validator = new MPSString();
    totalCompressedBytesRecv_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalCompressedBytesRecv_validator.validate(operationType, totalCompressedBytesRecv, "\"totalCompressedBytesRecv\"");
    MPSString compressionRatioSend_validator = new MPSString();
    compressionRatioSend_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    compressionRatioSend_validator.validate(operationType, compressionRatioSend, "\"compressionRatioSend\"");
    MPSString compressionRatioRecv_validator = new MPSString();
    compressionRatioRecv_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    compressionRatioRecv_validator.validate(operationType, compressionRatioRecv, "\"compressionRatioRecv\"");
    MPSString domainname_validator = new MPSString();
    domainname_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    domainname_validator.validate(operationType, domainname, "\"domainname\"");
    MPSString applicationName_validator = new MPSString();
    applicationName_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    applicationName_validator.validate(operationType, applicationName, "\"applicationName\"");
    MPSString browserType_validator = new MPSString();
    browserType_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    browserType_validator.validate(operationType, browserType, "\"browserType\"");
    MPSString clientType_validator = new MPSString();
    clientType_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    clientType_validator.validate(operationType, clientType, "\"clientType\"");
    MPSString logoutMethod_validator = new MPSString();
    logoutMethod_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    logoutMethod_validator.validate(operationType, logoutMethod, "\"logoutMethod\"");
    MPSString vpnaccess_validator = new MPSString();
    vpnaccess_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    vpnaccess_validator.validate(operationType, vpnaccess, "\"vpnaccess\"");
    MPSString deniedURL_validator = new MPSString();
    deniedURL_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    deniedURL_validator.validate(operationType, deniedURL, "\"deniedURL\"");
    MPSString deniedByPolicy_validator = new MPSString();
    deniedByPolicy_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    deniedByPolicy_validator.validate(operationType, deniedByPolicy, "\"deniedByPolicy\"");
    MPSString remote_host_validator = new MPSString();
    remote_host_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    remote_host_validator.validate(operationType, remote_host, "\"remote_host\"");
    MPSString xdatalen_validator = new MPSString();
    xdatalen_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    xdatalen_validator.validate(operationType, xdatalen, "\"xdatalen\"");
    MPSString xdata_validator = new MPSString();
    xdata_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    xdata_validator.validate(operationType, xdata, "\"xdata\"");
    MPSString last_contact_validator = new MPSString();
    last_contact_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    last_contact_validator.validate(operationType, last_contact, "\"last_contact\"");
    MPSString httpResourceName_validator = new MPSString();
    httpResourceName_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    httpResourceName_validator.validate(operationType, httpResourceName, "\"httpResourceName\"");
    MPSString licenselmt_validator = new MPSString();
    licenselmt_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    licenselmt_validator.validate(operationType, licenselmt, "\"licenselmt\"");
    MPSString connectionId_validator = new MPSString();
    connectionId_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    connectionId_validator.validate(operationType, connectionId, "\"connectionId\"");
    MPSString clisecexp_validator = new MPSString();
    clisecexp_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    clisecexp_validator.validate(operationType, clisecexp, "\"clisecexp\"");
    MPSString eval_value_validator = new MPSString();
    eval_value_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    eval_value_validator.validate(operationType, eval_value, "\"eval_value\"");
    MPSString httpResourcesAccessed_validator = new MPSString();
    httpResourcesAccessed_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    httpResourcesAccessed_validator.validate(operationType, httpResourcesAccessed, "\"httpResourcesAccessed\"");
    MPSString nonhttpServicesAccessed_validator = new MPSString();
    nonhttpServicesAccessed_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    nonhttpServicesAccessed_validator.validate(operationType, nonhttpServicesAccessed, "\"nonhttpServicesAccessed\"");
    MPSString totalTCPconnections_validator = new MPSString();
    totalTCPconnections_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalTCPconnections_validator.validate(operationType, totalTCPconnections, "\"totalTCPconnections\"");
    MPSString totalUDPflows_validator = new MPSString();
    totalUDPflows_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalUDPflows_validator.validate(operationType, totalUDPflows, "\"totalUDPflows\"");
    MPSString totalPoliciesAllowed_validator = new MPSString();
    totalPoliciesAllowed_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalPoliciesAllowed_validator.validate(operationType, totalPoliciesAllowed, "\"totalPoliciesAllowed\"");
    MPSString totalPoliciesDenied_validator = new MPSString();
    totalPoliciesDenied_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 255);
    totalPoliciesDenied_validator.validate(operationType, totalPoliciesDenied, "\"totalPoliciesDenied\"");
    MPSString id_validator = new MPSString();
    id_validator.setConstraintIsReq(MPSConstants.DELETE_CONSTRAINT, true);
    id_validator.setConstraintIsReq(MPSConstants.MODIFY_CONSTRAINT, true);
    id_validator.validate(operationType, id, "\"id\"");
}
java
public <T extends AbstractGoogleJsonClient.Builder> T fromServiceAccount(T builder,
        String serviceAccountId, File p12File) throws GeneralSecurityException, IOException {
    Preconditions.checkNotNull(builder);
    GoogleCredential creds = new GoogleCredential.Builder()
            .setTransport(httpTransport)
            .setJsonFactory(jsonFactory)
            .setServiceAccountId(serviceAccountId)
            .setServiceAccountScopes(scopes)
            .setServiceAccountPrivateKeyFromP12File(p12File)
            .build();
    creds.refreshToken();
    return prepareBuilder(builder, creds, null);
}
python
def show_firmware_version_output_show_firmware_version_build_time(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    show_firmware_version = ET.Element("show_firmware_version")
    config = show_firmware_version
    output = ET.SubElement(show_firmware_version, "output")
    show_firmware_version = ET.SubElement(output, "show-firmware-version")
    build_time = ET.SubElement(show_firmware_version, "build-time")
    build_time.text = kwargs.pop('build_time')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
python
def autoblend(self):
    """
    Ensure that all schemas within the collection
    import each other which has a blending effect.

    @return: self
    @rtype: L{SchemaCollection}
    """
    namespaces = self.namespaces.keys()
    for s in self.children:
        for ns in namespaces:
            tns = s.root.get('targetNamespace')
            if tns == ns:
                continue
            for imp in s.root.getChildren('import'):
                if imp.get('namespace') == ns:
                    continue
            imp = Element('import', ns=Namespace.xsdns)
            imp.set('namespace', ns)
            s.root.append(imp)
    return self
java
private final void pollProgress() {
    final String pollUrl = this.httpPollerProcessor
            .getPollerRequestUrl(pollerData.getJobId());
    final HttpMethod pollerHttpMethod = HttpMethod.GET;
    final String postBodyForPoller = "";
    final ActorRef pollerWorker = getContext().actorOf(
            Props.create(HttpWorker.class, actorMaxOperationTimeoutSec,
                    client,
                    String.format("%s://%s:%d%s",
                            request.getProtocol().toString(), trueTargetNode,
                            request.getPort(), pollUrl),
                    pollerHttpMethod, postBodyForPoller,
                    request.getHttpHeaderMap(),
                    request.getResponseHeaderMeta()));

    logger.info("POLL_REQ_SEND" + pollUrl + " "
            + PcDateUtils.getNowDateTimeStrStandard());
    pollerData
            .getPollingHistoryMap()
            .put("POLL_REQ_SEND_" + PcDateUtils.getNowDateTimeStrConciseNoZone(),
                    pollUrl);
    pollerWorker.tell(RequestWorkerMsgType.PROCESS_REQUEST, getSelf());

    // reschedule cancel
    cancelExistingIfAnyAndScheduleTimeoutCall();
}
python
def to_unicode(text, charset=None):
    """Convert a `str` object to an `unicode` object.

    If `charset` is given, we simply assume that encoding for the text,
    but we'll use the "replace" mode so that the decoding will always
    succeed.
    If `charset` is ''not'' specified, we'll make some guesses, first
    trying the UTF-8 encoding, then trying the locale preferred encoding,
    in "replace" mode. This differs from the `unicode` builtin, which
    by default uses the locale preferred encoding, in 'strict' mode,
    and is therefore prone to raise `UnicodeDecodeError`s.

    Because of the "replace" mode, the original content might be altered.
    If this is not what is wanted, one could map the original byte content
    by using an encoding which maps each byte of the input to an unicode
    character, e.g. by doing `unicode(text, 'iso-8859-1')`.
    """
    if not isinstance(text, str):
        if isinstance(text, Exception):
            # two possibilities for storing unicode strings in exception data:
            try:
                # custom __str__ method on the exception (e.g. PermissionError)
                return unicode(text)
            except UnicodeError:
                # unicode arguments given to the exception (e.g. parse_date)
                return ' '.join([to_unicode(arg) for arg in text.args])
        return unicode(text)
    if charset:
        return unicode(text, charset, 'replace')
    else:
        try:
            return unicode(text, 'utf-8')
        except UnicodeError:
            return unicode(text, locale.getpreferredencoding(), 'replace')
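A Python 2 sketch (the function predates the Python 3 str/bytes split, hence the unicode builtin; the byte strings are made up):

print(to_unicode('caf\xc3\xa9'))            # UTF-8 bytes decode cleanly -> u'caf\xe9'
print(to_unicode('caf\xe9', 'iso-8859-1'))  # explicit charset, 'replace' mode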
python
def cut_from_block(html_message):
    """Cuts div tag which wraps block starting with "From:"."""
    # handle the case when From: block is enclosed in some tag
    block = html_message.xpath(
        ("//*[starts-with(mg:text_content(), 'From:')]|"
         "//*[starts-with(mg:text_content(), 'Date:')]"))

    if block:
        block = block[-1]
        parent_div = None
        while block.getparent() is not None:
            if block.tag == 'div':
                parent_div = block
                break
            block = block.getparent()
        if parent_div is not None:
            maybe_body = parent_div.getparent()
            # In cases where removing this enclosing div will remove all
            # content, we should assume the quote is not enclosed in a tag.
            parent_div_is_all_content = (
                maybe_body is not None and maybe_body.tag == 'body' and
                len(maybe_body.getchildren()) == 1)
            if not parent_div_is_all_content:
                parent = block.getparent()
                next_sibling = block.getnext()
                # remove all tags after found From block
                # (From block and quoted message are in separate divs)
                while next_sibling is not None:
                    parent.remove(block)
                    block = next_sibling
                    next_sibling = block.getnext()
                # remove the last sibling (or the
                # From block if no siblings)
                if block is not None:
                    parent.remove(block)
                return True
            else:
                return False

    # handle the case when From: block goes right after e.g. <hr>
    # and not enclosed in some tag
    block = html_message.xpath(
        ("//*[starts-with(mg:tail(), 'From:')]|"
         "//*[starts-with(mg:tail(), 'Date:')]"))

    if block:
        block = block[0]
        if RE_FWD.match(block.getparent().text or ''):
            return False
        while block.getnext() is not None:
            block.getparent().remove(block.getnext())
        block.getparent().remove(block)
        return True
java
private boolean interruptAndJoinThreads(List<Thread> threads) {
    // interrupt and wait for all ongoing create threads
    for (Thread t : threads) {
        t.interrupt();
    }
    for (Thread t : threads) {
        try {
            t.join();
        } catch (InterruptedException e) {
            DataNode.LOG.warn("interruptOngoingCreates: t=" + t, e);
            return false;
        }
    }
    return true;
}
python
def clear_cache(self):
    """Clear the raw packet cache for the field and all its subfields"""
    self.raw_packet_cache = None
    for _, fval in six.iteritems(self.fields):
        if isinstance(fval, Packet):
            fval.clear_cache()
    self.payload.clear_cache()
java
public static String matchImplicit(Pointer ptr, int len) {
    String tag = "str";
    int cs;
    int act;
    int have = 0;
    int nread = 0;
    int p = ptr.start;
    int pe = p + len;
    int eof = p + len;
    int tokstart = -1;
    int tokend = -1;
    byte[] data = ptr.buffer;
    if (len == 0) {
        data = new byte[]{(byte)'~'};
        p = 0;
        pe = 1;
        eof = 1;
    }

    // line 319 "src/main/org/yecht/ImplicitScanner2.java"
    {
        cs = ImplicitScanner_start;
    }
    // line 104 "src/main/org/yecht/ImplicitScanner2.rl"
    // line 326 "src/main/org/yecht/ImplicitScanner2.java"
    {
        int _klen;
        int _trans = 0;
        int _keys;
        int _goto_targ = 0;

        _goto: while (true) {
            switch (_goto_targ) {
            case 0:
                if (p == pe) { _goto_targ = 4; continue _goto; }
                if (cs == 0) { _goto_targ = 5; continue _goto; }
            case 1:
                _match: do {
                    _keys = _ImplicitScanner_key_offsets[cs];
                    _trans = _ImplicitScanner_index_offsets[cs];
                    _klen = _ImplicitScanner_single_lengths[cs];
                    if (_klen > 0) {
                        int _lower = _keys;
                        int _mid;
                        int _upper = _keys + _klen - 1;
                        while (true) {
                            if (_upper < _lower) break;
                            _mid = _lower + ((_upper - _lower) >> 1);
                            if (data[p] < _ImplicitScanner_trans_keys[_mid])
                                _upper = _mid - 1;
                            else if (data[p] > _ImplicitScanner_trans_keys[_mid])
                                _lower = _mid + 1;
                            else {
                                _trans += (_mid - _keys);
                                break _match;
                            }
                        }
                        _keys += _klen;
                        _trans += _klen;
                    }

                    _klen = _ImplicitScanner_range_lengths[cs];
                    if (_klen > 0) {
                        int _lower = _keys;
                        int _mid;
                        int _upper = _keys + (_klen << 1) - 2;
                        while (true) {
                            if (_upper < _lower) break;
                            _mid = _lower + (((_upper - _lower) >> 1) & ~1);
                            if (data[p] < _ImplicitScanner_trans_keys[_mid])
                                _upper = _mid - 2;
                            else if (data[p] > _ImplicitScanner_trans_keys[_mid + 1])
                                _lower = _mid + 2;
                            else {
                                _trans += ((_mid - _keys) >> 1);
                                break _match;
                            }
                        }
                        _trans += _klen;
                    }
                } while (false);

                _trans = _ImplicitScanner_indicies[_trans];
                cs = _ImplicitScanner_trans_targs[_trans];
            case 2:
                if (cs == 0) { _goto_targ = 5; continue _goto; }
                if (++p != pe) { _goto_targ = 1; continue _goto; }
            case 4:
                if (p == eof) {
                    int __acts = _ImplicitScanner_eof_actions[cs];
                    int __nacts = (int) _ImplicitScanner_actions[__acts++];
                    while (__nacts-- > 0) {
                        switch (_ImplicitScanner_actions[__acts++]) {
                        case 0:  // line 56 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "null"; break;
                        case 1:  // line 57 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "bool#yes"; break;
                        case 2:  // line 58 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "bool#no"; break;
                        case 3:  // line 59 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "int#hex"; break;
                        case 4:  // line 60 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "int#oct"; break;
                        case 5:  // line 61 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "int#base60"; break;
                        case 6:  // line 62 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "int"; break;
                        case 7:  // line 63 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#fix"; break;
                        case 8:  // line 64 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#exp"; break;
                        case 9:  // line 65 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#base60"; break;
                        case 10: // line 66 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#inf"; break;
                        case 11: // line 67 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#neginf"; break;
                        case 12: // line 68 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "float#nan"; break;
                        case 13: // line 69 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "timestamp#ymd"; break;
                        case 14: // line 70 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "timestamp#iso8601"; break;
                        case 15: // line 71 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "timestamp#spaced"; break;
                        case 16: // line 72 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "timestamp"; break;
                        case 17: // line 73 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "default"; break;
                        case 18: // line 74 "src/main/org/yecht/ImplicitScanner2.rl"
                            tag = "merge"; break;
                        // line 489 "src/main/org/yecht/ImplicitScanner2.java"
                        }
                    }
                }
            case 5:
            }
            break;
        }
    }
    // line 106 "src/main/org/yecht/ImplicitScanner2.rl"
    return tag;
}
python
def purge_url(self, host, path):
    """Purge an individual URL."""
    content = self._fetch(path, method="PURGE", headers={"Host": host})
    return FastlyPurge(self, content)
java
public BiDiNavigator<Vertex> getBiDiNavigator() {
    if (CACHING && (cachedNavigator != null))
        return cachedNavigator;
    BiDiNavigator<Vertex> navigator =
        constructBiDiNavigator(RelFacts.<Vertex, Vertex>mapSet());
    if (CACHING) {
        cachedNavigator = navigator;
    }
    return navigator;
}
python
def transform_to_length(nndata, length):
    """
    Given NNData, transforms data to the specified fingerprint length

    Args:
        nndata: (NNData)
        length: (int) desired length of NNData
    """
    if length is None:
        return nndata
    if length:
        for cn in range(length):
            if cn not in nndata.cn_weights:
                nndata.cn_weights[cn] = 0
                nndata.cn_nninfo[cn] = []
    return nndata
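A short sketch of the padding behaviour (NNData here stands for any object carrying cn_weights/cn_nninfo dicts keyed by coordination number; how the instance is obtained is an assumption):

nndata = get_nn_data(structure)  # hypothetical source of an NNData instance
nndata = transform_to_length(nndata, 13)
assert all(cn in nndata.cn_weights for cn in range(13))  # missing CNs padded with 0 / []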
python
def add_to_environment(self, environment):
    """Add the router to the given environment."""
    self._env = environment._env
    self._userdata = ffi.new_handle(self)

    ENVIRONMENT_DATA[self._env].routers[self.name] = self

    lib.EnvAddRouterWithContext(
        self._env, self._name.encode(), self._priority,
        lib.query_function,
        lib.print_function,
        lib.getc_function,
        lib.ungetc_function,
        lib.exit_function,
        self._userdata)
java
public String[] getJustify() {
    String[] valueDestination = new String[justify.size()];
    this.justify.getValue(valueDestination);
    return valueDestination;
}
python
def _impute_one_feature(self,
                        X_filled,
                        mask_missing_values,
                        feat_idx,
                        neighbor_feat_idx,
                        predictor=None,
                        fit_mode=True):
    """Impute a single feature from the others provided.

    This function predicts the missing values of one of the features using
    the current estimates of all the other features. The ``predictor`` must
    support ``return_std=True`` in its ``predict`` method for this function
    to work.

    Parameters
    ----------
    X_filled : ndarray
        Input data with the most recent imputations.

    mask_missing_values : ndarray
        Input data's missing indicator matrix.

    feat_idx : int
        Index of the feature currently being imputed.

    neighbor_feat_idx : ndarray
        Indices of the features to be used in imputing ``feat_idx``.

    predictor : object
        The predictor to use at this step of the round-robin imputation.
        If ``sample_posterior`` is True, the predictor must support
        ``return_std`` in its ``predict`` method.
        If None, it will be cloned from self._predictor.

    fit_mode : boolean, default=True
        Whether to fit and predict with the predictor or just predict.

    Returns
    -------
    X_filled : ndarray
        Input data with ``X_filled[missing_row_mask, feat_idx]`` updated.

    predictor : predictor with sklearn API
        The fitted predictor used to impute
        ``X_filled[missing_row_mask, feat_idx]``.
    """
    # if nothing is missing, just return the default
    # (should not happen at fit time because feat_ids would be excluded)
    missing_row_mask = mask_missing_values[:, feat_idx]
    if not np.any(missing_row_mask):
        return X_filled, predictor

    if predictor is None and fit_mode is False:
        raise ValueError("If fit_mode is False, then an already-fitted "
                         "predictor should be passed in.")

    if predictor is None:
        predictor = clone(self._predictor)

    if fit_mode:
        X_train = safe_indexing(X_filled[:, neighbor_feat_idx],
                                ~missing_row_mask)
        y_train = safe_indexing(X_filled[:, feat_idx],
                                ~missing_row_mask)
        predictor.fit(X_train, y_train)

    # get posterior samples
    X_test = safe_indexing(X_filled[:, neighbor_feat_idx], missing_row_mask)
    if self.sample_posterior:
        mus, sigmas = predictor.predict(X_test, return_std=True)
        good_sigmas = sigmas > 0
        imputed_values = np.zeros(mus.shape, dtype=X_filled.dtype)
        imputed_values[~good_sigmas] = mus[~good_sigmas]
        imputed_values[good_sigmas] = self.random_state_.normal(
            loc=mus[good_sigmas], scale=sigmas[good_sigmas])
    else:
        imputed_values = predictor.predict(X_test)
        # clip the values
        imputed_values = self.clip(imputed_values)

    # update the feature
    X_filled[missing_row_mask, feat_idx] = imputed_values
    return X_filled, predictor
java
public static void context(CharSequence context, int indent) {
    if (Boolean.getBoolean("visibleassertions.silence")) {
        return;
    }

    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < indent; i++) {
        sb.append(" ");
    }
    for (int i = 0; i < 4; i++) {
        sb.append(CONTEXT_MARK);
    }
    sb.append(" ");
    sb.append(context);

    int terminalWidth = terminalWidth();
    sb.append(" ");
    for (int i = sb.length(); i < terminalWidth; i++) {
        sb.append(CONTEXT_MARK);
    }
    System.out.println(dim(sb.toString()));
}
java
public static void include(
        HttpServletRequest request,
        HttpServletResponse response,
        String relativePath,
        JspWriter out,
        boolean flush)
        throws IOException, ServletException {
    include((ServletRequest) request, (ServletResponse) response,
            relativePath, (Writer) out, flush);
}
python
@classmethod  # added: the cls-first signature and the cls(...) call imply a
              # classmethod, presumably stripped during extraction
def read(cls, file):
    """Reads a WebVTT captions file."""
    parser = WebVTTParser().read(file)
    return cls(file=file, captions=parser.captions, styles=parser.styles)
java
public static int getColor(GraphicFactory graphicFactory, String colorString,
        ThemeCallback themeCallback, RenderInstruction origin) {
    if (colorString.isEmpty() || colorString.charAt(0) != '#') {
        throw new IllegalArgumentException(UNSUPPORTED_COLOR_FORMAT + colorString);
    } else if (colorString.length() == 7) {
        return getColor(graphicFactory, colorString, 255, 1, themeCallback, origin);
    } else if (colorString.length() == 9) {
        return getColor(graphicFactory, colorString,
                Integer.parseInt(colorString.substring(1, 3), 16), 3, themeCallback, origin);
    } else {
        throw new IllegalArgumentException(UNSUPPORTED_COLOR_FORMAT + colorString);
    }
}
python
def yearly_plots(
    df,
    variable,
    renormalize                   = True,
    horizontal_axis_labels_days   = False,
    horizontal_axis_labels_months = True,
    plot                          = True,
    scatter                       = False,
    linestyle                     = "-",
    linewidth                     = 1,
    s                             = 1
):
    """
    Create yearly plots of a variable in a DataFrame, optionally renormalized.
    It is assumed that the DataFrame index is datetime.
    """
    if not df.index.dtype in ["datetime64[ns]", "<M8[ns]", ">M8[ns]"]:
        log.error("index is not datetime")
        return False
    years = []
    for group in df.groupby(df.index.year):
        years.append(group[1])
    scaler = MinMaxScaler()
    plt.xlabel("days")
    plt.ylabel(variable)
    for year in years:
        if renormalize:
            values = scaler.fit_transform(year[[variable]])
        else:
            values = year[variable]
        if plot:
            plt.plot(
                year["days_through_year"],
                values,
                linestyle = linestyle,
                linewidth = linewidth,
                label     = year.index.year.values[0]
            )
        if scatter:
            plt.scatter(year["days_through_year"], values, s = s)
    if horizontal_axis_labels_months:
        plt.xticks(
            [15.5, 45, 74.5, 105, 135.5, 166, 196.5, 227.5, 258, 288.5, 319, 349.5],
            ["January", "February", "March", "April", "May", "June",
             "July", "August", "September", "October", "November", "December"]
        )
    plt.legend()
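A usage sketch: the function expects a datetime index plus a days_through_year column (both names taken from the code above; the data itself is made up):

import numpy as np
import pandas as pd

idx = pd.date_range('2015-01-01', '2017-12-31', freq='D')
df = pd.DataFrame({'temperature': np.random.rand(len(idx))}, index=idx)
df['days_through_year'] = df.index.dayofyear
yearly_plots(df, 'temperature')  # one renormalized line per year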
java
public void marshall(BGPPeer bGPPeer, ProtocolMarshaller protocolMarshaller) {
    if (bGPPeer == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(bGPPeer.getBgpPeerId(), BGPPEERID_BINDING);
        protocolMarshaller.marshall(bGPPeer.getAsn(), ASN_BINDING);
        protocolMarshaller.marshall(bGPPeer.getAuthKey(), AUTHKEY_BINDING);
        protocolMarshaller.marshall(bGPPeer.getAddressFamily(), ADDRESSFAMILY_BINDING);
        protocolMarshaller.marshall(bGPPeer.getAmazonAddress(), AMAZONADDRESS_BINDING);
        protocolMarshaller.marshall(bGPPeer.getCustomerAddress(), CUSTOMERADDRESS_BINDING);
        protocolMarshaller.marshall(bGPPeer.getBgpPeerState(), BGPPEERSTATE_BINDING);
        protocolMarshaller.marshall(bGPPeer.getBgpStatus(), BGPSTATUS_BINDING);
        protocolMarshaller.marshall(bGPPeer.getAwsDeviceV2(), AWSDEVICEV2_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def determine_file_type(self, z):
    """Determine file type."""
    content = z.read('[Content_Types].xml')
    with io.BytesIO(content) as b:
        encoding = self._analyze_file(b)
        if encoding is None:
            encoding = 'utf-8'
        b.seek(0)
        text = b.read().decode(encoding)
    soup = bs4.BeautifulSoup(text, 'xml')
    for o in soup.find_all('Override'):
        name = o.attrs.get('PartName')
        for k, v in MIMEMAP.items():
            if name.startswith('/{}/'.format(k)):
                self.type = v
                break
        if self.type:
            break
    self.filepattern = DOC_PARAMS[self.type]['filepattern']
    self.namespaces = DOC_PARAMS[self.type]['namespaces']
    self.captures = sv.compile(DOC_PARAMS[self.type]['captures'],
                               DOC_PARAMS[self.type]['namespaces'])
python
def main(command_line=True, **kwargs):
    """
    NAME
        jr6_txt_magic.py

    DESCRIPTION
        converts JR6 .txt format files to magic_measurements format files

    SYNTAX
        jr6_txt_magic.py [command line options]

    OPTIONS
        -h: prints the help message and quits.
        -f FILE: specify input file, or
        -F FILE: specify output file, default is magic_measurements.txt
        -Fsa: specify er_samples format file for appending, default is new er_samples.txt (Not working yet)
        -spc NUM : specify number of characters to designate a specimen, default = 1
        -loc LOCNAME : specify location/study name
        -A: don't average replicate measurements
        -ncn NCON: specify sample naming convention (6 and 7 not yet implemented)
        -mcd [SO-MAG,SO-SUN,SO-SIGHT...] supply how these samples were oriented
        -v NUM : specify the volume of the sample, default 2.5cm^3.

       Sample naming convention:
        [1] XXXXY: where XXXX is an arbitrary length site designation and Y
            is the single character sample designation.  e.g., TG001a is the
            first sample from site TG001.    [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
        [5] site name same as sample
        [6] site is entered under a separate column NOT CURRENTLY SUPPORTED
        [7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
        NB: all others you will have to customize your self
            or e-mail [email protected] for help.

    INPUT
        JR6 .txt format file
    """
    # initialize some stuff
    noave = 0
    volume = 2.5 * 1e-6  # default volume is 2.5 cm^3 (2.5 * 1e-6 meters^3)
    inst = ""
    samp_con, Z = '1', ""
    missing = 1
    demag = "N"
    er_location_name = "unknown"
    citation = 'This study'
    args = sys.argv
    meth_code = "LP-NO"
    specnum = -1
    MagRecs = []
    version_num = pmag.get_version()
    Samps = []  # keeps track of sample orientations
    user = ""
    mag_file = ""
    dir_path = '.'
    ErSamps = []
    SampOuts = []
    samp_file = 'er_samples.txt'
    meas_file = 'magic_measurements.txt'
    #
    # get command line arguments
    #
    if command_line:
        if '-WD' in sys.argv:
            ind = sys.argv.index('-WD')
            dir_path = sys.argv[ind+1]
        if '-ID' in sys.argv:
            ind = sys.argv.index('-ID')
            input_dir_path = sys.argv[ind+1]
        else:
            input_dir_path = dir_path
        output_dir_path = dir_path
        if "-h" in args:
            print(main.__doc__)
            return False
        if '-F' in args:
            ind = args.index("-F")
            meas_file = args[ind+1]
        if '-Fsa' in args:
            ind = args.index("-Fsa")
            samp_file = args[ind+1]
            #try:
            #    open(samp_file,'r')
            #    ErSamps,file_type=pmag.magic_read(samp_file)
            #    print 'sample information will be appended to ', samp_file
            #except:
            #    print samp_file,' not found: sample information will be stored in new er_samples.txt file'
            #    samp_file = output_dir_path+'/er_samples.txt'
        if '-f' in args:
            ind = args.index("-f")
            mag_file = args[ind+1]
        if "-spc" in args:
            ind = args.index("-spc")
            specnum = int(args[ind+1])
        if "-ncn" in args:
            ind = args.index("-ncn")
            samp_con = sys.argv[ind+1]
        if "-loc" in args:
            ind = args.index("-loc")
            er_location_name = args[ind+1]
        if "-A" in args:
            noave = 1
        if "-mcd" in args:
            ind = args.index("-mcd")
            meth_code = args[ind+1]
        if "-v" in args:
            ind = args.index("-v")
            volume = float(args[ind+1]) * 1e-6
    if not command_line:
        dir_path = kwargs.get('dir_path', '.')
        input_dir_path = kwargs.get('input_dir_path', dir_path)
        output_dir_path = dir_path
        meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
        mag_file = kwargs.get('mag_file')
        samp_file = kwargs.get('samp_file', 'er_samples.txt')
        specnum = kwargs.get('specnum', 1)
        samp_con = kwargs.get('samp_con', '1')
        er_location_name = kwargs.get('er_location_name', '')
        noave = kwargs.get('noave', 0)  # default (0) means DO average
        meth_code = kwargs.get('meth_code', "LP-NO")
        volume = float(kwargs.get('volume', 0))
        if not volume:
            volume = 2.5 * 1e-6  # default volume is a 2.5 cm cube, translated to meters cubed
        else:
            volume *= 1e-6  # convert cm^3 to m^3

    # format variables
    mag_file = input_dir_path + "/" + mag_file
    meas_file = output_dir_path + "/" + meas_file
    samp_file = output_dir_path + "/" + samp_file
    if specnum != 0:
        specnum = -specnum
    if "4" in samp_con:
        if "-" not in samp_con:
            print("option [4] must be in form 4-Z where Z is an integer")
            return False, "option [4] must be in form 4-Z where Z is an integer"
        else:
            Z = samp_con.split("-")[1]
            samp_con = "4"
    if "7" in samp_con:
        if "-" not in samp_con:
            print("option [7] must be in form 7-Z where Z is an integer")
            return False, "option [7] must be in form 7-Z where Z is an integer"
        else:
            Z = samp_con.split("-")[1]
            samp_con = "7"
    ErSampRec, ErSiteRec = {}, {}

    # parse data
    data = open(mag_file, 'r')
    line = data.readline()
    line = data.readline()
    line = data.readline()
    while line != '':
        parsedLine = line.split()
        sampleName = parsedLine[0]
        demagLevel = parsedLine[2]
        date = parsedLine[3]
        line = data.readline()
        line = data.readline()
        line = data.readline()
        line = data.readline()
        parsedLine = line.split()
        specimenAngleDec = parsedLine[1]
        specimenAngleInc = parsedLine[2]
        while parsedLine[0] != 'MEAN':
            line = data.readline()
            parsedLine = line.split()
            if len(parsedLine) == 0:
                parsedLine = ["Hello"]
        Mx = parsedLine[1]
        My = parsedLine[2]
        Mz = parsedLine[3]
        line = data.readline()
        line = data.readline()
        parsedLine = line.split()
        splitExp = parsedLine[2].split('A')
        intensityVolStr = parsedLine[1] + splitExp[0]
        intensityVol = float(intensityVolStr)

        # check and see if Prec is too big and messes with the parsing.
        precisionStr = ''
        if len(parsedLine) == 6:  # normal line
            precisionStr = parsedLine[5][0:-1]
        else:
            precisionStr = parsedLine[4][0:-1]
        precisionPer = float(precisionStr)
        precision = intensityVol * precisionPer / 100

        while parsedLine[0] != 'SPEC.':
            line = data.readline()
            parsedLine = line.split()
            if len(parsedLine) == 0:
                parsedLine = ["Hello"]
        specimenDec = parsedLine[2]
        specimenInc = parsedLine[3]
        line = data.readline()
        line = data.readline()
        parsedLine = line.split()
        geographicDec = parsedLine[1]
        geographicInc = parsedLine[2]

        # Add data to various MagIC data tables.
        er_specimen_name = sampleName
        if specnum != 0:
            er_sample_name = er_specimen_name[:specnum]
        else:
            er_sample_name = er_specimen_name
        if int(samp_con) in [1, 2, 3, 4, 5, 7]:
            er_site_name = pmag.parse_site(er_sample_name, samp_con, Z)
        else:
            print("-W- Using unrecognized sample convention option: ", samp_con)
        # else:
        #     if 'er_site_name' in ErSampRec.keys(): er_site_name = ErSampRec['er_site_name']
        #     if 'er_location_name' in ErSampRec.keys(): er_location_name = ErSampRec['er_location_name']

        # check sample list (SampOuts) to see if sample already exists in list before adding new sample info
        sampleFlag = 0
        for sampRec in SampOuts:
            if sampRec['er_sample_name'] == er_sample_name:
                sampleFlag = 1
                break
        if sampleFlag == 0:
            ErSampRec['er_sample_name'] = er_sample_name
            ErSampRec['sample_azimuth'] = specimenAngleDec
            sample_dip = str(float(specimenAngleInc) - 90.0)  # convert to magic orientation
            ErSampRec['sample_dip'] = sample_dip
            ErSampRec['magic_method_codes'] = meth_code
            ErSampRec['er_location_name'] = er_location_name
            ErSampRec['er_site_name'] = er_site_name
            ErSampRec['er_citation_names'] = 'This study'
            SampOuts.append(ErSampRec.copy())

        MagRec = {}
        MagRec['measurement_description'] = 'Date: ' + date
        MagRec["er_citation_names"] = "This study"
        MagRec['er_location_name'] = er_location_name
        MagRec['er_site_name'] = er_site_name
        MagRec['er_sample_name'] = er_sample_name
        MagRec['magic_software_packages'] = version_num
        MagRec["treatment_temp"] = '%8.3e' % (273)  # room temp in kelvin
        MagRec["measurement_temp"] = '%8.3e' % (273)  # room temp in kelvin
        MagRec["measurement_flag"] = 'g'
        MagRec["measurement_standard"] = 'u'
        MagRec["measurement_number"] = '1'
        MagRec["er_specimen_name"] = er_specimen_name
        MagRec["treatment_ac_field"] = '0'
        if demagLevel == 'NRM':
            meas_type = "LT-NO"
        elif demagLevel[0] == 'A':
            meas_type = "LT-AF-Z"
            treat = float(demagLevel[1:])
            MagRec["treatment_ac_field"] = '%8.3e' % (treat * 1e-3)  # convert from mT to tesla
        elif demagLevel[0] == 'T':
            meas_type = "LT-T-Z"
            treat = float(demagLevel[1:])
            MagRec["treatment_temp"] = '%8.3e' % (treat + 273.)  # temp in kelvin
        else:
            print("measurement type unknown", demagLevel)  # fixed: was the undefined name demag_level
            return False, "measurement type unknown"
        MagRec["measurement_magn_moment"] = str(intensityVol * volume)  # Am^2
        MagRec["measurement_magn_volume"] = intensityVolStr  # A/m
        MagRec["measurement_dec"] = specimenDec
        MagRec["measurement_inc"] = specimenInc
        MagRec['magic_method_codes'] = meas_type
        MagRecs.append(MagRec.copy())

        # read lines till end of record
        line = data.readline()
        line = data.readline()
        line = data.readline()
        line = data.readline()
        line = data.readline()
        # read all the rest of the special characters. Some data files not consistently formatted.
        while (len(line) <= 3 and line != ''):
            line = data.readline()
    # end of data while loop

    MagOuts = pmag.measurements_methods(MagRecs, noave)
    pmag.magic_write(samp_file, SampOuts, 'er_samples')
    print("sample orientations put in ", samp_file)
    pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
    print("results put in ", meas_file)
    return True, meas_file
python
def protocol_str(protocol):
    """Convert internal API protocol to string."""
    if protocol == const.PROTOCOL_MRP:
        return 'MRP'
    if protocol == const.PROTOCOL_DMAP:
        return 'DMAP'
    if protocol == const.PROTOCOL_AIRPLAY:
        return 'AirPlay'
    return 'Unknown'
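A behaviour sketch (const is assumed to be the same constants module the function references; anything unrecognised falls through):

assert protocol_str(const.PROTOCOL_DMAP) == 'DMAP'
assert protocol_str(object()) == 'Unknown'  # unmatched values hit the fallback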
java
public void close() throws IOException, GeneralSecurityException {
    if (mManifest != null) {
        // write the manifest to the jar file
        mOutputJar.putNextEntry(new JarEntry(JarFile.MANIFEST_NAME));
        mManifest.write(mOutputJar);

        // CERT.SF
        Signature signature = Signature.getInstance("SHA1with" + mKey.getAlgorithm());
        signature.initSign(mKey);
        mOutputJar.putNextEntry(new JarEntry("META-INF/CERT.SF"));
        SignatureOutputStream out = new SignatureOutputStream(mOutputJar, signature);
        writeSignatureFile(out);

        // CERT.*
        mOutputJar.putNextEntry(new JarEntry("META-INF/CERT." + mKey.getAlgorithm()));
        writeSignatureBlock(signature, mCertificate, mKey);

        // close out at the end because it can also close mOutputJar.
        // (there's some timing issue here I think, because it's worked before with out
        // being closed after writing CERT.SF).
        out.close();
    }
    mOutputJar.close();
    mOutputJar = null;
}
java
public InlineResponse2001 peekMultiple(String ids) throws ApiException {
    ApiResponse<InlineResponse2001> resp = peekMultipleWithHttpInfo(ids);
    return resp.getData();
}
python
def make_any_items_node(rawtext, app, prefixed_name, obj, parent, modname, options):
    """Render a Python sequence as a comma-separated list, with an "or"
    for the final item.

    :param rawtext: Text being replaced with link node.
    :param app: Sphinx application context
    :param prefixed_name: The dotted Python name for obj.
    :param obj: The Python object to be rendered to text.
    :param parent: The parent Python object of obj.
    :param module: The name of the module containing obj.
    :param options: Options dictionary passed to role func.
    """
    text = list_conjunction(obj, "or")
    node = nodes.Text(text, rawsource=rawtext)
    return node
python
def _cut_selection(self, *event):
    """Cuts the current selection and copies it to the clipboard."""
    if react_to_event(self.view, self.view.editor, event):
        logger.debug("cut selection")
        global_clipboard.cut(self.model.selection)
        return True
java
@Override
public void commit() throws TransactionManagementException {
    logger.entering(CLASSNAME, "commit");
    try {
        userTran.commit();
        logger.log(Level.FINE, "javax.transaction.Status: {0}", userTran.getStatus());
    } catch (SecurityException e) {
        throw new TransactionManagementException(e);
    } catch (IllegalStateException e) {
        throw new TransactionManagementException(e);
    } catch (RollbackException e) {
        throw new TransactionManagementException(e);
    } catch (HeuristicMixedException e) {
        throw new TransactionManagementException(e);
    } catch (HeuristicRollbackException e) {
        throw new TransactionManagementException(e);
    } catch (SystemException e) {
        throw new TransactionManagementException(e);
    }
    logger.exiting(CLASSNAME, "commit");
}
python
def import_and_get_task(task_path):
    """
    Given a modular path to a function, import that module
    and return the function.
    """
    module, function = task_path.rsplit('.', 1)
    app_module = importlib.import_module(module)
    app_function = getattr(app_module, function)
    return app_function
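A usage example with a stdlib dotted path (rsplit keeps dots inside the module name intact, so only the last segment is treated as the attribute):

join = import_and_get_task('os.path.join')
assert join('a', 'b') == 'a/b'  # on POSIX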
python
def absolute(parser, token):
    '''
    Returns a full absolute URL based on the request host.

    This template tag takes exactly the same parameters as url template tag.
    '''
    node = url(parser, token)
    return AbsoluteUrlNode(
        view_name=node.view_name,
        args=node.args,
        kwargs=node.kwargs,
        asvar=node.asvar
    )
java
public void init(Configuration configuration) {
    if (devMode && reload && !listeningToDispatcher) {
        // this is the only way I found to be able to get added to the
        // ConfigurationProvider list: listening to events in Dispatcher
        listeningToDispatcher = true;
        Dispatcher.addDispatcherListener(this);
    }
}
python
def run(job_ini, slowest=False, hc=None, param='', concurrent_tasks=None,
        exports='', loglevel='info', pdb=None):
    """
    Run a calculation bypassing the database layer
    """
    dbserver.ensure_on()
    if param:
        params = oqvalidation.OqParam.check(
            dict(p.split('=', 1) for p in param.split(',')))
    else:
        params = {}
    if slowest:
        prof = cProfile.Profile()
        stmt = ('_run(job_ini, concurrent_tasks, pdb, loglevel, hc, '
                'exports, params)')
        prof.runctx(stmt, globals(), locals())
        pstat = calc_path + '.pstat'
        prof.dump_stats(pstat)
        print('Saved profiling info in %s' % pstat)
        print(get_pstats(pstat, slowest))
    else:
        _run(job_ini, concurrent_tasks, pdb, loglevel, hc, exports, params)
python
def find_relations(chunked):
    """ The input is a list of [token, tag, chunk]-items.
        The output is a list of [token, tag, chunk, relation]-items.
        A noun phrase preceding a verb phrase is perceived as sentence subject.
        A noun phrase following a verb phrase is perceived as sentence object.
    """
    tag = lambda token: token[2].split("-")[-1]  # B-NP => NP
    # Group successive tokens with the same chunk-tag.
    chunks = []
    for token in chunked:
        if len(chunks) == 0 \
        or token[2].startswith("B-") \
        or tag(token) != tag(chunks[-1][-1]):
            chunks.append([])
        chunks[-1].append(token + ["O"])
    # If a VP is preceded by a NP, the NP is tagged as NP-SBJ-(id).
    # If a VP is followed by a NP, the NP is tagged as NP-OBJ-(id).
    # Chunks that are not part of a relation get an O-tag.
    id = 0
    for i, chunk in enumerate(chunks):
        if tag(chunk[-1]) == "VP" and i > 0 and tag(chunks[i-1][-1]) == "NP":
            if chunk[-1][-1] == "O":
                id += 1
            for token in chunk:
                token[-1] = "VP-" + str(id)
            for token in chunks[i-1]:
                token[-1] += "*NP-SBJ-" + str(id)
                token[-1] = token[-1].lstrip("O-*")
        if tag(chunk[-1]) == "VP" and i < len(chunks)-1 and tag(chunks[i+1][-1]) == "NP":
            if chunk[-1][-1] == "O":
                id += 1
            for token in chunk:
                token[-1] = "VP-" + str(id)
            for token in chunks[i+1]:
                token[-1] = "*NP-OBJ-" + str(id)
                token[-1] = token[-1].lstrip("O-*")
    # This is more a proof-of-concept than useful in practice:
    # PP-LOC = be + in|at + the|my
    # PP-DIR = go + to|towards + the|my
    for i, chunk in enumerate(chunks):
        if 0 < i < len(chunks)-1 and len(chunk) == 1 and chunk[-1][-1] == "O":
            t0, t1, t2 = chunks[i-1][-1], chunks[i][0], chunks[i+1][0]  # previous / current / next
            if tag(t1) == "PP" and t2[1] in ("DT", "PR", "PRP$"):
                if t0[0] in BE and t1[0] in ("in", "at"):
                    t1[-1] = "PP-LOC"
                if t0[0] in GO and t1[0] in ("to", "towards"):
                    t1[-1] = "PP-DIR"
    related = []
    [related.extend(chunk) for chunk in chunks]
    return related
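A worked example of the SBJ/OBJ tagging (the input triples are hand-made in the [token, tag, chunk] format the docstring describes):

chunked = [['the', 'DT', 'B-NP'], ['cat', 'NN', 'I-NP'],
           ['saw', 'VBD', 'B-VP'],
           ['the', 'DT', 'B-NP'], ['dog', 'NN', 'I-NP']]
for token in find_relations(chunked):
    print(token)
# the NP before the VP comes out tagged NP-SBJ-1, the VP as VP-1,
# and the NP after it as NP-OBJ-1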
java
protected long getNumConnections(IPeer peer) {
    if (peer == null) {
        return 0;
    }

    IStatistic stats = peer.getStatistic();
    // If no statistics are available, return zero
    if (!stats.isEnabled()) {
        if (logger.isDebugEnabled()) {
            logger.debug("Statistics for peer are disabled. Please enable statistics in client config");
        }
        return 0;
    }

    // Requests per second initiated by Local Peer + Request initiated by Remote peer
    String uri = peer.getUri() == null ? "local" : peer.getUri().toString();
    long requests = getRecord(IStatisticRecord.Counters.AppGenRequestPerSecond.name() + '.' + uri, stats)
            + getRecord(IStatisticRecord.Counters.NetGenRequestPerSecond.name() + '.' + uri, stats);

    // There are likely more requests than responses active
    long connections = Math.max(0, requests);
    if (logger.isTraceEnabled()) {
        logger.trace("Active connections for {}: {}", peer, connections);
    }
    return connections;
}
java
public Collection<String> getOutDatedCache(Map<String, Long> states) {
    return updatedCacheManager.getOutDatedCache(states);
}
python
def upsert_smart_invite(self, smart_invite_id, recipient, event, callback_url=None, organizer=None):
    """Creates or updates smart invite.

    :param string smart_invite_id - A String uniquely identifying the event for
        your application (note: this is NOT an ID generated by Cronofy).
    :param string callback_url - The URL within your application you want Cronofy
        to send notifications to about user interactions with the Smart Invite.
    :param dict recipient - A Dict containing the intended recipient of the invite
        :email - A String for the email address you are going to send the Smart Invite to.
    :param dict event - A Dict describing the event with symbolized keys:
        :summary - A String to use as the summary, sometimes referred to as the
            name or title, of the event.
        :description - A String to use as the description, sometimes referred to
            as the notes or body, of the event.
        :start - The Time or Date the event starts.
        :end - The Time or Date the event ends.
        :url - The URL associated with the event.
        :location - A Dict describing the location of the event with keys (optional):
            :description - A String describing the location.
            :lat - A String of the location's latitude.
            :long - A String of the location's longitude.
        :reminders - An Array of Dicts describing the desired reminders for the event.
            Reminders should be specified in priority order as, for example, when
            the underlying provider only supports a single reminder then the first
            reminder will be used.
            :minutes - An Integer specifying the number of minutes before the start
                of the event that the reminder should occur.
        :transparency - The transparency state for the event (optional).
            Accepted values are "transparent" and "opaque".
        :color - The color of the event (optional).
    :param dict organizer - A Dict containing the organizer of the invite
        :name - A String for the name of the organizer.
    """
    event['start'] = format_event_time(event['start'])
    event['end'] = format_event_time(event['end'])

    body = {
        'smart_invite_id': smart_invite_id,
        'event': event
    }

    if type(recipient) == dict:
        body['recipient'] = recipient
    elif type(recipient) == list:
        body['recipients'] = recipient

    if callback_url:
        body['callback_url'] = callback_url
    if organizer:
        body['organizer'] = organizer

    return self.request_handler.post('smart_invites', data=body, use_api_key=True).json()
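A call sketch following the parameter docs above (the client instance, ids, addresses and URLs are placeholders, not values from the source):

from datetime import datetime

client.upsert_smart_invite(  # client: a Cronofy client object exposing the method above
    smart_invite_id='your-unique-id',
    recipient={'email': 'user@example.com'},
    event={
        'summary': 'Board meeting',
        'start': datetime(2024, 1, 15, 9, 30),
        'end': datetime(2024, 1, 15, 10, 30),
    },
    callback_url='https://example.com/cronofy/callback',
)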
java
private void validationRelations(Task task) {
    List<Relation> predecessors = task.getPredecessors();
    if (!predecessors.isEmpty()) {
        ArrayList<Relation> invalid = new ArrayList<Relation>();
        for (Relation relation : predecessors) {
            Task sourceTask = relation.getSourceTask();
            Task targetTask = relation.getTargetTask();

            String sourceOutlineNumber = sourceTask.getOutlineNumber();
            String targetOutlineNumber = targetTask.getOutlineNumber();

            if (sourceOutlineNumber != null && targetOutlineNumber != null
                    && sourceOutlineNumber.startsWith(targetOutlineNumber + '.')) {
                invalid.add(relation);
            }
        }
        for (Relation relation : invalid) {
            relation.getSourceTask().removePredecessor(relation.getTargetTask(), relation.getType(), relation.getLag());
        }
    }
}
java
public static SentryClient getStoredClient() {
    if (storedClient != null) {
        return storedClient;
    }
    synchronized (Sentry.class) {
        if (storedClient == null && !autoInitAttempted.get()) {
            // attempt initialization by using configuration found in the environment
            autoInitAttempted.set(true);
            init();
        }
    }
    return storedClient;
}
java
public InputStream get( Artifact artifact ) throws IOException, ArtifactNotFoundException {
    File file = getFile( artifact );
    if ( !file.isFile() ) {
        throw new ArtifactNotFoundException( artifact );
    }
    return new FileInputStream( file );
}
python
def setup_colorbars(self, plot_call_sign):
    """Setup colorbars for each type of plot.

    Takes the colorbar options prepared during the ``__init__`` method
    and builds the colorbar.

    Args:
        plot_call_sign (obj): Plot instance of ax.contourf with colormapping
            to add as a colorbar.

    """
    self.fig.colorbar(plot_call_sign, cax=self.cbar_ax,
                      ticks=self.cbar_ticks,
                      orientation=self.cbar_orientation)

    # setup colorbar ticks
    (getattr(self.cbar_ax, 'set_' + self.cbar_var + 'ticklabels')
        (self.cbar_tick_labels, fontsize=self.cbar_ticks_fontsize))
    (getattr(self.cbar_ax, 'set_' + self.cbar_var + 'label')
        (self.cbar_label, fontsize=self.cbar_label_fontsize,
         labelpad=self.cbar_label_pad))
    return
python
def closest_hendecasyllable_patterns(self, scansion: str) -> List[str]:
    """
    Find the closest group of matching valid hendecasyllable patterns.

    :return: list of the closest valid hendecasyllable patterns; only candidates
        with a matching length/number of syllables are considered.

    >>> print(MetricalValidator().closest_hendecasyllable_patterns('UU-UU-U-U-X'))
    ['-U-UU-U-U-X', 'U--UU-U-U-X']
    """
    return self._closest_patterns(self.VALID_HENDECASYLLABLES, scansion)
java
private Set<String> containerExposedPorts() {
    final Set<String> ports = Sets.newHashSet();
    for (final Map.Entry<String, PortMapping> entry : job.getPorts().entrySet()) {
        final PortMapping mapping = entry.getValue();
        ports.add(containerPort(mapping.getInternalPort(), mapping.getProtocol()));
    }
    return ports;
}
python
def validate(self, arg=None):
    """Check that the input path is valid - set validator accordingly"""
    if os.path.isdir(self.path):
        self.validator.object = None
    else:
        self.validator.object = ICONS['error']
java
public static BindEntity parse(final BindModel model, TypeElement element) {
    final Elements elementUtils = BaseProcessor.elementUtils;
    final InnerCounter counterPropertyInValue = new InnerCounter();
    final Converter<String, String> typeNameConverter = CaseFormat.UPPER_CAMEL.converterTo(CaseFormat.LOWER_CAMEL);

    final TypeElement beanElement = element;
    final BindEntity currentEntity = new BindEntity(beanElement.getSimpleName().toString(), beanElement, AnnotationUtility.buildAnnotationList(element, classAnnotationFilter));

    // tag typeName
    String tagName = AnnotationUtility.extractAsString(beanElement, BindType.class, AnnotationAttributeType.VALUE);
    if (StringUtils.hasText(tagName)) {
        currentEntity.xmlInfo.label = tagName;
    } else {
        currentEntity.xmlInfo.label = typeNameConverter.convert(beanElement.getSimpleName().toString());
    }

    // examine namespaces
    if (element.getAnnotation(BindXmlType.class) != null) {
        FindXmlNamespaceVisitor visitor = new FindXmlNamespaceVisitor();
        for (AnnotationMirror annotationMirror : element.getAnnotationMirrors()) {
            Map<? extends ExecutableElement, ? extends AnnotationValue> elementValues = annotationMirror.getElementValues();

            if (BindXmlType.class.getName().equals(annotationMirror.getAnnotationType().toString())) {
                for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : elementValues.entrySet()) {
                    String key = entry.getKey().getSimpleName().toString();
                    entry.getValue().accept(visitor, key);
                }
                List<Pair<String, String>> namespaces = visitor.getNamespace();
                currentEntity.xmlInfo.namespaces = namespaces;
                break;
            }
        }
    }

    final boolean bindAllFields = AnnotationUtility.getAnnotationAttributeAsBoolean(currentEntity, BindType.class, AnnotationAttributeType.ALL_FIELDS, Boolean.TRUE);

    PropertyUtility.buildProperties(elementUtils, currentEntity, new PropertyFactory<BindEntity, BindProperty>() {

        @Override
        public BindProperty createProperty(BindEntity entity, Element propertyElement) {
            return new BindProperty(currentEntity, propertyElement, AnnotationUtility.buildAnnotationList(propertyElement));
        }
    }, propertyAnnotationFilter, new PropertyCreatedListener<BindEntity, BindProperty>() {

        @Override
        public boolean onProperty(BindEntity entity, BindProperty property) {
            // if we are building the Map, the model is not null
            boolean contextExternal = (model == null);

            // if @BindDisabled is present, exit immediately
            if (property.hasAnnotation(BindDisabled.class)) {
                if (bindAllFields) {
                    return false;
                } else {
                    throw new InvalidDefinition(String.format("In class '%s', @%s can not be used with @%s(allField=false)", property.getParent().getElement().asType().toString(), BindDisabled.class.getSimpleName(), BindType.class.getSimpleName()));
                }
            }

            boolean enabled = bindAllFields;
            ModelAnnotation annotationBind = property.getAnnotation(Bind.class);
            enabled = enabled || (annotationBind != null && AnnotationUtility.extractAsBoolean(property, annotationBind, AnnotationAttributeType.ENABLED));

            // if we are not in an external context and the element is not enabled, we skip it
            if (!enabled && !contextExternal) {
                return false;
            }

            ModelAnnotation annotationBindXml = property.getAnnotation(BindXml.class);

            property.order = 0;
            property.mapKeyName = Bind.MAP_KEY_DEFAULT;
            property.mapValueName = Bind.MAP_VALUE_DEFAULT;

            // labels for item and collection elements are the same by default
            property.label = typeNameConverter.convert(property.getName());
            property.xmlInfo.labelItem = property.label;
            property.xmlInfo.wrappedCollection = false;
            property.xmlInfo.xmlType = XmlType.valueOf(XmlType.TAG.toString());
            property.xmlInfo.mapEntryType = MapEntryType.valueOf(MapEntryType.TAG.toString());

            // check if there is an adapter
            if ((property.getAnnotation(BindAdapter.class) != null)) {
                BindTransform transform = BindTransformer.lookup(TypeUtility.typeName(property.typeAdapter.dataType));
                if (!transform.isTypeAdapterSupported()) {
                    String msg = String.format("In class '%s', property '%s' uses @BindAdapter with unsupported 'dataType' '%s'", beanElement.asType().toString(), property.getName(), property.typeAdapter.dataType);
                    throw (new IncompatibleAnnotationException(msg));
                }
                if (property.getPropertyType().isPrimitive()) {
                    String msg = String.format("In class '%s', property '%s' is primitive of type '%s' and it can not be annotated with @BindAdapter", beanElement.asType().toString(), property.getName(), property.getPropertyType().getTypeName());
                    throw (new IncompatibleAnnotationException(msg));
                }
            }

            // @Bind management
            if (annotationBind != null) {
                int order = AnnotationUtility.extractAsInt(property.getElement(), Bind.class, AnnotationAttributeType.ORDER);
                property.order = order;
                String tempName = AnnotationUtility.extractAsString(property.getElement(), Bind.class, AnnotationAttributeType.VALUE);
                if (StringUtils.hasText(tempName)) {
                    // for the moment they are the same
                    property.label = tempName;
                    property.xmlInfo.labelItem = property.label;
                }

                // map info
                String mapKeyName = AnnotationUtility.extractAsString(property.getElement(), Bind.class, AnnotationAttributeType.MAP_KEY_NAME);
                if (StringUtils.hasText(mapKeyName)) property.mapKeyName = mapKeyName;

                String mapValueName = AnnotationUtility.extractAsString(property.getElement(), Bind.class, AnnotationAttributeType.MAP_VALUE_NAME);
                if (StringUtils.hasText(mapValueName)) property.mapValueName = mapValueName;
            }

            // @BindXml management
            if (annotationBindXml != null) {
                String mapEntryType = AnnotationUtility.extractAsEnumerationValue(property.getElement(), BindXml.class, AnnotationAttributeType.MAP_ENTRY_TYPE);
                if (StringUtils.hasText(mapEntryType)) property.xmlInfo.mapEntryType = MapEntryType.valueOf(mapEntryType);

                // define element tag typeName
                String tempElementName = AnnotationUtility.extractAsString(property.getElement(), BindXml.class, AnnotationAttributeType.XML_ELEMENT_TAG);
                if (StringUtils.hasText(tempElementName)) {
                    property.xmlInfo.labelItem = tempElementName;
                    property.xmlInfo.wrappedCollection = true;
                }

                String xmlType = AnnotationUtility.extractAsEnumerationValue(property.getElement(), BindXml.class, AnnotationAttributeType.XML_TYPE);
                if (StringUtils.hasText(xmlType)) {
                    property.xmlInfo.xmlType = XmlType.valueOf(xmlType);
                }

                // add namespace to name
                String namespace = annotationBindXml.getAttribute(AnnotationAttributeType.NAMESPACE);
                if (StringUtils.hasText(namespace)) {
                    if (property.xmlInfo.xmlType == XmlType.VALUE || property.xmlInfo.xmlType == XmlType.VALUE_CDATA) {
                        String msg = String.format("In class '%s', property '%s', defined as xml value, can not be used with a namespace", beanElement.asType().toString(), property.getName());
                        throw (new IncompatibleAttributesInAnnotationException(msg));
                    }
                    property.xmlInfo.namespace = namespace;
                }
            }

            if (property.xmlInfo.xmlType == XmlType.ATTRIBUTE) {
                BindTransform transform = BindTransformer.lookup(property.getPropertyType().getTypeName());

                // check if property is an array
                if (property.isBindedArray() && !(transform instanceof ByteArrayBindTransform)) {
                    String msg = String.format("In class '%s', property '%s' is an array and it can not be mapped in an xml attribute", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                // check if property is a collection
                if (property.isBindedCollection()) {
                    String msg = String.format("In class '%s', property '%s' is a collection and it can not be mapped in an xml attribute", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                // check if property is a map
                if (property.isBindedMap()) {
                    String msg = String.format("In class '%s', property '%s' is a map and it can not be mapped in an xml attribute", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                if (transform != null && transform instanceof ObjectBindTransform) {
                    String msg = String.format("In class '%s', property '%s' is an object and it can not be mapped in an xml attribute", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
            }

            if (property.xmlInfo.xmlType == XmlType.VALUE || property.xmlInfo.xmlType == XmlType.VALUE_CDATA) {
                counterPropertyInValue.inc();

                BindTransform transform = BindTransformer.lookup(property.getPropertyType().getTypeName());

                // check if property is an array
                if (property.isBindedArray() && !(transform instanceof ByteArrayBindTransform)) {
                    String msg = String.format("In class '%s', property '%s' is an array and it can not be mapped in an xml value", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                // check if property is a collection
                if (property.isBindedCollection()) {
                    String msg = String.format("In class '%s', property '%s' is a collection and it can not be mapped in an xml value", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                // check if property is a map
                if (property.isBindedMap()) {
                    String msg = String.format("In class '%s', property '%s' is a map and it can not be mapped in an xml value", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
                if (transform != null && transform instanceof ObjectBindTransform) {
                    String msg = String.format("In class '%s', property '%s' is an object and it can not be mapped in an xml value", beanElement.asType().toString(), property.getName());
                    throw (new IncompatibleAttributesInAnnotationException(msg));
                }
            }

            if (counterPropertyInValue.value() > 1) {
                String msg = String.format("In class '%s', property '%s' and other properties are mapped in an xml value, but only one property per class can be an xml value", beanElement.asType().toString(), property.getName());
                throw (new IncompatibleAttributesInAnnotationException(msg));
            }

            property.bindedObject = BindTransformer.isBindedObject(property);

            // if it's an object, we need to avoid printing the field typeName
            // (like the object transform usually does).
            // Setting inCollection to true permits this.
            if (property.bindedObject && contextExternal) {
                property.inCollection = true;
            }

            return true;
        }

    });

    ImmutableUtility.buildConstructors(elementUtils, currentEntity);

    // if we don't have a model, we don't save the bean definition
    if (model != null) {
        model.entityAdd(currentEntity);
    }

    return currentEntity;
}
java
public static double Kurtosis(int[] values) {
    double mean = Mean(values);
    double std = StdDev(values, mean);
    return Kurtosis(values, mean, std);
}
python
def debugtoolbar_middleware_factory(app, handler):
    """Setup Debug middleware."""
    dbtb = app.ps.debugtoolbar

    @asyncio.coroutine
    def debugtoolbar_middleware(request):
        """Integrate to application."""

        # Check for debugtoolbar is enabled for the request
        if not dbtb.cfg.enabled or any(map(request.path.startswith, dbtb.cfg.exclude)):
            return (yield from handler(request))

        remote_host, remote_port = request.transport.get_extra_info('peername')
        for host in dbtb.cfg.hosts:
            if ip.ip_address(remote_host) in ip.ip_network(host):
                break
        else:
            return (yield from handler(request))

        # Initialize a debugstate for the request
        state = DebugState(app, request)
        dbtb.history[state.id] = state
        context_switcher = state.wrap_handler(handler)

        # Make response
        try:
            response = yield from context_switcher(handler(request))
            state.status = response.status
        except HTTPException as exc:
            response = exc
            state.status = response.status
        except Exception as exc:
            # Store traceback for unhandled exception
            state.status = 500
            if not dbtb.cfg.intercept_exc:
                raise
            tb = get_traceback(
                info=sys.exc_info(), skip=1, show_hidden_frames=False,
                ignore_system_exceptions=True, exc=exc)
            dbtb.exceptions[tb.id] = request['pdbt_tb'] = tb
            for frame in tb.frames:
                dbtb.frames[id(frame)] = frame
            response = Response(text=tb.render_full(request), content_type='text/html')

        # Intercept http redirect codes and display an html page with a link to the target.
        if dbtb.cfg.intercept_redirects and response.status in REDIRECT_CODES \
                and 'Location' in response.headers:
            response = yield from app.ps.jinja2.render(
                'debugtoolbar/redirect.html', response=response)
            response = Response(text=response, content_type='text/html')

        yield from state.process_response(response)

        if isinstance(response, Response) and response.content_type == 'text/html' and \
                RE_BODY.search(response.body):
            return (yield from dbtb.inject(state, response))

        return response

    return debugtoolbar_middleware
python
def _gen_labels_columns(self, list_columns):
    """
    Auto generates pretty label_columns from list of columns
    """
    for col in list_columns:
        if not self.label_columns.get(col):
            self.label_columns[col] = self._prettify_column(col)
java
private void adaptBreadCrumbVisibility(final boolean hideBreadCrumb) {
    if (isSplitScreen()) {
        if (breadCrumbToolbar != null && breadCrumbShadowView != null) {
            breadCrumbToolbar.setVisibility(hideBreadCrumb ? View.GONE : View.VISIBLE);
            breadCrumbShadowView.setVisibility(hideBreadCrumb ? View.GONE : View.VISIBLE);
        }
    } else {
        if (toolbar != null && toolbarShadowView != null) {
            toolbar.setVisibility(hideBreadCrumb ? View.GONE : View.VISIBLE);
            toolbarShadowView.setVisibility(hideBreadCrumb ? View.GONE : View.VISIBLE);
        }
    }
}
python
def complete(self):
    """
    Returns whether or not this manager has reached a "completed" state.
    """
    if not self._techniques:
        return False
    if not any(tech._is_overriden('complete') for tech in self._techniques):
        return False

    return self.completion_mode(
        tech.complete(self) for tech in self._techniques if tech._is_overriden('complete')
    )
java
public boolean hasKey(String scope, Object key) throws Exception {
    return getScopeCache(scope).containsKey(key);
}
python
def run_command_line(args=None):
    """
    Entry point for the FlowCal and flowcal console scripts.

    Parameters
    ----------
    args: list of strings, optional
        Command line arguments. If None or not specified, get arguments
        from ``sys.argv``.

    See Also
    --------
    FlowCal.excel_ui.run()

    http://amir.rachum.com/blog/2017/07/28/python-entry-points/

    """
    # Get arguments from ``sys.argv`` if necessary.
    # ``sys.argv`` has the name of the script as its first element. We remove
    # this element because it will break ``parser.parse_args()`` later. In fact,
    # ``parser.parse_args()``, if provided with no arguments, will also use
    # ``sys.argv`` after removing the first element.
    if args is None:
        args = sys.argv[1:]

    import argparse
    # Read command line arguments
    parser = argparse.ArgumentParser(
        description="process flow cytometry files with FlowCal's Excel UI.")
    parser.add_argument(
        "-i", "--inputpath", type=str, nargs='?',
        help="input Excel file name. If not specified, show open file window")
    parser.add_argument(
        "-o", "--outputpath", type=str, nargs='?',
        help="output Excel file name. If not specified, use [INPUTPATH]_output")
    parser.add_argument(
        "-v", "--verbose", action="store_true",
        help="print information about individual processing steps")
    parser.add_argument(
        "-p", "--plot", action="store_true",
        help="generate and save density plots/histograms of beads and samples")
    parser.add_argument(
        "-H", "--histogram-sheet", action="store_true",
        help="generate sheet in output Excel file specifying histogram bins")
    args = parser.parse_args(args=args)

    # Run Excel UI
    run(input_path=args.inputpath,
        output_path=args.outputpath,
        verbose=args.verbose,
        plot=args.plot,
        hist_sheet=args.histogram_sheet)
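A quick sketch of calling this entry point directly from Python rather than through the console script; the spreadsheet name is made up:

# Equivalent to running: flowcal -i experiment.xlsx -v -p
run_command_line(['-i', 'experiment.xlsx', '-v', '-p'])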
java
protected ResourceBundle resolveBundle (HttpServletRequest req, String bundlePath,
                                        ClassLoader loader, boolean silent) {
    ResourceBundle bundle = null;

    if (req != null) {
        Enumeration<?> locales = req.getLocales();
        while (locales.hasMoreElements()) {
            Locale locale = (Locale) locales.nextElement();
            try {
                // java caches resource bundles, so we don't need to reinvent the wheel here.
                // however, java also falls back from a specific bundle to a more general one
                // if it can't find a specific bundle. that's real nice of it, but we want
                // first to see whether or not we have exact matches on any of the preferred
                // locales specified by the client. if we don't, then we can rely on java's
                // fallback mechanisms
                bundle = ResourceBundle.getBundle(bundlePath, locale, loader);

                // if it's an exact match, off we go
                if (bundle.getLocale().equals(locale)) {
                    break;
                }
            } catch (MissingResourceException mre) {
                // no need to freak out quite yet, see if we have something for one of the
                // other preferred locales
            }
        }
    }

    // if we were unable to find an exact match for any of the user's preferred locales, take
    // their most preferred and let java perform its fallback logic on that one
    if (bundle == null) {
        Locale locale = (req == null) ? _deflocale : req.getLocale();
        try {
            bundle = ResourceBundle.getBundle(bundlePath, locale, loader);
        } catch (MissingResourceException mre) {
            if (!silent) {
                // if we were unable even to find a default bundle, we may want to log a warning
                log.warning("Unable to resolve any message bundle", "req", getURL(req),
                            "locale", locale, "bundlePath", bundlePath,
                            "classLoader", loader, "siteBundlePath", _siteBundlePath,
                            "siteLoader", _siteLoader);
            }
        }
    }

    return bundle;
}
java
protected void initUserObject() {
    if (m_indexsource == null) {
        try {
            m_indexsource = m_searchManager.getIndexSource(getParamIndexsource());
            if (m_indexsource == null) {
                m_indexsource = new CmsSearchIndexSource();
            }
        } catch (Exception e) {
            m_indexsource = new CmsSearchIndexSource();
        }
    }
}
python
def _get_connection_info():
    """Return a string with the connection info."""
    info = 'Connection: %s,' % CONN.url
    if CONN.creds is not None:
        info += ' userid=%s,' % CONN.creds[0]
    else:
        info += ' no creds,'

    info += ' cacerts=%s,' % ('sys-default' if CONN.ca_certs is None
                              else CONN.ca_certs)
    info += ' verifycert=%s,' % ('off' if CONN.no_verification else 'on')
    info += ' default-namespace=%s' % CONN.default_namespace
    if CONN.x509 is not None:
        info += ', client-cert=%s' % CONN.x509['cert_file']
        try:
            kf = CONN.x509['key_file']
        except KeyError:
            kf = "none"
        info += ":%s" % kf

    if CONN.timeout is not None:
        info += ', timeout=%s' % CONN.timeout

    # pylint: disable=protected-access
    info += ' stats=%s, ' % ('on' if CONN._statistics else 'off')
    info += 'log=%s' % ('on' if CONN._operation_recorders else 'off')
    if isinstance(CONN, FakedWBEMConnection):
        info += ', mock-server'

    return fill(info, 78, subsequent_indent=' ')
java
@Override
public boolean listenToException(final AbstractPerfidixMethodException exec) {
    final StringBuilder builder = new StringBuilder();
    if (exec.getMethod() != null) {
        builder.append("Class: ").append(exec.getMethod().getDeclaringClass().getSimpleName())
               .append("#").append(exec.getMethod().getName()).append("\n");
    }
    builder.append("Annotation: ").append(exec.getRelatedAnno().getSimpleName());
    builder.append("\nException: ").append(exec.getClass().getSimpleName()).append("/")
           .append(exec.getExec().toString());
    out.println(builder.toString());
    exec.getExec().printStackTrace(out);
    return true;
}
python
def generic_find_constraint_name(table, columns, referenced, db):
    """Utility to find a constraint name in alembic migrations"""
    t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine)

    for fk in t.foreign_key_constraints:
        if fk.referred_table.name == referenced and set(fk.column_keys) == columns:
            return fk.name
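A usage sketch from inside a hypothetical Alembic migration; it assumes `db` is a Flask-SQLAlchemy handle and `op` is Alembic's operations module, and the table/column names are made up:

constraint = generic_find_constraint_name(
    table='tables',            # table that carries the foreign key
    columns={'database_id'},   # must be a set: it is compared to set(fk.column_keys)
    referenced='dbs',          # table the foreign key points at
    db=db,
)
if constraint:
    op.drop_constraint(constraint, 'tables', type_='foreignkey')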
python
def read(cls, data):
    """Reads data from URL, Dataframe, JSON string, JSON file or OrderedDict.

    Args:
        data: can be a Pandas Dataframe, a JSON file, a JSON string,
            an OrderedDict or a URL pointing to a JSONstat file.

    Returns:
        An object of class Dataset populated with data.
    """
    if isinstance(data, pd.DataFrame):
        return cls((json.loads(
            to_json_stat(data, output='dict', version='2.0'),
            object_pairs_hook=OrderedDict)))
    elif isinstance(data, OrderedDict):
        return cls(data)
    elif (isinstance(data, basestring) and
          data.startswith(("http://", "https://", "ftp://", "ftps://"))):
        # requests will do the rest...
        return cls(request(data))
    elif isinstance(data, basestring):
        try:
            json_dict = json.loads(data, object_pairs_hook=OrderedDict)
            return cls(json_dict)
        except ValueError:
            raise
    else:
        try:
            json_dict = json.load(data, object_pairs_hook=OrderedDict)
            return cls(json_dict)
        except ValueError:
            raise
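Two quick ways to call it, assuming the class is exposed as a pyjstat-style Dataset; the sample URL is json-stat.org's public example set, shown only for illustration:

# From a URL that serves a JSON-stat document
dataset = Dataset.read('https://json-stat.org/samples/oecd.json')

# From an in-memory pandas DataFrame (df is any DataFrame that
# to_json_stat can serialize)
dataset2 = Dataset.read(df)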
java
private SerializerWrapper getSerializerWrapper(Class cls) {
    SerializerWrapper w = serializers.get(cls);
    if (w != null) {
        return w;
    } else {
        // Try with interfaces implemented
        for (Class c : cls.getInterfaces()) {
            w = serializers.get(c);
            if (w != null) {
                return w;
            }
        }
    }
    return null;
}
python
def restore(self, backup=None, delete_backup=False):
    """Restore the snapshot to the associated storage resource.

    :param backup: name of the backup snapshot
    :param delete_backup: Whether to delete the backup snap after a
        successful restore.
    """
    resp = self._cli.action(self.resource_class, self.get_id(),
                            'restore', copyName=backup)
    resp.raise_if_err()

    backup = resp.first_content['backup']
    backup_snap = UnitySnap(_id=backup['id'], cli=self._cli)
    if delete_backup:
        log.info("Deleting the backup snap {} as the restoration "
                 "succeeded.".format(backup['id']))
        backup_snap.delete()
    return backup_snap
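A minimal usage sketch, assuming `snap` is a storops-style UnitySnap already looked up from a live system; the backup name is made up:

# Roll the resource back to this snapshot, naming the safety copy
backup_snap = snap.restore(backup='before_restore')

# Or restore and discard the automatically created backup snapshot
snap.restore(delete_backup=True)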
java
@Override
public EClass getIfcSanitaryTerminalType() {
    if (ifcSanitaryTerminalTypeEClass == null) {
        ifcSanitaryTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(585);
    }
    return ifcSanitaryTerminalTypeEClass;
}
java
public static <E extends Exception> long importCSV(final InputStream is, long offset, final long count,
        final Try.Predicate<String[], E> filter, final PreparedStatement stmt, final int batchSize,
        final int batchInterval,
        final Try.BiConsumer<? super PreparedStatement, ? super String[], SQLException> stmtSetter)
        throws UncheckedSQLException, UncheckedIOException, E {
    final Reader reader = new InputStreamReader(is);

    return importCSV(reader, offset, count, filter, stmt, batchSize, batchInterval, stmtSetter);
}
python
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
java
@Override
public void updateAfterMisfire(Calendar cal) {
    int instr = getMisfireInstruction();

    if (instr == Trigger.MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY) {
        return;
    }

    if (instr == Trigger.MISFIRE_INSTRUCTION_SMART_POLICY) {
        if (getRepeatCount() == 0) {
            instr = MISFIRE_INSTRUCTION_FIRE_NOW;
        } else if (getRepeatCount() == REPEAT_INDEFINITELY) {
            instr = MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT;
        } else {
            // if (getRepeatCount() > 0)
            instr = MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT;
        }
    } else if (instr == MISFIRE_INSTRUCTION_FIRE_NOW && getRepeatCount() != 0) {
        instr = MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT;
    }

    if (instr == MISFIRE_INSTRUCTION_FIRE_NOW) {
        setNextFireTime(new Date());
    } else if (instr == MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT) {
        Date newFireTime = getFireTimeAfter(new Date());
        while (newFireTime != null && cal != null && !cal.isTimeIncluded(newFireTime.getTime())) {
            newFireTime = getFireTimeAfter(newFireTime);

            if (newFireTime == null) {
                break;
            }

            // avoid infinite loop
            java.util.Calendar c = java.util.Calendar.getInstance();
            c.setTime(newFireTime);
            if (c.get(java.util.Calendar.YEAR) > YEAR_TO_GIVEUP_SCHEDULING_AT) {
                newFireTime = null;
            }
        }
        setNextFireTime(newFireTime);
    } else if (instr == MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT) {
        Date newFireTime = getFireTimeAfter(new Date());
        while (newFireTime != null && cal != null && !cal.isTimeIncluded(newFireTime.getTime())) {
            newFireTime = getFireTimeAfter(newFireTime);

            if (newFireTime == null) {
                break;
            }

            // avoid infinite loop
            java.util.Calendar c = java.util.Calendar.getInstance();
            c.setTime(newFireTime);
            if (c.get(java.util.Calendar.YEAR) > YEAR_TO_GIVEUP_SCHEDULING_AT) {
                newFireTime = null;
            }
        }
        if (newFireTime != null) {
            int timesMissed = computeNumTimesFiredBetween(nextFireTime, newFireTime);
            setTimesTriggered(getTimesTriggered() + timesMissed);
        }
        setNextFireTime(newFireTime);
    } else if (instr == MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT) {
        Date newFireTime = new Date();
        if (repeatCount != 0 && repeatCount != REPEAT_INDEFINITELY) {
            setRepeatCount(getRepeatCount() - getTimesTriggered());
            setTimesTriggered(0);
        }

        if (getEndTime() != null && getEndTime().before(newFireTime)) {
            setNextFireTime(null); // We are past the end time
        } else {
            setStartTime(newFireTime);
            setNextFireTime(newFireTime);
        }
    } else if (instr == MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT) {
        Date newFireTime = new Date();
        int timesMissed = computeNumTimesFiredBetween(nextFireTime, newFireTime);

        if (repeatCount != 0 && repeatCount != REPEAT_INDEFINITELY) {
            int remainingCount = getRepeatCount() - (getTimesTriggered() + timesMissed);
            if (remainingCount <= 0) {
                remainingCount = 0;
            }
            setRepeatCount(remainingCount);
            setTimesTriggered(0);
        }

        if (getEndTime() != null && getEndTime().before(newFireTime)) {
            setNextFireTime(null); // We are past the end time
        } else {
            setStartTime(newFireTime);
            setNextFireTime(newFireTime);
        }
    }
}
python
def log_env(self, level, env):
    """dump env into debug logger in readable format"""
    self.log(level, "ENVIRONMENT:")
    for k, v in env.items():
        self.log(level, " {} = {}".format(k, pformat(v)))
java
private void serializePartitionInfoToState() {
    List<PartitionDescriptor> descriptors = new ArrayList<>();

    for (DataWriter writer : partitionWriters.asMap().values()) {
        Descriptor descriptor = writer.getDataDescriptor();
        if (null == descriptor) {
            log.warn("Drop partition info as writer {} returns a null PartitionDescriptor", writer.toString());
            continue;
        }

        if (!(descriptor instanceof PartitionDescriptor)) {
            log.warn("Drop partition info as writer {} does not return a PartitionDescriptor", writer.toString());
            continue;
        }

        descriptors.add((PartitionDescriptor) descriptor);
    }

    if (descriptors.size() > 0) {
        state.setProp(getPartitionsKey(branchId), PartitionDescriptor.toPartitionJsonList(descriptors));
    } else {
        log.info("Partitions info not available. Will not serialize partitions");
    }
}
python
def add(self, namespace_uri):
    """Add this namespace URI to the mapping, without caring what
    alias it ends up with"""
    # See if this namespace is already mapped to an alias
    alias = self.namespace_to_alias.get(namespace_uri)
    if alias is not None:
        return alias

    # Fall back to generating a numerical alias
    i = 0
    while True:
        alias = 'ext' + str(i)
        try:
            self.addAlias(namespace_uri, alias)
        except KeyError:
            i += 1
        else:
            return alias

    assert False, "Not reached"
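A short sketch of the intended behavior, assuming the surrounding class is a hypothetical NamespaceMap-style container whose addAlias raises KeyError when an alias is already taken (which is what the try/except above relies on):

nsmap = NamespaceMap()                        # hypothetical container class
nsmap.addAlias('http://example.com/a', 'ext0')
alias = nsmap.add('http://example.com/b')     # 'ext0' is taken, so this yields 'ext1'
assert nsmap.add('http://example.com/b') == alias   # re-adding returns the same alias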
python
def bin(x, bins, maxX=None, minX=None):
    '''
    bin signal x using 'bins' bins.

    If minX, maxX are None, they default to the full range of the signal.
    If they are not None, everything above maxX gets assigned to bins-1 and
    everything below minX gets assigned to 0; this is effectively the same
    as clipping x before passing it to 'bin'

    input:
    -----
    x:        signal to be binned, some sort of iterable

    bins:     int, number of bins
              iterable, bin edges

    maxX:     clips data above maxX

    minX:     clips data below minX

    output:
    ------
    binnedX:  x after being binned

    bins:     bins used for binning. if input 'bins' is already an iterable
              it just returns the same iterable

    example:
    # make 10 bins of equal length spanning from x.min() to x.max()
    bin(x, 10)

    # use predefined bins such that each bin has the same number of points
    # (maximize entropy)
    binsN = 10
    percentiles = list(np.arange(0, 100.1, 100/binsN))
    bins = np.percentile(x, percentiles)
    bin(x, bins)
    '''
    if maxX is None:
        maxX = x.max()
    if minX is None:
        minX = x.min()

    if not np.iterable(bins):
        bins = np.linspace(minX, maxX + 1e-5, bins + 1)

    # digitize works on 1d arrays but not nd arrays.
    # So I pass the flattened version of x and then reshape back into x's original shape
    return np.digitize(x.ravel(), bins).reshape(x.shape), bins
python
def _data_to_sign(self) -> bytes:
    """ Return the binary data to be/which was signed """
    packed = pack_reward_proof(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=self.balance_proof.chain_id,
            token_network_address=self.balance_proof.token_network_address,
            channel_identifier=self.balance_proof.channel_identifier,
        ),
        reward_amount=self.reward_amount,
        nonce=self.balance_proof.nonce,
    )
    return packed
python
def _get_fs(name, blade):
    '''
    Private function to check for existence of a filesystem
    '''
    _fs = []
    _fs.append(name)
    try:
        res = blade.file_systems.list_file_systems(names=_fs)
        return res.items[0]
    except rest.ApiException:
        return None
java
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
        case AfplibPackage.GPARC__XPOS:
            setXPOS((Integer) newValue);
            return;
        case AfplibPackage.GPARC__YPOS:
            setYPOS((Integer) newValue);
            return;
        case AfplibPackage.GPARC__XCENT:
            setXCENT((Integer) newValue);
            return;
        case AfplibPackage.GPARC__YCENT:
            setYCENT((Integer) newValue);
            return;
        case AfplibPackage.GPARC__MH:
            setMH((Integer) newValue);
            return;
        case AfplibPackage.GPARC__MFR:
            setMFR((Integer) newValue);
            return;
        case AfplibPackage.GPARC__START:
            setSTART((Integer) newValue);
            return;
        case AfplibPackage.GPARC__SWEEP:
            setSWEEP((Integer) newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
python
def comparator(operator):
    """ Wrap a VersionInfo binary op method in a type-check """
    @wraps(operator)
    def wrapper(self, other):
        if not isinstance(other, (VersionInfo, dict)):
            return NotImplemented
        return operator(self, other)
    return wrapper
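A sketch of how such a decorator is typically applied, assuming a semver-style VersionInfo class; the class body here is illustrative, not the library's real one:

class VersionInfo:
    def __init__(self, major, minor, patch):
        self.major, self.minor, self.patch = major, minor, patch

    @comparator
    def __eq__(self, other):
        # Only reached when `other` passed the type-check in the wrapper
        return (self.major, self.minor, self.patch) == \
               (other.major, other.minor, other.patch)

# The wrapper returns NotImplemented for a string, so Python falls back
# to its default comparison and this evaluates to False instead of raising:
VersionInfo(1, 2, 3) == "1.2.3"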
python
def _joinedQnames(self, _list):
    """util for returning a string joining names of entities
    *used only in info command*"""
    try:
        s = "; ".join([p.qname for p in _list])
    except:
        s = "; ".join([p for p in _list])
    return s
java
@Override
public DeleteApplicationResult deleteApplication(DeleteApplicationRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteApplication(request);
}
python
def update(i):
    """
    Update repository info

    Input:  {
              data_uoa         - data UOA of the repo

              (shared)         - if not remote and =='git', shared through GIT

              (url)            - if type=='git', URL of remote repository or git repository
              (hostname)       - if !='', automatically form url above (add http:// + /ck?)
              (port)           - if !='', automatically add to url above
              (hostext)        - if !='', add to the end of above URL instead of '/ck?'
                                 - useful when CK server is accessed via Apache2, IIS, Nginx
                                   or other web servers

              (sync)           - if 'yes' and type=='git', sync repo after each write operation

              (allow_writing)  - if 'yes', allow writing
                                 (useful when kernel is set to allow writing only to such repositories)

              (repo_deps)      - dict with dependencies on other shared repositories with following keys:
                                   "repo_uoa"
                                   ("repo_uid") - specific UID (version) of a repo
                                   ("repo_url") - URL of the shared repository (if not from github.com/ctuning)

              (update)         - if 'yes', force updating

              (describe)       - describe repository for Artifact Evaluation (see http://cTuning.org/ae)
            }

    Output: {
              return  - return code =  0, if successful
                                    >  0, if error
              (error) - error text if return > 0
            }
    """
    # Check if global writing is allowed
    r=ck.check_writing({})
    if r['return']>0: return r

    o=i.get('out','')

    duoa=i.get('data_uoa','')

    remote=i.get('remote','')
    rruoa=i.get('remote_repo_uoa','')

    shared=i.get('shared','')
    sync=i.get('sync','')

    rx=form_url(i)
    if rx['return']>0: return rx
    url=rx['url']

    rdeps=i.get('repo_deps',[])

    eaw=i.get('allow_writing','')

    # Get configuration (not from Cache - can be outdated info!)
    # r=ck.load_repo_info_from_cache({'repo_uoa':duoa})
    r=ck.access({'action':'load',
                 'module_uoa':work['self_module_uoa'],
                 'data_uoa':duoa})
    if r['return']>0: return r

    p=r.get('dict',{}).get('path','')
    dn=r.get('data_name','')
    d=r['dict']

    remote=d.get('remote','')

    changed=False

    # Check user-friendly name
    if dn!='':
        ck.out('Current user-friendly name of this repository: '+dn)
        ck.out('')

        r=ck.inp({'text':'Enter a user-friendly name of this repository (or Enter to keep old value): '})
        x=r['string']
        if x!='':
            dn=x
            changed=True

    # If remote, update URL
    shared=d.get('shared','')
    if remote=='yes':
        url=d.get('url','')
        ck.out('Repository is remote ...')
        ck.out('')
        ck.out('Current URL: '+url)
        ck.out('')
        rx=ck.inp({'text':'Enter new URL (or Enter to leave old one): '})
        x=rx['string']
        if x!='':
            d['url']=x
            changed=True
    elif shared!='':
        url=d.get('url','')
        ck.out('Repository is shared ...')
        ck.out('')
        ck.out('Current URL: '+url)

        if shared=='git':
            sync=d.get('sync','')
            ck.out('')
            if sync!='':
                ck.out('Current sync setting: '+sync)
            r=ck.inp({'text': 'Would you like to sync repo each time after writing to it (y/N)?: '})
            x=r['string'].lower()
            if x=='yes' or x=='y':
                d['sync']='yes'
                changed=True

    # Asking about forbidding explicit writing to this repository
    if remote!='yes' and eaw=='':
        if eaw=='':
            eaw=d.get('allow_writing','')
        ck.out('')
        if eaw!='':
            ck.out('Current "allow writing" setting: '+eaw)
        r=ck.inp({'text':'Would you like to allow explicit writing to this repository when kernel disables all writing (y/N): '})
        x=r['string'].lower()
        if x=='yes' or x=='y':
            d['allow_writing']='yes'
            changed=True

    # Check if explicit deps
    if len(rdeps)>0:
        if 'repo_deps' not in d:
            d['repo_deps']=rdeps
        else:
            for q in rdeps:
                d['repo_deps'].append(q)
        changed=True

    # Print deps
    rdeps=d.get('repo_deps',[])
    if len(rdeps)>0:
        ck.out('')
        ck.out('Current dependencies on other repositories:')
        r=print_deps({'repo_deps':rdeps,
                      'out':o,
                      'out_prefix':'  '})
        if r['return']>0: return r
        ck.out('')

    # Check if add more deps
    r=add_more_deps({})
    if r['return']>0: return r

    rdeps1=r['repo_deps']
    if len(rdeps1)>0:
        if 'repo_deps' not in d:
            d['repo_deps']=rdeps1
        else:
            for q in rdeps1:
                d['repo_deps'].append(q)
        changed=True

    # Check if need to describe for Artifact Evaluation
    if i.get('describe','')=='yes':
        r=describe({'dict':d})
        if r['return']>0: return r
        changed=True

    # Write if changed
    if changed or i.get('update','')=='yes':
        if o=='con':
            ck.out('')
            ck.out('Updating repo info ...')

        rx=ck.access({'action':'update',
                      'module_uoa':ck.cfg['repo_name'],
                      'data_uoa':duoa,
                      'data_name':dn,
                      'dict':d,
                      'common_func':'yes',
                      'overwrite':'yes'})
        if rx['return']>0: return rx

        # Recaching
        if o=='con':
            ck.out('')
            ck.out('Recaching repos to speed up access ...')
            ck.out('')
        r=recache({'out':o})
        if r['return']>0: return r

        # Updating local repository description
        if remote!='yes':
            r=ck.load_repo_info_from_cache({'repo_uoa':duoa})
            if r['return']>0: return r

            del(r['return'])
            if 'path_to_repo_desc' in r:
                del(r['path_to_repo_desc'])       # Avoid recording some local info
            if r.get('dict',{}).get('path','')!='':
                del(r['dict']['path'])            # Avoid recording some local info

            py=os.path.join(p, ck.cfg['repo_file'])
            ry=ck.save_json_to_file({'json_file':py, 'dict':r})
            if ry['return']>0: return ry

    return {'return':0}
java
public static Collection<URL> forManifest(final URL url) {
    final Collection<URL> result = new ArrayList<URL>();
    result.add(url);
    try {
        final String part = cleanPath(url);
        File jarFile = new File(part);
        JarFile myJar = new JarFile(part);

        URL validUrl = tryToGetValidUrl(jarFile.getPath(), new File(part).getParent(), part);
        if (validUrl != null) {
            result.add(validUrl);
        }
        final Manifest manifest = myJar.getManifest();
        if (manifest != null) {
            final String classPath = manifest.getMainAttributes().getValue(new Attributes.Name("Class-Path"));
            if (classPath != null) {
                for (String jar : classPath.split(" ")) {
                    validUrl = tryToGetValidUrl(jarFile.getPath(), new File(part).getParent(), jar);
                    if (validUrl != null) {
                        result.add(validUrl);
                    }
                }
            }
        }
    } catch (IOException e) {
        // don't do anything, we're going on the assumption it is a jar, which could be wrong
    }
    return distinctUrls(result);
}
python
def Upload(self,directory,filename):
    """Uploads/Updates/Replaces files"""
    if self._isMediaFile(filename):
        return self._upload_media(directory,filename)
    elif self._isConfigFile(filename):
        return self._update_config(directory,filename)
    print "Not handled!"
    return False
java
@Override
public Object getValue(ELContext context, Object base, Object property) {
    if (context == null) {
        throw new NullPointerException("context is null");
    }
    Object result = null;
    if (isResolvable(base)) {
        int index = toIndex(null, property);
        result = index < 0 || index >= Array.getLength(base) ? null : Array.get(base, index);
        context.setPropertyResolved(true);
    }
    return result;
}
java
@Override
public String getDbName(final FinderObject owner) {
    final Root root = (Root) owner;
    return root.getTheDbName();
}
java
public void invoke(HttpServletRequest req, HttpServletResponse rsp, Object root, String url) throws IOException, ServletException {
    RequestImpl sreq = new RequestImpl(this, req, new ArrayList<AncestorImpl>(), new TokenList(url));
    RequestImpl oreq = CURRENT_REQUEST.get();
    CURRENT_REQUEST.set(sreq);

    ResponseImpl srsp = new ResponseImpl(this, rsp);
    ResponseImpl orsp = CURRENT_RESPONSE.get();
    CURRENT_RESPONSE.set(srsp);

    try {
        invoke(sreq, srsp, root);
    } finally {
        CURRENT_REQUEST.set(oreq);
        CURRENT_RESPONSE.set(orsp);
    }
}
java
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case BpsimPackage.ERLANG_DISTRIBUTION_TYPE__K:
            return isSetK();
        case BpsimPackage.ERLANG_DISTRIBUTION_TYPE__MEAN:
            return isSetMean();
    }
    return super.eIsSet(featureID);
}
python
def weather_history_at_id(self, id, start=None, end=None):
    """
    Queries the OWM Weather API for weather history for the specified
    city ID. A list of *Weather* objects is returned. It is possible to
    query for weather history in a closed time period, whose boundaries
    can be passed as optional parameters.

    :param id: the city ID
    :type id: int
    :param start: the object conveying the time value for the start query
        boundary (defaults to ``None``)
    :type start: int, ``datetime.datetime`` or ISO8601-formatted string
    :param end: the object conveying the time value for the end query
        boundary (defaults to ``None``)
    :type end: int, ``datetime.datetime`` or ISO8601-formatted string
    :returns: a list of *Weather* instances or ``None`` if history data is
        not available for the specified location
    :raises: *ParseResponseException* when OWM Weather API responses' data
        cannot be parsed, *APICallException* when OWM Weather API can not be
        reached, *ValueError* if the time boundaries are not in the correct
        chronological order, if one of the time boundaries is not ``None``
        and the other is or if one or both of the time boundaries are after
        the current time

    """
    assert type(id) is int, "'id' must be an int"
    if id < 0:
        raise ValueError("'id' value must be greater than 0")
    params = {'id': id, 'lang': self._language}
    if start is None and end is None:
        pass
    elif start is not None and end is not None:
        unix_start = timeformatutils.to_UNIXtime(start)
        unix_end = timeformatutils.to_UNIXtime(end)
        if unix_start >= unix_end:
            raise ValueError("Error: the start time boundary must " \
                             "precede the end time!")
        current_time = time()
        if unix_start > current_time:
            raise ValueError("Error: the start time boundary must " \
                             "precede the current time!")
        params['start'] = str(unix_start)
        params['end'] = str(unix_end)
    else:
        raise ValueError("Error: one of the time boundaries is None, " \
                         "while the other is not!")
    uri = http_client.HttpClient.to_url(CITY_WEATHER_HISTORY_URL,
                                        self._API_key,
                                        self._subscription_type,
                                        self._use_ssl)
    _, json_data = self._wapi.cacheable_get_json(uri, params=params)
    return self._parsers['weather_history'].parse_JSON(json_data)
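A usage sketch, assuming `owm` is a pyowm-style client object exposing this method; the city ID below is London's, and the dates are arbitrary:

from datetime import datetime

history = owm.weather_history_at_id(
    2643743,                      # city ID for London
    start=datetime(2024, 1, 1),
    end=datetime(2024, 1, 2),
)
if history is not None:
    for weather in history:
        print(weather)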
python
def estimate_rotation(self, camera, ransac_threshold=7.0):
    """Estimate the rotation between first and last frame

    It uses RANSAC where the error metric is the reprojection error of the points
    from the last frame to the first frame.

    Parameters
    ----------
    camera : CameraModel
        Camera model
    ransac_threshold : float
        Distance threshold (in pixels) for a reprojected point to count as an inlier
    """
    if self.axis is None:
        x = self.points[:, 0, :].T
        y = self.points[:, -1, :].T
        inlier_ratio = 0.5
        R, t, dist, idx = rotations.estimate_rotation_procrustes_ransac(
            x, y, camera, ransac_threshold,
            inlier_ratio=inlier_ratio, do_translation=False)
        if R is not None:
            self.axis, self.angle = rotations.rotation_matrix_to_axis_angle(R)
            if self.angle < 0:  # Constrain to positive angles
                self.angle = -self.angle
                self.axis = -self.axis
            self.inliers = idx

    return self.axis is not None
java
void sixLock(Object obj, long txNum) {
    Object anchor = getAnchor(obj);
    txWaitMap.put(txNum, anchor);
    synchronized (anchor) {
        Lockers lks = prepareLockers(obj);

        if (hasSixLock(lks, txNum))
            return;

        try {
            long timestamp = System.currentTimeMillis();
            while (!sixLockable(lks, txNum) && !waitingTooLong(timestamp)) {
                avoidDeadlock(lks, txNum, SIX_LOCK);
                lks.requestSet.add(txNum);

                anchor.wait(MAX_TIME);
                lks.requestSet.remove(txNum);
            }
            if (!sixLockable(lks, txNum))
                throw new LockAbortException();
            lks.sixLocker = txNum;
            getObjectSet(txNum).add(obj);
        } catch (InterruptedException e) {
            throw new LockAbortException();
        }
    }
    txWaitMap.remove(txNum);
}