Dataset schema: language (string, 2 classes: java, python); func_code_string (string, lengths 63 to 466k characters)
java
public Object fromStream(final byte[] iStream) throws IOException {
    if (iStream == null || iStream.length == 0)
        // NULL VALUE
        return null;

    final int classNameSize = OBinaryProtocol.bytes2int(iStream);
    if (classNameSize <= 0)
        OLogManager.instance().error(this, "Class signature not found in ANY element: " + Arrays.toString(iStream),
            OSerializationException.class);

    final String className = OBinaryProtocol.bytes2string(iStream, 4, classNameSize);

    try {
        final OSerializableStream stream;
        // CHECK FOR ALIASES
        if (className.equalsIgnoreCase("q"))
            // QUERY
            stream = new OSQLSynchQuery<Object>();
        else if (className.equalsIgnoreCase("c"))
            // SQL COMMAND
            stream = new OCommandSQL();
        else if (className.equalsIgnoreCase("s"))
            // SCRIPT COMMAND
            stream = new OCommandScript();
        else
            // CREATE THE OBJECT BY INVOKING THE EMPTY CONSTRUCTOR
            stream = (OSerializableStream) Class.forName(className).newInstance();

        return stream.fromStream(OArrays.copyOfRange(iStream, 4 + classNameSize, iStream.length));
    } catch (Exception e) {
        OLogManager.instance().error(this, "Error on unmarshalling content. Class: " + className, e, OSerializationException.class);
    }
    return null;
}
python
def install_requirements(self, requires):
    """ Install the listed requirements """
    # Temporarily install dependencies required by setup.py before trying to import them.
    sys.path[0:0] = ['setup-requires']
    pkg_resources.working_set.add_entry('setup-requires')

    to_install = list(self.missing_requirements(requires))
    if to_install:
        cmd = [sys.executable, "-m", "pip", "install", "-t", "setup-requires"] + to_install
        subprocess.call(cmd)
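A standalone sketch of the same bootstrap trick, assuming only pip and the standard library; the package name six and the setup-requires directory are illustrative, not part of the original source:

import subprocess
import sys

def bootstrap_deps(packages, target='setup-requires'):
    # Install into a local directory, then put it at the front of
    # sys.path so the freshly installed packages are importable.
    subprocess.call([sys.executable, '-m', 'pip', 'install', '-t', target] + list(packages))
    sys.path.insert(0, target)

bootstrap_deps(['six'])
import six  # now resolvable from ./setup-requires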
java
private IntIntPair totalPivotSearch(int k) {
    double max = 0;
    int i, j, pivotRow = k, pivotCol = k;
    double absValue;
    for (i = k; i < coeff.length; i++) {
        for (j = k; j < coeff[0].length; j++) {
            // compute absolute value of current entry in absValue
            absValue = Math.abs(coeff[row[i]][col[j]]);
            // compare absValue with the max value found so far
            if (max < absValue) {
                // remember new value and position
                max = absValue;
                pivotRow = i;
                pivotCol = j;
            } // end if
        } // end for j
    } // end for i
    return new IntIntPair(pivotRow, pivotCol);
}
java
@Override
public void mapTileRequestExpiredTile(MapTileRequestState pState, Drawable pDrawable) {
    putTileIntoCache(pState.getMapTile(), pDrawable, ExpirableBitmapDrawable.getState(pDrawable));

    // tell our caller we've finished and it should update its view
    for (final Handler handler : mTileRequestCompleteHandlers) {
        if (handler != null) {
            handler.sendEmptyMessage(MAPTILE_SUCCESS_ID);
        }
    }

    if (Configuration.getInstance().isDebugTileProviders()) {
        Log.d(IMapView.LOGTAG, "MapTileProviderBase.mapTileRequestExpiredTile(): " + MapTileIndex.toString(pState.getMapTile()));
    }
}
java
public java.util.List<NodeGroupConfiguration> getNodeGroupConfiguration() {
    if (nodeGroupConfiguration == null) {
        nodeGroupConfiguration = new com.amazonaws.internal.SdkInternalList<NodeGroupConfiguration>();
    }
    return nodeGroupConfiguration;
}
java
public static boolean parseBoolean(String value, boolean defaultValue) {
    if (value == null)
        return defaultValue;
    value = value.trim();
    // any of the following values will be accepted
    final String[] acceptedTrue = new String[] { "yes", "true", "t", "y", "1" };
    final String[] acceptedFalse = new String[] { "no", "false", "f", "n", "0" };
    for (String possible : acceptedTrue) {
        if (possible.equalsIgnoreCase(value))
            return true;
    }
    for (String possible : acceptedFalse) {
        if (possible.equalsIgnoreCase(value))
            return false;
    }
    throw new IllegalArgumentException("Unrecognized boolean value '" + value + "'");
}
python
def is_prime(n):
    """Return True if n is prime, False otherwise.

    We use the Miller-Rabin test, as given in Menezes et al. p. 138.
    This test is not exact: there are composite values n for which
    it returns True.

    In testing the odd numbers from 10000001 to 19999999,
    about 66 composites got past the first test,
    5 got past the second test, and none got past the third.
    Since factors of 2, 3, 5, 7, and 11 were detected during
    preliminary screening, the number of numbers tested by
    Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
    = 4.57 million.
    """

    # (This is used to study the risk of false positives:)
    global miller_rabin_test_count
    miller_rabin_test_count = 0

    if n <= smallprimes[-1]:
        if n in smallprimes:
            return True
        else:
            return False

    if gcd(n, 2 * 3 * 5 * 7 * 11) != 1:
        return False

    # Choose a number of iterations sufficient to reduce the
    # probability of accepting a composite below 2**-80
    # (from Menezes et al. Table 4.4):
    t = 40
    n_bits = 1 + int(math.log(n, 2))
    for k, tt in ((100, 27), (150, 18), (200, 15), (250, 12), (300, 9),
                  (350, 8), (400, 7), (450, 6), (550, 5), (650, 4),
                  (850, 3), (1300, 2)):
        if n_bits < k:
            break
        t = tt

    # Run the test t times:
    s = 0
    r = n - 1
    while (r % 2) == 0:
        s = s + 1
        r = r // 2
    for i in range(t):
        a = smallprimes[i]
        y = modular_exp(a, r, n)
        if y != 1 and y != n - 1:
            j = 1
            while j <= s - 1 and y != n - 1:
                y = modular_exp(y, 2, n)
                if y == 1:
                    miller_rabin_test_count = i + 1
                    return False
                j = j + 1
            if y != n - 1:
                miller_rabin_test_count = i + 1
                return False
    return True
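A quick harness for is_prime above. In the original module, smallprimes, gcd, and modular_exp are globals; the stand-ins below are assumptions (any sufficiently long list of small primes works, and three-argument pow() is modular exponentiation):

import math
from math import gcd

smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53,
               59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113,
               127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181,
               191, 193, 197, 199]  # stand-in: at least t=40 entries needed
modular_exp = pow  # pow(a, r, n) computes a**r mod n

print(is_prime(100003))  # first prime above 10**5 -> True
print(is_prime(100001))  # 11 * 9091, caught by the gcd screen -> False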
java
private static BigInteger divideAndRound(BigInteger bdividend, long ldivisor, int roundingMode) {
    boolean isRemainderZero; // record remainder is zero or not
    int qsign; // quotient sign
    long r = 0; // store quotient & remainder in long
    MutableBigInteger mq = null; // store quotient
    // Descend into mutables for faster remainder checks
    MutableBigInteger mdividend = new MutableBigInteger(bdividend.mag);
    mq = new MutableBigInteger();
    r = mdividend.divide(ldivisor, mq);
    isRemainderZero = (r == 0);
    qsign = (ldivisor < 0) ? -bdividend.signum : bdividend.signum;
    if (!isRemainderZero) {
        if (needIncrement(ldivisor, roundingMode, qsign, mq, r)) {
            mq.add(MutableBigInteger.ONE);
        }
    }
    return mq.toBigInteger(qsign);
}
java
public EventJournalConfig findCacheEventJournalConfig(String name) {
    name = getBaseName(name);
    final EventJournalConfig config = lookupByPattern(configPatternMatcher, cacheEventJournalConfigs, name);
    if (config != null) {
        return config.getAsReadOnly();
    }
    return getCacheEventJournalConfig("default").getAsReadOnly();
}
java
public static boolean validateTokenRequest(TokenRequestDto tokenRequestDto, OAuthApplicationDto oAuthApplicationDto) {
    // basic check
    try {
        String decodedRedirectUri = java.net.URLDecoder.decode(tokenRequestDto.getRedirectUri(), "UTF-8");
        if (StringUtils.isNotBlank(oAuthApplicationDto.getRedirectUri())
            && oAuthApplicationDto.getRedirectUri().equals(decodedRedirectUri)) {
            if (StringUtils.isNotBlank(tokenRequestDto.getGrantType())) {
                if (OAuthFields.AUTHORIZATION_CODE.equals(tokenRequestDto.getGrantType())) {
                    return true;
                } else {
                    _logger.info("Grant Type '" + tokenRequestDto.getGrantType() + "' is not supported");
                    throw new OAuthException(ResponseCodes.GRANT_TYPE_NOT_SUPPORTED, HttpResponseStatus.BAD_REQUEST);
                }
            } else {
                _logger.info("Grant Type is missing");
                throw new OAuthException(ResponseCodes.INVALID_OR_MISSING_GRANT_TYPE, HttpResponseStatus.BAD_REQUEST);
            }
        } else {
            _logger.info("Request Redirect URI '" + tokenRequestDto.getRedirectUri() + "' mismatch");
            throw new OAuthException(ResponseCodes.INVALID_OR_MISSING_REDIRECT_URI, HttpResponseStatus.BAD_REQUEST);
        }
    } catch (UnsupportedEncodingException e) {
        _logger.info("Request Redirect URI '" + tokenRequestDto.getRedirectUri() + "' could not be decoded");
        throw new OAuthException(ResponseCodes.INVALID_OR_MISSING_REDIRECT_URI, HttpResponseStatus.BAD_REQUEST);
    }
}
java
public static ObjectName makeObjectName(String domainName, String beanName, String[] folderNameStrings) {
    return makeObjectName(domainName, beanName, null, folderNameStrings);
}
java
public static Map<String, String> parseUrl(String url) {
    Map<String, String> result = new HashMap<>(8);
    result.put(PROTOCOL_KEY, ValueConsts.EMPTY_STRING);
    result.put(HOST_KEY, ValueConsts.EMPTY_STRING);
    result.put(PATH_KEY, ValueConsts.EMPTY_STRING);
    if (Checker.isNotEmpty(url)) {
        String[] pros;
        final String protocolSplit = "://";
        if (url.contains(protocolSplit)) {
            pros = url.split(protocolSplit);
        } else {
            pros = new String[]{"", url};
        }
        // set protocol, host and path
        result.put(PROTOCOL_KEY, pros[0]);
        if (pros.length < ValueConsts.TWO_INT) {
            pros = new String[]{pros[0], ValueConsts.EMPTY_STRING};
        }
        if (pros[1].contains(ValueConsts.SPLASH_STRING)) {
            int lastIndex = pros[1].lastIndexOf(ValueConsts.SPLASH_STRING);
            if (pros[1].startsWith(ValueConsts.SPLASH_STRING)) {
                // file protocol
                result.put(PATH_KEY, pros[1].substring(1));
            } else if (pros[1].contains(ValueConsts.SPLASH_STRING)) {
                int index = pros[1].indexOf("/");
                // set host
                result.put(HOST_KEY, pros[1].substring(0, index));
                // set query parameters
                if (pros[1].contains(ValueConsts.QUESTION_MARK)) {
                    lastIndex = pros[1].indexOf(ValueConsts.QUESTION_MARK);
                    String[] params = pros[1].split("\\?")[1].split("&");
                    for (String param : params) {
                        String[] kv = param.split("=");
                        result.put(kv[0], kv[1]);
                    }
                }
                // set path
                if (lastIndex > index) {
                    String path = pros[1].substring(index + 1, lastIndex);
                    path = path.endsWith(ValueConsts.SPLASH_STRING) ? path.substring(0, path.length() - 1) : path;
                    result.put(PATH_KEY, path);
                }
            } else {
                result.put(HOST_KEY, pros[1]);
            }
        } else {
            result.put(HOST_KEY, pros[1]);
        }
    }
    return result;
}
java
public int doEndTag() throws JspException {
    ServletRequest req = pageContext.getRequest();
    String fmtErrors = null;
    if (_formatterError) {
        fmtErrors = getErrorsFromBody();
    }
    if (hasErrors())
        return reportAndExit(EVAL_PAGE);

    _state.disabled = isDisabled();

    // Create hidden field for state tracking
    ByRef ref = new ByRef();
    nameHtmlControl(_state, ref);

    if (hasErrors())
        return reportAndExit(EVAL_PAGE);

    // Only write out the hidden field if the select is not
    // disabled. If it is disabled, then nothing will be posted
    // back from this.
    WriteRenderAppender writer = new WriteRenderAppender(pageContext);
    if (!_state.disabled) {
        _hiddenState.clear();
        String hiddenParamName = null;
        hiddenParamName = _state.name + OLDVALUE_SUFFIX;
        _hiddenState.name = hiddenParamName;
        _hiddenState.value = "true";
        TagRenderingBase hiddenTag = TagRenderingBase.Factory.getRendering(TagRenderingBase.INPUT_HIDDEN_TAG, req);
        hiddenTag.doStartTag(writer, _hiddenState);
        hiddenTag.doEndTag(writer);
        write("\n");
    }

    // Render any formatting errors that may have occurred.
    if (fmtErrors != null)
        write(fmtErrors);

    TagRenderingBase br = TagRenderingBase.Factory.getRendering(TagRenderingBase.SELECT_TAG, req);
    br.doStartTag(writer, _state);

    // Render the content of the body, these would be the options
    if (_saveBody != null) {
        write(_saveBody);
    }

    // if we are repeating then the body contained the options so we can exit here
    if (_repeater) {
        if (hasErrors())
            return reportAndExit(EVAL_PAGE);
        br.doEndTag(writer);
        if (!ref.isNull())
            write((String) ref.getRef());
        // Continue processing this page
        localRelease();
        return EVAL_PAGE;
    }

    // All of the code below will pass through the optionsDataSource, the dataSource and defaultValue and
    // create a full Select.
    if (_dynamicOptions != null) {
        if (_dynamicOptions instanceof Map) {
            Map dynamicOptionsMap = (Map) _dynamicOptions;
            Iterator keyIterator = dynamicOptionsMap.keySet().iterator();
            while (keyIterator.hasNext()) {
                Object optionValue = keyIterator.next();
                String optionDisplay = null;
                if (dynamicOptionsMap.get(optionValue) != null) {
                    optionDisplay = dynamicOptionsMap.get(optionValue).toString();
                }
                if (optionValue != null) {
                    addOption(req, optionValue.toString(), optionDisplay);
                }
            }
        } else if (_dynamicOptions instanceof Iterator) {
            Iterator dynamicOptionsIterator = (Iterator) evaluateOptionsDataSource();
            while (dynamicOptionsIterator.hasNext()) {
                Object o = dynamicOptionsIterator.next();
                if (o != null) {
                    String optionValue = o.toString();
                    addOption(req, optionValue, optionValue);
                }
            }
        }
    }

    // add the value from the DataSource and Default value
    addDatasourceIfNeeded(req);
    addDefaultsIfNeeded(req);

    if (_nullable && !isMultiple()) {
        String txt = (_nullableOptionText != null) ? _nullableOptionText : "";
        addOption(req, NULL_VALUE, txt);
    }

    br.doEndTag(writer);
    if (!ref.isNull())
        write((String) ref.getRef());

    // Continue processing this page
    localRelease();
    return EVAL_PAGE;
}
java
protected String[] prepareStringArray(Object value, String propertyName, Class<?> propertyType, FormMappingOption option) {
    // not null (empty if null)
    final String[] result;
    if (value != null && value instanceof String[]) {
        result = (String[]) value;
    } else {
        result = value != null ? new String[] { value.toString() } : EMPTY_STRING_ARRAY;
    }
    return filterIfSimpleText(result, option, propertyName, propertyType);
}
python
def datetime_parsing(text, base_date=datetime.now()):
    """
    Extract datetime objects from a string of text.
    """
    matches = []
    found_array = []

    # Find the position in the string
    for expression, function in regex:
        for match in expression.finditer(text):
            matches.append((match.group(), function(match, base_date), match.span()))

    # Wrap the matched text with TAG element to prevent nested selections
    for match, value, spans in matches:
        subn = re.subn(
            '(?!<TAG[^>]*?>)' + match + '(?![^<]*?</TAG>)',
            '<TAG>' + match + '</TAG>',
            text
        )
        text = subn[0]
        is_substituted = subn[1]
        if is_substituted != 0:
            found_array.append((match, value, spans))

    # To preserve order of the match, sort based on the start position
    return sorted(found_array, key=lambda match: match and match[2][0])
java
@Override
public ModelMapper getObject() throws Exception {
    // instantiates new instance of mapper
    final ModelMapper modelMapper = new ModelMapper();
    // configures the mapper
    configure(modelMapper);
    // returns the mapper
    return modelMapper;
}
java
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/metrics/schemarecords")
@Description("Discover metric schema records. If type is specified, then records of that particular type are returned.")
public List<? extends Object> getRecords(@Context HttpServletRequest req,
    @DefaultValue("*") @QueryParam("namespace") final String namespaceRegex,
    @QueryParam("scope") final String scopeRegex,
    @QueryParam("metric") final String metricRegex,
    @DefaultValue("*") @QueryParam("tagk") final String tagkRegex,
    @DefaultValue("*") @QueryParam("tagv") final String tagvRegex,
    @DefaultValue("50") @QueryParam("limit") final int limit,
    @DefaultValue("1") @QueryParam("page") final int page,
    @QueryParam("type") String type) {

    if (type == null) {
        MetricSchemaRecordQuery query = new MetricSchemaRecordQueryBuilder().namespace(namespaceRegex)
            .scope(scopeRegex)
            .metric(metricRegex)
            .tagKey(tagkRegex)
            .tagValue(tagvRegex)
            .limit(limit * page)
            .build();

        List<MetricSchemaRecord> schemaRecords = _discoveryService.filterRecords(query);
        if (_isFormat(req)) {
            List<String> records = new ArrayList<>(schemaRecords.size());
            _formatToString(schemaRecords, records);
            return _getSubList(records, limit * (page - 1), records.size());
        }
        return _getSubList(schemaRecords, limit * (page - 1), schemaRecords.size());
    } else {
        MetricSchemaRecordQuery query = new MetricSchemaRecordQueryBuilder().namespace(namespaceRegex)
            .scope(scopeRegex)
            .metric(metricRegex)
            .tagKey(tagkRegex)
            .tagValue(tagvRegex)
            .limit(limit * page)
            .build();

        List<MetricSchemaRecord> records = _discoveryService.getUniqueRecords(query, RecordType.fromName(type));
        return _getValueForType(_getSubList(records, limit * (page - 1), records.size()), RecordType.fromName(type));
    }
}
java
@Override
public void messageReceived(NextFilter nextFilter, S session, AmqpSecureMessage message) throws Exception {
    nextFilter.messageReceived(session, message);
}
java
public Parameters append(String key, String value) {
    if (nameValuePairs == null) {
        nameValuePairs = new ArrayList<NameValuePair>();
    }
    nameValuePairs.add(new BasicNameValuePair(key, value));
    return this;
}
java
public EnquireLinkResp enquireLinkResp(byte[] b) {
    EnquireLinkResp resp = new EnquireLinkResp();
    assignHeader(resp, b);
    return resp;
}
java
@Override
public DeleteLogSubscriptionResult deleteLogSubscription(DeleteLogSubscriptionRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteLogSubscription(request);
}
java
public static CouchbaseCluster create(final List<String> nodes) {
    return new CouchbaseCluster(DefaultCouchbaseEnvironment.create(), ConnectionString.fromHostnames(nodes), false);
}
python
def not_right(self, num):
    """
    WITH SLICES BEING FLAT, WE NEED A SIMPLE WAY TO SLICE FROM THE LEFT [:-num:]
    """
    if num is None:
        return FlatList([_get_list(self)[:-1:]])
    if num <= 0:
        return FlatList.EMPTY
    return FlatList(_get_list(self)[:-num:])
python
def compute(self, inputVector, learn, activeArray, burstingColumns, predictedCells):
    """
    This is the primary public method of the class. This function takes an
    input vector and outputs the indices of the active columns.

    New parameters defined here:
    ----------------------------
    @param inputVector: The active cells from a Temporal Memory
    @param learn: A Boolean specifying whether learning will be performed
    @param activeArray: An array representing the active columns produced by
        this method
    @param burstingColumns: A numpy array with numColumns elements having
        binary values with 1 representing a currently bursting column in
        Temporal Memory.
    @param predictedCells: A numpy array with numInputs elements. A 1
        indicates that this cell switched from predicted state in the
        previous time step to active state in the current timestep
    """
    assert (numpy.size(inputVector) == self._numInputs)
    assert (numpy.size(predictedCells) == self._numInputs)

    self._updateBookeepingVars(learn)
    inputVector = numpy.array(inputVector, dtype=realDType)
    predictedCells = numpy.array(predictedCells, dtype=realDType)
    inputVector.reshape(-1)

    if self._spVerbosity > 3:
        print " Input bits: ", inputVector.nonzero()[0]
        print " predictedCells: ", predictedCells.nonzero()[0]

    # Phase 1: Calculate overlap scores
    # The overlap score has 4 components:
    # (1) Overlap between correctly predicted input cells and pooling TP cells
    # (2) Overlap between active input cells and all TP cells
    #     (like standard SP calculation)
    # (3) Overlap between correctly predicted input cells and all TP cells
    # (4) Overlap from bursting columns in TM and all TP cells

    # 1) Calculate pooling overlap
    if self.usePoolingRule:
        overlapsPooling = self._calculatePoolingActivity(predictedCells, learn)
        if self._spVerbosity > 4:
            print "usePoolingRule: Overlaps after step 1:"
            print "   ", overlapsPooling
    else:
        overlapsPooling = 0

    # 2) Calculate overlap between active input cells and connected synapses
    overlapsAllInput = self._calculateOverlap(inputVector)

    # 3) overlap with predicted inputs
    # NEW: Isn't this redundant with 1) and 2)? This looks at connected
    # synapses only. If 1) is called with learning=False connected synapses
    # are used and it is somewhat redundant, although there is a boosting
    # factor in 1) which makes 1's effect stronger. If 1) is called with
    # learning=True it's less redundant.
    overlapsPredicted = self._calculateOverlap(predictedCells)

    if self._spVerbosity > 4:
        print "Overlaps with all inputs:"
        print " Number of On Bits: ", inputVector.sum()
        print "   ", overlapsAllInput
        print "Overlaps with predicted inputs:"
        print "   ", overlapsPredicted

    # 4) consider bursting columns
    if self.useBurstingRule:
        overlapsBursting = self._calculateBurstingColumns(burstingColumns)
        if self._spVerbosity > 4:
            print "Overlaps with bursting inputs:"
            print "   ", overlapsBursting
    else:
        overlapsBursting = 0

    overlaps = (overlapsPooling + overlapsPredicted + overlapsAllInput + overlapsBursting)

    # Apply boosting when learning is on
    if learn:
        boostedOverlaps = self._boostFactors * overlaps
        if self._spVerbosity > 4:
            print "Overlaps after boosting:"
            print "   ", boostedOverlaps
    else:
        boostedOverlaps = overlaps

    # Apply inhibition to determine the winning columns
    activeColumns = self._inhibitColumns(boostedOverlaps)

    if learn:
        self._adaptSynapses(inputVector, activeColumns, predictedCells)
        self._updateDutyCycles(overlaps, activeColumns)
        self._bumpUpWeakColumns()
        self._updateBoostFactors()
        if self._isUpdateRound():
            self._updateInhibitionRadius()
            self._updateMinDutyCycles()

    activeArray.fill(0)
    if activeColumns.size > 0:
        activeArray[activeColumns] = 1

    # update pooling state of cells
    activeColumnIndices = numpy.where(overlapsPredicted[activeColumns] > 0)[0]
    activeColWithPredictedInput = activeColumns[activeColumnIndices]
    numUnPredictedInput = float(len(burstingColumns.nonzero()[0]))
    numPredictedInput = float(len(predictedCells))
    fracUnPredicted = numUnPredictedInput / (numUnPredictedInput + numPredictedInput)
    self._updatePoolingState(activeColWithPredictedInput, fracUnPredicted)

    if self._spVerbosity > 2:
        activeColumns.sort()
        print "The following columns are finally active:"
        print "   ", activeColumns
        print "The following columns are in pooling state:"
        print "   ", self._poolingActivation.nonzero()[0]
        # print "Inputs to pooling columns"
        # print "   ", overlapsPredicted[self._poolingColumns]

    return activeColumns
python
def bind(self, instance, auto=False):
    """
    Bind deps to instance

    :param instance: the instance to inject dependencies into
    :param auto: automatically refresh the bindings when the DI container
        is updated with something new
    :return: the bound instance
    """
    methods = [
        (m, cls.__dict__[m])
        for cls in inspect.getmro(type(instance))
        for m in cls.__dict__
        if inspect.isfunction(cls.__dict__[m])
    ]
    try:
        deps_of_endpoints = [(method_ptr, self.entrypoint_deps(method_ptr))
                             for (method_name, method_ptr) in methods]
        for (method_ptr, method_deps) in deps_of_endpoints:
            if len(method_deps) > 0:
                method_ptr(instance, **method_deps)
    except KeyError:
        pass

    if auto and instance not in self.current_scope.get_auto_bind_list():
        self.current_scope.auto_bind(instance)
    return instance
java
public static HashedWheelTimer newTimer(final int ticks, final int ticks_per_wheel, final String name) {
    class TimerThreadNamer implements ThreadNameDeterminer {
        @Override
        public String determineThreadName(String currentThreadName, String proposedThreadName) throws Exception {
            return "OpenTSDB Timer " + name + " #" + TIMER_ID.incrementAndGet();
        }
    }
    return new HashedWheelTimer(Executors.defaultThreadFactory(), new TimerThreadNamer(), ticks, MILLISECONDS, ticks_per_wheel);
}
java
public Collection<Retrospective> getRetrospectives(RetrospectiveFilter filter) {
    filter = (filter == null) ? new RetrospectiveFilter() : filter;
    filter.team.clear();
    filter.team.add(this);
    return getInstance().get().retrospectives(filter);
}
java
private void activityResumed(Activity activity) {
    getConfigLogger().verbose(getAccountId(), "App in foreground");
    checkTimeoutSession();
    if (!isAppLaunchPushed()) {
        pushAppLaunchedEvent();
        onTokenRefresh();
    }
    if (!inCurrentSession()) {
        //onTokenRefresh();
        pushInitialEventsAsync();
    }
    checkPendingInAppNotifications(activity);
}
python
def timeid(self, data: ['SASdata', str] = None,
           by: str = None,
           id: str = None,
           out: [str, 'SASdata'] = None,
           procopts: str = None,
           stmtpassthrough: str = None,
           **kwargs: dict) -> 'SASresults':
    """
    Python method to call the TIMEID procedure

    Documentation link:
    http://support.sas.com/documentation/cdl//en/etsug/68148/HTML/default/viewer.htm#etsug_timeid_syntax.htm

    :param data: SASdata object or string. This parameter is required.
    :param by: The by variable can only be a string type.
    :param id: The id variable can only be a string type.
    :param out: The out variable can be a string or SASdata type.
    :param procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
    :param stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
    :return: SAS Result Object
    """
python
def train_model(params: Params,
                serialization_dir: str,
                file_friendly_logging: bool = False,
                recover: bool = False,
                force: bool = False,
                cache_directory: str = None,
                cache_prefix: str = None) -> Model:
    """
    Trains the model specified in the given :class:`Params` object, using the data
    and training parameters also specified in that object, and saves the results
    in ``serialization_dir``.

    Parameters
    ----------
    params : ``Params``
        A parameter object specifying an AllenNLP Experiment.
    serialization_dir : ``str``
        The directory in which to save results and logs.
    file_friendly_logging : ``bool``, optional (default=False)
        If ``True``, we add newlines to tqdm output, even on an interactive terminal,
        and we slow down tqdm's output to only once every 10 seconds.
    recover : ``bool``, optional (default=False)
        If ``True``, we will try to recover a training run from an existing serialization
        directory. This is only intended for use when something actually crashed during
        the middle of a run. For continuing training a model on new data, see the
        ``fine-tune`` command.
    force : ``bool``, optional (default=False)
        If ``True``, we will overwrite the serialization directory if it already exists.
    cache_directory : ``str``, optional
        For caching data pre-processing. See :func:`allennlp.training.util.datasets_from_params`.
    cache_prefix : ``str``, optional
        For caching data pre-processing. See :func:`allennlp.training.util.datasets_from_params`.

    Returns
    -------
    best_model: ``Model``
        The model with the best epoch weights.
    """
    prepare_environment(params)
    create_serialization_dir(params, serialization_dir, recover, force)
    stdout_handler = prepare_global_logging(serialization_dir, file_friendly_logging)

    cuda_device = params.params.get('trainer').get('cuda_device', -1)
    check_for_gpu(cuda_device)

    params.to_file(os.path.join(serialization_dir, CONFIG_NAME))

    evaluate_on_test = params.pop_bool("evaluate_on_test", False)

    trainer_type = params.get("trainer", {}).get("type", "default")

    if trainer_type == "default":
        # Special logic to instantiate backward-compatible trainer.
        pieces = TrainerPieces.from_params(params,  # pylint: disable=no-member
                                           serialization_dir,
                                           recover,
                                           cache_directory,
                                           cache_prefix)
        trainer = Trainer.from_params(
                model=pieces.model,
                serialization_dir=serialization_dir,
                iterator=pieces.iterator,
                train_data=pieces.train_dataset,
                validation_data=pieces.validation_dataset,
                params=pieces.params,
                validation_iterator=pieces.validation_iterator)

        evaluation_iterator = pieces.validation_iterator or pieces.iterator
        evaluation_dataset = pieces.test_dataset
    else:
        trainer = TrainerBase.from_params(params, serialization_dir, recover)
        # TODO(joelgrus): handle evaluation in the general case
        evaluation_iterator = evaluation_dataset = None

    params.assert_empty('base train command')

    try:
        metrics = trainer.train()
    except KeyboardInterrupt:
        # if we have completed an epoch, try to create a model archive.
        if os.path.exists(os.path.join(serialization_dir, _DEFAULT_WEIGHTS)):
            logging.info("Training interrupted by the user. Attempting to create "
                         "a model archive using the current best epoch weights.")
            archive_model(serialization_dir, files_to_archive=params.files_to_archive)
        raise

    # Evaluate
    if evaluation_dataset and evaluate_on_test:
        logger.info("The model will be evaluated using the best epoch weights.")
        test_metrics = evaluate(trainer.model, evaluation_dataset, evaluation_iterator,
                                cuda_device=trainer._cuda_devices[0],  # pylint: disable=protected-access
                                # TODO(brendanr): Pass in an arg following Joel's trainer refactor.
                                batch_weight_key="")

        for key, value in test_metrics.items():
            metrics["test_" + key] = value
    elif evaluation_dataset:
        logger.info("To evaluate on the test set after training, pass the "
                    "'evaluate_on_test' flag, or use the 'allennlp evaluate' command.")

    cleanup_global_logging(stdout_handler)

    # Now tar up results
    archive_model(serialization_dir, files_to_archive=params.files_to_archive)
    dump_metrics(os.path.join(serialization_dir, "metrics.json"), metrics, log=True)

    # We count on the trainer to have the model with best weights
    return trainer.model
python
def loadd(self, ava):
    """ expects a special set of keys """
    if "attributes" in ava:
        for key, val in ava["attributes"].items():
            self.attributes[key] = val

    try:
        self.tag = ava["tag"]
    except KeyError:
        if not self.tag:
            raise KeyError("ExtensionElement must have a tag")

    try:
        self.namespace = ava["namespace"]
    except KeyError:
        if not self.namespace:
            raise KeyError("ExtensionElement must belong to a namespace")

    try:
        self.text = ava["text"]
    except KeyError:
        pass

    if "children" in ava:
        for item in ava["children"]:
            self.children.append(ExtensionElement(item["tag"]).loadd(item))

    return self
python
def get_cv_idxs(n, cv_idx=0, val_pct=0.2, seed=42):
    """ Get a list of index values for Validation set from a dataset

    Arguments:
        n : int, Total number of elements in the data set.
        cv_idx : int, starting index [idx_start = cv_idx*int(val_pct*n)]
        val_pct : (int, float), validation set percentage
        seed : seed value for RandomState

    Returns:
        list of indexes
    """
    np.random.seed(seed)
    n_val = int(val_pct * n)
    idx_start = cv_idx * n_val
    idxs = np.random.permutation(n)
    return idxs[idx_start:idx_start + n_val]
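A quick usage sketch for get_cv_idxs above, splitting a hypothetical 1000-sample dataset into validation and training indices:

import numpy as np

val_idxs = get_cv_idxs(1000)  # 200 shuffled validation indices (20% of 1000)
trn_idxs = np.setdiff1d(np.arange(1000), val_idxs)  # remaining 800 for training
print(len(val_idxs), len(trn_idxs))  # 200 800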
python
def calc_sun_times(self):
    """
    Computes the times of sunrise, solar noon, and sunset for each day.
    """
    self.sun_times = melodist.util.get_sun_times(self.data_daily.index, self.lon, self.lat, self.timezone)
python
def reboot(self):
    """Reboots the device.

    Generally one should use this method to reboot the device instead of
    directly calling `adb.reboot`. Because this method gracefully handles
    the teardown and restoration of running services.

    This method is blocking and only returns when the reboot has completed
    and the services restored.

    Raises:
        Error: Waiting for completion timed out.
    """
    if self.is_bootloader:
        self.fastboot.reboot()
        return
    with self.handle_reboot():
        self.adb.reboot()
python
def job_disable(self, job_id):
    """
    Disable a job. Disabled jobs will not be executed.

    :param job_id: Job identifier to disable.
    :type job_id: :py:class:`uuid.UUID`
    """
    job_id = normalize_job_id(job_id)
    with self._job_lock:
        job_desc = self._jobs[job_id]
        job_desc['enabled'] = False
java
@SafeVarargs
private final <T> Set<T> join(Set<T>... sets) {
    Set<T> result = new HashSet<>();
    if (sets == null)
        return result;
    for (Set<T> set : sets) {
        if (set != null)
            result.addAll(set);
    }
    return result;
}
python
def get_resource_relationships_for_source_resource_on_date(self, source_resource_id, from_, to):
    """Pass through to provider ResourceRelationshipLookupSession.get_resource_relationships_for_source_resource_on_date"""
    # Implemented from azosid template for -
    # osid.relationship.RelationshipLookupSession.get_relationships_for_source_on_date_template
    if self._can('lookup'):
        return self._provider_session.get_resource_relationships_for_source_resource_on_date(source_resource_id, from_, to)
    self._check_lookup_conditions()  # raises PermissionDenied
    query = self._query_session.get_resource_relationship_query()
    query.match_source_id(source_resource_id, match=True)
    query.match_date(from_, to, match=True)
    return self._try_harder(query)
java
public <D, S> ForkedStream<D, S> forkStream(RecordStreamWithMetadata<D, S> inputStream,
    ForkOperator<S, D> forkOperator, WorkUnitState workUnitState) throws Exception {
    int branches = forkOperator.getBranches(workUnitState);
    // Set fork.branches explicitly here so the rest task flow can pick it up
    workUnitState.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, branches);

    forkOperator.init(workUnitState);
    List<Boolean> forkedSchemas = forkOperator.forkSchema(workUnitState, inputStream.getGlobalMetadata().getSchema());
    int activeForks = (int) forkedSchemas.stream().filter(b -> b).count();

    Preconditions.checkState(forkedSchemas.size() == branches,
        String.format("Number of forked schemas [%d] is not equal to number of branches [%d]", forkedSchemas.size(), branches));

    Flowable<RecordWithForkMap<D>> forkedStream = inputStream.getRecordStream().map(r -> {
        if (r instanceof RecordEnvelope) {
            RecordEnvelope<D> recordEnvelope = (RecordEnvelope<D>) r;
            return new RecordWithForkMap<>(recordEnvelope, forkOperator.forkDataRecord(workUnitState, recordEnvelope.getRecord()));
        } else if (r instanceof ControlMessage) {
            return new RecordWithForkMap<D>((ControlMessage<D>) r, branches);
        } else {
            throw new IllegalStateException("Expected RecordEnvelope or ControlMessage.");
        }
    });

    if (activeForks > 1) {
        forkedStream = forkedStream.share();
    }

    List<RecordStreamWithMetadata<D, S>> forkStreams = Lists.newArrayList();
    boolean mustCopy = mustCopy(forkedSchemas);
    for (int i = 0; i < forkedSchemas.size(); i++) {
        if (forkedSchemas.get(i)) {
            final int idx = i;
            Flowable<StreamEntity<D>> thisStream =
                forkedStream.filter(new ForkFilter<>(idx)).map(RecordWithForkMap::getRecordCopyIfNecessary);
            forkStreams.add(inputStream.withRecordStream(thisStream,
                mustCopy ? (GlobalMetadata<S>) CopyHelper.copy(inputStream.getGlobalMetadata())
                         : inputStream.getGlobalMetadata()));
        } else {
            forkStreams.add(null);
        }
    }

    return new ForkedStream<>(forkStreams);
}
python
def _get_connect_kwargs(self, host, port, user, password, database_url):
    """
    Get the params to pass to psycopg2.connect() based on passed-in vals
    from yaml settings file
    """
    if database_url:
        return {'dsn': database_url}

    if not host:
        raise CheckException("Please specify a PgBouncer host to connect to.")
    if not user:
        raise CheckException("Please specify a user to connect to PgBouncer as.")

    if host in ('localhost', '127.0.0.1') and password == '':
        # Use ident method
        return {'dsn': "user={} dbname={}".format(user, self.DB_NAME)}

    if port:
        return {'host': host, 'user': user, 'password': password,
                'database': self.DB_NAME, 'port': port}

    return {'host': host, 'user': user, 'password': password, 'database': self.DB_NAME}
java
private EvaluationResult callHelper(URI type, URI id, URI issuer, URI category, int adType) {
    if (finder != null) {
        return finder.findAttribute(type, id, issuer, category, this, adType);
    } else {
        logger.warn(NO_FINDER_MSG);
        return new EvaluationResult(BagAttribute.createEmptyBag(type));
    }
}
java
public static <I, D> List<Word<I>> suffixesForLocalOutput(Query<I, D> ceQuery, int localSuffixIdx, boolean allSuffixes) {
    if (localSuffixIdx == -1) {
        return Collections.emptyList();
    }
    Word<I> suffix = ceQuery.getInput().subWord(localSuffixIdx);
    if (!allSuffixes) {
        return Collections.singletonList(suffix);
    }
    return suffix.suffixes(false);
}
java
@Override
protected void updateStackingContexts() {
    super.updateStackingContexts();
    if (stackingParent != null) {
        if (formsStackingContext()) {
            // all the positioned boxes are considered as separate stacking contexts
            stackingParent.getStackingContext().registerChildContext(this);
            if (scontext != null) // clear this context if it exists (remove old children)
                scontext.clear();
        }
    }
}
python
def create(self, unique_name, domain_suffix=values.unset):
    """
    Create a new EnvironmentInstance

    :param unicode unique_name: The unique_name
    :param unicode domain_suffix: The domain_suffix

    :returns: Newly created EnvironmentInstance
    :rtype: twilio.rest.serverless.v1.service.environment.EnvironmentInstance
    """
    data = values.of({'UniqueName': unique_name, 'DomainSuffix': domain_suffix, })

    payload = self._version.create(
        'POST',
        self._uri,
        data=data,
    )

    return EnvironmentInstance(self._version, payload, service_sid=self._solution['service_sid'], )
java
public Collection<Plugin> removeUncheckedPlugins(Collection<String> uncheckedPlugins, Collection<Plugin> plugins)
    throws MojoExecutionException {
    if (uncheckedPlugins != null && !uncheckedPlugins.isEmpty()) {
        for (String pluginKey : uncheckedPlugins) {
            Plugin plugin = parsePluginString(pluginKey, "UncheckedPlugins");
            plugins.remove(plugin);
        }
    }
    return plugins;
}
java
public BaseMessageFilter setupRecordListener(JMessageListener listener, boolean bTrackMultipleRecords, boolean bAllowEchos, boolean bReceiveAllAdds) {
    BaseMessageFilter filter = null;
    MessageManager messageManager = null;
    if (listener == null)
        listener = this.getRecordOwner();
    if (listener != null)
        if (this.getRecordOwner() != null)
            if (this.getRecordOwner().getTask() != null)
                messageManager = ((Application) this.getRecordOwner().getTask().getApplication()).getMessageManager();
    if (messageManager != null) {
        Object source = null;
        if (bAllowEchos)
            source = DBConstants.BLANK; // Non-null object that will never be a source
        if (!bTrackMultipleRecords)
            filter = new RecordMessageFilter(this, source);
        else
            filter = new GridRecordMessageFilter(this, source, bReceiveAllAdds);
        filter.addMessageListener(listener);
        messageManager.addMessageFilter(filter);
    }
    return filter;
}
java
@NullSafe
public static boolean createFile(File path) {
    try {
        return (path != null && !path.isDirectory() && (path.isFile() || path.createNewFile()));
    } catch (IOException ignore) {
        return false;
    }
}
java
private static void validate(String name, Collection<Geometry> geometries, int extent) {
    if (name == null) {
        throw new IllegalArgumentException("layer name is null");
    }
    if (geometries == null) {
        throw new IllegalArgumentException("geometry collection is null");
    }
    if (extent <= 0) {
        throw new IllegalArgumentException("extent is less than or equal to 0");
    }
}
java
protected boolean jobTooOld(final JobInfo jobInfo, final JobDefinition jobDefinition) {
    final Optional<OffsetDateTime> stopped = jobInfo.getStopped();
    if (stopped.isPresent() && jobDefinition.maxAge().isPresent()) {
        final OffsetDateTime deadlineToRerun = stopped.get().plus(jobDefinition.maxAge().get());
        return deadlineToRerun.isBefore(now());
    }
    return false;
}
java
public final SortedSet<T> toSortedSet(final Comparator<? super T> comparator) {
    return ImmutableSortedSet.copyOf(comparator, toCollection(Lists.<T>newArrayListWithCapacity(256)));
}
java
private void updateFailedLoginAttempts(int numberOfAttempts) {
    UserSecret userSecret = getSecret();
    userSecret.setFailedLoginAttempts(numberOfAttempts);
    if (userSecret.getFailedLoginAttempts() >= MAX_FAILED_LOGIN_ATTEMPTS) {
        if (!(userSecret.getLastFailedAuthentication() != null
            && (Instant.now().toEpochMilli() < userSecret
                .getLastFailedAuthentication()
                .plus(Duration.ofSeconds(BLOCKED_USER_INTERVAL))
                .toEpochMilli()))) {
            userSecret.setFailedLoginAttempts(FAILED_LOGIN_ATTEMPT_ITERATION);
        }
    } else {
        userSecret.setLastFailedAuthentication(Instant.now());
    }
    runAsSystem(() -> dataService.update(USER_SECRET, userSecret));
}
java
private StringBuffer toQueryPrefix(String t) {
    StringBuffer sb = new StringBuffer(255);
    return sb.append(selstar).append(t).append(whereTrue);
}
java
public boolean isSameNode(Node other) {
    return (other instanceof NodeImpl)
        && (((NodeImpl) other).tree == tree)
        && (((NodeImpl) other).node == node);
}
python
def _graph_add_edge(self, src_block_id, dst_block_id, **kwargs):
    """
    Add an edge onto the graph.

    :param BlockID src_block_id: The block ID for source node.
    :param BlockID dst_block_id: The block ID for destination node.
    :param str jumpkind: The jumpkind of the edge.
    :param exit_stmt_idx: ID of the statement in the source IRSB where this
        edge is created from. 'default' refers to the default exit.
    :return: None
    """
    dst_node = self._graph_get_node(dst_block_id, terminator_for_nonexistent_node=True)

    if src_block_id is None:
        self.graph.add_node(dst_node)
    else:
        src_node = self._graph_get_node(src_block_id, terminator_for_nonexistent_node=True)
        self.graph.add_edge(src_node, dst_node, **kwargs)
java
private void checkC() throws DatatypeException, IOException {
    if (context.length() == 0) {
        appendToContext(current);
    }
    current = reader.read();
    appendToContext(current);
    skipSpaces();
    boolean expectNumber = true;
    for (;;) {
        switch (current) {
            default:
                if (expectNumber)
                    reportNonNumber('C', current);
                return;
            case '+': case '-': case '.':
            case '0': case '1': case '2': case '3': case '4':
            case '5': case '6': case '7': case '8': case '9':
                break;
        }
        checkArg('C', "x1 coordinate");
        skipCommaSpaces();
        checkArg('C', "y1 coordinate");
        skipCommaSpaces();
        checkArg('C', "x2 coordinate");
        skipCommaSpaces();
        checkArg('C', "y2 coordinate");
        skipCommaSpaces();
        checkArg('C', "x coordinate");
        skipCommaSpaces();
        checkArg('C', "y coordinate");
        expectNumber = skipCommaSpaces2();
    }
}
java
public static DocLink fragment(String fragment) {
    return new DocLink((String) null, (String) null, fragment);
}
python
def index(args):
    """
    %prog index database.fasta

    Wrapper for `gmap_build`. Same interface.
    """
    p = OptionParser(index.__doc__)
    p.add_option("--supercat", default=False, action="store_true",
                 help="Concatenate reference to speed up alignment")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    dbfile, = args
    check_index(dbfile, supercat=opts.supercat)
python
def _receive(self):
    """Receive any incoming socket data.

    If an error is thrown, handle it and return an empty string.

    :return: data_in
    :rtype: bytes
    """
    data_in = EMPTY_BUFFER
    try:
        data_in = self._read_from_socket()
    except socket.timeout:
        pass
    except (IOError, OSError) as why:
        if why.args[0] not in (EWOULDBLOCK, EAGAIN):
            self._exceptions.append(AMQPConnectionError(why))
            self._running.clear()
    return data_in
python
def __try_instantiate(self, component_context, instance):
    # type: (ComponentContext, object) -> bool
    """
    Instantiates a component, if all of its handlers are there.
    Returns False if a handler is missing.

    :param component_context: A ComponentContext bean
    :param instance: The component instance
    :return: True if the component has started, False if a handler is missing
    """
    with self.__instances_lock:
        # Extract information about the component
        factory_context = component_context.factory_context
        handlers_ids = factory_context.get_handlers_ids()
        name = component_context.name
        factory_name = factory_context.name

        try:
            # Get handlers
            handler_factories = self.__get_handler_factories(handlers_ids)
        except KeyError:
            # A handler is missing, stop here
            return False

        # Instantiate the handlers
        all_handlers = set()  # type: Set[Any]
        for handler_factory in handler_factories:
            handlers = handler_factory.get_handlers(component_context, instance)
            if handlers:
                all_handlers.update(handlers)

        # Prepare the stored instance
        stored_instance = StoredInstance(self, component_context, instance, all_handlers)

        # Manipulate the properties
        for handler in all_handlers:
            handler.manipulate(stored_instance, instance)

        # Store the instance
        self.__instances[name] = stored_instance

        # Start the manager
        stored_instance.start()

        # Notify listeners now that every thing is ready to run
        self._fire_ipopo_event(constants.IPopoEvent.INSTANTIATED, factory_name, name)

        # Try to validate it
        stored_instance.update_bindings()
        stored_instance.check_lifecycle()
        return True
python
def command(self, name, *args):
    """Execute a raw command."""
    args = [name.encode('utf-8')] + [
        (arg if type(arg) is bytes else str(arg).encode('utf-8'))
        for arg in args if arg is not None
    ] + [None]
    _mpv_command(self.handle, (c_char_p * len(args))(*args))
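A hedged usage sketch for the method above, assuming a player instance of the class that defines command() (as in python-mpv-style bindings); the file name is hypothetical:

# player is assumed to be an instance exposing command() above
player.command('loadfile', 'movie.mp4')  # mpv's raw loadfile command
player.command('seek', 30, 'relative')   # arguments are encoded to bytes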
python
def translation(language):
    """
    Return a translation object in the default 'django' domain.
    """
    global _translations
    if language not in _translations:
        _translations[language] = Translations(language)
    return _translations[language]
java
public synchronized GoogleCloudStorageItemInfo putItem(GoogleCloudStorageItemInfo item) {
    StorageResourceId id = item.getResourceId();
    PrefixKey key = new PrefixKey(id.getBucketName(), id.getObjectName());
    CacheValue<GoogleCloudStorageItemInfo> value = new CacheValue<GoogleCloudStorageItemInfo>(item, ticker.read());
    CacheValue<GoogleCloudStorageItemInfo> oldValue = itemMap.put(key, value);
    if (oldValue == null) {
        return null;
    }
    if (isExpired(oldValue)) {
        cleanupLists(key);
        return null;
    }
    return oldValue.getValue();
}
python
def submit_bsub_job(command, job_id=None, dependent_id=None, memory=None, requeue_code=None, logfile=None):
    """ construct a bsub job submission command

    Args:
        command: list of strings that form a unix command
        job_id: string for job ID for submission
        dependent_id: job ID, or list of job IDs which the current command
            needs to have finished before the current command will start.
            Note that the list can be empty, in which case there are no
            dependencies.
        memory: minimum memory requirements (in megabytes)

    Returns:
        nothing
    """
    if job_id is None:
        job_id = get_random_string()

    job = "-J \"{0}\"".format(job_id)

    mem = ""
    if memory is not None:
        mem = "-R 'select[mem>{0}] rusage[mem={0}]' -M {0}".format(memory)

    requeue = ""
    if requeue_code is not None:
        requeue = "-Q 'EXCLUDE({0})'".format(requeue_code)

    dependent = ""
    if dependent_id is not None:
        if type(dependent_id) == list:
            dependent_id = " && ".join(dependent_id)
        dependent = "-w '{0}'".format(dependent_id)

    log = "bjob_output.txt"
    if logfile is not None:
        log = logfile

    preamble = ["bsub", job, dependent, requeue, "-q", "normal", "-o", log, mem]
    command = ["bash", "-c", "\""] + command + ["\""]

    command = " ".join(preamble + command)
    subprocess.call(command, shell=True)
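A hedged usage sketch for the submitter above; the job name, log files, and dependency expression are illustrative, and it only runs on a host with LSF's bsub installed:

import subprocess  # required by submit_bsub_job itself

# Run a command with 500 MB of memory, logging to a custom file
submit_bsub_job(["echo", "hello"], job_id="demo_job", memory=500, logfile="demo.log")

# Chain a second job that waits for the first to finish
# ("done(NAME)" is standard LSF dependency syntax)
submit_bsub_job(["echo", "done"], dependent_id=["done(demo_job)"], logfile="demo2.log")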
java
public void marshall(DescribeActivationsRequest describeActivationsRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeActivationsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeActivationsRequest.getFilters(), FILTERS_BINDING);
        protocolMarshaller.marshall(describeActivationsRequest.getMaxResults(), MAXRESULTS_BINDING);
        protocolMarshaller.marshall(describeActivationsRequest.getNextToken(), NEXTTOKEN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public static void main(String[] args) {
    try {
        h2omapper m = new h2omapper();
        m.run(null);
    } catch (Exception e) {
        System.out.println(e);
    }
}
java
public Set<Key> keySet(int access) {
    Set<Key> set = new LinkedHashSet<Key>();
    Map.Entry<Key, Member> entry;
    Iterator<Entry<Key, Member>> it = _data.entrySet().iterator();
    while (it.hasNext()) {
        entry = it.next();
        if (entry.getValue().getAccess() <= access)
            set.add(entry.getKey());
    }
    return set;
}
python
def get_stats(self, nid=None):
    """Get statistics for class

    :type nid: str
    :param nid: This is the ID of the network to get stats from.
        This is optional and only to override the existing `network_id`
        entered when creating the class
    """
    r = self.request(
        api_type="main",
        method="network.get_stats",
        nid=nid,
    )
    return self._handle_error(r, "Could not retrieve stats for class.")
python
def channel_to_id(slack, channel):
    """ Surely there's a better way to do this... """
    channels = slack.api_call('channels.list').get('channels') or []
    groups = slack.api_call('groups.list').get('groups') or []
    if not channels and not groups:
        raise RuntimeError("Couldn't get channels and groups.")
    ids = [c['id'] for c in channels + groups if c['name'] == channel]
    if not ids:
        raise ValueError(f"Couldn't find #{channel}")
    return ids[0]
java
public static boolean copyFile(final File source, final File destination, final Charset sourceEncoding,
    final Charset destinationEncoding, final boolean lastModified) throws IOException {
    if (source.isDirectory()) {
        throw new IllegalArgumentException("The source File " + source.getName() + " should be a File but is a Directory.");
    }
    if (destination.isDirectory()) {
        throw new IllegalArgumentException("The destination File " + destination.getName() + " should be a File but is a Directory.");
    }
    boolean copied = false;
    try (InputStream inputStream = StreamExtensions.getInputStream(source);
        InputStreamReader reader = sourceEncoding != null
            ? new InputStreamReader(inputStream, sourceEncoding)
            : new InputStreamReader(inputStream);
        OutputStream outputStream = StreamExtensions.getOutputStream(destination, !destination.exists());
        BufferedOutputStream bos = new BufferedOutputStream(outputStream);
        OutputStreamWriter writer = destinationEncoding != null
            ? new OutputStreamWriter(bos, destinationEncoding)
            : new OutputStreamWriter(bos)) {
        int tmp;
        final char[] charArray = new char[FileConst.BLOCKSIZE];
        while ((tmp = reader.read(charArray)) > 0) {
            writer.write(charArray, 0, tmp);
        }
        copied = true;
    } catch (final IOException e) {
        throw e;
    }
    if (lastModified) {
        destination.setLastModified(source.lastModified());
    }
    return copied;
}
java
public static dnsview_gslbservice_binding[] get(nitro_service service, String viewname) throws Exception {
    dnsview_gslbservice_binding obj = new dnsview_gslbservice_binding();
    obj.set_viewname(viewname);
    dnsview_gslbservice_binding response[] = (dnsview_gslbservice_binding[]) obj.get_resources(service);
    return response;
}
java
public static <T extends TypeDefinition> ElementMatcher.Junction<T> noneOf(Type... value) {
    return noneOf(new TypeList.Generic.ForLoadedTypes(value));
}
python
def stratHeun(f, G, y0, tspan, dW=None):
    """Use the Stratonovich Heun algorithm to integrate the Stratonovich equation
    dy = f(y,t)dt + G(y,t) \circ dW(t)

    where y is the d-dimensional state vector, f is a vector-valued function,
    G is a d x m matrix-valued function giving the noise coefficients and
    dW(t) = (dW_1, dW_2, ... dW_m) is a vector of independent Wiener increments

    Args:
      f: callable(y, t) returning (d,) array
         Vector-valued function to define the deterministic part of the system
      G: callable(y, t) returning (d,m) array
         Matrix-valued function to define the noise coefficients of the system
      y0: array of shape (d,) giving the initial state vector y(t==0)
      tspan (array): The sequence of time points for which to solve for y.
        These must be equally spaced, e.g. np.arange(0,10,0.005)
        tspan[0] is the initial time corresponding to the initial state y0.
      dW: optional array of shape (len(tspan)-1, d). This is for advanced use,
        if you want to use a specific realization of the d independent Wiener
        processes. If not provided Wiener increments will be generated randomly

    Returns:
      y: array, with shape (len(tspan), len(y0))
         With the initial value y0 in the first row

    Raises:
      SDEValueError

    See also:
      W. Rumelin (1982) Numerical Treatment of Stochastic Differential Equations
      R. Mannella (2002) Integration of Stochastic Differential Equations on a Computer
      K. Burrage, P. M. Burrage and T. Tian (2004) Numerical methods for strong
        solutions of stochastic differential equations: an overview
    """
    (d, m, f, G, y0, tspan, dW, __) = _check_args(f, G, y0, tspan, dW, None)
    N = len(tspan)
    h = (tspan[N-1] - tspan[0])/(N - 1)
    # allocate space for result
    y = np.zeros((N, d), dtype=type(y0[0]))
    if dW is None:
        # pre-generate Wiener increments (for m independent Wiener processes):
        dW = deltaW(N - 1, m, h)
    y[0] = y0
    for n in range(0, N-1):
        tn = tspan[n]
        tnp1 = tspan[n+1]
        yn = y[n]
        dWn = dW[n,:]
        fn = f(yn, tn)
        Gn = G(yn, tn)
        ybar = yn + fn*h + Gn.dot(dWn)
        fnbar = f(ybar, tnp1)
        Gnbar = G(ybar, tnp1)
        y[n+1] = yn + 0.5*(fn + fnbar)*h + 0.5*(Gn + Gnbar).dot(dWn)
    return y
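A usage sketch integrating a scalar Ornstein-Uhlenbeck process with the integrator above; this assumes the module's helpers (_check_args, deltaW) are available as in the original source, and the coefficients are illustrative:

import numpy as np

def f(y, t):
    return -1.0 * y           # drift: mean-reverting pull toward 0

def G(y, t):
    return np.array([[0.5]])  # constant 1x1 diffusion matrix

y0 = np.array([1.0])
tspan = np.arange(0.0, 10.0, 0.005)
y = stratHeun(f, G, y0, tspan)
print(y.shape)                # (2000, 1): one row per time point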
java
protected static List<SpecNode> getAllSpecNodes(final ContentSpec contentSpec) {
    final List<SpecNode> specNodes = new ArrayList<SpecNode>();

    for (final Node childNode : contentSpec.getNodes()) {
        if (childNode instanceof SpecNode) {
            specNodes.add((SpecNode) childNode);
        } else if (childNode instanceof KeyValueNode && ((KeyValueNode) childNode).getValue() instanceof SpecNode) {
            specNodes.add((SpecNode) ((KeyValueNode) childNode).getValue());
        }
        if (childNode instanceof Level) {
            specNodes.addAll(getAllSpecNodes((Level) childNode));
        }
    }
    specNodes.addAll(getAllSpecNodes(contentSpec.getBaseLevel()));
    return specNodes;
}
java
@Override
public PackingPlan repack(PackingPlan currentPackingPlan, Map<String, Integer> componentChanges) throws PackingException {
    int initialNumContainer = TopologyUtils.getNumContainers(topology);
    int initialNumInstance = TopologyUtils.getTotalInstance(topology);
    double initialNumInstancePerContainer = (double) initialNumInstance / initialNumContainer;

    Map<String, Integer> newComponentParallelism = getNewComponentParallelism(currentPackingPlan, componentChanges);
    int newNumInstance = TopologyUtils.getTotalInstance(newComponentParallelism);
    int newNumContainer = (int) Math.ceil(newNumInstance / initialNumInstancePerContainer);
    return packInternal(newNumContainer, newComponentParallelism);
}
java
@Override
public MapReadResult readLabels(Tile upperLeft, Tile lowerRight) {
    return readMapData(upperLeft, lowerRight, Selector.LABELS);
}
java
public void setOptions(String[][] options) throws Fault {
    LinkedHashSet<String[]> customTagStrs = new LinkedHashSet<>();

    // Some options, specifically -link and -linkoffline, require that
    // the output directory has already been created: so do that first.
    for (String[] os : options) {
        String opt = StringUtils.toLowerCase(os[0]);
        if (opt.equals("-d")) {
            destDirName = addTrailingFileSep(os[1]);
            docFileDestDirName = destDirName;
            ensureOutputDirExists();
            break;
        }
    }

    for (String[] os : options) {
        String opt = StringUtils.toLowerCase(os[0]);
        if (opt.equals("-docfilessubdirs")) {
            copydocfilesubdirs = true;
        } else if (opt.equals("-docencoding")) {
            docencoding = os[1];
        } else if (opt.equals("-encoding")) {
            encoding = os[1];
        } else if (opt.equals("-author")) {
            showauthor = true;
        } else if (opt.equals("-javafx")) {
            javafx = true;
        } else if (opt.equals("-nosince")) {
            nosince = true;
        } else if (opt.equals("-version")) {
            showversion = true;
        } else if (opt.equals("-nodeprecated")) {
            nodeprecated = true;
        } else if (opt.equals("-excludedocfilessubdir")) {
            addToSet(excludedDocFileDirs, os[1]);
        } else if (opt.equals("-noqualifier")) {
            addToSet(excludedQualifiers, os[1]);
        } else if (opt.equals("-linksource")) {
            linksource = true;
        } else if (opt.equals("-sourcetab")) {
            linksource = true;
            try {
                setTabWidth(Integer.parseInt(os[1]));
            } catch (NumberFormatException e) {
                // Set to -1 so that warning will be printed
                // to indicate what is valid argument.
                sourcetab = -1;
            }
            if (sourcetab <= 0) {
                message.warning("doclet.sourcetab_warning");
                setTabWidth(DocletConstants.DEFAULT_TAB_STOP_LENGTH);
            }
        } else if (opt.equals("-notimestamp")) {
            notimestamp = true;
        } else if (opt.equals("-nocomment")) {
            nocomment = true;
        } else if (opt.equals("-tag") || opt.equals("-taglet")) {
            customTagStrs.add(os);
        } else if (opt.equals("-tagletpath")) {
            tagletpath = os[1];
        } else if (opt.equals("-keywords")) {
            keywords = true;
        } else if (opt.equals("-serialwarn")) {
            serialwarn = true;
        } else if (opt.equals("-group")) {
            group.checkPackageGroups(os[1], os[2]);
        } else if (opt.equals("-link")) {
            String url = os[1];
            extern.link(url, url, root, false);
        } else if (opt.equals("-linkoffline")) {
            String url = os[1];
            String pkglisturl = os[2];
            extern.link(url, pkglisturl, root, true);
        } else if (opt.equals("-xdaccessinternalapi")) {
            exportInternalAPI = true;
        }
    }
    if (docencoding == null) {
        docencoding = encoding;
    }

    classDocCatalog = new ClassDocCatalog(root.specifiedClasses(), this);
    initTagletManager(customTagStrs);
}
python
def get_tokens(self, auth, username=None):
    """
    Returns the tokens owned by the specified user. If no user is specified,
    uses the user authenticated by ``auth``.

    :param auth.Authentication auth: authentication for user to retrieve.
        Must be a username-password authentication, due to a restriction of
        the Gogs API
    :param str username: username of owner of tokens
    :return: list of tokens
    :rtype: List[Token]
    :raises NetworkFailure: if there is an error communicating with the server
    :raises ApiFailure: if the request cannot be serviced
    """
    if username is None:
        username = self.authenticated_user(auth).username
    response = self.get("/users/{u}/tokens".format(u=username), auth=auth)
    return [Token.from_json(o) for o in response.json()]
java
public void parse(final InsertStatement insertStatement) {
    if (!lexerEngine.skipIfEqual(getCustomizedInsertKeywords())) {
        return;
    }
    lexerEngine.accept(DefaultKeyword.DUPLICATE);
    lexerEngine.accept(DefaultKeyword.KEY);
    lexerEngine.accept(DefaultKeyword.UPDATE);
    do {
        Column column = new Column(
            SQLUtil.getExactlyValue(lexerEngine.getCurrentToken().getLiterals()), insertStatement.getTables().getSingleTableName());
        if (shardingRule.isShardingColumn(column.getName(), column.getTableName())) {
            throw new SQLParsingException(
                "INSERT INTO .... ON DUPLICATE KEY UPDATE can not support on sharding column, token is '%s', literals is '%s'.",
                lexerEngine.getCurrentToken().getType(), lexerEngine.getCurrentToken().getLiterals());
        }
        basicExpressionParser.parse(insertStatement);
        lexerEngine.accept(Symbol.EQ);
        if (lexerEngine.skipIfEqual(DefaultKeyword.VALUES)) {
            lexerEngine.accept(Symbol.LEFT_PAREN);
            basicExpressionParser.parse(insertStatement);
            lexerEngine.accept(Symbol.RIGHT_PAREN);
        } else {
            lexerEngine.nextToken();
        }
    } while (lexerEngine.skipIfEqual(Symbol.COMMA));
}
python
def parse(datetime_str, timezone=None, isofirst=True, dayfirst=True, yearfirst=True):
    """
    Parses a datetime string and returns a `Delorean` object.

    :param datetime_str: The string to be interpreted into a `Delorean` object.
    :param timezone: Pass this parameter and the returned Delorean object will
        be normalized to this timezone. Any offsets passed as part of
        datetime_str will be ignored.
    :param isofirst: try to parse string as date in ISO format before
        everything else.
    :param dayfirst: Whether to interpret the first value in an ambiguous
        3-integer date (ex. 01/05/09) as the day (True) or month (False).
        If yearfirst is set to True, this distinguishes between YDM and YMD.
    :param yearfirst: Whether to interpret the first value in an ambiguous
        3-integer date (ex. 01/05/09) as the year. If True, the first number
        is taken to be the year, otherwise the last number is taken to be
        the year.

    .. testsetup::

        from delorean import Delorean
        from delorean import parse

    .. doctest::

        >>> parse('2015-01-01 00:01:02')
        Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='UTC')

    If a fixed offset is provided in the datetime_str, it will be parsed and
    the returned `Delorean` object will store a `pytz.FixedOffset` as its
    timezone.

    .. doctest::

        >>> parse('2015-01-01 00:01:02 -0800')
        Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone=pytz.FixedOffset(-480))

    If the timezone argument is supplied, the returned Delorean object will be
    in the timezone supplied. Any offsets in the datetime_str will be ignored.

    .. doctest::

        >>> parse('2015-01-01 00:01:02 -0500', timezone='US/Pacific')
        Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='US/Pacific')

    If an unambiguous timezone is detected in the datetime string, a Delorean
    object with that datetime and timezone will be returned.

    .. doctest::

        >>> parse('2015-01-01 00:01:02 PST')
        Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='America/Los_Angeles')

    However if the provided timezone is ambiguous, parse will ignore the
    timezone and return a `Delorean` object in UTC time.

        >>> parse('2015-01-01 00:01:02 EST')
        Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='UTC')
    """
    # parse string to datetime object
    dt = None
    if isofirst:
        try:
            dt = isocapture(datetime_str)
        except Exception:
            pass
    if dt is None:
        dt = capture(datetime_str, dayfirst=dayfirst, yearfirst=yearfirst)

    if timezone:
        dt = dt.replace(tzinfo=None)
        do = Delorean(datetime=dt, timezone=timezone)
    elif dt.tzinfo is None:
        # assuming datetime object passed in is UTC
        do = Delorean(datetime=dt, timezone='UTC')
    elif isinstance(dt.tzinfo, tzoffset):
        utcoffset = dt.tzinfo.utcoffset(None)
        total_seconds = (
            (utcoffset.microseconds +
             (utcoffset.seconds + utcoffset.days * 24 * 3600) * 10**6) / 10**6)
        tz = pytz.FixedOffset(total_seconds / 60)
        dt = dt.replace(tzinfo=None)
        do = Delorean(dt, timezone=tz)
    elif isinstance(dt.tzinfo, tzlocal):
        tz = get_localzone()
        dt = dt.replace(tzinfo=None)
        do = Delorean(dt, timezone=tz)
    else:
        dt = pytz.utc.normalize(dt)
        # making dt naive so we can pass it to Delorean
        dt = dt.replace(tzinfo=None)
        # if parse string has tzinfo we return a normalized UTC
        # delorean object that represents the time.
        do = Delorean(datetime=dt, timezone='UTC')

    return do
java
public Event withAttributes(java.util.Map<String, String> attributes) { setAttributes(attributes); return this; }
java
public List<Integer> locationsToReadForDecode(List<Integer> erasedLocations)
    throws TooManyErasedLocations {
    List<Integer> locationsToRead = new ArrayList<Integer>(stripeSize());
    int limit = stripeSize() + paritySize();
    // Loop through all possible locations in the stripe.
    for (int loc = limit - 1; loc >= 0; loc--) {
      // Use the location only if it has not been erased.
      if (erasedLocations.indexOf(loc) == -1) {
        locationsToRead.add(loc);
        if (stripeSize() == locationsToRead.size()) {
          break;
        }
      }
    }
    // If we are not able to fill up the locationsToRead list,
    // we did not find enough good locations. Throw TooManyErasedLocations.
    if (locationsToRead.size() != stripeSize()) {
      String locationsStr = "";
      for (Integer erasedLocation : erasedLocations) {
        locationsStr += " " + erasedLocation;
      }
      throw new TooManyErasedLocations("Locations " + locationsStr);
    }
    return locationsToRead;
  }
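The selection policy (prefer the highest-numbered surviving locations, stop once a full stripe is gathered) is small enough to sketch standalone; the sizes below are arbitrary stand-ins rather than Hadoop defaults:
python
# Sketch of the decode-location selection for stripe_size=10, parity_size=4.
def locations_to_read(erased, stripe_size=10, parity_size=4):
    chosen = []
    # Scan from the highest location down, skipping erased ones.
    for loc in range(stripe_size + parity_size - 1, -1, -1):
        if loc not in erased:
            chosen.append(loc)
            if len(chosen) == stripe_size:
                return chosen
    raise ValueError("too many erased locations: %s" % sorted(erased))

print(locations_to_read({0, 3}))  # [13, 12, 11, 10, 9, 8, 7, 6, 5, 4]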
java
@GET @Path(WEBUI_CONFIG) @ReturnType("alluxio.wire.MasterWebUIConfiguration") public Response getWebUIConfiguration() { return RestUtils.call(() -> { MasterWebUIConfiguration response = new MasterWebUIConfiguration(); response.setWhitelist(mFileSystemMaster.getWhiteList()); TreeSet<Triple<String, String, String>> sortedProperties = new TreeSet<>(); Set<String> alluxioConfExcludes = Sets.newHashSet(PropertyKey.MASTER_WHITELIST.toString()); for (ConfigProperty configProperty : mMetaMaster .getConfiguration(GetConfigurationPOptions.newBuilder().setRawValue(true).build())) { String confName = configProperty.getName(); if (!alluxioConfExcludes.contains(confName)) { sortedProperties.add(new ImmutableTriple<>(confName, ConfigurationUtils.valueAsString(configProperty.getValue()), configProperty.getSource())); } } response.setConfiguration(sortedProperties); return response; }, ServerConfiguration.global()); }
python
def _cnf(lexer, varname): """Return a DIMACS CNF.""" _expect_token(lexer, {KW_p}) _expect_token(lexer, {KW_cnf}) nvars = _expect_token(lexer, {IntegerToken}).value nclauses = _expect_token(lexer, {IntegerToken}).value return _cnf_formula(lexer, varname, nvars, nclauses)
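For context, the preamble consumed here is the standard DIMACS header `p cnf <nvars> <nclauses>`. A lexer-free sketch of the same check (the token classes above belong to the surrounding module and are not reproduced):
python
# Sketch: validate and split a DIMACS CNF preamble such as "p cnf 3 2".
def parse_preamble(line):
    parts = line.split()
    if parts[:2] != ["p", "cnf"]:
        raise ValueError("expected 'p cnf' preamble, got: %r" % line)
    return int(parts[2]), int(parts[3])  # (nvars, nclauses)

print(parse_preamble("p cnf 3 2"))  # (3, 2)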
python
def directory(self, query, **kwargs):
    """Search for users or channels on the server."""
    if isinstance(query, dict):
        query = str(query).replace("'", '"')
    return self.__call_api_get('directory', query=query, kwargs=kwargs)
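A hedged usage sketch; the server URL and credentials are placeholders, and the query fields follow the Rocket.Chat REST directory endpoint:
python
from rocketchat_API.rocketchat import RocketChat

# Hypothetical usage: find channels whose name matches "general".
rocket = RocketChat('user', 'pass', server_url='https://demo.rocket.chat')
response = rocket.directory({'text': 'general', 'type': 'channels'})
for channel in response.json().get('result', []):
    print(channel.get('name'))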
python
def render_sparkline(self, **kwargs): """Render a sparkline""" spark_options = dict( width=200, height=50, show_dots=False, show_legend=False, show_x_labels=False, show_y_labels=False, spacing=0, margin=5, min_scale=1, max_scale=2, explicit_size=True, no_data_text='', js=(), classes=(_ellipsis, 'pygal-sparkline') ) spark_options.update(kwargs) return self.render(**spark_options)
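Since this method lives on pygal's chart base class, a short usage sketch follows directly (any kwargs passed in override the spark defaults above):
python
import pygal

# Render a tiny inline-friendly SVG; width=400 overrides the default of 200.
chart = pygal.Line()
chart.add('visits', [1, 3, 2, 5, 4, 6])
svg = chart.render_sparkline(width=400)
print(svg[:60])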
python
def list_changes(self): """ Return a list of modified records. This is only applicable for attached tables. Returns: A list of `(row_index, record)` tuples of modified records Raises: :class:`delphin.exceptions.ItsdbError`: when called on a detached table """ if not self.is_attached(): raise ItsdbError('changes are not tracked for detached tables.') return [(i, self[i]) for i, row in enumerate(self._records) if row is not None]
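A short usage sketch under the docstring's own precondition; `table` stands in for an attached table instance:
python
# Hypothetical usage: report rows modified since the table was attached.
for row_index, record in table.list_changes():
    print("row %d changed to %s" % (row_index, record))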
python
def move_folder(self, to_folder, *, update_parent_if_changed=True):
    """ Move this folder to another folder

    :param to_folder: the destination Folder/folder_id to move into
    :type to_folder: mailbox.Folder or str
    :param bool update_parent_if_changed: updates self.parent with
     the new parent Folder if changed
    :return: True if the folder was moved successfully, False otherwise
    :rtype: bool
    """
    to_folder_id = to_folder.folder_id if isinstance(to_folder,
                                                     Folder) else to_folder

    if self.root or not self.folder_id or not to_folder_id:
        return False

    url = self.build_url(
        self._endpoints.get('move_folder').format(id=self.folder_id))

    response = self.con.post(url,
                             data={self._cc('destinationId'): to_folder_id})
    if not response:
        return False

    folder = response.json()

    parent_id = folder.get(self._cc('parentFolderId'), None)

    if parent_id and self.parent_id:
        if parent_id != self.parent_id:
            self.parent_id = parent_id
            self.parent = (self.get_parent_folder()
                           if update_parent_if_changed else None)
    return True
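A hedged usage sketch in the style of the mailbox API this appears to belong to; the folder names and the `mailbox` object are invented:
python
# Hypothetical usage: move an "Invoices" folder under "Archive".
invoices = mailbox.get_folder(folder_name='Invoices')
archive = mailbox.get_folder(folder_name='Archive')
if invoices.move_folder(archive):
    print('moved; new parent id:', invoices.parent_id)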
java
public static String getWebViewLink(Drive drive, String fileId) { try { return getWebViewLink(getFile(drive, fileId)); } catch (IOException e) { logger.log(Level.SEVERE, String.format( "Failed to build Google Drive URL for file %s", fileId), e); } return null; }
python
def get_keys(self):
    ''' Read the key configuration and check whether it has changed,
        creating a default config file if none exists '''
    if not os.path.exists(PATH_CONFIG):
        with open(PATH_CONFIG, 'w') as F:
            F.write(CONFIG)
    else:
        config = ConfigParser.ConfigParser()
        with open(PATH_CONFIG, 'r') as cfgfile:
            config.readfp(cfgfile)
            options = config.options('key')
            for option in options:
                option = option.upper()
                if option in KEYS:
                    KEYS[option] = config.get('key', option)
    return KEYS
java
public synchronized void setInetAddrPort(InetAddrPort address) { if (_address != null && _address.equals(address)) return; if (isStarted()) log.warn(this + " is started"); _address = address; }
python
def add_edges_from(self, ebunch, attr_dict=None, **attr):
    """Add all the edges in ebunch.

    Parameters
    ----------
    ebunch : container of edges
        Each edge given in the container will be added to the
        graph. The edges can be:

            - 3-tuples (u,v,d) for an edge attribute dict d, or
            - 4-tuples (u,v,k,d) for an edge identified by key k

        Each edge must have a layers attribute (set of str).
    attr_dict : dictionary, optional (default= no attributes)
        Dictionary of edge attributes. Key/value pairs will
        update existing data associated with each edge.
    attr : keyword arguments, optional
        Edge data (or labels or objects) can be assigned using
        keyword arguments.

    See Also
    --------
    add_edge : add a single edge

    Notes
    -----
    Adding the same edge twice has no effect but any edge data
    will be updated when each duplicate edge is added.

    An edge can only be added if the source and target nodes are
    already present in the graph. This decision was taken to ensure
    that all edges are associated with at least one (meaningful)
    layer.

    Edge attributes specified in edges as a tuple (in ebunch) take
    precedence over attributes specified otherwise (in attr_dict or
    attr). Layers can only be added (via a 'layers' edge attribute),
    but not overwritten.

    Examples
    --------
    >>> d = DiscourseDocumentGraph()
    >>> d.add_node(1, {'int'})
    >>> d.add_node(2, {'int'})

    >>> d.add_edges_from([(1, 2, {'layers': {'int'}, 'weight': 23})])
    >>> d.add_edges_from([(1, 2, {'layers': {'int'}, 'weight': 42})])
    >>> d.edges(data=True) # multiple edges between the same nodes
    [(1, 2, {'layers': {'int'}, 'weight': 23}),
     (1, 2, {'layers': {'int'}, 'weight': 42})]

    Associate data to edges

    We update the existing edge (key=0) and overwrite its 'weight'
    value. Note that we can't overwrite the 'layers' value, though.
    Instead, they are added to the set of existing layers

    >>> d.add_edges_from([(1, 2, 0, {'layers':{'number'}, 'weight':66})])
    >>> d.edges(data=True)
    [(1, 2, {'layers': {'int', 'number'}, 'weight': 66}),
     (1, 2, {'layers': {'int'}, 'weight': 42})]
    """
    # set up attribute dict
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except AttributeError as e:
            raise AttributeError("The attr_dict argument must be "
                                 "a dictionary: {}".format(e))
    # process ebunch
    for e in ebunch:
        ne = len(e)
        if ne == 4:
            u, v, key, dd = e
        elif ne == 3:
            u, v, dd = e
            key = None
        else:
            raise AttributeError(
                "Edge tuple {0} must be a 3-tuple (u,v,attribs) "
                "or 4-tuple (u,v,key,attribs).".format(e))

        if 'layers' not in dd:
            dd['layers'] = {self.ns}
        layers = dd['layers']
        assert isinstance(layers, set), \
            "'layers' must be specified as a set of strings."
        assert all((isinstance(layer, str) for layer in layers)), \
            "All elements of the 'layers' set must be strings."
        additional_layers = attr_dict.get('layers', {})
        if additional_layers:
            assert isinstance(additional_layers, set), \
                "'layers' must be specified as a set of strings."
            assert all((isinstance(layer, str)
                        for layer in additional_layers)), \
                "'layers' set must only contain strings."
        # union of layers specified in ebunch tuples,
        # attr_dict and **attr
        new_layers = layers.union(additional_layers)

        if u in self.adj:  # edge with u as source already exists
            keydict = self.adj[u].get(v, {})
        else:
            keydict = {}
        if key is None:
            # find a unique integer key
            # other methods might be better here?
            key = len(keydict)
            while key in keydict:
                key += 1
        datadict = keydict.get(key, {})  # existing edge attribs
        existing_layers = datadict.get('layers', set())
        datadict.update(attr_dict)
        datadict.update(dd)
        updated_attrs = {k: v for (k, v) in datadict.items()
                         if k != 'layers'}

        all_layers = existing_layers.union(new_layers)
        # add_edge() checks if u and v exist, so we don't need to
        # check that here
        self.add_edge(u, v, layers=all_layers, key=key,
                      attr_dict=updated_attrs)
python
def run_prepare(*data):
    """
    Run seqcluster prepare to merge all samples into one file
    """
    out_dir = os.path.join(dd.get_work_dir(data[0][0]), "seqcluster", "prepare")
    out_dir = os.path.abspath(safe_makedir(out_dir))
    prepare_dir = os.path.join(out_dir, "prepare")
    tools = dd.get_expression_caller(data[0][0])
    if len(tools) == 0:
        logger.info("You didn't specify any other expression caller tool. "
                    "You can add to the YAML file: "
                    "expression_caller: [trna, seqcluster, mirdeep2]")
    fn = []
    for sample in data:
        name = sample[0]["rgnames"]['sample']
        fn.append("%s\t%s" % (sample[0]['collapse'], name))
    args = namedtuple('args', 'debug print_debug minc minl maxl out')
    args = args(False, False, 2, 17, 40, out_dir)
    ma_out = op.join(out_dir, "seqs.ma")
    seq_out = op.join(out_dir, "seqs.fastq")
    min_shared = max(int(len(fn) / 10.0), 1)
    if not file_exists(ma_out):
        seq_l, sample_l = prepare._read_fastq_files(fn, args)
        with file_transaction(ma_out) as ma_tx:
            with open(ma_tx, 'w') as ma_handle:
                with open(seq_out, 'w') as seq_handle:
                    logger.info("Preparing seqs.fastq with -minl 17 -maxl 40 -minc 2 --min_shared 0.1")
                    prepare._create_matrix_uniq_seq(sample_l, seq_l, ma_handle, seq_handle, min_shared)
    for sample in data:
        sample[0]["seqcluster_prepare_ma"] = ma_out
        sample[0]["seqcluster_prepare_fastq"] = seq_out
    return data
python
def save_and_close_enable(self): """Handle the data change event to enable the save and close button.""" if self.btn_save_and_close: self.btn_save_and_close.setEnabled(True) self.btn_save_and_close.setAutoDefault(True) self.btn_save_and_close.setDefault(True)
java
public ComputeNodeUpdateUserHeaders withLastModified(DateTime lastModified) { if (lastModified == null) { this.lastModified = null; } else { this.lastModified = new DateTimeRfc1123(lastModified); } return this; }
java
public synchronized void getUpdatedData() {
    if (!isUpdateDataNeeded()) {
        return;
    }

    this._projectResult = new ProjectLrResults();
    _workedBuilds = new ArrayList<Integer>();
    RunList<? extends Run> projectBuilds = currentProject.getBuilds();

    for (Run run : projectBuilds) {
        PerformanceJobReportAction performanceJobReportAction = run.getAction(PerformanceJobReportAction.class);
        if (performanceJobReportAction == null) {
            continue;
        }
        if (run.isBuilding()) {
            continue;
        }

        int runNumber = run.getNumber();
        if (_workedBuilds.contains(runNumber)) {
            continue;
        }

        _workedBuilds.add(runNumber);
        LrJobResults jobLrResult = performanceJobReportAction.getLrResultBuildDataset();

        // Get all scenario results that ran in this build and merge them into the project results.
        for (Map.Entry<String, JobLrScenarioResult> runResult : jobLrResult.getLrScenarioResults().entrySet()) {
            // Add the scenario the first time it appears in a build (scenarios may also be added at
            // different times).
            if (!_projectResult.getScenarioResults().containsKey(runResult.getKey())) {
                _projectResult.addScenario(new LrProjectScenarioResults(runResult.getKey()));
            }

            // Join the SLA rule results
            LrProjectScenarioResults lrProjectScenarioResults =
                    _projectResult.getScenarioResults().get(runResult.getKey());
            if (lrProjectScenarioResults.getBuildCount() > MAX_DISPLAY_BUILDS) {
                continue;
            }
            lrProjectScenarioResults.incBuildCount();
            JobLrScenarioResult scenarioRunResult = runResult.getValue();
            for (GoalResult goalResult : scenarioRunResult.scenarioSlaResults) {
                scenarioGoalResult(runNumber, lrProjectScenarioResults, goalResult);
            }

            // Join scenario stats
            joinSceanrioConnectionsStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinVUserScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinTransactionScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
            joinDurationStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
        }
    }
}
python
def _collapse_fastq(in_fn): """ collapse reads into unique sequences """ args = argparse.Namespace() args.fastq = in_fn args.minimum = 1 args.out = op.dirname(in_fn) return collapse_fastq(args)
java
private ProcessingItem createPi(Processor processor, int parallelism) { ProcessingItem pi = this.componentFactory.createPi(processor, parallelism); this.topology.addProcessingItem(pi, parallelism); return pi; }
java
public String getHostAndPort() { if (enablePlaintextPort) { if (plaintextPort < 0) { return HostAndPort.fromString(host).toString(); } else { return HostAndPort.fromParts(host, plaintextPort).toString(); } } return null; }
java
@Override protected String newAlgorithm() { if (getModel().getAlgorithm() == null) { getModel().setAlgorithm(AesAlgorithm.AES); } return getModel().getAlgorithm().getAlgorithm(); }
java
public void checkAuthentication() throws ConnectionException { IServices services = new Services(createMetaModel(), populateHeaders(new V1APIConnector(getApplicationURL() + "rest-1.v1/", username, password, ApiClientInternals.getProxyProvider(proxySettings)))); Oid loggedin; try { loggedin = services.getLoggedIn(); } catch (Exception ex) { throw new ConnectionException( "Unable to log in. Incorrect username or password.", ex); } if (loggedin.isNull()) { throw new ConnectionException( "Unable to retrieve logged in member."); } }
java
private static void processBiGram() {
    // Remove bigram entries that occur only once
    Iterator<String> iter = BIGRAM.keySet().iterator();
    while (iter.hasNext()) {
        String key = iter.next();
        if (BIGRAM.get(key) < 2) {
            iter.remove();
        }
    }
    try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("src/main/resources/bigram.txt"), "utf-8"))) {
        for (Entry<String, Integer> item : BIGRAM.entrySet()) {
            writer.write(item.getKey() + " " + item.getValue() + "\n");
        }
    } catch (Exception e) {
        LOGGER.info("Failed to save the bigram model:", e);
    }
}
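The same prune-then-persist pattern reads naturally as a few lines of Python; the toy counts mirror the threshold and "key count" output format of the Java above:
python
# Sketch: drop bigrams seen fewer than twice, then write "key count" lines.
bigram = {"we like": 5, "like to": 1, "to code": 3}
bigram = {k: v for k, v in bigram.items() if v >= 2}
with open("bigram.txt", "w", encoding="utf-8") as out:
    for key, count in bigram.items():
        out.write("%s %s\n" % (key, count))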