Dataset columns: language (stringclasses, 2 values: java / python) · func_code_string (stringlengths, 63 to 466k characters).
java
public void setMessage(final String message, final Serializable... args) {
    getOrCreateComponentModel().message = I18nUtilities.asMessage(message, args);
}
python
def crack_egg(egg, subjects=None, lists=None):
    '''
    Takes an egg and returns a subset of the subjects or lists

    Parameters
    ----------
    egg : Egg data object
        Egg that you want to crack

    subjects : list
        List of subject idxs

    lists : list
        List of lists idxs

    Returns
    ----------
    new_egg : Egg data object
        A sliced egg, good on a salad
    '''
    import pandas as pd  # needed for pd.IndexSlice below
    from .egg import Egg

    if hasattr(egg, 'features'):
        all_have_features = egg.features is not None
    else:
        all_have_features = False
    opts = {}

    if subjects is None:
        subjects = egg.pres.index.levels[0].values.tolist()
    elif type(subjects) is not list:
        subjects = [subjects]

    if lists is None:
        lists = egg.pres.index.levels[1].values.tolist()
    elif type(lists) is not list:
        lists = [lists]

    idx = pd.IndexSlice
    pres = egg.pres.loc[idx[subjects, lists], egg.pres.columns]
    rec = egg.rec.loc[idx[subjects, lists], egg.rec.columns]

    pres = [pres.loc[sub, :].values.tolist() for sub in subjects]
    rec = [rec.loc[sub, :].values.tolist() for sub in subjects]

    if all_have_features:
        features = egg.features.loc[idx[subjects, lists], egg.features.columns]
        opts['features'] = [features.loc[sub, :].values.tolist() for sub in subjects]

    return Egg(pres=pres, rec=rec, **opts)
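The slicing above leans entirely on pd.IndexSlice over a (subject, list) MultiIndex. A standalone toy illustration of that pattern (a plain DataFrame built here, not the project's Egg class):
python
import numpy as np
import pandas as pd

# Toy stand-in for egg.pres: 3 subjects x 2 lists, 2 words per presentation.
index = pd.MultiIndex.from_product([[0, 1, 2], [0, 1]], names=['subject', 'list'])
pres = pd.DataFrame(np.arange(12).reshape(6, 2), index=index, columns=['w0', 'w1'])

idx = pd.IndexSlice
subset = pres.loc[idx[[0, 1], [0]], pres.columns]  # subjects 0-1, list 0 only
print(subset)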
python
def _req(self, req):
    """Send command and wait for response.

    The command will be repeated 3 times at most in case data loss of serial port.

    Args:
        req (str): Command to send, please do not include new line in the end.

    Returns:
        [str]: The output lines
    """
    logger.debug('DUT> %s', req)
    self._log and self.pause()
    times = 3
    res = None
    while times:
        times = times - 1
        try:
            self._sendline(req)
            self._expect(req)
            line = None
            res = []
            while True:
                line = self._readline()
                logger.debug('Got line %s', line)
                if line == 'Done':
                    break
                if line:
                    res.append(line)
            break
        except Exception:  # a bare "except:" would also swallow KeyboardInterrupt
            logger.exception('Failed to send command')
            self.close()
            self._init()
    self._log and self.resume()
    return res
java
@Override
public String toStringValue() {
    GenderTypeEnumeration g = this.getGender();
    return g != null ? g.getValue() : null;
}
python
def update(self, reconfigure=False):
    """ update the environment """
    try:
        self.phase = PHASE.UPDATE
        self.logger.info("Updating environment %s..." % self.namespace)
        self.install_sandboxes()
        self.instantiate_features()
        # We don't grab inputs, only on install
        # updates inputs are grabbed on demand
        # self.grab_inputs(reconfigure=reconfigure)
        if reconfigure:
            self.grab_inputs(reconfigure=True)
        else:
            self._copy_source_to_target()
        self._specialize(reconfigure=reconfigure)
        for feature in self.features.run_order:
            self.run_action(feature, 'sync')
        self.inject_environment_config()
        self._finalize()
    except Exception:
        self.logger.debug("", exc_info=sys.exc_info())
        et, ei, tb = sys.exc_info()
        reraise(et, ei, tb)
java
protected void imageToOutput(double x, double y, Point2D_F64 pt) {
    pt.x = x / scale - tranX / scale;
    pt.y = y / scale - tranY / scale;
}
python
def get_stock_codes(self, cached=True, as_json=False):
    """
    returns a dictionary with key as stock code and value as stock name.
    It also implements cache functionality and hits the server only
    if user insists or cache is empty
    :return: dict
    """
    url = self.stocks_csv_url
    req = Request(url, None, self.headers)
    res_dict = {}
    if cached is not True or self.__CODECACHE__ is None:
        # raises HTTPError and URLError
        res = self.opener.open(req)
        if res is not None:
            # for py3 compat convert byte file like object to
            # string file like object
            res = byte_adaptor(res)
            for line in res.read().split('\n'):
                if line != '' and re.search(',', line):
                    (code, name) = line.split(',')[0:2]
                    res_dict[code] = name
                # else just skip the evaluation, line may not be a valid csv
        else:
            raise Exception('no response received')
        self.__CODECACHE__ = res_dict
    return self.render_response(self.__CODECACHE__, as_json)
python
def open(filename, frame='unspecified'):
    """Create a NormalCloud from data saved in a file.

    Parameters
    ----------
    filename : :obj:`str`
        The file to load data from.

    frame : :obj:`str`
        The frame to apply to the created NormalCloud.

    Returns
    -------
    :obj:`NormalCloud`
        A NormalCloud created from the data in the file.
    """
    data = BagOfPoints.load_data(filename)
    return NormalCloud(data, frame)
python
def delete(gandi, resource, background, force):
    """Delete one or more IPs (after detaching them from VMs if necessary).

    resource can be an ip id or ip.
    """
    resource = sorted(tuple(set(resource)))
    possible_resources = gandi.ip.resource_list()

    # check that each IP can be deleted
    for item in resource:
        if item not in possible_resources:
            gandi.echo('Sorry interface %s does not exist' % item)
            gandi.echo('Please use one of the following: %s' % possible_resources)
            return

    if not force:
        proceed = click.confirm('Are you sure you want to delete ip(s) %s' %
                                ', '.join(resource))
        if not proceed:
            return

    return gandi.ip.delete(resource, background, force)
java
protected <T extends OperationResponse> void completeOperation(OperationResult result, OperationResponse.Builder<?, T> builder, Throwable error, CompletableFuture<T> future) {
    if (result != null) {
        builder.withIndex(result.index());
        builder.withEventIndex(result.eventIndex());
        if (result.failed()) {
            error = result.error();
        }
    }

    if (error == null) {
        if (result == null) {
            future.complete(builder.withStatus(RaftResponse.Status.ERROR)
                .withError(RaftError.Type.PROTOCOL_ERROR)
                .build());
        } else {
            future.complete(builder.withStatus(RaftResponse.Status.OK)
                .withResult(result.result())
                .build());
        }
    } else if (error instanceof CompletionException && error.getCause() instanceof RaftException) {
        future.complete(builder.withStatus(RaftResponse.Status.ERROR)
            .withError(((RaftException) error.getCause()).getType(), error.getMessage())
            .build());
    } else if (error instanceof RaftException) {
        future.complete(builder.withStatus(RaftResponse.Status.ERROR)
            .withError(((RaftException) error).getType(), error.getMessage())
            .build());
    } else if (error instanceof PrimitiveException.ServiceException) {
        log.warn("An application error occurred: {}", error.getCause());
        future.complete(builder.withStatus(RaftResponse.Status.ERROR)
            .withError(RaftError.Type.APPLICATION_ERROR)
            .build());
    } else {
        log.warn("An unexpected error occurred: {}", error);
        future.complete(builder.withStatus(RaftResponse.Status.ERROR)
            .withError(RaftError.Type.PROTOCOL_ERROR, error.getMessage())
            .build());
    }
}
java
private void reply(final String response, final boolean error, final String errorMessage,
        final String stackTrace, final String statusCode, final int statusCodeInt) {
    if (!sentReply) {
        // must update sentReply first to avoid duplicated msg.
        sentReply = true;

        // Close the connection. Make sure the close operation ends because
        // all I/O operations are asynchronous in Netty.
        if (channel != null && channel.isOpen()) {
            channel.close().awaitUninterruptibly();
        }

        final ResponseOnSingeRequest res = new ResponseOnSingeRequest(
                response, error, errorMessage, stackTrace, statusCode,
                statusCodeInt, PcDateUtils.getNowDateTimeStrStandard(), null);

        if (!getContext().system().deadLetters().equals(sender)) {
            sender.tell(res, getSelf());
        }

        if (getContext() != null) {
            getContext().stop(getSelf());
        }
    }
}
java
public ServiceCall<HTMLReturn> convertToHtml(ConvertToHtmlOptions convertToHtmlOptions) {
    Validator.notNull(convertToHtmlOptions, "convertToHtmlOptions cannot be null");
    String[] pathSegments = { "v1/html_conversion" };
    RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
    builder.query("version", versionDate);
    Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("compare-comply", "v1", "convertToHtml");
    for (Entry<String, String> header : sdkHeaders.entrySet()) {
        builder.header(header.getKey(), header.getValue());
    }
    builder.header("Accept", "application/json");
    if (convertToHtmlOptions.model() != null) {
        builder.query("model", convertToHtmlOptions.model());
    }
    MultipartBody.Builder multipartBuilder = new MultipartBody.Builder();
    multipartBuilder.setType(MultipartBody.FORM);
    RequestBody fileBody = RequestUtils.inputStreamBody(convertToHtmlOptions.file(),
            convertToHtmlOptions.fileContentType());
    multipartBuilder.addFormDataPart("file", convertToHtmlOptions.filename(), fileBody);
    builder.body(multipartBuilder.build());
    return createServiceCall(builder.build(), ResponseConverterUtils.getObject(HTMLReturn.class));
}
python
def next_previous(self, options=None, **kwds):
    """
    Endpoint: /photo/<id>/nextprevious[/<options>].json

    Returns a dict containing the next and previous photo lists
    (there may be more than one next/previous photo returned).
    """
    return self._client.photo.next_previous(self, options, **kwds)
java
public static <K1, V1, K2, V2> void setMapperClass(Configuration conf,
        Class<? extends BaseMapper<?, ?, ?, ?>> internalMapperClass) {
    if (MultithreadedMapper.class.isAssignableFrom(internalMapperClass)) {
        throw new IllegalArgumentException("Can't have recursive "
                + "MultithreadedMapper instances.");
    }
    conf.setClass(ConfigConstants.CONF_MULTITHREADEDMAPPER_CLASS,
            internalMapperClass, Mapper.class);
}
java
public static SofaResponse buildSofaErrorResponse(String errorMsg) {
    SofaResponse sofaResponse = new SofaResponse();
    sofaResponse.setErrorMsg(errorMsg);
    return sofaResponse;
}
python
def delete_repo(name, config_path=_DEFAULT_CONFIG_PATH, force=False):
    '''
    Remove a local package repository.

    :param str name: The name of the local repository.
    :param str config_path: The path to the configuration file for the aptly instance.
    :param bool force: Whether to remove the repository even if it is used as the source
        of an existing snapshot.

    :return: A boolean representing whether all changes succeeded.
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' aptly.delete_repo name="test-repo"
    '''
    _validate_config(config_path)
    force = six.text_type(bool(force)).lower()

    current_repo = __salt__['aptly.get_repo'](name=name, config_path=config_path)

    if not current_repo:
        log.debug('Repository already absent: %s', name)
        return True

    cmd = ['repo', 'drop', '-config={}'.format(config_path),
           '-force={}'.format(force), name]

    _cmd_run(cmd)
    repo = __salt__['aptly.get_repo'](name=name, config_path=config_path)

    if repo:
        log.error('Unable to remove repo: %s', name)
        return False
    log.debug('Removed repo: %s', name)
    return True
python
def _parse_length(self, value, font_relative, callback, *args):
    '''Parse/calc length, converting to pixels, calls callback(length, *args)
    when the length is first computed or changes'''
    if value.endswith('%'):
        frac = float(value[:-1]) / 100
        if font_relative:
            attrs = self._get_current_attributes()
            font_size = attrs.font.get_size() / pango.SCALE
            callback(frac * display_resolution * font_size, *args)
        else:
            ## CSS says "Percentage values: refer to width of the closest
            ## block-level ancestor"
            ## This is difficult/impossible to implement, so we use
            ## textview width instead; a reasonable approximation..
            alloc = self.textview.get_allocation()
            self.__parse_length_frac_size_allocate(self.textview, alloc,
                                                   frac, callback, args)
            self.textview.connect("size-allocate",
                                  self.__parse_length_frac_size_allocate,
                                  frac, callback, args)
    elif value.endswith('pt'):  # points
        callback(float(value[:-2]) * display_resolution, *args)
    elif value.endswith('em'):  # ems, the height of the element's font
        attrs = self._get_current_attributes()
        font_size = attrs.font.get_size() / pango.SCALE
        callback(float(value[:-2]) * display_resolution * font_size, *args)
    elif value.endswith('ex'):  # x-height, ~ the height of the letter 'x'
        ## FIXME: figure out how to calculate this correctly
        ## for now 'em' size is used as approximation
        attrs = self._get_current_attributes()
        font_size = attrs.font.get_size() / pango.SCALE
        callback(float(value[:-2]) * display_resolution * font_size, *args)
    elif value.endswith('px'):  # pixels
        callback(int(value[:-2]), *args)
    else:
        warnings.warn("Unable to parse length value '%s'" % value)
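The pt/em/px arithmetic above is standard CSS: display_resolution is pixels per point (dpi / 72), and em lengths scale with the current font size. A minimal standalone sketch of the same conversions, assuming a 96 dpi display and a 12 pt font:
python
# Illustration only: the pango-free core of the unit conversion above.
display_resolution = 96 / 72.0  # pixels per point at 96 dpi

def length_to_px(value, font_size_pt=12.0):
    if value.endswith('pt'):
        return float(value[:-2]) * display_resolution
    if value.endswith('em'):
        return float(value[:-2]) * display_resolution * font_size_pt
    if value.endswith('px'):
        return float(value[:-2])
    raise ValueError(value)

print(length_to_px('12pt'))   # 16.0
print(length_to_px('1.5em'))  # 24.0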
java
public void importFolder(String importFolderName, String importPath, CmsObject cms) throws CmsException {
    try {
        m_importedResources = new ArrayList<CmsResource>();
        m_importFolderName = importFolderName;
        m_importPath = importPath;
        m_cms = cms;
        // open the import resource
        getImportResource();
        // first lock the destination path
        m_cms.lockResource(m_importPath);
        // import the resources
        if (m_zipStreamIn == null) {
            importResources(m_importResource, m_importPath);
        } else {
            importZipResource(m_zipStreamIn, m_importPath, false);
        }
        // all is done, unlock the resources
        m_cms.unlockResource(m_importPath);
    } catch (Exception e) {
        throw new CmsVfsException(
            Messages.get().container(Messages.ERR_IMPORT_FOLDER_2, importFolderName, importPath),
            e);
    }
}
java
@SuppressWarnings("rawtypes") public static void configure( Configuration conf, String qualifiedOutputTableId, String outputTableSchemaJson, String outputGcsPath, BigQueryFileFormat outputFileFormat, Class<? extends FileOutputFormat> outputFormatClass) throws IOException { Preconditions.checkArgument( !Strings.isNullOrEmpty(outputTableSchemaJson), "outputTableSchemaJson must not be null or empty."); TableReference outputTable = BigQueryStrings.parseTableReference(qualifiedOutputTableId); configure( conf, outputTable.getProjectId(), outputTable.getDatasetId(), outputTable.getTableId(), Optional.of(outputTableSchemaJson), outputGcsPath, outputFileFormat, outputFormatClass); }
java
public PointerDensityHierarchyRepresentationResult run(Database db, Relation<O> relation) {
    final DistanceQuery<O> distQ = db.getDistanceQuery(relation, getDistanceFunction());
    final KNNQuery<O> knnQ = db.getKNNQuery(distQ, minPts);
    // We need array addressing later.
    final ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());

    // Compute the core distances
    // minPts + 1: ignore query point.
    final WritableDoubleDataStore coredists = computeCoreDists(ids, knnQ, minPts);

    WritableDBIDDataStore pi = DataStoreUtil.makeDBIDStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
    WritableDoubleDataStore lambda = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC, Double.POSITIVE_INFINITY);
    // Temporary storage for m.
    WritableDoubleDataStore m = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);

    FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Running HDBSCAN*-SLINK", ids.size(), LOG) : null;
    // has to be an array for monotonicity reasons!
    ModifiableDBIDs processedIDs = DBIDUtil.newArray(ids.size());

    for(DBIDIter id = ids.iter(); id.valid(); id.advance()) {
        // Steps 1,3,4 are exactly as in SLINK
        step1(id, pi, lambda);
        // Step 2 is modified to use a different distance
        step2(id, processedIDs, distQ, coredists, m);
        step3(id, pi, lambda, processedIDs, m);
        step4(id, pi, lambda, processedIDs);
        processedIDs.add(id);
        LOG.incrementProcessed(progress);
    }
    LOG.ensureCompleted(progress);

    return new PointerDensityHierarchyRepresentationResult(ids, pi, lambda, distQ.getDistanceFunction().isSquared(), coredists);
}
java
public boolean isAbsolute() {
    final int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0;
    return uri.getPath().startsWith(SEPARATOR, start);
}
python
def get_name(self, obj=None, withext=True):
    """Return the filename

    :param obj: the fileinfo with information. If None, this will use the
        stored object of JB_File
    :type obj: :class:`FileInfo`
    :param withext: If True, return with the fileextension.
    :type withext: bool
    :returns: the filename, default is with fileextension
    :rtype: str
    :raises: None
    """
    if obj is None:
        obj = self._obj
    chunks = []
    for e in self._elements:
        c = e.get_chunk(obj)
        if c is not None:
            chunks.append(c)
    name = '_'.join(chunks)
    if withext:
        name = os.extsep.join([name, self.get_ext(obj)])
    return name
java
public java.lang.String getPersistenceState() {
    java.lang.Object ref = persistenceState_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
            persistenceState_ = s;
        }
        return s;
    }
}
java
public static void generateFieldSerialize(BindTypeContext context, PersistType persistType, BindProperty property, Modifier... modifiers) {
    Converter<String, String> format = CaseFormat.LOWER_CAMEL.converterTo(CaseFormat.UPPER_CAMEL);

    String methodName = "serialize" + format.convert(property.getName());
    MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder(methodName)
            .addJavadoc("for attribute $L serialization\n", property.getName())
            .addParameter(ParameterSpec.builder(typeName(property.getElement()), "value").build())
            .addModifiers(modifiers);

    switch (persistType) {
    case STRING:
        methodBuilder.returns(className(String.class));
        break;
    case BYTE:
        methodBuilder.returns(TypeUtility.arrayTypeName(Byte.TYPE));
        break;
    }

    // if property type is byte[], return directly the value
    if (ArrayTypeName.of(Byte.TYPE).equals(property.getPropertyType().getTypeName()) && persistType == PersistType.BYTE) {
        methodBuilder.addStatement("return value");
    } else {
        methodBuilder.beginControlFlow("if (value==null)");
        methodBuilder.addStatement("return null");
        methodBuilder.endControlFlow();

        methodBuilder.addStatement("$T context=$T.jsonBind()", KriptonJsonContext.class, KriptonBinder.class);
        methodBuilder.beginControlFlow("try ($T stream=new $T(); $T wrapper=context.createSerializer(stream))",
                KriptonByteArrayOutputStream.class, KriptonByteArrayOutputStream.class, JacksonWrapperSerializer.class);
        methodBuilder.addStatement("$T jacksonSerializer=wrapper.jacksonGenerator", JsonGenerator.class);

        if (!property.isBindedObject()) {
            methodBuilder.addStatement("jacksonSerializer.writeStartObject()");
        }

        methodBuilder.addStatement("int fieldCount=0");

        BindTransform bindTransform = BindTransformer.lookup(property);
        String serializerName = "jacksonSerializer";
        bindTransform.generateSerializeOnJackson(context, methodBuilder, serializerName, null, "value", property);

        if (!property.isBindedObject()) {
            methodBuilder.addStatement("jacksonSerializer.writeEndObject()");
        }

        methodBuilder.addStatement("jacksonSerializer.flush()");

        switch (persistType) {
        case STRING:
            methodBuilder.addStatement("return stream.toString()");
            break;
        case BYTE:
            methodBuilder.addStatement("return stream.toByteArray()");
            break;
        }

        methodBuilder.nextControlFlow("catch($T e)", Exception.class);
        methodBuilder.addStatement("throw(new $T(e.getMessage()))", KriptonRuntimeException.class);
        methodBuilder.endControlFlow();
    }

    context.builder.addMethod(methodBuilder.build());
}
python
def order_by(self, *args):
    """
    Applies query ordering.

    Args:
        *args: Order by field names. Defaults to ascending, prepend with
            hyphen (-) for descending ordering.

    Returns:
        Self. Queryset object.

    Examples:
        >>> Person.objects.order_by('-name', 'join_date')
    """
    clone = copy.deepcopy(self)
    clone.adapter.ordered = True
    if args:
        clone.adapter.order_by(*args)
    return clone
python
def do_POST(self):
    """Upload a file and execute a command."""
    logging.debug("New POST request.")
    query = parse_qs(urlparse(self.path).query)
    sample = query['sample'][0]
    # "async" is a reserved keyword since Python 3.7, so bind it to another name
    asynchronous = bool(int(query.get('async', [False])[0]))
    path = self.store_file(mkdtemp(), sample)
    command = query['command'][0].format(sample=path).split(' ')
    output = run_command(command, asynchronous=asynchronous)
    self.respond(output)
python
def _tle_to_keplerian_mean(cls, coord, center):
    """Conversion from the TLE standard format to the Mean Keplerian

    see :py:class:`Tle` for more information.
    """
    i, Ω, e, ω, M, n = coord
    a = (center.µ / n ** 2) ** (1 / 3)

    return np.array([a, e, i, Ω, ω, M], dtype=float)
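The line a = (µ / n²)^(1/3) is Kepler's third law solved for the semi-major axis. A standalone sanity check for an ISS-like orbit, assuming µ in km³/s² and a mean motion already converted to rad/s (the surrounding Tle class normally handles unit conversion):
python
import math

MU_EARTH = 398600.4418            # km^3/s^2
n = 15.5 * 2 * math.pi / 86400.0  # ~15.5 revolutions/day, in rad/s

a = (MU_EARTH / n ** 2) ** (1 / 3)
print(round(a, 1))                # ~6795 km, i.e. roughly 420 km altitude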
java
protected String getFilename(HttpServletRequest request, HttpServletResponse response) throws IOException {
    final String filename = request.getParameter("filename");
    if (filename == null || filename.length() == 0) {
        throw new IOException("Invalid filename");
    }
    return filename;
}
python
def _set_used_as_input_variables_by_entity(self) -> Dict[str, List[str]]:
    '''Identify and set the good input variables for the different entities'''
    if self.used_as_input_variables_by_entity is not None:
        return

    tax_benefit_system = self.tax_benefit_system

    assert set(self.used_as_input_variables) <= set(tax_benefit_system.variables.keys()), \
        "Some variables used as input variables are not part of the tax benefit system:\n {}".format(
            set(self.used_as_input_variables).difference(set(tax_benefit_system.variables.keys()))
        )

    self.used_as_input_variables_by_entity = dict()

    for entity in tax_benefit_system.entities:
        self.used_as_input_variables_by_entity[entity.key] = [
            variable
            for variable in self.used_as_input_variables
            if tax_benefit_system.get_variable(variable).entity == entity
        ]

    return self.used_as_input_variables_by_entity
python
def setCurrentIndex(self, index):
    """
    Sets the current index on self and on the tab bar to keep the two
    in sync.

    :param      index | <int>
    """
    super(XViewPanel, self).setCurrentIndex(index)
    self.tabBar().setCurrentIndex(index)
java
@Override
public int indexOf(IBond bond) {
    for (int i = 0; i < bondCount; i++) {
        if (bonds[i].equals(bond)) return i;
    }
    return -1;
}
python
def step_use_log_record_configuration(context):
    """
    Define log record configuration parameters.

    .. code-block: gherkin

        Given I use the log record configuration:
            | property | value |
            | format   |       |
            | datefmt  |       |
    """
    assert context.table, "REQUIRE: context.table"
    context.table.require_columns(["property", "value"])
    for row in context.table.rows:
        property_name = row["property"]
        value = row["value"]
        if property_name == "format":
            context.log_record_format = value
        elif property_name == "datefmt":
            context.log_record_datefmt = value
        else:
            raise KeyError("Unknown property=%s" % property_name)
java
private Map<String, Object> getProps(XmlSchemaComplexType xsdComplexType, RootCompositeType compositeTypes) {
    String complexTypeName = getComplexTypeName(xsdComplexType);
    visit(xsdComplexType, compositeTypes, complexTypeName);
    Map<String, Object> props = new LinkedHashMap<String, Object>();
    props.put(COMPLEX_TYPE_PROP_NAME, true);
    props.put(COMPLEX_TYPE_NAME_PROP_NAME, complexTypeName);
    return props;
}
python
def absstart(self):
    """Returns the absolute start of the element by including docstrings
    outside of the element definition if applicable."""
    if hasattr(self, "docstart") and self.docstart > 0:
        return self.docstart
    else:
        return self.start
python
def cycle_canceling(self, display):
    '''
    API:
        cycle_canceling(self, display)
    Description:
        Solves minimum cost feasible flow problem using cycle canceling
        algorithm. Returns True when an optimal solution is found, returns
        False otherwise. 'flow' attribute values of arcs should be considered
        as junk when returned False.
    Input:
        display: Display method.
    Pre:
        (1) Arcs should have 'capacity' and 'cost' attribute.
        (2) Nodes should have 'demand' attribute, this value should be
        positive if the node is a supply node, negative if it is demand node
        and 0 if it is transhipment node.
        (3) graph should not have node 's' and 't'.
    Post:
        Changes 'flow' attributes of arcs.
    Return:
        Returns True when an optimal solution is found, returns False
        otherwise.
    '''
    # find a feasible solution to flow problem
    if not self.find_feasible_flow():
        return False
    # create residual graph
    residual_g = self.create_residual_graph()
    # identify a negative cycle in residual graph
    ncycle = residual_g.get_negative_cycle()
    # loop while residual graph has a negative cycle
    while ncycle is not None:
        # find capacity of cycle
        cap = residual_g.find_cycle_capacity(ncycle)
        # augment capacity amount along the cycle
        self.augment_cycle(cap, ncycle)
        # create residual graph
        residual_g = self.create_residual_graph()
        # identify next negative cycle
        ncycle = residual_g.get_negative_cycle()
    return True
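get_negative_cycle, find_cycle_capacity and augment_cycle are methods of the project's graph class; the detection step is classically implemented with Bellman-Ford plus a predecessor walk. A self-contained sketch of that primitive on a plain (u, v, cost) edge list, for illustration only:
python
def find_negative_cycle(nodes, edges):
    """edges: iterable of (u, v, cost). Returns the node list of a
    negative-cost cycle, or None if no such cycle exists."""
    dist = {v: 0.0 for v in nodes}   # 0-init acts as a virtual super-source
    pred = {v: None for v in nodes}
    marked = None
    for _ in range(len(nodes)):      # n passes; a relaxation in the last one
        marked = None                # proves a negative cycle exists
        for u, v, cost in edges:
            if dist[u] + cost < dist[v]:
                dist[v] = dist[u] + cost
                pred[v] = u
                marked = v
    if marked is None:
        return None
    for _ in range(len(nodes)):      # walk back n steps to land on the cycle
        marked = pred[marked]
    cycle, v = [marked], pred[marked]
    while v != marked:
        cycle.append(v)
        v = pred[v]
    return cycle[::-1]

print(find_negative_cycle('abc', [('a', 'b', 1), ('b', 'c', -2), ('c', 'a', -1)]))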
python
def readConfig(self, configuration):
    """Read configuration from dict.

    Read configuration from a JSON configuration file.

    :param configuration: configuration to load.
    :type configuration: dict.
    """
    self.__logger.debug("Reading configuration")
    self.city = configuration["name"]
    self.__logger.info("City name: " + self.city)
    if "intervals" in configuration:
        self.__intervals = configuration["intervals"]
        self.__logger.debug("Intervals: " + str(self.__intervals))
    if "last_date" in configuration:
        self.__lastDay = configuration["last_date"]
        self.__logger.debug("Last day: " + self.__lastDay)
    if "locations" in configuration:
        self.__locations = configuration["locations"]
        self.__logger.debug("Locations: " + str(self.__locations))
        self.__addLocationsToURL(self.__locations)
    # initialize both exclusion sets up front so an "excludedLocations" entry
    # without an "excludedUsers" entry does not hit an unset attribute
    self.__excludedUsers = set()
    self.__excludedLocations = set()
    if "excludedUsers" in configuration:
        excluded = configuration["excludedUsers"]
        for e in excluded:
            self.__excludedUsers.add(e)
        self.__logger.debug("Excluded users " + str(self.__excludedUsers))
    if "excludedLocations" in configuration:
        excluded = configuration["excludedLocations"]
        for e in excluded:
            self.__excludedLocations.add(e)
        self.__logger.debug("Excluded locations " + str(self.__excludedLocations))
java
public DecimalFormatProperties getDecimalFormat(int i) throws ArrayIndexOutOfBoundsException {
    if (null == m_DecimalFormatDeclarations)
        throw new ArrayIndexOutOfBoundsException();

    return (DecimalFormatProperties) m_DecimalFormatDeclarations.elementAt(i);
}
python
def get_items_by_ids(self, item_ids, item_type=None):
    """Given a list of item ids, return all the Item objects

    Args:
        item_ids (obj): List of item IDs to query
        item_type (str): (optional) Item type to filter results with

    Returns:
        List of `Item` objects for given item IDs and given item type
    """
    urls = [urljoin(self.item_url, f"{i}.json") for i in item_ids]
    result = self._run_async(urls=urls)
    items = [Item(r) for r in result if r]
    if item_type:
        return [item for item in items if item.item_type == item_type]
    else:
        return items
java
public static List<Parameter> collectConstructorParameters(Class<?> cls, Components components, javax.ws.rs.Consumes classConsumes) {
    if (cls.isLocalClass() || (cls.isMemberClass() && !Modifier.isStatic(cls.getModifiers()))) {
        return Collections.emptyList();
    }

    List<Parameter> selected = Collections.emptyList();
    int maxParamsCount = 0;

    for (Constructor<?> constructor : cls.getDeclaredConstructors()) {
        if (!ReflectionUtils.isConstructorCompatible(constructor)
                && !ReflectionUtils.isInject(Arrays.asList(constructor.getDeclaredAnnotations()))) {
            continue;
        }

        final Type[] genericParameterTypes = constructor.getGenericParameterTypes();
        final Annotation[][] annotations = constructor.getParameterAnnotations();

        int paramsCount = 0;
        final List<Parameter> parameters = new ArrayList<Parameter>();
        for (int i = 0; i < genericParameterTypes.length; i++) {
            final List<Annotation> tmpAnnotations = Arrays.asList(annotations[i]);
            if (isContext(tmpAnnotations)) {
                paramsCount++;
            } else {
                final Type genericParameterType = genericParameterTypes[i];
                final List<Parameter> tmpParameters = collectParameters(genericParameterType, tmpAnnotations, components, classConsumes);
                if (tmpParameters.size() >= 1) {
                    for (Parameter tmpParameter : tmpParameters) {
                        if (ParameterProcessor.applyAnnotations(
                                tmpParameter,
                                genericParameterType,
                                tmpAnnotations,
                                components,
                                classConsumes == null ? new String[0] : classConsumes.value(),
                                null) != null) {
                            parameters.add(tmpParameter);
                        }
                    }
                    paramsCount++;
                }
            }
        }

        if (paramsCount >= maxParamsCount) {
            maxParamsCount = paramsCount;
            selected = parameters;
        }
    }

    return selected;
}
java
private int getInt() throws IOException {
    st.nextToken();
    if (st.ttype == StreamTokenizer.TT_WORD)
        return Double.valueOf(st.sval).intValue();
    else if (st.ttype == StreamTokenizer.TT_EOF)
        throw new EOFException("End-of-File encountered during parsing");
    else
        throw new IOException("Unknown token found during parsing");
}
python
def _propagated_record(self, rdtype, name, content, nameservers=None):
    """
    If the public propagation check is enabled, waits until the domain
    nameservers respond with the propagated record type, name & content,
    and returns a boolean indicating whether the propagation succeeded.
    """
    latency = self._get_provider_option('latency')
    propagated = self._get_provider_option('propagated')
    if propagated == 'yes':
        retry, max_retry = 0, 20
        while retry < max_retry:
            for rdata in Provider._dns_lookup(name, rdtype, nameservers):
                if content == rdata.to_text():
                    LOGGER.info('Hetzner => Record %s has %s %s',
                                name, rdtype, content)
                    return True
            retry += 1
            retry_log = (', retry ({}/{}) in {}s...'.format((retry + 1),
                                                            max_retry, latency)
                         if retry < max_retry else '')
            LOGGER.info('Hetzner => Record is not propagated%s', retry_log)
            time.sleep(latency)
    return False
python
def drain_OD(q_plant, T, depth_end, SDR):
    """Return the nominal diameter of the entrance tank drain pipe. Depth at
    the end of the flocculator is used for headloss and length calculation
    inputs in the diam_pipe calculation.

    Parameters
    ----------
    q_plant: float
        Plant flow rate

    T: float
        Design temperature

    depth_end: float
        The depth of water at the end of the flocculator

    SDR: float
        Standard dimension ratio

    Returns
    -------
    float
        ?

    Examples
    --------
    >>> from aguaclara.play import*
    ??
    """
    nu = pc.viscosity_kinematic(T)
    K_minor = con.PIPE_ENTRANCE_K_MINOR + con.PIPE_EXIT_K_MINOR + con.EL90_K_MINOR
    drain_ID = pc.diam_pipe(q_plant, depth_end, depth_end, nu, mat.PVC_PIPE_ROUGH, K_minor)
    drain_ND = pipe.SDR_available_ND(drain_ID, SDR)
    return pipe.OD(drain_ND).magnitude
java
@VisibleForTesting
List<File> getProcessConfigFiles() {
    // TODO : should use a FileVisitor (Once we update to Java 7)
    File[] files;
    File configurationDirOrFile = configuration.getProcessConfigDirOrFile();
    if (configurationDirOrFile == null) {
        throw new IllegalStateException("Configuration should specify configuration directory or file, with -j or -f option");
    }
    if (configurationDirOrFile.isFile()) {
        files = new File[1];
        files[0] = configurationDirOrFile;
    } else {
        files = firstNonNull(configurationDirOrFile.listFiles(), new File[0]);
    }

    List<File> result = new ArrayList<>();
    for (File file : files) {
        if (this.isProcessConfigFile(file)) {
            result.add(file);
        }
    }
    return result;
}
java
public String resolveAliasType(String alias) {
    String type = getAliasedType(alias);
    return (type == null ? alias : type);
}
java
public void shutdown() {
    getLogger().info("removing receiverJob from the Scheduler.");

    if (this.repository instanceof LoggerRepositoryEx) {
        Scheduler scheduler = ((LoggerRepositoryEx) repository).getScheduler();
        scheduler.delete(customReceiverJob);
    }

    lastID = -1;
}
python
def _create_server(host, port):
    """
    Helper function.  Creates a listening socket on the designated
    host and port.  Modeled on the socket.create_connection()
    function.
    """
    exc = socket.error("getaddrinfo returns an empty list")
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            # Create the listening socket
            sock = socket.socket(af, socktype, proto)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.bind(sa)
            sock.listen(1024)
            return sock
        except socket.error as err:
            # In Python 3 the "as" name is unbound when the except block
            # exits, so keep the last error in a separate variable.
            exc = err
            # Clean up after ourselves
            if sock is not None:
                sock.close()

    # Couldn't create a listening socket
    raise exc
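A quick usage check of the helper above (assuming its module-level socket import is in scope); port 0 asks the OS for any free port:
python
sock = _create_server('127.0.0.1', 0)
print('listening on %s:%d' % sock.getsockname())
sock.close()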
java
public double getAverageOccupation() {
    calculateOccupied();
    double result;

    if (occupiedHypercubes() == 0) {
        result = 0.0;
    } else {
        double sum = 0.0;
        for (int value : occupied) {
            sum += hypercubes[value];
        }
        result = sum / occupiedHypercubes();
    }

    return result;
}
python
def execute(self):
    """
    Given the command-line arguments, this figures out which subcommand is
    being run, creates a parser appropriate to that command, and runs it.
    """
    try:
        subcommand = self.argv[1]
    except IndexError:
        subcommand = 'help'  # Display help if no arguments were given.

    # Preprocess options to extract --settings and --pythonpath.
    # These options could affect the commands that are available, so they
    # must be processed early.
    parser = CommandParser(None, usage="%(prog)s subcommand [options] [args]", add_help=False)
    parser.add_argument('--settings')
    parser.add_argument('--pythonpath')
    parser.add_argument('args', nargs='*')  # catch-all
    try:
        options, args = parser.parse_known_args(self.argv[2:])
        handle_default_options(options)
    except CommandError:
        pass  # Ignore any option errors at this point.

    no_settings_commands = [
        'help', 'version', '--help', '--version', '-h',
        'compilemessages', 'makemessages',
        'startapp', 'startproject',
    ]

    try:
        settings.INSTALLED_APPS
    except ImproperlyConfigured as exc:
        self.settings_exception = exc
        # A handful of built-in management commands work without settings.
        # Load the default settings -- where INSTALLED_APPS is empty.
        if subcommand in no_settings_commands:
            settings.configure()

    if settings.configured:
        django.setup()

    self.autocomplete()

    if subcommand == 'help':
        if '--commands' in args:
            sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
        elif len(options.args) < 1:
            sys.stdout.write(self.main_help_text() + '\n')
        else:
            self.fetch_command(options.args[0]).print_help(self.prog_name, options.args[0])
    # Special cases for 'jira.py --version' and 'jira.py --help' to work.
    elif subcommand == 'version' or self.argv[1:] == ['--version']:
        sys.stdout.write(django.get_version() + '\n')
    elif self.argv[1:] in (['--help'], ['-h']):
        sys.stdout.write(self.main_help_text() + '\n')
    else:
        self.fetch_command(subcommand).run_from_argv(self.argv)
java
@Override @Path("/{ownerType}/{ownerId}") @ApiOperation(value="Update attributes for an ownerType and ownerId", response=StatusMessage.class) @ApiImplicitParams({ @ApiImplicitParam(name="Attributes", paramType="body", required=true, dataType="java.lang.Object")}) public JSONObject put(String path, JSONObject content, Map<String,String> headers) throws ServiceException, JSONException { String ownerType = getSegment(path, 1); if (ownerType == null) throw new ServiceException(ServiceException.BAD_REQUEST, "Missing pathSegment: ownerType"); String ownerId = getSegment(path, 2); if (ownerId == null) throw new ServiceException(ServiceException.BAD_REQUEST, "Missing pathSegment: ownerId"); try { Map<String,String> attrs = JsonUtil.getMap(content); ServiceLocator.getWorkflowServices().updateAttributes(ownerType, Long.parseLong(ownerId), attrs); return null; } catch (NumberFormatException ex) { throw new ServiceException(ServiceException.BAD_REQUEST, "Invalid ownerId: " + ownerId); } catch (JSONException ex) { throw new ServiceException(ServiceException.INTERNAL_ERROR, ex.getMessage(), ex); } }
python
def simplesurface(idf, bsd, deletebsd=True, setto000=False):
    """convert a bsd (buildingsurface:detailed) into a simple surface"""
    funcs = (wallexterior,
             walladiabatic,
             wallunderground,
             wallinterzone,
             roof,
             ceilingadiabatic,
             ceilinginterzone,
             floorgroundcontact,
             flooradiabatic,
             floorinterzone,)
    for func in funcs:
        surface = func(idf, bsd, deletebsd=deletebsd, setto000=setto000)
        if surface:
            return surface
    return None
java
@Override
public void start(BundleContext context) {
    initialContextFactories = initServiceTracker(context, InitialContextFactory.class, ServiceTrackerCustomizers.ICF_CACHE);
    objectFactories = initServiceTracker(context, ObjectFactory.class, ServiceTrackerCustomizers.URL_FACTORY_CACHE);
    icfBuilders = initServiceTracker(context, InitialContextFactoryBuilder.class, ServiceTrackerCustomizers.LAZY);
    urlObjectFactoryFinders = initServiceTracker(context, URLObjectFactoryFinder.class, ServiceTrackerCustomizers.LAZY);
    environmentAugmentors = initServiceTracker(context, EnvironmentAugmentation.class, null);
    environmentUnaugmentors = initServiceTracker(context, EnvironmentUnaugmentation.class, null);

    try {
        OSGiInitialContextFactoryBuilder builder = new OSGiInitialContextFactoryBuilder();
        setField(InitialContextFactoryBuilder.class, builder, true);
        icfBuilder = builder;
    } catch (IllegalStateException e) {
        // Log the problem at info level, but only log the exception at debug level, as in many cases
        // this is not a real issue and people don't want to see stack traces at info level when
        // everything is working as expected.
        LOGGER.info(Utils.MESSAGES.getMessage("unable.to.set.static.ICFB.already.exists", getClassName(InitialContextFactoryBuilder.class)));
        LOGGER.debug(Utils.MESSAGES.getMessage("unable.to.set.static.ICFB.already.exists", getClassName(InitialContextFactoryBuilder.class)), e);
    }

    try {
        OSGiObjectFactoryBuilder builder = new OSGiObjectFactoryBuilder(context);
        setField(ObjectFactoryBuilder.class, builder, true);
        ofBuilder = builder;
    } catch (IllegalStateException e) {
        // Log the problem at info level, but only log the exception at debug level, as in many cases
        // this is not a real issue and people don't want to see stack traces at info level when
        // everything is working as expected.
        LOGGER.info(Utils.MESSAGES.getMessage("unable.to.set.static.OFB.already.exists", getClassName(ObjectFactoryBuilder.class)));
        LOGGER.debug(Utils.MESSAGES.getMessage("unable.to.set.static.OFB.already.exists", getClassName(ObjectFactoryBuilder.class)), e);
    }

    context.registerService(JNDIProviderAdmin.class.getName(), new ProviderAdminServiceFactory(context), null);
    context.registerService(InitialContextFactoryBuilder.class.getName(), new JREInitialContextFactoryBuilder(), null);
    context.registerService(JNDIContextManager.class.getName(), new ContextManagerServiceFactory(), null);
    context.registerService(AugmenterInvoker.class.getName(), AugmenterInvokerImpl.getInstance(), null);

    // Start the bundletracker that clears out the cache. (only interested in stopping events)
    // bt = new BundleTracker(context, Bundle.STOPPING, new ServiceTrackerCustomizers.CacheBundleTrackerCustomizer());
    // bt.open();
}
python
def merge(cls, *others):
    """
    Merge the `others` schema into this instance.

    The values will all be read from the provider of the original object.
    """
    for other in others:
        for k, v in other:
            setattr(cls, k, BoundValue(cls, k, v.value))
python
def disk_xml(identifier, pool_xml, base_volume_xml, cow):
    """Clones volume_xml updating the required fields.

    * name
    * target path
    * backingStore

    """
    pool = etree.fromstring(pool_xml)
    base_volume = etree.fromstring(base_volume_xml)

    pool_path = pool.find('.//path').text
    base_path = base_volume.find('.//target/path').text

    target_path = os.path.join(pool_path, '%s.qcow2' % identifier)

    volume_xml = VOLUME_DEFAULT_CONFIG.format(identifier, target_path)
    volume = etree.fromstring(volume_xml)

    base_volume_capacity = base_volume.find(".//capacity")
    volume.append(base_volume_capacity)

    if cow:
        backing_xml = BACKING_STORE_DEFAULT_CONFIG.format(base_path)
        backing_store = etree.fromstring(backing_xml)
        volume.append(backing_store)

    return etree.tostring(volume).decode('utf-8')
java
public ConstraintConnectiveDescr parse(final String text) {
    ConstraintConnectiveDescr constraint = null;
    try {
        DRLLexer lexer = DRLFactory.getDRLLexer(new ANTLRStringStream(text), languageLevel);
        CommonTokenStream input = new CommonTokenStream(lexer);
        RecognizerSharedState state = new RecognizerSharedState();
        helper = new ParserHelper(input, state, languageLevel);
        DRLExpressions parser = DRLFactory.getDRLExpressions(input, state, helper, languageLevel);
        parser.setBuildDescr(true);
        parser.setLeftMostExpr(null); // setting initial value just in case

        BaseDescr expr = parser.conditionalOrExpression();
        if (expr != null && !parser.hasErrors()) {
            constraint = ConstraintConnectiveDescr.newAnd();
            constraint.addOrMerge(expr);
        }
    } catch (RecognitionException e) {
        helper.reportError(e);
    }
    return constraint;
}
python
def instances(self, skip_exist_test=False):
    """
    Ask the collection to return a list of instances.
    If skip_exist_test is set to True, the instances returned by the
    collection won't have their primary key checked for existence.
    """
    self.reset_result_type()
    self._instances = True
    self._instances_skip_exist_test = skip_exist_test
    return self
java
public static void setDefaultExecutorService(ExecutorService defaultExecutorService) {
    // If the timer executor service is set to the default
    // executor service, adjust it to the new value as well.
    if (timerExecutorService == Components.defaultExecutorService) {
        timerExecutorService = defaultExecutorService;
    }
    Components.defaultExecutorService = defaultExecutorService;
}
java
public void resize(int maxCacheSize) {
    setMaxCacheSize(maxCacheSize);
    for (FeatureCache cache : tableCache.values()) {
        cache.resize(maxCacheSize);
    }
}
python
def closest(self, tag):
    """Match closest ancestor."""
    return CSSMatch(self.selectors, tag, self.namespaces, self.flags).closest()
java
static AccountingDate ofEpochDay(AccountingChronology chronology, long epochDay) {
    EPOCH_DAY.range().checkValidValue(epochDay, EPOCH_DAY); // validate outer bounds
    // Use Accounting 1 to help with 0-counts. Leap years can occur at any time.
    long accountingEpochDay = epochDay + chronology.getDays0001ToIso1970();

    int longCycle = (int) Math.floorDiv(accountingEpochDay, DAYS_PER_LONG_CYCLE);
    int daysInLongCycle = (int) Math.floorMod(accountingEpochDay, DAYS_PER_LONG_CYCLE);

    // Value is an estimate, as the floating leap-years make this difficult.
    int year = (daysInLongCycle - (daysInLongCycle / 365 + daysInLongCycle / (4 * 365 + 1) - daysInLongCycle / (100 * 365 + 24)) / 7) / (DAYS_IN_WEEK * WEEKS_IN_YEAR);
    int yearStart = (int) (WEEKS_IN_YEAR * (year - 1) + chronology.previousLeapYears(year)) * DAYS_IN_WEEK;

    // Despite the year being an estimate, the effect should still be within a few days.
    if (yearStart > daysInLongCycle) {
        year--;
        yearStart -= (WEEKS_IN_YEAR + (chronology.isLeapYear(year) ? 1 : 0)) * DAYS_IN_WEEK;
    } else if (daysInLongCycle - yearStart >= (WEEKS_IN_YEAR + (chronology.isLeapYear(year) ? 1 : 0)) * DAYS_IN_WEEK) {
        yearStart += (WEEKS_IN_YEAR + (chronology.isLeapYear(year) ? 1 : 0)) * DAYS_IN_WEEK;
        year++;
    }

    return ofYearDay(chronology, year + 400 * longCycle, daysInLongCycle - yearStart + 1);
}
python
def to_records(cls, attr_names, value_matrix):
    """
    Convert a value matrix to records to be inserted into a database.

    :param list attr_names:
        List of attributes for the converting records.
    :param value_matrix: Values to be converted.
    :type value_matrix: list of |dict|/|namedtuple|/|list|/|tuple|

    .. seealso:: :py:meth:`.to_record`
    """
    return [cls.to_record(attr_names, record) for record in value_matrix]
python
def _validate_param(param):  # pylint: disable=too-many-branches
    """ Ensure the filter cast properly according to the operator """
    detail = None

    if param.oper not in goldman.config.QUERY_FILTERS:
        detail = 'The query filter {} is not a supported ' \
                 'operator. Please change {} & retry your ' \
                 'request'.format(param.oper, param)

    elif param.oper in goldman.config.GEO_FILTERS:
        try:
            if not isinstance(param.val, list) or len(param.val) <= 2:
                raise ValueError
            else:
                param.val = [float(i) for i in param.val]
        except ValueError:
            detail = 'The query filter {} requires a list ' \
                     'of floats for geo evaluation. Please ' \
                     'modify your request & retry'.format(param)

    elif param.oper in goldman.config.ENUM_FILTERS:
        if not isinstance(param.val, list):
            param.val = [param.val]
        param.val = tuple(param.val)

    elif isinstance(param.val, list):
        detail = 'The query filter {} should not be specified more ' \
                 'than once or have multiple values. Please modify ' \
                 'your request & retry'.format(param)

    elif param.oper in goldman.config.BOOL_FILTERS:
        try:
            param.val = str_to_bool(param.val)
        except ValueError:
            detail = 'The query filter {} requires a boolean ' \
                     'for evaluation. Please modify your ' \
                     'request & retry'.format(param)

    elif param.oper in goldman.config.DATE_FILTERS:
        try:
            param.val = str_to_dt(param.val)
        except ValueError:
            detail = 'The query filter {} supports only an ' \
                     'epoch or ISO 8601 timestamp. Please ' \
                     'modify your request & retry'.format(param)

    elif param.oper in goldman.config.NUM_FILTERS:
        try:
            param.val = int(param.val)
        except ValueError:
            detail = 'The query filter {} requires a number ' \
                     'for evaluation. Please modify your ' \
                     'request & retry'.format(param)

    if detail:
        raise InvalidQueryParams(**{
            'detail': detail,
            'links': LINK,
            'parameter': PARAM,
        })
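str_to_bool and str_to_dt are helpers from the surrounding project; an illustrative str_to_bool that raises ValueError the way this validator expects might look like the following (the accepted spellings are an assumption):
python
def str_to_bool(value):
    """Map common boolean spellings to bool; raise ValueError otherwise."""
    mapping = {'true': True, '1': True, 'yes': True,
               'false': False, '0': False, 'no': False}
    try:
        return mapping[str(value).strip().lower()]
    except KeyError:
        raise ValueError('not a boolean: %r' % value)

assert str_to_bool('True') is True and str_to_bool('0') is False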
python
def edges(self, edges):
    """Specify edge list data and associated edge attribute values.

    :param edges: Edges and their attributes.
    :type edges: Pandas dataframe, NetworkX graph, or IGraph graph.

    :returns: Plotter.
    :rtype: Plotter.

    **Example**
        ::

            import graphistry
            df = pandas.DataFrame({'src': [0,1,2], 'dst': [1,2,0]})
            graphistry
                .bind(source='src', destination='dst')
                .edges(df)
                .plot()

    """
    res = copy.copy(self)
    res._edges = edges
    return res
python
def _validate_entity(entity):
    '''
    Validates the entity dict representation

    entity
        Dictionary representation of an entity.
        See ``_get_entity`` docstrings for format.
    '''
    # Validate entity:
    if entity['type'] == 'cluster':
        schema = ESXClusterEntitySchema.serialize()
    elif entity['type'] == 'vcenter':
        schema = VCenterEntitySchema.serialize()
    else:
        raise ArgumentValueError('Unsupported entity type \'{0}\''
                                 ''.format(entity['type']))
    try:
        jsonschema.validate(entity, schema)
    except jsonschema.exceptions.ValidationError as exc:
        raise InvalidEntityError(exc)
python
def show_tricky_tasks(self, verbose=0):
    """
    Print list of tricky tasks i.e. tasks that have been restarted or
    launched more than once or tasks with corrections.

    Args:
        verbose: Verbosity level. If > 0, task history and corrections (if any) are printed.
    """
    nids, tasks = [], []
    for task in self.iflat_tasks():
        if task.num_launches > 1 or any(n > 0 for n in (task.num_restarts, task.num_corrections)):
            nids.append(task.node_id)
            tasks.append(task)

    if not nids:
        cprint("Everything's fine, no tricky tasks found", color="green")
    else:
        self.show_status(nids=nids)
        if not verbose:
            print("Use --verbose to print task history.")
            return

        for nid, task in zip(nids, tasks):
            cprint(repr(task), **task.status.color_opts)
            self.show_history(nids=[nid], full_history=False, metadata=False)
            #if task.num_restarts:
            #    self.show_restarts(nids=[nid])
            if task.num_corrections:
                self.show_corrections(nids=[nid])
python
def monitor_session_span_command_dest_tengigabitethernet(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    monitor = ET.SubElement(config, "monitor", xmlns="urn:brocade.com:mgmt:brocade-span")
    session = ET.SubElement(monitor, "session")
    session_number_key = ET.SubElement(session, "session-number")
    session_number_key.text = kwargs.pop('session_number')
    span_command = ET.SubElement(session, "span-command")
    dest_tengigabitethernet = ET.SubElement(span_command, "dest-tengigabitethernet")
    dest_tengigabitethernet.text = kwargs.pop('dest_tengigabitethernet')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
Object toSimpleValue(Class<?> genericType, Object element, TypeAdapters typeAdapters) {
    if (element == null) {
        return null;
    }
    log.info("Convert from " + element.getClass().getName() + " to " + genericType.getName());
    if (genericType.isEnum() && (element instanceof String)) {
        @SuppressWarnings({ "unchecked", "rawtypes" })
        Enum<?> enumValue = Enum.valueOf((Class<? extends Enum>) genericType, (String) element);
        return enumValue;
    }
    Converter converter = SIMPLE_VALUE_CONVERTERS.get(genericType.getName());
    if (converter != null) {
        return converter.convert(element);
    }
    if ((element instanceof String) && (typeAdapters != null)) {
        TypeAdapter<?> adapter = typeAdapters.getTypeAdapter(genericType);
        if (adapter != null) {
            return adapter.deserialize((String) element);
        }
    }
    return element;
}
java
@Override
public JMXConnector newJMXConnector(JMXServiceURL serviceURL, Map<String, ?> environment) throws IOException {
    if (serviceURL == null || environment == null)
        throw new NullPointerException();
    if (!ClientProvider.REST_PROTOCOL.equals(serviceURL.getProtocol()))
        throw new MalformedURLException();
    return new Connector(serviceURL, environment);
}
python
def forwards(self, orm):
    "Write your forwards methods here."
    orm['avocado.DataField'].objects.filter(
        app_name='samples',
        model_name='cohort',
        field_name__in=['investigator', 'notes']).delete()
python
def load(cls, file_name, model_name='default'):
    """ Loads a previously saved PyEMMA object from disk.

    Parameters
    ----------
    file_name : str or file like object (has to provide read method).
        The file like object tried to be read for a serialized object.
    model_name: str, default='default'
        if multiple models are contained in the file, these can be accessed by
        their name. Use :func:`pyemma.list_models` to get a representation of
        all stored models.

    Returns
    -------
    obj : the de-serialized object
    """
    from .h5file import H5File
    with H5File(file_name, model_name=model_name, mode='r') as f:
        return f.model
python
def parser():
    """Return search query parser."""
    query_parser = current_app.config['COLLECTIONS_QUERY_PARSER']
    if isinstance(query_parser, six.string_types):
        query_parser = import_string(query_parser)
    return query_parser
python
def finished(self, filename):
    """Make Checkstyle ElementTree."""
    if len(self.errors) < 1:
        return
    element = ET.SubElement(self.checkstyle_element, 'file', name=filename)
    for error in self.errors:
        message = error.code + ' ' + error.text
        prefix = error.code.strip()[0]
        ET.SubElement(element, 'error', {
            'severity': CHECKSTYLE_SERVERITY.get(prefix, 'info'),
            'line': str(error.line_number),
            'column': str(error.column_number),
            'message': message,
            'source': CHECKSTYLE_SOURCE.get(prefix, 'undefined code'),
        })
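For reference, the Checkstyle XML shape this method appends to looks like the output of this standalone ElementTree snippet (the file name and error values are made up):
python
import xml.etree.ElementTree as ET

checkstyle = ET.Element('checkstyle')
f = ET.SubElement(checkstyle, 'file', name='example.py')
ET.SubElement(f, 'error', {
    'severity': 'warning',
    'line': '3',
    'column': '1',
    'message': 'W291 trailing whitespace',
    'source': 'pycodestyle',
})
print(ET.tostring(checkstyle).decode())
# <checkstyle><file name="example.py"><error ... /></file></checkstyle>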
java
static protected String quote(final String value) {
    final StringBuilder out = new StringBuilder();
    AbstractCLA.uncompileQuoter(out, value);
    return out.toString();
}
python
def post(node_name, key, **kwargs):
    """ Give the server information about this node

    Arguments:
        node   -- node_name or token for the node this data belongs to
        key    -- identifiable key, that you use later to retrieve that piece of data
        kwargs -- the data you need to store
    """
    node = nago.core.get_node(node_name)
    if not node:
        raise ValueError("Node named %s not found" % node_name)
    token = node.token
    # .get avoids a KeyError the first time a node posts data
    node_data[token] = node_data.get(token) or {}
    node_data[token][key] = kwargs
    return "thanks!"
python
def clear_distribute_alterations(self):
    """Removes the distribution rights.

    raise: NoAccess - ``Metadata.isRequired()`` is ``true`` or
           ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for osid.resource.ResourceForm.clear_group_template
    if (self.get_distribute_alterations_metadata().is_read_only() or
            self.get_distribute_alterations_metadata().is_required()):
        raise errors.NoAccess()
    self._my_map['distributeAlterations'] = self._distribute_alterations_default
python
def process_timer(self, key, fields):
    """
    Process a received timer event

    :param key: Key of timer
    :param fields: Received fields
    """
    try:
        if key not in self.timers:
            self.timers[key] = []
        self.timers[key].append(float(fields[0]))
        if self.stats_seen >= maxint:
            self.logger.info("hit maxint, reset seen counter")
            self.stats_seen = 0
        self.stats_seen += 1
    except Exception as err:
        self.logger.info("error decoding timer event: %s" % err)
        if self.debug:
            print("error decoding timer event: %s" % err)
python
def match_prototype(acallable, arguments):
    """Return tuple (pos args, kwargs) to call given callable

    Let's define a callable that will printout

        >>> arguments = {'alphonse': 1, 'bertrand': 2, 'charlie': 3}

        >>> match_prototype(lambda arguments: None, arguments)
        ([{'bertrand': 2, 'charlie': 3, 'alphonse': 1}], {})

        >>> match_prototype(lambda args: None, arguments)
        ([{'bertrand': 2, 'charlie': 3, 'alphonse': 1}], {})

        >>> match_prototype(lambda bertrand, arguments: None, arguments)
        ([2, {'charlie': 3, 'alphonse': 1}], {})

        >>> match_prototype(lambda bertrand, arguments, foo=None: None, arguments)
        ([2, {'charlie': 3, 'alphonse': 1}], {})

        >>> match_prototype(lambda bertrand, arguments, charlie=None: None,
        ...                 arguments)
        ([2, {'alphonse': 1}], {'charlie': 3})

    """
    args, defaults = get_calling_prototype(acallable)
    arguments = arguments.copy()
    defaults = [] if defaults is None else defaults
    p = []
    kw = {}

    pos_args = len(args) - len(defaults)
    has_args = any(k in ('args', 'arguments') for k in args)
    args_label_pos = None
    for i, arg in enumerate(args):
        is_pos = i < pos_args
        val = None
        if not args_label_pos and arg in ('arguments', 'args'):
            val = arguments  ## copy by reference here is important
        else:
            k = None
            for k in arguments:
                norm = k
                if norm.startswith("--"):
                    if is_pos:
                        continue
                    norm = norm[2:]
                elif k.startswith("-"):
                    if is_pos:
                        continue
                    norm = norm[1:]
                norm = norm.lower()
                norm = norm.replace('-', '_')
                if norm == arg:
                    break
            else:
                if not has_args:
                    raise SyntaxError(
                        "Can't match your function argument %r with "
                        "command line keys (%s)."
                        % (arg, ", ".join(arguments.keys())))
                else:
                    k = None
            if k is not None:
                ## inplace removal is important here
                val = arguments.pop(k)
        if is_pos:
            p.append(val)
        else:
            if val is not None:
                ## we should only have strings if it was set.
                kw[arg] = val
    return p, kw
java
public static HttpResponse execute(final String url, final String method) {
    return execute(url, method, null, null, new HashMap<>(), new HashMap<>());
}
java
@Override
public final void filterWrite(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) {
    if (eventTypes.contains(IoEventType.WRITE)) {
        IoFilterEvent event = new IoFilterEvent(nextFilter, IoEventType.WRITE, session, writeRequest);
        fireEvent(event);
    } else {
        nextFilter.filterWrite(session, writeRequest);
    }
}
python
def get_uri(self):
    """Return the Item source"""
    if self.source_file and os.path.exists(self.source_file.path):
        return self.source_file.path
    elif self.source_url:
        return self.source_url
    return None
python
def is_closing(self) -> bool:
    """Return ``True`` if this connection is closing.

    The connection is considered closing if either side has
    initiated its closing handshake or if the stream has been
    shut down uncleanly.
    """
    return self.stream.closed() or self.client_terminated or self.server_terminated
java
protected final PackageDescr compilationUnit(PackageDescrBuilder pkg) throws RecognitionException {
    try {
        // package declaration?
        if (input.LA(1) != DRL6Lexer.EOF && helper.validateIdentifierKey(DroolsSoftKeywords.PACKAGE)) {
            String pkgName = packageStatement(pkg);
            pkg.name(pkgName);
            if (state.failed)
                return pkg.getDescr();

            // unit declaration?
            // this is only allowed immediately after the package declaration
            if (input.LA(1) != DRL6Lexer.EOF && helper.validateIdentifierKey(DroolsSoftKeywords.UNIT)) {
                unitStatement(pkg);
            }
        }

        // statements
        while (input.LA(1) != DRL6Lexer.EOF) {
            int next = input.index();
            if (helper.validateStatement(1)) {
                statement(pkg);
                if (state.failed)
                    return pkg.getDescr();

                if (next == input.index()) {
                    // no token consumed, so, report problem:
                    resyncToNextStatement();
                }
            } else {
                resyncToNextStatement();
            }

            if (input.LA(1) == DRL6Lexer.SEMICOLON) {
                match(input, DRL6Lexer.SEMICOLON, null, null, DroolsEditorType.SYMBOL);
                if (state.failed)
                    return pkg.getDescr();
            }
        }
    } catch (RecognitionException e) {
        helper.reportError(e);
    } catch (Exception e) {
        helper.reportError(e);
    } finally {
        helper.setEnd(pkg);
    }
    return pkg.getDescr();
}
java
public void setVBaselineIncrement(Integer newVBaselineIncrement) {
    Integer oldVBaselineIncrement = vBaselineIncrement;
    vBaselineIncrement = newVBaselineIncrement;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET,
                AfplibPackage.METRIC_ADJUSTMENT__VBASELINE_INCREMENT,
                oldVBaselineIncrement, vBaselineIncrement));
}
python
def consume(self, expect_class=None):
    """Retrieve the current token, then advance the parser.

    If an expected class is provided, it will assert that the current token
    matches that class (is an instance).

    Note that when calling a token's nud() or led() functions, the "current"
    token is the token following the token whose method has been called.

    Returns:
        Token: the previous current token.

    Raises:
        InvalidTokenError: If an expect_class is provided and the current
            token doesn't match that class.
    """
    if expect_class and not isinstance(self.current_token, expect_class):
        raise InvalidTokenError("Unexpected token at %d: got %r, expected %s" % (
            self.current_pos, self.current_token, expect_class.__name__))

    current_token = self.current_token
    self._forward()
    return current_token
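A self-contained miniature of the same consume() contract, outside the project's parser classes, showing the expect_class check before advancing:
python
class Token:
    pass

class NumberToken(Token):
    def __init__(self, value):
        self.value = value

class MiniParser:
    def __init__(self, tokens):
        self.tokens = list(tokens)
        self.pos = 0

    @property
    def current_token(self):
        return self.tokens[self.pos]

    def consume(self, expect_class=None):
        token = self.current_token
        if expect_class and not isinstance(token, expect_class):
            raise ValueError("got %r, expected %s" % (token, expect_class.__name__))
        self.pos += 1  # advance only after the expectation holds
        return token

parser = MiniParser([NumberToken(42)])
assert parser.consume(NumberToken).value == 42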
python
def get(tzid):
    """Return timezone data"""
    ns = {}
    path = os.path.join(DATA_DIR, tzid)
    with open(path) as f:
        raw_data = f.read()
    exec(raw_data, ns, ns)
    z = ZoneData()
    z.types = [(delta(offset), delta(save), abbr)
               for offset, save, abbr in ns['types']]
    z.times = [(datetime(*time), i) for time, i in ns['times']]
    z.rules = ns['posix']
    return z
java
public String toLoggableString() {
    StringBuilder connectionStringBuilder = new StringBuilder();
    if (this.endpoint != null) {
        connectionStringBuilder.append(String.format(Locale.US, "%s%s%s%s", ENDPOINT_CONFIG_NAME,
                KEY_VALUE_SEPARATOR, this.endpoint.toString(), KEY_VALUE_PAIR_DELIMITER));
    }

    if (!StringUtil.isNullOrWhiteSpace(this.entityPath)) {
        connectionStringBuilder.append(String.format(Locale.US, "%s%s%s%s", ENTITY_PATH_CONFIG_NAME,
                KEY_VALUE_SEPARATOR, this.entityPath, KEY_VALUE_PAIR_DELIMITER));
    }

    return connectionStringBuilder.toString();
}
java
@Override public void handleMessage(Message message) throws Fault { for (int i = interceptors.size() - 1; i >= 0; i--) { try { interceptors.get(i).prepareContext(); } catch (Throwable t) { LOG.log(Level.WARNING, "ASYNC_INTERCEPTOR_EXCEPTION_PREPARE_CONTEXT", new Object[]{interceptors.get(i).getClass().getName(), t}); } } message.getExchange().put(MPAsyncInvocationInterceptorImpl.class, this); //Liberty change }
java
Optional<String> writeTimeGauge(TimeGauge gauge) { Double value = gauge.value(getBaseTimeUnit()); if (Double.isFinite(value)) { return Optional.of(writeDocument(gauge, builder -> { builder.append(",\"value\":").append(value); })); } return Optional.empty(); }
java
public void marshall(ModifyEventSubscriptionRequest modifyEventSubscriptionRequest, ProtocolMarshaller protocolMarshaller) { if (modifyEventSubscriptionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(modifyEventSubscriptionRequest.getSubscriptionName(), SUBSCRIPTIONNAME_BINDING); protocolMarshaller.marshall(modifyEventSubscriptionRequest.getSnsTopicArn(), SNSTOPICARN_BINDING); protocolMarshaller.marshall(modifyEventSubscriptionRequest.getSourceType(), SOURCETYPE_BINDING); protocolMarshaller.marshall(modifyEventSubscriptionRequest.getEventCategories(), EVENTCATEGORIES_BINDING); protocolMarshaller.marshall(modifyEventSubscriptionRequest.getEnabled(), ENABLED_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def get_manhole_factory(namespace, **passwords):
    """Build a Twisted Conch SSH factory serving a colored manhole.

    *namespace* is exposed inside the manhole; keyword arguments map
    usernames to passwords.
    """
    realm = manhole_ssh.TerminalRealm()
    realm.chainedProtocolFactory.protocolFactory = (
        lambda _: EnhancedColoredManhole(namespace)
    )
    p = portal.Portal(realm)
    p.registerChecker(
        checkers.InMemoryUsernamePasswordDatabaseDontUse(**passwords)
    )
    return manhole_ssh.ConchFactory(p)
java
public MultiLineString fromTransferObject(MultiLineStringTo input, CrsId crsId) { if (input == null) { return null; } crsId = getCrsId(input, crsId); isValid(input); LineString[] lineStrings = new LineString[input.getCoordinates().length]; for (int i = 0; i < lineStrings.length; i++) { lineStrings[i] = new LineString(createPointSequence(input.getCoordinates()[i], crsId)); } return new MultiLineString(lineStrings); }
java
@Override public Request<ImportKeyPairRequest> getDryRunRequest() { Request<ImportKeyPairRequest> request = new ImportKeyPairRequestMarshaller().marshall(this); request.addParameter("DryRun", Boolean.toString(true)); return request; }
python
def _insertions(self, result, dimension, dimension_index):
    """Return list of (idx, sum) pairs representing subtotals.

    *idx* is the int offset at which to insert the ndarray *sum* (the
    computed subtotal) along *dimension_index* of *result*.
    """
    def iter_insertions():
        for anchor_idx, addend_idxs in dimension.hs_indices:
            insertion_idx = (
                -1 if anchor_idx == "top"
                else result.shape[dimension_index] - 1 if anchor_idx == "bottom"
                else anchor_idx
            )
            addend_fancy_idx = tuple(
                [slice(None) for _ in range(dimension_index)]
                + [np.array(addend_idxs)]
            )
            yield (
                insertion_idx,
                np.sum(result[addend_fancy_idx], axis=dimension_index),
            )

    return list(iter_insertions())
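python
# Self-contained numpy sketch of the subtotal computation above, with the
# anchor handling ("top"/"bottom") reduced to a plain row index.
import numpy as np

result = np.array([[1, 2],
                   [3, 4],
                   [5, 6]])
addend_idxs = np.array([0, 2])            # rows contributing to the subtotal
subtotal = np.sum(result[addend_idxs], axis=0)
print(subtotal)                           # [6 8]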
java
@Override
public void play2(Number streamId, Map<String, ?> playOptions) {
    log.debug("play2 options: {}", playOptions.toString());
    /*
     * { streamName=streams/new.flv, oldStreamName=streams/old.flv,
     *   start=0, len=-1, offset=12.195, transition=switch }
     */
    // get the transition type
    String transition = (String) playOptions.get("transition");
    if (conn != null) {
        if ("NetStreamPlayTransitions.STOP".equals(transition)) {
            PendingCall pendingCall = new PendingCall("play", new Object[] { Boolean.FALSE });
            conn.invoke(pendingCall, getChannelForStreamId(streamId));
        } else if ("NetStreamPlayTransitions.RESET".equals(transition)) {
            // just reset the currently playing stream
        } else {
            Object[] params = new Object[6];
            params[0] = playOptions.get("streamName").toString();
            Object o = playOptions.get("start");
            params[1] = o instanceof Integer ? (Integer) o : Integer.valueOf((String) o);
            o = playOptions.get("len");
            params[2] = o instanceof Integer ? (Integer) o : Integer.valueOf((String) o);
            // new parameters for playback
            params[3] = transition;
            params[4] = playOptions.get("offset");
            params[5] = playOptions.get("oldStreamName");
            // do call
            PendingCall pendingCall = new PendingCall("play2", params);
            conn.invoke(pendingCall, getChannelForStreamId(streamId));
        }
    } else {
        log.info("Connection was null, ignoring play2 call");
    }
}
python
def enc(data, **kwargs):
    '''
    Encrypt ``data`` with the configured box type.

    Alias to `{box_type}_encrypt`.

    box_type: secretbox, sealedbox (default)
    '''
    if 'keyfile' in kwargs:
        salt.utils.versions.warn_until(
            'Neon',
            'The \'keyfile\' argument has been deprecated and will be removed in Salt '
            '{version}. Please use \'sk_file\' argument instead.'
        )
        kwargs['sk_file'] = kwargs['keyfile']
    if 'key' in kwargs:
        salt.utils.versions.warn_until(
            'Neon',
            'The \'key\' argument has been deprecated and will be removed in Salt '
            '{version}. Please use \'sk\' argument instead.'
        )
        kwargs['sk'] = kwargs['key']
    box_type = _get_config(**kwargs)['box_type']
    if box_type == 'secretbox':
        return secretbox_encrypt(data, **kwargs)
    return sealedbox_encrypt(data, **kwargs)
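python
# For illustration only: the same sealed-box encryption done directly with
# PyNaCl (the Salt module wraps libnacl, so the names differ). Assumes
# PyNaCl is installed.
from nacl.public import PrivateKey, SealedBox

sk = PrivateKey.generate()
ciphertext = SealedBox(sk.public_key).encrypt(b"secret data")
assert SealedBox(sk).decrypt(ciphertext) == b"secret data"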
python
def wider_pre_conv(layer, n_add_filters, weighted=True):
    '''Widen a conv layer by ``n_add_filters`` output filters (Net2WiderNet).

    When ``weighted``, the new filters are copies of randomly chosen
    existing filters, with noise added to break symmetry.
    '''
    n_dim = get_n_dim(layer)
    if not weighted:
        return get_conv_class(n_dim)(
            layer.input_channel,
            layer.filters + n_add_filters,
            kernel_size=layer.kernel_size,
        )
    n_pre_filters = layer.filters
    rand = np.random.randint(n_pre_filters, size=n_add_filters)
    teacher_w, teacher_b = layer.get_weights()

    student_w = teacher_w.copy()
    student_b = teacher_b.copy()
    # copy each randomly chosen teacher filter into the widened student layer
    for i in range(len(rand)):
        teacher_index = rand[i]
        new_weight = teacher_w[teacher_index, ...]
        new_weight = new_weight[np.newaxis, ...]
        student_w = np.concatenate((student_w, new_weight), axis=0)
        student_b = np.append(student_b, teacher_b[teacher_index])
    new_pre_layer = get_conv_class(n_dim)(
        layer.input_channel, n_pre_filters + n_add_filters, layer.kernel_size
    )
    new_pre_layer.set_weights(
        (add_noise(student_w, teacher_w), add_noise(student_b, teacher_b))
    )
    return new_pre_layer
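python
# Minimal numpy illustration of the Net2WiderNet copy step above: new output
# filters duplicate randomly chosen existing ones (noise omitted here).
import numpy as np

teacher_w = np.random.rand(4, 3, 3, 3)    # (filters, channels, kh, kw)
teacher_b = np.random.rand(4)
rand = np.random.randint(4, size=2)       # filters to duplicate

student_w = np.concatenate((teacher_w, teacher_w[rand]), axis=0)
student_b = np.append(teacher_b, teacher_b[rand])
print(student_w.shape, student_b.shape)   # (6, 3, 3, 3) (6,)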
java
@EventBusListenerMethod(scope = EventScope.UI) void onEvent(final RolloutEvent event) { switch (event) { case FILTER_BY_TEXT: case CREATE_ROLLOUT: case UPDATE_ROLLOUT: case SHOW_ROLLOUTS: refreshContainer(); break; default: break; } }
python
def get_go2nt(self, usr_go2nt): """Combine user namedtuple fields, GO object fields, and format_txt.""" gos_all = self.get_gos_all() # Minimum set of namedtuple fields available for use with Sorter on grouped GO IDs prt_flds_all = get_hdridx_flds() + self.gosubdag.prt_attr['flds'] if not usr_go2nt: return self.__init_go2nt_dflt(gos_all, prt_flds_all) usr_nt_flds = next(iter(usr_go2nt.values()))._fields # If user namedtuple already contains all fields available, then return usr_go2nt if len(set(prt_flds_all).difference(usr_nt_flds)) == 0: return self._init_go2nt_aug(usr_go2nt) # Otherwise, combine user fields and default Sorter fields return self.__init_go2nt_w_usr(gos_all, usr_go2nt, prt_flds_all)
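python
# Toy version of the field-set check used above: rebuild namedtuples only
# when the user's fields are missing some of the required ones. The field
# names here are hypothetical.
from collections import namedtuple

required = ('GO', 'name', 'depth')
UserNt = namedtuple('UserNt', 'GO name')
usr = UserNt('GO:0000001', 'mitochondrion inheritance')
missing = set(required).difference(usr._fields)
print(missing)  # {'depth'}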
java
@Deprecated public void setDayNameLength(int length) { if (length != LENGTH_LONG && length != LENGTH_SHORT) { throw new IllegalArgumentException("length parameter must be either LENGTH_LONG or LENGTH_SHORT"); } this.mDayNameLength = length; }
python
def ExportClientsByKeywords(keywords, filename, token=None): r"""A script to export clients summaries selected by a keyword search. This script does a client search for machines matching all of keywords and writes a .csv summary of the results to filename. Multi-value fields are '\n' separated. Args: keywords: a list of keywords to search for filename: the name of the file to write to, will be replaced if already present token: datastore token. """ index = client_index.CreateClientIndex(token=token) client_list = index.LookupClients(keywords) logging.info("found %d clients", len(client_list)) if not client_list: return writer = csv.DictWriter([ u"client_id", u"hostname", u"last_seen", u"os", u"os_release", u"os_version", u"users", u"ips", u"macs", ]) writer.WriteHeader() for client in aff4.FACTORY.MultiOpen(client_list, token=token): s = client.Schema writer.WriteRow({ u"client_id": client.urn.Basename(), u"hostname": client.Get(s.HOSTNAME), u"os": client.Get(s.SYSTEM), u"os_release": client.Get(s.OS_RELEASE), u"os_version": client.Get(s.OS_VERSION), u"ips": client.Get(s.HOST_IPS), u"macs": client.Get(s.MAC_ADDRESS), u"users": "\n".join(client.Get(s.USERNAMES, [])), u"last_seen": client.Get(s.PING), }) with io.open(filename, "w") as csv_out: csv_out.write(writer.Content())
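python
# For comparison, the stdlib csv.DictWriter equivalent of the custom writer
# used above (GRR ships its own buffering wrapper with WriteHeader/WriteRow).
import csv
import io

buf = io.StringIO()
writer = csv.DictWriter(buf, fieldnames=["client_id", "hostname"])
writer.writeheader()
writer.writerow({"client_id": "C.123", "hostname": "host1"})
print(buf.getvalue())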
python
def _populate_trace(self, graph: TraceGraph, trace_frame_ids: List[int]) -> None:
    """ Populates (from the given trace graph) the forward and backward
    traces reachable from the given trace frames (including the input
    frames themselves). Only successors whose trace kind matches the
    current frame's kind are followed. """
    while len(trace_frame_ids) > 0:
        trace_frame_id = trace_frame_ids.pop()
        if trace_frame_id in self._visited_trace_frame_ids:
            continue

        trace_frame = graph._trace_frames[trace_frame_id]
        self._add_trace_frame(graph, trace_frame)
        self._visited_trace_frame_ids.add(trace_frame_id)

        key = (trace_frame.callee_id.local_id, trace_frame.callee_port)
        trace_frame_ids.extend(
            [
                trace_frame_id
                for trace_frame_id in graph._trace_frames_map[key]
                if trace_frame_id not in self._visited_trace_frame_ids
                and graph._trace_frames[trace_frame_id].kind == trace_frame.kind
            ]
        )
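python
# Generic worklist traversal with a visited set -- the same shape as
# _populate_trace above -- over a toy adjacency map.
graph = {1: [2, 3], 2: [3], 3: []}
worklist, visited = [1], set()
while worklist:
    node = worklist.pop()
    if node in visited:
        continue
    visited.add(node)
    worklist.extend(n for n in graph[node] if n not in visited)
print(sorted(visited))  # [1, 2, 3]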