Dataset columns: language (string, 2 distinct values: python, java) and func_code_string (string, lengths 63 to 466k characters).
python
def memoize(func):
    """Simple caching decorator."""
    cache = {}

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        """Caching wrapper."""
        key = (args, tuple(sorted(kwargs.items())))
        if key in cache:
            return cache[key]
        else:
            result = func(*args, **kwargs)
            cache[key] = result
            return result
    return wrapper
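A brief usage sketch of the decorator above; it assumes the memoize definition is in scope, functools is imported in that module, and all arguments are hashable.

import functools

# assumes memoize() as defined above is available in this module

@memoize
def slow_add(a, b):
    print("computing", a, b)
    return a + b

print(slow_add(1, 2))      # computes and caches: prints "computing 1 2", then 3
print(slow_add(1, 2))      # cache hit: prints only 3
print(slow_add(b=2, a=1))  # different key (kwargs), computed again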
python
def count_courses(self):
    """Return the average number of courses per string."""
    c = 0
    for x in self.tuning:
        if type(x) == list:
            c += len(x)
        else:
            c += 1
    return float(c) / len(self.tuning)
python
def get_url(cls, world, category=Category.EXPERIENCE, vocation=VocationFilter.ALL, page=1):
    """Gets the Tibia.com URL of the highscores for the given parameters.

    Parameters
    ----------
    world: :class:`str`
        The game world of the desired highscores.
    category: :class:`Category`
        The desired highscores category.
    vocation: :class:`VocationFilter`
        The vocation filter to apply. By default all vocations will be shown.
    page: :class:`int`
        The page of highscores to show.

    Returns
    -------
    The URL to the Tibia.com highscores.
    """
    return HIGHSCORES_URL % (world, category.value, vocation.value, page)
python
def verify(self, store=None, chain=None, key=None): """ Verify self. Supports verification on both X509 store object or just public issuer key @param store X509Store object. @param chain - list of X509 objects to add into verification context.These objects are untrusted, but can be used to build certificate chain up to trusted object in the store @param key - PKey object with open key to validate signature parameters store and key are mutually exclusive. If neither is specified, attempts to verify self as self-signed certificate """ if store is not None and key is not None: raise X509Error("key and store cannot be specified simultaneously") if store is not None: ctx = libcrypto.X509_STORE_CTX_new() if ctx is None: raise X509Error("Error allocating X509_STORE_CTX") if chain is not None and len(chain) > 0: chain_ptr = StackOfX509(chain).ptr else: chain_ptr = None if libcrypto.X509_STORE_CTX_init(ctx, store.store, self.cert, chain_ptr) < 0: raise X509Error("Error allocating X509_STORE_CTX") res = libcrypto.X509_verify_cert(ctx) libcrypto.X509_STORE_CTX_free(ctx) return res > 0 else: if key is None: if self.issuer != self.subject: # Not a self-signed certificate return False key = self.pubkey res = libcrypto.X509_verify(self.cert, key.key) if res < 0: raise X509Error("X509_verify failed") return res > 0
python
def delete(self, docids):
    """Delete documents (specified by their ids) from the index."""
    logger.debug("deleting %i documents from %s" % (len(docids), self))
    deleted = 0
    for docid in docids:
        try:
            del self.id2pos[docid]
            deleted += 1
            del self.id2sims[docid]
        except:
            pass
    self.id2sims.sync()
    if deleted:
        logger.info("deleted %i documents from %s" % (deleted, self))
        self.update_mappings()
python
def is_selected_by_selector(self, selector):
    """Assert the option matching the CSS selector is selected."""
    elem = find_element_by_jquery(world.browser, selector)
    if not elem.is_selected():
        raise AssertionError("Element expected to be selected.")
python
def _make_inputnode(self, frequency): """ Generates an input node for the given frequency. It also adds implicit file format conversion nodes to the pipeline. Parameters ---------- frequency : str The frequency (i.e. 'per_session', 'per_visit', 'per_subject' or 'per_study') of the input node to retrieve """ # Check to see whether there are any outputs for the given frequency inputs = list(self.frequency_inputs(frequency)) # Get list of input names for the requested frequency, addding fields # to hold iterator IDs input_names = [i.name for i in inputs] input_names.extend(self.study.FREQUENCIES[frequency]) if not input_names: raise ArcanaError( "No inputs to '{}' pipeline for requested freqency '{}'" .format(self.name, frequency)) # Generate input node and connect it to appropriate nodes inputnode = self.add('{}_inputnode'.format(frequency), IdentityInterface(fields=input_names)) # Loop through list of nodes connected to study data specs and # connect them to the newly created input node for input in inputs: # @ReservedAssignment # Keep track of previous conversion nodes to avoid replicating the # conversion for inputs that are used in multiple places prev_conv_nodes = {} for (node, node_in, format, # @ReservedAssignment @IgnorePep8 conv_kwargs) in self._input_conns[input.name]: # If fileset formats differ between study and pipeline # inputs create converter node (if one hasn't been already) # and connect input to that before connecting to inputnode if self.requires_conversion(input, format): try: conv = format.converter_from(input.format, **conv_kwargs) except ArcanaNoConverterError as e: e.msg += ( "which is required to convert '{}' from {} to {} " "for '{}' input of '{}' node".format( input.name, input.format, format, node_in, node.name)) raise e try: in_node = prev_conv_nodes[format.name] except KeyError: in_node = prev_conv_nodes[format.name] = self.add( 'conv_{}_to_{}_format'.format(input.name, format.name), conv.interface, inputs={conv.input: (inputnode, input.name)}, requirements=conv.requirements, mem_gb=conv.mem_gb, wall_time=conv.wall_time) in_node_out = conv.output else: in_node = inputnode in_node_out = input.name self.connect(in_node, in_node_out, node, node_in) # Connect iterator inputs for iterator, conns in self._iterator_conns.items(): # Check to see if this is the right frequency for the iterator # input, i.e. if it is the only iterator for this frequency if self.study.FREQUENCIES[frequency] == (iterator,): for (node, node_in, format) in conns: # @ReservedAssignment self.connect(inputnode, iterator, node, node_in) return inputnode
java
private <T> Collection<Integer> transformSource(SourceTransformation<T> source) {
    String slotSharingGroup = determineSlotSharingGroup(source.getSlotSharingGroup(), Collections.emptyList());

    streamGraph.addSource(source.getId(),
            slotSharingGroup,
            source.getCoLocationGroupKey(),
            source.getOperator(),
            null,
            source.getOutputType(),
            "Source: " + source.getName());
    if (source.getOperator().getUserFunction() instanceof InputFormatSourceFunction) {
        InputFormatSourceFunction<T> fs = (InputFormatSourceFunction<T>) source.getOperator().getUserFunction();
        streamGraph.setInputFormat(source.getId(), fs.getFormat());
    }
    streamGraph.setParallelism(source.getId(), source.getParallelism());
    streamGraph.setMaxParallelism(source.getId(), source.getMaxParallelism());
    return Collections.singleton(source.getId());
}
java
boolean isCodeInCCLength(int encLength, int code) {
    boolean found;
    if (encLength > 1 || code >= BitSet.SINGLE_BYTE_SIZE) {
        if (mbuf == null) {
            found = false;
        } else {
            found = CodeRange.isInCodeRange(mbuf.getCodeRange(), code);
        }
    } else {
        found = bs.at(code);
    }

    if (isNot()) {
        return !found;
    } else {
        return found;
    }
}
java
public void disable(String jobId, DisableJobOption disableTasks, JobDisableOptions jobDisableOptions) {
    disableWithServiceResponseAsync(jobId, disableTasks, jobDisableOptions).toBlocking().single().body();
}
java
public double computeCosineSimilarity(String analogy, Matrix m) { double cosineVals = 0.0; int totalVals = 0; if (!isAnalogyFormat(analogy, true)) { System.err.println("Analogy: \"" + analogy + "\" not in proper format"); return 0.0; } String pairs[] = analogy.split("::"); String pair1 = pairs[0]; String pair2 = pairs[1]; if (!isAnalogyFormat(pair1) || !isAnalogyFormat(pair2)) { System.err.println("Analogy: \"" + analogy + "\" not in proper format"); return 0.0; } if(!original_to_alternates.containsKey(pair1) || !original_to_alternates.containsKey(pair2)) { //check if the reverse pair exists String pair1_pair[] = pair1.split(":"); String pair1_a = pair1_pair[1]; String pair1_b = pair1_pair[0]; String pair2_pair[] = pair2.split(":"); String pair2_a = pair2_pair[1]; String pair2_b = pair2_pair[0]; pair1 = pair1_a+":"+pair1_b; pair2 = pair2_a+":"+pair2_b; if(!original_to_alternates.containsKey(pair1) || !original_to_alternates.containsKey(pair2)) { System.err.println("Analogy: \"" + analogy + "\" not included in original pairs"); return 0.0; } } double original_cosineVal = cosineSimilarity(m.getRow(getIndexOfPair(pair1, matrix_row_map)), m.getRow(getIndexOfPair(pair2, matrix_row_map))); cosineVals += original_cosineVal; totalVals++; //System.err.println("orig cos: " + cosineVals); ArrayList<String> alternates1 = original_to_alternates.get(pair1); ArrayList<String> alternates2 = original_to_alternates.get(pair2); for (String a : alternates1) { for (String b : alternates2) { int a_index = getIndexOfPair(a, matrix_row_map); int b_index = getIndexOfPair(b, matrix_row_map); if(a_index != -1 && b_index != -1) { double alternative_cosineVal = cosineSimilarity(m.getRow(a_index),m.getRow(b_index)); //System.err.println("adding cos: " + alternative_cosineVal); if (alternative_cosineVal >= original_cosineVal) { cosineVals += alternative_cosineVal; totalVals++; } } } } if (totalVals > 0) { return cosineVals/totalVals; } else { return 0.0; } }
python
def split_by_count(items, count, filler=None):
    """Split the items into tuples of count items each

    >>> split_by_count([0,1,2,3], 2)
    [(0, 1), (2, 3)]

    If there are a multiple of count items then filler makes no difference
    >>> split_by_count([0,1,2,7,8,9], 3, 0) == split_by_count([0,1,2,7,8,9], 3)
    True

    If there are not a multiple of count items, then any extras are discarded
    >>> split_by_count([0,1,2,7,8,9,6], 3)
    [(0, 1, 2), (7, 8, 9)]

    Specifying a filler expands the "lost" group
    >>> split_by_count([0,1,2,7,8,9,6], 3, 0)
    [(0, 1, 2), (7, 8, 9), (6, 0, 0)]
    """
    if filler is not None:
        items = items[:]
        while len(items) % count:
            items.append(filler)
    iterator = iter(items)
    iterators = [iterator] * count
    return list(zip(*iterators))
python
def get(key, default='', delimiter=':'):
    '''
    Retrieve master config options, with optional nesting via the delimiter
    argument.

    **Arguments**

    default
        If the key is not found, the default will be returned instead

    delimiter
        Override the delimiter used to separate nested levels of a data
        structure.

    CLI Example:

    .. code-block:: bash

        salt-run config.get gitfs_remotes
        salt-run config.get file_roots:base
        salt-run config.get file_roots,base delimiter=','
    '''
    ret = salt.utils.data.traverse_dict_and_list(__opts__, key, default='_|-', delimiter=delimiter)
    if ret == '_|-':
        return default
    else:
        return salt.utils.sdb.sdb_get(ret, __opts__)
python
def read_from_config(cp, **kwargs):
    """Initializes a model from the given config file.

    The section must have a ``name`` argument. The name argument corresponds
    to the name of the class to initialize.

    Parameters
    ----------
    cp : WorkflowConfigParser
        Config file parser to read.
    \**kwargs :
        All other keyword arguments are passed to the ``from_config`` method
        of the class specified by the name argument.

    Returns
    -------
    cls
        The initialized model.
    """
    # use the name to get the distribution
    name = cp.get("model", "name")
    return models[name].from_config(cp, **kwargs)
java
public void setAsynchConsumerCallback(int requestNumber, int maxActiveMessages, long messageLockExpiry, int batchsize, OrderingContext orderContext, // f200337, F219476.2 boolean stoppable, //SIB0.comms int maxSequentialFailures, long hiddenMessageDelay) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "setAsynchConsumerCallback", new Object[] { requestNumber, maxActiveMessages, messageLockExpiry, batchsize, orderContext, stoppable, maxSequentialFailures, hiddenMessageDelay }); checkNotBrowserSession(); // F171893 if (getReadAheadPermitted()) { if ((maxActiveMessages == 0) && (messageLockExpiry == 0) && !stoppable) { subConsumer = new CATProxyConsumer(this); } else { try { getConsumerSession().stop(); } catch (SIException e) { //No FFDC Code Needed if(TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Caught SIException on calling stop on ConsumerSession.", e); //Throw a RuntimeException which can be caught by our caller as needed throw new RuntimeException(e.getMessage(), e); } subConsumer = new CATAsynchConsumer(this); } } else { subConsumer = new CATAsynchConsumer(this); } subConsumer.setAsynchConsumerCallback(requestNumber, maxActiveMessages, messageLockExpiry, batchsize, orderContext, // f200337, F219476.2 stoppable, maxSequentialFailures, hiddenMessageDelay); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "setAsynchConsumerCallback"); }
python
def v_init_extension(ctx, stmt):
    """find the modulename of the prefix, and set `stmt.keyword`"""
    (prefix, identifier) = stmt.raw_keyword
    (modname, revision) = \
        prefix_to_modulename_and_revision(stmt.i_module, prefix,
                                          stmt.pos, ctx.errors)
    stmt.keyword = (modname, identifier)
    stmt.i_extension_modulename = modname
    stmt.i_extension_revision = revision
    stmt.i_extension = None
python
def align_to_mmap(num, round_up):
    """
    Align the given integer number to the closest page offset, which usually
    is 4096 bytes.

    :param round_up: if True, the next higher multiple of page size is used,
        otherwise the lower page_size will be used (i.e. if True, 1 becomes
        4096, otherwise it becomes 0)
    :return: num rounded to closest page"""
    res = (num // ALLOCATIONGRANULARITY) * ALLOCATIONGRANULARITY
    if round_up and (res != num):
        res += ALLOCATIONGRANULARITY
    # END handle size
    return res
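A short usage sketch for the helper above; it assumes ALLOCATIONGRANULARITY comes from the standard-library mmap module (typically 4096, but platform-dependent).

from mmap import ALLOCATIONGRANULARITY  # usually 4096

# assumes align_to_mmap() as defined above is available in this module

print(align_to_mmap(1, round_up=False))    # 0 (rounded down to page start)
print(align_to_mmap(1, round_up=True))     # ALLOCATIONGRANULARITY, e.g. 4096
print(align_to_mmap(8192, round_up=True))  # already aligned, returned unchanged (with 4096 granularity)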
python
def release(self):
    """Create a release

    1. Perform Sanity checks on work file.
    2. Copy work file to releasefile location.
    3. Perform cleanup actions on releasefile.

    :returns: True if successful, False if not.
    :rtype: bool
    :raises: None
    """
    log.info("Releasing: %s", self._workfile.get_fullpath())
    ac = self.build_actions()
    ac.execute(self)
    s = ac.status().value
    if not s == ActionStatus.SUCCESS:
        ard = ActionReportDialog(ac)
        ard.exec_()
    return s == ActionStatus.SUCCESS
java
public final Future<InetAddress> resolve(String inetHost, Iterable<DnsRecord> additionals) {
    return resolve(inetHost, additionals, executor().<InetAddress>newPromise());
}
python
def clear(self):
    """Clear the data, and thus, force it to be created on the next fetch.

    This is done by removing the attribute from ``owner``, deleting it from
    globals and removing the file from the disk.
    """
    vname = self.varname
    if self.path.exists():
        logger.debug('deleting cached work: {}'.format(self.path))
        self.path.unlink()
    if self.owner is not None and hasattr(self.owner, vname):
        logger.debug('removing instance var: {}'.format(vname))
        delattr(self.owner, vname)
    self.clear_global()
python
def main():
    """Create a new Cheroot instance with arguments from the command line."""
    parser = argparse.ArgumentParser(
        description='Start an instance of the Cheroot WSGI/HTTP server.',
    )
    for arg, spec in _arg_spec.items():
        parser.add_argument(arg, **spec)
    raw_args = parser.parse_args()

    # ensure cwd in sys.path
    '' in sys.path or sys.path.insert(0, '')

    # create a server based on the arguments provided
    raw_args._wsgi_app.server(raw_args).safe_start()
java
private void processTag(boolean start) {
    if (start) {
        nested++;
        doc.startElement(tag, attributes);
    } else {
        nested--;
        doc.endElement(tag);
    }
}
python
def vote(self, identifier, weight=100.0):
    '''
    Waits 5 seconds as that is the required amount of time between votes.
    '''
    for num_of_retries in range(default.max_retry):
        try:
            self.steem_instance().vote(identifier, weight, self.mainaccount)
            self.msg.message("voted for " + identifier)
            time.sleep(5)
        except Exception as e:
            if re.search(r'You have already voted in a similar way', str(e)):
                self.msg.error_message('''Already voted on {}'''.format(identifier))
                return "already voted"
            else:
                self.util.retry('''COULD NOT VOTE ON {}'''.format(identifier),
                                e, num_of_retries, default.wait_time)
                self.s = None
        else:
            return True
python
def clean_password2(self):
    """
    Check whether password 1 and password 2 are equivalent

    While ideally this would be done in clean, there is a chance a
    superclass could declare clean and forget to call super. We therefore
    opt to run this password mismatch check in password2 clean, but to show
    the error above password1 (as we are unsure whether password 1 or
    password 2 contains the typo, and putting it above password 2 may lead
    some users to believe the typo is in just one).
    """
    password1 = self.cleaned_data.get('password1')
    password2 = self.cleaned_data.get('password2')
    if password1 and password2 and password1 != password2:
        self.add_error(
            'password1',
            forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            ))
    return password2
python
def dates(self, field_name, kind, order='ASC'):
    """
    Returns a list of datetime objects representing all available dates for
    the given field_name, scoped to 'kind'.
    """
    assert kind in ("month", "year", "day", "week", "hour", "minute"), \
        "'kind' must be one of 'year', 'month', 'day', 'week', 'hour' and 'minute'."
    assert order in ('ASC', 'DESC'), \
        "'order' must be either 'ASC' or 'DESC'."

    search = self._build_search()
    search.facet.reset()
    search.facet.add_date_facet(name=field_name.replace("__", "."),
                                field=field_name,
                                interval=kind)
    search.size = 0
    connection = self.model._meta.dj_connection
    resultset = connection.connection.search(search,
                                             indices=connection.database,
                                             doc_types=self.type)
    resultset.fix_facets()
    entries = []
    for val in resultset.facets.get(field_name.replace("__", ".")).get("entries", []):
        if "time" in val:
            entries.append(val["time"])
    if order == "ASC":
        return sorted(entries)
    return sorted(entries, reverse=True)
python
def vartype_argument(*arg_names): """Ensures the wrapped function receives valid vartype argument(s). One or more argument names can be specified (as a list of string arguments). Args: *arg_names (list[str], argument names, optional, default='vartype'): The names of the constrained arguments in function decorated. Returns: Function decorator. Examples: >>> from dimod.decorators import vartype_argument >>> @vartype_argument() ... def f(x, vartype): ... print(vartype) ... >>> f(1, 'SPIN') Vartype.SPIN >>> f(1, vartype='SPIN') Vartype.SPIN >>> @vartype_argument('y') ... def f(x, y): ... print(y) ... >>> f(1, 'SPIN') Vartype.SPIN >>> f(1, y='SPIN') Vartype.SPIN >>> @vartype_argument('z') ... def f(x, **kwargs): ... print(kwargs['z']) ... >>> f(1, z='SPIN') Vartype.SPIN Note: The function decorated can explicitly list (name) vartype arguments constrained by :func:`vartype_argument`, or it can use a keyword arguments `dict`. See also: :func:`~dimod.as_vartype` """ # by default, constrain only one argument, the 'vartype` if not arg_names: arg_names = ['vartype'] def _vartype_arg(f): argspec = getargspec(f) def _enforce_single_arg(name, args, kwargs): try: vartype = kwargs[name] except KeyError: raise TypeError('vartype argument missing') kwargs[name] = as_vartype(vartype) @wraps(f) def new_f(*args, **kwargs): # bound actual f arguments (including defaults) to f argument names # (note: if call arguments don't match actual function signature, # we'll fail here with the standard `TypeError`) bound_args = inspect.getcallargs(f, *args, **kwargs) # `getcallargs` doesn't merge additional positional/keyword arguments, # so do it manually final_args = list(bound_args.pop(argspec.varargs, ())) final_kwargs = bound_args.pop(argspec.keywords, {}) final_kwargs.update(bound_args) for name in arg_names: _enforce_single_arg(name, final_args, final_kwargs) return f(*final_args, **final_kwargs) return new_f return _vartype_arg
python
def inplace(method_name):
    """
    Returns a type instance method that will call the given method name,
    used for inplace operators such as __iadd__ and __imul__.
    """
    def method(self, other):
        getattr(self, method_name)(value_left(self, other))
        return self
    return method
python
def _to_output_code(self):
    """Return a unicode object with the Gremlin/MATCH representation of this Literal."""
    # All supported Literal objects serialize to identical strings both in Gremlin and MATCH.
    self.validate()
    if self.value is None:
        return u'null'
    elif self.value is True:
        return u'true'
    elif self.value is False:
        return u'false'
    elif isinstance(self.value, six.string_types):
        return safe_quoted_string(self.value)
    elif isinstance(self.value, int):
        return six.text_type(self.value)
    elif isinstance(self.value, list):
        if len(self.value) == 0:
            return '[]'
        elif all(isinstance(x, six.string_types) for x in self.value):
            list_contents = ', '.join(safe_quoted_string(x) for x in sorted(self.value))
            return '[' + list_contents + ']'
    else:
        pass  # Fall through to assertion error below.

    raise AssertionError(u'Unreachable state reached: {}'.format(self))
java
@Nonnull
@ReturnsMutableCopy
public ICommonsList <ICommonsList <String>> readAll () throws IOException {
    final ICommonsList <ICommonsList <String>> ret = new CommonsArrayList <> ();
    while (m_bHasNext) {
        final ICommonsList <String> aNextLineAsTokens = readNext ();
        if (aNextLineAsTokens != null)
            ret.add (aNextLineAsTokens);
    }
    return ret;
}
java
@Override
public void cleanup() throws IOException {
    this.client.close();
    ExecutorsUtils.shutdownExecutorService(this.singleThreadPool, Optional.of(log));
}
java
public void onReceiveStatus(CmsWorkflowResponse brokenResources) {
    if (brokenResources.isSuccess()) {
        succeed();
        hide();
        CmsNotification.get().send(
            CmsNotification.Type.NORMAL,
            org.opencms.gwt.client.Messages.get().key(org.opencms.gwt.client.Messages.GUI_DONE_0));
    } else {
        m_failureMessage = brokenResources.getMessage();
        m_state = State.failure;
        m_brokenLinksPanel.setEntries(brokenResources.getResources(), brokenResources.getAvailableActions());
        setPanel(PANEL_BROKEN_LINKS);
    }
}
python
def xmoe_tr_2d():
    """Mixture of experts (16 experts).

    623M Params, einsum=1.09e13

    Returns:
      a hparams
    """
    hparams = xmoe_tr_dense_2k()
    hparams.mesh_shape = "b0:2;b1:4"
    hparams.outer_batch_size = 4
    hparams.layout = "outer_batch:b0;inner_batch:b1,expert_x:b1,expert_y:b0"
    hparams.encoder_layers = ["self_att", "moe_2d"] * 4
    hparams.decoder_layers = ["self_att", "enc_att", "moe_2d"] * 4
    hparams.moe_hidden_size = 2048
    hparams.moe_experts_x = 4
    hparams.moe_experts_y = 4
    return hparams
python
def get_helper(name=None, quiet=True, **kwargs):
    '''
    get the correct helper depending on the environment variable
    HELPME_CLIENT

    quiet: if True, suppress most output about the client (e.g. speak)
    '''
    # Second priority, from environment
    from helpme.defaults import HELPME_CLIENT

    # First priority, from command line
    if name is not None:
        HELPME_CLIENT = name

    # If no obvious credential provided, we can use HELPME_CLIENT
    if HELPME_CLIENT == 'github':
        from .github import Helper
    elif HELPME_CLIENT == 'uservoice':
        from .uservoice import Helper
    elif HELPME_CLIENT == 'discourse':
        from .discourse import Helper
    else:
        from .github import Helper

    Helper.name = HELPME_CLIENT
    Helper.quiet = quiet

    # Initialize the database
    return Helper()
python
def _cal_color(self, value, color_index):
    """Blend between two colors based on input value."""
    range_min_p = self._domain[color_index]
    range_p = self._domain[color_index + 1] - range_min_p
    try:
        factor = (value - range_min_p) / range_p
    except ZeroDivisionError:
        factor = 0
    min_color = self.colors[color_index]
    max_color = self.colors[color_index + 1]
    red = round(factor * (max_color.r - min_color.r) + min_color.r)
    green = round(factor * (max_color.g - min_color.g) + min_color.g)
    blue = round(factor * (max_color.b - min_color.b) + min_color.b)
    return Color(red, green, blue)
java
protected void notifyChangeListeners() {
    ChangeEvent e = new ChangeEvent(this);
    for (ChangeListener l : changeListeners) {
        l.stateChanged(e);
    }
}
java
public CmsUser readUser(CmsDbContext dbc, String username) throws CmsDataAccessException {
    CmsUser user = m_monitor.getCachedUser(username);
    if (user == null) {
        user = getUserDriver(dbc).readUser(dbc, username);
        m_monitor.cacheUser(user);
    }
    // important: do not return the cached user object, but a clone to avoid unwanted changes on cached objects
    return user.clone();
}
java
public void marshall(RestApi restApi, ProtocolMarshaller protocolMarshaller) {

    if (restApi == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(restApi.getId(), ID_BINDING);
        protocolMarshaller.marshall(restApi.getName(), NAME_BINDING);
        protocolMarshaller.marshall(restApi.getDescription(), DESCRIPTION_BINDING);
        protocolMarshaller.marshall(restApi.getCreatedDate(), CREATEDDATE_BINDING);
        protocolMarshaller.marshall(restApi.getVersion(), VERSION_BINDING);
        protocolMarshaller.marshall(restApi.getWarnings(), WARNINGS_BINDING);
        protocolMarshaller.marshall(restApi.getBinaryMediaTypes(), BINARYMEDIATYPES_BINDING);
        protocolMarshaller.marshall(restApi.getMinimumCompressionSize(), MINIMUMCOMPRESSIONSIZE_BINDING);
        protocolMarshaller.marshall(restApi.getApiKeySource(), APIKEYSOURCE_BINDING);
        protocolMarshaller.marshall(restApi.getEndpointConfiguration(), ENDPOINTCONFIGURATION_BINDING);
        protocolMarshaller.marshall(restApi.getPolicy(), POLICY_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public static Float getQuality(String value) {
    if (value == null)
        return __zero;

    int qe = value.indexOf(";");
    if (qe++ < 0 || qe == value.length())
        return __one;

    if (value.charAt(qe++) == 'q') {
        qe++;
        Map.Entry entry = __qualities.getEntry(value, qe, value.length() - qe);
        if (entry != null)
            return (Float) entry.getValue();
    }

    HashMap params = new HashMap(3);
    valueParameters(value, params);
    String qs = (String) params.get("q");
    Float q = (Float) __qualities.get(qs);
    if (q == null) {
        try { q = new Float(qs); }
        catch (Exception e) { q = __one; }
    }
    return q;
}
python
def list(self, instance=None, limit=20, marker=0):
    """
    Return a paginated list of backups, or just for a particular instance.
    """
    if instance is None:
        return super(CloudDatabaseBackupManager, self).list()
    return self.api._manager._list_backups_for_instance(instance, limit=limit,
                                                        marker=marker)
python
def getOutEdges(self, label=None):
    """Gets all the outgoing edges of the node. If label parameter is
    provided, it only returns the edges of the given label

    @params label: Optional parameter to filter the edges

    @returns A generator function with the outgoing edges"""
    if label:
        for edge in self.neoelement.relationships.outgoing(types=[label]):
            yield Edge(edge)
    else:
        for edge in self.neoelement.relationships.outgoing():
            yield Edge(edge)
python
def show(self):
    """
    Reimplements the :meth:`QWidget.show` method.
    """
    selected_text = self.__container.get_current_editor().get_selected_text()
    selected_text and SearchAndReplace.insert_pattern(selected_text, self.__search_patterns_model)
    self.Search_comboBox.line_edit().selectAll()
    self.Search_comboBox.setFocus()

    super(SearchInFiles, self).show()

    self.raise_()
python
def lxqstr(string, qchar, first):
    """
    Lex (scan) a quoted string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lxqstr_c.html

    :param string: String to be scanned.
    :type string: str
    :param qchar: Quote delimiter character.
    :type qchar: char (string of one char)
    :param first: Character position at which to start scanning.
    :type first: int
    :return: last and nchar
    :rtype: tuple
    """
    string = stypes.stringToCharP(string)
    qchar = ctypes.c_char(qchar.encode(encoding='UTF-8'))
    first = ctypes.c_int(first)
    last = ctypes.c_int()
    nchar = ctypes.c_int()
    libspice.lxqstr_c(string, qchar, first, ctypes.byref(last), ctypes.byref(nchar))
    return last.value, nchar.value
python
def dump_with_fn(dump_fn, data, stream, **options):
    """
    Dump 'data' to a string if 'stream' is None, or dump 'data' to a file or
    file-like object 'stream'.

    :param dump_fn: Callable to dump data
    :param data: Data to dump
    :param stream: File or file like object or None
    :param options: optional keyword parameters

    :return: String represents data if stream is None or None
    """
    if stream is None:
        return dump_fn(data, **options)

    return dump_fn(data, stream, **options)
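A brief usage sketch of the helper above with PyYAML as the dumper; yaml.dump accepts either a data object alone (returning a string) or a data object plus a stream, which matches the two call shapes the helper uses. PyYAML is only an illustrative choice here.

import io
import yaml  # PyYAML, used purely as an example dump_fn

# assumes dump_with_fn() as defined above is available in this module

data = {"name": "example", "values": [1, 2, 3]}

# stream is None -> the dumper's return value (a string) is passed back
text = dump_with_fn(yaml.dump, data, None, default_flow_style=False)
print(text)

# stream given -> the dumper writes into it and dump_with_fn returns None
buf = io.StringIO()
dump_with_fn(yaml.dump, data, buf, default_flow_style=False)
print(buf.getvalue())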
python
def get(self, singleSnapshot=False): """ *geneate the pyephem positions* **Key Arguments:** - ``singleSnapshot`` -- just extract positions for a single pyephem snapshot (used for unit testing) **Return:** - ``None`` """ self.log.info('starting the ``get`` method') global xephemOE global tileSide global magLimit # GRAB PARAMETERS FROM SETTINGS FILE tileSide = float(self.settings["pyephem"]["atlas exposure match side"]) magLimit = float(self.settings["pyephem"]["magnitude limit"]) snapshotsRequired = 1 while snapshotsRequired > 0: nextMjds, exposures, snapshotsRequired = self._get_exposures_requiring_pyephem_positions( concurrentSnapshots=int(self.settings["pyephem"]["batch size"])) print "There are currently %(snapshotsRequired)s more pyephem snapshots required " % locals() if snapshotsRequired == 0: return if len(xephemOE) == 0: xephemOE = self._get_xephem_orbital_elements() # DEFINE AN INPUT ARRAY magLimit = self.settings["pyephem"]["magnitude limit"] pyephemDB = fmultiprocess(log=self.log, function=_generate_pyephem_snapshot, timeout=300, inputArray=nextMjds, magLimit=magLimit) matchedObjects = [] for p, e, m in zip(pyephemDB, exposures, nextMjds): matchedObjects.append( self._match_pyephem_snapshot_to_atlas_exposures(p, e, m)) self._add_matched_objects_to_database(matchedObjects) self._update_database_flag(exposures) if singleSnapshot: snapshotsRequired = 0 self.log.info('completed the ``get`` method') return None
python
def fill(self, key, mor_dict):
    """
    Set a dict mapping (resource_type --> objects[]) for a given key
    """
    with self._objects_queue_lock:
        self._objects_queue[key] = mor_dict
python
def set_interrupt(self, method=None, **kwargs):
    """
    Decorator that turns a function or controller method into an action interrupt.
    """
    def action_wrap(f):
        action_id = kwargs.get("action_id", f.__name__)
        name = kwargs.get("name", action_id)
        if inspect.ismethod(f):  # not "." in f.__qualname__:
            self._interrupt = _ActionInterrupt(f)
            self._ui_parameters["interrupt_enabled"] = True
            return self._interrupt
        else:
            qual_name = getattr(f, "__qualname__", None)
            owner_class = kwargs.get("controller_class", None)
            if owner_class:
                qual_name = owner_class + "." + f.__name__
            if qual_name:
                Actions.add_unbound_interrupt(qual_name, self)
            else:
                print("using upython? if yes you need to pass the name of the controller class via the controller_class parameter.")
            return f

    if method:
        return action_wrap(method)
    else:
        return action_wrap
python
def _send_request(self, request, headers=None, content=None, **operation_config):
    """Prepare and send request object according to configuration.

    :param ClientRequest request: The request object to be sent.
    :param dict headers: Any headers to add to the request.
    :param content: Any body data to add to the request.
    :param config: Any specific config overrides
    """
    if (TRACE_ENV_VAR in os.environ and os.environ[TRACE_ENV_VAR] == 'true') \
            or (TRACE_ENV_VAR_COMPAT in os.environ and os.environ[TRACE_ENV_VAR_COMPAT] == 'true'):
        print(request.method + ' ' + request.url)
    logger.debug('%s %s', request.method, request.url)
    logger.debug('Request content: %s', content)
    response = self._client.send(request=request, headers=headers,
                                 content=content, **operation_config)
    logger.debug('Response content: %s', response.content)
    if response.status_code < 200 or response.status_code >= 300:
        self._handle_error(request, response)
    return response
java
public ManagedEntity findChild(ManagedEntity parent, String name) throws RuntimeFault, RemoteException {
    if (parent == null) {
        throw new IllegalArgumentException("parent entity must not be null.");
    }
    ManagedObjectReference mor = getVimService().findChild(getMOR(), parent.getMOR(), name);
    return MorUtil.createExactManagedEntity(getServerConnection(), mor);
}
java
public void addData(final char[] data, final int off, final int size) { if(size<1){ return; } final String str = new String(data, off, size); if (str.contains("\n")) { final String[] lines = str.split("\\r?\\n", -1); for (int i = 0 ; i < lines.length - 1 ; i++) { appendString(lines[i], true); } if (lines.length > 0) { final String last = lines[lines.length - 1]; if ("".equals(last)) { //end of previous line } else { appendString(last, false); } } } else if (str.contains("\r")) { final String[] lines = str.split("\\r", -1); for (int i = 0 ; i < lines.length - 2 ; i++) { appendString(lines[i], true); } //last two split strings //a: 1,1: yes,no //b: 0,1: yes,no //c: 0,0: yes(pass),no //d: 1,0: yes(pass), if (lines.length >= 2) { final String last2 = lines[lines.length - 2]; final String last = lines[lines.length - 1]; if (!"".equals(last)) { appendString(last2, true); appendString(last, false); } else { //pass \r for later \r\n resolution appendString(last2 + "\r", false); } } } else { appendString(str, false); } }
python
def _symbol_token_end(c, ctx, is_field_name, value=None):
    """Returns a transition which ends the current symbol token."""
    if value is None:
        value = ctx.value
    if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx.quoted_text:
        # This might be an annotation or a field name. Mark it as self-delimiting because a
        # symbol token termination character has been found.
        ctx.set_self_delimiting(ctx.quoted_text).set_pending_symbol(value).set_quoted_text(False)
        trans = ctx.immediate_transition(ctx.whence)
    else:
        trans = ctx.event_transition(IonEvent, IonEventType.SCALAR, IonType.SYMBOL, _as_symbol(value))
    return trans
java
public LongConsumer mask(ThrowingLongConsumer<? extends X> consumer) {
    Objects.requireNonNull(consumer);
    return l -> maskException(() -> consumer.accept(l));
}
java
public static boolean containsOnly (@Nullable final CharSequence aCS, @Nullable final ICharPredicate aFilter) {
    final int nLen = getLength (aCS);
    if (nLen == 0)
        return false;

    if (aFilter == null)
        return true;

    for (int i = 0; i < nLen; ++i)
        if (!aFilter.test (aCS.charAt (i)))
            return false;
    return true;
}
java
public ItemCollectionMetrics withSizeEstimateRangeGB(Double... sizeEstimateRangeGB) {
    if (this.sizeEstimateRangeGB == null) {
        setSizeEstimateRangeGB(new java.util.ArrayList<Double>(sizeEstimateRangeGB.length));
    }
    for (Double ele : sizeEstimateRangeGB) {
        this.sizeEstimateRangeGB.add(ele);
    }
    return this;
}
java
@Override public void init(final FilterConfig filterConfig) throws ServletException { // verify there are no init parameters configured that are not recognized // since an unrecognized init param might be the adopter trying to configure this filter in // an important way // and accidentally ignoring that intent might have security implications. final Enumeration initParamNames = filterConfig.getInitParameterNames(); throwIfUnrecognizedParamName(initParamNames); final String initParamAllowMultiValuedParameters = filterConfig.getInitParameter(ALLOW_MULTI_VALUED_PARAMETERS); final String initParamParametersToCheck = filterConfig.getInitParameter(PARAMETERS_TO_CHECK); final String initParamCharactersToForbid = filterConfig.getInitParameter(CHARACTERS_TO_FORBID); try { this.allowMultiValueParameters = parseStringToBooleanDefaultingToFalse(initParamAllowMultiValuedParameters); } catch (final Exception e) { throw new ServletException( "Error parsing request parameter [" + ALLOW_MULTI_VALUED_PARAMETERS + "] with value [" + initParamAllowMultiValuedParameters + "]", e); } try { this.parametersToCheck = parseParametersToCheck(initParamParametersToCheck); } catch (final Exception e) { throw new ServletException( "Error parsing request parameter " + PARAMETERS_TO_CHECK + " with value [" + initParamParametersToCheck + "]", e); } try { this.charactersToForbid = parseCharactersToForbid(initParamCharactersToForbid); } catch (final Exception e) { throw new ServletException( "Error parsing request parameter " + CHARACTERS_TO_FORBID + " with value [" + initParamCharactersToForbid + "]", e); } if (this.allowMultiValueParameters && this.charactersToForbid.isEmpty()) { throw new ServletException( "Configuration to allow multi-value parameters and forbid no characters makes " + getClass().getSimpleName() + " a no-op, which is probably not what you want, " + "so failing Filter init."); } }
java
@BetaApi
public final License getLicense(String license) {
    GetLicenseHttpRequest request = GetLicenseHttpRequest.newBuilder().setLicense(license).build();
    return getLicense(request);
}
java
@Restricted(NoExternalUse.class) public static String normalize(@Nonnull String path) { StringBuilder buf = new StringBuilder(); // Check for prefix designating absolute path Matcher m = ABSOLUTE_PREFIX_PATTERN.matcher(path); if (m.find()) { buf.append(m.group(1)); path = path.substring(m.end()); } boolean isAbsolute = buf.length() > 0; // Split remaining path into tokens, trimming any duplicate or trailing separators List<String> tokens = new ArrayList<>(); int s = 0, end = path.length(); for (int i = 0; i < end; i++) { char c = path.charAt(i); if (c == '/' || c == '\\') { tokens.add(path.substring(s, i)); s = i; // Skip any extra separator chars while (++i < end && ((c = path.charAt(i)) == '/' || c == '\\')) { } // Add token for separator unless we reached the end if (i < end) tokens.add(path.substring(s, s+1)); s = i; } } if (s < end) tokens.add(path.substring(s)); // Look through tokens for "." or ".." for (int i = 0; i < tokens.size();) { String token = tokens.get(i); if (token.equals(".")) { tokens.remove(i); if (tokens.size() > 0) tokens.remove(i > 0 ? i - 1 : i); } else if (token.equals("..")) { if (i == 0) { // If absolute path, just remove: /../something // If relative path, not collapsible so leave as-is tokens.remove(0); if (tokens.size() > 0) token += tokens.remove(0); if (!isAbsolute) buf.append(token); } else { // Normalize: remove something/.. plus separator before/after i -= 2; for (int j = 0; j < 3; j++) tokens.remove(i); if (i > 0) tokens.remove(i-1); else if (tokens.size() > 0) tokens.remove(0); } } else i += 2; } // Recombine tokens for (String token : tokens) buf.append(token); if (buf.length() == 0) buf.append('.'); return buf.toString(); }
python
def get_version():
    """Obtain the version number"""
    import imp
    import os
    mod = imp.load_source(
        'version', os.path.join('skdata', '__init__.py')
    )
    return mod.__version__
java
private static void parseErrorPages(final ErrorPageType errorPageType, final WebApp webApp) {
    final WebAppErrorPage errorPage = new WebAppErrorPage();
    if (errorPageType.getErrorCode() != null) {
        errorPage.setErrorCode(errorPageType.getErrorCode().getValue().toString());
    }
    if (errorPageType.getExceptionType() != null) {
        errorPage.setExceptionType(errorPageType.getExceptionType().getValue());
    }
    if (errorPageType.getLocation() != null) {
        errorPage.setLocation(errorPageType.getLocation().getValue());
    }
    if (errorPage.getErrorCode() == null && errorPage.getExceptionType() == null) {
        errorPage.setExceptionType(ErrorPageModel.ERROR_PAGE);
    }
    webApp.addErrorPage(errorPage);
}
python
def get_find_executions_string(desc, has_children, single_result=False, show_outputs=True, is_cached_result=False): ''' :param desc: hash of execution's describe output :param has_children: whether the execution has children to be printed :param single_result: whether the execution is displayed as a single result or as part of an execution tree :param is_cached_result: whether the execution should be formatted as a cached result ''' is_not_subjob = desc['parentJob'] is None or desc['class'] == 'analysis' or single_result result = ("* " if is_not_subjob and get_delimiter() is None else "") canonical_execution_name = desc['executableName'] if desc['class'] == 'job': canonical_execution_name += ":" + desc['function'] execution_name = desc.get('name', '<no name>') # Format the name of the execution if is_cached_result: result += BOLD() + "[" + ENDC() result += BOLD() + BLUE() if desc['class'] == 'analysis': result += UNDERLINE() result += execution_name + ENDC() if execution_name != canonical_execution_name and execution_name+":main" != canonical_execution_name: result += ' (' + canonical_execution_name + ')' if is_cached_result: result += BOLD() + "]" + ENDC() # Format state result += DELIMITER(' (') + JOB_STATES(desc['state']) + DELIMITER(') ') + desc['id'] # Add unicode pipe to child if necessary result += DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else ""))) result += desc['launchedBy'][5:] + DELIMITER(' ') result += render_short_timestamp(desc['created']) cached_and_runtime_strs = [] if is_cached_result: cached_and_runtime_strs.append(YELLOW() + "cached" + ENDC()) if desc['class'] == 'job': # Only print runtime if it ever started running if desc.get('startedRunning'): if desc['state'] in ['done', 'failed', 'terminated', 'waiting_on_output']: runtime = datetime.timedelta(seconds=int(desc['stoppedRunning']-desc['startedRunning'])//1000) cached_and_runtime_strs.append("runtime " + str(runtime)) elif desc['state'] == 'running': seconds_running = max(int(time.time()-desc['startedRunning']//1000), 0) msg = "running for {rt}".format(rt=datetime.timedelta(seconds=seconds_running)) cached_and_runtime_strs.append(msg) if cached_and_runtime_strs: result += " (" + ", ".join(cached_and_runtime_strs) + ")" if show_outputs: prefix = DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else ""))) if desc.get("output") != None: result += job_output_to_str(desc['output'], prefix=prefix) elif desc['state'] == 'failed' and 'failureReason' in desc: result += prefix + BOLD() + desc['failureReason'] + ENDC() + ": " + fill(desc.get('failureMessage', ''), subsequent_indent=prefix.lstrip('\n')) return result
python
def scale_atoms(fac):
    '''Scale the currently selected atoms by a certain factor *fac*.

    Use the value *fac=1.0* to reset the scale.
    '''
    rep = current_representation()
    atms = selected_atoms()
    rep.scale_factors[atms] = fac
    rep.update_scale_factors(rep.scale_factors)
    viewer.update()
java
@Nullable
public static final <T extends AbstractSingleton> T getSingletonIfInstantiated (@Nullable final IScope aScope,
                                                                                @Nonnull final Class <T> aClass) {
    ValueEnforcer.notNull (aClass, "Class");

    if (aScope != null) {
        final String sSingletonScopeKey = getSingletonScopeKey (aClass);
        final Object aObject = s_aRWLock.readLocked ( () -> aScope.attrs ().get (sSingletonScopeKey));
        if (aObject != null) {
            // Object is in the scope
            final T aCastedObject = aClass.cast (aObject);
            if (aCastedObject.isUsableObject ()) {
                // Object has finished initialization
                return aCastedObject;
            }
        }
    }
    return null;
}
java
public static void decodeTo(byte[] data, int offset, int length, MultiMap map, String charset) { if (data == null || length == 0) return; if (charset==null) charset=StringUtil.__ISO_8859_1; synchronized(map) { try { int ix = offset; int end = offset+length; int ox = offset; String key = null; String value = null; while (ix < end) { byte c = data[ix++]; switch ((char) c) { case '&': value = new String(data, offset, ox, charset); if (key != null) { map.add(key,value); key = null; } ox = offset; break; case '=': if (key!=null) break; key = new String(data, offset, ox, charset); ox = offset; break; case '+': data[ox++] = (byte)' '; break; case '%': int i0 = (14<<4)+1; byte b0 = (byte)i0; data[ox++] = (byte) ((TypeUtil.convertHexDigit(data[ix++]) << 4)+ TypeUtil.convertHexDigit(data[ix++])); break; default: data[ox++] = c; } } if (key != null) { value = new String(data, offset, ox, charset); map.add(key,value); } } catch(UnsupportedEncodingException e) { log.warn(LogSupport.EXCEPTION,e); } } }
python
def snapshot(self, wiki=False, streamed=False, action=None, chunk_size=1024,
             **kwargs):
    """Return a snapshot of the repository.

    Args:
        wiki (bool): If True return the wiki repository
        streamed (bool): If True the data will be processed by chunks of
            `chunk_size` and each chunk is passed to `action` for
            treatment.
        action (callable): Callable responsible of dealing with chunk of
            data
        chunk_size (int): Size of each chunk
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabGetError: If the content could not be retrieved

    Returns:
        str: The uncompressed tar archive of the repository
    """
    path = '/projects/%s/snapshot' % self.get_id()
    result = self.manager.gitlab.http_get(path, streamed=streamed,
                                          raw=True, **kwargs)
    return utils.response_content(result, streamed, action, chunk_size)
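A short usage sketch, assuming the method above is the snapshot() call exposed on python-gitlab Project objects; the URL, token, and project path below are placeholders.

import gitlab

# placeholder server URL and token; a real private token is required
gl = gitlab.Gitlab("https://gitlab.example.com", private_token="<token>")
project = gl.projects.get("group/project")  # placeholder project path

# whole archive returned at once
archive = project.snapshot()

# or stream it to disk chunk by chunk
with open("snapshot.tar", "wb") as f:
    project.snapshot(streamed=True, action=f.write)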
python
def transformer_nat_small():
    """Set of hyperparameters."""
    hparams = transformer.transformer_small()
    hparams.batch_size = 2048
    hparams.learning_rate = 0.2
    hparams.learning_rate_warmup_steps = 4000
    hparams.num_hidden_layers = 3
    hparams.hidden_size = 384
    hparams.filter_size = 2048
    hparams.label_smoothing = 0.0
    hparams.force_full_predict = True
    hparams.optimizer = "adam"
    hparams.optimizer_adam_epsilon = 1e-9
    hparams.optimizer_adam_beta1 = 0.9
    hparams.optimizer_adam_beta2 = 0.997
    hparams.add_hparam("bottleneck_kind", "vq")
    hparams.add_hparam("bottleneck_bits", 12)
    hparams.add_hparam("num_compress_steps", 3)
    hparams.add_hparam("beta", 0.25)
    hparams.add_hparam("epsilon", 1e-5)
    hparams.add_hparam("decay", 0.999)
    hparams.add_hparam("num_samples", 10)
    hparams.add_hparam("mask_startup_steps", 50000)
    return hparams
python
def ffPDC(self):
    """Full frequency partial directed coherence.

    .. math:: \mathrm{ffPDC}_{ij}(f) =
        \\frac{A_{ij}(f)}{\sqrt{\sum_f A_{:j}'(f) A_{:j}(f)}}
    """
    A = self.A()
    return np.abs(A * self.nfft /
                  np.sqrt(np.sum(A.conj() * A, axis=(0, 2), keepdims=True)))
java
static private void doLookup(SoftTFIDFDictionary dict,double d,String s,boolean compare,double[] stats) { System.out.println("lookup: "+s); long start1 = System.currentTimeMillis(); int n1 = dict.lookup(d,s); double elapsedSec1 = (System.currentTimeMillis()-start1) / 1000.0; dict.showLookup( n1 ); List saved = new ArrayList(dict.result); if (compare) { long start2 = System.currentTimeMillis(); int n2 = dict.slowLookup(d,s); double elapsedSec2 = (System.currentTimeMillis()-start2) / 1000.0; collectStats(elapsedSec1,elapsedSec2,saved,dict.result,stats); boolean differentFromBaseline = false; if (n1!=n2) { differentFromBaseline = true; } else { for (int j=0; j<n1; j++) { LookupResult savedj = (LookupResult)saved.get(j); if (!dict.getResult(j).equals(savedj.found) || dict.getScore(j)!=savedj.score) { differentFromBaseline = true; } } } if (differentFromBaseline) { System.out.println("baseline:"); dict.showLookup(n2); } } }
java
public static void printRawLines(PrintWriter writer, String msg) {
    int nl;
    while ((nl = msg.indexOf('\n')) != -1) {
        writer.println(msg.substring(0, nl));
        msg = msg.substring(nl + 1);
    }
    if (msg.length() != 0)
        writer.println(msg);
}
python
def read(self): ''' Read in the specified map and return the map structure ''' map_ = None if self.opts.get('map', None) is None: if self.opts.get('map_data', None) is None: if self.opts.get('map_pillar', None) is None: pass elif self.opts.get('map_pillar') not in self.opts.get('maps'): log.error( 'The specified map not found in pillar at ' '\'cloud:maps:%s\'', self.opts['map_pillar'] ) raise SaltCloudNotFound() else: # 'map_pillar' is provided, try to use it map_ = self.opts['maps'][self.opts.get('map_pillar')] else: # 'map_data' is provided, try to use it map_ = self.opts['map_data'] else: # 'map' is provided, try to use it local_minion_opts = copy.deepcopy(self.opts) local_minion_opts['file_client'] = 'local' self.minion = salt.minion.MasterMinion(local_minion_opts) if not os.path.isfile(self.opts['map']): if not (self.opts['map']).startswith('salt://'): log.error( 'The specified map file does not exist: \'%s\'', self.opts['map'] ) raise SaltCloudNotFound() if (self.opts['map']).startswith('salt://'): cached_map = self.minion.functions['cp.cache_file'](self.opts['map']) else: cached_map = self.opts['map'] try: renderer = self.opts.get('renderer', 'jinja|yaml') rend = salt.loader.render(self.opts, {}) blacklist = self.opts.get('renderer_blacklist') whitelist = self.opts.get('renderer_whitelist') map_ = compile_template( cached_map, rend, renderer, blacklist, whitelist ) except Exception as exc: log.error( 'Rendering map %s failed, render error:\n%s', self.opts['map'], exc, exc_info_on_loglevel=logging.DEBUG ) return {} if 'include' in map_: map_ = salt.config.include_config( map_, self.opts['map'], verbose=False ) if not map_: return {} # Create expected data format if needed for profile, mapped in six.iteritems(map_.copy()): if isinstance(mapped, (list, tuple)): entries = {} for mapping in mapped: if isinstance(mapping, six.string_types): # Foo: # - bar1 # - bar2 mapping = {mapping: None} for name, overrides in six.iteritems(mapping): if overrides is None or isinstance(overrides, bool): # Foo: # - bar1: # - bar2: overrides = {} try: overrides.setdefault('name', name) except AttributeError: log.error( 'Cannot use \'name\' as a minion id in a cloud map as it ' 'is a reserved word. Please change \'name\' to a different ' 'minion id reference.' ) return {} entries[name] = overrides map_[profile] = entries continue if isinstance(mapped, dict): # Convert the dictionary mapping to a list of dictionaries # Foo: # bar1: # grains: # foo: bar # bar2: # grains: # foo: bar entries = {} for name, overrides in six.iteritems(mapped): overrides.setdefault('name', name) entries[name] = overrides map_[profile] = entries continue if isinstance(mapped, six.string_types): # If it's a single string entry, let's make iterable because of # the next step mapped = [mapped] map_[profile] = {} for name in mapped: map_[profile][name] = {'name': name} return map_
python
def get_subassistants(self):
    """Return list of instantiated subassistants.

    Usually, this needs not be overriden in subclasses, you should just
    override get_subassistant_classes

    Returns:
        list of instantiated subassistants
    """
    if not hasattr(self, '_subassistants'):
        self._subassistants = []
        # we want to know, if type(self) defines 'get_subassistant_classes',
        # we don't want to inherit it from superclass (would cause recursion)
        if 'get_subassistant_classes' in vars(type(self)):
            for a in self.get_subassistant_classes():
                self._subassistants.append(a())
    return self._subassistants
java
public double[] getDts(int from, int to) {
    double[] ret = new double[to - from];
    for (int i = from; i < to; i++) {
        ret[i - from] = this.dts[i];
    }
    return ret;
}
java
public void write(JsonGenerator jsonGenerator) throws IOException {
    jsonGenerator.writeStartObject();

    // clusterNodeInfo begins
    jsonGenerator.writeFieldName("clusterNodeInfo");
    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField("name", clusterNodeInfo.name);
    jsonGenerator.writeObjectField("address", clusterNodeInfo.address);
    jsonGenerator.writeObjectField("total", clusterNodeInfo.total);
    jsonGenerator.writeObjectField("free", clusterNodeInfo.free);
    jsonGenerator.writeObjectField("resourceInfos", clusterNodeInfo.resourceInfos);
    jsonGenerator.writeEndObject();
    // clusterNodeInfo ends

    // grants begins
    jsonGenerator.writeFieldName("grants");
    jsonGenerator.writeStartObject();
    for (Map.Entry<GrantId, ResourceRequestInfo> entry : grants.entrySet()) {
        jsonGenerator.writeFieldName(entry.getKey().unique);
        jsonGenerator.writeStartObject();
        jsonGenerator.writeFieldName("grantId");
        entry.getKey().write(jsonGenerator);
        jsonGenerator.writeFieldName("grant");
        entry.getValue().write(jsonGenerator);
        jsonGenerator.writeEndObject();
    }
    jsonGenerator.writeEndObject();
    // grants ends

    jsonGenerator.writeEndObject();
    // We skip the hostNode and lastHeartbeatTime as they need not be persisted.
    // resourceTypeToMaxCpu and resourceTypeToStatsMap can be rebuilt using the
    // conf and the grants respectively.
}
java
public Map<URI, URI> getConflicTable() {
    for (final Map.Entry<URI, URI> e : conflictTable.entrySet()) {
        assert e.getKey().isAbsolute();
        assert e.getValue().isAbsolute();
    }
    return conflictTable;
}
java
boolean shouldDrainBuffers(boolean delayable) {
    if (executor.isShutdown()) {
        DrainStatus status = drainStatus.get();
        return (status != PROCESSING) && (!delayable || (status == REQUIRED));
    }
    return false;
}
java
@Override
public AssociateDeviceWithPlacementResult associateDeviceWithPlacement(AssociateDeviceWithPlacementRequest request) {
    request = beforeClientExecution(request);
    return executeAssociateDeviceWithPlacement(request);
}
java
public ServiceFuture<DatabaseInner> updateAsync(String resourceGroupName, String serverName, String databaseName,
        DatabaseUpdate parameters, final ServiceCallback<DatabaseInner> serviceCallback) {
    return ServiceFuture.fromResponse(
        updateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, parameters), serviceCallback);
}
java
private void resize(final int ns) {
    final int size = BDDPrime.primeGTE(ns);
    this.table = new BDDCacheEntry[size];
    for (int n = 0; n < size; n++)
        this.table[n] = new BDDCacheEntry();
}
python
def _elem_set_attrs(obj, parent, to_str):
    """
    :param obj: Container instance gives attributes of XML Element
    :param parent: XML ElementTree parent node object
    :param to_str: Callable to convert value to string or None
    :param options: Keyword options, see :func:`container_to_etree`

    :return: None but parent will be modified
    """
    for attr, val in anyconfig.compat.iteritems(obj):
        parent.set(attr, to_str(val))
java
public static int readAll(InputStream stream, byte[] target, int startOffset, int maxLength) throws IOException {
    Preconditions.checkNotNull(stream, "stream");
    Preconditions.checkNotNull(target, "target");
    Preconditions.checkElementIndex(startOffset, target.length, "startOffset");
    Exceptions.checkArgument(maxLength >= 0, "maxLength", "maxLength must be a non-negative number.");

    int totalBytesRead = 0;
    while (totalBytesRead < maxLength) {
        int bytesRead = stream.read(target, startOffset + totalBytesRead, maxLength - totalBytesRead);
        if (bytesRead < 0) {
            // End of stream.
            break;
        }
        totalBytesRead += bytesRead;
    }
    return totalBytesRead;
}
java
private static MemoryPoolMXBean findTenuredGenPool() {
    // I don't know whether this approach is better, or whether
    // we should rather check for the pool name "Tenured Gen"?
    for (final MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans())
        if (pool.getType() == MemoryType.HEAP && pool.isUsageThresholdSupported()) {
            return pool;
        }
    throw new AssertionError("Could not find tenured space");
}
java
boolean storeMessage(
    MessageItem msg,
    TransactionCommon transaction,
    InputHandlerStore inputHandlerStore,
    boolean storedByIH) throws SIResourceException {

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(
            tc,
            "storeMessage",
            new Object[] { msg, transaction, inputHandlerStore, Boolean.valueOf(storedByIH) });

    // Store the msg to the item stream.
    // When pubsub and we are attached to the output side of the destination (feature 176658.3.7)
    // we have to have stored this message under the InputHandler
    // item stream first, then reference it here
    if (_isPubSub) {
        try {
            // If this subscription is not an internal subscription then
            // create references on the relevant durable streams
            if (dispatcherState.getTargetDestination() == null) {
                // If the message has not yet been stored in the IH
                if (!storedByIH) {
                    // store it in the IH
                    inputHandlerStore.storeMessage(msg, transaction);
                    storedByIH = true;
                }

                boolean downgradePersistence = false;

                // If this is a non-durable subscription there is no need to persist a
                // reference to a persistent message, it can be downgraded to a volatile
                // reference.
                if (dispatcherState.isDurable()) {
                    msg.addPersistentRef();
                } else {
                    msg.addNonPersistentRef();
                    downgradePersistence = true;
                }

                // store a reference in the reference stream
                final MessageItemReference msgRef = new MessageItemReference(msg, downgradePersistence);

                subscriptionItemStream.getSubscriptionLockManager().lock();
                try {
                    if (!subscriptionItemStream.isToBeDeleted()) {
                        registerForEvents(msgRef);

                        Transaction msTran = _messageProcessor.resolveAndEnlistMsgStoreTransaction(transaction);
                        subscriptionItemStream.add(msgRef, msTran);
                        msTran.registerCallback(msgRef); // PM38052 registering this callback allows
                                                         // afterCompletion to be called on the MessageItemReference
                    }
                } finally {
                    subscriptionItemStream.getSubscriptionLockManager().unlock();
                }
            } else {
                // SM0010.mp.1 This is an internal subscription. We do not
                // want to create references. Instead we want to bypass
                // the reference stream and forward on to the target dest

                // Lookup the target destination
                DestinationHandler target;
                try {
                    target = _baseDestHandler.
                        getDestinationManager().
                        getDestination(dispatcherState.getTargetDestination(), false);

                    if (target == null) {
                        SIResourceException e = new SIResourceException(
                            nls.getFormattedMessage(
                                "INTERNAL_SUBSCRIPTION_TARGET_NOT_FOUND_CWSIP0115",
                                new Object[] { dispatcherState.getSubscriberID(),
                                               dispatcherState.getDurableHome(),
                                               dispatcherState.getTargetDestination() },
                                null));

                        // Target destination for internal subscription does not exist
                        FFDCFilter.processException(
                            e,
                            "com.ibm.ws.sib.processor.impl.ConsumerDispatcher.storeMessage",
                            "1:583:1.280.5.25",
                            this);

                        SibTr.exception(tc, e);

                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                            SibTr.exit(tc, "storeMessage", e);

                        throw e;
                    }

                    // Take a copy of the message
                    JsMessage msgCopy = null;
                    try {
                        msgCopy = msg.getMessage().getReceived();
                    } catch (MessageCopyFailedException e) {
                        // FFDC
                        FFDCFilter.processException(
                            e,
                            "com.ibm.ws.sib.processor.impl.ConsumerDispatcher.storeMessage",
                            "1:606:1.280.5.25",
                            this);

                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                            SibTr.exit(tc, "storeMessage", "MessageCopyFailedException");

                        throw new SIResourceException(e);
                    }

                    // Get the inputhandler associated with it
                    InputHandler inputHandler = target.getInputHandler(
                        target.isPubSub() ? ProtocolType.PUBSUBINPUT : ProtocolType.UNICASTINPUT,
                        _messageProcessor.getMessagingEngineUuid(),
                        null);

                    inputHandler.handleMessage(new MessageItem(msgCopy),
                                               transaction,
                                               _messageProcessor.getMessagingEngineUuid());
                } catch (SITemporaryDestinationNotFoundException e) {
                    // No FFDC code needed
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "storeMessage", "SITemporaryDestinationNotFoundException");

                    throw new SIResourceException(e);
                } catch (SINotPossibleInCurrentConfigurationException e) {
                    // No FFDC code needed
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "storeMessage", "SINotPossibleInCurrentConfigurationException");

                    throw new SIResourceException(e);
                } catch (SIIncorrectCallException e) {
                    // No FFDC code needed
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "storeMessage", "SIIncorrectCallException");

                    throw new SIResourceException(e);
                }
            }
        } catch (RollbackException e) {
            // No FFDC code needed
            // We catch the RollbackException explicitly because we do not want to generate
            // an FFDC and error in the log. Rollbacks are not internal errors.
            // Any real errors will be caught by the MessageStoreException catch block below.
            SibTr.exception(tc, e);

            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "storeMessage", e);

            throw new SIResourceException(e);
        } catch (InvalidAddOperation e) {
            // No FFDC code needed
            if (state != SIMPState.DELETED) {
                // MessageStoreException shouldn't occur so FFDC.
                FFDCFilter.processException(
                    e,
                    "com.ibm.ws.sib.processor.impl.ConsumerDispatcher.storeMessage",
                    "1:668:1.280.5.25",
                    this);

                SibTr.exception(tc, e);

                SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                                   "1:675:1.280.5.25",
                                   e });

                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "storeMessage", e);

                throw new SIResourceException(
                    nls.getFormattedMessage(
                        "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                        new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                                       "1:686:1.280.5.25",
                                       e },
                        null),
                    e);
            }

            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "ConsumerDispatcher deleted " + this);
        } catch (MessageStoreException e) {
            // MessageStoreException shouldn't occur so FFDC.
            FFDCFilter.processException(
                e,
                "com.ibm.ws.sib.processor.impl.ConsumerDispatcher.storeMessage",
                "1:702:1.280.5.25",
                this);

            SibTr.exception(tc, e);

            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                               "1:709:1.280.5.25",
                               e });

            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "storeMessage", e);

            throw new SIResourceException(
                nls.getFormattedMessage(
                    "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                                   "1:720:1.280.5.25",
                                   e },
                    null),
                e);
        }
    } else { // pt-pt
        // Add the item to the itemstream.  183715.1
        // The itemstream should never be null. If it is, a null pointer exception
        // will be thrown.
        // The transaction should also never be null. An auto-commit transaction
        // should be passed in if the message is not transacted.
        try {
            // Register for the event on the listener.
            _baseDestHandler.registerForEvents(msg);

            // 516307: Register for events before we add the message. If we're using an auto commit tran
            // then there's a possibility that a consumer can come in on another thread and see this message
            // even before the addItem method returns. This means they can remove the message from the
            // ItemStream and no events will be driven, so no post processing occurs, including PMI statistics
            // counting and re-driving the message if the consumer chooses to unlock or roll back the consumption
            // of the message.
            // (this isn't a problem for pubsub as we never use an auto commit tran)
            registerForEvents(msg);

            Transaction msTran = _messageProcessor.resolveAndEnlistMsgStoreTransaction(transaction);
            itemStream.addItem(msg, msTran);
        } catch (OutOfCacheSpace e) {
            // No FFDC code needed
            SibTr.exception(tc, e);

            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "storeMessage", e);

            throw new SIResourceException(e);
        } catch (MessageStoreException e) {
            // MessageStoreException shouldn't occur so FFDC.
            FFDCFilter.processException(
                e,
                "com.ibm.ws.sib.processor.impl.ConsumerDispatcher.storeMessage",
                "1:769:1.280.5.25",
                this);

            SibTr.exception(tc, e);

            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                               "1:776:1.280.5.25",
                               e });

            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "storeMessage", e);

            throw new SIResourceException(
                nls.getFormattedMessage(
                    "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] { "com.ibm.ws.sib.processor.impl.ConsumerDispatcher",
                                   "1:787:1.280.5.25",
                                   e },
                    null),
                e);
        }

        storedByIH = false;
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "storeMessage", Boolean.valueOf(storedByIH));

    return storedByIH;
}
java
@Deprecated
public static int matchAfter(CharSequence a, CharSequence b, int aIndex, int bIndex) {
    int i = aIndex, j = bIndex;
    int alen = a.length();
    int blen = b.length();
    for (; i < alen && j < blen; ++i, ++j) {
        char ca = a.charAt(i);
        char cb = b.charAt(j);
        if (ca != cb) {
            break;
        }
    }
    // if we failed a match make sure that we didn't match half a character
    int result = i - aIndex;
    if (result != 0 && !onCharacterBoundary(a, i) && !onCharacterBoundary(b, j)) {
        --result; // backup
    }
    return result;
}
python
def clean(ctx, dry_run=False):
    """Cleanup generated document artifacts."""
    basedir = ctx.sphinx.destdir or "build/docs"
    cleanup_dirs([basedir], dry_run=dry_run)
java
public static String[] split(String str, String separator, boolean trim) {
    if (str == null) {
        return null;
    }
    char sep = separator.charAt(0);
    ArrayList<String> strList = new ArrayList<String>();
    StringBuilder split = new StringBuilder();
    int index = 0;
    while ((index = StringUtils.findNext(str, sep, StringUtils.ESCAPE_CHAR, index, split)) >= 0) {
        ++index; // move over the separator for next search
        strList.add(split.toString());
        split.setLength(0); // reset the buffer
    }
    strList.add(split.toString());
    // remove trailing empty split(s)
    if (trim) {
        int last = strList.size(); // last split
        while (--last >= 0 && "".equals(strList.get(last))) {
            strList.remove(last);
        }
    }
    return strList.toArray(new String[strList.size()]);
}
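The escape-aware splitting above is easier to see in a short standalone form. The Python sketch below mirrors the same idea under illustrative assumptions: the helper name split_escaped, a backslash escape character, and the choice to unescape while splitting are not part of the Java utility above.

def split_escaped(text, sep=",", esc="\\"):
    """Split text on sep, treating esc+sep as a literal separator character.

    Illustrative sketch only; mirrors the escape-aware splitting idea above.
    """
    parts, buf, i = [], [], 0
    while i < len(text):
        ch = text[i]
        if ch == esc and i + 1 < len(text):
            buf.append(text[i + 1])     # keep the escaped character literally
            i += 2
        elif ch == sep:
            parts.append("".join(buf))  # close the current field
            buf = []
            i += 1
        else:
            buf.append(ch)
            i += 1
    parts.append("".join(buf))
    return parts


# Example: the second comma is escaped, so it stays inside the field.
assert split_escaped(r"a,b\,c,d") == ["a", "b,c", "d"]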
java
public void setAcList(StringArray v) {
    if (DBInfo_Type.featOkTst && ((DBInfo_Type) jcasType).casFeat_acList == null)
        jcasType.jcas.throwFeatMissing("acList", "de.julielab.jules.types.DBInfo");
    jcasType.ll_cas.ll_setRefValue(addr, ((DBInfo_Type) jcasType).casFeatCode_acList,
        jcasType.ll_cas.ll_getFSRef(v));
}
python
def dumps(data, **kwargs):
    """Create a string CCSDS representation of the object

    Same arguments and behaviour as :py:func:`dump`
    """
    if isinstance(data, Ephem) or (isinstance(data, Iterable)
                                   and all(isinstance(x, Ephem) for x in data)):
        content = _dump_oem(data, **kwargs)
    elif isinstance(data, Orbit):
        content = _dump_opm(data, **kwargs)
    else:
        raise TypeError("Unknown object type")

    return content
java
protected StringConcatenationClient generateStandardCommentFunctions(boolean forInterface,
        boolean forAppender, String elementAccessor) {
    return new StringConcatenationClient() {
        @Override
        protected void appendTo(TargetStringConcatenation it) {
            it.append(generateCommentFunction(forInterface, forAppender, elementAccessor,
                    "setDocumentation", //$NON-NLS-1$
                    "The documentation will be displayed just before the element.", //$NON-NLS-1$
                    getPreDocumentationAdapter()));
        }
    };
}
java
public void close() {
    logger.info("Closing all sync producers");
    if (sync) {
        for (SyncProducer p : syncProducers.values()) {
            p.close();
        }
    } else {
        for (AsyncProducer<V> p : asyncProducers.values()) {
            p.close();
        }
    }
}
java
private void checkObjectFactoryAttributes(ResourceInjectionBinding resourceBinding,
                                          ObjectFactoryInfo extensionFactory) // d675976
    throws InjectionConfigurationException {
    Resource resourceAnnotation = resourceBinding.getAnnotation();

    if (!extensionFactory.isAttributeAllowed("authenticationType")) {
        checkObjectFactoryAttribute(resourceBinding, "authenticationType",
            resourceAnnotation.authenticationType(), AuthenticationType.CONTAINER);
    }

    if (!extensionFactory.isAttributeAllowed("shareable")) {
        checkObjectFactoryAttribute(resourceBinding, "shareable",
            resourceAnnotation.shareable(), true);
    }
}
python
def tohexstring(self):
    """
    Returns a hexadecimal string
    """
    val = self.tostring()
    st = "{0:0x}".format(int(val, 2))
    return st.zfill(len(self.bitmap) * 2)
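As a quick illustration of the padding step, the standalone sketch below performs the same binary-string-to-hex conversion with an explicit byte count; the helper name bits_to_hex is an assumption for the example.

def bits_to_hex(bitstring, n_bytes):
    """Convert a string of '0'/'1' characters to a zero-padded hex string.

    Standalone sketch of the conversion used above: two hex digits per byte.
    """
    return "{0:0x}".format(int(bitstring, 2)).zfill(n_bytes * 2)


# Example: 16 bits (2 bytes) -> 4 hex digits, leading zeros preserved.
assert bits_to_hex("0000000011111111", 2) == "00ff"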
python
def smooth(self, smoothing_factor):
    """
    return a new time series which is an exponentially smoothed version of the
    original data series. smooth forward once, backward once, and then take the average.

    :param float smoothing_factor: smoothing factor
    :return: :class:`TimeSeries` object.
    """
    forward_smooth = {}
    backward_smooth = {}
    output = {}

    if self:
        pre = self.values[0]
        next = self.values[-1]
        for key, value in self.items():
            forward_smooth[key] = smoothing_factor * pre + (1 - smoothing_factor) * value
            pre = forward_smooth[key]
        for key, value in reversed(self.items()):
            backward_smooth[key] = smoothing_factor * next + (1 - smoothing_factor) * value
            next = backward_smooth[key]
        for key in forward_smooth.keys():
            output[key] = (forward_smooth[key] + backward_smooth[key]) / 2

    return TimeSeries(output)
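To show the forward/backward averaging trick on its own, the sketch below applies it to a plain {timestamp: value} dict; it does not depend on the TimeSeries class above, and the function name smooth_series is illustrative.

def smooth_series(series, alpha):
    """Exponentially smooth a {timestamp: value} dict forward and backward,
    then average the two passes (standalone sketch of the technique above)."""
    keys = sorted(series)
    forward, backward = {}, {}

    prev = series[keys[0]]
    for k in keys:                       # forward pass
        prev = alpha * prev + (1 - alpha) * series[k]
        forward[k] = prev

    nxt = series[keys[-1]]
    for k in reversed(keys):             # backward pass
        nxt = alpha * nxt + (1 - alpha) * series[k]
        backward[k] = nxt

    return {k: (forward[k] + backward[k]) / 2 for k in keys}


print(smooth_series({0: 1.0, 1: 10.0, 2: 1.0}, alpha=0.5))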
python
def _init_entry_points(self, entry_points):
    """
    Default initialization loop.
    """
    logger.debug(
        "registering %d entry points for registry '%s'",
        len(entry_points), self.registry_name,
    )

    for entry_point in entry_points:
        try:
            logger.debug(
                "registering entry point '%s' from '%s'",
                entry_point, entry_point.dist,
            )
            self._init_entry_point(entry_point)
        except ImportError:
            logger.warning(
                'ImportError: %s not found; skipping registration',
                entry_point.module_name)
        except Exception:
            logger.exception(
                "registration of entry point '%s' from '%s' to registry "
                "'%s' failed with the following exception",
                entry_point, entry_point.dist, self.registry_name,
            )
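A similar defensive registration loop can be written against the standard library's importlib.metadata (Python 3.10+ selection API); the group name 'myapp.plugins' and the helper name load_plugins below are placeholder assumptions, not names used by the registry above.

import logging
from importlib.metadata import entry_points

logger = logging.getLogger(__name__)


def load_plugins(group="myapp.plugins"):
    """Load every entry point in a group, skipping the ones that fail to import.

    Standalone sketch of the defensive loop above.
    """
    loaded = {}
    for ep in entry_points(group=group):
        try:
            loaded[ep.name] = ep.load()   # import the advertised object
        except ImportError:
            logger.warning("%s not found; skipping registration", ep.name)
        except Exception:
            logger.exception("loading entry point '%s' failed", ep.name)
    return loaded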
java
private static void parseMessageTextNode(Builder builder, Node node)
    throws MalformedException {
    String value = extractStringFromStringExprNode(node);

    while (true) {
        int phBegin = value.indexOf(PH_JS_PREFIX);
        if (phBegin < 0) {
            // Just a string literal
            builder.appendStringPart(value);
            return;
        } else {
            if (phBegin > 0) {
                // A string literal followed by a placeholder
                builder.appendStringPart(value.substring(0, phBegin));
            }

            // A placeholder. Find where it ends
            int phEnd = value.indexOf(PH_JS_SUFFIX, phBegin);
            if (phEnd < 0) {
                throw new MalformedException(
                    "Placeholder incorrectly formatted in: " + builder.getKey(), node);
            }

            String phName = value.substring(phBegin + PH_JS_PREFIX.length(), phEnd);
            builder.appendPlaceholderReference(phName);
            int nextPos = phEnd + PH_JS_SUFFIX.length();
            if (nextPos < value.length()) {
                // Iterate on the rest of the message
                value = value.substring(nextPos);
            } else {
                // The message is parsed
                return;
            }
        }
    }
}
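The same prefix/suffix scanning loop is straightforward to reproduce outside the compiler. The sketch below tokenizes a message into literal parts and placeholder names; the '{$' and '}' delimiters and the function name are assumptions chosen for illustration.

def tokenize_message(value, prefix="{$", suffix="}"):
    """Split a message into ('text', s) and ('placeholder', name) tokens.

    Illustrative sketch of the scanning loop above; raises ValueError for an
    unterminated placeholder instead of MalformedException.
    """
    tokens = []
    while True:
        begin = value.find(prefix)
        if begin < 0:
            if value:
                tokens.append(("text", value))      # trailing literal
            return tokens
        if begin > 0:
            tokens.append(("text", value[:begin]))  # literal before the placeholder
        end = value.find(suffix, begin)
        if end < 0:
            raise ValueError("Placeholder incorrectly formatted in: " + value)
        tokens.append(("placeholder", value[begin + len(prefix):end]))
        value = value[end + len(suffix):]


assert tokenize_message("Hi {$USER}, bye") == [
    ("text", "Hi "), ("placeholder", "USER"), ("text", ", bye")]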
python
def __parse_email_to_employer_stream(self, stream):
    """Parse email to employer stream.

    The stream contains a list of email addresses and their employers.
    Each line has an email address and an organization name separated by
    tabs. Optionally, the date when the identity withdrew from the
    organization can be included after a '<' character. Comment lines
    start with the hash character (#).

    Example:

        # List of enrollments
        [email protected]    Example Company        # John Smith
        [email protected]    Example Company        # John Doe
        [email protected]    Bitergia < 2015-01-01  # John Smith - Bitergia
    """
    if not stream:
        return

    f = self.__parse_email_to_employer_line

    for rol in self.__parse_stream(stream, f):
        email = rol[0]
        org = rol[1]
        rol_date = rol[2]

        if org not in self.__raw_orgs:
            self.__raw_orgs[org] = []

        if email not in self.__raw_identities:
            self.__raw_identities[email] = [(org, rol_date)]
        else:
            self.__raw_identities[email].append((org, rol_date))
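A minimal standalone parser for the line format described in the docstring is sketched below; the function name and the sample email address are made up for the example, and the real code above delegates this work to __parse_email_to_employer_line.

def parse_enrollment_line(line):
    """Parse 'email<TAB>organization [< YYYY-MM-DD]' into (email, org, date).

    Returns None for blank lines and comments. Sketch only, under the format
    assumptions stated in the docstring above.
    """
    line = line.split("#", 1)[0].strip()   # drop trailing comments
    if not line:
        return None
    email, _, rest = line.partition("\t")
    org, _, date = rest.partition("<")
    return email.strip(), org.strip(), date.strip() or None


assert parse_enrollment_line("[email protected]\tBitergia < 2015-01-01") == \
    ("[email protected]", "Bitergia", "2015-01-01")
assert parse_enrollment_line("# just a comment") is None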
java
public static ProfilingTimer createLoggingSubtasks(final Log log, final String processName,
                                                   final Object... args) {
    return create(log, false, null, processName, args);
}
python
def logspace_bins(self, bins=None, units=None, conversion_function=convert_time, resolution=None):
    """Generates bin edges for a logspace tiling: there is one edge more than bins
    and each bin is between two edges"""
    bins = self.logspace(bins=bins, units=units, conversion_function=conversion_function,
                         resolution=resolution, end_at_end=False)
    resolution = np.mean((bins[:-1]) / (bins[1:]))
    bins = np.concatenate([bins * np.sqrt(resolution), bins[-1:] / np.sqrt(resolution)])
    return bins
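The edge construction above is a general trick for log-spaced bin centers. The standalone numpy sketch below shows it on an explicit geomspace; the helper name centers_to_log_edges is an assumption for the example.

import numpy as np

def centers_to_log_edges(centers):
    """Turn log-spaced bin centers into bin edges (one more edge than centers).

    Sketch of the technique above: each center ends up at the geometric mean
    of its two surrounding edges.
    """
    ratio = np.mean(centers[:-1] / centers[1:])   # mean ratio centers[i] / centers[i+1]
    return np.concatenate([centers * np.sqrt(ratio), centers[-1:] / np.sqrt(ratio)])


centers = np.geomspace(1.0, 1000.0, 4)            # [1, 10, 100, 1000]
print(centers_to_log_edges(centers))              # edges bracketing each center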
java
@Override
public void cacheResult(CommerceSubscriptionEntry commerceSubscriptionEntry) {
    entityCache.putResult(CommerceSubscriptionEntryModelImpl.ENTITY_CACHE_ENABLED,
        CommerceSubscriptionEntryImpl.class, commerceSubscriptionEntry.getPrimaryKey(),
        commerceSubscriptionEntry);

    finderCache.putResult(FINDER_PATH_FETCH_BY_UUID_G,
        new Object[] {
            commerceSubscriptionEntry.getUuid(),
            commerceSubscriptionEntry.getGroupId()
        }, commerceSubscriptionEntry);

    finderCache.putResult(FINDER_PATH_FETCH_BY_C_C_C,
        new Object[] {
            commerceSubscriptionEntry.getCPInstanceUuid(),
            commerceSubscriptionEntry.getCProductId(),
            commerceSubscriptionEntry.getCommerceOrderItemId()
        }, commerceSubscriptionEntry);

    commerceSubscriptionEntry.resetOriginalValues();
}
java
public void setNodes(final @Nonnull Collection<? extends Node> nodes) throws IOException {
    Queue.withLock(new Runnable() {
        @Override
        public void run() {
            Set<String> toRemove = new HashSet<>(Nodes.this.nodes.keySet());
            for (Node n : nodes) {
                final String name = n.getNodeName();
                toRemove.remove(name);
                Nodes.this.nodes.put(name, n);
            }
            Nodes.this.nodes.keySet().removeAll(toRemove);
            // directory clean up will be handled by save
            jenkins.updateComputerList();
            jenkins.trimLabels();
        }
    });
    save();
}
java
public static DynamicMessage parseFrom(final Descriptor type, final ByteString data,
                                       final ExtensionRegistry extensionRegistry)
        throws InvalidProtocolBufferException {
    return wrap(com.google.protobuf.DynamicMessage.parseFrom(type, data, extensionRegistry));
}
python
def get_user_groups(self, user):
    """Get user's group memberships.

    Args:
        user (string): User name.

    Returns:
        (list): User's groups.

    Raises:
        requests.HTTPError on failure.
    """
    self.project_service.set_auth(self._token_project)
    return self.project_service.get_user_groups(user)
java
@GET
public Response getInstanceInfo() {
    InstanceInfo appInfo = registry
            .getInstanceByAppAndId(app.getName(), id);
    if (appInfo != null) {
        logger.debug("Found: {} - {}", app.getName(), id);
        return Response.ok(appInfo).build();
    } else {
        logger.debug("Not Found: {} - {}", app.getName(), id);
        return Response.status(Status.NOT_FOUND).build();
    }
}