language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def remove_search_paths(self, path_type, paths, target_name=None, configuration_name=None):
    """Remove the given search paths from a path-type section of the matching build configurations.

    :param path_type: name of the search-path section to remove the values from
    :param paths: a string or list of strings with the paths to remove
    :param target_name: target name, list of target names, or None for every target
    :param configuration_name: configuration name, or None for every configuration
    :return: None
    """
    matching_configurations = self.objects.get_configurations_on_targets(target_name, configuration_name)
    for build_configuration in matching_configurations:
        build_configuration.remove_search_paths(path_type, paths)
java
/**
 * Sets the sliding-window length in milliseconds on the underlying
 * {@code DefaultFailureInterpreter}.
 *
 * @throws IllegalStateException if the configured FailureInterpreter is not
 *         a DefaultFailureInterpreter (no other interpreter exposes a window)
 */
public void setWindowMillis(long windowMillis) {
    FailureInterpreter interpreter = getFailureInterpreter();
    if (interpreter instanceof DefaultFailureInterpreter) {
        ((DefaultFailureInterpreter) interpreter).setWindowMillis(windowMillis);
        return;
    }
    throw new IllegalStateException("setWindowMillis() not supported: this CircuitBreaker's FailureInterpreter isn't a DefaultFailureInterpreter.");
}
java
/**
 * Records a FATAL validation message when {@code thePass} is false, and
 * echoes {@code thePass} so the call can be used inline in boolean logic.
 */
protected boolean fail(List<ValidationMessage> errors, IssueType type, int line, int col, String path, boolean thePass, String msg) {
    if (thePass == false) {
        addValidationMessage(errors, type, line, col, path, msg, IssueSeverity.FATAL);
    }
    return thePass;
}
java
/**
 * Converts an RSA {@code KeyPair} into a {@code JsonWebKey}.
 *
 * <p>When the pair contains a private key, all CRT parameters
 * (n, e, d, p, q, dp, dq, qi) are exported.  Otherwise only the public
 * modulus and exponent are set and every private field is explicitly
 * nulled.</p>
 *
 * <p>NOTE(review): assumes a non-null private key is always an
 * {@code RSAPrivateCrtKey}; a plain {@code RSAPrivateKey} would throw a
 * {@code ClassCastException} here — confirm callers guarantee this.</p>
 */
public static JsonWebKey fromRSA(KeyPair keyPair) {
    RSAPrivateCrtKey privateKey = (RSAPrivateCrtKey) keyPair.getPrivate();
    JsonWebKey key = null;
    if (privateKey != null) {
        // Full key: public components plus all private CRT components.
        key = new JsonWebKey().withKty(JsonWebKeyType.RSA).withN(toByteArray(privateKey.getModulus()))
                .withE(toByteArray(privateKey.getPublicExponent()))
                .withD(toByteArray(privateKey.getPrivateExponent())).withP(toByteArray(privateKey.getPrimeP()))
                .withQ(toByteArray(privateKey.getPrimeQ())).withDp(toByteArray(privateKey.getPrimeExponentP()))
                .withDq(toByteArray(privateKey.getPrimeExponentQ()))
                .withQi(toByteArray(privateKey.getCrtCoefficient()));
    } else {
        // Public-only key: private fields are cleared explicitly.
        RSAPublicKey publicKey = (RSAPublicKey) keyPair.getPublic();
        key = new JsonWebKey().withKty(JsonWebKeyType.RSA).withN(toByteArray(publicKey.getModulus()))
                .withE(toByteArray(publicKey.getPublicExponent())).withD(null).withP(null).withQ(null).withDp(null)
                .withDq(null).withQi(null);
    }
    return key;
}
python
def forward(self, X):
    """Forward function.

    :param X: The input (batch) of the model: word-sequence pairs for the
        LSTMs followed by one tensor of extra features.
    :type X: For word sequences: a list of torch.Tensor pairs (word sequence
        and word mask) of shape (batch_size, sequence_length).
        For features: torch.Tensor of shape (batch_size, feature_size).
    :return: The output of the linear layer.
    :rtype: torch.Tensor of shape (batch_size, num_classes)
    """
    s = X[:-1]  # all but the last element: the (word sequence, mask) pairs
    f = X[-1]   # last element: the dense feature tensor
    batch_size = len(f)
    # Start with an empty tensor and concatenate each LSTM output onto it.
    outputs = self._cuda(torch.Tensor([]))
    # Calculate textual features from LSTMs.
    # NOTE(review): self.lstms[0] is used for every sequence i — this looks
    # like deliberate weight sharing across sequences, but confirm it is not
    # meant to be self.lstms[i].
    for i in range(len(s)):
        state_word = self.lstms[0].init_hidden(batch_size)
        output = self.lstms[0].forward(s[i][0], s[i][1], state_word)
        outputs = torch.cat((outputs, output), 1)
    # Concatenate textual features with the multi-modal features.
    outputs = torch.cat((outputs, f), 1)
    return self.linear(outputs)
python
def available_state_for_gene(self, gene: Gene, state: State) -> Tuple[State, ...]:
    """Return the states reachable from ``state`` by changing only ``gene``.

    Duplicate target levels are collapsed; order of first occurrence in the
    transition's state list is preserved.
    """
    active_multiplex: Tuple[Multiplex] = gene.active_multiplex(state)
    transition: Transition = self.find_transition(gene, active_multiplex)
    level: int = state[gene]
    seen = set()
    reachable: List[State] = []
    for candidate in transition.states:
        next_level: int = self._state_after_transition(level, candidate)
        if next_level in seen:
            continue
        seen.add(next_level)
        successor: State = state.copy()
        successor[gene] = next_level
        reachable.append(successor)
    return tuple(reachable)
python
def get_geoip_dat():
    """Find a GeoIP database on disk, preferring city over country lookup.

    Returns the path of the first existing database file, or None when no
    database is installed in the conventional locations.
    """
    candidates = ("GeoIPCity.dat", "GeoIP.dat")
    if os.name == 'nt':
        search_dirs = (sys.exec_prefix, r"c:\geoip")
    else:
        search_dirs = ("/usr/local/share/GeoIP", "/usr/share/GeoIP")
    for directory in search_dirs:
        for candidate in candidates:
            full_path = os.path.join(directory, candidate)
            if os.path.isfile(full_path):
                return full_path
    return None
java
/**
 * Sets when the error text is shown and updates the view immediately.
 *
 * <p>{@code Always} shows the error view; any other mode but
 * {@code WhenInvalid} removes it (GONE).  {@code WhenInvalid} uses INVISIBLE
 * — presumably so the view keeps its layout slot until validation fails;
 * confirm this initial state is intentional.</p>
 */
public void setErrorMode(@NonNull ErrorMode errorMode) {
    this.errorMode = errorMode;
    errorTextView.setVisibility(errorMode == ErrorMode.WhenInvalid ? INVISIBLE : errorMode == ErrorMode.Always ? VISIBLE : GONE);
}
java
/**
 * Returns true when the node identified by {@code nodeHandle} has at least
 * one child, i.e. its first-child slot is a real node rather than DTM.NULL.
 */
public boolean hasChildNodes(int nodeHandle) {
    return _firstch(makeNodeIdentity(nodeHandle)) != DTM.NULL;
}
python
def get_doctree(path, **kwargs):
    """
    Obtain a Sphinx doctree from the RST file at ``path``.

    Performs no Releases-specific processing; this code would, ideally, be in
    Sphinx itself, but things there are pretty tightly coupled. So we wrote
    this.

    Any additional kwargs are passed unmodified into an internal `make_app`
    call.

    :param str path: A relative or absolute file path string.

    :returns:
        A two-tuple of the generated ``sphinx.application.Sphinx`` app and the
        doctree (a ``docutils.document`` object).

    .. versionchanged:: 1.6
        Added support for passing kwargs to `make_app`.
    """
    root, filename = os.path.split(path)
    docname, _ = os.path.splitext(filename)
    # TODO: this only works for top level changelog files (i.e. ones where
    # their dirname is the project/doc root)
    app = make_app(srcdir=root, **kwargs)
    # Create & init a BuildEnvironment. Mm, tasty side effects.
    app._init_env(freshenv=True)
    env = app.env
    # More arity/API changes: Sphinx 1.3/1.4-ish require one to pass in the app
    # obj in BuildEnvironment.update(); modern Sphinx performs that inside
    # Application._init_env() (which we just called above) and so that kwarg is
    # removed from update(). EAFP.
    kwargs = dict(
        config=app.config,
        srcdir=root,
        doctreedir=app.doctreedir,
        app=app,
    )
    try:
        env.update(**kwargs)
    except TypeError:
        # Assume newer Sphinx w/o an app= kwarg
        del kwargs['app']
        env.update(**kwargs)
    # Code taken from sphinx.environment.read_doc; easier to manually call
    # it with a working Environment object, instead of doing more random crap
    # to trick the higher up build system into thinking our single changelog
    # document was "updated".
    env.temp_data['docname'] = docname
    env.app = app
    # NOTE: SphinxStandaloneReader API changed in 1.4 :(
    reader_kwargs = {
        'app': app,
        'parsers': env.config.source_parsers,
    }
    if sphinx.version_info[:2] < (1, 4):
        del reader_kwargs['app']
    # This monkeypatches (!!!) docutils to 'inject' all registered Sphinx
    # domains' roles & so forth. Without this, rendering the doctree lacks
    # almost all Sphinx magic, including things like :ref: and :doc:!
    with sphinx_domains(env):
        try:
            reader = SphinxStandaloneReader(**reader_kwargs)
        except TypeError:
            # If we import from io, this happens automagically, not in API
            del reader_kwargs['parsers']
            reader = SphinxStandaloneReader(**reader_kwargs)
        pub = Publisher(reader=reader,
                        writer=SphinxDummyWriter(),
                        destination_class=NullOutput)
        pub.set_components(None, 'restructuredtext', None)
        pub.process_programmatic_settings(None, env.settings, None)
        # NOTE: docname derived higher up, from our given path
        src_path = env.doc2path(docname)
        source = SphinxFileInput(
            app,
            env,
            source=None,
            source_path=src_path,
            encoding=env.config.source_encoding,
        )
        pub.source = source
        pub.settings._source = src_path
        pub.set_destination(None, None)
        pub.publish()
        return app, pub.document
java
/**
 * Returns the percentage of work completed for this assignment.
 *
 * <p>Uses the cached PERCENT_WORK_COMPLETE value when present; otherwise
 * derives it as actualWork / work (work converted to actualWork's time
 * units) * 100 and caches the result.  Returns {@code null} when the value
 * cannot be derived (missing durations or zero planned work).</p>
 */
public Number getPercentageWorkComplete() {
    Number pct = (Number) getCachedValue(AssignmentField.PERCENT_WORK_COMPLETE);
    if (pct == null) {
        Duration actualWork = getActualWork();
        Duration work = getWork();
        // Guard against missing values and division by zero.
        if (actualWork != null && work != null && work.getDuration() != 0) {
            pct = Double.valueOf((actualWork.getDuration() * 100) / work.convertUnits(actualWork.getUnits(), getParentFile().getProjectProperties()).getDuration());
            set(AssignmentField.PERCENT_WORK_COMPLETE, pct);
        }
    }
    return pct;
}
python
async def join(
    self,
    *,
    remote_addrs: Iterable[str],
    listen_addr: str = "0.0.0.0:2377",
    join_token: str,
    advertise_addr: str = None,
    data_path_addr: str = None
) -> bool:
    """
    Join an existing swarm.

    Args:
        listen_addr
            Used for inter-manager communication
        advertise_addr
            Externally reachable address advertised to other nodes.
        data_path_addr
            Address or interface to use for data path traffic.
        remote_addrs
            Addresses of manager nodes already participating in the swarm.
        join_token
            Secret token for joining this swarm.
    """
    payload = clean_map({
        "RemoteAddrs": list(remote_addrs),
        "JoinToken": join_token,
        "ListenAddr": listen_addr,
        "AdvertiseAddr": advertise_addr,
        "DataPathAddr": data_path_addr,
    })
    await self.docker._query("swarm/join", method="POST", data=payload)
    return True
python
def _link_record(self):
    """
    Check the restrictions for using a CNAME lookup.

    Returns a tuple of the fully qualified record name to look up (empty if
    no record name is specified) and a boolean telling whether a CNAME
    lookup should be performed.
    """
    action = self._get_lexicon_option('action')
    identifier = self._get_lexicon_option('identifier')
    rdtype = self._get_lexicon_option('type')
    raw_name = self._get_lexicon_option('name')
    name = self._fqdn_name(raw_name) if raw_name else None
    qname = name
    # An explicit identifier overrides type and name.
    if identifier:
        rdtype, name, _ = self._parse_identifier(identifier)
    linked = self._get_provider_option('linked') == 'yes'
    eligible = (action != 'list' and rdtype in ('A', 'AAAA', 'TXT')
                and bool(name) and linked)
    if eligible and (action != 'update' or name == qname or not qname):
        LOGGER.info('Hetzner => Enable CNAME lookup '
                    '(see --linked parameter)')
        return name, True
    LOGGER.info('Hetzner => Disable CNAME lookup '
                '(see --linked parameter)')
    return name, False
python
def replace_with_text_stream(stream_name):
    """Replace the named ``sys`` stream with its text-converted equivalent.

    Looks the stream up in ``TEXT_STREAMS``; unknown names are ignored.

    :param str stream_name: The name of a target stream, such as **stdout**
        or **stderr**
    :return: None
    """
    factory = TEXT_STREAMS.get(stream_name)
    if factory is None:
        return None
    setattr(sys, stream_name, factory())
    return None
python
def _bytes_to_values(self, bs, width=None):
    """Unpack one packed row of bytes into a row of sample values.

    The result is a freshly allocated object, never shared with ``bs``.
    For bit depths below 8, the row is truncated to ``width`` samples.
    """
    if self.bitdepth == 8:
        return bytearray(bs)
    if self.bitdepth == 16:
        return array('H', struct.unpack('!%dH' % (len(bs) // 2), bs))
    assert self.bitdepth < 8
    if width is None:
        width = self.width
    # How many samples fit into one byte, and the per-sample bit mask.
    samples_per_byte = 8 // self.bitdepth
    mask = (1 << self.bitdepth) - 1
    # Shift amounts, most-significant sample first.
    shifts = [self.bitdepth * i for i in reversed(list(range(samples_per_byte)))]
    row = bytearray()
    for packed in bs:
        row.extend([mask & (packed >> shift) for shift in shifts])
    return row[:width]
java
/**
 * Paints a major tick mark for a vertical slider: applies the configured
 * tick color to the graphics context, then delegates the geometry to the
 * superclass.
 */
@Override
protected void paintMajorTickForVertSlider(Graphics g, Rectangle tickBounds, int y) {
    setTickColor(g);
    super.paintMajorTickForVertSlider(g, tickBounds, y);
}
java
/**
 * Migrates {@code database} to schema {@code version} when its current
 * version is older; otherwise only (re-)enables foreign-key enforcement.
 *
 * <p>Foreign keys are switched OFF for the duration of the migration so
 * constraints such as "ON DELETE CASCADE" don't fire mid-migration, and are
 * switched back ON afterwards.  The migration runs in a transaction that is
 * rolled back on failure.</p>
 *
 * @param database  the database to migrate
 * @param migration the migration to execute
 * @param version   target schema version; must be positive
 * @throws SQLException wrapping the original cause when the migration fails
 */
public static void updateSchema(SQLDatabase database, Migration migration, int version) throws SQLException {
    Misc.checkArgument(version > 0, "Schema version number must be positive");
    // ensure foreign keys are enforced in the case that we are up to date and no migration happens
    database.execSQL("PRAGMA foreign_keys = ON;");
    int dbVersion = database.getVersion();
    if(dbVersion < version) {
        // switch off foreign keys during the migration - so that we don't get caught out by
        // "ON DELETE CASCADE" constraints etc
        database.execSQL("PRAGMA foreign_keys = OFF;");
        database.beginTransaction();
        try {
            try {
                migration.runMigration(database);
                database.execSQL("PRAGMA user_version = " + version + ";");
                database.setTransactionSuccessful();
            } catch (Exception ex) {
                // don't set the transaction successful, so it'll rollback
                throw new SQLException(
                    String.format("Migration from %1$d to %2$d failed.", dbVersion, version),
                    ex);
            }
        } finally {
            database.endTransaction();
            // re-enable foreign keys
            database.execSQL("PRAGMA foreign_keys = ON;");
        }
    }
}
java
/**
 * Returns the value stored under {@code key}, or {@code null} when the key
 * has no record.
 */
public V get (int key) {
    Record<V> record = getImpl(key);
    if (record == null) {
        return null;
    }
    return record.value;
}
python
def keywords(self):
    """
    Return a list of all keywords that this rule object has defined.

    A keyword is considered defined if the attribute backing it holds a
    truthy value (falsy values such as ``None``, ``0`` or ``''`` count as
    "not defined").
    """
    # (attribute name on self, keyword name reported to the caller)
    defined_keywords = [
        ('allowempty_map', 'allowempty_map'),
        ('assertion', 'assertion'),
        ('default', 'default'),
        ('class', 'class'),
        ('desc', 'desc'),
        ('enum', 'enum'),
        ('example', 'example'),
        ('extensions', 'extensions'),
        ('format', 'format'),
        ('func', 'func'),
        ('ident', 'ident'),
        ('include_name', 'include'),
        ('length', 'length'),
        ('map_regex_rule', 'map_regex_rule'),
        ('mapping', 'mapping'),
        ('matching', 'matching'),
        ('matching_rule', 'matching_rule'),
        ('name', 'name'),
        # BUG FIX: a missing comma here previously made this line a *call*
        # of the tuple ('nullable', 'nullable') with ('parent', 'parent')
        # as argument, raising TypeError at runtime.
        ('nullable', 'nullable'),
        ('parent', 'parent'),
        ('pattern', 'pattern'),
        ('pattern_regexp', 'pattern_regexp'),
        ('range', 'range'),
        ('regex_mappings', 'regex_mappings'),
        ('required', 'required'),
        ('schema', 'schema'),
        ('schema_str', 'schema_str'),
        ('sequence', 'sequence'),
        ('type', 'type'),
        ('type_class', 'type_class'),
        ('unique', 'unique'),
        ('version', 'version'),
    ]
    found_keywords = []
    for var_name, keyword_name in defined_keywords:
        if getattr(self, var_name, None):
            found_keywords.append(keyword_name)
    return found_keywords
python
def istextfile(fname, blocksize=512):
    """Heuristically guess whether *fname* is a text or a binary file.

    Reads a single block of up to ``blocksize`` bytes.  The file is
    considered binary when it contains NUL bytes or when more than 30% of
    the sampled bytes fall outside ``TEXT_CHARS``; an empty file counts as
    text.
    """
    with open(fname, "rb") as handle:
        sample = handle.read(blocksize)
    if not sample:
        # An empty file is considered a valid text file.
        return True
    if b"\x00" in sample:
        # Files with null bytes are binary.
        return False
    # translate() with a deletion table strips every TEXT_CHARS byte;
    # whatever survives is "non-text".
    nontext = sample.translate(None, TEXT_CHARS)
    return float(len(nontext)) / len(sample) <= 0.30
java
/**
 * Returns the first CPInstance with a display date earlier than the given
 * date and the given status, ordered by the supplied comparator, or
 * {@code null} when there is no match.  Pure delegation to the persistence
 * layer.
 */
public static CPInstance fetchByLtD_S_First(Date displayDate, int status, OrderByComparator<CPInstance> orderByComparator) {
    return getPersistence()
        .fetchByLtD_S_First(displayDate, status, orderByComparator);
}
python
async def make_response(*args: Any) -> Response:
    """Create a response, a simple wrapper function.

    This is most useful when you want to alter a Response before returning
    it, for example

    .. code-block:: python

        response = make_response(render_template('index.html'))
        response.headers['X-Header'] = 'Something'
    """
    if not args:
        # No arguments: hand back a bare response object.
        return current_app.response_class()
    body = args[0] if len(args) == 1 else args
    return await current_app.make_response(body)
java
/**
 * Relaxes the Jackson parser for lenient JSON input: allows unquoted
 * control characters and backslash-escaping of any character.
 */
protected void config() {
    objectMapper.configure(com.fasterxml.jackson.core.JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true);
    objectMapper.configure(com.fasterxml.jackson.core.JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true);
}
java
/**
 * Obtains an OffsetDateTime combining a local date, a local time and a zone
 * offset.
 *
 * @param date   the local date part
 * @param time   the local time part
 * @param offset the zone offset
 * @return the combined offset date-time
 */
public static OffsetDateTime of(LocalDate date, LocalTime time, ZoneOffset offset) {
    LocalDateTime dt = LocalDateTime.of(date, time);
    return new OffsetDateTime(dt, offset);
}
python
def density(G, t=None):
    r"""Return the density of a graph at timestamp t.

    The density for undirected graphs is

    .. math::

        d = \frac{2m}{n(n-1)},

    and for directed graphs is

    .. math::

        d = \frac{m}{n(n-1)},

    where `n` is the number of nodes and `m` is the number of edges in `G`.

    Parameters
    ----------
    G : Graph opject
        DyNetx graph object

    t : snapshot id (default=None)
        If None the density will be computed on the flattened graph.

    Notes
    -----
    The density is 0 for a graph without edges and 1 for a complete graph.

    Self loops are counted in the total number of edges so graphs with self
    loops can have density higher than 1.
    """
    n = number_of_nodes(G, t)
    m = number_of_interactions(G, t)
    # Degenerate graphs (no edges, unknown edge count, fewer than 2 nodes)
    # have density 0 by convention.
    if not m or n <= 1:
        return 0
    pairs = n * (n - 1)
    if G.is_directed():
        return m / pairs
    return 2 * m / pairs
python
def find_link(self, href_pattern, make_absolute=True):
    """
    Find link in response body which href value matches ``href_pattern``.

    Returns found url or None.

    NOTE(review): despite operating on text, this method *rejects* unicode
    arguments and then converts the byte-string pattern to unicode before
    matching — presumably a py2/py3 consistency rule; confirm this contract
    is intentional.
    """
    if make_absolute:
        # Resolve relative hrefs against the document URL before matching.
        self.tree.make_links_absolute(self.doc.url)
    if isinstance(href_pattern, six.text_type):
        raise GrabMisuseError('Method `find_link` accepts only '
                              'byte-string argument')
    href_pattern = make_unicode(href_pattern)
    for elem, _, link, _ in self.tree.iterlinks():
        # Only anchor tags count as links here; the first match wins.
        if elem.tag == 'a' and href_pattern in link:
            return link
    return None
python
def is_child(self, node):
    """Check whether *node* is a (possibly indirect) descendant of this node.

    Parameters
    ----------
    node : instance of Node
        The potential child.

    Returns
    -------
    child : bool
        Whether or not the node is a child.
    """
    if node in self.children:
        return True
    # Recurse depth-first into each subtree.
    return any(child.is_child(node) for child in self.children)
python
def _binary_op(cls, x: 'TensorFluent', y: 'TensorFluent', op: Callable[[tf.Tensor, tf.Tensor], tf.Tensor], dtype: tf.DType) -> 'TensorFluent': '''Returns a TensorFluent for the binary `op` applied to fluents `x` and `y`. Args: x: The first operand. y: The second operand. op: The binary operator. dtype: The output's data type. Returns: A TensorFluent wrapping the binary operator's output. ''' # scope s1 = x.scope.as_list() s2 = y.scope.as_list() scope, perm1, perm2 = TensorFluentScope.broadcast(s1, s2) if x.batch and perm1 != []: perm1 = [0] + [p+1 for p in perm1] if y.batch and perm2 != []: perm2 = [0] + [p+1 for p in perm2] x = x.transpose(perm1) y = y.transpose(perm2) # shape reshape1, reshape2 = TensorFluentShape.broadcast(x.shape, y.shape) if reshape1 is not None: x = x.reshape(reshape1) if reshape2 is not None: y = y.reshape(reshape2) # dtype x = x.cast(dtype) y = y.cast(dtype) # operation t = op(x.tensor, y.tensor) # batch batch = x.batch or y.batch return TensorFluent(t, scope, batch=batch)
java
/**
 * Creates a new Order via the orders repository and registers it with the
 * execution context so the fixture result is tracked.
 */
private Order create(
        final String number, final String customerName, final LocalDate date, final String preferences,
        final ExecutionContext executionContext) {
    return executionContext.add(this, orders.create(number, customerName, date, preferences));
}
python
def abs2rel_y(y, axis=None):
    r'''
    Transform absolute y-coordinates to relative y-coordinates. Relative
    coordinates correspond to a fraction of the relevant axis. Be sure to set
    the limits and scale before calling this function!

    :arguments:

        **y** (``float``, ``list``)
            Absolute coordinates. ``None`` entries in a list are passed
            through unchanged.

    :options:

        **axis** ([``plt.gca()``] | ...)
            Specify the axis to which to apply the limits.

    :returns:

        **y** (``float``, ``list``)
            Relative coordinates.
    '''
    # get current axis
    if axis is None:
        axis = plt.gca()

    # get current limits
    ymin, ymax = axis.get_ylim()

    # transform
    # BUG FIX: the relevant scale for a *y*-coordinate transform is the
    # y-axis scale; the original consulted axis.get_xscale().
    if axis.get_yscale() == 'log':
        lo, hi = np.log10(ymin), np.log10(ymax)
        try:
            return [(np.log10(i) - lo) / (hi - lo) if i is not None else i
                    for i in y]
        except TypeError:
            # y is a scalar, not iterable
            return (np.log10(y) - lo) / (hi - lo)
    else:
        try:
            return [(i - ymin) / (ymax - ymin) if i is not None else i
                    for i in y]
        except TypeError:
            # y is a scalar, not iterable
            return (y - ymin) / (ymax - ymin)
python
def fit_gaussian(x, y, yerr, p0):
    """Fit a Gaussian profile to (x, y) data.

    Returns the ``(popt, pcov)`` pair from ``curve_fit``; when the optimiser
    fails to converge, returns ``([0], [0])`` instead.
    """
    try:
        return curve_fit(gaussian, x, y, sigma=yerr, p0=p0,
                         absolute_sigma=True)
    except RuntimeError:
        # curve_fit raises RuntimeError when the fit does not converge.
        return [0], [0]
python
def set_primary(self, **params):
    """Make this account the primary one.

    https://developers.coinbase.com/api/v2#set-account-as-primary
    """
    response = self.api_client.set_primary_account(self.id, **params)
    # Refresh the local state with the server's view of the account.
    self.update(response)
    return response
java
/**
 * Sets the connect timeout on the underlying request factory.
 *
 * <p>NOTE(review): the else-branch blindly casts to
 * {@code HttpComponentsClientHttpRequestFactory}; any other factory type
 * would throw {@code ClassCastException} — confirm only these two factory
 * types are ever configured.</p>
 */
public void setConnectTimeout(int millis) {
    ClientHttpRequestFactory f = getRequestFactory();
    if (f instanceof SimpleClientHttpRequestFactory) {
        ((SimpleClientHttpRequestFactory) f).setConnectTimeout(millis);
    } else {
        ((HttpComponentsClientHttpRequestFactory) f).setConnectTimeout(millis);
    }
}
python
def command_show(self):
    """ Show metadata """
    # Build the argument parser for the `show` subcommand.
    self.parser = argparse.ArgumentParser(
        description="Show metadata of available objects")
    # Register the shared option groups (selection, formatting, utilities).
    self.options_select()
    self.options_formatting()
    self.options_utils()
    # arguments[0:2] are the program name and the subcommand itself.
    self.options = self.parser.parse_args(self.arguments[2:])
    self.show(brief=False)
java
/**
 * Returns the aspect ratio (width/height) of the document, or -1 when it
 * cannot be determined.
 *
 * <p>Per SVG 1.1 sect 7.12: when width and height are both specified and
 * neither is a percentage, the ratio is computed from those lengths;
 * otherwise it falls back to the viewBox dimensions.</p>
 *
 * @throws IllegalArgumentException if no document has been loaded
 */
@SuppressWarnings({"WeakerAccess", "unused"})
public float getDocumentAspectRatio() {
    if (this.rootElement == null)
        throw new IllegalArgumentException("SVG document is empty");
    Length w = this.rootElement.width;
    Length h = this.rootElement.height;
    // If width and height are both specified and are not percentages,
    // aspect ratio is calculated from these (SVG1.1 sect 7.12)
    if (w != null && h != null && w.unit!=Unit.percent && h.unit!=Unit.percent) {
        if (w.isZero() || h.isZero())
            return -1f;
        return w.floatValue(this.renderDPI) / h.floatValue(this.renderDPI);
    }
    // Otherwise, get the ratio from the viewBox
    if (this.rootElement.viewBox != null && this.rootElement.viewBox.width != 0f && this.rootElement.viewBox.height != 0f) {
        return this.rootElement.viewBox.width / this.rootElement.viewBox.height;
    }
    // Could not determine aspect ratio
    return -1f;
}
python
def _reverse(self): """Reverse all bits in-place.""" # Reverse the contents of each byte n = [BYTE_REVERSAL_DICT[b] for b in self._datastore.rawbytes] # Then reverse the order of the bytes n.reverse() # The new offset is the number of bits that were unused at the end. newoffset = 8 - (self._offset + self.len) % 8 if newoffset == 8: newoffset = 0 self._setbytes_unsafe(bytearray().join(n), self.length, newoffset)
python
def traverse_preorder(self, leaves=True, internal=True):
    '''Perform a preorder traversal starting at this ``Node`` object.

    Children are expanded via a stack, so siblings are visited in reverse
    order of ``self.children``.

    Args:
        ``leaves`` (``bool``): ``True`` to include leaves, otherwise ``False``

        ``internal`` (``bool``): ``True`` to include internal nodes,
        otherwise ``False``
    '''
    stack = deque([self])
    while stack:
        node = stack.pop()
        if (leaves and node.is_leaf()) or (internal and not node.is_leaf()):
            yield node
        stack.extend(node.children)
python
def cli(env, is_open):
    """List tickets."""
    ticket_mgr = SoftLayer.TicketManager(env.client)
    # Column layout of the output table.
    table = formatting.Table([
        'id', 'assigned_user', 'title', 'last_edited', 'status', 'updates', 'priority'
    ])
    # is_open selects exactly one of the two status filters.
    tickets = ticket_mgr.list_tickets(open_status=is_open, closed_status=not is_open)
    for ticket in tickets:
        # Tickets without an assignee get a blank placeholder.
        user = formatting.blank()
        if ticket.get('assignedUser'):
            user = "%s %s" % (ticket['assignedUser']['firstName'], ticket['assignedUser']['lastName'])
        table.add_row([
            ticket['id'], user, click.wrap_text(ticket['title']), ticket['lastEditDate'], ticket['status']['name'],
            ticket.get('updateCount', 0), ticket.get('priority', 0)
        ])
    env.fout(table)
java
/**
 * Returns the {@code persistence-unit-defaults} child element, creating the
 * underlying node first when it does not exist yet.
 */
public PersistenceUnitDefaults<PersistenceUnitMetadata<T>> getOrCreatePersistenceUnitDefaults() {
    Node node = childNode.getOrCreate("persistence-unit-defaults");
    PersistenceUnitDefaults<PersistenceUnitMetadata<T>> persistenceUnitDefaults = new PersistenceUnitDefaultsImpl<PersistenceUnitMetadata<T>>(this, "persistence-unit-defaults", childNode, node);
    return persistenceUnitDefaults;
}
java
/**
 * Looks up the first non-null string init parameter among {@code names} and
 * parses it as a decimal integer.
 *
 * @param context      the external context to read init parameters from
 * @param names        candidate parameter names, tried in order; must be
 *                     non-null and contain no null entries
 * @param defaultValue the value returned when none of the parameters is set
 * @return the parsed parameter value, or {@code defaultValue} if unset
 * @throws NullPointerException  if {@code names} or any entry is null
 * @throws NumberFormatException if the parameter value is not a valid int
 */
public static int getIntegerInitParameter(ExternalContext context, String[] names, int defaultValue) {
    if (names == null) {
        throw new NullPointerException();
    }
    String param = null;
    for (String name : names) {
        if (name == null) {
            throw new NullPointerException();
        }
        param = getStringInitParameter(context, name);
        if (param != null) {
            // First configured parameter wins.
            break;
        }
    }
    if (param == null) {
        return defaultValue;
    }
    // Decimal digits and the sign are unaffected by case; the original
    // toLowerCase() call before parseInt was a no-op and has been dropped.
    return Integer.parseInt(param);
}
python
def query_parent_objects(self, context, query=None): """Return the objects of the same type from the parent object :param query: Catalog query to narrow down the objects :type query: dict :returns: Content objects of the same portal type in the parent """ # return the object values if we have no catalog query if query is None: return self.get_parent_objects(context) # avoid undefined reference of catalog in except... catalog = None # try to fetch the results via the catalog try: catalogs = api.get_catalogs_for(context) catalog = catalogs[0] return map(api.get_object, catalog(query)) except (IndexError, UnicodeDecodeError, ParseError, APIError) as e: # fall back to the object values of the parent logger.warn("UniqueFieldValidator: Catalog query {} failed " "for catalog {} ({}) -> returning object values of {}" .format(query, repr(catalog), str(e), repr(api.get_parent(context)))) return self.get_parent_objects(context)
java
/**
 * Recursively collects ephemeral-table scans from both children of this
 * join node into {@code scans}, left subtree first.
 */
@Override
public void extractEphemeralTableQueries(List<StmtEphemeralTableScan> scans) {
    if (m_leftNode != null) {
        m_leftNode.extractEphemeralTableQueries(scans);
    }
    if (m_rightNode != null) {
        m_rightNode.extractEphemeralTableQueries(scans);
    }
}
python
def getSimilarTermsForExpressions(self, body, contextId=None, posType=None, getFingerprint=None, startIndex=0, maxResults=10, sparsity=1.0):
    """Bulk get similar terms for input expressions.

    Thin delegation to the expressions API for the configured retina.

    Args:
        body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
        contextId, int: The identifier of a context (optional)
        posType, str: Part of speech (optional)
        getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
        startIndex, int: The start-index for pagination (optional)
        maxResults, int: Max results per page (optional)
        sparsity, float: Sparsify the resulting expression to this percentage (optional)
    Returns:
        list of Term
    Raises:
        CorticalioException: if the request was not successful
    """
    return self._expressions.getSimilarTermsForBulkExpressionContext(self._retina, body, contextId, posType, getFingerprint, startIndex, maxResults, sparsity)
java
/**
 * Finds the child-node definition matching {@code nodeName}/{@code nodeType}
 * among the definitions of the parent's primary and mixin types.
 *
 * <p>An exact name match always wins.  Otherwise residual ("*") definitions
 * are considered: a residual definition only qualifies if {@code nodeType}
 * satisfies all of its required primary types; among several qualifying
 * definitions the one with the more general required primary type is
 * preferred, falling back to the first residual definition found.</p>
 */
public NodeDefinitionData getChildNodeDefinition(InternalQName nodeName, InternalQName nodeType, InternalQName parentNodeType, InternalQName[] parentMixinTypes) throws RepositoryException {
    NodeDefinitionData[] defs = getAllChildNodeDefinitions(getNodeTypeNames(parentNodeType, parentMixinTypes));
    NodeDefinitionData residualDef = null;
    NodeDefinitionData firstResidualDef = null;
    outer : for (NodeDefinitionData nodeDef : defs) {
        if (nodeDef.getName().equals(nodeName)) {
            // Exact name match always wins.
            return nodeDef;
        } else if (nodeDef.isResidualSet()) {
            // store first residual definition to be able to return it as a fallback
            if (firstResidualDef == null) {
                firstResidualDef = nodeDef;
            }
            // check required primary types; skip this definition when any fails
            for (InternalQName requiredPrimaryType : nodeDef.getRequiredPrimaryTypes()) {
                if (!isNodeType(requiredPrimaryType, nodeType)) {
                    continue outer;
                }
            }
            // when there are several suitable definitions take the more general one
            if (residualDef == null || isNodeType(residualDef.getRequiredPrimaryTypes()[0], nodeDef.getRequiredPrimaryTypes()[0])) {
                residualDef = nodeDef;
            }
        }
    }
    return residualDef != null ? residualDef : firstResidualDef;
}
python
def email_quote_txt(text, indent_txt='>>', linebreak_input="\n", linebreak_output="\n"):
    """
    Takes a text and returns it in a typical mail quoted format, e.g.::

        C'est un lapin, lapin de bois.
        >>Quoi?
        Un cadeau.
        >>What?
        A present.
        >>Oh, un cadeau.

    will return::

        >>C'est un lapin, lapin de bois.
        >>>>Quoi?
        >>Un cadeau.
        >>>>What?
        >>A present.
        >>>>Oh, un cadeau.

    @param text: the string to quote
    @param indent_txt: the string used for quoting (default: '>>')
    @param linebreak_input: in the text param, string used for linebreaks
    @param linebreak_output: linebreak used for output
    @return: the text as a quoted string
    """
    if text == "":
        return ""
    # str.join builds the result in a single pass instead of the original
    # quadratic string concatenation; every line (including the last) is
    # prefixed and followed by linebreak_output, as before.
    return "".join(indent_txt + line + linebreak_output
                   for line in text.split(linebreak_input))
python
def updates(self, id, update_id=None): # pylint: disable=invalid-name,redefined-builtin
    """Get updates of a running result via long-polling.

    If no updates are available, CDRouter waits up to 10 seconds before
    sending an empty response.

    :param id: Result ID as an int.
    :param update_id: (optional) Update ID as an int.
    :return: :class:`results.Update <results.Update>` object
    :rtype: results.Update
    """
    # -1 means "poll from the beginning".
    poll_from = -1 if update_id is None else update_id
    resp = self.service.get_id(self.base, id, params={'updates': poll_from})
    return self.service.decode(UpdateSchema(), resp)
java
/**
 * Disables operator chaining for this sink and returns {@code this} for
 * fluent configuration.  Both sink flavours expose the same chaining knob
 * on their respective transformation.
 */
public CassandraSink<IN> disableChaining() {
    if (useDataStreamSink) {
        getSinkTransformation().setChainingStrategy(ChainingStrategy.NEVER);
        return this;
    }
    getStreamTransformation().setChainingStrategy(ChainingStrategy.NEVER);
    return this;
}
python
def do_sqlite_connect(dbapi_connection, connection_record):
    """Ensure SQLite checks foreign key constraints.

    SQLite ships with foreign-key enforcement disabled per connection; this
    listener turns it on for every freshly created connection.  For further
    details see "Foreign key support" sections on
    https://docs.sqlalchemy.org/en/latest/dialects/sqlite.html#foreign-key-support

    :param dbapi_connection: freshly created DB-API connection
    :param connection_record: SQLAlchemy connection record (unused)
    """
    # Enable foreign key constraint checking
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute('PRAGMA foreign_keys=ON')
    finally:
        # Close the cursor even when the PRAGMA fails, so the connection is
        # not left with a dangling cursor.
        cursor.close()
java
/**
 * Starts the background cancellation watcher.
 *
 * <p>The task waits on {@code cancellationRequest} until
 * {@code cancellationRequested} becomes true, runs
 * {@code executeCancellation()}, then marks the job CANCELLED and notifies
 * all waiters on {@code cancellationExecution}.  An interrupt re-sets the
 * thread's interrupt flag and ends the watcher early.</p>
 */
protected void startCancellationExecutor() {
    this.cancellationExecutor.execute(new Runnable() {
        @Override
        public void run() {
            synchronized (AbstractJobLauncher.this.cancellationRequest) {
                try {
                    while (!AbstractJobLauncher.this.cancellationRequested) {
                        // Wait for a cancellation request to arrive
                        AbstractJobLauncher.this.cancellationRequest.wait();
                    }
                    LOG.info("Cancellation has been requested for job " + AbstractJobLauncher.this.jobContext.getJobId());
                    executeCancellation();
                    LOG.info("Cancellation has been executed for job " + AbstractJobLauncher.this.jobContext.getJobId());
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                }
            }
            synchronized (AbstractJobLauncher.this.cancellationExecution) {
                AbstractJobLauncher.this.cancellationExecuted = true;
                AbstractJobLauncher.this.jobContext.getJobState().setState(JobState.RunningState.CANCELLED);
                // Notify that the cancellation has been executed
                AbstractJobLauncher.this.cancellationExecution.notifyAll();
            }
        }
    });
}
java
/**
 * Refines the line through end points {@code a} and {@code b} by sampling
 * edge points along it and fitting a line in a local coordinate frame.
 *
 * @return true (and writes the refined line into {@code found}) when at
 *         least 4 sample points were available; false otherwise.
 */
public boolean refine(Point2D_F64 a, Point2D_F64 b, LineGeneral2D_F64 found) {
    // determine the local coordinate system (midpoint + scale) for better
    // numeric conditioning of the fit
    center.x = (a.x + b.x)/2.0;
    center.y = (a.y + b.y)/2.0;
    localScale = a.distance(center);
    // define the line which points are going to be sampled along
    double slopeX = (b.x - a.x);
    double slopeY = (b.y - a.y);
    double r = Math.sqrt(slopeX*slopeX + slopeY*slopeY);
    // tangent of unit length that radial sample samples are going to be along
    // Two choices for tangent here. Select the one which points to the "right" of the line,
    // which is inside of the edge
    double tanX = slopeY/r;
    double tanY = -slopeX/r;
    // set up inputs into line fitting
    computePointsAndWeights(slopeX, slopeY, a.x, a.y, tanX, tanY);
    if( samplePts.size() >= 4 ) {
        // fit line and convert into generalized format
        if( null == FitLine_F64.polar(samplePts.toList(), weights.data, polar) ) {
            throw new RuntimeException("All weights were zero, bug some place");
        }
        UtilLine2D_F64.convert(polar, found);
        // Convert line from local to global coordinates
        localToGlobal(found);
        return true;
    } else {
        return false;
    }
}
python
def agg_to_two_dim_dataframe(agg):
    '''Convert an elasticsearch aggregation into a one- or two-dimensional
    pandas data frame of bucket/value pairs.

    :param agg: an aggregation from elasticsearch results
    :type agg: elasticsearch response.aggregation.agg_name object
    :returns: pandas data frame of one or two dimensions depending on
        input data
    :raises ValueError: when a bucket contains more than one nested
        aggregation
    '''
    expanded_agg = []
    for bucket in agg.buckets:
        bucket_as_dict = bucket.to_dict()
        # One-dimensional case: no nested aggregation inside the bucket.
        if dict not in [type(item) for item in bucket_as_dict.values()]:
            return bucket_to_dataframe('doc_count', agg.buckets)
        lower_level_dict = [item for item in bucket_as_dict.keys()
                            if type(bucket_as_dict[item]) is dict]
        if len(lower_level_dict) > 1:
            raise ValueError('Two dimensional data can only convert a 2 level aggregation (with 1 aggregation at each level)')
        name_of_lower_level = lower_level_dict[0]
        single_level_dataframe = bucket_to_dataframe(
            bucket.key, bucket[name_of_lower_level]['buckets'], name_of_lower_level)
        expanded_agg.append(single_level_dataframe)
    merged_results = merge_dataframes(*expanded_agg)
    # Rearrange so the 'key' columns come first.  BUG FIX: the original
    # indexed the frame with integer *positions* (merged_results[list_of_ints]),
    # which pandas resolves as column labels and raises KeyError unless the
    # columns happen to be named with those ints; select by label instead.
    cols = merged_results.columns.tolist()
    key_cols = [name for name in cols if 'key' in name]
    other_cols = [name for name in cols if name not in key_cols]
    return merged_results[key_cols + other_cols]
java
/**
 * Recursively searches for a method matching {@code originalMethod} by name:
 * first on {@code classToSearch} itself, then on its generic interfaces, and
 * finally on its generic superclass. The {@link ResolutionContext} is pushed
 * and popped around each recursive step so generic type variables resolve
 * against the correct declaring type.
 *
 * @return the matching method, or null if none is found in the hierarchy
 */
private static Method findMatchingMethod(Method originalMethod, String methodName, Type classToSearch,
        ResolutionContext ctx, Class<?> originalClass) {
    // Check self
    Method result = findMethodOnClass(originalMethod, methodName, classToSearch, ctx, originalClass);

    // Check interfaces
    if (result == null) {
        for (Type iface : getClass(classToSearch).getGenericInterfaces()) {
            ctx.push(iface);
            result = findMatchingMethod(originalMethod, methodName, iface, ctx, originalClass);
            ctx.pop();
            if (result != null) {
                break;
            }
        }
    }

    // Check superclass
    if (result == null) {
        Type superclass = getClass(classToSearch).getGenericSuperclass();
        if (superclass != null) {
            ctx.push(superclass);
            result = findMatchingMethod(originalMethod, methodName, superclass, ctx, originalClass);
            ctx.pop();
        }
    }

    return result;
}
java
/**
 * Converts the given array into a Flowable that emits its items in order.
 * Empty arrays map to {@link #empty()} and single-element arrays to
 * {@link #just(Object)} to avoid the array-based emission machinery.
 *
 * @param <T> the element type
 * @param items the array of items to emit; must not be null
 * @return the new Flowable instance
 * @throws NullPointerException if {@code items} is null
 */
@CheckReturnValue
@BackpressureSupport(BackpressureKind.FULL)
@SchedulerSupport(SchedulerSupport.NONE)
public static <T> Flowable<T> fromArray(T... items) {
    ObjectHelper.requireNonNull(items, "items is null");
    if (items.length == 0) {
        return empty();
    }
    if (items.length == 1) {
        return just(items[0]);
    }
    return RxJavaPlugins.onAssembly(new FlowableFromArray<T>(items));
}
python
def output_capturing():
    """Temporarily captures/redirects stdout.

    Yields once with ``sys.stdout`` replaced by a fresh ``StringIO`` buffer;
    the original stream is always restored afterwards, even on error.

    NOTE(review): this is a plain generator — presumably it is wrapped with
    ``contextlib.contextmanager`` at the decoration/call site; confirm there.
    """
    saved_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        yield
    finally:
        sys.stdout = saved_stdout
java
/**
 * Lazily creates and caches the DAO used to access the tile matrix set table.
 *
 * @return the (possibly newly created) TileMatrixSet DAO
 * @throws SQLException if the DAO could not be created
 */
private TileMatrixSetDao getTileMatrixSetDao() throws SQLException {
    if (tileMatrixSetDao == null) {
        tileMatrixSetDao = DaoManager.createDao(connectionSource, TileMatrixSet.class);
    }
    return tileMatrixSetDao;
}
python
def save_token(self):
    """
    Saves the token dict in the specified file

    :return bool: Success / Failure
    """
    if self.token is None:
        raise ValueError('You have to set the "token" first.')

    # Make sure the parent directory exists before writing.
    parent_dir = self.token_path.parent
    try:
        if not parent_dir.exists():
            parent_dir.mkdir(parents=True)
    except Exception as e:
        log.error('Token could not be saved: {}'.format(str(e)))
        return False

    # 'indent = True' will make the file human readable
    with self.token_path.open('w') as token_file:
        self.serializer.dump(self.token, token_file, indent=True)

    return True
python
def html(self, groups='all', template=None, **context):
    """Return an html string of the routes specified by the doc() method

    A template can be specified. A list of routes is available under the
    'autodoc' value (refer to the documentation for the generate() for a
    description of available values). If no template is specified, a
    default template is used.

    By specifying the group or groups arguments, only routes belonging to
    those groups will be returned.
    """
    # Only compute the documented routes / defaults when the caller did not
    # supply them explicitly in the template context.
    if 'autodoc' not in context:
        context['autodoc'] = self.generate(groups=groups)
    if 'defaults' not in context:
        context['defaults'] = self.default_props

    if template:
        return render_template(template, **context)

    # No template given: render the bundled default template as a string.
    default_template = os.path.join(
        os.path.dirname(__file__), 'templates', 'autodoc_default.html'
    )
    with open(default_template) as template_file:
        template_source = template_file.read()
    with current_app.app_context():
        return render_template_string(template_source, **context)
python
def parse_tx_op_return(tx): """ Given a transaction, locate its OP_RETURN and parse out its opcode and payload. Return (opcode, payload) on success Return (None, None) if there is no OP_RETURN, or if it's not a blockchain ID operation. """ # find OP_RETURN output op_return = None outputs = tx['vout'] for out in outputs: script_key = out['scriptPubKey']['hex'] if int(script_key[0:2], 16) == virtualchain.OPCODE_VALUES['OP_RETURN']: op_return = script_key.decode('hex') break if op_return is None: msg = 'transaction has no OP_RETURN output' log.error(msg) log.debug('{}:\n{}'.format(msg, simplejson.dumps(tx))) return None, None # [0] is OP_RETURN, [1] is the length; [2:4] are 'id', [4] is opcode magic = op_return[2:4] if magic != blockstack_magic_bytes(): # not a blockchain ID operation msg = 'OP_RETURN output does not encode a blockchain ID operation' log.error(msg) return None, None opcode, payload = op_return[4], op_return[5:] return (opcode, payload)
java
/**
 * Accepts an AST visitor by dispatching to the generic
 * {@link AstVisitor#visitExpression} callback for this expression node.
 */
@Override
protected <R, C> R accept(AstVisitor<R, C> visitor, C context) {
    return visitor.visitExpression(this, context);
}
java
/**
 * Validates the {@link Digits} constraint on the given object: when the
 * value's type is a supported digit type and its string form is not purely
 * numeric, a validation exception carrying the annotation's message is thrown.
 *
 * @param validationObject the value to validate (null is considered valid)
 * @param annotate the {@link Digits} annotation instance
 * @return true when no exception is thrown (null, unsupported type, or valid)
 */
private boolean validateDigits(Object validationObject, Annotation annotate) {
    if (checkNullObject(validationObject)) {
        return true;
    }
    if (checkvalidDigitTypes(validationObject.getClass())) {
        if (!NumberUtils.isDigits(toString(validationObject))) {
            throwValidationException(((Digits) annotate).message());
        }
    }
    return true;
}
python
def is_binarized(self):
    """Return True if the pianoroll is already binarized. Otherwise, return
    False.

    Returns
    -------
    is_binarized : bool
        True if the pianoroll is already binarized; otherwise, False.

    """
    # A pianoroll counts as binarized exactly when its dtype is boolean.
    return np.issubdtype(self.pianoroll.dtype, np.bool_)
python
def append_field(self, fieldname):
    # type: (str) -> None
    '''
    Mark a field as present in the Rock Ridge records.

    Parameters:
     fieldname - The name of the field to mark as present; should be one
                 of 'PX', 'PN', 'SL', 'NM', 'CL', 'PL', 'RE', or 'TF'.
    Returns:
     Nothing.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('RR record not yet initialized!')

    # Bit position in rr_flags for each supported field name.
    field_bits = {'PX': 0, 'PN': 1, 'SL': 2, 'NM': 3,
                  'CL': 4, 'PL': 5, 'RE': 6, 'TF': 7}
    if fieldname not in field_bits:
        raise pycdlibexception.PyCdlibInternalError('Unknown RR field name %s' % (fieldname))

    self.rr_flags |= (1 << field_bits[fieldname])
python
def ready(self):
    """ Add user info to ExtensibleNodeSerializer """
    from nodeshot.core.nodes.base import ExtensibleNodeSerializer
    from .models import Vote
    from .serializers import (CommentRelationSerializer,
                              ParticipationSerializer)

    # nested comment objects on each node
    ExtensibleNodeSerializer.add_relationship(
        'comments',
        serializer=CommentRelationSerializer,
        many=True,
        queryset=lambda obj, request: obj.comment_set.all()
    )

    # aggregate participation counts (ratings / votes / comments)
    ExtensibleNodeSerializer.add_relationship(
        'counts',
        serializer=ParticipationSerializer,
        queryset=lambda obj, request: obj.noderatingcount
    )

    # hyperlinks to the participation endpoints, resolved by node slug
    ExtensibleNodeSerializer.add_relationship(
        'votes_url',
        view_name='api_node_votes',
        lookup_field='slug'
    )

    ExtensibleNodeSerializer.add_relationship(
        'ratings_url',
        view_name='api_node_ratings',
        lookup_field='slug'
    )

    ExtensibleNodeSerializer.add_relationship(
        'comments_url',
        view_name='api_node_comments',
        lookup_field='slug'
    )

    def voted(obj, request):
        """ Determines if current logged-in user has already voted on a node
        returns 1 if user has already liked
        returns -1 if user has already disliked
        returns False if user hasn't voted or if not authenticated
        """
        # NOTE(review): `is_authenticated()` is the pre-Django-1.10 callable
        # form; on Django >= 1.10 this must be a property access — confirm
        # the supported Django version.
        if request.user.is_authenticated():
            v = Vote.objects.filter(node_id=obj.id,
                                    user_id=request.user.id)
            if len(v) > 0:
                return v[0].vote
        # hasn't voted yet or not authenticated
        return False

    ExtensibleNodeSerializer.add_relationship(
        'voted',
        function=voted
    )

    # per-node participation permission flags
    ExtensibleNodeSerializer.add_relationship(
        'voting_allowed',
        function=lambda obj, request: obj.voting_allowed
    )

    ExtensibleNodeSerializer.add_relationship(
        'rating_allowed',
        function=lambda obj, request: obj.rating_allowed
    )

    ExtensibleNodeSerializer.add_relationship(
        'comments_allowed',
        function=lambda obj, request: obj.comments_allowed
    )
python
def _locate_bar_gen(icut, epos, transform1, transform2):
    """Generic function for the fine position of the CSU.

    Maps the expected position into the sampled coordinate system, runs the
    half-height peak search, and maps the result back. On a ValueError from
    the search the original expected position is returned with error code 2.
    """
    epos_pix = coor_to_pix_1d(epos)

    # map into the sampled coordinate system
    epos_pix_s = transform1(epos_pix)
    icut2 = transform2(icut)

    try:
        res = position_half_h(icut2, epos_pix_s)
        xint_s, next_peak_s, wpos1_s, wpos2_s, background_level, half_height = res
        # map the detected position back
        epos_f = transform1(xint_s)
        error = 0
    except ValueError:
        epos_f = epos
        error = 2

    return epos_pix, epos_f, error
python
def get_ntlmv1_response(password, challenge):
    """
    Compute the NTLMv1 challenge response and session key for *password*.

    The NT hash (MD4 over the UTF-16LE password, via ``ntowfv1``) is split
    into three 7-byte DES key chunks, each of which encrypts the server
    *challenge*; the concatenation is the 24-byte response.

    NOTE(review): the NT hash is 16 bytes, so the third slice
    (``ntlm_hash[14:]``) is only 2 bytes — presumably
    ``_encrypt_des_block`` zero-pads short keys (the spec pads the hash to
    21 bytes before splitting); confirm in that helper.
    """
    ntlm_hash = PasswordAuthentication.ntowfv1(password.encode('utf-16le'))
    response = PasswordAuthentication._encrypt_des_block(ntlm_hash[:7], challenge)
    response += PasswordAuthentication._encrypt_des_block(ntlm_hash[7:14], challenge)
    response += PasswordAuthentication._encrypt_des_block(ntlm_hash[14:], challenge)

    # The NTLMv1 session key is simply the MD4 hash of the ntlm hash
    session_hash = hashlib.new('md4')
    session_hash.update(ntlm_hash)
    return response, session_hash.digest()
java
/**
 * Removes the road segment at the given global index, where the index runs
 * sequentially across all paths in iteration order.
 *
 * @param index global index of the segment to remove
 * @return the removed road segment
 * @throws IndexOutOfBoundsException if the index is negative or no path
 *         contains it
 */
public RoadSegment removeRoadSegmentAt(int index) {
    if (index >= 0) {
        int b = 0;
        for (final RoadPath p : this.paths) {
            int end = b + p.size();
            if (index < end) {
                // translate the global index into an index local to this path
                end = index - b;
                return removeRoadSegmentAt(p, end, null);
            }
            b = end;
        }
    }
    throw new IndexOutOfBoundsException();
}
python
def getrealpath(self, root, path):
    '''
    Return the real path on disk from the query path, from a root path.
    The input path from URL might be absolute '/abc', or point to parent '../test',
    or even with UNC or drive '\\test\abc', 'c:\test.abc', which creates security
    issues when accessing file contents with the path. With getrealpath, these paths
    cannot point to files beyond the root path.

    :param root: root path of disk files, any query is limited in root directory.

    :param path: query path from URL.

    :returns: the sanitized path joined under ``root``.

    :raises HttpInputException: if the normalized path still carries a drive
        (or UNC share) component.
    '''
    if not isinstance(path, str):
        path = path.decode(self.encoding)
    # In windows, if the path starts with multiple / or \, the os.path library
    # may consider it an UNC path, remove them; also replace \ to /
    path = pathrep.subn('/', path)[0]
    # The relative root is considered ROOT, eliminate any relative path like ../abc,
    # which create security issues
    # We can use os.path.relpath(..., '/') but python2.6 os.path.relpath is buggy
    path = os.path.normpath(os.path.join('/', path))
    # The normalized path can be an UNC path, or event a path with drive letter
    # Send bad request for these types
    if os.path.splitdrive(path)[0]:
        raise HttpInputException('Bad path')
    # path is now absolute under '/', so drop the leading separator before joining
    return os.path.join(root, path[1:])
python
def get_value(self, name, data):
    """
    Get the value of this field from the data.
    If there is a problem with the data, raise ValidationError.

    :param name: Name of this field (to retrieve from data).
    :param data: Dictionary of data for all fields.
    :raises: ValidationError
    :return: The value of this field.
    :rtype: any
    """
    if name in data:
        return data.get(name)

    # Compare against None rather than truthiness so falsy defaults
    # such as 0, '' or False are still honoured.
    if self.default is not None:
        if callable(self.default):
            return self.default()
        return self.default

    return None
python
def _run_includemes(configurator, includemes): """ Automatically include packages defined in **include** configuration key. :param pyramid.config.Configurator configurator: pyramid's app configurator :param dict includemes: include, a list of includes or dictionary """ for include in includemes: if includemes[include]: try: configurator.include(include, includemes[include]) except AttributeError: configurator.include(include)
java
/**
 * Renders the given throwable as a JavaScript snippet: the HTML-escaped
 * message inside a block comment, followed by an empty {@code init()}
 * function so the calling page still finds the expected entry point.
 *
 * @param t the error to report
 * @return the generated JavaScript source
 */
private String printError(Throwable t) {
    StringBuffer result = new StringBuffer(1024);
    result.append("/*\n");
    result.append(CmsStringUtil.escapeHtml(t.getMessage()));
    result.append("\n*/\n");
    result.append("function init() {\n");
    result.append("}\n");
    return result.toString();
}
java
/**
 * Null-safe comparison of two Comparables. Null is ordered after non-null
 * values: a null {@code obj1} compares greater (returns 1), a null
 * {@code obj2} compares smaller (returns -1); otherwise delegates to
 * {@code obj1.compareTo(obj2)}.
 */
@NullSafe
public static <T extends Comparable<T>> int compareIgnoreNull(T obj1, T obj2) {
    return (obj1 == null ? 1 : (obj2 == null ? -1 : obj1.compareTo(obj2)));
}
java
/**
 * Computes tan(x) (or cot(x) when {@code cotanFlag} is set) for an argument
 * already reduced to the first quadrant, using table lookups plus split
 * double-double arithmetic to keep full double precision.
 *
 * @param xa high-order part of the reduced argument
 * @param xb low-order correction part of the reduced argument
 * @param cotanFlag if true, compute the cotangent instead of the tangent
 * @return tan(xa + xb), or cot(xa + xb) when {@code cotanFlag} is set
 */
private static double tanQ(double xa, double xb, boolean cotanFlag) {

    int idx = (int) ((xa * 8.0) + 0.5);
    final double epsilon = xa - EIGHTHS[idx]; //idx*0.125;

    // Table lookups
    final double sintA = SINE_TABLE_A[idx];
    final double sintB = SINE_TABLE_B[idx];
    final double costA = COSINE_TABLE_A[idx];
    final double costB = COSINE_TABLE_B[idx];

    // Polynomial eval of sin(epsilon), cos(epsilon)
    double sinEpsA = epsilon;
    double sinEpsB = polySine(epsilon);
    final double cosEpsA = 1.0;
    final double cosEpsB = polyCosine(epsilon);

    // Split epsilon   xa + xb = x
    double temp = sinEpsA * HEX_40000000;
    double temp2 = (sinEpsA + temp) - temp;
    sinEpsB += sinEpsA - temp2;
    sinEpsA = temp2;

    /* Compute sin(x) by angle addition formula */

    /* Compute the following sum:
     *
     * result = sintA + costA*sinEpsA + sintA*cosEpsB + costA*sinEpsB +
     *          sintB + costB*sinEpsA + sintB*cosEpsB + costB*sinEpsB;
     *
     * Ranges of elements
     *
     * xxxtA   0            PI/2
     * xxxtB   -1.5e-9      1.5e-9
     * sinEpsA -0.0625      0.0625
     * sinEpsB -6e-11       6e-11
     * cosEpsA  1.0
     * cosEpsB  0           -0.0625
     *
     */

    //result = sintA + costA*sinEpsA + sintA*cosEpsB + costA*sinEpsB +
    //          sintB + costB*sinEpsA + sintB*cosEpsB + costB*sinEpsB;

    //result = sintA + sintA*cosEpsB + sintB + sintB * cosEpsB;
    //result += costA*sinEpsA + costA*sinEpsB + costB*sinEpsA + costB * sinEpsB;
    double a = 0;
    double b = 0;

    // Compute sine (two-sum accumulation keeps the low-order bits in b)
    double t = sintA;
    double c = a + t;
    double d = -(c - a - t);
    a = c;
    b = b + d;

    t = costA*sinEpsA;
    c = a + t;
    d = -(c - a - t);
    a = c;
    b = b + d;

    b = b + sintA*cosEpsB + costA*sinEpsB;
    b = b + sintB + costB*sinEpsA + sintB*cosEpsB + costB*sinEpsB;

    double sina = a + b;
    double sinb = -(sina - a - b);

    // Compute cosine
    a = b = c = d = 0.0;

    t = costA*cosEpsA;
    c = a + t;
    d = -(c - a - t);
    a = c;
    b = b + d;

    t = -sintA*sinEpsA;
    c = a + t;
    d = -(c - a - t);
    a = c;
    b = b + d;

    b = b + costB*cosEpsA + costA*cosEpsB + costB*cosEpsB;
    b = b - (sintB*sinEpsA + sintA*sinEpsB + sintB*sinEpsB);

    double cosa = a + b;
    double cosb = -(cosa - a - b);

    if (cotanFlag) {
        // cot(x) = cos(x)/sin(x): swap the sine and cosine pairs
        double tmp;
        tmp = cosa; cosa = sina; sina = tmp;
        tmp = cosb; cosb = sinb; sinb = tmp;
    }

    /* estimate and correct, compute 1.0/(cosa+cosb) */
    /*
    double est = (sina+sinb)/(cosa+cosb);
    double err = (sina - cosa*est) + (sinb - cosb*est);
    est += err/(cosa+cosb);
    err = (sina - cosa*est) + (sinb - cosb*est);
    */

    // f(x) = 1/x,   f'(x) = -1/x^2
    double est = sina/cosa;

    /* Split the estimate to get more accurate read on division rounding */
    temp = est * HEX_40000000;
    double esta = (est + temp) - temp;
    double estb = est - esta;

    temp = cosa * HEX_40000000;
    double cosaa = (cosa + temp) - temp;
    double cosab = cosa - cosaa;

    //double err = (sina - est*cosa)/cosa;  // Correction for division rounding
    double err = (sina - esta*cosaa - esta*cosab - estb*cosaa - estb*cosab)/cosa;  // Correction for division rounding
    err += sinb/cosa;                       // Change in est due to sinb
    err += -sina * cosb / cosa / cosa;      // Change in est due to cosb

    if (xb != 0.0) {
        // tan' = 1 + tan^2      cot' = -(1 + cot^2)
        // Approximate impact of xb
        double xbadj = xb + est*est*xb;
        if (cotanFlag) {
            xbadj = -xbadj;
        }

        err += xbadj;
    }

    return est+err;
}
java
/**
 * Adds a named attachment entry to this document's "_attachments"
 * property map, creating the map if it does not exist yet.
 *
 * NOTE(review): when {@code attachment} is null, a null value is stored
 * under {@code name} rather than the key being removed — presumably a
 * deletion marker for the save logic; confirm against the revision-save
 * code path.
 */
@InterfaceAudience.Private
protected void addAttachment(Attachment attachment, String name) {
    Map<String, Object> attachments = (Map<String, Object>) properties.get("_attachments");
    if (attachments == null) {
        attachments = new HashMap<String, Object>();
    }
    attachments.put(name, attachment);
    properties.put("_attachments", attachments);
    if (attachment != null) {
        attachment.setName(name);
    }
}
java
/**
 * Reacts to Swing "lookAndFeel" property change events by forwarding the
 * old/new {@link LookAndFeel} pair to {@link #onLookAndFeelChange}; all
 * other property changes are ignored.
 */
@Override
public final void propertyChange(PropertyChangeEvent evt) {
    if ("lookAndFeel".equals(evt.getPropertyName())) {
        this.onLookAndFeelChange((LookAndFeel) evt.getOldValue(), (LookAndFeel) evt.getNewValue());
    }
}
java
/**
 * EMF-generated reflective check of whether the given feature deviates from
 * its default. All defaults here are null, so a feature counts as "set"
 * exactly when its field is non-null.
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case AfplibPackage.RENDERING_INTENT__RESERVED:
            return RESERVED_EDEFAULT == null ? reserved != null : !RESERVED_EDEFAULT.equals(reserved);
        case AfplibPackage.RENDERING_INTENT__IOCARI:
            return IOCARI_EDEFAULT == null ? iocari != null : !IOCARI_EDEFAULT.equals(iocari);
        case AfplibPackage.RENDERING_INTENT__OCRI:
            return OCRI_EDEFAULT == null ? ocri != null : !OCRI_EDEFAULT.equals(ocri);
        case AfplibPackage.RENDERING_INTENT__PTOCRI:
            return PTOCRI_EDEFAULT == null ? ptocri != null : !PTOCRI_EDEFAULT.equals(ptocri);
        case AfplibPackage.RENDERING_INTENT__GOCARI:
            return GOCARI_EDEFAULT == null ? gocari != null : !GOCARI_EDEFAULT.equals(gocari);
        case AfplibPackage.RENDERING_INTENT__RESERVED2:
            return RESERVED2_EDEFAULT == null ? reserved2 != null : !RESERVED2_EDEFAULT.equals(reserved2);
    }
    return super.eIsSet(featureID);
}
python
def _get_keycachelike(self, keycache, keys, get_adds_dels, parentity, branch, turn, tick, *, forward):
    """Try to retrieve a frozenset representing extant keys.

    If I can't, generate one, store it, and return it.

    :param keycache: cache mapping (parentity + (branch,)) -> turn -> tick -> frozenset
    :param keys: source data the adds/dels are computed from
    :param get_adds_dels: callable returning (added, deleted) key sets
    :param forward: when True, assume time only moves forward and copy
        prior-turn cache entries forward; otherwise walk parent branches.
    """
    keycache_key = parentity + (branch,)
    keycache2 = keycache3 = None
    if keycache_key in keycache:
        keycache2 = keycache[keycache_key]
        if turn in keycache2:
            keycache3 = keycache2[turn]
            if tick in keycache3:
                # exact hit
                return keycache3[tick]
    if forward:
        # Take valid values from the past of a keycache and copy them forward, into the present.
        # Assumes that time is only moving forward, never backward, never skipping any turns or ticks,
        # and any changes to the world state are happening through allegedb proper, meaning they'll all get cached.
        # In LiSE this means every change to the world state should happen inside of a call to
        # ``Engine.next_turn`` in a rule.
        if keycache2 and keycache2.rev_gettable(turn):
            # there's a keycache from a prior turn in this branch. Get it
            if turn not in keycache2:
                # since it's not this *exact* turn there might be changes...
                old_turn = keycache2.rev_before(turn)
                old_turn_kc = keycache2[turn]
                added, deleted = get_adds_dels(
                    keys[parentity], branch, turn, tick, stoptime=(
                        branch, old_turn, old_turn_kc.end
                    )
                )
                ret = old_turn_kc[old_turn_kc.end].union(added).difference(deleted)
                # assert ret == get_adds_dels(keys[parentity], branch, turn, tick)[0]  # slow
                new_turn_kc = WindowDict()
                new_turn_kc[tick] = ret
                keycache2[turn] = new_turn_kc
                return ret
            if not keycache3:
                keycache3 = keycache2[turn]
            if tick not in keycache3:
                if keycache3.rev_gettable(tick):
                    # forward within the same turn from an earlier tick
                    added, deleted = get_adds_dels(
                        keys[parentity], branch, turn, tick, stoptime=(
                            branch, turn, keycache3.rev_before(tick)
                        )
                    )
                    ret = keycache3[tick].union(added).difference(deleted)
                    # assert ret == get_adds_dels(keys[parentity], branch, turn, tick)[0]  # slow
                    keycache3[tick] = ret
                    return ret
                else:
                    # no usable tick this turn: start from the previous turn's final tick
                    turn_before = keycache2.rev_before(turn)
                    tick_before = keycache2[turn_before].end
                    keys_before = keycache2[turn_before][tick_before]
                    added, deleted = get_adds_dels(
                        keys[parentity], branch, turn, tick, stoptime=(
                            branch, turn_before, tick_before
                        )
                    )
                    ret = keycache3[tick] = keys_before.union(added).difference(deleted)
                    # assert ret == get_adds_dels(keys[parentity], branch, turn, tick)[0]  # slow
                    return ret
            # assert kcturn[tick] == get_adds_dels(keys[parentity], branch, turn, tick)[0]  # slow
            return keycache3[tick]
    else:
        # walk up the branch ancestry looking for a cached key set to start from
        for (parbranch, parturn, partick) in self.db._iter_parent_btt(branch, turn, tick):
            par_kc_key = parentity + (parbranch,)
            if par_kc_key in keycache:
                kcpkc = keycache[par_kc_key]
                if parturn in kcpkc and kcpkc[parturn].rev_gettable(partick):
                    parkeys = kcpkc[parturn][partick]
                    break
                elif kcpkc.rev_gettable(parturn-1):
                    partkeys = kcpkc[parturn-1]
                    parkeys = partkeys[partkeys.end]
                    break
        else:
            parkeys = frozenset()
        # NOTE(review): parbranch/parturn/partick below come from the last loop
        # iteration; if _iter_parent_btt yields nothing this raises NameError —
        # presumably it always yields at least once; confirm.
        keycache2 = SettingsTurnDict()
        added, deleted = get_adds_dels(
            keys[parentity], branch, turn, tick, stoptime=(
                parbranch, parturn, partick
            )
        )
        ret = parkeys.union(added).difference(deleted)
        keycache2[turn] = {tick: ret}
        keycache[keycache_key] = keycache2
        # assert ret == get_adds_dels(keys[parentity], branch, turn, tick)[0]  # slow
        return ret
    # fall-through: forward mode with no usable prior cache — recompute from scratch
    ret = frozenset(get_adds_dels(keys[parentity], branch, turn, tick)[0])
    if keycache2:
        if keycache3:
            keycache3[tick] = ret
        else:
            keycache2[turn] = {tick: ret}
    else:
        kcc = SettingsTurnDict()
        kcc[turn] = {tick: ret}
        keycache[keycache_key] = kcc
    return ret
python
def get_path(root, path, default=_UNSET):
    """Retrieve a value from a nested object via a tuple representing the
    lookup path.

    >>> root = {'a': {'b': {'c': [[1], [2], [3]]}}}
    >>> get_path(root, ('a', 'b', 'c', 2, 0))
    3

    The path format is intentionally consistent with that of
    :func:`remap`.

    One of get_path's chief aims is improved error messaging. EAFP is
    great, but the error messages are not.

    For instance, ``root['a']['b']['c'][2][1]`` gives back
    ``IndexError: list index out of range``

    What went out of range where? get_path currently raises
    ``PathAccessError: could not access 2 from path ('a', 'b', 'c', 2,
    1), got error: IndexError('list index out of range',)``, a subclass
    of IndexError and KeyError.

    You can also pass a default that covers the entire operation,
    should the lookup fail at any level.

    Args:
       root: The target nesting of dictionaries, lists, or other
          objects supporting ``__getitem__``.
       path (tuple): A list of strings and integers to be successively
          looked up within *root*.
       default: The value to be returned should any
          ``PathAccessError`` exceptions be raised.
    """
    if isinstance(path, six.string_types):
        # convenience: allow dotted-string paths like "a.b.c"
        path = path.split(".")
    cur = root
    try:
        for seg in path:
            try:
                cur = cur[seg]
            except (KeyError, IndexError) as exc:
                raise PathAccessError(exc, seg, path)
            except TypeError as exc:
                # either string index in a list, or a parent that
                # doesn't support indexing
                try:
                    seg = int(seg)
                    cur = cur[seg]
                except (ValueError, KeyError, IndexError, TypeError):
                    if not getattr(cur, "__iter__", None):
                        exc = TypeError("%r object is not indexable"
                                        % type(cur).__name__)
                    raise PathAccessError(exc, seg, path)
    except PathAccessError:
        # a whole-operation default suppresses any access failure
        if default is _UNSET:
            raise
        return default
    return cur
java
/**
 * JAXB factory method creating a {@link JAXBElement} wrapper for a GML
 * {@code directedNode} property element.
 */
@XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "directedNode")
public JAXBElement<DirectedNodePropertyType> createDirectedNode(DirectedNodePropertyType value) {
    return new JAXBElement<DirectedNodePropertyType>(_DirectedNode_QNAME, DirectedNodePropertyType.class, null, value);
}
python
def image_transformer2d_base():
    """Set of hyperparameters for the base 2D image transformer."""
    hparams = common_hparams.basic_params1()
    hparams.hidden_size = 512
    hparams.batch_size = 1
    hparams.max_length = 256
    hparams.dropout = 0.0
    hparams.clip_grad_norm = 0.  # i.e. no gradient clipping
    hparams.optimizer_adam_epsilon = 1e-9
    hparams.learning_rate_decay_scheme = "noam"
    hparams.learning_rate = 0.1
    hparams.learning_rate_warmup_steps = 4000
    hparams.initializer_gain = 0.2
    hparams.initializer = "uniform_unit_scaling"
    hparams.weight_decay = 0.0
    hparams.optimizer_adam_beta1 = 0.9
    hparams.optimizer_adam_beta2 = 0.98
    hparams.label_smoothing = 0.0
    hparams.bottom["targets"] = modalities.make_targets_bottom(
        modalities.image_channel_embeddings_bottom)
    hparams.top["targets"] = modalities.identity_top
    hparams.norm_type = "layer"
    hparams.layer_prepostprocess_dropout = 0.0
    hparams.add_hparam("filter_size", 512)  # Add new ones like this.

    # attention-related flags
    hparams.add_hparam("num_heads", 8)
    hparams.add_hparam("attention_key_channels", 0)
    hparams.add_hparam("attention_value_channels", 0)
    hparams.add_hparam("ffn_layer", "conv_hidden_relu")
    # All hyperparameters ending in "dropout" are automatically set to 0.0
    # when not in training mode.
    hparams.add_hparam("attention_dropout", 0.0)
    hparams.add_hparam("relu_dropout", 0.0)
    hparams.add_hparam("pos", "timing")  # timing, none
    hparams.add_hparam("nbr_decoder_problems", 1)
    hparams.add_hparam("num_output_layers", 3)
    hparams.add_hparam("block_size", 1)

    # image size related flags
    # assuming that the image has same height and width
    hparams.add_hparam("img_len", 32)
    hparams.add_hparam("num_channels", 3)
    # Local attention params
    hparams.add_hparam("local_and_global_att", False)
    hparams.add_hparam("block_length", 256)
    hparams.add_hparam("block_width", 128)
    # Local 2D attention params
    hparams.add_hparam("query_shape", (16, 16))
    hparams.add_hparam("memory_flange", (16, 32))
    hparams.add_hparam("num_encoder_layers", 4)
    hparams.add_hparam("num_decoder_layers", 8)
    # attention type related params
    hparams.add_hparam("enc_attention_type", cia.AttentionType.GLOBAL)
    hparams.add_hparam("dec_attention_type", cia.AttentionType.LOCAL_2D)
    hparams.add_hparam("block_raster_scan", False)

    # multipos attention params
    hparams.add_hparam("q_filter_width", 1)
    hparams.add_hparam("kv_filter_width", 1)

    hparams.add_hparam("unconditional", False)  # unconditional generation

    # relative embedding hparams
    hparams.add_hparam("shared_rel", False)
    return hparams
java
/**
 * Verifies that the encoding declared in the XML declaration is compatible
 * with the physical encoding auto-detected from the document's first bytes
 * ({@code mBytesPerChar}).
 *
 * @param id the declared encoding name
 * @param bpc bytes per character implied by the declared encoding
 * @throws WstxException if the declared and detected encodings disagree
 */
private void verifyEncoding(String id, int bpc) throws WstxException {
    if (mByteSizeFound) {
        /* Let's verify that if we matched an encoding, it's the same
         * as what was declared...
         */
        if (bpc != mBytesPerChar) {
            // [WSTX-138]: Needs to detect EBCDIC discrepancy
            if (mEBCDIC) {
                reportXmlProblem("Declared encoding '"+id+"' incompatible with auto-detected physical encoding (EBCDIC variant), can not decode input since actual code page not known");
            }
            reportXmlProblem("Declared encoding '"+id+"' uses "+bpc
                             +" bytes per character; but physical encoding appeared to use "+mBytesPerChar+"; cannot decode");
        }
    }
}
java
/**
 * Returns the MIME content type for this item, guessing it from the data
 * stream or from the file name, and falling back to
 * "application/octet-stream" when no guess succeeds. The result is cached.
 *
 * NOTE(review): the name-based guess only runs when {@code data != null} —
 * presumably intentional (stream-backed items rely solely on the stream
 * guess); confirm against this class's construction paths.
 */
public String getContentType() {
    if (contentType != null) {
        return contentType;
    }
    if (dataStream != null) {
        try {
            contentType = URLConnection.guessContentTypeFromStream(dataStream);
        } catch (IOException ioe) {
            // ignore exception
        }
    }
    if (data != null) {
        contentType = URLConnection.guessContentTypeFromName(filename);
    }
    // fallback - if we have no contenttype and cannot detect one, use 'application/octet-stream'
    if (contentType == null) {
        contentType = "application/octet-stream";
    }
    return contentType;
}
java
/**
 * Fetches the cache policy bindings for the given policy names from the
 * appliance, issuing one nitro request per name.
 *
 * @param service the nitro service connection
 * @param policyname array of cache policy names to look up
 * @return bindings in the same order as {@code policyname}, or null when
 *         no names are supplied
 * @throws Exception if any nitro request fails
 */
public static cachepolicy_binding[] get(nitro_service service, String policyname[]) throws Exception{
    if (policyname !=null && policyname.length>0) {
        cachepolicy_binding response[] = new cachepolicy_binding[policyname.length];
        cachepolicy_binding obj[] = new cachepolicy_binding[policyname.length];
        for (int i=0;i<policyname.length;i++) {
            obj[i] = new cachepolicy_binding();
            obj[i].set_policyname(policyname[i]);
            response[i] = (cachepolicy_binding) obj[i].get_resource(service);
        }
        return response;
    }
    return null;
}
java
/**
 * Sets the list of configuration recorder names, copying the given
 * collection into the SDK's internal list type; a null argument clears
 * the list.
 *
 * @param configurationRecorderNames the recorder names, or null
 */
public void setConfigurationRecorderNames(java.util.Collection<String> configurationRecorderNames) {
    if (configurationRecorderNames == null) {
        this.configurationRecorderNames = null;
        return;
    }

    this.configurationRecorderNames = new com.amazonaws.internal.SdkInternalList<String>(configurationRecorderNames);
}
python
def updateSolutionTerminal(self):
    '''
    Update the terminal period solution.  This method should be run when a
    new AgentType is created or when CRRA changes.

    Parameters
    ----------
    None

    Returns
    -------
    None
    '''
    # Build the single-state terminal solution first...
    AggShockConsumerType.updateSolutionTerminal(self)

    # ...then replicate each terminal-period attribute once per Markov state.
    state_count = self.MrkvArray.shape[0]
    terminal = self.solution_terminal
    terminal.cFunc = [terminal.cFunc] * state_count
    terminal.vPfunc = [terminal.vPfunc] * state_count
    terminal.mNrmMin = [terminal.mNrmMin] * state_count
python
def select(message="", title="Lackey Input", options=None, default=None):
    """ Creates a dropdown selection dialog with the specified message and options

    `default` must be one of the options. Returns "" immediately when no
    options are given.

    Returns the selected value.
    """
    if options is None or len(options) == 0:
        return ""
    if default is None:
        default = options[0]
    if default not in options:
        raise ValueError("<<default>> not in options[]")
    root = tk.Tk()
    input_text = tk.StringVar()
    # Pre-seed the result variable with the message; PopupList is expected to
    # overwrite it with the user's selection.
    # NOTE(review): if the dialog is dismissed without selecting, the message
    # itself would be returned — confirm PopupList's behavior.
    input_text.set(message)
    PopupList(root, message, title, options, default, input_text)
    root.focus_force()
    root.mainloop()
    return str(input_text.get())
java
/**
 * Loads this provider's configuration from properties keyed by
 * {@code PROPERTY_FORMAT} and this provider's id. Disables the provider
 * when the mandatory LRS URL property is missing; otherwise strips any
 * trailing slashes from the URL.
 */
protected void loadConfig() {
    if (!isEnabled()) {
        return;
    }
    final String urlProp = format(PROPERTY_FORMAT, id, "url");
    LRSUrl = propertyResolver.getProperty(urlProp);
    actorName = propertyResolver.getProperty(format(PROPERTY_FORMAT, id, "actor-name"), actorName);
    actorEmail = propertyResolver.getProperty(
            format(PROPERTY_FORMAT, id, "actor-email"), actorEmail);
    activityId = propertyResolver.getProperty(
            format(PROPERTY_FORMAT, id, "activity-id"), activityId);
    stateId = propertyResolver.getProperty(format(PROPERTY_FORMAT, id, "state-id"), stateId);
    formEncodeActivityData = propertyResolver.getProperty(
            format(PROPERTY_FORMAT, id, "form-encode-activity-data"),
            Boolean.class,
            formEncodeActivityData);
    activitiesFormParamName = propertyResolver.getProperty(
            format(PROPERTY_FORMAT, id, "activity-form-param-name"),
            activitiesFormParamName);

    // the LRS URL is mandatory; without it this provider cannot operate
    if (StringUtils.isEmpty(LRSUrl)) {
        logger.error("Disabling TinCan API interface. Property {} not set!", urlProp);
        enabled = false;
        return;
    }

    // strip trailing '/' if included
    LRSUrl = LRSUrl.replaceAll("/*$", "");
}
python
def deserialize(self, raw_jws, key=None, alg=None):
    """Deserialize a JWS token.

    NOTE: Destroys any current status and tries to import the raw
    JWS provided.

    :param raw_jws: a 'raw' JWS token (JSON Encoded or Compact
     notation) string.
    :param key: A (:class:`jwcrypto.jwk.JWK`) verification key (optional).
     If a key is provided a verification step will be attempted after
     the object is successfully deserialized.
    :param alg: The signing algorithm (optional). usually the algorithm
     is known as it is provided with the JOSE Headers of the token.

    :raises InvalidJWSObject: if the raw object is an invalid JWS token.
    :raises InvalidJWSSignature: if the verification fails.
    """
    self.objects = dict()
    o = dict()
    try:
        try:
            # First attempt JSON serialization (general or flattened form)
            djws = json_decode(raw_jws)
            if 'signatures' in djws:
                # general form: one entry per signature
                o['signatures'] = list()
                for s in djws['signatures']:
                    os = self._deserialize_signature(s)
                    o['signatures'].append(os)
                    self._deserialize_b64(o, os.get('protected'))
            else:
                # flattened form: signature fields at the top level
                o = self._deserialize_signature(djws)
                self._deserialize_b64(o, o.get('protected'))

            if 'payload' in djws:
                if o.get('b64', True):
                    o['payload'] = base64url_decode(str(djws['payload']))
                else:
                    o['payload'] = djws['payload']

        except ValueError:
            # Not JSON: try the compact notation (header.payload.signature)
            c = raw_jws.split('.')
            if len(c) != 3:
                raise InvalidJWSObject('Unrecognized representation')
            p = base64url_decode(str(c[0]))
            if len(p) > 0:
                o['protected'] = p.decode('utf-8')
                self._deserialize_b64(o, o['protected'])
            o['payload'] = base64url_decode(str(c[1]))
            o['signature'] = base64url_decode(str(c[2]))

        self.objects = o

    except Exception as e:  # pylint: disable=broad-except
        raise InvalidJWSObject('Invalid format', repr(e))

    if key:
        self.verify(key, alg)
python
def truncate_money(money: Money) -> Money:
    """Truncates money amount to the number of decimals corresponding to the
    currency."""
    truncated_amount = truncate_to(money.amount, money.currency)
    return Money(truncated_amount, money.currency)
python
def contains_info(self, key, value):
    """
    Returns how many cards in the deck have the specified value under the
    specified key in their info data.

    Requires a library to be stored in the deck instance; returns 0 when
    there is no library. (The previous docstring claimed ``None`` was
    returned in that case, but the implementation has always returned 0.)
    """
    if self.library is None:
        return 0
    load = self.library.load_card
    # count matching cards rather than accumulating manually
    return sum(1 for code in self.cards if load(code).get_info(key) == value)
python
def _get_capabilities_from_driver_type(driver_name): """Create initial driver capabilities :params driver_name: name of selected driver :returns: capabilities dictionary """ if driver_name == 'firefox': return DesiredCapabilities.FIREFOX.copy() elif driver_name == 'chrome': return DesiredCapabilities.CHROME.copy() elif driver_name == 'safari': return DesiredCapabilities.SAFARI.copy() elif driver_name == 'opera': return DesiredCapabilities.OPERA.copy() elif driver_name == 'iexplore': return DesiredCapabilities.INTERNETEXPLORER.copy() elif driver_name == 'edge': return DesiredCapabilities.EDGE.copy() elif driver_name == 'phantomjs': return DesiredCapabilities.PHANTOMJS.copy() elif driver_name in ('android', 'ios', 'iphone'): return {} raise Exception('Unknown driver {0}'.format(driver_name))
java
/**
 * Attaches an event handler to the node that, when the given event type
 * fires (and the optional callback returns true), sends the supplied wave
 * data to the service through {@code model().returnData}.
 *
 * @param node the node to listen on
 * @param eventType the JavaFX event type that triggers the service call
 * @param serviceClass the service to notify
 * @param waveType the wave type to send
 * @param callback optional filter; a null callback means "always send"
 * @param waveData payload forwarded to the service
 */
protected <E extends Event> void linkService(final Node node, final javafx.event.EventType<E> eventType,
        final Class<? extends Service> serviceClass, final WaveType waveType,
        final Callback<E, Boolean> callback, final WaveData<?>... waveData) {

    // LinkService
    node.addEventHandler(eventType, event -> {
        if (callback == null || callback.call(event)) {
            model().returnData(serviceClass, waveType, waveData);
        }
    });
}
java
/**
 * Retrieves a job: applies the client's request handler chain via
 * {@code beforeClientExecution}, then delegates to {@code executeGetJob}.
 */
@Override
public GetJobResult getJob(GetJobRequest request) {
    request = beforeClientExecution(request);
    return executeGetJob(request);
}
java
/**
 * Lazily resolves and caches the type of the wrapped parameter: the
 * method's return type when {@code parameterIndex} is negative, otherwise
 * the declared type of the parameter at that index on the method or, if no
 * method is set, on the constructor.
 */
public Class<?> getParameterType() {
    if (this.parameterType == null) {
        if (this.parameterIndex < 0) {
            this.parameterType = (this.method != null ? this.method.getReturnType() : null);
        }
        else {
            this.parameterType = (this.method != null ?
                this.method.getParameterTypes()[this.parameterIndex] :
                this.constructor.getParameterTypes()[this.parameterIndex]);
        }
    }
    return this.parameterType;
}
java
/**
 * Persist and optionally send an e-mail message.
 * New entities are inserted and marked as no longer new.  Existing
 * entities are updated; if the "actionAdd" request parameter is present
 * the message is sent first: recipients, connection string/integer
 * properties and attachments are loaded via the ORM, the mail
 * connection is opened, the e-mail transmitted, the connection closed,
 * and the entity flagged as sent.  Editing an already-sent message is
 * rejected.
 *
 * @param pAddParam additional parameters passed through to the ORM and sender
 * @param pEntity the e-mail message entity to process
 * @param pRequestData request data, checked for the "actionAdd" parameter
 * @return the processed entity
 * @throws Exception ExceptionWithCode when a sent message is edited or
 *         no recipient is chosen; any underlying ORM or mail errors
 */
@Override
public final EmailMsg process(
        final Map<String, Object> pAddParam,
        final EmailMsg pEntity,
        final IRequestData pRequestData) throws Exception {
    if (pEntity.getIsNew()) {
        this.srvOrm.insertEntity(pAddParam, pEntity);
        pEntity.setIsNew(false);
    } else {
        // A message that was already sent must never be modified again.
        if (pEntity.getIsSent()) {
            throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
                "can_not_change_sent_email");
        }
        boolean isNeedSend = false;
        if (pRequestData.getParameter("actionAdd") != null) {
            //send from form/list
            isNeedSend = true;
        }
        if (isNeedSend) {
            // Load the recipient list owned by this message; sending with
            // no recipients is a user error.
            Erecipient erec = new Erecipient();
            erec.setItsOwner(pEntity);
            pEntity.setErecipients(getSrvOrm()
                .retrieveListForField(pAddParam, erec, "itsOwner"));
            if (pEntity.getErecipients().size() == 0) {
                throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
                    "choose_recipient");
            }
            // Re-read the connection entity and attach its string/integer
            // properties before opening the mail connection.
            pEntity.setEmailConnect(this.srvOrm.retrieveEntity(pAddParam,
                pEntity.getEmailConnect()));
            EmailStringProperty emStrProp = new EmailStringProperty();
            emStrProp.setItsOwner(pEntity.getEmailConnect());
            pEntity.getEmailConnect().setStringProperties(getSrvOrm()
                .retrieveListForField(pAddParam, emStrProp, "itsOwner"));
            EmailIntegerProperty emIntProp = new EmailIntegerProperty();
            emIntProp.setItsOwner(pEntity.getEmailConnect());
            pEntity.getEmailConnect().setIntegerProperties(getSrvOrm()
                .retrieveListForField(pAddParam, emIntProp, "itsOwner"));
            // Attachments owned by this message.
            Eattachment eattach = new Eattachment();
            eattach.setItsOwner(pEntity);
            pEntity.setEattachments(getSrvOrm()
                .retrieveListForField(pAddParam, eattach, "itsOwner"));
            // Open, send, close -- then mark as sent so later edits are blocked.
            // NOTE(review): if sendEmail throws, the connection is not closed
            // here -- confirm the sender cleans up on failure.
            getEmailSender().openConnection(pAddParam, pEntity.getEmailConnect());
            getEmailSender().sendEmail(pAddParam, pEntity);
            getEmailSender().closeConnection(pAddParam, pEntity.getEmailConnect());
            pEntity.setIsSent(true);
        }
        this.srvOrm.updateEntity(pAddParam, pEntity);
    }
    return pEntity;
}
java
/**
 * Creates or updates a job agent, blocking until the operation completes.
 *
 * @param resourceGroupName the resource group containing the server
 * @param serverName the server that hosts the job agent
 * @param jobAgentName the name of the job agent
 * @param parameters the requested job agent state
 * @return the resulting job agent
 */
public JobAgentInner createOrUpdate(String resourceGroupName, String serverName, String jobAgentName, JobAgentInner parameters) {
    // Long-running operation: block on the observable and take the final
    // emission (last()), then unwrap the service-response body.
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, serverName, jobAgentName, parameters).toBlocking().last().body();
}
java
/**
 * Fetches public information about an alliance.
 *
 * @param allianceId the alliance to look up
 * @param datasource the server name to query
 * @param ifNoneMatch ETag from a previous request, or null
 * @return the alliance data
 * @throws ApiException on HTTP or deserialization failure
 */
public AllianceResponse getAlliancesAllianceId(Integer allianceId, String datasource, String ifNoneMatch) throws ApiException {
    // Delegate to the HTTP-info variant and unwrap the payload.
    return getAlliancesAllianceIdWithHttpInfo(allianceId, datasource, ifNoneMatch).getData();
}
java
/**
 * Marshalls the given request into the protocol representation.
 * Fields are emitted in a fixed order (HITTypeId, Status, NextToken,
 * MaxResults) via their pre-built bindings.
 *
 * @param listReviewableHITsRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller to write into
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(ListReviewableHITsRequest listReviewableHITsRequest, ProtocolMarshaller protocolMarshaller) {
    if (listReviewableHITsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(listReviewableHITsRequest.getHITTypeId(), HITTYPEID_BINDING);
        protocolMarshaller.marshall(listReviewableHITsRequest.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(listReviewableHITsRequest.getNextToken(), NEXTTOKEN_BINDING);
        protocolMarshaller.marshall(listReviewableHITsRequest.getMaxResults(), MAXRESULTS_BINDING);
    } catch (Exception e) {
        // Wrap any low-level failure in the SDK's client exception type,
        // preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def check_header(in_bam, rgnames, ref_file, config):
    """Ensure passed in BAM header matches reference file and read groups names.

    Runs the contig comparison first, then validates the sample /
    read-group names; returns nothing.
    NOTE(review): the helpers are defined elsewhere and are presumed to
    raise (or abort) on mismatch -- confirm in their definitions.
    """
    # Contig check first so a reference mismatch surfaces before any
    # sample-naming problem.
    _check_bam_contigs(in_bam, ref_file, config)
    _check_sample(in_bam, rgnames)
python
def _get_record(self, record_type):
    """This overrides _get_record in osid.Extensible. Perhaps we should
    leverage it somehow?

    Returns the record extension keyed by ``str(record_type)``,
    initializing it lazily on first access and, when initialization
    succeeds, remembering the type id in
    ``self._my_map['recordTypeIds']``.

    Raises ``errors.Unsupported`` when the type is neither already
    supported nor present in the known record-type data sets.
    """
    # Reject types that are neither currently supported nor known via
    # the record-type data sets.
    if (not self.has_record_type(record_type) and
            record_type.get_identifier() not in self._record_type_data_sets):
        raise errors.Unsupported()
    if str(record_type) not in self._records:
        # Lazily construct the record; only persist the type id when the
        # initializer reports success and the id is not already stored.
        record_initialized = self._init_record(str(record_type))
        if (record_initialized and
                str(record_type) not in self._my_map['recordTypeIds']):
            self._my_map['recordTypeIds'].append(str(record_type))
    return self._records[str(record_type)]
java
/**
 * Resolves the effective request URI for the given HTTP request,
 * taking the Host header into account via the URI/host overload.
 *
 * @param request the incoming HTTP request
 * @param session the I/O session the request arrived on
 * @return the resolved request URI
 */
public static URI getRequestURI(HttpRequestMessage request, IoSession session) {
    final String hostHeader = request.getHeader("Host");
    return getRequestURI(request.getRequestURI(), hostHeader, session);
}