Columns: language (string, 2 classes: java, python); func_code_string (string, 63 to 466k characters)
java
private static boolean matchRuleRegex(String regex, String value) {
    if (value == null) {
        value = "";
    }
    if (regex == null) {
        return true;
    }
    if ((regex.length() > 0) && (regex.charAt(0) == '!')) {
        return !value.matches(regex.substring(1));
    } else {
        return value.matches(regex);
    }
}
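A leading '!' gives a rule an exclusion form, a null rule matches everything, and a null value is normalized to the empty string rather than rejected. A minimal, self-contained sketch of that behavior; the demo class and sample rules are illustrative assumptions, not part of the original source:

java
public class MatchRuleRegexDemo {
    private static boolean matchRuleRegex(String regex, String value) {
        if (value == null) value = "";
        if (regex == null) return true;
        if (regex.length() > 0 && regex.charAt(0) == '!') {
            return !value.matches(regex.substring(1));
        }
        return value.matches(regex);
    }

    public static void main(String[] args) {
        System.out.println(matchRuleRegex("foo.*", "foobar"));  // true: plain match
        System.out.println(matchRuleRegex("!foo.*", "foobar")); // false: leading '!' negates
        System.out.println(matchRuleRegex(null, "anything"));   // true: null rule matches all
        System.out.println(matchRuleRegex("a+", null));         // false: null value treated as ""
    }
}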
java
@Override
public int size() {
    final Segment<K, V>[] segments = this.segments;
    long sum = 0;
    long check = 0;
    int[] mc = new int[segments.length];
    // Try a few times to get accurate count. On failure due to
    // continuous async changes in table, resort to locking.
    for (int k = 0; k < RETRIES_BEFORE_LOCK; ++k) {
        check = 0;
        sum = 0;
        int mcsum = 0;
        for (int i = 0; i < segments.length; ++i) {
            sum += segments[i].count;
            mcsum += mc[i] = segments[i].modCount;
        }
        if (mcsum != 0) {
            for (int i = 0; i < segments.length; ++i) {
                check += segments[i].count;
                if (mc[i] != segments[i].modCount) {
                    check = -1; // force retry
                    break;
                }
            }
        }
        if (check == sum) break;
    }
    if (check != sum) { // Resort to locking all segments
        sum = 0;
        for (int i = 0; i < segments.length; ++i) segments[i].lock();
        for (int i = 0; i < segments.length; ++i) sum += segments[i].count;
        for (int i = 0; i < segments.length; ++i) segments[i].unlock();
    }
    if (sum > Integer.MAX_VALUE) return Integer.MAX_VALUE;
    return (int) sum;
}
java
public static <T> TaggedValue<T> taggedValue(String tag, T rep) { return new TaggedValueImpl<T>(tag, rep); }
java
static <T extends Annotation> T findAnnotation(Method method, Class<T> clazz) {
    T annotation = AnnotationUtils.findAnnotation(method, clazz);
    if (annotation == null) {
        try {
            annotation = AnnotationUtils.findAnnotation(
                    method.getDeclaringClass().getMethod(method.getName(), method.getParameterTypes()),
                    clazz);
        } catch (NoSuchMethodException | SecurityException ex) {
            if (log.isDebugEnabled()) {
                log.debug("Exception occurred while trying to find the annotation", ex);
            }
        }
    }
    return annotation;
}
python
def _spill(self):
    """
    Dump already partitioned data into disks.
    """
    global MemoryBytesSpilled, DiskBytesSpilled
    path = self._get_spill_dir(self.spills)
    if not os.path.exists(path):
        os.makedirs(path)

    used_memory = get_used_memory()
    if not self.pdata:
        # The data has not been partitioned yet; iterate over it once,
        # writing the items into different files, using no additional
        # memory. This branch is only taken the first time memory usage
        # goes above the limit.

        # open all the files for writing
        streams = [open(os.path.join(path, str(i)), 'wb')
                   for i in range(self.partitions)]

        # If the number of keys is small, the overhead of sorting is small;
        # sort them before dumping to disk.
        self._sorted = len(self.data) < self.SORT_KEY_LIMIT
        if self._sorted:
            self.serializer = self.flattened_serializer()
            for k in sorted(self.data.keys()):
                h = self._partition(k)
                self.serializer.dump_stream([(k, self.data[k])], streams[h])
        else:
            for k, v in self.data.items():
                h = self._partition(k)
                self.serializer.dump_stream([(k, v)], streams[h])

        for s in streams:
            DiskBytesSpilled += s.tell()
            s.close()

        self.data.clear()
        # self.pdata is cached in `mergeValues` and `mergeCombiners`
        self.pdata.extend([{} for i in range(self.partitions)])
    else:
        for i in range(self.partitions):
            p = os.path.join(path, str(i))
            with open(p, "wb") as f:
                # dump items in batch
                if self._sorted:
                    # sort by key only (stable)
                    sorted_items = sorted(self.pdata[i].items(),
                                          key=operator.itemgetter(0))
                    self.serializer.dump_stream(sorted_items, f)
                else:
                    self.serializer.dump_stream(self.pdata[i].items(), f)
            self.pdata[i].clear()
            DiskBytesSpilled += os.path.getsize(p)

    self.spills += 1
    gc.collect()  # release the memory as much as possible
    MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20
java
public static Node newSeq(Object value) { Node n = allocSeq(); n.seqAdd(value); return n; }
java
@Override public java.util.concurrent.Future<ChangeMessageVisibilityBatchResult> changeMessageVisibilityBatchAsync(String queueUrl, java.util.List<ChangeMessageVisibilityBatchRequestEntry> entries) { return changeMessageVisibilityBatchAsync(new ChangeMessageVisibilityBatchRequest().withQueueUrl(queueUrl).withEntries(entries)); }
java
public static String getAttributeString(Tag tag, String attrName, String defaultValue) { Literal lit = getAttributeLiteral(tag, attrName, null); if (lit == null) return defaultValue; return lit.getString(); }
python
def _get_span(self, m):
    """
    Gets a tuple that identifies a span for the specific mention class
    that m belongs to.
    """
    return (m.sentence.id, m.char_start, m.char_end)
java
public static byte[] readBytes(InputStream in) throws IORuntimeException { final FastByteArrayOutputStream out = new FastByteArrayOutputStream(); copy(in, out); return out.toByteArray(); }
java
public List<TagFileType<TldTaglibType<T>>> getAllTagFile() { List<TagFileType<TldTaglibType<T>>> list = new ArrayList<TagFileType<TldTaglibType<T>>>(); List<Node> nodeList = childNode.get("tag-file"); for(Node node: nodeList) { TagFileType<TldTaglibType<T>> type = new TagFileTypeImpl<TldTaglibType<T>>(this, "tag-file", childNode, node); list.add(type); } return list; }
java
private static InputElement createHiddenInput(String name, String value) { InputElement input = Document.get().createHiddenInputElement(); input.setName(name); input.setValue(value); return input; }
java
public Concept lowestCommonAncestor(Concept v, Concept w) {
    if (v.taxonomy != w.taxonomy) {
        throw new IllegalArgumentException("Concepts are not from the same taxonomy.");
    }
    List<Concept> vPath = v.getPathFromRoot();
    List<Concept> wPath = w.getPathFromRoot();
    Iterator<Concept> vIter = vPath.iterator();
    Iterator<Concept> wIter = wPath.iterator();
    Concept commonAncestor = null;
    while (vIter.hasNext() && wIter.hasNext()) {
        Concept vAncestor = vIter.next();
        Concept wAncestor = wIter.next();
        if (vAncestor != wAncestor) {
            return commonAncestor;
        } else {
            commonAncestor = vAncestor;
        }
    }
    return commonAncestor;
}
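The method walks both root paths in lock-step and remembers the last node at which they still agree; that node is the lowest common ancestor. A self-contained sketch of the same technique on a hypothetical minimal tree type; the TreeNode class and demo taxonomy are assumptions, not the original Concept class:

java
import java.util.ArrayList;
import java.util.List;

// Minimal stand-in for the Concept type, for illustration only.
class TreeNode {
    final String name;
    final TreeNode parent;
    TreeNode(String name, TreeNode parent) { this.name = name; this.parent = parent; }

    List<TreeNode> pathFromRoot() {
        List<TreeNode> path = new ArrayList<>();
        for (TreeNode n = this; n != null; n = n.parent) path.add(0, n);
        return path;
    }

    static TreeNode lowestCommonAncestor(TreeNode v, TreeNode w) {
        List<TreeNode> vPath = v.pathFromRoot();
        List<TreeNode> wPath = w.pathFromRoot();
        TreeNode common = null;
        // Walk both root paths in lock-step; the LCA is the last shared node.
        for (int i = 0; i < Math.min(vPath.size(), wPath.size()); i++) {
            if (vPath.get(i) != wPath.get(i)) break;
            common = vPath.get(i);
        }
        return common;
    }

    public static void main(String[] args) {
        TreeNode root = new TreeNode("entity", null);
        TreeNode animal = new TreeNode("animal", root);
        TreeNode cat = new TreeNode("cat", animal);
        TreeNode dog = new TreeNode("dog", animal);
        System.out.println(lowestCommonAncestor(cat, dog).name); // animal
    }
}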
java
public final Table getTable(String name) { GetTableRequest request = GetTableRequest.newBuilder().setName(name).build(); return getTable(request); }
java
public static String removeNotation(String name) {
    if (name.matches("^m[A-Z]{1}")) {
        return name.substring(1, 2).toLowerCase();
    } else if (name.matches("m[A-Z]{1}.*")) {
        return name.substring(1, 2).toLowerCase() + name.substring(2);
    }
    return name;
}
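Note that Java's String.matches anchors the whole string, so the first branch only fires for exactly two-character names like "mX"; longer Hungarian-prefixed names take the second branch. An illustrative harness; the demo class and sample names are assumptions, not part of the original source:

java
public class RemoveNotationDemo {
    public static String removeNotation(String name) {
        if (name.matches("^m[A-Z]{1}")) {
            return name.substring(1, 2).toLowerCase();
        } else if (name.matches("m[A-Z]{1}.*")) {
            return name.substring(1, 2).toLowerCase() + name.substring(2);
        }
        return name;
    }

    public static void main(String[] args) {
        System.out.println(removeNotation("mValue")); // value  ('m' prefix stripped)
        System.out.println(removeNotation("mX"));     // x      (two-character case)
        System.out.println(removeNotation("value"));  // value  (no prefix, unchanged)
    }
}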
python
def override_language(self, language):
    """
    Context manager to override the instance language.
    """
    previous_language = self._linguist.language
    self._linguist.language = language
    try:
        yield
    finally:
        # Restore the previous language even if the body raises.
        self._linguist.language = previous_language
java
public List<Instance> getInstances4Attribute(final String _attributeName) throws EFapsException { final OneSelect oneselect = this.attr2OneSelect.get(_attributeName); return oneselect == null ? null : oneselect.getInstances(); }
python
def dialogues(self):
    """
    Access the dialogues

    :returns: twilio.rest.autopilot.v1.assistant.dialogue.DialogueList
    :rtype: twilio.rest.autopilot.v1.assistant.dialogue.DialogueList
    """
    if self._dialogues is None:
        self._dialogues = DialogueList(self._version, assistant_sid=self._solution['sid'], )
    return self._dialogues
python
def build_fault_model(self, collapse=False, rendered_msr=WC1994(),
                      mfd_config=None):
    '''
    Constructs a full fault model with epistemic uncertainty by enumerating
    all the possible recurrence models of each fault as separate faults,
    with the recurrence rates multiplied by the corresponding weights.

    :param bool collapse:
        Determines whether or not to collapse the branches
    :param rendered_msr:
        If the option is taken to collapse the branches then a recurrence
        model for rendering must be defined
    :param list/dict mfd_config:
        Universal list or dictionary of configuration parameters for the
        magnitude frequency distribution - will overwrite whatever is
        previously defined for the fault!
    '''
    self.source_model = mtkSourceModel(self.id, self.name)
    for fault in self.faults:
        fault.generate_recurrence_models(collapse,
                                         config=mfd_config,
                                         rendered_msr=rendered_msr)
        src_model, src_weight = fault.generate_fault_source_model()
        for iloc, model in enumerate(src_model):
            new_model = deepcopy(model)
            new_model.id = str(model.id) + '_%g' % (iloc + 1)
            new_model.mfd.occurrence_rates = \
                (np.array(new_model.mfd.occurrence_rates) *
                 src_weight[iloc]).tolist()
            self.source_model.sources.append(new_model)
java
public <T extends OmiseObject> Map<String, Object> serializeToMap(T model) { return objectMapper.convertValue(model, new TypeReference<Map<String, Object>>() { }); }
python
async def create_connection(self):
    '''Initiate a connection.'''
    connector = self.proxy or self.loop
    return await connector.create_connection(
        self.session_factory, self.host, self.port, **self.kwargs)
java
public static long[] hashToLongs(final double datum, final long seed) {
    final double d = (datum == 0.0) ? 0.0 : datum; // canonicalize -0.0, 0.0
    final long[] data = { Double.doubleToLongBits(d) }; // canonicalize all NaN forms
    return hash(data, seed);
}
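Canonicalization matters because -0.0 == 0.0 compares true yet the two values carry different bit patterns, and Double.doubleToLongBits collapses every NaN to a single canonical pattern; without these steps, inputs that compare equal could hash differently. A small sketch of those bit-level facts; the demo class is an assumption, not part of the original source:

java
public class DoubleBitsDemo {
    public static void main(String[] args) {
        // -0.0 and 0.0 are == but have different bit patterns.
        System.out.println(-0.0 == 0.0); // true
        System.out.println(Double.doubleToLongBits(-0.0) == Double.doubleToLongBits(0.0)); // false
        // doubleToLongBits maps every NaN to one canonical pattern;
        // doubleToRawLongBits would preserve distinct payloads.
        double nan1 = Double.NaN;
        double nan2 = Double.longBitsToDouble(0x7ff0000000000001L); // a different NaN payload
        System.out.println(Double.doubleToLongBits(nan1) == Double.doubleToLongBits(nan2)); // true
    }
}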
java
public IPAddress toAddress(IPVersion version) throws AddressStringException, IncompatibleAddressException {
    validate(); // call validate so that we throw consistently, cover type == INVALID, and ensure the addressProvider exists
    return addressProvider.getProviderAddress(version);
}
java
public List<String> getUnsavedResources() { List<String> list = new ArrayList<>(); List<String> l; for (int i = 0; i < getExtensionCount(); i++) { l = getExtension(i).getUnsavedResources(); if (l != null) { list.addAll(l); } } return list; }
java
public void runUncachedQuery() throws TimeoutException, InterruptedException {
    // [START bigquery_query_no_cache]
    // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
    QueryJobConfiguration queryConfig =
        QueryJobConfiguration.newBuilder(query)
            // Disable the query cache to force live query evaluation.
            .setUseQueryCache(false)
            .build();

    // Print the results.
    for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
        for (FieldValue val : row) {
            System.out.printf("%s,", val.toString());
        }
        System.out.printf("\n");
    }
    // [END bigquery_query_no_cache]
}
python
def _point_scalar(self, name=None):
    """
    Returns point scalars of a vtk object

    Parameters
    ----------
    name : str
        Name of point scalars to retrieve.

    Returns
    -------
    scalars : np.ndarray
        Numpy array of scalars
    """
    if name is None:
        # use active scalar array
        field, name = self.active_scalar_info
        if field != POINT_DATA_FIELD:
            raise RuntimeError('Must specify an array to fetch.')

    vtkarr = self.GetPointData().GetArray(name)
    if vtkarr is None:
        raise AssertionError('({}) is not a point scalar'.format(name))

    # numpy does not support bit array data types
    if isinstance(vtkarr, vtk.vtkBitArray):
        vtkarr = vtk_bit_array_to_char(vtkarr)
        if name not in self._point_bool_array_names:
            self._point_bool_array_names.append(name)

    array = vtk_to_numpy(vtkarr)
    if array.dtype == np.uint8 and name in self._point_bool_array_names:
        array = array.view(np.bool)
    return array
python
def _compile_pvariable_expression(self,
                                  expr: Expression,
                                  scope: Dict[str, TensorFluent],
                                  batch_size: Optional[int] = None,
                                  noise: Optional[List[tf.Tensor]] = None) -> TensorFluent:
    '''Compile a pvariable expression `expr` into a TensorFluent
    in the given `scope` with optional batch size.

    Args:
        expr (:obj:`rddl2tf.expr.Expression`): A RDDL pvariable expression.
        scope (Dict[str, :obj:`rddl2tf.fluent.TensorFluent`]): A fluent scope.
        batch_size (Optional[int]): The batch size.

    Returns:
        :obj:`rddl2tf.fluent.TensorFluent`: The compiled expression as a TensorFluent.
    '''
    etype = expr.etype
    args = expr.args
    name = expr._pvar_to_name(args)
    if name not in scope:
        raise ValueError('Variable {} not in scope.'.format(name))
    fluent = scope[name]
    scope = args[1] if args[1] is not None else []
    if isinstance(fluent, TensorFluent):
        fluent = TensorFluent(fluent.tensor, scope, batch=fluent.batch)
    elif isinstance(fluent, tf.Tensor):
        fluent = TensorFluent(fluent, scope, batch=self.batch_mode)
    else:
        raise ValueError('Variable in scope must be TensorFluent-like: {}'.format(fluent))
    return fluent
java
public static InsightsLogger newLogger(Context context, String clientToken) { return new InsightsLogger(context, null, null); }
python
def _astoref(ins):
    ''' Stores a floating point value into a memory address.
    '''
    output = _addr(ins.quad[1])
    value = ins.quad[2]
    if value[0] == '*':
        value = value[1:]
        indirect = True
    else:
        indirect = False

    if indirect:
        output.append('push hl')
        output.extend(_float_oper(ins.quad[2]))
        output.append('pop hl')
    else:
        output.extend(_float_oper(ins.quad[2]))

    output.append('call __STOREF')
    REQUIRES.add('storef.asm')
    return output
python
def install_egg(self, egg_name):
    """
    Install an egg into the egg directory
    """
    if not os.path.exists(self.egg_directory):
        os.makedirs(self.egg_directory)
    self.requirement_set.add_requirement(
        InstallRequirement.from_line(egg_name, None))
    try:
        self.requirement_set.prepare_files(self.finder)
        self.requirement_set.install(['--prefix=' + self.egg_directory], [])
    except DistributionNotFound:
        self.requirement_set.requirements._keys.remove(egg_name)
        raise PipException()
java
public void renderMozillaStyle(StringBuilder sb) { if (functionName != null) { sb.append(functionName).append("()"); } sb.append('@').append(fileName); if (lineNumber > -1) { sb.append(':').append(lineNumber); } }
python
def _set_ipv6_network(self, v, load=False):
    """
    Setter method for ipv6_network, mapped from YANG variable
    /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/ipv6_network (list)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_ipv6_network is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_ipv6_network() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=YANGListType("ipv6_network_address",ipv6_network.ipv6_network, yang_name="ipv6-network", rest_name="network", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ipv6-network-address', extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackNetworkConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'network', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}), is_container='list', yang_name="ipv6-network", rest_name="network", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackNetworkConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'network', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-anycast-gateway', defining_module='brocade-anycast-gateway', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """ipv6_network must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("ipv6_network_address",ipv6_network.ipv6_network, yang_name="ipv6-network", rest_name="network", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ipv6-network-address', extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackNetworkConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'network', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}), is_container='list', yang_name="ipv6-network", rest_name="network", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'AnycastGatewayLocalIpv6TrackNetworkConfig', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'network', u'cli-incomplete-command': None, u'cli-no-match-completion': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-anycast-gateway', defining_module='brocade-anycast-gateway', yang_type='list', is_config=True)""",
        })

    self.__ipv6_network = t
    if hasattr(self, '_set'):
        self._set()
python
def node_to_xml(node, output=sys.stdout, nsmap=None):
    """
    Convert a Node object into a pretty .xml file without keeping
    everything in memory. If you just want the string representation
    use tostring(node).

    :param node: a Node-compatible object (ElementTree nodes are fine)
    :param nsmap: if given, shorten the tags with aliases
    """
    if nsmap:
        for ns, prefix in nsmap.items():
            if prefix:
                node['xmlns:' + prefix[:-1]] = ns
            else:
                node['xmlns'] = ns
    with StreamingXMLWriter(output, nsmap=nsmap) as w:
        w.serialize(node)
java
@Get("/projects")
public CompletableFuture<List<ProjectDto>> listProjects(@Param("status") Optional<String> status) {
    if (status.isPresent()) {
        checkStatusArgument(status.get());
        return CompletableFuture.supplyAsync(() -> projectManager().listRemoved().stream()
                                                                   .map(ProjectDto::new)
                                                                   .collect(toImmutableList()));
    }

    return CompletableFuture.supplyAsync(() -> projectManager().list().values().stream()
                                                               .map(DtoConverter::convert)
                                                               .collect(toImmutableList()));
}
python
def write(self, outfile):
    """Write this shape list to a region file.

    Parameters
    ----------
    outfile : str
        File name
    """
    if len(self) < 1:
        print("WARNING: The region list is empty. The region file "
              "'{:s}' will be empty.".format(outfile))
        try:
            outf = _builtin_open(outfile, 'w')
            outf.close()
            return
        except IOError as e:
            cmsg = "Unable to create region file '{:s}'.".format(outfile)
            if e.args:
                e.args = (e.args[0] + '\n' + cmsg,) + e.args[1:]
            else:
                e.args = (cmsg,)
            raise e

    prev_cs = self[0].coord_format

    outf = None
    try:
        outf = _builtin_open(outfile, 'w')

        attr0 = self[0].attr[1]
        defaultline = " ".join(["{:s}={:s}".format(a, attr0[a])
                                for a in attr0 if a != 'text'])

        # first line is globals
        outf.write("global {0}\n".format(defaultline))
        # second line must be a coordinate format
        outf.write("{0}\n".format(prev_cs))

        for shape in self:
            shape_attr = '' if prev_cs == shape.coord_format \
                else shape.coord_format + "; "
            shape_excl = '-' if shape.exclude else ''
            text_coordlist = ["{:f}".format(f) for f in shape.coord_list]
            shape_coords = "(" + ",".join(text_coordlist) + ")"
            shape_comment = " # " + shape.comment if shape.comment else ''

            shape_str = (shape_attr + shape_excl + shape.name +
                         shape_coords + shape_comment)
            outf.write("{0}\n".format(shape_str))
    except IOError as e:
        cmsg = "Unable to create region file '{:s}'.".format(outfile)
        if e.args:
            e.args = (e.args[0] + '\n' + cmsg,) + e.args[1:]
        else:
            e.args = (cmsg,)
        raise e
    finally:
        if outf:
            outf.close()
java
@Override
public List<Object> getTable(final Object table) {
    List<Object> data = new ArrayList<>();
    String tableName;
    if (table instanceof TableWithNullOption) {
        // Get source table name
        tableName = ((TableWithNullOption) table).getTableName();
        // Insert null option
        data.add(null);
    } else {
        tableName = (String) table;
    }
    for (String[] row : TABLE_DATA) {
        if (row[0].equals(tableName)) {
            data.add(new TableEntry(row[1], row[2]));
        }
    }
    return data;
}
java
protected void validateSignature(ConsumerAuthentication authentication) throws AuthenticationException {
    SignatureSecret secret = authentication.getConsumerDetails().getSignatureSecret();
    String token = authentication.getConsumerCredentials().getToken();
    OAuthProviderToken authToken = null;
    if (token != null && !"".equals(token)) {
        authToken = getTokenServices().getToken(token);
    }

    String signatureMethod = authentication.getConsumerCredentials().getSignatureMethod();
    OAuthSignatureMethod method;
    try {
        method = getSignatureMethodFactory().getSignatureMethod(signatureMethod, secret,
                authToken != null ? authToken.getSecret() : null);
    } catch (UnsupportedSignatureMethodException e) {
        throw new OAuthException(e.getMessage(), e);
    }

    String signatureBaseString = authentication.getConsumerCredentials().getSignatureBaseString();
    String signature = authentication.getConsumerCredentials().getSignature();
    if (log.isDebugEnabled()) {
        log.debug("Verifying signature " + signature + " for signature base string "
                + signatureBaseString + " with method " + method.getName() + ".");
    }
    method.verify(signatureBaseString, signature);
}
python
def docgen():
    """
    Build documentation.
    """
    hitchpylibrarytoolkit.docgen(
        _storybook({}),
        DIR.project,
        DIR.key / "story",
        DIR.gen
    )
java
@Override public GetCredentialReportResult getCredentialReport(GetCredentialReportRequest request) { request = beforeClientExecution(request); return executeGetCredentialReport(request); }
python
def make_grid_with_coordtransform(numPix, deltapix, subgrid_res=1, left_lower=False, inverse=True):
    """
    same as make_grid routine, but returns the transformation matrix and
    shift between coordinates and pixel

    :param numPix:
    :param deltapix:
    :param subgrid_res:
    :param left_lower: sets the zero point at the lower left corner of the pixels
    :param inverse: bool, if true sets East as left, otherwise East is right
    :return:
    """
    numPix_eff = numPix*subgrid_res
    deltapix_eff = deltapix/float(subgrid_res)
    a = np.arange(numPix_eff)
    matrix = np.dstack(np.meshgrid(a, a)).reshape(-1, 2)

    if inverse is True:
        delta_x = -deltapix_eff
    else:
        delta_x = deltapix_eff

    if left_lower is True:
        x_grid = matrix[:, 0]*deltapix
        y_grid = matrix[:, 1]*deltapix
    else:
        x_grid = (matrix[:, 0] - (numPix_eff-1)/2.)*delta_x
        y_grid = (matrix[:, 1] - (numPix_eff-1)/2.)*deltapix_eff

    shift = (subgrid_res-1)/(2.*subgrid_res)*deltapix
    x_grid -= shift
    y_grid -= shift

    ra_at_xy_0 = x_grid[0]
    dec_at_xy_0 = y_grid[0]
    x_at_radec_0 = (numPix_eff - 1) / 2.
    y_at_radec_0 = (numPix_eff - 1) / 2.
    Mpix2coord = np.array([[delta_x, 0], [0, deltapix_eff]])
    Mcoord2pix = np.linalg.inv(Mpix2coord)
    return x_grid, y_grid, ra_at_xy_0, dec_at_xy_0, x_at_radec_0, y_at_radec_0, Mpix2coord, Mcoord2pix
java
public static int cuDeviceGetP2PAttribute(int value[], int attrib, CUdevice srcDevice, CUdevice dstDevice) { return checkResult(cuDeviceGetP2PAttributeNative(value, attrib, srcDevice, dstDevice)); }
java
@SuppressWarnings("unchecked") public <T> T get(String key, Class<T> clazz) { return (T) datas.get(key); }
java
public static void main(String[] args) throws Exception {
    // Checking input parameters
    final ParameterTool params = ParameterTool.fromArgs(args);

    // set up execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(params);

    // read input data
    DataSet<Edge> edges;
    if (params.has("edges")) {
        edges = env.readCsvFile(params.get("edges"))
                .fieldDelimiter(" ")
                .includeFields(true, true)
                .types(Integer.class, Integer.class)
                .map(new TupleEdgeConverter());
    } else {
        System.out.println("Executing EnumTriangles example with default edges data set.");
        System.out.println("Use --edges to specify file input.");
        edges = EnumTrianglesData.getDefaultEdgeDataSet(env);
    }

    // project edges by vertex id
    DataSet<Edge> edgesById = edges.map(new EdgeByIdProjector());

    DataSet<Triad> triangles = edgesById
            // build triads
            .groupBy(Edge.V1).sortGroup(Edge.V2, Order.ASCENDING).reduceGroup(new TriadBuilder())
            // filter triads
            .join(edgesById).where(Triad.V2, Triad.V3).equalTo(Edge.V1, Edge.V2).with(new TriadFilter());

    // emit result
    if (params.has("output")) {
        triangles.writeAsCsv(params.get("output"), "\n", ",");
        // execute program
        env.execute("Basic Triangle Enumeration Example");
    } else {
        System.out.println("Printing result to stdout. Use --output to specify output path.");
        triangles.print();
    }
}
python
def _get_single_depth(self, multi_index):
    '''
    Helper method for determining how many single index entries there
    are in a particular multi-index
    '''
    single_depth = 0
    for subind in multi_index:
        if is_slice_or_dim_range(subind):
            break
        single_depth += 1
    return single_depth
java
@Override public BlockChannelReader<MemorySegment> createBlockChannelReader(FileIOChannel.ID channelID, LinkedBlockingQueue<MemorySegment> returnQueue) throws IOException { checkState(!isShutdown.get(), "I/O-Manager is shut down."); return new AsynchronousBlockReader(channelID, this.readers[channelID.getThreadNum()].requestQueue, returnQueue); }
java
public static void validateLesserThan(long value, long limit, String identifier)
    throws PostConditionException {
    if (value < limit) {
        return;
    }
    throw new PostConditionException(identifier + " was not lesser than " + limit + ". Was: " + value);
}
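The check passes silently for values strictly below the limit and throws otherwise, so the limit itself fails. A small illustrative harness; the demo class and the stand-in exception are assumptions, since the real PostConditionException is not shown here:

java
// Stand-in for the library's PostConditionException, for illustration only.
class PostConditionException extends RuntimeException {
    PostConditionException(String message) { super(message); }
}

public class ValidateLesserThanDemo {
    public static void validateLesserThan(long value, long limit, String identifier)
        throws PostConditionException {
        if (value < limit) {
            return;
        }
        throw new PostConditionException(identifier + " was not lesser than " + limit + ". Was: " + value);
    }

    public static void main(String[] args) {
        validateLesserThan(3, 10, "batchSize"); // passes: 3 < 10
        try {
            validateLesserThan(10, 10, "batchSize"); // strict comparison, so 10 fails
        } catch (PostConditionException e) {
            System.out.println(e.getMessage()); // batchSize was not lesser than 10. Was: 10
        }
    }
}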
python
def get_all_operators(self):
    """|coro|

    Checks the player stats for all operators, loading them all again
    if any aren't found

    This is significantly more efficient than calling get_operator for
    every operator name.

    Returns
    -------
    dict[:class:`Operator`]
        the dictionary of all operators found"""
    if len(self.operators) >= len(OperatorStatisticNames):
        return self.operators

    result = yield from self.load_all_operators()
    return result
python
def _load(self):
    """
    Function load.

    :return: file contents
    :raises: NotFoundError if file not found
    """
    if self.is_exists():
        # Use a context manager so the file handle is always closed.
        with open(self._ref, "rb") as f:
            return f.read()
    raise NotFoundError("File %s not found" % self._ref)
python
def valid_batch(self):
    """ Returns a single batch with all the validation cases."""
    valid_fns = list(zip(*self.corpus.get_valid_fns()))
    return self.load_batch(valid_fns)
java
@Override public HTableDescriptor[] disableTables(String regex) throws IOException { HTableDescriptor[] tableDescriptors = listTables(regex); for (HTableDescriptor descriptor : tableDescriptors) { disableTable(descriptor.getTableName()); } return tableDescriptors; }
python
def from_json(cls, jsonmsg):
    """
    Create an object directly from a JSON string.

    Applies general validation after creating the object to check
    whether all required fields are present.

    Args:
        jsonmsg (str): An object encoded as a JSON string

    Returns:
        An object of the generated type

    Raises:
        ValidationError: if `jsonmsg` does not match the schema
            `cls` was generated from
    """
    import json
    msg = json.loads(jsonmsg)
    obj = cls(**msg)
    obj.validate()
    return obj
python
def ipv6(self, network=False):
    """Produce a random IPv6 address or network with a valid CIDR"""
    address = str(ip_address(self.generator.random.randint(
        2 ** IPV4LENGTH, (2 ** IPV6LENGTH) - 1)))
    if network:
        address += '/' + str(self.generator.random.randint(0, IPV6LENGTH))
        address = str(ip_network(address, strict=False))
    return address
python
def set_handler(self, language, obj):
    """Define a custom language handler for RiveScript objects.

    Pass in a ``None`` value for the object to delete an existing handler
    (for example, to prevent Python code from being able to be run by
    default).

    Look in the ``eg`` folder of the rivescript-python distribution for
    an example script that sets up a JavaScript language handler.

    :param str language: The lowercased name of the programming language.
        Examples: python, javascript, perl
    :param class obj: An instance of an implementation class object.
        It should provide the following interface::

            class MyObjectHandler:
                def __init__(self):
                    pass
                def load(self, name, code):
                    # name = the name of the object from the RiveScript code
                    # code = the source code of the object
                def call(self, rs, name, fields):
                    # rs     = the current RiveScript interpreter object
                    # name   = the name of the object being called
                    # fields = array of arguments passed to the object
                    return reply
    """
    # Allow them to delete a handler too.
    if obj is None:
        if language in self._handlers:
            del self._handlers[language]
    else:
        self._handlers[language] = obj
python
def _parse_attribute_details(self, prop=ATTRIBUTES):
    """ Concatenates a list of Attribute Details data structures parsed from a remote file """
    parsed_attributes = self._parse_attribute_details_file(prop)
    if parsed_attributes is None:
        # If not in the (official) remote location, try the tree itself
        parsed_attributes = self._parse_complex_list(prop)

    for attribute in (a for a in parsed_attributes if not a['aliases']):
        # Aliases are not in ISO standard: default to label
        attribute['aliases'] = attribute['label']

    return get_default_for_complex(prop, parsed_attributes)
java
private final void setState(int newAction, boolean validateOnly) throws TransactionException {
    final String methodName = "setState";
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(this, tc, methodName, getStateAsString());

    switch (newAction) {
        // Allow only NO_TRANSACTION_ACTIVE -> GLOBAL_TRANSACTION_ACTIVE
        case XA_START:
            if (transtate == NO_TRANSACTION_ACTIVE) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = GLOBAL_TRANSACTION_ACTIVE;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only GLOBAL_TRANSACTION_ACTIVE -> TRANSACTION_ENDING
        case XA_END:
            if (transtate == GLOBAL_TRANSACTION_ACTIVE) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = TRANSACTION_ENDING;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only GLOBAL_TRANSACTION_ACTIVE -> TRANSACTION_ENDING
        case XA_END_FAIL:
            if (transtate == GLOBAL_TRANSACTION_ACTIVE || transtate == TRANSACTION_ENDING) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = TRANSACTION_FAIL;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only TRANSACTION_ENDING --> NO_TRANSACTION_ACTIVE
        case XA_COMMIT:
        case XA_READONLY:
            if (transtate == TRANSACTION_ENDING || transtate == RECOVERY_IN_PROGRESS) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = NO_TRANSACTION_ACTIVE;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only TRANSACTION_ENDING --> NO_TRANSACTION_ACTIVE
        case XA_ROLLBACK:
            if (transtate == TRANSACTION_ENDING || transtate == TRANSACTION_FAIL || transtate == RECOVERY_IN_PROGRESS) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                // In the event of a TRANSACTION_FAIL we dont want other threads
                // to use this transaction until mc cleanup occurs, so we keep
                // this TX dirty i.e. in TRANSACTION_FAIL state.
                if (transtate == TRANSACTION_ENDING || transtate == RECOVERY_IN_PROGRESS) {
                    transtate = NO_TRANSACTION_ACTIVE;
                }
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Set the state to RECOVERY_IN_PROGRESS no matter what the starting state was.
        case XA_RECOVER:
            transtate = RECOVERY_IN_PROGRESS;
            break;

        // Send to NO_TRANSACTION_ACTIVE no matter what the starting state was.
        // This is the only time we can clean up our state for XA. Even though
        // the DB would be in a rational state, we would be in an inconsistent
        // state, so let us reset to NO_TRANSACTION_ACTIVE.
        case XA_FORGET:
            transtate = NO_TRANSACTION_ACTIVE;
            break;

        // LT.BEGIN: only allow NO_TRANSACTION_ACTIVE -> LOCAL_TRANSACTION_ACTIVE
        case LT_BEGIN:
            if (transtate == NO_TRANSACTION_ACTIVE) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = LOCAL_TRANSACTION_ACTIVE;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only LOCAL_TRANSACTION_ACTIVE -> NO_TRANSACTION_ACTIVE
        case LT_COMMIT:
        case LT_ROLLBACK:
            if (transtate == LOCAL_TRANSACTION_ACTIVE) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
                transtate = NO_TRANSACTION_ACTIVE;
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], false);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // Allow only in transaction_active
        case EXECUTE:
            if ((transtate == LOCAL_TRANSACTION_ACTIVE) || (transtate == GLOBAL_TRANSACTION_ACTIVE)) {
                if (validateOnly) {
                    if (isTraceOn && tc.isEntryEnabled())
                        Tr.exit(this, tc, methodName);
                    return;
                }
            } else {
                TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(this, tc, methodName, txe);
                throw txe;
            }
            break;

        // CLEANUP - always set to NO_TRANSACTION_ACTIVE
        case MC_CLEANUP:
            transtate = NO_TRANSACTION_ACTIVE;
            break;

        // Send to TRANSACTION_HEURISTIC_END no matter what the starting state was.
        case HEURISTIC_END:
            transtate = TRANSACTION_HEURISTIC_END;
            break;

        default:
            TransactionException txe = new TransactionException(actions[newAction], transactions[transtate], true);
            if (isTraceOn && tc.isEntryEnabled())
                Tr.exit(this, tc, methodName, txe);
            throw txe;
    }

    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(this, tc, methodName, getStateAsString());
}
java
public Vector2f setComponent(int component, float value) throws IllegalArgumentException {
    switch (component) {
        case 0:
            x = value;
            break;
        case 1:
            y = value;
            break;
        default:
            throw new IllegalArgumentException();
    }
    return this;
}
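Returning this makes the setter chainable. A minimal self-contained sketch of the same index-based accessor pattern; the trimmed-down MiniVector2f class here is an assumption, not the full library type:

java
class MiniVector2f {
    float x, y;

    MiniVector2f setComponent(int component, float value) {
        switch (component) {
            case 0: x = value; break;
            case 1: y = value; break;
            default: throw new IllegalArgumentException("component must be 0 or 1");
        }
        return this; // returning this allows call chaining
    }

    public static void main(String[] args) {
        MiniVector2f v = new MiniVector2f();
        v.setComponent(0, 1.5f).setComponent(1, -2.0f); // chained writes
        System.out.println(v.x + ", " + v.y); // 1.5, -2.0
    }
}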
java
@Override public void onToken(Context context, String token, Bundle bundle) { updateAVInstallation(token); }
java
public void setPrimaryButtonHoverColor(String color) throws HelloSignException { if (white_labeling_options == null) { white_labeling_options = new WhiteLabelingOptions(); } white_labeling_options.setPrimaryButtonHoverColor(color); }
java
public Node previousSibling() {
    if (parentNode == null)
        return null; // root
    if (siblingIndex > 0)
        return parentNode.ensureChildNodes().get(siblingIndex - 1);
    else
        return null;
}
python
def uncompress_files(original, destination):
    """
    Extract a zip archive at the original path into the destination path.

    :type original: str
    :param original: The location of zip file
    :type destination: str
    :param destination: The extract path
    """
    with zipfile.ZipFile(original) as zips:
        extract_path = os.path.join(destination)
        zips.extractall(extract_path)
java
public void addLinkValue(String ownerObjID, FieldDefinition linkDef, String targetObjID) { addColumn(SpiderService.objectsStoreName(linkDef.getTableDef()), ownerObjID, SpiderService.linkColumnName(linkDef, targetObjID)); }
java
@Override public synchronized ServiceTicket grantServiceTicket(final String id, final Service service, final ExpirationPolicy expirationPolicy, final boolean credentialProvided, final boolean onlyTrackMostRecentSession) { val serviceTicket = new ServiceTicketImpl(id, this, service, credentialProvided, expirationPolicy); trackServiceSession(serviceTicket.getId(), service, onlyTrackMostRecentSession); return serviceTicket; }
java
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
    WSubMenu menu = (WSubMenu) component;
    XmlStringBuilder xml = renderContext.getWriter();

    xml.appendTagOpen("ui:submenu");
    xml.appendAttribute("id", component.getId());
    xml.appendOptionalAttribute("class", component.getHtmlClass());
    xml.appendOptionalAttribute("track", component.isTracking(), "true");
    if (isTree(menu)) {
        xml.appendAttribute("open", String.valueOf(isOpen(menu)));
    }
    xml.appendOptionalAttribute("disabled", menu.isDisabled(), "true");
    xml.appendOptionalAttribute("hidden", menu.isHidden(), "true");
    if (menu.isTopLevelMenu()) {
        xml.appendOptionalAttribute("accessKey", menu.getAccessKeyAsString());
    } else {
        xml.appendAttribute("nested", "true");
    }
    xml.appendOptionalAttribute("type", getMenuType(menu));

    switch (menu.getMode()) {
        case CLIENT:
            xml.appendAttribute("mode", "client");
            break;
        case LAZY:
            xml.appendAttribute("mode", "lazy");
            break;
        case EAGER:
            xml.appendAttribute("mode", "eager");
            break;
        case DYNAMIC:
        case SERVER:
            // mode server mapped to mode dynamic as per https://github.com/BorderTech/wcomponents/issues/687
            xml.appendAttribute("mode", "dynamic");
            break;
        default:
            throw new SystemException("Unknown menu mode: " + menu.getMode());
    }

    xml.appendClose();

    // Paint label
    menu.getDecoratedLabel().paint(renderContext);

    MenuMode mode = menu.getMode();

    // Paint submenu items
    xml.appendTagOpen("ui:content");
    xml.appendAttribute("id", component.getId() + "-content");
    xml.appendClose();

    // Render content if not EAGER Mode or is EAGER and is the current AJAX request
    if (mode != MenuMode.EAGER || AjaxHelper.isCurrentAjaxTrigger(menu)) {
        // Visibility of content set in prepare paint
        menu.paintMenuItems(renderContext);
    }

    xml.appendEndTag("ui:content");
    xml.appendEndTag("ui:submenu");
}
java
public void marshall(DeleteProjectRequest deleteProjectRequest, ProtocolMarshaller protocolMarshaller) { if (deleteProjectRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteProjectRequest.getArn(), ARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
async def create(self, model_, **data):
    """Create a new object saved to database.
    """
    inst = model_(**data)
    query = model_.insert(**dict(inst.__data__))

    pk = await self.execute(query)
    if inst._pk is None:
        inst._pk = pk
    return inst
python
def iter_orgs(username, number=-1, etag=None):
    """List the organizations associated with ``username``.

    :param str username: (required), login of the user
    :param int number: (optional), number of orgs to return.
        Default: -1, return all of the organizations
    :param str etag: (optional), ETag from a previous request to the same
        endpoint
    :returns: generator of
        :class:`Organization <github3.orgs.Organization>`
    """
    return gh.iter_orgs(username, number, etag) if username else []
java
public void pushLoop(List<String> labelNames) { pushState(); continueLabel = new Label(); breakLabel = new Label(); if (labelNames != null) { for (String labelName : labelNames) { initLoopLabels(labelName); } } }
java
public void preClose(HashMap exclusionSizes) throws IOException, DocumentException {
    if (preClosed)
        throw new DocumentException("Document already pre closed.");
    preClosed = true;
    AcroFields af = writer.getAcroFields();
    String name = getFieldName();
    boolean fieldExists = !(isInvisible() || isNewField());
    PdfIndirectReference refSig = writer.getPdfIndirectReference();
    writer.setSigFlags(3);
    if (fieldExists) {
        PdfDictionary widget = af.getFieldItem(name).getWidget(0);
        writer.markUsed(widget);
        widget.put(PdfName.P, writer.getPageReference(getPage()));
        widget.put(PdfName.V, refSig);
        PdfObject obj = PdfReader.getPdfObjectRelease(widget.get(PdfName.F));
        int flags = 0;
        if (obj != null && obj.isNumber())
            flags = ((PdfNumber) obj).intValue();
        flags |= PdfAnnotation.FLAGS_LOCKED;
        widget.put(PdfName.F, new PdfNumber(flags));
        PdfDictionary ap = new PdfDictionary();
        ap.put(PdfName.N, getAppearance().getIndirectReference());
        widget.put(PdfName.AP, ap);
    } else {
        PdfFormField sigField = PdfFormField.createSignature(writer);
        sigField.setFieldName(name);
        sigField.put(PdfName.V, refSig);
        sigField.setFlags(PdfAnnotation.FLAGS_PRINT | PdfAnnotation.FLAGS_LOCKED);
        int pagen = getPage();
        if (!isInvisible())
            sigField.setWidget(getPageRect(), null);
        else
            sigField.setWidget(new Rectangle(0, 0), null);
        sigField.setAppearance(PdfAnnotation.APPEARANCE_NORMAL, getAppearance());
        sigField.setPage(pagen);
        writer.addAnnotation(sigField, pagen);
    }

    exclusionLocations = new HashMap();
    if (cryptoDictionary == null) {
        if (PdfName.ADOBE_PPKLITE.equals(getFilter()))
            sigStandard = new PdfSigGenericPKCS.PPKLite(getProvider());
        else if (PdfName.ADOBE_PPKMS.equals(getFilter()))
            sigStandard = new PdfSigGenericPKCS.PPKMS(getProvider());
        else if (PdfName.VERISIGN_PPKVS.equals(getFilter()))
            sigStandard = new PdfSigGenericPKCS.VeriSign(getProvider());
        else
            throw new IllegalArgumentException("Unknown filter: " + getFilter());
        sigStandard.setExternalDigest(externalDigest, externalRSAdata, digestEncryptionAlgorithm);
        if (getReason() != null)
            sigStandard.setReason(getReason());
        if (getLocation() != null)
            sigStandard.setLocation(getLocation());
        if (getContact() != null)
            sigStandard.setContact(getContact());
        sigStandard.put(PdfName.M, new PdfDate(getSignDate()));
        sigStandard.setSignInfo(getPrivKey(), getCertChain(), getCrlList());
        PdfString contents = (PdfString) sigStandard.get(PdfName.CONTENTS);
        PdfLiteral lit = new PdfLiteral((contents.toString().length() + (PdfName.ADOBE_PPKLITE.equals(getFilter()) ? 0 : 64)) * 2 + 2);
        exclusionLocations.put(PdfName.CONTENTS, lit);
        sigStandard.put(PdfName.CONTENTS, lit);
        lit = new PdfLiteral(80);
        exclusionLocations.put(PdfName.BYTERANGE, lit);
        sigStandard.put(PdfName.BYTERANGE, lit);
        if (certificationLevel > 0) {
            addDocMDP(sigStandard);
        }
        if (signatureEvent != null)
            signatureEvent.getSignatureDictionary(sigStandard);
        writer.addToBody(sigStandard, refSig, false);
    } else {
        PdfLiteral lit = new PdfLiteral(80);
        exclusionLocations.put(PdfName.BYTERANGE, lit);
        cryptoDictionary.put(PdfName.BYTERANGE, lit);
        for (Iterator it = exclusionSizes.entrySet().iterator(); it.hasNext();) {
            Map.Entry entry = (Map.Entry) it.next();
            PdfName key = (PdfName) entry.getKey();
            Integer v = (Integer) entry.getValue();
            lit = new PdfLiteral(v.intValue());
            exclusionLocations.put(key, lit);
            cryptoDictionary.put(key, lit);
        }
        if (certificationLevel > 0)
            addDocMDP(cryptoDictionary);
        if (signatureEvent != null)
            signatureEvent.getSignatureDictionary(cryptoDictionary);
        writer.addToBody(cryptoDictionary, refSig, false);
    }
    if (certificationLevel > 0) {
        // add DocMDP entry to root
        PdfDictionary docmdp = new PdfDictionary();
        docmdp.put(new PdfName("DocMDP"), refSig);
        writer.reader.getCatalog().put(new PdfName("Perms"), docmdp);
    }
    writer.close(stamper.getMoreInfo());

    range = new int[exclusionLocations.size() * 2];
    int byteRangePosition = ((PdfLiteral) exclusionLocations.get(PdfName.BYTERANGE)).getPosition();
    exclusionLocations.remove(PdfName.BYTERANGE);
    int idx = 1;
    for (Iterator it = exclusionLocations.values().iterator(); it.hasNext();) {
        PdfLiteral lit = (PdfLiteral) it.next();
        int n = lit.getPosition();
        range[idx++] = n;
        range[idx++] = lit.getPosLength() + n;
    }
    Arrays.sort(range, 1, range.length - 1);
    for (int k = 3; k < range.length - 2; k += 2)
        range[k] -= range[k - 1];

    if (tempFile == null) {
        bout = sigout.getBuffer();
        boutLen = sigout.size();
        range[range.length - 1] = boutLen - range[range.length - 2];
        ByteBuffer bf = new ByteBuffer();
        bf.append('[');
        for (int k = 0; k < range.length; ++k)
            bf.append(range[k]).append(' ');
        bf.append(']');
        System.arraycopy(bf.getBuffer(), 0, bout, byteRangePosition, bf.size());
    } else {
        try {
            raf = new RandomAccessFile(tempFile, "rw");
            int boutLen = (int) raf.length();
            range[range.length - 1] = boutLen - range[range.length - 2];
            ByteBuffer bf = new ByteBuffer();
            bf.append('[');
            for (int k = 0; k < range.length; ++k)
                bf.append(range[k]).append(' ');
            bf.append(']');
            raf.seek(byteRangePosition);
            raf.write(bf.getBuffer(), 0, bf.size());
        } catch (IOException e) {
            try { raf.close(); } catch (Exception ee) {}
            try { tempFile.delete(); } catch (Exception ee) {}
            throw e;
        }
    }
}
python
def save_and_close_attributes(self):
    '''
    Performs the same function as save_attributes but also closes the
    attribute file.
    '''
    if not self.saveable():
        raise AttributeError("Cannot save attribute file without a valid file")
    if not self._db_closed:
        self._db_closed = True
        if not self._read_only:
            self.save_attributes()
        self._fd.close()
python
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the VeSync switch platform."""
    if discovery_info is None:
        return

    switches = []

    manager = hass.data[DOMAIN]['manager']

    if manager.outlets is not None and manager.outlets:
        if len(manager.outlets) == 1:
            count_string = 'switch'
        else:
            count_string = 'switches'

        _LOGGER.info("Discovered %d VeSync %s",
                     len(manager.outlets), count_string)

        if len(manager.outlets) > 1:
            for switch in manager.outlets:
                switch._energy_update_interval = ENERGY_UPDATE_INT
                switches.append(VeSyncSwitchHA(switch))
                _LOGGER.info("Added a VeSync switch named '%s'",
                             switch.device_name)
        else:
            # A single discovered device is still a one-element list,
            # so unwrap it before building the entity.
            switches.append(VeSyncSwitchHA(manager.outlets[0]))
    else:
        _LOGGER.info("No VeSync switches found")

    add_entities(switches)
java
protected void fixPropertyValues(PropertyData prop) throws RepositoryException {
    final List<ValueData> vals = prop.getValues();
    for (int i = 0; i < vals.size(); i++) {
        ValueData vd = vals.get(i);
        if (!vd.isByteArray()) {
            // check if file is correct
            FilePersistedValueData fpvd = (FilePersistedValueData) vd;
            if (fpvd.getFile() == null) {
                if (fpvd instanceof StreamPersistedValueData && ((StreamPersistedValueData) fpvd).getUrl() != null) {
                    continue;
                }
                // error, value not found
                throw new RepositoryException("Value cannot be found in storage for cached Property "
                        + prop.getQPath().getAsString() + ", orderNumb:" + vd.getOrderNumber()
                        + ", pversion:" + prop.getPersistedVersion());
            }
        }
    }
}
java
public static int executeUpdate(PreparedStatement ps, Object... params) throws SQLException { StatementUtil.fillParams(ps, params); return ps.executeUpdate(); }
python
def _connect_pipeline(self, pipeline, required_outputs, workflow,
                      subject_inds, visit_inds, filter_array, force=False):
    """
    Connects a pipeline to an overarching workflow that sets up iterators
    over subjects|visits present in the repository (if required) and
    repository source and sink nodes

    Parameters
    ----------
    pipeline : Pipeline
        The pipeline to connect
    required_outputs : set[str] | None
        The outputs required to be produced by this pipeline. If None all
        are deemed to be required
    workflow : nipype.pipeline.engine.Workflow
        The overarching workflow to connect the pipeline to
    subject_inds : dct[str, int]
        A mapping of subject ID to row index in the filter array
    visit_inds : dct[str, int]
        A mapping of visit ID to column index in the filter array
    filter_array : 2-D numpy.array[bool]
        A two-dimensional boolean array, where rows correspond to subjects
        and columns correspond to visits in the repository. True values
        represent a combination of subject & visit ID to include in the
        current round of processing. Note that if the 'force' flag is not
        set, sessions won't be reprocessed unless the save provenance
        doesn't match that of the given pipeline.
    force : bool | 'all'
        A flag to force the processing of all sessions in the filter
        array, regardless of whether the parameters|pipeline used to
        generate existing data matches the given pipeline
    """
    if self.reprocess == 'force':
        force = True
    # Close-off construction of the pipeline and created, input and output
    # nodes and provenance dictionary
    pipeline.cap()
    # Prepend prerequisite pipelines to complete workflow if they need
    # to be (re)processed
    final_nodes = []
    # The array that represents the subject/visit pairs for which any
    # prerequisite pipeline will be (re)processed, and which therefore
    # needs to be included in the processing of the current pipeline. Row
    # indices correspond to subjects and column indices visits
    prqs_to_process_array = np.zeros((len(subject_inds), len(visit_inds)),
                                     dtype=bool)
    # The array that represents the subject/visit pairs for which any
    # prerequisite pipeline will be skipped due to missing inputs. Row
    # indices correspond to subjects and column indices visits
    prqs_to_skip_array = np.zeros((len(subject_inds), len(visit_inds)),
                                  dtype=bool)
    for getter_name in pipeline.prerequisites:
        prereq = pipeline.study.pipeline(getter_name)
        if prereq.to_process_array.any():
            final_nodes.append(prereq.node('final'))
            prqs_to_process_array |= prereq.to_process_array
        prqs_to_skip_array |= prereq.to_skip_array
    # Get list of sessions that need to be processed (i.e. if
    # they don't contain the outputs of this pipeline)
    to_process_array, to_protect_array, to_skip_array = self._to_process(
        pipeline, required_outputs, prqs_to_process_array,
        prqs_to_skip_array, filter_array, subject_inds, visit_inds, force)
    # Store the arrays signifying which nodes to process, protect or skip
    # so they can be passed to downstream pipelines
    pipeline.to_process_array = to_process_array
    pipeline.to_protect_array = to_protect_array
    pipeline.to_skip_array = to_skip_array
    # Check to see if there are any sessions to process
    if not to_process_array.any():
        raise ArcanaNoRunRequiredException(
            "No sessions to process for '{}' pipeline"
            .format(pipeline.name))
    # Set up workflow to run the pipeline, loading and saving from the
    # repository
    workflow.add_nodes([pipeline._workflow])
    # If prerequisite pipelines need to be processed, connect their
    # "final" nodes to the initial node of this pipeline to ensure that
    # they are all processed before this pipeline is run.
    if final_nodes:
        prereqs = pipeline.add('prereqs', Merge(len(final_nodes)))
        for i, final_node in enumerate(final_nodes, start=1):
            workflow.connect(final_node, 'out', prereqs, 'in{}'.format(i))
    else:
        prereqs = None
    # Construct iterator structure over subjects and sessions to be
    # processed
    iter_nodes = self._iterate(pipeline, to_process_array, subject_inds,
                               visit_inds)
    sources = {}
    # Loop through each frequency present in the pipeline inputs and
    # create a corresponding source node
    for freq in pipeline.input_frequencies:
        try:
            inputs = list(pipeline.frequency_inputs(freq))
        except ArcanaMissingDataException as e:
            raise ArcanaMissingDataException(
                str(e) + ", which is required for pipeline '{}'".format(
                    pipeline.name))
        inputnode = pipeline.inputnode(freq)
        sources[freq] = source = pipeline.add(
            '{}_source'.format(freq),
            RepositorySource(i.collection for i in inputs),
            inputs=({'prereqs': (prereqs, 'out')}
                    if prereqs is not None else {}))
        # Connect iter_nodes to source and input nodes
        for iterator in pipeline.iterators(freq):
            pipeline.connect(iter_nodes[iterator], iterator, source,
                             iterator)
            pipeline.connect(source, iterator, inputnode, iterator)
        for input in inputs:  # @ReservedAssignment
            pipeline.connect(source, input.suffixed_name, inputnode,
                             input.name)
    deiter_nodes = {}

    def deiter_node_sort_key(it):
        """
        If there are two iter_nodes (i.e. both subject and visit ID) and
        one depends on the other (i.e. if the visit IDs per subject vary
        and vice-versa) we need to ensure that the dependent iterator is
        deiterated (joined) first.
        """
        return iter_nodes[it].itersource is None

    # Connect all outputs to the repository sink, creating a new sink for
    # each frequency level (i.e 'per_session', 'per_subject', 'per_visit',
    # or 'per_study')
    for freq in pipeline.output_frequencies:
        outputs = list(pipeline.frequency_outputs(freq))
        if pipeline.iterators(freq) - pipeline.iterators():
            raise ArcanaDesignError(
                "Doesn't make sense to output '{}', which are of '{}' "
                "frequency, when the pipeline only iterates over '{}'"
                .format("', '".join(o.name for o in outputs), freq,
                        "', '".join(pipeline.iterators())))
        outputnode = pipeline.outputnode(freq)
        # Connect filesets/fields to sink to sink node, skipping outputs
        # that are study inputs
        to_connect = {o.suffixed_name: (outputnode, o.name)
                      for o in outputs if o.is_spec}
        # Connect iterators to sink node
        to_connect.update(
            {i: (iter_nodes[i], i) for i in pipeline.iterators()})
        # Connect checksums/values from sources to sink node in order to
        # save in provenance, joining where necessary
        for input_freq in pipeline.input_frequencies:
            checksums_to_connect = [
                i.checksum_suffixed_name
                for i in pipeline.frequency_inputs(input_freq)]
            if not checksums_to_connect:
                # Rare case of a pipeline with no inputs only iter_nodes
                # that will only occur in unittests in all likelihood
                continue
            # Loop over iterators that need to be joined, i.e. that are
            # present in the input frequency but not the output frequency,
            # and create join nodes
            source = sources[input_freq]
            for iterator in (pipeline.iterators(input_freq) -
                             pipeline.iterators(freq)):
                join = pipeline.add(
                    '{}_to_{}_{}_checksum_join'.format(
                        input_freq, freq, iterator),
                    IdentityInterface(checksums_to_connect),
                    inputs={tc: (source, tc)
                            for tc in checksums_to_connect},
                    joinsource=iterator,
                    joinfield=checksums_to_connect)
                source = join
            to_connect.update(
                {tc: (source, tc) for tc in checksums_to_connect})
        # Add sink node
        sink = pipeline.add(
            '{}_sink'.format(freq),
            RepositorySink((o.collection for o in outputs), pipeline),
            inputs=to_connect)
        # "De-iterate" (join) over iterators to get back to single child
        # node by the time we connect to the final node of the pipeline.
        # Set the sink and subject_id as the default deiterator if there
        # are no deiterates (i.e. per_study) or to use as the upstream
        # node to connect the first deiterator for every frequency
        deiter_nodes[freq] = sink  # for per_study the "deiterator" == sink
        for iterator in sorted(pipeline.iterators(freq),
                               key=deiter_node_sort_key):
            # Connect to previous deiterator or sink
            # NB: we only need to keep a reference to the last one in the
            # chain in order to connect with the "final" node, so we can
            # overwrite the entry in the 'deiter_nodes' dict
            deiter_nodes[freq] = pipeline.add(
                '{}_{}_deiter'.format(freq, iterator),
                IdentityInterface(['checksums']),
                inputs={'checksums': (deiter_nodes[freq], 'checksums')},
                joinsource=iterator,
                joinfield='checksums')
    # Create a final node, which is used to connect with downstream
    # pipelines
    pipeline.add(
        'final',
        Merge(len(deiter_nodes)),
        inputs={'in{}'.format(i): (di, 'checksums')
                for i, di in enumerate(deiter_nodes.values(), start=1)})
java
public byte[][] keypair() { byte[][] pair = new byte[2][]; byte[] publicKey = new byte[Size.PUBLICKEY.bytes()]; byte[] secretKey = new byte[Size.SECRETKEY.bytes()]; int rc = curve25519xsalsa20poly1305.crypto_box_keypair(publicKey, secretKey); assert (rc == 0); pair[0] = publicKey; pair[1] = secretKey; return pair; }
python
def _get_supported_for_any_abi(version=None, platform=None, impl=None, force_manylinux=False):
    """Generates supported tags for unspecified ABI types to support more
    intuitive cross-platform resolution."""
    unique_tags = {
        tag for abi in _gen_all_abis(impl, version)
        for tag in _get_supported(version=version,
                                  platform=platform,
                                  impl=impl,
                                  abi=abi,
                                  force_manylinux=force_manylinux)
    }
    return list(unique_tags)
python
def get_surface_equilibrium(self, slab_entries, delu_dict=None):
    """
    Takes in a list of SlabEntries and calculates the chemical potentials
    at which all slabs in the list coexists simultaneously. Useful for
    building surface phase diagrams. Note that to solve for x equations
    (x slab_entries), there must be x free variables (chemical
    potentials). Adjust delu_dict as need be to get the correct number of
    free variables.

    Args:
        slab_entries (array): The coefficients of the first equation
        delu_dict (Dict): Dictionary of the chemical potentials to be set
            as constant. Note the key should be a sympy Symbol object of
            the format: Symbol("delu_el") where el is the name of the
            element.

    Returns:
        (array): Array containing a solution to x equations with x
            variables (x-1 chemical potential and 1 surface energy)
    """
    # Generate all possible coefficients
    all_parameters = []
    all_eqns = []
    for slab_entry in slab_entries:
        se = self.surfe_dict[slab_entry]

        # remove the free chempots we wish to keep constant and
        # set the equation to 0 (subtract gamma from both sides)
        if type(se).__name__ == "float":
            all_eqns.append(se - Symbol("gamma"))
        else:
            se = sub_chempots(se, delu_dict) if delu_dict else se
            all_eqns.append(se - Symbol("gamma"))
            all_parameters.extend([p for p in list(se.free_symbols)
                                   if p not in all_parameters])

    all_parameters.append(Symbol("gamma"))
    # Now solve the system of linear eqns to find the chempot
    # where the slabs are at equilibrium with each other
    soln = linsolve(all_eqns, all_parameters)

    if not soln:
        warnings.warn("No solution")
        return soln

    return {p: list(soln)[0][i] for i, p in enumerate(all_parameters)}
python
def vlos(self, *args, **kwargs):
    """
    NAME:

       vlos

    PURPOSE:

       return the line-of-sight velocity (in km/s)

    INPUT:

       t - (optional) time at which to get vlos (can be Quantity)

       obs=[X,Y,Z,vx,vy,vz] - (optional) position and velocity of observer
           in the Galactocentric frame (in kpc and km/s)
           (default=[8.0,0.,0.,0.,220.,0.]; entries can be Quantity)
           OR Orbit object that corresponds to the orbit of the observer
           Y is ignored and always assumed to be zero

       ro= (Object-wide default) physical scale for distances to use to
           convert (can be Quantity)

       vo= (Object-wide default) physical scale for velocities to use to
           convert (can be Quantity)

    OUTPUT:

       vlos(t) in km/s

    HISTORY:

       2011-02-24 - Written - Bovy (NYU)

    """
    out = self._orb.vlos(*args, **kwargs)
    if len(out) == 1:
        return out[0]
    else:
        return out
java
protected void removeEditOverlays() { CmsInlineEditOverlay.removeAll(); m_editOverlay = null; if (m_resizeHandlerRegistration != null) { m_resizeHandlerRegistration.removeHandler(); m_resizeHandlerRegistration = null; } }
java
public Stream<Branch> getBranchesStream(Object projectIdOrPath) throws GitLabApiException { return (getBranches(projectIdOrPath, getDefaultPerPage()).stream()); }
python
def send_message(self, message, sign=True): """Send the given message to the connection. @type message: OmapiMessage @param sign: whether the message needs to be signed @raises OmapiError: @raises socket.error: """ if sign: message.sign(self.authenticators[self.defauth]) logger.debug("sending %s", LazyStr(message.dump_oneline)) self.transport.write(message.as_string())
java
protected static Boolean isRooted() { if (android.os.Build.TAGS != null && android.os.Build.TAGS.contains("test-keys")) { return true; } String[] probableRootPaths = { "/data/local/bin/su", "/data/local/su", "/data/local/xbin/su", "/sbin/su", "/su/bin", "/su/bin/su", "/system/app/SuperSU", "/system/app/SuperSU.apk", "/system/app/Superuser", "/system/app/Superuser.apk", "/system/bin/failsafe/su", "/system/bin/su", "/system/sd/xbin/su", "/system/xbin/daemonsu", "/system/xbin/su" }; for (String probableRootPath : probableRootPaths) { try { if (new File(probableRootPath).exists()) { return true; } } catch (Exception e) { Log.e(TAG, "Exception while attempting to detect whether the device is rooted", e); } } return false; }
java
@Override public String[] getCacheStatistics(String[] names) throws javax.management.AttributeNotFoundException { if (names == null) return null; DCache cache = null; if (ServerCache.servletCacheEnabled) { cache = ServerCache.cache; } String stats[] = null; if (cache != null) { Map statistics = getCacheStatisticsMap(cache.getCacheStatistics()); stats = new String[names.length]; for (int i = 0; i < names.length; i++) { if (!statistics.containsKey(names[i])) { Tr.error(tc, "DYNA1052E", new Object[] { names[i] }); throw new AttributeNotFoundException(names[i] + " is not a valid cache statistic name"); } stats[i] = names[i] + "=" + statistics.get(names[i]); } } else { stats = new String[0]; } return stats; }
java
@Override public void write(final File filename) throws DITAOTException { assert filename.isAbsolute(); setCurrentFile(filename.toURI()); super.write(filename); }
java
private boolean addStatement(StringBuffer statementBuffer) { if (statementBuffer.length() > 0) { elements.add(new Predicate(statementBuffer.toString())); elementsArray = elements.toArray(); statementBuffer.delete(0, statementBuffer.length()); return true; } return false; }
python
def indentation(logical_line, previous_logical, indent_char, indent_level, previous_indent_level): """ Use 4 spaces per indentation level. For really old code that you don't want to mess up, you can continue to use 8-space tabs. """ if indent_char == ' ' and indent_level % 4: return 0, "E111 indentation is not a multiple of four" indent_expect = previous_logical.endswith(':') if indent_expect and indent_level <= previous_indent_level: return 0, "E112 expected an indented block" if indent_level > previous_indent_level and not indent_expect: return 0, "E113 unexpected indentation"
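A tiny driver for the check above. The call protocol — return None when the line is fine, otherwise an (offset, message) tuple — follows pep8/pycodestyle's logical-line checks; the sample inputs are hypothetical:

cases = [
    # (logical_line, previous_logical, indent_char, indent_level,
    #  previous_indent_level)
    ("pass", "if x:", " ", 4, 0),   # OK -> None
    ("pass", "if x:", " ", 3, 0),   # E111: not a multiple of four
    ("pass", "if x:", " ", 0, 0),   # E112: expected an indented block
    ("x = 1", "y = 2", " ", 4, 0),  # E113: unexpected indentation
]
for case in cases:
    print(indentation(*case))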
java
private CmsADESessionCache getSessionCache() { if (m_sessionCache == null) { m_sessionCache = CmsADESessionCache.getCache(getRequest(), getCmsObject()); } return m_sessionCache; }
python
async def _query( self, path, method="GET", *, params=None, data=None, headers=None, timeout=None, chunked=None ): """ Get the response object by performing the HTTP request. The caller is responsible to finalize the response object. """ url = self._canonicalize_url(path) if headers and "content-type" not in headers: headers["content-type"] = "application/json" try: response = await self.session.request( method, url, params=httpize(params), headers=headers, data=data, timeout=timeout, chunked=chunked, ) except asyncio.TimeoutError: raise if (response.status // 100) in [4, 5]: what = await response.read() content_type = response.headers.get("content-type", "") response.close() if content_type == "application/json": raise DockerError(response.status, json.loads(what.decode("utf8"))) else: raise DockerError(response.status, {"message": what.decode("utf8")}) return response
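The status-class check generalizes: response.status // 100 maps any 4xx or 5xx code to 4 or 5. A standalone sketch of the same pattern with plain aiohttp (the URL is hypothetical):

import asyncio
import aiohttp

async def fetch_or_raise(url):
    async with aiohttp.ClientSession() as session:
        resp = await session.get(url)
        if resp.status // 100 in (4, 5):  # any client or server error
            body = await resp.read()
            resp.close()
            raise RuntimeError("%d: %s" % (resp.status, body.decode("utf8")))
        return await resp.text()

# asyncio.run(fetch_or_raise("http://example.invalid/"))  # would raise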
java
public static void runExample(AdManagerServices adManagerServices, AdManagerSession session) throws RemoteException { ContactServiceInterface contactService = adManagerServices.get(session, ContactServiceInterface.class); // Create a statement to select contacts. StatementBuilder statementBuilder = new StatementBuilder() .where("status = :status") .orderBy("id ASC") .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT) .withBindVariableValue("status", ContactStatus.UNINVITED.toString()); // Retrieve a small amount of contacts at a time, paging through // until all contacts have been retrieved. int totalResultSetSize = 0; do { ContactPage page = contactService.getContactsByStatement(statementBuilder.toStatement()); if (page.getResults() != null) { // Print out some information for each contact. totalResultSetSize = page.getTotalResultSetSize(); int i = page.getStartIndex(); for (Contact contact : page.getResults()) { System.out.printf( "%d) Contact with ID %d and name '%s' was found.%n", i++, contact.getId(), contact.getName() ); } } statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT); } while (statementBuilder.getOffset() < totalResultSetSize); System.out.printf("Number of results found: %d%n", totalResultSetSize); }
java
public String[] getValues() { return value == null || value.isEmpty() ? new String[0] : len == 1 ? new String[] { value } : value.split("\\s+"); }
python
def constrain_horizon(
    r,
    strict=False,
    cust=None,
    years=0,
    quarters=0,
    months=0,
    days=0,
    weeks=0,
    year=None,
    month=None,
    day=None,
):

    """Constrain a Series/DataFrame to a specified lookback period.

    See the documentation for dateutil.relativedelta:
    dateutil.readthedocs.io/en/stable/relativedelta.html

    Parameters
    ----------
    r : DataFrame or Series
        The target pandas object to constrain
    strict : bool, default False
        If True, raise Error if the implied start date on the horizon
        predates the actual start date of `r`.  If False, just return
        `r` in this situation
    years, months, weeks, days : int, default 0
        Relative information; specify as positive to subtract periods.
        Adding or subtracting a relativedelta with relative information
        performs the corresponding arithmetic operation on the original
        datetime value with the information in the relativedelta
    quarters : int, default 0
        Similar to the other plural relative info periods above, but note
        that this param is custom here.  (It is not a standard
        relativedelta param)
    year, month, day : int, default None
        Absolute information; specify as positive to subtract periods.
        Adding relativedelta with absolute information does not perform
        an arithmetic operation, but rather REPLACES the corresponding
        value in the original datetime with the value(s) in relativedelta
    """

    textnum = {
        "zero": 0,
        "one": 1,
        "two": 2,
        "three": 3,
        "four": 4,
        "five": 5,
        "six": 6,
        "seven": 7,
        "eight": 8,
        "nine": 9,
        "ten": 10,
        "eleven": 11,
        "twelve": 12,
        "thirteen": 13,
        "fourteen": 14,
        "fifteen": 15,
        "sixteen": 16,
        "seventeen": 17,
        "eighteen": 18,
        "nineteen": 19,
        "twenty": 20,
        "twenty four": 24,
        "thirty six": 36,
    }

    relativedeltas = years, quarters, months, days, weeks, year, month, day
    if cust is not None and any(relativedeltas):
        raise ValueError(
            "Cannot specify competing (nonzero) values for both"
            " `cust` and other parameters."
        )
    if cust is not None:
        cust = cust.lower()

        if cust.endswith("y"):
            years = int(re.search(r"\d+", cust).group(0))

        elif cust.endswith("m"):
            months = int(re.search(r"\d+", cust).group(0))

        elif cust.endswith(("years ago", "year ago", "year", "years")):
            pos = cust.find(" year")
            years = textnum[cust[:pos].replace("-", "")]

        elif cust.endswith(("months ago", "month ago", "month", "months")):
            pos = cust.find(" month")
            months = textnum[cust[:pos].replace("-", "")]

        else:
            raise ValueError("`cust` not recognized.")

    # Convert quarters to months & combine for MonthOffset
    months += quarters * 3

    # Start date will be computed relative to `end`
    end = r.index[-1]

    # Establish some funky date conventions assumed in finance.  If the end
    # date is 6/30, the date *3 months prior* is 3/31, not 3/30 as would be
    # produced by dateutil.relativedelta.

    if end.is_month_end and days == 0 and weeks == 0:
        if years != 0:
            years *= 12
            months += years
        start = end - offsets.MonthBegin(months)
    else:
        start = end - offsets.DateOffset(
            years=years,
            months=months,
            days=days - 1,
            weeks=weeks,
            year=year,
            month=month,
            day=day,
        )
    if strict and start < r.index[0]:
        raise ValueError(
            "`start` pre-dates first element of the Index, %s" % r.index[0]
        )
    return r[start:end]
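The month-end convention is the subtle part, so here is a standalone illustration with pandas offsets of why MonthBegin is used for month-end anchors instead of plain relative arithmetic (the dates are made up):

import pandas as pd
from pandas.tseries import offsets

end = pd.Timestamp("2021-06-30")
# Covers April, May, and June in full when slicing r[start:end].
print(end - offsets.MonthBegin(3))    # 2021-04-01
# Naive relative arithmetic lands mid-month instead.
print(end - pd.DateOffset(months=3))  # 2021-03-30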
java
private List<Pair<Integer, Integer>> doGenerateEdgesWithOmitList() {
    final int numberOfNodes = getConfiguration().getNumberOfNodes();
    final int numberOfEdges = getConfiguration().getNumberOfEdges();
    // Cast before multiplying so the maximum edge count cannot overflow
    // int for large node counts.
    final long maxEdges = (long) numberOfNodes * (numberOfNodes - 1) / 2;

    final List<Pair<Integer, Integer>> edges = new LinkedList<>();
    for (Long index : edgeIndices(numberOfEdges, maxEdges)) {
        edges.add(indexToEdgeBijection(index));
    }
    return edges;
}
python
def host_names(urls): ''' Takes a StringCounter of normalized URL and parses their hostnames N.B. this assumes that absolute URLs will begin with http:// in order to accurately resolve the host name. Relative URLs will not have host names. ''' host_names = StringCounter() for url in urls: host_names[urlparse(url).netloc] += urls[url] return host_names
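The same tally works with a plain collections.Counter standing in for the project-specific StringCounter (a sketch, with made-up counts):

from collections import Counter
from urllib.parse import urlparse

urls = Counter({"http://example.com/a": 3, "http://example.com/b": 1,
                "http://docs.python.org/3/": 2})
hosts = Counter()
for url, count in urls.items():
    hosts[urlparse(url).netloc] += count
print(hosts)  # Counter({'example.com': 4, 'docs.python.org': 2})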
java
@Override public List<CPDAvailabilityEstimate> findByCommerceAvailabilityEstimateId( long commerceAvailabilityEstimateId, int start, int end, OrderByComparator<CPDAvailabilityEstimate> orderByComparator) { return findByCommerceAvailabilityEstimateId(commerceAvailabilityEstimateId, start, end, orderByComparator, true); }
java
@Override public List<CommerceWarehouseItem> findByCPI_CPIU(long CProductId, String CPInstanceUuid, int start, int end, OrderByComparator<CommerceWarehouseItem> orderByComparator) { return findByCPI_CPIU(CProductId, CPInstanceUuid, start, end, orderByComparator, true); }
java
public void setInterval(ReadableInterval interval) { if (interval == null) { throw new IllegalArgumentException("Interval must not be null"); } long startMillis = interval.getStartMillis(); long endMillis = interval.getEndMillis(); Chronology chrono = interval.getChronology(); super.setInterval(startMillis, endMillis, chrono); }
java
private boolean isJawrImageTag(ComponentTag tag) {
    String tagName = tag.getName();
    // Compare with "image".equals(...) so a missing "type" attribute
    // cannot cause a NullPointerException.
    return (tagName.equalsIgnoreCase("img") || (tagName
            .equalsIgnoreCase("input") && "image".equals(tag
            .getAttribute("type"))));
}
java
public void formatLine(String line, PrintWriter writer) {
    processingState.setLine(line);
    processingState.pwriter = writer;
    // Equivalent to the original call-then-loop structure, expressed as
    // a single do-while: format partial lines until the line is done.
    do {
        processingState = processingState.formatState.formatPartialLine(processingState);
    } while (!processingState.isDoneWithLine());
}
java
@Override public SparkAppHandle startApplication(SparkAppHandle.Listener... listeners) throws IOException { if (builder.isClientMode(builder.getEffectiveConfig())) { LOG.warning("It's not recommended to run client-mode applications using InProcessLauncher."); } Method main = findSparkSubmit(); LauncherServer server = LauncherServer.getOrCreateServer(); InProcessAppHandle handle = new InProcessAppHandle(server); for (SparkAppHandle.Listener l : listeners) { handle.addListener(l); } String secret = server.registerHandle(handle); setConf(LauncherProtocol.CONF_LAUNCHER_PORT, String.valueOf(server.getPort())); setConf(LauncherProtocol.CONF_LAUNCHER_SECRET, secret); List<String> sparkArgs = builder.buildSparkSubmitArgs(); String[] argv = sparkArgs.toArray(new String[sparkArgs.size()]); String appName = CommandBuilderUtils.firstNonEmpty(builder.appName, builder.mainClass, "<unknown>"); handle.start(appName, main, argv); return handle; }
java
private void pushBasicProfile(JSONObject baseProfile) { try { String guid = getCleverTapID(); JSONObject profileEvent = new JSONObject(); if (baseProfile != null && baseProfile.length() > 0) { Iterator i = baseProfile.keys(); while (i.hasNext()) { String next = i.next().toString(); // need to handle command-based JSONObject props here now Object value = null; try { value = baseProfile.getJSONObject(next); } catch (Throwable t) { try { value = baseProfile.get(next); } catch (JSONException e) { //no-op } } if (value != null) { profileEvent.put(next, value); // cache the valid identifier: guid pairs if (Constants.PROFILE_IDENTIFIER_KEYS.contains(next)) { try { cacheGUIDForIdentifier(guid, next, value.toString()); } catch (Throwable t) { // no-op } } } } } try { String carrier = this.deviceInfo.getCarrier(); if (carrier != null && !carrier.equals("")) { profileEvent.put("Carrier", carrier); } String cc = this.deviceInfo.getCountryCode(); if (cc != null && !cc.equals("")) { profileEvent.put("cc", cc); } profileEvent.put("tz", TimeZone.getDefault().getID()); JSONObject event = new JSONObject(); event.put("profile", profileEvent); queueEvent(context, event, Constants.PROFILE_EVENT); } catch (JSONException e) { getConfigLogger().verbose(getAccountId(), "FATAL: Creating basic profile update event failed!"); } } catch (Throwable t) { getConfigLogger().verbose(getAccountId(), "Basic profile sync", t); } }
java
public JobInner create(String resourceGroupName, String accountName, String transformName, String jobName, JobInner parameters) { return createWithServiceResponseAsync(resourceGroupName, accountName, transformName, jobName, parameters).toBlocking().single().body(); }