language (stringclasses, 2 values)
func_code_string (stringlengths, 63 to 466k)
java
private List<List<EventData>> split(List<EventData> datas) { List<List<EventData>> result = new ArrayList<List<EventData>>(); if (datas == null || datas.size() == 0) { return result; } else { int[] bits = new int[datas.size()];// flag array marking whether each record has been assigned to a batch for (int i = 0; i < bits.length; i++) { // skip records already assigned to a batch while (i < bits.length && bits[i] == 1) { i++; } if (i >= bits.length) { // all records processed, exit break; } // start a new batch, adding at most batchSize objects List<EventData> batch = new ArrayList<EventData>(); bits[i] = 1; batch.add(datas.get(i)); for (int j = i + 1; j < bits.length && batch.size() < batchSize; j++) { if (bits[j] == 0 && canBatch(datas.get(i), datas.get(j))) { batch.add(datas.get(j)); bits[j] = 1;// mark as assigned } } result.add(batch); } return result; } }
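For readers tracing the batching logic above: each unassigned record seeds a new batch, and later unassigned records that are compatible with that seed are pulled in until batchSize is reached. A minimal Python sketch of the same greedy algorithm, assuming a hypothetical can_batch(a, b) predicate:

def split(datas, batch_size, can_batch):
    # Greedy grouping: every record ends up in exactly one batch, and
    # compatibility is always checked against the batch's first element.
    result = []
    assigned = [False] * len(datas)
    for i in range(len(datas)):
        if assigned[i]:
            continue
        batch = [datas[i]]
        assigned[i] = True
        for j in range(i + 1, len(datas)):
            if len(batch) >= batch_size:
                break
            if not assigned[j] and can_batch(datas[i], datas[j]):
                batch.append(datas[j])
                assigned[j] = True
        result.append(batch)
    return result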
python
def apply(self, func, applyto='measurement', noneval=nan, setdata=False): """ Apply func either to self or to associated data. If data is not already parsed, try and read it. Parameters ---------- func : callable The function either accepts a measurement object or an FCS object. Does some calculation and returns the result. applyto : ['data' | 'measurement'] * 'data' : apply to associated data * 'measurement' : apply to measurement object itself. noneval : obj Value to return if `applyto` is 'data', but no data is available. setdata : bool Used only if data is not already set. If true parsed data will be assigned to self.data Otherwise data will be discarded at end of apply. """ applyto = applyto.lower() if applyto == 'data': if self.data is not None: data = self.data elif self.datafile is None: return noneval else: data = self.read_data() if setdata: self.data = data return func(data) elif applyto == 'measurement': return func(self) else: raise ValueError('Encountered unsupported value "%s" for applyto parameter.' % applyto)
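A hypothetical usage sketch of the apply dispatch above; the measurement object and the event-counting lambda are assumptions for illustration:

# Apply to the parsed data, returning -1 when no data file is attached;
# setdata=True keeps the parsed data on the object for later reuse.
n_events = measurement.apply(lambda data: len(data), applyto='data',
                             noneval=-1, setdata=True)

# Apply to the measurement object itself.
datafile = measurement.apply(lambda m: m.datafile, applyto='measurement')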
java
public void add(TagLibTag libTag, Tag tag, FunctionLib[] flibs, SourceCode cfml) { tags.add(new TagData(libTag, tag, flibs, cfml)); }
java
public Object getGetterOrSetter(String name, int index, boolean isSetter) { if (name != null && index != 0) throw new IllegalArgumentException(name); Slot slot = slotMap.query(name, index); if (slot == null) return null; if (slot instanceof GetterSlot) { GetterSlot gslot = (GetterSlot)slot; Object result = isSetter ? gslot.setter : gslot.getter; return result != null ? result : Undefined.instance; } return Undefined.instance; }
python
def wait(self) -> None: """Wait for the process to finish""" if self._out_thread.is_alive(): self._out_thread.join() if self._err_thread.is_alive(): self._err_thread.join() # Handle case where the process ended before the last read could be done. # This will return None for the streams that weren't pipes. out, err = self._proc.communicate() if out: self._write_bytes(self._stdout, out) if err: self._write_bytes(self._stderr, err)
python
def get_arp_output_arp_entry_entry_type(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_arp = ET.Element("get_arp") config = get_arp output = ET.SubElement(get_arp, "output") arp_entry = ET.SubElement(output, "arp-entry") ip_address_key = ET.SubElement(arp_entry, "ip-address") ip_address_key.text = kwargs.pop('ip_address') entry_type = ET.SubElement(arp_entry, "entry-type") entry_type.text = kwargs.pop('entry_type') callback = kwargs.pop('callback', self._callback) return callback(config)
java
@SuppressWarnings("nls") public static SSLSessionStrategy buildMutual(TLSOptions optionsMap) throws KeyManagementException, NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException, UnrecoverableKeyException { Args.notNull(optionsMap.getKeyStore(), "KeyStore"); Args.notEmpty(optionsMap.getKeyStore(), "KeyStore must not be empty"); String[] allowedProtocols = arrayDifference(optionsMap.getAllowedProtocols(), optionsMap.getDisallowedProtocols(), getDefaultProtocols()); String[] allowedCiphers = arrayDifference(optionsMap.getAllowedCiphers(), optionsMap.getDisallowedCiphers(), getDefaultCipherSuites()); return build(optionsMap.getTrustStore(), optionsMap.getTrustStorePassword(), optionsMap.getKeyStore(), optionsMap.getKeyStorePassword(), optionsMap.getKeyAliases(), optionsMap.getKeyPassword(), allowedProtocols, allowedCiphers, optionsMap.isAllowAnyHost(), optionsMap.isTrustSelfSigned()); }
python
def post(self, request): '''Create a token, given an email and password. Removes all other tokens for that user.''' serializer = CreateTokenSerializer(data=request.data) serializer.is_valid(raise_exception=True) email = serializer.validated_data.get('email') password = serializer.validated_data.get('password') user = authenticate(username=email, password=password) if not user: return Response(status=status.HTTP_401_UNAUTHORIZED) Token.objects.filter(user=user).delete() token = Token.objects.create(user=user) return Response( status=status.HTTP_201_CREATED, data={'token': token.key})
python
def run(): """Run cpp coverage.""" import json import os import sys from . import coverage, report args = coverage.create_args(sys.argv[1:]) if args.verbose: print('encodings: {}'.format(args.encodings)) yml = parse_yaml_config(args) if not args.repo_token: # try get token from yaml first args.repo_token = yml.get('repo_token', '') if not args.repo_token: # use environment COVERALLS_REPO_TOKEN as a fallback args.repo_token = os.environ.get('COVERALLS_REPO_TOKEN') args.service_name = yml.get('service_name', 'travis-ci') if not args.gcov_options: args.gcov_options = yml.get('gcov_options', '') if not args.root: args.root = yml.get('root', '.') if not args.build_root: args.build_root = yml.get('build_root', '') args.exclude.extend(yml.get('exclude', [])) args.include.extend(yml.get('include', [])) args.exclude_lines_pattern.extend(yml.get('exclude_lines_pattern', [])) args.service_job_id = os.environ.get('TRAVIS_JOB_ID', '') if args.repo_token == '' and args.service_job_id == '': raise ValueError("\nno coveralls.io token specified and no travis job id found\n" "see --help for examples on how to specify a token\n") if not args.no_gcov: coverage.run_gcov(args) cov_report = coverage.collect(args) if args.verbose: print(cov_report) if args.dryrun: return 0 if args.dump: args.dump.write(json.dumps(cov_report)) return 0 return report.post_report(cov_report, args)
java
public Observable<Page<SasTokenInfoInner>> listSasTokensAsync(final String resourceGroupName, final String accountName, final String storageAccountName, final String containerName) { return listSasTokensWithServiceResponseAsync(resourceGroupName, accountName, storageAccountName, containerName) .map(new Func1<ServiceResponse<Page<SasTokenInfoInner>>, Page<SasTokenInfoInner>>() { @Override public Page<SasTokenInfoInner> call(ServiceResponse<Page<SasTokenInfoInner>> response) { return response.body(); } }); }
python
def get_runner(worker_type, max_workers=None, workers_window=None): """Returns a runner callable. :param str worker_type: one of `simple` or `thread`. :param int max_workers: max workers the runner can spawn in parallel. :param int workers_window: max number of jobs waiting to be done by the workers at any given time. :return: """ worker_func = _runners_mapping[worker_type] return partial( worker_func, max_workers=max_workers, workers_window=workers_window )
python
def min(self): """Return the minimum of ``self``. See Also -------- numpy.amin max """ results = [x.ufuncs.min() for x in self.elem] return np.min(results)
python
def hex_to_rgb(color): """ Converts from hex to rgb Parameters: ----------- color : string Color representation in hex or rgb Example: hex_to_rgb('#E1E5ED') hex_to_rgb('#f03') """ color = normalize(color) color = color[1:] # return 'rgb'+str(tuple(ord(c) for c in color.decode('hex'))) return 'rgb' + str((int(color[0:2], base=16), int(color[2:4], base=16), int(color[4:6], base=16)))
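To make the base-16 slicing concrete: '#E1E5ED' splits into 'E1', 'E5', 'ED', and int('E1', base=16) is 225, so the call returns 'rgb(225, 229, 237)'. A self-contained sketch without the module's normalize() helper, assuming a full six-digit input:

def hex_to_rgb_simple(color):
    color = color.lstrip('#')  # '#E1E5ED' -> 'E1E5ED'
    r, g, b = (int(color[i:i + 2], base=16) for i in (0, 2, 4))
    return 'rgb(%d, %d, %d)' % (r, g, b)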
python
def read(self, addr, size): '''Read access. :param addr: i2c slave address :type addr: char :param size: size of transfer :type size: int :returns: data byte array :rtype: array.array('B') ''' self.set_addr(addr | 0x01) self.set_size(size) self.start() while not self.is_ready: pass return self.get_data(size)
java
public static Specification<JpaTarget> hasInstalledOrAssignedDistributionSet(@NotNull final Long distributionId) { return (targetRoot, query, cb) -> cb.or( cb.equal(targetRoot.get(JpaTarget_.installedDistributionSet).get(JpaDistributionSet_.id), distributionId), cb.equal(targetRoot.<JpaDistributionSet> get(JpaTarget_.assignedDistributionSet) .get(JpaDistributionSet_.id), distributionId)); }
python
def on_stop_scene(self, event: events.StopScene, signal: Callable[[Any], None]): """ Stop a running scene. If there's a scene on the stack, it resumes. """ self.stop_scene() if self.current_scene is not None: signal(events.SceneContinued()) else: signal(events.Quit())
python
def bokeh_tree(name, rawtext, text, lineno, inliner, options=None, content=None): ''' Link to a URL in the Bokeh GitHub tree, pointing to appropriate tags for releases, or to master otherwise. The link text is simply the URL path supplied, so typical usage might look like: .. code-block:: none All of the examples are located in the :bokeh-tree:`examples` subdirectory of your Bokeh checkout. Returns 2 part tuple containing list of nodes to insert into the document and a list of system messages. Both are allowed to be empty. ''' app = inliner.document.settings.env.app tag = app.env.config['version'] if '-' in tag: tag = 'master' url = "%s/tree/%s/%s" % (_BOKEH_GH, tag, text) options = options or {} set_classes(options) node = nodes.reference(rawtext, text, refuri=url, **options) return [node], []
python
def extract(fileobj, keywords, comment_tags, options): """Extracts translation messages from underscore template files. This method also extracts Django templates. If a template does not contain any Django translation tags we always fall back to underscore extraction. This is a plugin to Babel, written according to http://babel.pocoo.org/docs/messages/#writing-extraction-methods :param fileobj: the file-like object the messages should be extracted from :param keywords: a list of keywords (i.e. function names) that should be recognized as translation functions :param comment_tags: a list of translator tags to search for and include in the results :param options: a dictionary of additional options (optional) :return: an iterator over ``(lineno, funcname, message, comments)`` tuples :rtype: ``iterator`` """ encoding = options.get('encoding', 'utf-8') original_position = fileobj.tell() text = fileobj.read().decode(encoding) if django.VERSION[:2] >= (1, 9): tokens = Lexer(text).tokenize() else: tokens = Lexer(text, None).tokenize() vars = [token.token_type != TOKEN_TEXT for token in tokens] could_be_django = any(list(vars)) if could_be_django: fileobj.seek(original_position) iterator = extract_django(fileobj, keywords, comment_tags, options) for lineno, funcname, message, comments in iterator: yield lineno, funcname, message, comments else: # Underscore template extraction comments = [] fileobj.seek(original_position) for lineno, line in enumerate(fileobj, 1): funcname = None stream = TokenStream.from_tuple_iter(tokenize(line, underscore.rules)) while not stream.eof: if stream.current.type == 'gettext_begin': stream.expect('gettext_begin') funcname = stream.expect('func_name').value args, kwargs = parse_arguments(stream, 'gettext_end') strings = [] for arg, argtype in args: if argtype == 'func_string_arg': strings.append(force_text(arg)) else: strings.append(None) for arg in kwargs: strings.append(None) if len(strings) == 1: strings = strings[0] else: strings = tuple(strings) yield lineno, funcname, strings, [] stream.next()
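Extraction methods like the one above are discovered by Babel through the babel.extractors entry-point group, so pybabel can refer to them by name in a mapping file. A sketch of the setup.py wiring, with package and module paths as placeholders:

from setuptools import setup

setup(
    name='underscore-babel',  # placeholder package name
    entry_points={
        'babel.extractors': [
            'underscore = myapp.extract:extract',  # placeholder path
        ],
    },
)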
python
def process_direct_map(self, root, state) -> List[Dict]: """Handles tags that are mapped directly from xml to IR with no additional processing other than recursive translation of any child nodes.""" val = {"tag": root.tag, "args": []} for node in root: val["args"] += self.parseTree(node, state) return [val]
java
public static int getPid(Process process) { if (!process.getClass().getName().equals("java.lang.UNIXProcess")) throw new UnsupportedOperationException("This operation is only supported in POSIX environments (Linux/Unix/MacOS)"); if (pidField == null) { // benign race try { Field f = process.getClass().getDeclaredField("pid"); f.setAccessible(true); pidField = f; } catch (NoSuchFieldException e) { throw new AssertionError(e); } catch (SecurityException e) { throw new RuntimeException(e); } } try { return pidField.getInt(process); } catch (IllegalAccessException e) { throw new AssertionError(e); } }
java
public static void openDialogInWindow(final I_Callback callback, String windowCaption) { final Window window = CmsBasicDialog.prepareWindow(); window.setCaption(windowCaption); CmsSiteSelectDialog dialog = new CmsSiteSelectDialog(); window.setContent(dialog); dialog.setCallback(new I_Callback() { public void onCancel() { window.close(); callback.onCancel(); } public void onSiteSelect(String site) { window.close(); callback.onSiteSelect(site); } }); A_CmsUI.get().addWindow(window); }
python
def _serialize_value(self, value): """ Serialize values like date and time. Respect other values: :return: """ if isinstance(value, datetime.datetime): return value.strftime(self.datetime_format) elif isinstance(value, datetime.date): return value.strftime(self.date_format) elif isinstance(value, datetime.time): return value.strftime(self.time_format) return text_type(value)
python
def download(url, output_file=None, open_file=True, allow_overwrite=False): '''Download a file from URL. Args: url (str): URL. output_file (str, optional): If given, the downloaded file is written to the given path. open_file (bool): If True, it returns an opened file stream of the downloaded file. allow_overwrite (bool): If True, it overwrites an existing file. Returns: Returns file object if open_file is True, otherwise None. ''' filename = url.split('/')[-1] if output_file is None: cache = os.path.join(get_data_home(), filename) else: cache = output_file if os.path.exists(cache) and not allow_overwrite: logger.info("> {} already exists.".format(cache)) logger.info("> If you have any issue when using this file, ") logger.info("> manually remove the file and try download again.") else: r = request.urlopen(url) try: if six.PY2: content_length = int(r.info().dict['content-length']) elif six.PY3: content_length = int(r.info()['Content-Length']) except: content_length = 0 unit = 1000000 content = b'' with tqdm(total=content_length, desc=filename, unit='B', unit_scale=True, unit_divisor=1024) as t: while True: data = r.read(unit) l = len(data) t.update(l) if l == 0: break content += data with open(cache, 'wb') as f: f.write(content) if not open_file: return return open(cache, 'rb')
python
def square(left, top, length, filled=False, thickness=1): """Returns a generator that produces (x, y) tuples for a square. This function is an alias for the rectangle() function, with `length` passed for both the `width` and `height` parameters. The `left` and `top` arguments are the x and y coordinates for the topleft corner of the square. If `filled` is `True`, the interior points are also returned. NOTE: The `thickness` argument is not yet implemented. >>> list(square(0, 0, 5)) [(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (4, 1), (4, 2), (4, 3), (4, 4), (3, 4), (2, 4), (1, 4), (0, 4), (0, 3), (0, 2), (0, 1)] >>> drawPoints(square(0, 0, 5)) OOOOO O,,,O O,,,O O,,,O OOOOO >>> drawPoints(square(0, 0, 5, filled=True)) OOOOO OOOOO OOOOO OOOOO OOOOO """ if thickness != 1: raise NotImplementedError('The pybresenham module is under development and the filled, thickness, and endcap parameters are not implemented. You can contribute at https://github.com/asweigart/pybresenham') return rectangle(left, top, length, length, filled, thickness)
java
@Override public final Object retrieve(int handle) throws DataStoreException { if (SAFE_MODE) checkHandle(handle); // Compute total size int totalSize = computeSize(handle); // Retrieve all blocks byte[] data = new byte[totalSize]; int offset = 0; int current = handle; while (current != -1) { int blockLen = allocatedSize[current]; // Read block from map file readDataBlock(data,offset,blockLen,current); offset += blockLen; if ((flags[current] & FLAG_END_BLOCK) > 0) break; current = nextBlock[current]; } if (current == -1) throw new DataStoreException("Can't find end block for "+handle); return data; }
python
def show_idle_pc_prop(self): """ Dumps the idle PC proposals (previously generated). :returns: list of idle PC proposal """ is_running = yield from self.is_running() if not is_running: # router is not running raise DynamipsError('Router "{name}" is not running'.format(name=self._name)) proposals = yield from self._hypervisor.send('vm show_idle_pc_prop "{}" 0'.format(self._name)) return proposals
python
def list_devices(): """ List devices via HTTP GET. """ output = {} for device_id, device in devices.items(): output[device_id] = { 'host': device.host, 'state': device.state } return jsonify(devices=output)
python
def delete_port_postcommit(self, context): """Delete the port from CVX""" port = context.current log_context("delete_port_postcommit: port", port) self._delete_port_resources(port, context.host) self._try_to_release_dynamic_segment(context)
java
public void disablePresence(String privateKey, String channel, OnDisablePresence callback) throws OrtcNotConnectedException { if (!this.isConnected) { throw new OrtcNotConnectedException(); } else { String presenceUrl = this.isCluster ? this.clusterUrl : this.url; Ortc.disablePresence(presenceUrl, this.isCluster, this.applicationKey, privateKey, channel, this.proxy, callback); } }
java
public void findUnsafeOperatorsForDDL(UnsafeOperatorsForDDL ops) { if ( ! m_type.isSafeForDDL()) { ops.add(m_type.symbol()); } if (m_left != null) { m_left.findUnsafeOperatorsForDDL(ops); } if (m_right != null) { m_right.findUnsafeOperatorsForDDL(ops); } if (m_args != null) { for (AbstractExpression arg : m_args) { arg.findUnsafeOperatorsForDDL(ops); } } }
python
def load_map(map, src_file, output_dir, scale=1, cache_dir=None, datasources_cfg=None, user_styles=[], verbose=False): """ Apply a stylesheet source file to a given mapnik Map instance, like mapnik.load_map(). Parameters: map: Instance of mapnik.Map. src_file: Location of stylesheet .mml file. Can be relative path, absolute path, or fully-qualified URL of a remote stylesheet. output_dir: ... Keyword Parameters: scale: Optional scale value for output map, 2 doubles the size for high-res displays. cache_dir: ... datasources_cfg: ... user_styles: An optional list of files or URLs, that override styles defined in the map source. These are evaluated in order, with declarations from later styles overriding those from earlier styles. verbose: ... """ scheme, n, path, p, q, f = urlparse(src_file) if scheme in ('file', ''): assert exists(src_file), "We'd prefer an input file that exists to one that doesn't" if cache_dir is None: cache_dir = expanduser(CACHE_DIR) # only make the cache dir if it wasn't user-provided if not isdir(cache_dir): mkdir(cache_dir) chmod(cache_dir, 0o755) dirs = Directories(output_dir, realpath(cache_dir), dirname(src_file)) compile(src_file, dirs, verbose, datasources_cfg=datasources_cfg, user_styles=user_styles, scale=scale).to_mapnik(map, dirs)
python
def _read_object(self, correlation_id, parameters): """ Reads configuration file, parameterizes its content and converts it into JSON object. :param correlation_id: (optional) transaction id to trace execution through call chain. :param parameters: values used to parameterize the configuration. :return: a JSON object with configuration. """ path = self.get_path() if path is None: raise ConfigException(correlation_id, "NO_PATH", "Missing config file path") if not os.path.isfile(path): raise FileException(correlation_id, 'FILE_NOT_FOUND', 'Config file was not found at ' + path) try: with open(path, 'r') as file: config = file.read() config = self._parameterize(config, parameters) return yaml.load(config) except Exception as ex: raise FileException( correlation_id, "READ_FAILED", "Failed reading configuration " + path + ": " + str(ex) ).with_details("path", path).with_cause(ex)
java
public void mapInPlace(Function<Double, Double> fn) { for (int i = 0; i < pointers.length; i++) { if (pointers[i] == null) continue; if (copyOnWrite[i]) { copyOnWrite[i] = false; pointers[i] = pointers[i].clone(); } if (sparse[i]) { for (int j = 0; j < pointers[i].length / 2; j++) { pointers[i][(j * 2) + 1] = fn.apply(pointers[i][(j * 2) + 1]); } } else { for (int j = 0; j < pointers[i].length; j++) { pointers[i][j] = fn.apply(pointers[i][j]); } } } }
java
public DiagnosticCategoryInner getSiteDiagnosticCategorySlot(String resourceGroupName, String siteName, String diagnosticCategory, String slot) { return getSiteDiagnosticCategorySlotWithServiceResponseAsync(resourceGroupName, siteName, diagnosticCategory, slot).toBlocking().single().body(); }
java
private static String toCanonicalName(String className) { className = StringUtils.deleteWhitespace(className); Validate.notNull(className, "className must not be null."); if (className.endsWith("[]")) { final StringBuilder classNameBuffer = new StringBuilder(); while (className.endsWith("[]")) { className = className.substring(0, className.length() - 2); classNameBuffer.append("["); } final String abbreviation = abbreviationMap.get(className); if (abbreviation != null) { classNameBuffer.append(abbreviation); } else { classNameBuffer.append("L").append(className).append(";"); } className = classNameBuffer.toString(); } return className; }
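The loop above builds a JVM binary name: each trailing '[]' becomes a leading '[', and a non-primitive element type is wrapped as 'Lname;' (primitives come from abbreviationMap, e.g. 'int' -> 'I'). A small Python sketch of the same encoding, with a trimmed abbreviation table as an assumption:

ABBREVIATIONS = {'int': 'I', 'boolean': 'Z', 'long': 'J', 'double': 'D'}

def to_canonical_name(class_name):
    # 'java.lang.String[][]' -> '[[Ljava.lang.String;', 'int[]' -> '[I'
    prefix = ''
    while class_name.endswith('[]'):
        class_name = class_name[:-2]
        prefix += '['
    if not prefix:
        return class_name  # non-array names pass through unchanged
    return prefix + ABBREVIATIONS.get(class_name, 'L%s;' % class_name)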
java
private Remote getRemoteAnnotation(Annotated annotated) { Remote remote = annotated.getAnnotation(Remote.class); if (remote == null) { remote = getMetaAnnotation(annotated, Remote.class); } return remote; }
java
static Matcher zeroOrMoreFalse(Matcher matcher) { if (matcher instanceof ZeroOrMoreMatcher) { ZeroOrMoreMatcher zm = matcher.as(); if (zm.repeated() instanceof FalseMatcher || zm.next() instanceof FalseMatcher) { return zm.next(); } } return matcher; }
java
@Private static StringBuilder read(Readable from) throws IOException { StringBuilder builder = new StringBuilder(); CharBuffer buf = CharBuffer.allocate(2048); for (; ; ) { int r = from.read(buf); if (r == -1) break; buf.flip(); builder.append(buf, 0, r); } return builder; }
java
private boolean executeGitHasUncommitted() throws MojoFailureException, CommandLineException { boolean uncommited = false; // 1 if there were differences and 0 means no differences // git diff --no-ext-diff --ignore-submodules --quiet --exit-code final CommandResult diffCommandResult = executeGitCommandExitCode( "diff", "--no-ext-diff", "--ignore-submodules", "--quiet", "--exit-code"); String error = null; if (diffCommandResult.getExitCode() == SUCCESS_EXIT_CODE) { // git diff-index --cached --quiet --ignore-submodules HEAD -- final CommandResult diffIndexCommandResult = executeGitCommandExitCode( "diff-index", "--cached", "--quiet", "--ignore-submodules", "HEAD", "--"); if (diffIndexCommandResult.getExitCode() != SUCCESS_EXIT_CODE) { error = diffIndexCommandResult.getError(); uncommited = true; } } else { error = diffCommandResult.getError(); uncommited = true; } if (StringUtils.isNotBlank(error)) { throw new MojoFailureException(error); } return uncommited; }
java
public void endElement(String namespaceURI, String localName, String qName) throws SAXException { if (localName.equalsIgnoreCase(TABLE)) { if (startTable) if (m_record.getEditMode() == DBConstants.EDIT_ADD) { try { m_record.add(); } catch (DBException e) { e.printStackTrace(); } } startTable = false; } }
python
def create_connection(dest_pair, proxy_type=None, proxy_addr=None, proxy_port=None, proxy_rdns=True, proxy_username=None, proxy_password=None, timeout=None, source_address=None, socket_options=None): """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object Like socket.create_connection(), but connects to proxy before returning the socket object. dest_pair - 2-tuple of (IP/hostname, port). **proxy_args - Same args passed to socksocket.set_proxy() if present. timeout - Optional socket timeout value, in seconds. source_address - tuple (host, port) for the socket to bind to as its source address before connecting (only for compatibility) """ # Remove IPv6 brackets on the remote address and proxy address. remote_host, remote_port = dest_pair if remote_host.startswith('['): remote_host = remote_host.strip('[]') if proxy_addr and proxy_addr.startswith('['): proxy_addr = proxy_addr.strip('[]') err = None # Allow the SOCKS proxy to be on IPv4 or IPv6 addresses. for r in socket.getaddrinfo(proxy_addr, proxy_port, 0, socket.SOCK_STREAM): family, socket_type, proto, canonname, sa = r sock = None try: sock = socksocket(family, socket_type, proto) if socket_options: for opt in socket_options: sock.setsockopt(*opt) if isinstance(timeout, (int, float)): sock.settimeout(timeout) if proxy_type: sock.set_proxy(proxy_type, proxy_addr, proxy_port, proxy_rdns, proxy_username, proxy_password) if source_address: sock.bind(source_address) sock.connect((remote_host, remote_port)) return sock except (socket.error, ProxyConnectionError) as e: err = e if sock: sock.close() sock = None if err: raise err raise socket.error("gai returned empty list.")
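A hypothetical call to the helper above, tunnelling a TCP connection through a local SOCKS5 proxy (the host, port, and the PySocks-style SOCKS5 constant are placeholders/assumptions):

sock = create_connection(('example.com', 443),
                         proxy_type=SOCKS5,  # e.g. socks.SOCKS5
                         proxy_addr='127.0.0.1', proxy_port=1080,
                         timeout=10)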
python
def get_user(self, username): """Retrieve information about a user Returns: dict: User information None: If no user or failure occurred """ response = self._get(self.rest_url + "/user", params={"username": username, "expand": "attributes"}) if not response.ok: return None return response.json()
java
public Observable<ServiceResponse<BackupLongTermRetentionVaultInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String recoveryServicesVaultResourceId) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (serverName == null) { throw new IllegalArgumentException("Parameter serverName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } if (recoveryServicesVaultResourceId == null) { throw new IllegalArgumentException("Parameter recoveryServicesVaultResourceId is required and cannot be null."); } final String backupLongTermRetentionVaultName = "RegisteredVault"; BackupLongTermRetentionVaultInner parameters = new BackupLongTermRetentionVaultInner(); parameters.withRecoveryServicesVaultResourceId(recoveryServicesVaultResourceId); Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, backupLongTermRetentionVaultName, this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<BackupLongTermRetentionVaultInner>() { }.getType()); }
java
public static void join(SAXSymbol left, SAXSymbol right) { // System.out.println(" performing the join of " + getPayload(left) + " and " // + getPayload(right)); // check for an OLD digram existence - i.e. left must have a next symbol // if .n exists then we are joining TERMINAL symbols within the string, and must clean-up the // old digram if (left.n != null) { // System.out.println(" " + getPayload(left) // + " use to be in the digram table, cleaning up"); left.deleteDigram(); } // re-link left and right left.n = right; right.p = left; }
java
protected Long performCommitLogic(EDBCommit commit) throws EDBException { if (!(commit instanceof JPACommit)) { throw new EDBException("The given commit type is not supported."); } if (commit.isCommitted()) { throw new EDBException("EDBCommit is already committed."); } if (revisionCheckEnabled && commit.getParentRevisionNumber() != null && !commit.getParentRevisionNumber().equals(getCurrentRevisionNumber())) { throw new EDBException("EDBCommit does not have the correct head revision number."); } runBeginCommitHooks(commit); EDBException exception = runPreCommitHooks(commit); if (exception != null) { return runErrorHooks(commit, exception); } Long timestamp = performCommit((JPACommit) commit); runEDBPostHooks(commit); return timestamp; }
java
private long[] refine(long[] invariants, boolean[] hydrogens) { int ord = g.length; InvariantRanker ranker = new InvariantRanker(ord); // current/next vertices, these only hold the vertices which are // equivalent int[] currVs = new int[ord]; int[] nextVs = new int[ord]; // fill with identity (also set number of non-unique) int nnu = ord; for (int i = 0; i < ord; i++) currVs[i] = i; long[] prev = invariants; long[] curr = Arrays.copyOf(invariants, ord); // initially all labels are 1, the input invariants are then used to // refine this coarse partition Arrays.fill(prev, 1L); // number of ranks int n = 0, m = 0; // storage of symmetry classes long[] symmetry = null; while (n < ord) { // refine the initial invariants using product of primes from // adjacent ranks while ((n = ranker.rank(currVs, nextVs, nnu, curr, prev)) > m && n < ord) { nnu = 0; for (int i = 0; i < ord && nextVs[i] >= 0; i++) { int v = nextVs[i]; currVs[nnu++] = v; curr[v] = hydrogens[v] ? prev[v] : primeProduct(g[v], prev, hydrogens); } m = n; } if (symmetry == null) { // After symmetry classes have been found without hydrogens we add // back in the hydrogens and assign ranks. We don't refine the // partition until the next time round the while loop to avoid // artificially splitting due to hydrogen representation, for example // the two hydrogens are equivalent in this SMILES for ethane '[H]CC' for (int i = 0; i < g.length; i++) { if (hydrogens[i]) { curr[i] = prev[g[i][0]]; hydrogens[i] = false; } } n = ranker.rank(currVs, nextVs, nnu, curr, prev); symmetry = Arrays.copyOf(prev, ord); // Update the buffer of non-unique vertices as hydrogens next // to discrete heavy atoms are also discrete (and removed from // 'nextVs' during ranking. nnu = 0; for (int i = 0; i < ord && nextVs[i] >= 0; i++) { currVs[nnu++] = nextVs[i]; } } // partition is discrete or only symmetry classes are needed if (symOnly || n == ord) return symmetry; // artificially split the lowest cell, we perturb the value // of all vertices with equivalent rank to the lowest non-unique // vertex int lo = nextVs[0]; for (int i = 1; i < ord && nextVs[i] >= 0 && prev[nextVs[i]] == prev[lo]; i++) prev[nextVs[i]]++; // could also swap but this is cleaner System.arraycopy(nextVs, 0, currVs, 0, nnu); } return symmetry; }
python
def _get_wmi_properties(self, instance_key, metrics, tag_queries): """ Create and cache a (metric name, metric type) by WMI property map and a property list. """ if instance_key not in self.wmi_props: metric_name_by_property = dict( (wmi_property.lower(), (metric_name, metric_type)) for wmi_property, metric_name, metric_type in metrics ) properties = map(lambda x: x[0], metrics + tag_queries) self.wmi_props[instance_key] = (metric_name_by_property, properties) return self.wmi_props[instance_key]
java
public void writeXml(String path, XMLOutputter doc) throws IOException { doc.startTag(RemoteException.class.getSimpleName()); doc.attribute("path", path); doc.attribute("class", getClassName()); String msg = getLocalizedMessage(); int i = msg.indexOf("\n"); if (i >= 0) { msg = msg.substring(0, i); } doc.attribute("message", msg.substring(msg.indexOf(":") + 1).trim()); doc.endTag(); }
java
public static JSONArray monitorForHA() { Map<String, Cache> CACHE = Redis.unmodifiableCache(); JSONArray monitors = new JSONArray(); if (CACHE == null || CACHE.isEmpty()) return monitors; JSONObject monitor = new JSONObject(); monitor.put("application", EnvUtil.getApplication()); monitor.put("nodeId", LocalNodeManager.LOCAL_NODE_ID); for (Map.Entry<String, Cache> entry : CACHE.entrySet()) { Cache cache = entry.getValue(); if (cache == null) continue; try { monitor.put("instance", cache.getName()); monitors.add(new JSONObject() {{ putAll(monitor); put("value", cache.highAvailable() ? 1 : -1); put("name", "HA"); }}); } catch (Exception e) { LOG.error(String.format("Jedis Pool: %s (Grafana) monitor raised an exception", entry.getKey()), e); } } return monitors; }
python
def determine_context(device_ids: List[int], use_cpu: bool, disable_device_locking: bool, lock_dir: str, exit_stack: ExitStack) -> List[mx.Context]: """ Determine the MXNet context to run on (CPU or GPU). :param device_ids: List of device as defined from the CLI. :param use_cpu: Whether to use the CPU instead of GPU(s). :param disable_device_locking: Disable Sockeye's device locking feature. :param lock_dir: Directory to place device lock files in. :param exit_stack: An ExitStack from contextlib. :return: A list with the context(s) to run on. """ if use_cpu: context = [mx.cpu()] else: num_gpus = get_num_gpus() check_condition(num_gpus >= 1, "No GPUs found, consider running on the CPU with --use-cpu ") if disable_device_locking: context = expand_requested_device_ids(device_ids) else: context = exit_stack.enter_context(acquire_gpus(device_ids, lock_dir=lock_dir)) context = [mx.gpu(gpu_id) for gpu_id in context] return context
java
private static Object generateKey(final Object[] args) { if (args == null) return Collections.emptyList(); Object[] copyOfArgs = copyOf(args, args.length); return asList(copyOfArgs); }
python
def get_ancestors_through_subont(self, go_term, relations): """ Returns the ancestors from the relation filtered GO subontology of go_term's ancestors. subontology() primarily used here for speed when specifying relations to traverse. Point of this is to first get a smaller graph (all ancestors of go_term regardless of relation) and then filter relations on that instead of the whole GO. """ all_ancestors = self.ontology.ancestors(go_term, reflexive=True) subont = self.ontology.subontology(all_ancestors) return subont.ancestors(go_term, relations)
java
public void stop( BundleContext bundleContext ) throws Exception { LOG.debug( "Unbinding " + RemoteBundleContext.class.getSimpleName() ); m_registry.unbind( RemoteBundleContext.class.getName() ); UnicastRemoteObject.unexportObject( m_remoteBundleContext, true ); m_registry = null; m_remoteBundleContext = null; LOG.info( "Remote Bundle Context stopped" ); }
java
public double sim(List<String> sentence, int index) { double score = 0; for (String word : sentence) { if (!f[index].containsKey(word)) continue; int d = docs.get(index).size(); Integer tf = f[index].get(word); score += (idf.get(word) * tf * (k1 + 1) / (tf + k1 * (1 - b + b * d / avgdl))); } return score; }
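The loop above is the standard BM25 term score: for each query word w present in document d, it adds IDF(w) * tf * (k1 + 1) / (tf + k1 * (1 - b + b * |d| / avgdl)). A direct Python transcription, treating the idf table, k1, b, and avgdl as given:

def bm25_score(sentence, doc_tf, doc_len, idf, avgdl, k1=1.5, b=0.75):
    # doc_tf maps word -> term frequency in the candidate document.
    score = 0.0
    for word in sentence:
        tf = doc_tf.get(word)
        if tf is None:
            continue
        score += idf[word] * tf * (k1 + 1) / (
            tf + k1 * (1 - b + b * doc_len / avgdl))
    return score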
java
public String getString( String key ) { verifyIsNull(); Object o = get( key ); if( o != null ){ return o.toString(); } throw new JSONException( "JSONObject[" + JSONUtils.quote( key ) + "] not found." ); }
python
def checkModelIndex( self, modelIndex ): """ Sets the current index as the checked index. :param modelIndex | <QModelIndex> """ self.checkablePopup().hide() if not self.isCheckable(): return self.setCheckedIndexes([modelIndex.row()])
python
def get_model_index(cls, model, default=True): ''' Returns the default model index for the given model, or the list of indices if default is False. :param model: model name as a string. :raise KeyError: If the provided model does not have any index associated. ''' try: if default: return cls._model_name_to_default_index[model] return cls._model_name_to_model_idx[model] except KeyError: raise KeyError('Could not find any model index defined for model {}.'.format(model))
java
public UnsignedTransaction createUnsignedTransaction(List<UnspentTransactionOutput> unspent, Address changeAddress, PublicKeyRing keyRing, NetworkParameters network) throws InsufficientFundsException { long fee = MIN_MINER_FEE; while (true) { UnsignedTransaction unsigned; try { unsigned = createUnsignedTransaction(unspent, changeAddress, fee, keyRing, network); } catch (InsufficientFundsException e) { // We did not even have enough funds to pay the minimum fee throw e; } int txSize = estimateTransacrionSize(unsigned); // fee is based on the size of the transaction, we have to pay for // every 1000 bytes long requiredFee = (1 + (txSize / 1000)) * MIN_MINER_FEE; if (fee >= requiredFee) { return unsigned; } // collect coins anew with an increased fee fee += MIN_MINER_FEE; } }
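The fee loop above terminates because requiredFee grows in steps of MIN_MINER_FEE per started kilobyte: with MIN_MINER_FEE = 10000 and a 1226-byte transaction, requiredFee = (1 + 1226 / 1000) * 10000 = 20000, so a second pass with fee = 20000 suffices (unless the larger fee forces extra inputs and a bigger transaction). A compact Python sketch of the same iterate-until-covered pattern; build_unsigned and estimate_size are assumed callables:

MIN_MINER_FEE = 10000  # assumed fee unit, in satoshi

def create_with_fee(build_unsigned, estimate_size):
    fee = MIN_MINER_FEE
    while True:
        unsigned = build_unsigned(fee)  # may raise on insufficient funds
        required = (1 + estimate_size(unsigned) // 1000) * MIN_MINER_FEE
        if fee >= required:
            return unsigned
        fee += MIN_MINER_FEE  # re-select coins with a higher fee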
java
@Override public synchronized RecordAlert write(int scanId, int pluginId, String alert, int risk, int confidence, String description, String uri, String param, String attack, String otherInfo, String solution, String reference, String evidence, int cweId, int wascId, int historyId, int sourceHistoryId, int sourceId) throws DatabaseException { try { psInsert.setInt(1, scanId); psInsert.setInt(2, pluginId); psInsert.setString(3, alert); psInsert.setInt(4, risk); psInsert.setInt(5, confidence); psInsert.setString(6, description); psInsert.setString(7, uri); psInsert.setString(8, param); psInsert.setString(9, attack); psInsert.setString(10, otherInfo); psInsert.setString(11, solution); psInsert.setString(12, reference); psInsert.setString(13, evidence); psInsert.setInt(14, cweId); psInsert.setInt(15, wascId); psInsert.setInt(16, historyId); psInsert.setInt(17, sourceHistoryId); psInsert.setInt(18, sourceId); psInsert.executeUpdate(); int id; try (ResultSet rs = psGetIdLastInsert.executeQuery()) { rs.next(); id = rs.getInt(1); } return read(id); } catch (SQLException e) { throw new DatabaseException(e); } }
python
def confd_state_internal_cdb_client_subscription_twophase(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring") internal = ET.SubElement(confd_state, "internal") cdb = ET.SubElement(internal, "cdb") client = ET.SubElement(cdb, "client") subscription = ET.SubElement(client, "subscription") twophase = ET.SubElement(subscription, "twophase") callback = kwargs.pop('callback', self._callback) return callback(config)
python
def dump_bulk(cls, parent=None, keep_ids=True): """Dumps a tree branch to a python data structure.""" cls = get_result_class(cls) # Because of fix_tree, this method assumes that the depth # and numchild properties in the nodes can be incorrect, # so no helper methods are used qset = cls._get_serializable_model().objects.all() if parent: qset = qset.filter(path__startswith=parent.path) ret, lnk = [], {} for pyobj in serializers.serialize('python', qset): # django's serializer stores the attributes in 'fields' fields = pyobj['fields'] path = fields['path'] depth = int(len(path) / cls.steplen) # this will be useless in load_bulk del fields['depth'] del fields['path'] del fields['numchild'] if 'id' in fields: # this happens immediately after a load_bulk del fields['id'] newobj = {'data': fields} if keep_ids: newobj['id'] = pyobj['pk'] if (not parent and depth == 1) or\ (parent and len(path) == len(parent.path)): ret.append(newobj) else: parentpath = cls._get_basepath(path, depth - 1) parentobj = lnk[parentpath] if 'children' not in parentobj: parentobj['children'] = [] parentobj['children'].append(newobj) lnk[path] = newobj return ret
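The depth arithmetic above is the materialized-path trick: every ancestor contributes exactly steplen characters to path, so depth falls out of the path length and the parent path is a simple prefix. A tiny illustration with an assumed step length of 4:

STEPLEN = 4  # assumption for illustration

def depth_of(path):
    return len(path) // STEPLEN

def parent_path(path):
    return path[:-STEPLEN]

assert depth_of('00010002') == 2
assert parent_path('00010002') == '0001'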
java
final void updateDelayInMillisecondsFrom(HttpResponse httpPollResponse) { final Long parsedDelayInMilliseconds = delayInMillisecondsFrom(httpPollResponse); if (parsedDelayInMilliseconds != null) { delayInMilliseconds = parsedDelayInMilliseconds; } }
python
def _get_deltas(self, rake): """ Return the value of deltas (delta_R, delta_S, delta_V, delta_I), as defined in "Table 5: Model 1" pag 198 """ # All deltas = 0 for DowrickRhoades2005SSlab Model 3: Deep Region, # pag 198 delta_R, delta_S = 0, 0 delta_V, delta_I = 0, 0 return delta_R, delta_S, delta_V, delta_I
java
public void unmarshalling(Xdr xdr) throws RpcException { super.unmarshalling(xdr); unmarshallingAttributes(xdr); if (stateIsOk()) { _attributes = new NfsPosixAttributes(); _attributes.unmarshalling(xdr); } }
python
def get(self, value): """ Get an enumeration item for an enumeration value. :param unicode value: Enumeration value. :raise InvalidEnumItem: If ``value`` does not match any known enumeration value. :rtype: EnumItem """ _nothing = object() item = self._values.get(value, _nothing) if item is _nothing: raise InvalidEnumItem(value) return item
java
public byte[] getBytes() throws IOException { if(cachedData == null){ if(getReader().available() > 0) { cachedData = getReader().read(); } else{ cachedData = new byte[0]; } } return cachedData; }
java
public DoubleMatrix1D solve(DoubleMatrix1D b) { // if (b.size() != Math.max(m, n)) { // throw new // IllegalArgumentException("The size b must be equal to max(A.rows(), A.columns())."); // } if (!this.hasFullRank()) { log.error("Matrix is rank deficient: " + ArrayUtils.toString(this.A.toArray())); throw new IllegalArgumentException("Matrix is rank deficient"); } // with scaling, we must solve U.A.V.z = U.b, after that we have x = V.z if (this.rescaler != null) { // b = ALG.mult(this.U, b); b = ColtUtils.diagonalMatrixMult(this.U, b); } double[] bdata = b.toArray(); double[] x = new double[this.n]; System.arraycopy(bdata, 0, x, 0, bdata.length); if (m >= n) { double[] y = new double[S != null ? S.m2 : 1]; /* get workspace */ Dcs_ipvec.cs_ipvec(S.pinv, x, y, m); /* y(0:m-1) = b(p(0:m-1) */ for (int k = 0; k < n; k++) { /* apply Householder refl. to x */ Dcs_happly.cs_happly(N.L, k, N.B[k], y); } Dcs_usolve.cs_usolve(N.U, y); /* y = R\y */ Dcs_ipvec.cs_ipvec(S.q, y, x, n); /* x(q(0:n-1)) = y(0:n-1) */ } else { double[] y = new double[S != null ? S.m2 : 1]; /* get workspace */ Dcs_pvec.cs_pvec(S.q, x, y, m); /* y(q(0:m-1)) = b(0:m-1) */ Dcs_utsolve.cs_utsolve(N.U, y); /* y = R'\y */ for (int k = m - 1; k >= 0; k--) { /* apply Householder refl. to x */ Dcs_happly.cs_happly(N.L, k, N.B[k], y); } Dcs_pvec.cs_pvec(S.pinv, y, x, n); /* x(0:n-1) = y(p(0:n-1)) */ } // log.debug("x: " + ArrayUtils.toString(x)); // log.debug("b: " + ArrayUtils.toString(b.toArray())); //return new DenseDoubleMatrix1D(x); if (this.rescaler != null) { // return ALG.mult(this.U, x); return ColtUtils.diagonalMatrixMult(this.V, F1.make(x)); } else { return F1.make(x); } }
python
def prepare_call_state(self, calling_state, initial_state=None, preserve_registers=(), preserve_memory=()): """ This function prepares a state that is executing a call instruction. If given an initial_state, it copies over all of the critical registers to it from the calling_state. Otherwise, it prepares the calling_state for action. This is mostly used to create minimalistic states for CFG generation. Some ABIs, such as MIPS PIE and x86 PIE, require certain information to be maintained in certain registers. For example, for PIE MIPS, this function transfers t9, gp, and ra to the new state. """ if isinstance(self.arch, ArchMIPS32): if initial_state is not None: initial_state = self.state_blank() mips_caller_saves = ('s0', 's1', 's2', 's3', 's4', 's5', 's6', 's7', 'gp', 'sp', 'bp', 'ra') preserve_registers = preserve_registers + mips_caller_saves + ('t9',) if initial_state is None: new_state = calling_state.copy() else: new_state = initial_state.copy() for reg in set(preserve_registers): new_state.registers.store(reg, calling_state.registers.load(reg)) for addr, val in set(preserve_memory): new_state.memory.store(addr, calling_state.memory.load(addr, val)) return new_state
java
JournalSegmentDescriptor copyTo(ByteBuffer buffer) { buffer.putInt(version); buffer.putLong(id); buffer.putLong(index); buffer.putInt(maxSegmentSize); buffer.putInt(maxEntries); buffer.putLong(updated); buffer.put(locked ? (byte) 1 : (byte) 0); return this; }
python
def show_portindex_interface_info_output_show_portindex_interface_portsgroup_rbridgeid(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") show_portindex_interface_info = ET.Element("show_portindex_interface_info") config = show_portindex_interface_info output = ET.SubElement(show_portindex_interface_info, "output") show_portindex_interface = ET.SubElement(output, "show-portindex-interface") portsgroup_rbridgeid = ET.SubElement(show_portindex_interface, "portsgroup-rbridgeid") portsgroup_rbridgeid.text = kwargs.pop('portsgroup_rbridgeid') callback = kwargs.pop('callback', self._callback) return callback(config)
python
def _downsample(self, how, **kwargs): """ Downsample the cython defined function. Parameters ---------- how : string / cython mapped function **kwargs : kw args passed to how function """ self._set_binner() how = self._is_cython_func(how) or how ax = self.ax obj = self._selected_obj if not len(ax): # reset to the new freq obj = obj.copy() obj.index.freq = self.freq return obj # do we have a regular frequency if ax.freq is not None or ax.inferred_freq is not None: if len(self.grouper.binlabels) > len(ax) and how is None: # let's do an asfreq return self.asfreq() # we are downsampling # we want to call the actual grouper method here result = obj.groupby( self.grouper, axis=self.axis).aggregate(how, **kwargs) result = self._apply_loffset(result) return self._wrap_result(result)
java
static long getFiveBytesLong(byte[] buffer, int offset) { return (buffer[offset] & 0xffL) << 32 | (buffer[offset + 1] & 0xffL) << 24 | (buffer[offset + 2] & 0xffL) << 16 | (buffer[offset + 3] & 0xffL) << 8 | (buffer[offset + 4] & 0xffL); }
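The masks and shifts above assemble a big-endian 40-bit unsigned value; the & 0xffL undoes Java's sign extension of byte. In Python the same decode is a single call:

def get_five_bytes_long(buffer, offset):
    # Big-endian, 5 bytes -> unsigned integer in [0, 2**40).
    return int.from_bytes(buffer[offset:offset + 5], 'big')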
python
def acquire(self): """Acquire the lock.""" self.lease = self.client.lease(self.ttl) base64_key = _encode(self.key) base64_value = _encode(self._uuid) txn = { 'compare': [{ 'key': base64_key, 'result': 'EQUAL', 'target': 'CREATE', 'create_revision': 0 }], 'success': [{ 'request_put': { 'key': base64_key, 'value': base64_value, 'lease': self.lease.id } }], 'failure': [{ 'request_range': { 'key': base64_key } }] } result = self.client.transaction(txn) if 'succeeded' in result: return result['succeeded'] return False
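The transaction above is a compare-and-create: the compare clause passes only when the key's create_revision is 0, i.e. the key does not exist yet, so exactly one client can attach its UUID (bound to a TTL lease, which releases the lock if the holder dies). The shape of the round trip, as a hedged pseudocode sketch rather than a real etcd client API:

def try_acquire(kv, key, uuid, lease_id):
    # Hypothetical kv.transaction(compare, success, failure) helper.
    return kv.transaction(
        compare=[('create_revision', key, '==', 0)],  # key must be absent
        success=[('put', key, uuid, lease_id)],       # claim it
        failure=[('get', key)],                       # see who holds it
    )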
python
def filter_like(self, **filters): ''' Filter query using re.compile(). **Examples**: ``query.filter_like(Name="andi")`` ''' Query = {} for name, value in filters.items(): name = resolve_name(self.type, name) Query[name] = re_compile(value, IGNORECASE) self.filter(QueryExpression(Query)) return self
python
def get_ldap_groups(self): """Retrieve groups from LDAP server.""" if (not self.conf_LDAP_SYNC_GROUP): return (None, None) uri_groups_server, groups = self.ldap_search(self.conf_LDAP_SYNC_GROUP_FILTER, self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys(), self.conf_LDAP_SYNC_GROUP_INCREMENTAL, self.conf_LDAP_SYNC_GROUP_FILTER_INCREMENTAL) logger.debug("Retrieved %d groups from %s LDAP server" % (len(groups), uri_groups_server)) return (uri_groups_server, groups)
python
def get_size(data, default_width=0, default_height=0): """ Get image size :param data: A buffer with image content :return: Tuple (width, height, filetype) """ height = default_height width = default_width filetype = None # Original version: # https://github.com/shibukawa/imagesize_py # # The MIT License (MIT) # # Copyright © 2016 Yoshiki Shibukawa # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. size = len(data) # handle GIFs if size >= 10 and data[:6] in (b'GIF87a', b'GIF89a'): # Check to see if content_type is correct try: width, height = struct.unpack("<hh", data[6:10]) filetype = "gif" except struct.error: raise ValueError("Invalid GIF file") # see png edition spec bytes are below chunk length then and finally the elif size >= 24 and data.startswith(b'\211PNG\r\n\032\n') and data[12:16] == b'IHDR': try: width, height = struct.unpack(">LL", data[16:24]) filetype = "png" except struct.error: raise ValueError("Invalid PNG file") # Maybe this is for an older PNG version. elif size >= 16 and data.startswith(b'\211PNG\r\n\032\n'): # Check to see if we have the right content type try: width, height = struct.unpack(">LL", data[8:16]) filetype = "png" except struct.error: raise ValueError("Invalid PNG file") # handle JPEGs elif size >= 2 and data.startswith(b'\377\330'): try: # Not very efficient to copy data to a buffer fhandle = io.BytesIO(data) size = 2 ftype = 0 while not 0xc0 <= ftype <= 0xcf: fhandle.seek(size, 1) byte = fhandle.read(1) while ord(byte) == 0xff: byte = fhandle.read(1) ftype = ord(byte) size = struct.unpack('>H', fhandle.read(2))[0] - 2 # We are at a SOFn block fhandle.seek(1, 1) # Skip `precision' byte. height, width = struct.unpack('>HH', fhandle.read(4)) filetype = "jpg" except struct.error: raise ValueError("Invalid JPEG file") # End of https://github.com/shibukawa/imagesize_py # handle SVG elif size >= 10 and data.startswith(b'<?xml'): filetype = "svg" fhandle = io.BytesIO(data) tree = ElementTree() try: tree.parse(fhandle) except ParseError: raise ValueError("Invalid SVG file") root = tree.getroot() try: width = _svg_convert_size(root.attrib.get("width", "0")) height = _svg_convert_size(root.attrib.get("height", "0")) except IndexError: raise ValueError("Invalid SVG file") return width, height, filetype
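To make the PNG branch concrete: a PNG starts with the 8-byte signature b'\211PNG\r\n\032\n', followed by a 4-byte chunk length and the 4-byte type 'IHDR', after which the first eight payload bytes are width and height as big-endian 32-bit integers, which is exactly what struct.unpack(">LL", data[16:24]) reads. A hypothetical call (the file path is a placeholder):

with open('logo.png', 'rb') as fh:
    width, height, filetype = get_size(fh.read())
print(width, height, filetype)  # e.g. 640 480 png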
java
public static boolean isSymbol(final char ch) { return '(' == ch || ')' == ch || '[' == ch || ']' == ch || '{' == ch || '}' == ch || '+' == ch || '-' == ch || '*' == ch || '/' == ch || '%' == ch || '^' == ch || '=' == ch || '>' == ch || '<' == ch || '~' == ch || '!' == ch || '?' == ch || '&' == ch || '|' == ch || '.' == ch || ':' == ch || '#' == ch || ',' == ch || ';' == ch; }
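The chained comparisons above are a hand-rolled membership test; the idiomatic Python counterpart is a set lookup over the same character class:

SYMBOLS = set('()[]{}+-*/%^=><~!?&|.:#,;')

def is_symbol(ch):
    return ch in SYMBOLS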
python
def swo_start(self, baudrate): """! @brief Start receiving SWO data at the given baudrate.""" try: self._link.swo_configure(True, baudrate) self._link.swo_control(True) except DAPAccess.Error as exc: six.raise_from(self._convert_exception(exc), exc)
java
@Override public boolean satisfies(Match match, int... ind) { BioPAXElement ele0 = match.get(ind[0]); BioPAXElement ele1 = match.get(ind[1]); if (ele1 == null) return false; Set vals = pa.getValueFromBean(ele0); return vals.contains(ele1); }
python
def read_struct_file(struct_file,return_type=GeoStruct): """read an existing PEST-type structure file into a GeoStruct instance Parameters ---------- struct_file : (str) existing pest-type structure file return_type : (object) the instance type to return. Default is GeoStruct Returns ------- GeoStruct : list or GeoStruct Note ---- if only one structure is listed in struct_file, then return type is GeoStruct. Otherwise, return type is a list of GeoStruct Example ------- ``>>>import pyemu`` ``>>>gs = pyemu.utils.geostats.read_struct_file("struct.dat")`` """ VARTYPE = {1:SphVario,2:ExpVario,3:GauVario,4:None} assert os.path.exists(struct_file) structures = [] variograms = [] with open(struct_file,'r') as f: while True: line = f.readline() if line == '': break line = line.strip().lower() if line.startswith("structure"): name = line.strip().split()[1] nugget,transform,variogram_info = _read_structure_attributes(f) s = return_type(nugget=nugget,transform=transform,name=name) s.variogram_info = variogram_info # not sure what is going on, but if I don't copy s here, # all the structures end up sharing all the variograms later structures.append(copy.deepcopy(s)) elif line.startswith("variogram"): name = line.strip().split()[1].lower() vartype,bearing,a,anisotropy = _read_variogram(f) if name in variogram_info: v = VARTYPE[vartype](variogram_info[name],a,anisotropy=anisotropy, bearing=bearing,name=name) variograms.append(v) for i,st in enumerate(structures): for vname in st.variogram_info: vfound = None for v in variograms: if v.name == vname: vfound = v break if vfound is None: raise Exception("variogram {0} not found for structure {1}".\ format(vname,s.name)) st.variograms.append(vfound) if len(structures) == 1: return structures[0] return structures
python
def version(self, path, postmap=None, **params): """ Return the taskforce version. Supports standard options. """ q = httpd.merge_query(path, postmap) ans = { 'taskforce': taskforce_version, 'python': '.'.join(str(x) for x in sys.version_info[:3]), } ans['platform'] = { 'system': platform.system(), } # Add in some extra details if this is a control path. # These might give away too many details on a public # path. # if self._httpd.allow_control: ans['platform']['platform'] = platform.platform() ans['platform']['release'] = platform.release() return self._format(ans, q)
java
private String makeHash(String request) throws CacheException { RequestCtx reqCtx = null; try { reqCtx = m_contextUtil.makeRequestCtx(request); } catch (MelcoeXacmlException pe) { throw new CacheException("Error converting request", pe); } byte[] hash = null; // ensure thread safety, don't want concurrent invocations of this method all modifying digest at once // (alternative is to construct a new digest for each) synchronized(digest) { digest.reset(); hashSubjectList(reqCtx.getSubjectsAsList(), digest); hashAttributeList(reqCtx.getResourceAsList(), digest); hashAttributeList(reqCtx.getActionAsList(), digest); hashAttributeList(reqCtx.getEnvironmentAttributesAsList(), digest); hash = digest.digest(); } return byte2hex(hash); }
python
def get_cfcompliant_units(units, prefix='', suffix=''): """ Get equivalent units that are compatible with the udunits2 library (thus CF-compliant). Parameters ---------- units : string A string representation of the units. prefix : string Will be added at the beginning of the returned string (must be a valid udunits2 expression). suffix : string Will be added at the end of the returned string (must be a valid udunits2 expression). Returns ------- A string representation of the conforming units. References ---------- The udunits2 package : http://www.unidata.ucar.edu/software/udunits/ Notes ----- This function only relies on the table stored in :attr:`UNITS_MAP_CTM2CF`. Therefore, the units string returned by this function is not certified to be compatible with udunits2. Examples -------- >>> get_cfcompliant_units('molec/cm2') 'count/cm2' >>> get_cfcompliant_units('v/v') '1' >>> get_cfcompliant_units('ppbC', prefix='3') '3ppb' """ compliant_units = units for gcunits, udunits in UNITS_MAP_CTM2CF: compliant_units = str.replace(compliant_units, gcunits, udunits) return prefix + compliant_units + suffix
java
public double[] getColumn(int column) { checkIndices(0, column); rowReadLock.lock(); double[] values = new double[rows.get()]; for (int row = 0; row < rows.get(); ++row) values[row] = get(row, column); rowReadLock.unlock(); return values; }
java
public ClassGraph blacklistPackages(final String... packageNames) { enableClassInfo(); for (final String packageName : packageNames) { final String packageNameNormalized = WhiteBlackList.normalizePackageOrClassName(packageName); if (packageNameNormalized.isEmpty()) { throw new IllegalArgumentException( "Blacklisting the root package (\"\") will cause nothing to be scanned"); } // Blacklisting always prevents further recursion, no need to blacklist sub-packages scanSpec.packageWhiteBlackList.addToBlacklist(packageNameNormalized); final String path = WhiteBlackList.packageNameToPath(packageNameNormalized); scanSpec.pathWhiteBlackList.addToBlacklist(path + "/"); if (!packageNameNormalized.contains("*")) { // Blacklist sub-packages (zipfile entries can occur in any order) scanSpec.packagePrefixWhiteBlackList.addToBlacklist(packageNameNormalized + "."); scanSpec.pathPrefixWhiteBlackList.addToBlacklist(path + "/"); } } return this; }
java
public synchronized Future<?> addIndexedColumn(ColumnDefinition cdef) { if (indexesByColumn.containsKey(cdef.name.bytes)) return null; assert cdef.getIndexType() != null; SecondaryIndex index; try { index = SecondaryIndex.createInstance(baseCfs, cdef); } catch (ConfigurationException e) { throw new RuntimeException(e); } // Keep a single instance of the index per-cf for row level indexes // since we want all columns to be under the index if (index instanceof PerRowSecondaryIndex) { SecondaryIndex currentIndex = rowLevelIndexMap.get(index.getClass()); if (currentIndex == null) { rowLevelIndexMap.put(index.getClass(), index); index.init(); } else { index = currentIndex; index.addColumnDef(cdef); logger.info("Creating new index : {}",cdef); } } else { // TODO: We should do better than throw a RuntimeException if (cdef.getIndexType() == IndexType.CUSTOM && index instanceof AbstractSimplePerColumnSecondaryIndex) throw new RuntimeException("Cannot use a subclass of AbstractSimplePerColumnSecondaryIndex as a CUSTOM index, as they assume they are CFS backed"); index.init(); } // link in indexedColumns. this means that writes will add new data to // the index immediately, // so we don't have to lock everything while we do the build. it's up to // the operator to wait // until the index is actually built before using in queries. indexesByColumn.put(cdef.name.bytes, index); // Add to all indexes set: allIndexes.add(index); // if we're just linking in the index to indexedColumns on an // already-built index post-restart, we're done if (index.isIndexBuilt(cdef.name.bytes)) return null; return index.buildIndexAsync(); }
python
def expand_expression(self, pattern, hosts, services, hostgroups, servicegroups, running=False):  # pylint: disable=too-many-locals
    """Expand a host or service expression into a dependency node tree
    using (host|service)group membership, regex, or labels as item selector.

    :param pattern: pattern to parse
    :type pattern: str
    :param hosts: hosts list, used to find a specific host
    :type hosts: alignak.objects.host.Host
    :param services: services list, used to find a specific service
    :type services: alignak.objects.service.Service
    :param hostgroups: hostgroups list, used to resolve hostgroup membership
    :type hostgroups: alignak.objects.hostgroup.Hostgroup
    :param servicegroups: servicegroups list, used to resolve servicegroup membership
    :type servicegroups: alignak.objects.servicegroup.Servicegroup
    :param running: rules are evaluated both at configuration parsing time and
                    at run time; True means runtime evaluation
    :type running: bool
    :return: root node of parsed tree
    :rtype: alignak.dependencynode.DependencyNode
    """
    error = None
    node = DependencyNode()
    node.operand = '&'

    elts = [e.strip() for e in pattern.split(',')]
    # If host_name is empty, use the host_name the business rule is bound to
    if not elts[0]:
        elts[0] = self.bound_item.host_name
    filters = []

    # Looks for hosts/services using appropriate filters
    try:
        all_items = {
            "hosts": hosts,
            "hostgroups": hostgroups,
            "servicegroups": servicegroups
        }
        if len(elts) > 1:
            # We got a service expression
            host_expr, service_expr = elts
            filters.extend(self.get_srv_host_filters(host_expr))
            filters.extend(self.get_srv_service_filters(service_expr))
            items = services.find_by_filter(filters, all_items)
        else:
            # We got a host expression
            host_expr = elts[0]
            filters.extend(self.get_host_filters(host_expr))
            items = hosts.find_by_filter(filters, all_items)
    except re.error as regerr:
        error = "Business rule uses invalid regex %s: %s" % (pattern, regerr)
    else:
        if not items:
            error = "Business rule got an empty result for pattern %s" % pattern

    # Checks if we got result
    if error:
        if running is False:
            node.configuration_errors.append(error)
        else:
            # As business rules are re-evaluated at run time on
            # each scheduling loop, if the rule becomes invalid
            # because of a badly written macro modulation, it
            # should be notified upper for the error to be
            # displayed in the check output.
            raise Exception(error)
        return node

    # Creates dependency node subtree
    # here we have Alignak SchedulingItem object (Host/Service)
    for item in items:
        # Creates a host/service node
        son = DependencyNode()
        son.operand = item.__class__.my_type
        son.sons.append(item.uuid)  # Only store the uuid, not the full object.
        # Appends it to wrapping node
        node.sons.append(son)

    node.switch_zeros_of_values()
    return node
python
async def load_tuple(self, elem_type, params=None, elem=None, obj=None):
    """
    Loads tuple of elements from the reader. Supports the tuple ref.
    Returns loaded tuple.

    :param elem_type: tuple type specification; provides the field specs
        via f_specs() when params does not override them
    :param params: optional parameters; params[0], if given, overrides the
        field specs, and the remainder is forwarded to the field loader
    :param elem: optional existing container to load the values into
    :param obj: source object holding the serialized tuple values
    :return: the loaded tuple, or None if obj is None
    """
    if obj is None:
        return None

    elem_fields = params[0] if params else None
    if elem_fields is None:
        elem_fields = elem_type.f_specs()

    # the serialized object must carry exactly one value per field spec
    c_len = len(obj)
    if len(elem_fields) != c_len:
        raise ValueError('Size mismatch')

    res = elem if elem else []
    for i in range(len(elem_fields)):
        try:
            self.tracker.push_index(i)
            fvalue = await self._load_field(params[1:] if params else None,
                                            x.eref(res, i) if elem else None,
                                            obj=obj[i])
            self.tracker.pop()

            if not elem:
                res.append(fvalue)
        except Exception as e:
            raise helpers.ArchiveException(e, tracker=self.tracker) from e

    return res
python
def analyze(self, file, filename): """ :param file: The File object itself. :param filename: string; filename of File object, used for creating PotentialSecret objects :returns dictionary representation of set (for random access by hash) { detect_secrets.core.potential_secret.__hash__: detect_secrets.core.potential_secret } """ potential_secrets = {} for line_num, line in enumerate(file.readlines(), start=1): secrets = self.analyze_string(line, line_num, filename) potential_secrets.update(secrets) return potential_secrets
java
@Override public boolean matches(Object item) { int count = 0; for (Matcher<?> m: matchers) { if (m.matches(item)) { count++; } } return countMatcher.matches(count); }
python
def is_within(self, query, subject):
    """Helper function to check whether a range is fully contained within another range"""
    return self.pt_within(query[0], subject) and self.pt_within(query[1], subject)
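Containment of one range in another reduces to checking that both endpoints of the query lie inside the subject. A standalone sketch of the same logic over inclusive [start, end] ranges; pt_within here is a hypothetical stand-in for the class method of the same name:

# Toy endpoint-containment check (illustrative, not the class API).
def pt_within(pt, rng):
    # a point lies within an inclusive [start, end] range
    return rng[0] <= pt <= rng[1]

def is_within(query, subject):
    return pt_within(query[0], subject) and pt_within(query[1], subject)

assert is_within((2, 3), (1, 5))      # fully contained
assert not is_within((0, 3), (1, 5))  # left endpoint falls outside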
python
def _delete_line(self, count=1):
    """
    Deletes count lines, starting at the line with the cursor. As lines
    are deleted, lines displayed below the cursor move up. Lines added
    to the bottom of the screen have spaces with the same character
    attributes as the last line moved up.
    """
    # drop `count` rows at the cursor, shifting the rows below it up
    self.display = self.display[:self.y] + \
                   self.display[self.y + count:]
    # pad the bottom with blank rows, one string per row
    self.display.extend([u" " * self.size[1]] * count)

    self.attributes = self.attributes[:self.y] + \
                      self.attributes[self.y + count:]
    last_attributes = self.attributes[-1]
    for _ in xrange(count):
        self.attributes.append(copy(last_attributes))
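The delete-and-pad technique keeps the screen buffer at a fixed height: slice out count rows at the cursor, then pad the bottom with blanks. A toy sketch on a plain list of row strings (screen, y, count and width are illustrative values, not part of the emulator above):

# Fixed-height buffer: delete rows at the cursor, pad blanks at the bottom.
screen = [u"a", u"b", u"c", u"d"]
y, count, width = 1, 2, 1

screen = screen[:y] + screen[y + count:]  # drop `count` rows at the cursor
screen.extend([u" " * width] * count)     # pad the bottom back to full height

assert screen == [u"a", u"d", u" ", u" "]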
python
def import_users(self):
    """ save users to local DB """
    self.message('saving users into local DB')

    saved_users = self.saved_admins

    # loop over all extracted unique email addresses
    for email in self.email_set:
        owner = self.users_dict[email].get('owner')

        # if owner is not specified, build username from email
        if not owner or owner.strip() == '':
            owner, domain = email.split('@')
            # replace any points with a space
            owner = owner.replace('.', ' ')

        # if owner has a space, assume first and last name were specified
        if ' ' in owner:
            owner_parts = owner.split(' ')
            first_name = owner_parts[0]
            last_name = owner_parts[1]
        else:
            first_name = owner
            last_name = ''

        # username must be slugified otherwise it won't get into the DB
        username = slugify(owner)

        # check if user exists first
        try:
            # try looking by email
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            # otherwise init new
            user = User()
            user.username = username
            # generate new password only for new users
            user.password = self.generate_random_password()
            user.is_active = True

        # we'll create one user for each unique email address we've got
        user.first_name = first_name.capitalize()
        user.last_name = last_name.capitalize()
        user.email = email

        # extract date joined from old nodes:
        # find the oldest node of this user
        oldest_node = OldNode.objects.filter(email=email).order_by('added')[0]
        user.date_joined = oldest_node.added

        # be sure username is unique
        counter = 1
        original_username = username
        while True:
            # do this check only if user is new
            if not user.pk and User.objects.filter(username=user.username).count() > 0:
                counter += 1
                user.username = '%s%d' % (original_username, counter)
            else:
                break

        try:
            # validate data and save
            user.full_clean()
            user.save(sync_emailaddress=False)
        except Exception:
            # if user already exists use that instance
            if User.objects.filter(email=email).count() == 1:
                user = User.objects.get(email=email)
            # otherwise report error
            else:
                tb = traceback.format_exc()
                self.message('Could not save user %s, got exception:\n\n%s' % (user.username, tb))
                continue

        # if we got a user to add
        if user:
            # store id
            self.users_dict[email]['id'] = user.id
            # append to saved users
            saved_users.append(user)
            self.verbose('Saved user %s (%s) with email <%s>' % (user.username, user.get_full_name(), user.email))

            # mark email address as confirmed if feature is enabled
            if EMAIL_CONFIRMATION and EmailAddress.objects.filter(email=user.email).count() == 0:
                try:
                    email_address = EmailAddress(user=user, email=user.email, verified=True, primary=True)
                    email_address.full_clean()
                    email_address.save()
                except Exception:
                    tb = traceback.format_exc()
                    self.message('Could not save email address for user %s, got exception:\n\n%s' % (user.username, tb))

    self.message('saved %d users into local DB' % len(saved_users))
    self.saved_users = saved_users
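The uniqueness loop in import_users implements a suffix-counter scheme over slugified names: keep appending an incrementing number until no collision remains. A hedged standalone sketch of just that technique, with an `exists` predicate standing in for the Django queryset check (the helper name is illustrative):

from django.utils.text import slugify

# Standalone sketch of the suffix-counter scheme; `exists` replaces
# User.objects.filter(username=...).count() > 0 from the code above.
def unique_username(full_name, exists):
    base = slugify(full_name)
    candidate, counter = base, 1
    while exists(candidate):
        counter += 1
        candidate = '%s%d' % (base, counter)
    return candidate

taken = {'john-doe', 'john-doe2'}
print(unique_username('John Doe', taken.__contains__))  # john-doe3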
python
def do_debug(self, args, arguments): """ :: Usage: debug on debug off Turns the debug log level on and off. """ filename = path_expand("~/.cloudmesh/cmd3.yaml") config = ConfigDict(filename=filename) if arguments['on']: self.set_debug(True) elif arguments['off']: self.set_debug(False)
java
public void setPreferredEditor(String resourceType, String editorUri) { if (editorUri == null) { m_editorSettings.remove(resourceType); } else { m_editorSettings.put(resourceType, editorUri); } }
python
def run_tpm(system, steps, blackbox):
    """Iterate the TPM for the given number of timesteps.

    Returns:
        np.ndarray: tpm * (noise_tpm^(t-1))
    """
    # Generate noised TPM
    # Noise the connections from every output element to elements in other
    # boxes.
    node_tpms = []
    for node in system.nodes:
        node_tpm = node.tpm_on
        for input_node in node.inputs:
            if not blackbox.in_same_box(node.index, input_node):
                if input_node in blackbox.output_indices:
                    node_tpm = marginalize_out([input_node], node_tpm)

        node_tpms.append(node_tpm)

    noised_tpm = rebuild_system_tpm(node_tpms)
    noised_tpm = convert.state_by_node2state_by_state(noised_tpm)

    tpm = convert.state_by_node2state_by_state(system.tpm)

    # Multiply by noise
    tpm = np.dot(tpm, np.linalg.matrix_power(noised_tpm, steps - 1))

    return convert.state_by_state2state_by_node(tpm)
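The key step is composing the system's state-by-state TPM with powers of the noised TPM: one application of the base matrix followed by (steps - 1) applications of the noised one. A toy numpy sketch of that composition; the 2x2 matrices are made up for illustration:

import numpy as np

# One base step, then (steps - 1) noised steps, as in run_tpm above.
tpm = np.array([[0.9, 0.1],
                [0.2, 0.8]])
noised = np.array([[0.5, 0.5],
                   [0.5, 0.5]])
steps = 3

result = tpm @ np.linalg.matrix_power(noised, steps - 1)
assert np.allclose(result.sum(axis=1), 1.0)  # rows remain stochastic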
java
public void eventProcessingFailed(ActivityHandle activityHandle, FireableEventType fireableEventType, Object object, Address address, ReceivableService receivableService, int integer, FailureReason failureReason) { // not used }
java
private void uploadPart() throws IOException { if (mFile == null) { return; } mLocalOutputStream.close(); int partNumber = mPartNumber.getAndIncrement(); File newFileToUpload = new File(mFile.getPath()); mFile = null; mLocalOutputStream = null; UploadPartRequest uploadRequest = new UploadPartRequest() .withBucketName(mBucketName) .withKey(mKey) .withUploadId(mUploadId) .withPartNumber(partNumber) .withFile(newFileToUpload) .withPartSize(newFileToUpload.length()); execUpload(uploadRequest); }
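uploadPart hands one buffered local file to the multipart upload as a numbered part; the part number comes from an atomic counter so parts stay ordered across concurrent flushes. For comparison, a minimal sketch of the same step using boto3 rather than the Java SDK above (bucket, key and upload_id are assumed to come from an earlier create_multipart_upload call):

import boto3

# Sketch of one part-upload step with boto3; not the library used above.
s3 = boto3.client('s3')

def upload_part(bucket, key, upload_id, part_number, path):
    with open(path, 'rb') as f:
        resp = s3.upload_part(Bucket=bucket, Key=key, UploadId=upload_id,
                              PartNumber=part_number, Body=f)
    # The ETag must be kept for the final complete_multipart_upload call.
    return {'PartNumber': part_number, 'ETag': resp['ETag']}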
python
def from_euler(self, roll, pitch, yaw): '''fill the matrix from Euler angles in radians''' cp = cos(pitch) sp = sin(pitch) sr = sin(roll) cr = cos(roll) sy = sin(yaw) cy = cos(yaw) self.a.x = cp * cy self.a.y = (sr * sp * cy) - (cr * sy) self.a.z = (cr * sp * cy) + (sr * sy) self.b.x = cp * sy self.b.y = (sr * sp * sy) + (cr * cy) self.b.z = (cr * sp * sy) - (sr * cy) self.c.x = -sp self.c.y = sr * cp self.c.z = cr * cp
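The nine assignments in from_euler build the standard yaw-pitch-roll direction cosine matrix, R = Rz(yaw) · Ry(pitch) · Rx(roll). A numpy sketch of the same construction (standalone, not the Matrix3 class above), with an orthonormality check that any correct rotation matrix must pass:

import numpy as np

# Same DCM entries as from_euler, assembled as a numpy array.
def dcm_from_euler(roll, pitch, yaw):
    cr, sr = np.cos(roll), np.sin(roll)
    cp, sp = np.cos(pitch), np.sin(pitch)
    cy, sy = np.cos(yaw), np.sin(yaw)
    return np.array([
        [cp * cy, sr * sp * cy - cr * sy, cr * sp * cy + sr * sy],
        [cp * sy, sr * sp * sy + cr * cy, cr * sp * sy - sr * cy],
        [-sp,     sr * cp,                cr * cp],
    ])

R = dcm_from_euler(0.1, 0.2, 0.3)
assert np.allclose(R @ R.T, np.eye(3))       # orthonormal rows/columns
assert np.isclose(np.linalg.det(R), 1.0)     # proper rotation, not a reflection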
java
public Mapper createMapperToSubflowState(final List<DefaultMapping> mappings) { val inputMapper = new DefaultMapper(); mappings.forEach(inputMapper::addMapping); return inputMapper; }