language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/** Halts the local executor pool and returns the never-run tasks, cast to the Hazeltask task type. */
@SuppressWarnings({ "unchecked", "rawtypes" })
public List<HazeltaskTask<G>> shutdownNow() {
    final List rawPending = localExecutorPool.shutdownNow();
    return (List<HazeltaskTask<G>>) rawPending;
}
python
def unicode_stdio():
    """Wrap the standard I/O streams so they accept Unicode on Python 2.

    On Python 3 the stdio streams are already text-mode, so this is a no-op.
    On Python 2 the interpreter auto-encodes output only when attached to a
    terminal; piped output blows up on non-ASCII text.  This replaces
    ``sys.stdin``/``stdout``/``stderr`` with codec wrappers so encoding is
    handled uniformly.  Call once at program startup.
    """
    if six.PY3:
        return
    # Fall back to UTF-8 whenever a stream declares no encoding (e.g. a pipe).
    stdin_enc = sys.stdin.encoding or 'utf-8'
    sys.stdin = codecs.getreader(stdin_enc)(sys.stdin)
    stdout_enc = sys.stdout.encoding or stdin_enc
    sys.stdout = codecs.getwriter(stdout_enc)(sys.stdout)
    stderr_enc = sys.stderr.encoding or stdout_enc
    sys.stderr = codecs.getwriter(stderr_enc)(sys.stderr)
python
def calc_hmin_qmin_hmax_qmax_v1(self):
    """Determine a starting interval for iteration methods as the one
    implemented in method |calc_h_v1|.

    The resulting interval is determined in a manner, that on the one hand
    :math:`Qmin \\leq QRef \\leq Qmax` is fulfilled and on the other hand
    the results of method |calc_qg_v1| are continuous for
    :math:`Hmin \\leq H \\leq Hmax`.

    Required control parameter:
      |HM|

    Required derived parameters:
      |HV|
      |lstream_derived.QM|
      |lstream_derived.QV|

    Required flux sequence:
      |QRef|

    Calculated aide sequences:
      |HMin|
      |HMax|
      |QMin|
      |QMax|

    Besides the mentioned required parameters and sequences, those of the
    actual method for calculating the discharge of the total cross section
    might be required.  This is the case whenever water flows on both outer
    embankments.  In such occasions no previously determined upper boundary
    values are available and method |calc_hmin_qmin_hmax_qmax_v1| needs to
    increase the value of :math:`HMax` successively until the condition
    :math:`QG \\leq QMax` is met.
    """
    con = self.parameters.control.fastaccess
    der = self.parameters.derived.fastaccess
    flu = self.sequences.fluxes.fastaccess
    aid = self.sequences.aides.fastaccess
    if flu.qref <= der.qm:
        # Reference discharge fits within the main channel: bracket [0, HM].
        aid.hmin = 0.
        aid.qmin = 0.
        aid.hmax = con.hm
        aid.qmax = der.qm
    elif flu.qref <= min(der.qv[0], der.qv[1]):
        # Discharge fits below the lower of the two foreland capacities.
        aid.hmin = con.hm
        aid.qmin = der.qm
        aid.hmax = con.hm+min(der.hv[0], der.hv[1])
        aid.qmax = min(der.qv[0], der.qv[1])
    elif flu.qref < max(der.qv[0], der.qv[1]):
        # Discharge lies between the two foreland capacities.
        aid.hmin = con.hm+min(der.hv[0], der.hv[1])
        aid.qmin = min(der.qv[0], der.qv[1])
        aid.hmax = con.hm+max(der.hv[0], der.hv[1])
        aid.qmax = max(der.qv[0], der.qv[1])
    else:
        # Water flows on both outer embankments: no precomputed upper bound
        # exists, so double H until the computed discharge reaches QRef.
        flu.h = con.hm+max(der.hv[0], der.hv[1])
        aid.hmin = flu.h
        aid.qmin = flu.qg
        while True:
            flu.h *= 2.
            self.calc_qg()
            if flu.qg < flu.qref:
                aid.hmin = flu.h
                aid.qmin = flu.qg
            else:
                aid.hmax = flu.h
                aid.qmax = flu.qg
                break
java
/**
 * Resolves a localized message, falling back to a secondary key when the
 * primary one is unknown, and HTML-escapes the result.
 */
public String key(String keyName, String defaultKeyName) {
    String localized = getMessages().key(keyName, getLocalizeParameters());
    boolean primaryMissing = localized.startsWith(CmsMessages.UNKNOWN_KEY_EXTENSION);
    if (primaryMissing && CmsStringUtil.isNotEmpty(defaultKeyName)) {
        localized = getMessages().key(defaultKeyName, getLocalizeParameters());
    }
    return CmsStringUtil.escapeHtml(localized);
}
java
/** Returns the smallest of one or more BigFloat values. */
public static BigFloat min(BigFloat value1, BigFloat... values) {
    BigFloat smallest = value1;
    for (int i = 0; i < values.length; i++) {
        smallest = min(smallest, values[i]);
    }
    return smallest;
}
python
def _info(self):
    """Return the persistent ModuleInfo row for this module, creating it on first use."""
    qualified_name = '{}.{}'.format(self.__class__.__module__,
                                    self.__class__.__name__)
    info, created = ModuleInfo.objects.get_or_create(name=qualified_name)
    if created:
        # Persist the fresh record without marking it as changed.
        info.commit()
    return info
python
def raw_encode(data):
    """Special case serializer.

    Pass-through encoder for raw payloads: text is encoded to UTF-8 bytes,
    anything else is assumed to already be binary and is passed unchanged.

    :param data: the payload, either text or a bytes-like object.
    :returns: tuple ``(content_type, content_encoding, payload)``.
    """
    import sys
    content_type = 'application/data'
    payload = data
    # Bug fix: `unicode` only exists on Python 2 and raised NameError on
    # Python 3.  Resolve the text type per interpreter instead.
    text_type = str if sys.version_info[0] >= 3 else unicode  # noqa: F821
    if isinstance(payload, text_type):
        content_encoding = 'utf-8'
        payload = payload.encode(content_encoding)
    else:
        content_encoding = 'binary'
    return content_type, content_encoding, payload
python
def get_qutip_module(required_version='3.2'): """ Attempts to return the qutip module, but silently returns ``None`` if it can't be imported, or doesn't have version at least ``required_version``. :param str required_version: Valid input to ``distutils.version.LooseVersion``. :return: The qutip module or ``None``. :rtype: ``module`` or ``NoneType`` """ try: import qutip as qt from distutils.version import LooseVersion _qt_version = LooseVersion(qt.version.version) if _qt_version < LooseVersion(required_version): return None except ImportError: return None return qt
python
def create_lr_scheduler_with_warmup(lr_scheduler, warmup_start_value, warmup_end_value, warmup_duration,
                                    save_history=False,
                                    output_simulated_values=None):
    """
    Helper method to create a LR scheduler with a linear warm-up.

    Args:
        lr_scheduler (ParamScheduler or subclass of
            `torch.optim.lr_scheduler._LRScheduler`): LR scheduler after the warm-up.
        warmup_start_value (float): LR start value of the warm-up phase.
        warmup_end_value (float): LR end value of the warm-up phase.
        warmup_duration (int): warm-up phase duration, number of events.
        save_history (bool, optional): whether to log the parameter values to
            `engine.state.param_history`, (default=False).
        output_simulated_values (list, optional): optional output of simulated LR values.
            If output_simulated_values is a list of None, e.g. `[None] * 100`, after the
            execution it will be filled by 100 simulated LR values.

    Returns:
        ConcatScheduler: LR scheduler with linear warm-up.

    .. code-block:: python

        torch_lr_scheduler = ExponentialLR(optimizer=optimizer, gamma=0.98)
        lr_values = [None] * 100
        scheduler = create_lr_scheduler_with_warmup(torch_lr_scheduler,
                                                    warmup_start_value=0.0,
                                                    warmup_end_value=0.1,
                                                    warmup_duration=10,
                                                    output_simulated_values=lr_values)
        lr_values = np.array(lr_values)
        # Plot simulated values
        plt.plot(lr_values[:, 0], lr_values[:, 1], label="learning rate")

        # Attach to the trainer
        trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
    """
    if not isinstance(lr_scheduler, (ParamScheduler, _LRScheduler)):
        raise TypeError("Argument lr_scheduler should be a subclass of torch.optim.lr_scheduler._LRScheduler or "
                        "ParamScheduler, but given {}".format(type(lr_scheduler)))
    if isinstance(lr_scheduler, _LRScheduler):
        # Adapt a raw torch scheduler to the ParamScheduler interface.
        lr_scheduler = LRScheduler(lr_scheduler)
    dummy_optimizer = {}
    # Warm-up occupies the rising half of one triangular cycle, hence
    # cycle_size = 2 * warmup_duration.
    warmup_scheduler = LinearCyclicalScheduler(dummy_optimizer, param_name="lr",
                                               start_value=warmup_start_value,
                                               end_value=warmup_end_value,
                                               cycle_size=warmup_duration * 2)
    # Redirect the warm-up scheduler at the real optimizer's parameter groups
    # (it was constructed over a dummy optimizer).
    warmup_scheduler.optimizer_param_groups = lr_scheduler.optimizer_param_groups
    schedulers = [warmup_scheduler, lr_scheduler]
    durations = [warmup_duration, ]
    combined_scheduler = ConcatScheduler(schedulers, durations=durations, save_history=save_history)
    if output_simulated_values is not None:
        if not isinstance(output_simulated_values, list):
            raise TypeError("Argument output_simulated_values should be a list of None, e.g. `[None] * 100`, "
                            "but given {}.".format(type(output_simulated_values)))
        num_events = len(output_simulated_values)
        result = ConcatScheduler.simulate_values(num_events=num_events, schedulers=schedulers, durations=durations)
        for i in range(num_events):
            output_simulated_values[i] = result[i]
    return combined_scheduler
java
/** Returns everything after the path-end separator in the URI, or the empty string when absent. */
public String rawQuery() {
    int queryStart = pathEndIdx() + 1;
    if (queryStart >= uri.length()) {
        return EMPTY_STRING;
    }
    return uri.substring(queryStart);
}
java
/**
 * Merges single GET/GETS commands into one bulk get when the optimization flag is on.
 * (The historical "optimieze" spelling is kept: the method name is public API.)
 */
@SuppressWarnings("unchecked")
public final Command optimiezeGet(final Queue writeQueue, final Queue<Command> executingCmds,
        Command optimiezeCommand) {
    final CommandType type = optimiezeCommand.getCommandType();
    final boolean singleGet = type == CommandType.GET_ONE || type == CommandType.GETS_ONE;
    if (singleGet && this.optimiezeGet) {
        optimiezeCommand = this.mergeGetCommands(optimiezeCommand, writeQueue, executingCmds, type);
    }
    return optimiezeCommand;
}
python
def format_language(language):
    """
    Attempt to format language parameter as 'ww-WW'.

    Accepts two letters, a '-' or '_' separator, and two more letters;
    falsy values are returned unchanged.

    :param string language: language parameter
    :raises TwiMLException: when the value does not match the expected shape.
    """
    if not language:
        return language
    matched = re.match('^[a-zA-Z]{2}[_-][a-zA-Z]{2}$', language)
    if matched is None:
        raise TwiMLException('Invalid value for language parameter.')
    prefix, suffix = language[0:2], language[3:5]
    return '{0}-{1}'.format(prefix.lower(), suffix.upper())
java
/**
 * Generates the body of a Soy element template function: declares a variable
 * holding the element instance looked up by the root element key, constructs a
 * fresh instance when the lookup returned null, then invokes the element's
 * internal render method.
 */
private Statement generateFunctionBodyForSoyElement(TemplateNode node) {
  String soyElementClassName = this.getSoyElementClassName();
  Expression firstElementKey =
      // Since Soy element roots cannot have manual keys (see go/soy-element-keyed-roots),
      // this will always be the first element key.
      JsRuntime.XID.call(Expression.stringLiteral(node.getTemplateName() + "-0"));
  VariableDeclaration elementInstanceDeclaration =
      VariableDeclaration.builder("element")
          .setRhs(
              SOY_IDOM
                  .dotAccess("$$tryGetElement")
                  .call(INCREMENTAL_DOM, id(soyElementClassName), firstElementKey))
          .build();
  // Only construct a new element when $$tryGetElement yielded null.
  Statement maybeCreateElement =
      Statement.ifStatement(
              elementInstanceDeclaration.ref().tripleEquals(Expression.LITERAL_NULL),
              elementInstanceDeclaration
                  .ref()
                  .assign(
                      Expression.construct(
                          id(soyElementClassName), JsRuntime.OPT_DATA, JsRuntime.OPT_IJ_DATA))
                  .asStatement())
          .build();
  Statement elementRenderInvocation =
      elementInstanceDeclaration
          .ref()
          .dotAccess("renderInternal")
          .call(INCREMENTAL_DOM, JsRuntime.OPT_DATA)
          .asStatement();
  return Statement.of(maybeCreateElement, elementInstanceDeclaration, elementRenderInvocation);
}
java
/**
 * Recreates every inventory entry whose configuration no longer matches: each
 * is copied under a fresh identifier into a new configuration file.  Write
 * failures are logged and skipped.
 *
 * @return {@code true} when at least one mismatching entry existed
 */
private boolean recreateMismatching() {
    List<Configuration> mismatching = inventory.removeMismatching();
    for (Configuration entry : mismatching) {
        try {
            Configuration copy = entry.copyWithIdentifier(inventory.nextIdentifier());
            inventory.add(InventoryEntry.create(copy, newConfigurationFile()));
        } catch (IOException e1) {
            logger.log(Level.WARNING, "Failed to write a configuration.", e1);
        }
    }
    return !mismatching.isEmpty();
}
python
def app_profile(
    self,
    app_profile_id,
    routing_policy_type=None,
    description=None,
    cluster_id=None,
    allow_transactional_writes=None,
):
    """Factory to create AppProfile associated with this instance.

    For example:

    .. literalinclude:: snippets.py
        :start-after: [START bigtable_create_app_profile]
        :end-before: [END bigtable_create_app_profile]

    :type app_profile_id: str
    :param app_profile_id: The ID of the AppProfile. Must be of the form
                           ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

    :type routing_policy_type: int
    :param routing_policy_type: The type of the routing policy. Possible
        values are represented by the following constants:
        :data:`google.cloud.bigtable.enums.RoutingPolicyType.ANY`
        :data:`google.cloud.bigtable.enums.RoutingPolicyType.SINGLE`

    :type description: str
    :param description: (Optional) Long form description of the use case for
                        this AppProfile.

    :type cluster_id: str
    :param cluster_id: (Optional) Unique cluster_id which is only required
                       when routing_policy_type is ROUTING_POLICY_TYPE_SINGLE.

    :type allow_transactional_writes: bool
    :param allow_transactional_writes: (Optional) If true, allow
        transactional writes for ROUTING_POLICY_TYPE_SINGLE.

    :rtype: :class:`~google.cloud.bigtable.app_profile.AppProfile`
    :returns: AppProfile for this instance.
    """
    options = dict(
        routing_policy_type=routing_policy_type,
        description=description,
        cluster_id=cluster_id,
        allow_transactional_writes=allow_transactional_writes,
    )
    return AppProfile(app_profile_id, self, **options)
java
/**
 * Builds a change listener that toggles the hosting activity's navigation
 * visibility whenever the preference value changes.
 */
private OnPreferenceChangeListener createHideNavigationChangeListener() {
    return new OnPreferenceChangeListener() {

        @Override
        public boolean onPreferenceChange(final Preference preference, final Object newValue) {
            if (newValue == null) {
                return true;
            }
            final boolean hide = (boolean) newValue;
            ((PreferenceActivity) getActivity()).hideNavigation(hide);
            return true;
        }

    };
}
java
/**
 * Converts a serialized literal into its FNCPatAlign enumerator.
 *
 * @throws IllegalArgumentException when the literal names no enumerator
 */
public FNCPatAlign createFNCPatAlignFromString(EDataType eDataType, String initialValue) {
    final FNCPatAlign parsed = FNCPatAlign.get(initialValue);
    if (parsed != null) {
        return parsed;
    }
    throw new IllegalArgumentException(
        "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
}
java
/**
 * Extracts the history reference from a site tree node value, or {@code null}
 * when the value is not a SiteNode, carries no reference, or the lookup fails
 * (which is logged).
 */
public HistoryReference getHistoryReferenceFromNode(Object value) {
    if (!(value instanceof SiteNode)) {
        return null;
    }
    SiteNode siteNode = (SiteNode) value;
    if (siteNode.getHistoryReference() == null) {
        return null;
    }
    try {
        return siteNode.getHistoryReference();
    } catch (Exception e) {
        log.warn(e.getMessage(), e);
    }
    return null;
}
java
/**
 * Consumes a token beginning with 'l' from the reader: primes the context if
 * it is empty, reads the next character into the state, appends it, skips any
 * whitespace, then delegates the actual literal check to {@code _checkl}.
 */
private State checkl(State state) throws DatatypeException, IOException {
    if (state.context.length() == 0) {
        state = appendToContext(state);
    }
    state.current = state.reader.read();
    state = appendToContext(state);
    state = skipSpaces(state);
    _checkl('l', true, state);
    return state;
}
java
/** Applies the pre-execution hook to the request, then delegates to the generated service call. */
@Override
public CreateContactResult createContact(CreateContactRequest request) {
    final CreateContactRequest prepared = beforeClientExecution(request);
    return executeCreateContact(prepared);
}
java
/**
 * Resets the addressed feature of this object to its default value; the
 * stop-text-exception and replace-text-exception features are handled here,
 * all other feature IDs are delegated to the superclass.
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
    case AfplibPackage.TEXT_FIDELITY__STP_TXT_EX:
        setStpTxtEx(STP_TXT_EX_EDEFAULT);
        return;
    case AfplibPackage.TEXT_FIDELITY__REP_TXT_EX:
        setRepTxtEx(REP_TXT_EX_EDEFAULT);
        return;
    }
    super.eUnset(featureID);
}
java
/**
 * Fixes both minimum dimensions of this cell to the given values.
 *
 * @param width  the minimum width
 * @param height the minimum height
 * @return this cell, for call chaining
 */
public Cell<C,T> minSize (float width, float height) {
    minWidth = new FixedValue<C, T>(layout.toolkit, width);
    minHeight = new FixedValue<C, T>(layout.toolkit, height);
    return this;
}
python
def sync_badges(**kwargs):
    """
    Iterates over registered recipes and creates missing badges.

    :param update: when truthy, forwarded to each recipe's ``create_badge``.
    :returns: list of badges that were newly created.
    """
    force_update = kwargs.get('update', False)
    new_badges = []
    for recipe in registry.get_recipe_instances():
        # Clear Django's query log before each recipe so log_queries reports
        # only that recipe's queries.
        reset_queries()
        badge, was_created = recipe.create_badge(update=force_update)
        if was_created:
            new_badges.append(badge)
        log_queries(recipe)
    return new_badges
java
/**
 * After the Maven module build finishes, asynchronously attaches a
 * MavenDependenciesRecord action carrying the collected dependencies to the
 * build.
 */
@Override
public boolean postBuild(MavenBuildProxy build, MavenProject pom, BuildListener listener)
        throws InterruptedException, IOException {
    build.executeAsync(new BuildCallable<Void, IOException>() {
        // record is transient, so needs to make a copy first
        private final Set<MavenDependency> d = dependencies;

        public Void call(MavenBuild build) throws IOException, InterruptedException {
            // add the action
            //TODO: [by yl] These actions are persisted into the build.xml of each build run - we need another
            //context to store these actions
            build.getActions().add(new MavenDependenciesRecord(build, d));
            return null;
        }
    });
    return true;
}
java
/**
 * Bulk-updates the given GSLB virtual servers.  Copies every updatable field
 * of each resource into a fresh request object and issues one bulk update
 * request for all of them.
 *
 * @param client    nitro service used to send the request
 * @param resources GSLB vservers to update; when null or empty nothing is sent
 * @return the bulk responses, or null when there was nothing to update
 */
public static base_responses update(nitro_service client, gslbvserver resources[]) throws Exception {
    base_responses result = null;
    if (resources != null && resources.length > 0) {
        gslbvserver updateresources[] = new gslbvserver[resources.length];
        for (int i = 0; i < resources.length; i++) {
            updateresources[i] = new gslbvserver();
            updateresources[i].name = resources[i].name;
            updateresources[i].iptype = resources[i].iptype;
            updateresources[i].dnsrecordtype = resources[i].dnsrecordtype;
            updateresources[i].backupvserver = resources[i].backupvserver;
            updateresources[i].backupsessiontimeout = resources[i].backupsessiontimeout;
            updateresources[i].lbmethod = resources[i].lbmethod;
            updateresources[i].backuplbmethod = resources[i].backuplbmethod;
            updateresources[i].netmask = resources[i].netmask;
            updateresources[i].v6netmasklen = resources[i].v6netmasklen;
            updateresources[i].tolerance = resources[i].tolerance;
            updateresources[i].persistencetype = resources[i].persistencetype;
            updateresources[i].persistenceid = resources[i].persistenceid;
            updateresources[i].persistmask = resources[i].persistmask;
            updateresources[i].v6persistmasklen = resources[i].v6persistmasklen;
            updateresources[i].timeout = resources[i].timeout;
            updateresources[i].edr = resources[i].edr;
            updateresources[i].mir = resources[i].mir;
            updateresources[i].disableprimaryondown = resources[i].disableprimaryondown;
            updateresources[i].dynamicweight = resources[i].dynamicweight;
            updateresources[i].considereffectivestate = resources[i].considereffectivestate;
            updateresources[i].somethod = resources[i].somethod;
            updateresources[i].sopersistence = resources[i].sopersistence;
            updateresources[i].sopersistencetimeout = resources[i].sopersistencetimeout;
            updateresources[i].sothreshold = resources[i].sothreshold;
            updateresources[i].sobackupaction = resources[i].sobackupaction;
            updateresources[i].servicename = resources[i].servicename;
            updateresources[i].weight = resources[i].weight;
            updateresources[i].domainname = resources[i].domainname;
            updateresources[i].ttl = resources[i].ttl;
            updateresources[i].backupip = resources[i].backupip;
            updateresources[i].cookie_domain = resources[i].cookie_domain;
            updateresources[i].cookietimeout = resources[i].cookietimeout;
            updateresources[i].sitedomainttl = resources[i].sitedomainttl;
            updateresources[i].comment = resources[i].comment;
            updateresources[i].appflowlog = resources[i].appflowlog;
        }
        result = update_bulk_request(client, updateresources);
    }
    return result;
}
python
def gen_ordered(self):
    """Generate batches of operations, batched by type of operation,
    in the order **provided**.

    Consecutive operations of the same type are coalesced into a single
    ``_Run``; a new run starts whenever the operation type changes.

    :yields: ``_Run`` batches, in input order; nothing for empty ``self.ops``.
    """
    run = None
    for idx, (op_type, operation) in enumerate(self.ops):
        if run is None:
            run = _Run(op_type)
        elif run.op_type != op_type:
            yield run
            run = _Run(op_type)
        run.add(idx, operation)
    # Bug fix: with empty ``self.ops`` the generator previously yielded a
    # bare ``None``; only yield the trailing run when one exists.
    if run is not None:
        yield run
python
def create_backed_vol(self, name, backer, _format='qcow2'):
    """
    TODO(rdelinger) think about changing _format

    Create a new volume backed by an existing one.  Libvirt exposes no
    direct API for this, so the <volume> XML is assembled by hand and
    submitted through ``createXML``.

    :param name: base name for the new volume; the format is appended as
                 a file extension.
    :param backer: existing volume supplying capacity, path and format.
    :param _format: image format of the new volume (default ``qcow2``).
    :returns: the freshly created volume, looked up by its full name.
    """
    root = ElementTree.Element('volume')
    full_name = '{0}.{1}'.format(name, _format)
    ElementTree.SubElement(root, 'name').text = full_name
    target = ElementTree.SubElement(root, 'target')
    ElementTree.SubElement(target, 'format').set('type', _format)
    capacity = ElementTree.SubElement(root, 'capacity')
    capacity.set('unit', 'bytes')
    # @TODO(rdelinger) this should be dynamic
    capacity.text = backer.capacity
    backing = ElementTree.SubElement(root, 'backingStore')
    ElementTree.SubElement(backing, 'path').text = backer.path
    ElementTree.SubElement(backing, 'format').set('type', backer.format)
    self.virsp.createXML(ElementTree.tostring(root), 0)
    return self.find_volume(full_name)
python
def update_sentry_logging(logging_dict: DictStrAny, sentry_dsn: Optional[str], *loggers: str, level: Union[str, int] = None, **kwargs: Any) -> None: r"""Enable Sentry logging if Sentry DSN passed. .. note:: Sentry logging requires `raven <http://pypi.python.org/pypi/raven>`_ library to be installed. **Usage**:: from logging.config import dictConfig LOGGING = default_logging_dict() SENTRY_DSN = '...' update_sentry_logging(LOGGING, SENTRY_DSN) dictConfig(LOGGING) **Using AioHttpTransport for SentryHandler** This will allow to use ``aiohttp.client`` for pushing data to Sentry in your ``aiohttp.web`` app, which means elimination of sync calls to Sentry. :: from raven_aiohttp import AioHttpTransport update_sentry_logging(LOGGING, SENTRY_DSN, transport=AioHttpTransport) :param logging_dict: Logging dict. :param sentry_dsn: Sentry DSN value. If ``None`` do not update logging dict at all. :param \*loggers: Use Sentry logging for each logger in the sequence. If the sequence is empty use Sentry logging to each available logger. :param \*\*kwargs: Additional kwargs to be passed to ``SentryHandler``. """ # No Sentry DSN, nothing to do if not sentry_dsn: return # Add Sentry handler kwargs['class'] = 'raven.handlers.logging.SentryHandler' kwargs['dsn'] = sentry_dsn logging_dict['handlers']['sentry'] = dict( level=level or 'WARNING', **kwargs) loggers = tuple(logging_dict['loggers']) if not loggers else loggers for logger in loggers: # Ignore missing loggers logger_dict = logging_dict['loggers'].get(logger) if not logger_dict: continue # Ignore logger from logger config if logger_dict.pop('ignore_sentry', False): continue # Handlers list should exist handlers = list(logger_dict.setdefault('handlers', [])) handlers.append('sentry') logger_dict['handlers'] = tuple(handlers)
python
def return_handler(module_logger, first_is_session=True):
    """Decorator for VISA library classes.

    Wraps a low-level VISA library method so that its integer return code is
    debug-logged, converted to ``constants.StatusCode`` when recognized,
    recorded as the last status, raised as ``VisaIOError`` when negative, and
    reported as ``VisaIOWarning`` when listed in ``self.issue_warning_on``.

    :param module_logger: logger used for the per-call debug trace.
    :param first_is_session: when True, the wrapped method's first positional
        argument is treated as a session handle.
    """
    def _outer(visa_library_method):
        def _inner(self, session, *args, **kwargs):
            ret_value = visa_library_method(*args, **kwargs)
            module_logger.debug('%s%s -> %r',
                                visa_library_method.__name__,
                                _args_to_str(args, kwargs),
                                ret_value)
            try:
                ret_value = constants.StatusCode(ret_value)
            except ValueError:
                # Unknown status code: keep the raw integer.
                pass
            if first_is_session:
                # NOTE(review): source was collapsed to one line; both status
                # assignments read as guarded by first_is_session — confirm
                # against upstream before relying on this.
                self._last_status = ret_value
                self._last_status_in_session[session] = ret_value
            if ret_value < 0:
                raise VisaIOError(ret_value)
            if ret_value in self.issue_warning_on:
                if session and ret_value not in self._ignore_warning_in_session[session]:
                    module_logger.warn(VisaIOWarning(ret_value), stacklevel=2)
            return ret_value
        return _inner
    return _outer
python
def _section_execution_order(self, section, iterargs,
                             reverse=False,
                             custom_order=None,
                             explicit_checks: Iterable = None,
                             exclude_checks: Iterable = None):
    """
    order must:
      a) contain all variable args (we're appending missing ones)
      b) not contain duplicates (we're removing repeated items)

    order may contain *iterargs otherwise it is appended
    to the end

    order may contain "*check" otherwise, it is like *check is appended
    to the end (Not done explicitly though).
    """
    stack = list(custom_order) if custom_order is not None else list(section.order)
    if '*iterargs' not in stack:
        stack.append('*iterargs')
    stack.reverse()

    # Expand the order specification into a duplicate-free, flat order.
    full_order = []
    seen = set()
    while len(stack):
        item = stack.pop()
        if item in seen:
            continue
        seen.add(item)
        if item == '*iterargs':
            all_iterargs = list(iterargs.keys())
            # assuming there is a meaningful order
            all_iterargs.reverse()
            stack += all_iterargs
            continue
        full_order.append(item)

    # Filter down checks. Checks to exclude are filtered for last as the user
    # might e.g. want to include all tests with "kerning" in the ID, except for
    # "kerning_something". explicit_checks could then be ["kerning"] and
    # exclude_checks ["something"].
    checks = section.checks
    if explicit_checks:
        checks = [
            check for check in checks
            if any(include_string in check.id for include_string in explicit_checks)
        ]
    if exclude_checks:
        checks = [
            check for check in checks
            if not any(exclude_string in check.id for exclude_string in exclude_checks)
        ]

    scopes = self._analyze_checks(full_order, checks)
    key = lambda item: item[1]  # check, signature, scope = item
    scopes.sort(key=key, reverse=reverse)

    for check, args in self._execute_scopes(iterargs, scopes):
        # this is the iterargs tuple that will be used as a key for caching
        # and so on. we could sort it, to ensure it yields in the same
        # cache locations always, but then again, it is already in a well
        # defined order, by clustering.
        yield check, tuple(args)
python
def _exec_vector(self, a, bd, xy, xy_orig, mask, n_withdrifts, spec_drift_grids):
    """Solves the kriging system as a vectorized operation. This method
    can take a lot of memory for large grids and/or large datasets."""
    npt = bd.shape[0]
    n = self.X_ADJUSTED.shape[0]
    zero_index = None
    zero_value = False
    a_inv = scipy.linalg.inv(a)

    # Points coinciding with data locations (distance below eps) get an
    # exact-interpolation right-hand side of zero.
    if np.any(np.absolute(bd) <= self.eps):
        zero_value = True
        zero_index = np.where(np.absolute(bd) <= self.eps)

    # One extra row when the unbiasedness (Lagrange) constraint is active.
    if self.UNBIAS:
        b = np.zeros((npt, n_withdrifts+1, 1))
    else:
        b = np.zeros((npt, n_withdrifts, 1))
    b[:, :n, 0] = - self.variogram_function(self.variogram_model_parameters, bd)
    if zero_value:
        b[zero_index[0], zero_index[1], 0] = 0.0

    # Fill the drift rows in a fixed order; `i` tracks the current row.
    i = n
    if self.regional_linear_drift:
        b[:, i, 0] = xy[:, 0]
        i += 1
        b[:, i, 0] = xy[:, 1]
        i += 1
    if self.point_log_drift:
        for well_no in range(self.point_log_array.shape[0]):
            log_dist = np.log(np.sqrt((xy[:, 0] - self.point_log_array[well_no, 0])**2 +
                                      (xy[:, 1] - self.point_log_array[well_no, 1])**2))
            if np.any(np.isinf(log_dist)):
                # Clamp the singularity at zero distance.
                log_dist[np.isinf(log_dist)] = -100.0
            b[:, i, 0] = - self.point_log_array[well_no, 2] * log_dist
            i += 1
    if self.external_Z_drift:
        b[:, i, 0] = self._calculate_data_point_zscalars(xy_orig[:, 0], xy_orig[:, 1])
        i += 1
    if self.specified_drift:
        for spec_vals in spec_drift_grids:
            b[:, i, 0] = spec_vals.flatten()
            i += 1
    if self.functional_drift:
        for func in self.functional_drift_terms:
            b[:, i, 0] = func(xy[:, 0], xy[:, 1])
            i += 1
    if i != n_withdrifts:
        warnings.warn("Error in setting up kriging system. "
                      "Kriging may fail.", RuntimeWarning)
    if self.UNBIAS:
        b[:, n_withdrifts, 0] = 1.0

    if (~mask).any():
        mask_b = np.repeat(mask[:, np.newaxis, np.newaxis], n_withdrifts+1, axis=1)
        b = np.ma.array(b, mask=mask_b)

    if self.UNBIAS:
        x = np.dot(a_inv, b.reshape((npt, n_withdrifts+1)).T).reshape((1, n_withdrifts+1, npt)).T
    else:
        x = np.dot(a_inv, b.reshape((npt, n_withdrifts)).T).reshape((1, n_withdrifts, npt)).T
    zvalues = np.sum(x[:, :n, 0] * self.Z, axis=1)
    sigmasq = np.sum(x[:, :, 0] * -b[:, :, 0], axis=1)

    return zvalues, sigmasq
python
def _FromSpecs(self, specs):
    """Populate ``self.params`` from a specification.

    Arguments:
      specs -- either:
        (a) a list like ``[(name, {...}), ...]`` (see Parameter.FromSpec()
            for further information)
        (b) a dictionary like ``{"name": value, ...}``
    """
    if isinstance(specs, dict):
        # Normalize the dict form into the list-of-tuples form.
        normalized = [(key, {"value": val}) for key, val in specs.items()]
    else:
        normalized = specs
    for entry in normalized:
        self.params.append(Parameter(entry))
java
/**
 * Reads the component's outgoing port names from its JSON configuration.
 *
 * @return the names under {@code ports.out}, or an empty list when missing
 */
public List<String> getOutPorts() {
    List<String> names = new ArrayList<>();
    JsonObject portsSection = config.getObject("ports");
    JsonArray outSection = portsSection == null ? null : portsSection.getArray("out");
    if (outSection != null) {
        for (Object portName : outSection) {
            names.add((String) portName);
        }
    }
    return names;
}
java
/** Validates the subtree rooted at {@code node}, with no key bounds on the root. */
static <K extends Comparable<K>> void checkNode(Node<K> node) {
    node.checkNode(null, null);
}
java
/**
 * On leaving the query rule, turns every collected vertex and edge into
 * predicates, supplying the default label where none was given.
 */
@Override
public void exitQuery(GDLParser.QueryContext ctx) {
    for (Vertex vertex : vertices) {
        addPredicates(Predicate.fromGraphElement(vertex, getDefaultVertexLabel()));
    }
    for (Edge edge : edges) {
        addPredicates(Predicate.fromGraphElement(edge, getDefaultEdgeLabel()));
    }
}
python
def _callRestartAgent(self, ev_data: RestartLogData, failTimeout) -> None:
    """
    Callback which is called when restart time comes.
    Writes a started record to the restart log and asks the node control
    service to perform the restart.

    :param ev_data: restart event data
    :param failTimeout: timeout forwarded to the node-control update request
    """
    logger.info("{}'s restart calling agent for restart".format(self))
    self._actionLog.append_started(ev_data)
    self._action_start_callback()
    self.scheduledAction = None
    # Fire-and-forget: the update request runs on the current event loop.
    asyncio.ensure_future(
        self._sendUpdateRequest(ev_data, failTimeout))
python
def insertion(args):
    """
    %prog insertion mic.mac.bed

    Find IES based on mapping MIC reads to MAC genome. Output a bedfile with
    'lesions' (stack of broken reads) in the MAC genome.
    """
    p = OptionParser(insertion.__doc__)
    p.add_option("--mindepth", default=6, type="int",
                 help="Minimum depth to call an insertion")
    p.set_outfile()
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    bedfile, = args
    mindepth = opts.mindepth
    bed = Bed(bedfile)
    fw = must_open(opts.outfile, "w")
    for seqid, feats in bed.sub_beds():
        # Count how many reads start / end at each coordinate on this contig.
        left_ends = Counter([x.start for x in feats])
        right_ends = Counter([x.end for x in feats])
        selected = []
        for le, count in left_ends.items():
            if count >= mindepth:
                selected.append((seqid, le, "LE-{0}".format(le), count))
        # NOTE(review): loop variable `re` shadows the stdlib regex module
        # name within this scope.
        for re, count in right_ends.items():
            if count >= mindepth:
                selected.append((seqid, re, "RE-{0}".format(re), count))
        selected.sort()
        for seqid, pos, label, count in selected:
            label = "{0}-r{1}".format(label, count)
            # Emit a 1-bp BED interval at the breakpoint, tagged with depth.
            print("\t".join((seqid, str(pos - 1), str(pos), label)), file=fw)
python
def getApplicationSupportedMimeTypes(self, pchAppKey, pchMimeTypesBuffer, unMimeTypesBuffer):
    """Get the list of supported mime types for this application, comma-delimited"""
    return self.function_table.getApplicationSupportedMimeTypes(
        pchAppKey, pchMimeTypesBuffer, unMimeTypesBuffer)
python
def evil(expr, lookup, operators, cast, reducer, tokenizer):
    """evil evaluates an expression according to the eval description given.

    :param expr: An expression to evaluate.
    :param lookup: A callable which takes a single pattern argument and
        returns a set of results. The pattern can be anything that is not an
        operator token or round brackets.
    :param operators: A precedence-ordered dictionary of (function, side)
        tuples keyed on the operator token.
    :param reducer: A callable which takes a sequential list of values (from
        operations or lookups) and combines them into a result. Typical
        behaviour is that of the + operator. The return type should be the
        same as cast.
    :param cast: A callable which transforms the results of the lookup into
        the type expected by the operators and the type of the result.
    :param tokenizer: A callable which will break the query into tokens for
        evaluation per the lookup and operators. Defaults to
        setquery.query_tokenizer.
    :raises: SyntaxError
    :returns:
    """
    operators = OrderedDict((op[0], op[1:]) for op in operators)
    if "(" in operators or ")" in operators:
        raise ValueError("( and ) are reserved operators")
    # NOTE(review): Python 2 idiom — dict.keys() returns a list here; on
    # Python 3 this concatenation raises TypeError.
    operator_tokens = ["(", ")"] + operators.keys()
    tokens = iter(tokenizer(expr, operator_tokens))
    levels = [[]]
    while True:
        # Token evaluation and pattern lookups
        expr = levels.pop()            # The currently-constructed expression
        new_level = False              # We should step into a subexpression
        first_token = len(expr) == 0   # The first (sub)exp. token
        prev_op_side = None            # The side of the last-seen operator
        try:
            # Try to get the side of the last operator from an expression
            # which we are going to continue constructing.
            prev_op_side = operators[expr[-1]][1]
        except:
            # NOTE(review): bare except — also swallows unrelated errors,
            # not just a missing/unhashable last element.
            pass
        for token in tokens:
            if token == "(":
                new_level = True
                break
            elif token == ")":
                break
            elif token in operators:
                op_side = operators[token][1]
                if first_token and op_side & OP_LEFT:
                    raise SyntaxError("Operators which act on expressions to "
                                      "their left or both sides cannot be at "
                                      "the beginning of an expression.")
                if prev_op_side is not None:
                    if prev_op_side & OP_RIGHT and op_side & OP_LEFT:
                        raise SyntaxError("Operators cannot be beside one "
                                          "another if they act on expressions "
                                          "facing one-another.")
                expr.append(token)
                prev_op_side = op_side
                continue
            else:
                expr.append(cast(lookup(token)))
                prev_op_side = None
            first_token = False
        if new_level:
            # Descend into the bracketed subexpression.
            levels.append(expr)
            levels.append([])
            continue
        elif prev_op_side is not None and prev_op_side & OP_RIGHT:
            raise SyntaxError("Operators which act on expressions to their "
                              "right or both sides cannot be at the end of "
                              "an expression.")
        # Operator evaluation
        explen = len(expr)
        # NOTE(review): .iteritems() is Python-2-only.
        for op, (op_eval, op_side) in operators.iteritems():
            if op_side is OP_RIGHT:
                # Apply right-sided operators. We loop from the end backward so
                # that multiple such operators next to noe another are resolved
                # in the correct order
                t = explen - 1
                while t >= 0:
                    if expr[t] == op:
                        expr[t] = op_eval(expr[t + 1])
                        del expr[t + 1]
                        explen -= 1
                    t -= 1
            else:
                # Apply left- and both-sided operators. We loop forward so that
                # that multiple such operators next to one another are resolved
                # in the correct order.
                t = 0
                while t < explen:
                    if expr[t] == op:
                        # Apply left- or both-sided operators
                        if op_side is OP_LEFT:
                            expr[t] = op_eval(expr[t - 1])
                            del expr[t - 1]
                            t -= 1
                            explen -= 1
                        elif op_side is OP_BOTH:
                            expr[t] = op_eval(expr[t - 1], expr[t + 1])
                            del expr[t + 1], expr[t - 1]
                            t -= 1
                            explen -= 2
                    t += 1
        if len(levels) > 0:
            # Fold this subexpression's value into the enclosing level.
            levels[-1].append(reducer(expr))
        else:
            break
    return reducer(expr)
python
def xpath(node, query, namespaces=None):
    """A safe xpath that only uses namespaces if available.

    :param node: element exposing an lxml-style ``xpath`` method.
    :param query: the XPath expression to evaluate.
    :param namespaces: optional prefix->URI mapping; ignored when empty or
        when it contains the bogus ``'None'`` prefix.
    :returns: whatever ``node.xpath`` returns.
    """
    # Bug fix: the default was a shared mutable dict ({}); ``None`` behaves
    # identically under the truthiness test without the aliasing hazard.
    if namespaces and 'None' not in namespaces:
        return node.xpath(query, namespaces=namespaces)
    return node.xpath(query)
python
def find_cc(arch, args, sp_delta):
    """
    Pinpoint the best-fit calling convention and return the corresponding SimCC instance, or None
    if no fit is found.

    :param Arch arch:       An ArchX instance. Can be obtained from archinfo.
    :param list args:       A list of arguments.
    :param int sp_delta:    The change of stack pointer before and after the call is made.
    :return:                A calling convention instance, or None if none of the SimCC subclasses
                            seems to fit the arguments provided.
    :rtype:                 SimCC or None
    """
    try:
        candidates = CC[arch.name]
    except KeyError:
        # No calling conventions are registered for this architecture.
        return None
    # Instantiate the first convention class whose matcher accepts the call.
    return next(
        (cls(arch, args=args, sp_delta=sp_delta)
         for cls in candidates
         if cls._match(arch, args, sp_delta)),
        None,
    )
python
def line_pos_from_number(self, line_number):
    """
    Computes line position on Y-Axis (at the center of the line) from line
    number.

    :param line_number: The line number for which we want to know the
                        position in pixels.
    :return: The center position of the line.
    """
    editor = self._editor
    block = editor.document().findBlockByNumber(line_number)
    offset = editor.contentOffset()
    if block.isValid():
        # Normal case: top edge of the block's translated bounding geometry.
        geometry = editor.blockBoundingGeometry(block).translated(offset)
        return int(geometry.top())
    if line_number <= 0:
        # Before the first line: clamp to the top of the widget.
        return 0
    # Past the last line: use the bottom edge of the preceding block.
    geometry = editor.blockBoundingGeometry(block.previous()).translated(offset)
    return int(geometry.bottom())
python
def remove_object(collision_object): """Remove the collision object from the Manager""" global collidable_objects if isinstance(collision_object, CollidableObj): # print "Collision object of type ", type(collision_object), " removed from the collision manager." try: collidable_objects.remove(collision_object) except: print "Ragnarok Says: Collision_Object with ID # " + str( collision_object.obj_id) + " could not be found in the Collision Manager. Skipping over..."
java
/**
 * Returns a decoder for the given layout generation, consulting first a
 * per-instance cache and then a shared static cache before generating one.
 *
 * @param generation layout generation to decode
 * @throws FetchNoneException if this codec has no layout (layout evolution unsupported)
 */
@Deprecated
public Decoder<S> getDecoder(int generation) throws FetchNoneException, FetchException {
    try {
        // Per-instance cache is guarded by the layout lock.
        synchronized (mLayout) {
            IntHashMap decoders = mDecoders;
            if (decoders == null) {
                // Lazily create the generation -> decoder map.
                mDecoders = decoders = new IntHashMap();
            }
            Decoder<S> decoder = (Decoder<S>) decoders.get(generation);
            if (decoder == null) {
                // Miss: consult the shared (static) cache before generating.
                synchronized (cCodecDecoders) {
                    Object altLayoutKey = new LayoutKey(mLayout.getGeneration(generation));
                    Object key = KeyFactory.createKey
                        // Note: Generation is still required in the key
                        // because an equivalent layout (with different generation)
                        // might have been supplied by Layout.getGeneration.
                        (new Object[] {mCodecKey, generation, altLayoutKey});
                    decoder = (Decoder<S>) cCodecDecoders.get(key);
                    if (decoder == null) {
                        decoder = generateDecoder(generation);
                        cCodecDecoders.put(key, decoder);
                    }
                }
                mDecoders.put(generation, decoder);
            }
            return decoder;
        }
    } catch (NullPointerException e) {
        // A null mLayout is the expected cause; anything else is a real bug.
        if (mLayout == null) {
            throw new FetchNoneException("Layout evolution not supported");
        }
        throw e;
    }
}
python
def runcoro(async_function):
    """
    Runs an asynchronous function without needing to use await - useful for lambda

    Args:
        async_function (Coroutine): The asynchronous function to run
    """
    # Schedule the coroutine on the client's event loop from this thread and
    # block until it finishes, returning its result.
    pending = _asyncio.run_coroutine_threadsafe(async_function, client.loop)
    return pending.result()
java
/**
 * Reimages the specified compute node, applying any custom and additional
 * client behaviors to the request options before delegating to the
 * protocol layer.
 */
public void reimageComputeNode(String poolId, String nodeId, ComputeNodeReimageOption nodeReimageOption, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
    // Build request options and let registered behaviors adjust them.
    final ComputeNodeReimageOptions reimageOptions = new ComputeNodeReimageOptions();
    new BehaviorManager(this.customBehaviors(), additionalBehaviors).applyRequestBehaviors(reimageOptions);
    // The actual reimage call happens at the protocol layer.
    this.parentBatchClient.protocolLayer().computeNodes().reimage(poolId, nodeId, nodeReimageOption, reimageOptions);
}
java
/**
 * Returns the existing <code>form-login-config</code> child element,
 * creating it first if it is not present.
 */
public FormLoginConfigType<LoginConfigType<T>> getOrCreateFormLoginConfig() {
    // getOrCreate is idempotent: it reuses an existing child node.
    final Node formLoginNode = childNode.getOrCreate("form-login-config");
    return new FormLoginConfigTypeImpl<LoginConfigType<T>>(this, "form-login-config", childNode, formLoginNode);
}
java
/**
 * Formats the given duration as human-readable text; a null duration is
 * rendered as "now".
 */
public String format(final Duration duration) {
    if (duration == null) {
        // No duration supplied: format the current instant instead.
        return format(now());
    }
    final TimeFormat timeFormat = getFormat(duration.getUnit());
    // Format the raw quantity, then apply unit-specific decoration (e.g. "ago").
    final String rendered = timeFormat.format(duration);
    return timeFormat.decorate(duration, rendered);
}
python
def ensure_string_list(self, option):
    r"""Ensure that 'option' is a list of strings.  If 'option' is
    currently a string, we split it either on /,\s*/ or /\s+/, so
    "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
    ["foo", "bar", "baz"].
    """
    val = getattr(self, option)
    if val is None:
        return
    if isinstance(val, string_types):
        # Split on commas (with optional trailing whitespace) or runs of spaces.
        setattr(self, option, re.split(r',\s*|\s+', val))
        return
    # Anything else must already be a list whose members are all strings.
    is_valid = isinstance(val, list) and all(
        isinstance(item, string_types) for item in val)
    if not is_valid:
        raise DistutilsOptionError(
            "'%s' must be a list of strings (got %r)" % (option, val))
java
/** Adds all packages from the set by delegating to the array overload. */
public void addImportedPackages(Set<String> importedPackages) {
    // Convert the set into an array of the exact size before delegating.
    String[] packages = importedPackages.toArray(new String[importedPackages.size()]);
    addImportedPackages(packages);
}
python
def deaccent(text):
    """
    Remove accentuation from the given string.
    """
    # Decompose so combining marks become standalone code points (NFD),
    # drop every combining mark (category 'Mn'), then recompose (NFC).
    decomposed = unicodedata.normalize("NFD", text)
    stripped = "".join(
        ch for ch in decomposed if unicodedata.category(ch) != "Mn"
    )
    return unicodedata.normalize("NFC", stripped)
java
/**
 * Parses a view name of the form "prefix[[name=value,...]]postfix" into a
 * parameter map. The text before the first bracket pair becomes the URL
 * prefix parameter and the text after the last bracket pair the postfix
 * parameter; the comma-separated name=value list between the inner
 * brackets is tokenized (single quotes may escape separators).
 *
 * @return the parameter map, or null when no valid template descriptor
 *         (matching double bracket pairs) is present
 */
static protected Map<String, String> extractTemplateParameters(String viewName){
    int i1 = -1;
    int i2 = -1;
    int i3 = -1;
    int i4 = -1;
    // i1/i2: first two opening brackets; i3/i4: last two closing brackets.
    i1 = viewName.indexOf(LEFT_BRACKET);
    if (i1 != -1){
        i2 = viewName.indexOf(LEFT_BRACKET, i1+1);
    }
    i4 = viewName.lastIndexOf(RIGHT_BRACKET);
    if (i4 != -1){
        i3 = viewName.lastIndexOf(RIGHT_BRACKET, i4-1);
    }
    if ((i1 == -1 || i4 == -1)      // no starting or ending
            || (i2 == -1 || i3 == -1)   // not found
            || (i2 > i3)){              // not matching
        return null;    // no valid template descriptor
    }
    //////// the format is guaranteed to be valid after this point
    Map<String, String> parameters = new HashMap<String, String>();
    if (i1 > 0){
        // Text before the descriptor becomes the URL prefix.
        String prefix = viewName.substring(0, i1);
        parameters.put(StdrUtil.URL_PREFIX_PARAMETER, prefix);
    }
    if (i4 < viewName.length() - 1){
        // Text after the descriptor becomes the URL postfix.
        String postfix = viewName.substring(i4 + 1);
        parameters.put(StdrUtil.URL_POSTFIX_PARAMETER, postfix);
    }
    // Tokenize "name=value,name=value" between the inner brackets;
    // single quotes act as the quote character.
    StrTokenizer tokenizer = new StrTokenizer(viewName.substring(i2+1, i3),
            StrMatcher.charSetMatcher(',', '='),
            StrMatcher.singleQuoteMatcher());
    tokenizer.setEmptyTokenAsNull(true);
    while(tokenizer.hasNext()){
        String name = tokenizer.next();
        String value = null;
        try{
            value = tokenizer.next();
        }catch(NoSuchElementException e){
            // do nothing, value should be null anyway.
        }
        parameters.put(name, value);
    }
    return parameters;
}
python
def memoized(func):
    """Decorate a function to memoize results.

    Functions wraped by this decorator won't compute twice for each input.
    Any results will be stored. This decorator might increase used memory
    in order to shorten computational time.
    """
    results = {}

    @wraps(func)
    def memoized_function(*args):
        """The decorated function.
        """
        if args in results:
            return results[args]
        value = func(*args)
        try:
            results[args] = value
        except MemoryError:
            # The cache itself ran out of room: drop it and reclaim memory,
            # but still hand back the freshly computed value.
            results.clear()
            gc.collect()
        return value

    return memoized_function
java
/**
 * Parses a date string into a UTC DateTime. Accepts "now", a plain
 * yyyy-MM-dd date (10 chars), an ISO-8601 date-time without millis
 * (20 chars), or one with millis (24 chars). For plain dates, {@code floor}
 * selects the start of the day versus its last millisecond. Any other
 * length falls back to {@code defaultDate}.
 */
private static DateTime parseDate(String dateStr, DateTime defaultDate, boolean floor) {
    if ("now".equals(dateStr)) { //$NON-NLS-1$
        return new DateTime();
    }
    if (dateStr.length() == 10) {
        DateTime parsed = ISODateTimeFormat.date().withZone(DateTimeZone.UTC).parseDateTime(dateStr);
        // If what we want is the floor, then just return it.  But if we want the
        // ceiling of the date, then we need to set the right params.
        if (!floor) {
            // Last millisecond of the given day.
            parsed = parsed.plusDays(1).minusMillis(1);
        }
        return parsed;
    }
    if (dateStr.length() == 20) {
        return ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC).parseDateTime(dateStr);
    }
    if (dateStr.length() == 24) {
        return ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC).parseDateTime(dateStr);
    }
    // Unrecognized format: caller-provided default.
    return defaultDate;
}
java
public static String extractMultiAndDelPre(String regex, Holder<CharSequence> contentHolder, String template) { if (null == contentHolder || null == regex || null == template) { return null; } // Pattern pattern = Pattern.compile(regex, Pattern.DOTALL); final Pattern pattern = PatternPool.get(regex, Pattern.DOTALL); return extractMultiAndDelPre(pattern, contentHolder, template); }
java
/**
 * Replaces the grants list with a copy of the given collection; a null
 * collection clears the field.
 */
public void setGrants(java.util.Collection<GrantListEntry> grants) {
    // Defensive copy into the SDK's internal list type; null clears.
    this.grants = (grants == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<GrantListEntry>(grants);
}
python
def _compute_mean(self, imt, mag, rhypo):
    """
    Compute mean value from lookup table.

    Lookup table defines log10(IMT) (in g) for combinations of Mw and
    log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw
    using Atkinson and Boore 1987 conversion equation. Mean value is
    finally converted from base 10 to base e.

    :param imt: Intensity measure type, used to select the lookup table.
    :param mag: Scalar magnitude (Mblg), broadcast to the shape of ``rhypo``.
    :param rhypo: Array of hypocentral distances in km; not modified.
    :returns: Array of mean ground-motion values in natural-log units.
    """
    mag = np.zeros_like(rhypo) + self._convert_magnitude(mag)

    # to avoid run time warning in case rhypo is zero set minimum distance
    # to 10, which is anyhow the minimum distance allowed by the tables.
    # Clip into a copy: the original code mutated the caller's array in
    # place (rhypo[rhypo < 10] = 10), which leaked the clamp to callers.
    rhypo = np.log10(np.clip(rhypo, 10, None))

    # create lookup table and interpolate it at magnitude/distance values
    table = RectBivariateSpline(
        self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T
    )
    mean = table.ev(mag, rhypo)

    # convert mean from base 10 to base e
    return mean * np.log(10)
java
/**
 * After a successful record update, dispatches the pending message
 * (m_message) either directly (possibly via a background page worker) or
 * through the message manager's queue, registering a one-time local
 * listener filter on first use. This listener removes itself afterwards.
 *
 * @param field the field that changed
 * @param iChangeType the type of change (acts only on AFTER_UPDATE_TYPE)
 * @param bDisplayOption whether to display the change
 * @return the error code from the superclass record change
 */
public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
    int iErrorCode = super.doRecordChange(field, iChangeType, bDisplayOption);
    if (iChangeType == DBConstants.AFTER_UPDATE_TYPE) {
        MessageManager messageManager = ((Application)this.getOwner().getTask().getApplication()).getMessageManager();
        if (messageManager != null) {
            BaseMessageHeader messageHeader = m_message.getMessageHeader();
            String strQueueType = messageHeader.getQueueType();
            String strQueueName = messageHeader.getQueueName();
            if (MessageConstants.LOCAL_QUEUE.equalsIgnoreCase(strQueueType)) {
                BaseMessageReceiver messageReceiver = (BaseMessageReceiver)messageManager.getMessageQueue(strQueueName, strQueueType).getMessageReceiver();
                if (!messageReceiver.getFilterList(messageHeader).hasNext()) {
                    // First time, make sure this receiver can handle my messages
                    BaseMessageFilter messageFilter = new BaseMessageFilter(strQueueName, strQueueType, null, null);
                    String strProcessClass = null;
                    Map<String,Object> properties = null;
                    // Set up the trx message hander in my local message listener
                    BaseApplication application = (BaseApplication)this.getOwner().getTask().getApplication();
                    // Note: By adding this to the message app, I don't have to worry about a message being added to a freed app.
                    application = (BaseApplication)application.getEnvironment().getMessageApplication(true, application.getProperties());
                    new TrxMessageListener(messageFilter, application, strProcessClass, properties);    // This listener was added to the filter
                    messageReceiver.addMessageFilter(messageFilter);
                }
            }
            if (MessageTransportModel.DIRECT.equalsIgnoreCase((String)messageHeader.get(MessageTransportModel.SEND_MESSAGE_BY_PARAM))) {
                if (this.getOwner().getTask() instanceof SyncPage) {
                    // Since this may be time-consuming, display the hour glass (and lock the window, since I'm using your task).
                    Map<String,Object> map = new HashMap<String,Object>();
                    map.put("message", m_message);
                    map.put("transport", getDirectMessageTransport(this.getOwner().getTask()));
                    // Worker callback: send the message on completion, then free the transport.
                    SyncNotify syncNotify = new SyncNotifyAdapter() {
                        public void done() {
                            BaseRecordOwner transport = (BaseRecordOwner)this.get("transport");
                            BaseMessage message = (BaseMessage)this.get("message");
                            ((MessageSender)transport).sendMessage(message);
                            transport.free();
                        }
                    };
                    TaskScheduler.startPageWorker((SyncPage)this.getOwner().getTask(), syncNotify, null, map, true);
                } else {
                    // No sync page available: send synchronously on this thread.
                    BaseRecordOwner transport = getDirectMessageTransport(this.getOwner().getTask());
                    ((MessageSender)transport).sendMessage(m_message);
                    transport.free();
                }
            } else
                messageManager.sendMessage(m_message);
        }
        m_message = null;
        this.getOwner().removeListener(this, true); // ONE TIME SHOT
    }
    return iErrorCode;
}
python
def expand_row(table_fields, fields, values):
    "helper for insert. turn (field_names, values) into the full-width, properly-ordered row"
    # Position of each supplied field inside the table's column order;
    # .index() raises ValueError for names not present in the table.
    column_names = [field.name for field in table_fields]
    value_position = {column_names.index(name): pos
                      for pos, name in enumerate(fields)}
    row = []
    for column in range(len(table_fields)):
        pos = value_position.get(column)
        # Columns with no supplied value are filled with the Missing sentinel.
        row.append(Missing if pos is None else values[pos])
    return row
python
def check_credentials(self):
    ''' Check credentials '''
    response = requests.get(self.api_server + '/api/ad',
                            auth=(self.access_key, self.secret_key))
    # Not sure 500 server error should be included here
    return response.status_code not in (401, 403, 500)
java
/**
 * Convenience overload: publishes the default publish list for the
 * current project of the given CMS context.
 */
public CmsUUID publishProject(CmsObject cms, I_CmsReport report) throws CmsException {
    return publishProject(cms, report, getPublishList(cms));
}
python
def Send(self, url, opname, obj, nsdict={}, soapaction=None, wsaction=None,
         endPointReference=None, soapheaders=(), **kw):
    '''Send a message.  If url is None, use the value from the
    constructor (else error). obj is the object (data) to send.
    Data may be described with a requesttypecode keyword, the default
    is the class's typecode (if there is one), else Any.

    Try to serialize as a Struct, if this is not possible serialize an Array.
    If data is a sequence of built-in python data types, it will be
    serialized as an Array, unless requesttypecode is specified.

    arguments:
        url --
        opname -- struct wrapper
        obj -- python instance

    key word arguments:
        nsdict --
        soapaction --
        wsaction -- WS-Address Action, goes in SOAP Header.
        endPointReference --  set by calling party, must be an
            EndPointReference type instance.
        soapheaders -- list of pyobj, typically w/typecode attribute.
            serialized in the SOAP:Header.
        requesttypecode --
    '''
    url = url or self.url
    endPointReference = endPointReference or self.endPointReference

    # Serialize the object.
    d = {}
    d.update(self.nsdict)
    d.update(nsdict)

    sw = SoapWriter(nsdict=d, header=True, outputclass=self.writerclass,
                    encodingStyle=kw.get('encodingStyle'),)

    # Pick a typecode: explicit requesttypecode wins, then the object's own
    # typecode; sequences without typecodes fall back to a SOAP:Array.
    requesttypecode = kw.get('requesttypecode')
    if kw.has_key('_args'): #NamedParamBinding
        tc = requesttypecode or TC.Any(pname=opname, aslist=False)
        sw.serialize(kw['_args'], tc)
    elif not requesttypecode:
        tc = getattr(obj, 'typecode', None) or TC.Any(pname=opname, aslist=False)
        try:
            if type(obj) in _seqtypes:
                obj = dict(map(lambda i: (i.typecode.pname,i), obj))
        except AttributeError:
            # can't do anything but serialize this in a SOAP:Array
            tc = TC.Any(pname=opname, aslist=True)
        else:
            tc = TC.Any(pname=opname, aslist=False)
        sw.serialize(obj, tc)
    else:
        sw.serialize(obj, requesttypecode)

    # Caller-provided SOAP:Header entries.
    for i in soapheaders:
        sw.serialize_header(i)

    #
    # Determine the SOAP auth element.  SOAP:Header element
    if self.auth_style & AUTH.zsibasic:
        sw.serialize_header(_AuthHeader(self.auth_user, self.auth_pass),
                            _AuthHeader.typecode)

    #
    # Serialize WS-Address
    if self.wsAddressURI is not None:
        # soapaction and WS-Action must agree when both are present.
        if self.soapaction and wsaction.strip('\'"') != self.soapaction:
            raise WSActionException, 'soapAction(%s) and WS-Action(%s) must match'\
                %(self.soapaction,wsaction)

        self.address = Address(url, self.wsAddressURI)
        self.address.setRequest(endPointReference, wsaction)
        self.address.serialize(sw)

    #
    # WS-Security Signature Handler
    if self.sig_handler is not None:
        self.sig_handler.sign(sw)

    # Choose a transport from the URL scheme unless one was supplied.
    scheme,netloc,path,nil,nil,nil = urlparse.urlparse(url)
    transport = self.transport
    if transport is None and url is not None:
        if scheme == 'https':
            transport = self.defaultHttpsTransport
        elif scheme == 'http':
            transport = self.defaultHttpTransport
        else:
            raise RuntimeError, 'must specify transport or url startswith https/http'

    # Send the request.
    if issubclass(transport, httplib.HTTPConnection) is False:
        raise TypeError, 'transport must be a HTTPConnection'

    soapdata = str(sw)
    self.h = transport(netloc, None, **self.transdict)
    self.h.connect()
    self.SendSOAPData(soapdata, url, soapaction, **kw)
java
public void each(ObjectName pObjectName, MBeanEachCallback pCallback) throws IOException, ReflectionException, MBeanException { try { Set<ObjectName> visited = new HashSet<ObjectName>(); for (MBeanServerConnection server : getMBeanServers()) { // Query for a full name is the same as a direct lookup for (ObjectName nameObject : server.queryNames(pObjectName, null)) { // Don't add if already visited previously if (!visited.contains(nameObject)) { pCallback.callback(server, nameObject); visited.add(nameObject); } } } } catch (InstanceNotFoundException exp) { // Well, should not happen, since we do a query before and the returned value are supposed to exist // on the mbean-server. But, who knows ... throw new IllegalArgumentException("Cannot find MBean " + (pObjectName != null ? "(MBean " + pObjectName + ")" : "") + ": " + exp,exp); } }
python
def retryNextHost(self, connector=None):
    """
    Have this connector connect again, to the next host in the
    configured list of hosts.
    """
    if not self.continueTrying:
        log.msg("TxMongo: Abandoning {0} on explicit request.".format(connector))
        return

    if connector is None:
        if self.connector is None:
            raise ValueError("TxMongo: No additional connector to retry.")
        connector = self.connector

    # Advance to the next host, wrapping around the node list.  Wrapping
    # means every host has been tried once, so back off before retrying.
    self.__index = (self.__index + 1) % len(self.__allnodes)
    wrapped = self.__index == 0

    connector.host, connector.port = self.__allnodes[self.__index]

    if wrapped:
        self.retry(connector)
    else:
        connector.connect()
python
def get_chat_from_id(self, chat_id):
    """
    Fetches a chat given its ID

    :param chat_id: Chat ID
    :type chat_id: str
    :return: Chat or Error
    :rtype: Chat
    """
    raw_chat = self.wapi_functions.getChatById(chat_id)
    # A falsy result from the JS bridge means the chat does not exist.
    if not raw_chat:
        raise ChatNotFoundError("Chat {0} not found".format(chat_id))
    return factory_chat(raw_chat, self)
java
/**
 * Reflectively invokes a public method on the given object; with no
 * parameters, the zero-argument overload is looked up instead.
 */
public static Object invokeMethod(Object object, String methodName, Class<?>[] parameterTypes, Object[] parameters)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    Class<?> clazz = object.getClass();
    if (Checker.isEmpty(parameters)) {
        // No arguments: resolve and call the no-arg overload.
        return clazz.getMethod(methodName).invoke(object);
    }
    return clazz.getMethod(methodName, parameterTypes).invoke(object, parameters);
}
java
/**
 * Re-registers saved task updates, keyed by (jobId, taskId). An update
 * already present in the map is kept, so restored entries never overwrite
 * newer state.
 */
public synchronized void restoreTaskUpdates(List<TaskInfo> tasks) {
    for (TaskInfo task : tasks) {
        Pair<Long, Integer> key = new Pair<>(task.getJobId(), task.getTaskId());
        // Only fill gaps; existing (newer) updates win.
        if (!mTaskUpdates.containsKey(key)) {
            mTaskUpdates.put(key, task);
        }
    }
}
java
/**
 * Asynchronously lists the routes table for the given cross connection
 * peering and device path, delivering the result through the supplied
 * service callback.
 */
public ServiceFuture<ExpressRouteCircuitsRoutesTableListResultInner> listRoutesTableAsync(String resourceGroupName, String crossConnectionName, String peeringName, String devicePath, final ServiceCallback<ExpressRouteCircuitsRoutesTableListResultInner> serviceCallback) {
    return ServiceFuture.fromResponse(listRoutesTableWithServiceResponseAsync(resourceGroupName, crossConnectionName, peeringName, devicePath), serviceCallback);
}
python
async def remove_participant(self, p: Participant):
    """ remove a participant from the tournament

    |methcoro|

    Args:
        p: the participant to remove

    Raises:
        APIException

    """
    route = 'tournaments/{}/participants/{}'.format(self._id, p._id)
    await self.connection('DELETE', route)
    # Keep the locally cached participant list in sync with the server.
    if p in self.participants:
        self.participants.remove(p)
python
def clear(self):
    """delete and re-initialize all private components to zero"""
    # Drop each private field, then rebind it to a fresh zero-valued MPI.
    for name in self.__privfields__:
        delattr(self, name)
        setattr(self, name, MPI(0))
java
/**
 * Validates the argument list — an InvocationContext, an HttpResponse and
 * an optional third argument — and forwards them to process().
 */
@Override
public Object run(Object... args) {
    assertLength(args, 2, 3);
    // First two arguments are mandatory and type-checked.
    InvocationContext context = assertAssignable(assertNotNull(args[0]), InvocationContext.class);
    HttpResponse response = assertAssignable(assertNotNull(args[1]), HttpResponse.class);
    // Third argument is optional and passed through untyped.
    Object extra = (args.length > 2) ? args[2] : null;
    return process(context, response, extra);
}
java
protected String getDeviceViewName(String viewName) { // Check for special "redirect:" prefix. if (viewName.startsWith(REDIRECT_URL_PREFIX)) { return viewName; } // Check for special "forward:" prefix. if (viewName.startsWith(FORWARD_URL_PREFIX)) { return viewName; } return getDeviceViewNameInternal(viewName); }
python
def create(
        self, policy_id, type, condition_scope, name, entities, metric, terms,
        runbook_url=None, user_defined=None, enabled=True):
    """
    Creates an alert condition

    :type policy_id: int
    :param policy_id: Alert policy id where target alert condition belongs to

    :type type: str
    :param type: The type of the condition, can be apm_app_metric,
        apm_kt_metric, servers_metric, browser_metric, mobile_metric

    :type condition_scope: str
    :param condition_scope: The scope of the condition, can be instance or application

    :type name: str
    :param name: The name of the server

    :type entities: list[str]
    :param entities: entity ids to which the alert condition is applied

    :type metric: str
    :param metric: The target metric

    :type runbook_url: str
    :param runbook_url: The url of the runbook

    :type terms: list[hash]
    :param terms: list of hashes containing threshold config for the alert

    :type user_defined: hash
    :param user_defined: hash containing threshold user_defined for the alert
        required if metric is set to user_defined

    :type enabled: bool
    :param enabled: Whether to enable that alert condition

    :rtype: dict
    :return: The JSON response of the API

    :raises ConfigurationException: if metric is 'user_defined' but no
        user_defined config is supplied

    ::

        {
            "condition": {
                "id": "integer",
                "type": "string",
                "condition_scope":  "string",
                "name": "string",
                "enabled": "boolean",
                "entities": [
                    "integer"
                ],
                "metric": "string",
                "runbook_url": "string",
                "terms": [
                    {
                        "duration": "string",
                        "operator": "string",
                        "priority": "string",
                        "threshold": "string",
                        "time_function": "string"
                    }
                ],
                "user_defined": {
                    "metric": "string",
                    "value_function": "string"
                }
            }
        }

    """
    data = {
        'condition': {
            'type': type,
            'name': name,
            'enabled': enabled,
            'entities': entities,
            'condition_scope': condition_scope,
            'terms': terms,
            'metric': metric,
            'runbook_url': runbook_url,
        }
    }

    # user_defined metrics carry their own threshold configuration block.
    if metric == 'user_defined':
        if user_defined:
            data['condition']['user_defined'] = user_defined
        else:
            raise ConfigurationException(
                'Metric is set as user_defined but no user_defined config specified'
            )

    return self._post(
        url='{0}alerts_conditions/policies/{1}.json'.format(self.URL, policy_id),
        headers=self.headers,
        data=data
    )
python
def next_power_of_2(x):
    """Finds the next power of 2 value

    Args:
        x: Input value

    Returns:
        power_of_2: Next power of 2 value
    """
    # log2(0) is undefined, so 0 maps to 1 by convention; otherwise round
    # the exponent up so x itself is returned when already a power of two.
    if x == 0:
        return 1
    return 2 ** np.ceil(np.log2(x))
python
def insert(self):
    """Insert this document"""
    from mongoframes.queries import to_refs

    # Notify listeners that an insert is about to happen
    signal('insert').send(self.__class__, frames=[self])

    # Resolve references, insert, and capture the generated Id
    prepared = to_refs(self._document)
    self._id = self.get_collection().insert_one(prepared).inserted_id

    # Notify listeners that the insert completed
    signal('inserted').send(self.__class__, frames=[self])
python
def delete(name, runas=None):
    '''
    Delete a VM

    .. versionadded:: 2016.11.0

    :param str name: Name/ID of VM to clone

    :param str runas: The user that the prlctl command will be run as

    Example:

    .. code-block:: bash

        salt '*' parallels.exec macvm 'find /etc/paths.d' runas=macdev
    '''
    # Normalize the name to text before handing it to prlctl.
    vm_name = salt.utils.data.decode(name)
    return prlctl('delete', vm_name, runas=runas)
java
/**
 * Enables "update" mode: files that differ in the source are re-copied.
 *
 * @return this builder, for call chaining
 */
@CliObjectOption(description = "Specifies files should be updated if they're different in the source.")
public EmbeddedGobblinDistcp update() {
    this.setConfiguration(RecursiveCopyableDataset.UPDATE_KEY, Boolean.toString(true));
    return this;
}
java
/**
 * Stores the given bytes as this cookie attribute's value (decoded as
 * English/ASCII text).
 *
 * @return true when a non-empty value was set, false otherwise
 */
@Override
public boolean set(HttpCookie cookie, byte[] data) {
    // Nothing to store for a missing or empty value.
    if (null == data || 0 == data.length) {
        return false;
    }
    cookie.setAttribute(getName(), HttpChannelUtils.getEnglishString(data));
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "Cookie CommentURL set to " + cookie.getAttribute(getName()));
    }
    return true;
}
java
/**
 * Reads bytes.length bytes starting at the absolute storage offset
 * storageIndex into the supplied buffer. The range may straddle a bucket
 * boundary, in which case the tail of the first bucket and the head of
 * the next are concatenated. Buckets are served from the cache, with a
 * prefetching miss path.
 */
@Override
public synchronized void read (byte[] bytes, long storageIndex) throws IOException {
    // Translate the absolute offset into (bucket index, offset within bucket).
    final int bucketIndex = (int) (storageIndex / SIZE_PER_BUCKET);
    final int bucketOffset = (int) (storageIndex % SIZE_PER_BUCKET);
    try {
        // Flush any pending bucket write before reading.
        storeBucket(-1, null);

        // // DEBUG CODE
        // reader.write(bucketIndex + "," + storageIndex + "," +
        // bucketOffset + "," + bytes.length +
        // "\n");
        // reader.flush();

        byte[] data = mByteCache.getIfPresent(bucketIndex);
        if (data == null) {
            data = getAndprefetchBuckets(bucketIndex);
        }

        final ByteArrayDataOutput output = ByteStreams.newDataOutput(bytes.length);
        int length;
        // First segment: either the whole request or up to the bucket's end.
        if (bucketOffset + bytes.length > SIZE_PER_BUCKET) {
            length = SIZE_PER_BUCKET - bucketOffset;
        } else {
            length = bytes.length;
        }
        output.write(data, bucketOffset, length);

        if (bucketOffset + bytes.length > SIZE_PER_BUCKET) {
            // Remainder spills into the beginning of the next bucket.
            data = mByteCache.getIfPresent(bucketIndex + 1);
            if (data == null) {
                data = getAndprefetchBuckets(bucketIndex + 1);
            }
            output.write(data, 0, bytes.length - (SIZE_PER_BUCKET - bucketOffset));
        }
        System.arraycopy(output.toByteArray(), 0, bytes, 0, bytes.length);
    } catch (ExecutionException | InterruptedException exc) {
        throw new IOException(exc);
    }
}
python
def parse_doc_tree(self, doctree, pypackages):
    """Parse the given documentation tree.

    :param str doctree: The absolute path to the documentation tree which is
        to be parsed.
    :param set pypackages: A set of all Python packages found in the pytree.
    :rtype: dict
    :returns: A dict where each key is the path of an expected Python module
        and its value is the parsed rst module name (relative to the
        documentation tree).
    """
    # Module files that belong to packages are excluded from the result.
    package_files = {name + '.py' for name in pypackages}
    mapping = {}
    for docname in os.listdir(doctree):
        if self._ignore_docfile(docname):
            continue
        pyfile = self.build_pyfile_path_from_docname(docname)
        if pyfile not in package_files:
            mapping[pyfile] = docname
    return mapping
python
def loggray(x, a, b):
    """Auxiliary function that specifies the logarithmic gray scale.
    a and b are the cutoffs."""
    # Map [a, b] linearly onto [10, 1000], then log-compress so the full
    # range [a, b] covers gray levels [0, 255].
    span = b - a
    linval = 10.0 + 990.0 * (x - float(a)) / span
    return (np.log10(linval) - 1.0) * 0.5 * 255.0
java
/**
 * Creates a plain-text multipart part. Text parts deliberately carry no
 * content type; the body is written with the request charset, defaulting
 * to ISO-8859-1 when none is supplied.
 */
public static Part<String> text(String name, String value) {
    // the text part do not set content type
    return new Part<>(name, null, value, null, null, (body, out, charset) -> {
        OutputStreamWriter writer = new OutputStreamWriter(out, Objects2.elvis(charset, ISO_8859_1));
        writer.write(body);
        writer.flush();
    });
}
java
/**
 * Translates every atom of the structure by the given 1x3 matrix, in
 * place. Before/after inter-atom distances are logged at debug level;
 * failures in that logging are swallowed so translation always proceeds.
 */
public static final void plus(Structure s, Matrix matrix){
    AtomIterator iter = new AtomIterator(s) ;
    Atom oldAtom = null;
    Atom rotOldAtom = null;
    while (iter.hasNext()) {
        Atom atom = null ;
        atom = iter.next() ;
        try {
            if ( oldAtom != null){
                logger.debug("before {}", getDistance(oldAtom,atom));
            }
        } catch (Exception e){
            logger.error("Exception: ", e);
        }
        // Keep a copy of the untranslated atom for the next "before" log line.
        oldAtom = (Atom)atom.clone();
        double x = atom.getX();
        double y = atom.getY() ;
        double z = atom.getZ();
        // Row-vector addition: new coordinates = old coordinates + translation.
        double[][] ad = new double[][]{{x,y,z}};
        Matrix am = new Matrix(ad);
        Matrix na = am.plus(matrix);
        double[] coords = new double[3] ;
        coords[0] = na.get(0,0);
        coords[1] = na.get(0,1);
        coords[2] = na.get(0,2);
        atom.setCoords(coords);
        try {
            if ( rotOldAtom != null){
                logger.debug("after {}", getDistance(rotOldAtom,atom));
            }
        } catch (Exception e){
            logger.error("Exception: ", e);
        }
        // Copy of the translated atom for the next "after" log line.
        rotOldAtom = (Atom) atom.clone();
    }
}
java
/**
 * Divides, in place, every element of the given row from column
 * {@code fromCol} to the last column by {@code value}.
 */
public static void divideRowBy(Matrix matrix, long aRow, long fromCol, double value) {
    final long columnCount = matrix.getColumnCount();
    for (long c = fromCol; c < columnCount; c++) {
        double scaled = matrix.getAsDouble(aRow, c) / value;
        matrix.setAsDouble(scaled, aRow, c);
    }
}
java
/**
 * Returns an input stream for the given zip entry, serving manifest and
 * .class entries (up to a size limit) from a shared byte cache. Directory
 * entries return null and empty entries a shared empty stream; everything
 * else streams directly from the zip file.
 */
@Override
@Trivial
public InputStream getInputStream(ZipFile useZipFile, ZipEntry zipEntry) throws IOException {
    String methodName = "getInputStream";

    String entryName = zipEntry.getName();
    if ( zipEntry.isDirectory() ) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
            debug(methodName, "Entry [ " + entryName + " ] [ null ] (Not using cache: Directory entry)");
        }
        return null;
    }

    long entrySize = zipEntry.getSize();
    if ( entrySize == 0 ) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
            debug(methodName, "Entry [ " + entryName + " ] [ empty stream ] (Not using cache: Empty entry)");
        }
        return EMPTY_STREAM;
    }

    // Decide cache eligibility: only the manifest and class resources under
    // the configured size limit are cached.
    boolean doNotCache;
    String doNotCacheReason;
    if ( zipEntries == null ) {
        // No entry cache.
        doNotCache = true;
        doNotCacheReason = "Do not cache: Entry cache disabled";
    } else if ( entrySize > ZipCachingProperties.ZIP_CACHE_ENTRY_LIMIT) {
        // Too big for the cache
        doNotCache = true;
        doNotCacheReason = "Do not cache: Too big";
    } else if ( entryName.equals("META-INF/MANIFEST.MF") ) {
        doNotCache = false;
        doNotCacheReason = "Cache META-INF/MANIFEST.MF";
    } else if ( entryName.endsWith(".class") ) {
        doNotCache = false;
        doNotCacheReason = "Cache .class resources";
    } else {
        doNotCache = true;
        doNotCacheReason = "Do not cache: Not manifest or class resource";
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
        debug(methodName, "Entry [ " + entryName + " ] [ non-null ] [ " + doNotCacheReason + " ]");
    }

    if ( doNotCache ) {
        return useZipFile.getInputStream(zipEntry); // throws IOException
    }

    // The addition of ":::" *seems* to allow for non-unique cache keys.  Duplicate
    // keys *are not* possible because the CRC and last-modified values are numeric.
    // Duplicate keys would be possible of the CRC or last-modified values, when
    // converted to strings, could contain ":::" character sequences.
    String entryCacheKey = entryName +
        ":::" + Long.toString( zipEntry.getCrc() ) +
        ":::" + Long.toString( getLastModified() );

    // Note that only the individual gets and puts are protected.
    //
    // That means that simultaneous get misses are possible, which
    // will result in double reads and double puts.
    //
    // That is unfortunate, but is harmless.
    //
    // The simultaneous puts are allowed because they should be very
    // rare.
    //
    // They are allowed because blocking entry gets while waiting for
    // reads could create large delays.

    byte[] entryBytes;
    synchronized( zipEntriesLock ) {
        entryBytes = zipEntries.get(entryCacheKey);
    }

    if ( entryBytes == null ) {
        // Cache miss: read the whole entry, then publish it to the cache.
        InputStream inputStream = useZipFile.getInputStream(zipEntry); // throws IOException
        try {
            entryBytes = read(inputStream, (int) entrySize, entryName); // throws IOException
        } finally {
            inputStream.close(); // throws IOException
        }

        synchronized( zipEntriesLock ) {
            zipEntries.put(entryCacheKey, entryBytes);
        }
    }

    return new ByteArrayInputStream(entryBytes);
}
java
/**
 * Returns true when the property is present and not explicitly disabled.
 * The values "false", "no" and "off" (case-insensitive) count as disabled;
 * any other non-null value counts as enabled.
 */
public static boolean hasProperty(Properties props, String key) {
    final String value = props.getProperty(key);
    if (value == null) {
        return false;
    }
    switch (value.toLowerCase()) {
        case "false":
        case "no":
        case "off":
            return false;
        default:
            return true;
    }
}
java
/**
 * Builds the styled signature "name(params)" for the executable, omitting
 * its return type.
 */
protected StyledString signatureWithoutReturnType(StyledString simpleName, JvmExecutable element) {
    // Append the styled parameter list right after the simple name.
    StyledString parameterList = this.uiStrings.styledParameters(element);
    return simpleName.append(parameterList);
}
python
def header(self):
    """
    Return the BAM/SAM header

    Returns
    -------
    generator
        Each line of the header
    """
    # `samtools view -H` prints only the header lines of the BAM file.
    cmd = [self.__samtools, 'view', '-H', self.__bam]
    stdout = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout
    try:
        for raw_line in stdout:
            yield raw_line.decode('utf-8').strip()
    finally:
        # Close the pipe even when the generator is abandoned early;
        # previously it leaked if iteration stopped before exhaustion.
        stdout.close()
python
def getFaxResult(self, CorpNum, ReceiptNum, UserID=None):
    """ Get fax transmission results.

        args
            CorpNum : Popbill member's business registration number
            ReceiptNum : receipt number issued when the transmission
                was requested (always 18 characters)
            UserID : Popbill member user ID
        return
            fax transmission info as list
        raise
            PopbillException when the receipt number is missing or malformed
    """
    # Receipt numbers are fixed-length (18 chars); validate before the call.
    # `is None` replaces the previous `== None` identity-unsafe comparison.
    if ReceiptNum is None or len(ReceiptNum) != 18:
        raise PopbillException(-99999999, "์ ‘์ˆ˜๋ฒˆํ˜ธ๊ฐ€ ์˜ฌ๋ฐ”๋ฅด์ง€ ์•Š์Šต๋‹ˆ๋‹ค.")

    return self._httpget('/FAX/' + ReceiptNum, CorpNum, UserID)
java
/**
 * After deserialization, lazily (re)initializes every {@code @BigMap}-annotated
 * field of the object that is still null, by reflectively invoking the
 * object's {@code initializeBigMapField} method.
 */
protected <T extends Serializable> void postDeserializer(T serializableObject) {
    Method method = null;
    for(Field field : ReflectionMethods.getAllFields(new LinkedList<>(), serializableObject.getClass())) {
        if (field.isAnnotationPresent(BigMap.class)) { //look only for BigMaps
            field.setAccessible(true);
            try {
                if(field.get(serializableObject) == null) { //initialize it only if null. this makes it safe it the BigMap was serialized in the file.
                    //lazy initialize the correct method once
                    if(method == null) {
                        method = ReflectionMethods.findMethod(serializableObject, "initializeBigMapField", this, field);
                    }
                    ReflectionMethods.invokeMethod(serializableObject, method, this, field);
                }
            } catch (IllegalAccessException ex) {
                // setAccessible(true) was called, so this should be unreachable.
                throw new RuntimeException(ex);
            }
        }
    }
}
java
/**
 * Inspect the top two operand-stack items of an arithmetic instruction and
 * report a bug when both carry a time-unit tag and the units disagree
 * (e.g. adding milliseconds to seconds).  Reference equality is sufficient
 * for comparing the enum-like {@code Units} values, hence the PMD suppression.
 */
@SuppressWarnings("PMD.CompareObjectsWithEquals")
private void processArithmetic() {
    if (stack.getStackDepth() <= 1) {
        return; // not enough operands to compare
    }
    Units topUnits = (Units) stack.getStackItem(0).getUserValue();
    Units nextUnits = (Units) stack.getStackItem(1).getUserValue();
    if ((topUnits == null) || (nextUnits == null) || (topUnits == nextUnits)) {
        return; // untagged operand or matching units: nothing to report
    }
    bugReporter.reportBug(new BugInstance(this, BugType.CTU_CONFLICTING_TIME_UNITS.name(), NORMAL_PRIORITY).addClass(this).addMethod(this)
            .addSourceLine(this).addString(topUnits.toString()).addString(nextUnits.toString()));
}
java
/**
 * Return the {@code Version} {@link EClass}, resolving it lazily from the
 * registered store package on first access and caching it afterwards.
 * Index 46 is the position of the Version classifier in the generated
 * package — it must match the generated model.
 */
@Override
public EClass getVersion() {
    if (versionEClass != null) {
        return versionEClass;
    }
    EPackage storePackage = EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI);
    versionEClass = (EClass) storePackage.getEClassifiers().get(46);
    return versionEClass;
}
java
@CheckForNull public static OffsetDateTime parseOffsetDateTimeQuietly(@Nullable String s) { OffsetDateTime datetime = null; if (s != null) { try { datetime = parseOffsetDateTime(s); } catch (RuntimeException e) { // ignore } } return datetime; }
python
def init(self, id):
    """
    This method does nothing and will simply tell if the next ID will be larger than the given ID.

    You don't need to call this method on cluster restart - uniqueness is preserved thanks to the
    timestamp component of the ID. This method exists to make :class:`~hazelcast.proxy.FlakeIdGenerator`
    drop-in replacement for the deprecated :class:`~hazelcast.proxy.IdGenerator`.

    :param id: (int), ID to compare.
    :return: (bool), True if the next ID will be larger than the supplied id, False otherwise.
    """
    # Pad the comparison with one hour's worth of IDs: long batch validity
    # means some clients may still be handing out older IDs.  One hour is a
    # safe heuristic rather than a hard guarantee (clients can configure a
    # longer validity).  Calling init() before any IDs were generated needs
    # no reserve at all, so the padding is deliberately kept modest.
    reserve = to_millis(TimeUnit.HOUR) << (FlakeIdGenerator._BITS_NODE_ID + FlakeIdGenerator._BITS_SEQUENCE)
    threshold = id + reserve
    return self.new_id().continue_with(lambda future: future.result() >= threshold)
python
def _deprefix(line, prefix, callback=None): """Remove the prefix string from the beginning of line, if it exists. :param string line: A line, such as one output by GnuPG's status-fd. :param string prefix: A substring to remove from the beginning of ``line``. Case insensitive. :type callback: callable :param callback: Function to call if the prefix is found. The signature to callback will be only one argument, the ``line`` without the ``prefix``, i.e. ``callback(line)``. :rtype: string :returns: If the prefix was found, the ``line`` without the prefix is returned. Otherwise, the original ``line`` is returned. """ try: assert line.upper().startswith(u''.join(prefix).upper()) except AssertionError: log.debug("Line doesn't start with prefix '%s':\n%s" % (prefix, line)) return line else: newline = line[len(prefix):] if callback is not None: try: callback(newline) except Exception as exc: log.exception(exc) return newline
python
def ping():
    '''
    Check reachability by performing a logon/logout round trip.

    Returns true if the device is reachable, else false.
    '''
    reachable = True
    try:
        credentials = logon()
        logout(*credentials)
    except salt.exceptions.CommandExecutionError:
        reachable = False
    except Exception as err:
        # Any other failure also means "unreachable"; keep the detail in
        # the debug log for troubleshooting.
        log.debug(err)
        reachable = False
    return reachable
java
/**
 * Marshall the given {@link AdminUpdateDeviceStatusRequest} onto the supplied
 * {@link ProtocolMarshaller}, emitting the user pool id, username, device key
 * and device-remembered-status fields via their pre-built bindings.
 *
 * @param adminUpdateDeviceStatusRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller that receives each field
 * @throws SdkClientException if the request is null, or wrapping any failure
 *         raised while marshalling a field
 */
public void marshall(AdminUpdateDeviceStatusRequest adminUpdateDeviceStatusRequest, ProtocolMarshaller protocolMarshaller) {
    if (adminUpdateDeviceStatusRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(adminUpdateDeviceStatusRequest.getUserPoolId(), USERPOOLID_BINDING);
        protocolMarshaller.marshall(adminUpdateDeviceStatusRequest.getUsername(), USERNAME_BINDING);
        protocolMarshaller.marshall(adminUpdateDeviceStatusRequest.getDeviceKey(), DEVICEKEY_BINDING);
        protocolMarshaller.marshall(adminUpdateDeviceStatusRequest.getDeviceRememberedStatus(), DEVICEREMEMBEREDSTATUS_BINDING);
    } catch (Exception e) {
        // Broad catch is the generated-marshaller convention: rewrap anything
        // as an SdkClientException for the caller.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Convenience overload: wraps the raw {@code expression} string via
 * {@code x(...)} and delegates to the {@code Expression}-typed
 * {@code arrayRemove} overload.
 *
 * @param expression the array expression, given as a raw string
 * @param value the value to remove from the array
 * @return the resulting {@link Expression}
 */
public static Expression arrayRemove(String expression, Expression value) {
    return arrayRemove(x(expression), value);
}
java
/**
 * Write the given library as a nested archive entry under {@code destination}.
 * The entry keeps the library file's timestamp, and its CRC/size are computed
 * up front from the file before writing.
 *
 * @param destination the directory prefix inside the archive (expected to end
 *        with a separator — TODO confirm against callers)
 * @param library the library to embed
 * @throws IOException if the library file cannot be read or the entry written
 */
public void writeNestedLibrary(String destination, Library library) throws IOException {
    File file = library.getFile();
    JarArchiveEntry entry = new JarArchiveEntry(destination + library.getName());
    entry.setTime(getNestedLibraryTime(file));
    // Pre-computes CRC and size for the entry; presumably required because the
    // entry is stored uncompressed ("stored" entries need these up front) — TODO confirm
    new CrcAndSize(file).setupStoredEntry(entry);
    // The 'true' flag is passed to InputStreamEntryWriter alongside the stream;
    // NOTE(review): looks like it means "close the stream after writing" — confirm
    writeEntry(entry, new InputStreamEntryWriter(new FileInputStream(file), true), new LibraryUnpackHandler(library));
}
python
def GetForwardedIps(self, interface, interface_ip=None):
    """Retrieve the list of configured forwarded IP addresses.

    Args:
      interface: string, the output device to query.
      interface_ip: string, current interface ip address.

    Returns:
      list, the IP address strings.
    """
    try:
      # ifaddresses() returns a dict keyed by address family, so an
      # interface with no IPv4 addresses raises KeyError (not IndexError);
      # an unknown interface name raises ValueError.
      ips = netifaces.ifaddresses(interface)[netifaces.AF_INET]
    except (ValueError, KeyError, IndexError):
      return []
    forwarded_ips = []
    for ip in ips:
      # Skip the interface's own primary address; everything else on the
      # device is treated as a forwarded IP.
      if ip['addr'] != interface_ip:
        full_addr = '%s/%d' % (
            ip['addr'], netaddr.IPAddress(ip['netmask']).netmask_bits())
        forwarded_ips.append(full_addr)
    return self.ParseForwardedIps(forwarded_ips)