Dataset schema: language (string, 2 classes: python, java); func_code_string (string, lengths 63 to 466k characters).
python
def release_subnet(self, cidr, direc):
    """Routine to release a subnet from the DB."""
    if direc == 'in':
        self.service_in_ip.release_subnet(cidr)
    else:
        self.service_out_ip.release_subnet(cidr)
java
public void setIntHeader(String name, int value) {
    try {
        _httpResponse.setIntField(name, value);
    } catch (IllegalStateException e) {
        LogSupport.ignore(log, e);
    }
}
java
public static <T> ExpressionList<T> applyWhere(ExpressionList<T> expressionList, Object queryObject) {
    if (queryObject != null) {
        Class clz = queryObject.getClass();
        Field[] fields = clz.getDeclaredFields();
        for (Field field : fields) {
            field.setAccessible(true);
            String param = field.getName();
            Object value = null;
            try {
                value = field.get(queryObject);
            } catch (Exception e) {
                e.printStackTrace();
                break;
            }
            ExprType exprType = ExprType.DEFAULT;
            boolean ignoreCase = false;
            boolean escapeNull = true;
            boolean isString = field.getType() == String.class;
            boolean isArray = field.getType().isArray();
            if (field.isAnnotationPresent(ExprParam.class)) {
                ExprParam exprParam = field.getAnnotation(ExprParam.class);
                String qpv = exprParam.value();
                String qpn = exprParam.name();
                exprType = exprParam.expr();
                ignoreCase = exprParam.ignoreCase();
                escapeNull = exprParam.escapeNull();
                if (escapeNull && value == null) {
                    // Was 'break', which aborted the whole field loop on the first
                    // null value; skipping just this field matches escapeNull's intent.
                    continue;
                }
                if (StringUtils.hasText(qpv)) {
                    param = qpv;
                }
                if (StringUtils.hasText(qpn)) {
                    param = qpn;
                }
            }
            switch (exprType) {
                case EQ:
                    if (isString && ignoreCase) {
                        expressionList.add(Expr.ieq(param, (String) value));
                    } else {
                        expressionList.add(Expr.eq(param, value));
                    }
                    break;
                case NE:
                    expressionList.add(Expr.ne(param, value));
                    break;
                case GE:
                    expressionList.add(Expr.ge(param, value));
                    break;
                case GT:
                    expressionList.add(Expr.gt(param, value));
                    break;
                case LE:
                    expressionList.add(Expr.le(param, value));
                    break;
                case LT:
                    expressionList.add(Expr.lt(param, value));
                    break;
                case LIKE:
                    if (isString) {
                        if (ignoreCase) {
                            expressionList.add(Expr.ilike(param, (String) value));
                        } else {
                            expressionList.add(Expr.like(param, (String) value));
                        }
                    }
                    break;
                case STARTS_WITH:
                    if (isString) {
                        if (ignoreCase) {
                            expressionList.add(Expr.istartsWith(param, (String) value));
                        } else {
                            expressionList.add(Expr.startsWith(param, (String) value));
                        }
                    }
                    break;
                case ENDS_WITH:
                    if (isString) {
                        if (ignoreCase) {
                            expressionList.add(Expr.iendsWith(param, (String) value));
                        } else {
                            expressionList.add(Expr.endsWith(param, (String) value));
                        }
                    }
                    break;
                case CONTAINS:
                    if (isString) {
                        if (ignoreCase) {
                            expressionList.add(Expr.icontains(param, (String) value));
                        } else {
                            expressionList.add(Expr.contains(param, (String) value));
                        }
                    }
                    break;
                case IN:
                    if (isArray) {
                        expressionList.add(Expr.in(param, (Object[]) value));
                    }
                    break; // was missing: without it, IN fell through into the default case
                default:
                    if (isString) {
                        if (ignoreCase) {
                            expressionList.add(Expr.icontains(param, (String) value));
                        } else {
                            expressionList.add(Expr.contains(param, (String) value));
                        }
                    } else {
                        expressionList.add(Expr.eq(param, value));
                    }
                    break;
            }
        }
    }
    return expressionList;
}
python
def generate_identifier(result):
    """
    Returns a fixed length identifier based on a hash of a combined set of
    playbook/task values which are as close as we can guess to unique for
    each task.
    """
    # Determine the playbook file path to use for the ID
    if result.task.playbook and result.task.playbook.path:
        playbook_file = result.task.playbook.path
    else:
        playbook_file = ''
    play_path = u'%s.%s' % (playbook_file, result.task.play.name)

    # Determine the task file path to use for the ID
    if result.task.file and result.task.file.path:
        task_file = result.task.file.path
    else:
        task_file = ''
    task_path = u'%s.%s' % (task_file, result.task.name)

    # Combine both of the above for a full path
    identifier_path = u'%s.%s' % (play_path, task_path)

    # Assign the identifier as a hash of the fully unique path.
    identifier = hashlib.sha1(encodeutils.to_utf8(identifier_path)).hexdigest()

    return identifier
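The same fixed-length-identifier idea in a self-contained sketch (plain hashlib, no ARA/oslo helpers; the parts and names are illustrative):

import hashlib

def path_identifier(*parts):
    # Join the identifying parts and hash them; sha1 always yields 40 hex chars.
    combined = ".".join(p or "" for p in parts)
    return hashlib.sha1(combined.encode("utf-8")).hexdigest()

print(path_identifier("site.yml", "deploy", "tasks/main.yml", "install packages"))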
java
static MutableBigInteger modInverseBP2(MutableBigInteger mod, int k) {
    // Copy the mod to protect original
    return fixup(new MutableBigInteger(1), new MutableBigInteger(mod), k);
}
java
public static final Builder privateKey(Class<?> cls, String resource) { return new Builder().privateKey(cls, resource); }
python
def fen(self, *, shredder: bool = False, en_passant: str = "legal",
        promoted: Optional[bool] = None) -> str:
    """
    Gets a FEN representation of the position.

    A FEN string (e.g.,
    ``rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1``) consists
    of the position part :func:`~chess.Board.board_fen()`, the
    :data:`~chess.Board.turn`, the castling part
    (:data:`~chess.Board.castling_rights`), the en passant square
    (:data:`~chess.Board.ep_square`), the
    :data:`~chess.Board.halfmove_clock` and the
    :data:`~chess.Board.fullmove_number`.

    :param shredder: Use :func:`~chess.Board.castling_shredder_fen()`
        and encode castling rights by the file of the rook
        (like ``HAha``) instead of the default
        :func:`~chess.Board.castling_xfen()` (like ``KQkq``).
    :param en_passant: By default, only fully legal en passant squares
        are included (:func:`~chess.Board.has_legal_en_passant()`).
        Pass ``fen`` to strictly follow the FEN specification (always
        include the en passant square after a two-step pawn move) or
        ``xfen`` to follow the X-FEN specification
        (:func:`~chess.Board.has_pseudo_legal_en_passant()`).
    :param promoted: Mark promoted pieces like ``Q~``. By default, this
        is only enabled in chess variants where this is relevant.
    """
    return " ".join([
        self.epd(shredder=shredder, en_passant=en_passant, promoted=promoted),
        str(self.halfmove_clock),
        str(self.fullmove_number)
    ])
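A quick look at the `en_passant` modes (assumes the python-chess package; in the default "legal" mode the square is listed only when an en passant capture is actually playable):

import chess

board = chess.Board()
board.push_san("e4")
print(board.fen())                  # ep square shown as '-': no legal ep capture exists
print(board.fen(en_passant="fen"))  # strict FEN: 'e3' is listed after the double pawn push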
python
def setup_signal_handlers(self):
    """Called when a child process is spawned to register the signal handlers"""
    LOGGER.debug('Registering signal handlers')
    signal.signal(signal.SIGABRT, self.on_sigabrt)
python
def skipDryRun(logger, dryRun, level=logging.DEBUG):
    """
    Return a logging function.

    When the returned function is called, it returns True if the action
    should be skipped.  The log will indicate whether the action was
    skipped because of a dry run.
    """
    # This is an undocumented "feature" of logging module:
    # logging.log() requires a numeric level
    # logging.getLevelName() maps names to numbers
    if not isinstance(level, int):
        level = logging.getLevelName(level)
    return (
        functools.partial(_logDryRun, logger, level) if dryRun
        else functools.partial(logger.log, level)
    )
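How the returned partial is used in practice. `_logDryRun` is not shown in the snippet, so a hypothetical stand-in is sketched here:

import functools
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("demo")

def _logDryRun(logger, level, msg, *args):
    # Hypothetical stand-in for the module's private helper: log and signal "skip".
    logger.log(level, "DRY RUN - skipping: " + msg, *args)
    return True

skip = functools.partial(_logDryRun, log, logging.INFO)  # what skipDryRun returns when dryRun=True
if not skip("deleting %s", "/tmp/build"):
    pass  # the destructive action would run here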
java
public void addAliasForLocator(String alias, String locator) {
    LOG.info("Add alias: '" + alias + "' for '" + locator + "'");
    aliases.put(alias, locator);
}
java
public static void copyFile(File srcFile, File destFile) throws IOException {
    InputStream reader = new FileInputStream(srcFile);
    OutputStream out = new FileOutputStream(destFile);
    try {
        byte[] buffer = new byte[2048];
        int n = 0;
        while (-1 != (n = reader.read(buffer))) {
            out.write(buffer, 0, n);
        }
    } finally {
        org.apache.commons.io.IOUtils.closeQuietly(out);
        org.apache.commons.io.IOUtils.closeQuietly(reader);
    }
}
java
protected void copy(final CacheEntry cacheEntry, final InputStream is,
        final ServletOutputStream ostream) throws IOException {
    IOException exception = null;
    InputStream resourceInputStream = null;
    // Optimization: If the binary content has already been loaded, send
    // it directly
    final Resource resource = cacheEntry.getResource();
    if (resource != null) {
        final byte buffer[] = resource.getContent();
        if (buffer != null) {
            ostream.write(buffer, 0, buffer.length);
            return;
        }
        resourceInputStream = resource.streamContent();
    } else {
        resourceInputStream = is;
    }
    final InputStream istream = new BufferedInputStream(resourceInputStream, getInputBufferSize());
    // Copy the input stream to the output stream
    exception = copyRange(istream, ostream);
    // Clean up the input stream
    istream.close();
    // Rethrow any exception that has occurred
    if (exception != null)
        throw exception;
}
java
public static FluentLogger forEnclosingClass() {
    // NOTE: It is _vital_ that the call to "caller finder" is made directly inside the static
    // factory method. See getCallerFinder() for more information.
    String loggingClass = Platform.getCallerFinder().findLoggingClass(FluentLogger.class);
    return new FluentLogger(Platform.getBackend(loggingClass));
}
java
public static String verify(String value, int min, int max) {
    try {
        return validate(value, min, max);
    } catch (IllegalArgumentException ex) {
        throw new LocalizedIllegalArgumentException(ex);
    }
}
python
def refresh(self):
    """
    Resets the data for this navigator.
    """
    self.setUpdatesEnabled(False)
    self.blockSignals(True)

    self.clear()

    tableType = self.tableType()
    if not tableType:
        self.setUpdatesEnabled(True)
        self.blockSignals(False)
        return

    schema = tableType.schema()
    columns = list(sorted(schema.columns(), key=lambda x: x.name().strip('_')))
    for column in columns:
        XOrbColumnItem(self, column)

    self.setUpdatesEnabled(True)
    self.blockSignals(False)
python
def get_ylim(self):
    '''
    Computes the ideal y-axis limits for the light curve plot. Attempts
    to set the limits equal to those of the raw light curve, but if
    more than 1% of the flux lies either above or below these limits,
    auto-expands to include those points. At the end, adds 5% padding
    to both the top and the bottom.
    '''
    bn = np.array(
        list(set(np.concatenate([self.badmask, self.nanmask]))), dtype=int)
    fraw = np.delete(self.fraw, bn)
    lo, hi = fraw[np.argsort(fraw)][[3, -3]]
    flux = np.delete(self.flux, bn)
    fsort = flux[np.argsort(flux)]
    if fsort[int(0.01 * len(fsort))] < lo:
        lo = fsort[int(0.01 * len(fsort))]
    if fsort[int(0.99 * len(fsort))] > hi:
        hi = fsort[int(0.99 * len(fsort))]
    pad = (hi - lo) * 0.05
    ylim = (lo - pad, hi + pad)
    return ylim
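The same clip-to-percentiles-plus-padding idea, reduced to plain NumPy (illustrative data only):

import numpy as np

flux = np.random.default_rng(0).normal(1.0, 0.01, 1000)
fsort = np.sort(flux)
lo, hi = fsort[int(0.01 * len(fsort))], fsort[int(0.99 * len(fsort))]
pad = (hi - lo) * 0.05
print(lo - pad, hi + pad)  # y-limits covering the central 98% of points, padded by 5%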
python
def same_intersection(intersection1, intersection2, wiggle=0.5 ** 40):
    """Check if two intersections are close to machine precision.

    .. note::

       This is a helper used only by :func:`verify_duplicates`, which in
       turn is only used by :func:`generic_intersect`.

    Args:
        intersection1 (.Intersection): The first intersection.
        intersection2 (.Intersection): The second intersection.
        wiggle (Optional[float]): The amount of relative error allowed
            in parameter values.

    Returns:
        bool: Indicates if the two intersections are the same to machine
        precision.
    """
    if intersection1.index_first != intersection2.index_first:
        return False

    if intersection1.index_second != intersection2.index_second:
        return False

    return np.allclose(
        [intersection1.s, intersection1.t],
        [intersection2.s, intersection2.t],
        atol=0.0,
        rtol=wiggle,
    )
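How the `atol=0.0, rtol=wiggle` combination behaves: with zero absolute tolerance the comparison is purely relative, so the allowed error scales with the magnitude of the parameter values:

import numpy as np

wiggle = 0.5 ** 40
s = 0.25
print(np.allclose([s], [s * (1 + 0.5 ** 45)], atol=0.0, rtol=wiggle))  # True: within relative wiggle
print(np.allclose([s], [s + 1e-9], atol=0.0, rtol=wiggle))             # False: error too large relative to s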
python
def sdiv(computation: BaseComputation) -> None:
    """
    Signed Division
    """
    numerator, denominator = map(
        unsigned_to_signed,
        computation.stack_pop(num_items=2, type_hint=constants.UINT256),
    )

    pos_or_neg = -1 if numerator * denominator < 0 else 1

    if denominator == 0:
        result = 0
    else:
        result = (pos_or_neg * (abs(numerator) // abs(denominator)))

    computation.stack_push(signed_to_unsigned(result))
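The `abs`/sign dance exists because Python's `//` floors toward negative infinity, while the EVM's SDIV truncates toward zero; a quick comparison:

print(-7 // 2)                      # -4: Python floor division
sign = -1 if (-7) * 2 < 0 else 1
print(sign * (abs(-7) // abs(2)))   # -3: truncation toward zero, as SDIV requires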
java
public void onUp(HumanInputEvent<?> event) {
    if (!isRightMouseButton(event)) {
        ToggleSelectionAction action = new ToggleSelectionAction(mapWidget, pixelTolerance);
        action.setPriorityToSelectedLayer(priorityToSelectedLayer);
        action.toggleSingle(getLocation(event, RenderSpace.SCREEN));
    }
}
java
public EEnum getIfcMemberTypeEnum() {
    if (ifcMemberTypeEnumEEnum == null) {
        ifcMemberTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                .getEClassifiers().get(858);
    }
    return ifcMemberTypeEnumEEnum;
}
python
def _set_class_parser(self, init_parser, methods_to_parse, cls):
    """Creates the complete argument parser for the decorated class.

    Args:
        init_parser: argument parser for the __init__ method or None
        methods_to_parse: dict of method name pointing to their associated
            argument parser
        cls: the class we are decorating

    Returns:
        The decorated class with an added attribute 'parser'
    """
    top_level_parents = [init_parser] if init_parser else []
    description = self._description or cls.__doc__
    top_level_parser = argparse.ArgumentParser(description=description,
                                               parents=top_level_parents,
                                               add_help=False,
                                               conflict_handler="resolve")
    top_level_parser.add_argument("-h", "--help", action=FullHelpAction,
                                  help="Display this help message")
    parser_to_method = self._add_sub_parsers(top_level_parser,
                                             methods_to_parse,
                                             cls.__name__)
    # Update the dict with the __init__ method so we can instantiate
    # the decorated class
    if init_parser:
        parser_to_method["__init__"] = "__init__"
    top_level_parser.call = self._get_parser_call_method(parser_to_method)
    cls.parser = top_level_parser
python
def create_hlamphaplot(plotman, h, v, alpha, options):
    '''Plot the data of the tomodir in one overview plot.
    '''
    sizex, sizez = getfigsize(plotman)
    # create figure
    f, ax = plt.subplots(1, 3, figsize=(3 * sizex, sizez))
    if options.title is not None:
        plt.suptitle(options.title, fontsize=18)
    plt.subplots_adjust(wspace=1, top=0.8)
    cidh = plotman.parman.add_data(h)
    cidv = plotman.parman.add_data(v)
    cidr = plotman.parman.add_data(np.subtract(h, v))
    plot_pha(cidh, ax[0], plotman, 'horizontal', alpha,
             options.pha_vmin, options.pha_vmax,
             options.xmin, options.xmax, options.zmin, options.zmax,
             options.unit, options.pha_cbtiks, options.no_elecs,
             )
    plot_pha(cidv, ax[1], plotman, 'vertical', alpha,
             options.pha_vmin, options.pha_vmax,
             options.xmin, options.xmax, options.zmin, options.zmax,
             options.unit, options.pha_cbtiks, options.no_elecs,
             )
    plot_ratio(cidr, ax[2], plotman, 'hor - ver', alpha,
               options.rat_vmin, options.rat_vmax,
               options.xmin, options.xmax, options.zmin, options.zmax,
               options.unit, options.pha_cbtiks, options.no_elecs,
               )
    f.tight_layout()
    f.savefig('pha_hlam.png', dpi=300)
    return f, ax
python
def is_cleanly_mergable(*dicts: Dict[Any, Any]) -> bool:
    """Check that nothing will be overwritten when dictionaries are merged
    using `deep_merge`.

    Examples:
        >>> is_cleanly_mergable({"a": 1}, {"b": 2}, {"c": 3})
        True
        >>> is_cleanly_mergable({"a": 1}, {"b": 2}, {"a": 0, "c": 3})
        False
        >>> is_cleanly_mergable({"a": 1, "b": {"ba": 2}}, {"c": 3, "b": {"bb": 4}})
        True
        >>> is_cleanly_mergable({"a": 1, "b": {"ba": 2}}, {"b": {"ba": 4}})
        False
    """
    if len(dicts) <= 1:
        return True
    elif len(dicts) == 2:
        if not all(isinstance(d, Mapping) for d in dicts):
            return False
        else:
            shared_keys = set(dicts[0].keys()) & set(dicts[1].keys())
            return all(is_cleanly_mergable(dicts[0][key], dicts[1][key])
                       for key in shared_keys)
    else:
        dict_combinations = itertools.combinations(dicts, 2)
        return all(is_cleanly_mergable(*combination)
                   for combination in dict_combinations)
python
def expand_iota_subscript(input_str, lowercase=False):
    """Find characters with iota subscript and replace w/ char + iota added."""
    new_list = []
    for char in input_str:
        new_char = MAP_SUBSCRIPT_NO_SUB.get(char)
        if not new_char:
            new_char = char
        new_list.append(new_char)
    new_str = ''.join(new_list)
    if lowercase:
        new_str = new_str.lower()
    return new_str
python
def onchange_dates(self):
    '''
    This method computes the duration between check-in and check-out.
    If the customer stays for only a few hours, it is still counted as
    a whole day. If the stay runs over by at least the number of
    additional hours configured on the company, one extra full day is
    counted.
    --------------------------------------------------------------------
    @param self: object pointer
    @return: Duration and checkout_date
    '''
    configured_addition_hours = 0
    wid = self.warehouse_id
    whouse_com_id = wid or wid.company_id
    if whouse_com_id:
        configured_addition_hours = wid.company_id.additional_hours
    myduration = 0
    chckin = self.checkin_date
    chckout = self.checkout_date
    if chckin and chckout:
        server_dt = DEFAULT_SERVER_DATETIME_FORMAT
        chkin_dt = datetime.datetime.strptime(chckin, server_dt)
        chkout_dt = datetime.datetime.strptime(chckout, server_dt)
        dur = chkout_dt - chkin_dt
        sec_dur = dur.seconds
        if (not dur.days and not sec_dur) or (dur.days and not sec_dur):
            myduration = dur.days
        else:
            myduration = dur.days + 1
        # To calculate additional hours in hotel room as per minutes
        if configured_addition_hours > 0:
            additional_hours = abs((dur.seconds / 60) / 60)
            if additional_hours >= configured_addition_hours:
                myduration += 1
    self.duration = myduration
    self.duration_dummy = self.duration
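The day-rounding rule in isolation (plain `datetime`, illustrative timestamps):

import datetime

fmt = '%Y-%m-%d %H:%M:%S'
checkin = datetime.datetime.strptime('2024-01-01 14:00:00', fmt)
checkout = datetime.datetime.strptime('2024-01-02 20:00:00', fmt)
dur = checkout - checkin
days = dur.days if dur.seconds == 0 else dur.days + 1
print(days)  # 2: the 6 leftover hours round the stay up to a second day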
python
def get_dummy_request(language=None):
    """
    Returns a Request instance populated with cms specific attributes.
    """
    if settings.ALLOWED_HOSTS and settings.ALLOWED_HOSTS != "*":
        host = settings.ALLOWED_HOSTS[0]
    else:
        host = Site.objects.get_current().domain

    request = RequestFactory().get("/", HTTP_HOST=host)
    request.session = {}
    request.LANGUAGE_CODE = language or settings.LANGUAGE_CODE
    # Needed for plugin rendering.
    request.current_page = None

    if 'django.contrib.auth' in settings.INSTALLED_APPS:
        from django.contrib.auth.models import AnonymousUser
        request.user = AnonymousUser()

    return request
python
def cli(ctx=None, verbose=0):
    """Thoth solver command line interface."""
    if ctx:
        ctx.auto_envvar_prefix = "THOTH_SOLVER"

    if verbose:
        _LOG.setLevel(logging.DEBUG)
        _LOG.debug("Debug mode is on")
java
public static Integer toIntegerObject(final Boolean bool) {
    if (bool == null) {
        return null;
    }
    return bool.booleanValue() ? NumberUtils.INTEGER_ONE : NumberUtils.INTEGER_ZERO;
}
java
public void setRules(RuleProvider provider, List<Rule> rules) { providersToRules.put(provider, rules); }
python
def to_curve_spline(obj):
    '''
    to_curve_spline(obj) yields obj if obj is a curve spline and otherwise
    attempts to coerce obj into a curve spline, raising an error if it
    cannot.
    '''
    if is_curve_spline(obj):
        return obj
    elif is_tuple(obj) and len(obj) == 2:
        (crds, opts) = obj
    else:
        (crds, opts) = (obj, {})
    if pimms.is_matrix(crds) or is_curve_spline(crds):
        crds = [crds]
    spls = [c for c in crds if is_curve_spline(c)]
    opts = dict(opts)
    if 'weights' not in opts and len(spls) == len(crds):
        if all(c.weights is not None for c in crds):
            opts['weights'] = np.concatenate([c.weights for c in crds])
    if 'order' not in opts and len(spls) > 0:
        opts['order'] = np.min([c.order for c in spls])
    if 'smoothing' not in opts and len(spls) > 0:
        sm = set([c.smoothing for c in spls])
        if len(sm) == 1:
            opts['smoothing'] = list(sm)[0]
        else:
            opts['smoothing'] = None
    # Test each element, not the whole list (the original tested
    # is_curve_spline(crds), which can never be true for a list).
    crds = [x.crds if is_curve_spline(x) else np.asarray(x) for x in crds]
    crds = [x if x.shape[0] == 2 else x.T for x in crds]
    crds = np.hstack(crds)
    return curve_spline(crds, **opts)
java
public static synchronized void multipart(HttpString method, String url,
        HttpConsumer<MultipartExchange> endpoint, long maxSize) {
    checkStarted();
    instance().endpoints.add(HandlerUtil.multipart(method, url, endpoint, instance().exceptionMapper, maxSize));
}
java
public BoundingBox extendDegrees(double verticalExpansion, double horizontalExpansion) {
    if (verticalExpansion == 0 && horizontalExpansion == 0) {
        return this;
    } else if (verticalExpansion < 0 || horizontalExpansion < 0) {
        throw new IllegalArgumentException("BoundingBox extend operation does not accept negative values");
    }

    double minLat = Math.max(MercatorProjection.LATITUDE_MIN, this.minLatitude - verticalExpansion);
    double minLon = Math.max(-180, this.minLongitude - horizontalExpansion);
    double maxLat = Math.min(MercatorProjection.LATITUDE_MAX, this.maxLatitude + verticalExpansion);
    double maxLon = Math.min(180, this.maxLongitude + horizontalExpansion);

    return new BoundingBox(minLat, minLon, maxLat, maxLon);
}
python
def CallHwclock(logger):
    """Sync clock using hwclock.

    Args:
        logger: logger object, used to write to SysLog and serial port.
    """
    command = ['/sbin/hwclock', '--hctosys']
    try:
        subprocess.check_call(command)
    except subprocess.CalledProcessError:
        logger.warning('Failed to sync system time with hardware clock.')
    else:
        logger.info('Synced system time with hardware clock.')
java
public boolean alreadyPresent(ImportedKey importedKey) {
    if (foreignKeysByName == null) {
        return false;
    }
    Collection<ForeignKey> fks = foreignKeysByName.values();
    if (fks == null) {
        return false;
    }
    for (ForeignKey fk : fks) {
        if (fk.getSize() == 1) {
            ImportedKey other = fk.getImportedKey();
            // Note: we just need to compare the FkColumnName, if we have a match, we consider
            // that this is an error in the conf, and we skip. No need to compare pkTable and pkCol
            boolean same = importedKey.getFkColumnName().equalsIgnoreCase(other.getFkColumnName());
            if (same) {
                return true;
            }
        }
    }
    return false;
}
java
public void setIgnoredPackagings (final Set <String> aCollection) {
    ignoredPackagings = new HashSet <> ();
    if (aCollection != null) {
        for (final String sName : aCollection)
            if (StringHelper.hasText (sName))
                if (!ignoredPackagings.add (sName))
                    getLog ().warn ("The ignored packaging '" + sName + "' is contained more than once");
    }
}
java
static List<AnnotatedValueResolver> ofServiceMethod(Method method, Set<String> pathParams, List<RequestObjectResolver> objectResolvers) { return of(method, pathParams, objectResolvers, true, true); }
java
public static int cusolverSpXcsrperm_bufferSizeHost( cusolverSpHandle handle, int m, int n, int nnzA, cusparseMatDescr descrA, Pointer csrRowPtrA, Pointer csrColIndA, Pointer p, Pointer q, long[] bufferSizeInBytes) { return checkResult(cusolverSpXcsrperm_bufferSizeHostNative(handle, m, n, nnzA, descrA, csrRowPtrA, csrColIndA, p, q, bufferSizeInBytes)); }
python
def get_reachable_volume_templates(self, start=0, count=-1, filter='', query='', sort='',
                                   networks=None, scope_uris='', private_allowed_only=False):
    """
    Gets the storage templates that are connected on the specified networks,
    based on the storage system port's expected network connectivity.

    Returns:
        list: Storage volume templates.
    """
    uri = self.URI + "/reachable-volume-templates"
    uri += "?networks={}&privateAllowedOnly={}".format(networks, private_allowed_only)
    get_uri = self._client.build_query_uri(start=start, count=count, filter=filter,
                                           query=query, sort=sort, uri=uri,
                                           scope_uris=scope_uris)
    return self._client.get(get_uri)
python
def parse(readDataInstance):
    """
    Returns a new L{NetDirectory} object.

    @type readDataInstance: L{ReadData}
    @param readDataInstance: A L{ReadData} object with data to be parsed
        as a L{NetDirectory} object.

    @rtype: L{NetDirectory}
    @return: A new L{NetDirectory} object.
    """
    nd = NetDirectory()
    nd.cb.value = readDataInstance.readDword()
    nd.majorRuntimeVersion.value = readDataInstance.readWord()
    nd.minorRuntimeVersion.value = readDataInstance.readWord()

    nd.metaData.rva.value = readDataInstance.readDword()
    nd.metaData.size.value = readDataInstance.readDword()
    nd.metaData.name.value = "MetaData"

    nd.flags.value = readDataInstance.readDword()
    nd.entryPointToken.value = readDataInstance.readDword()

    nd.resources.rva.value = readDataInstance.readDword()
    nd.resources.size.value = readDataInstance.readDword()
    nd.resources.name.value = "Resources"

    nd.strongNameSignature.rva.value = readDataInstance.readDword()
    nd.strongNameSignature.size.value = readDataInstance.readDword()
    nd.strongNameSignature.name.value = "StrongNameSignature"

    nd.codeManagerTable.rva.value = readDataInstance.readDword()
    nd.codeManagerTable.size.value = readDataInstance.readDword()
    nd.codeManagerTable.name.value = "CodeManagerTable"

    nd.vTableFixups.rva.value = readDataInstance.readDword()
    nd.vTableFixups.size.value = readDataInstance.readDword()
    nd.vTableFixups.name.value = "VTableFixups"

    nd.exportAddressTableJumps.rva.value = readDataInstance.readDword()
    nd.exportAddressTableJumps.size.value = readDataInstance.readDword()
    nd.exportAddressTableJumps.name.value = "ExportAddressTableJumps"

    nd.managedNativeHeader.rva.value = readDataInstance.readDword()
    nd.managedNativeHeader.size.value = readDataInstance.readDword()
    nd.managedNativeHeader.name.value = "ManagedNativeHeader"

    return nd
python
def save_tabs_when_changed(func):
    """Decorator for save-tabs-when-changed"""

    def wrapper(*args, **kwargs):
        func(*args, **kwargs)
        log.debug("mom, I've been called: %s %s", func.__name__, func)
        # Find me the Guake!
        clsname = args[0].__class__.__name__
        g = None
        if clsname == 'Guake':
            g = args[0]
        elif getattr(args[0], 'get_guake', None):
            g = args[0].get_guake()
        elif getattr(args[0], 'get_notebook', None):
            g = args[0].get_notebook().guake
        elif getattr(args[0], 'guake', None):
            g = args[0].guake
        elif getattr(args[0], 'notebook', None):
            g = args[0].notebook.guake
        # Tada!
        if g and g.settings.general.get_boolean('save-tabs-when-changed'):
            g.save_tabs()

    return wrapper
python
def get_environment_paths(config, env):
    """
    Get environment paths from given environment variable.
    """
    if env is None:
        return config.get(Config.DEFAULTS, 'environment')

    # Config option takes precedence over environment key.
    if config.has_option(Config.ENVIRONMENTS, env):
        env = config.get(Config.ENVIRONMENTS, env).replace(' ', '').split(';')
    else:
        env = os.getenv(env)
        if env:
            env = env.split(os.pathsep)

    return [i for i in env if i]
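What the `os.pathsep` split does (the separator is `:` on POSIX and `;` on Windows; values are illustrative):

import os

raw = "/srv/envs/prod" + os.pathsep + os.pathsep + "/srv/envs/shared"
paths = [p for p in raw.split(os.pathsep) if p]
print(paths)  # ['/srv/envs/prod', '/srv/envs/shared'] - empty entries are dropped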
python
def fn_with_custom_grad(grad_fn, use_global_vars=False):
    """Decorator to create a subgraph with a custom gradient function.

    The subgraph created by the decorated function is NOT put in a Defun and so
    does not suffer from the limitations of the Defun (all subgraph ops on the
    same device, no summaries).

    Args:
        grad_fn: function with signature
            (inputs, variables, outputs, output_grads) -> (grad_inputs, grad_vars),
            all of which are lists of Tensors.
        use_global_vars: if True, variables will be the global variables created.
            If False, will be the trainable variables.

    Returns:
        Decorator for function such that the gradient is defined by grad_fn.
    """
    def dec(fn):

        @functools.wraps(fn)
        def wrapped(*args):
            return _fn_with_custom_grad(
                fn, args, grad_fn, use_global_vars=use_global_vars)

        return wrapped

    return dec
java
public Map<String, Object> getViewMap(boolean create) {
    Map<String, Object> viewMap = (Map<String, Object>)
            getTransientStateHelper().getTransient("com.sun.faces.application.view.viewMap");
    if (create && viewMap == null) {
        viewMap = new ViewMap(getFacesContext().getApplication().getProjectStage());
        getTransientStateHelper().putTransient("com.sun.faces.application.view.viewMap", viewMap);
        getFacesContext().getApplication().publishEvent(getFacesContext(),
                PostConstructViewMapEvent.class, UIViewRoot.class, this);
    }
    return viewMap;
}
python
def get_jaro_distance(first, second, winkler=True, winkler_ajustment=True, scaling=0.1):
    """
    :param first: word to calculate distance for
    :param second: word to calculate distance with
    :param winkler: same as winkler_ajustment
    :param winkler_ajustment: add an adjustment factor to the Jaro of the distance
    :param scaling: scaling factor for the Winkler adjustment
    :return: Jaro distance adjusted (or not)
    """
    if not first or not second:
        raise JaroDistanceException("Cannot calculate distance from NoneType ({0}, {1})".format(
            first.__class__.__name__,
            second.__class__.__name__))

    jaro = _score(first, second)
    cl = min(len(_get_prefix(first, second)), 4)

    if all([winkler, winkler_ajustment]):  # 0.1 as scaling factor
        return round((jaro + (scaling * cl * (1.0 - jaro))) * 100.0) / 100.0

    return jaro
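The Winkler adjustment by hand, for an assumed Jaro score of 0.84 and a 3-character common prefix (standard scaling 0.1):

jaro, prefix_len, scaling = 0.84, 3, 0.1
adjusted = jaro + scaling * prefix_len * (1.0 - jaro)
print(round(adjusted * 100.0) / 100.0)  # 0.89: shared prefixes pull the score toward 1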
java
public Page<T> previousPage(final Page<T> page) { return previousPage(page, Twilio.getRestClient()); }
java
public void setupFields() {
    FieldInfo field = null;
    field = new FieldInfo(this, ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
    field.setDataClass(Integer.class);
    field = new FieldInfo(this, KEY, 128, null, null);
    field = new FieldInfo(this, VALUE, 255, null, null);
    field = new FieldInfo(this, COMMENT, 255, null, null);
}
java
private boolean processQueue(K key) {
    Queue<AsyncResourceRequest<V>> requestQueue = getRequestQueueForKey(key);
    if (requestQueue.isEmpty()) {
        return false;
    }

    // Attempt to get a resource.
    Pool<V> resourcePool = getResourcePoolForKey(key);
    V resource = null;
    Exception ex = null;
    try {
        // Must attempt non-blocking checkout to ensure resources are
        // created for the pool.
        resource = attemptNonBlockingCheckout(key, resourcePool);
    } catch (Exception e) {
        destroyResource(key, resourcePool, resource);
        ex = e;
        resource = null;
    }

    // We got neither a resource nor an exception, so no requests can be
    // processed; return.
    if (resource == null && ex == null) {
        return false;
    }

    // With resource in hand, process the resource requests
    AsyncResourceRequest<V> resourceRequest = getNextUnexpiredResourceRequest(requestQueue);
    if (resourceRequest == null) {
        if (resource != null) {
            // Did not use the resource! Directly check in via super to
            // avoid circular call to processQueue().
            try {
                super.checkin(key, resource);
            } catch (Exception e) {
                logger.error("Exception checking in resource: ", e);
            }
        } else {
            // Poor exception, no request to tag this exception onto;
            // drop it on the floor and continue as usual.
        }
        return false;
    } else {
        // We have a request here.
        if (resource != null) {
            resourceRequest.useResource(resource);
        } else {
            resourceRequest.handleException(ex);
        }
        return true;
    }
}
java
public synchronized void setValue(String name, String value) {
    if (map == null) {
        map = new HashMap<String,String>();
    }
    map.put(name, value);
}
java
public SecurityRuleInner createOrUpdate(String resourceGroupName, String networkSecurityGroupName, String securityRuleName, SecurityRuleInner securityRuleParameters) { return createOrUpdateWithServiceResponseAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, securityRuleParameters).toBlocking().last().body(); }
python
def add_port_to_free_pool(self, port):
    """Add a new port to the free pool for allocation."""
    if port < 1 or port > 65535:
        raise ValueError(
            'Port must be in the [1, 65535] range, not %d.' % port)
    port_info = _PortInfo(port=port)
    self._port_queue.append(port_info)
python
def add(name, device):
    '''
    Add new device to RAID array.

    CLI Example:

    .. code-block:: bash

        salt '*' raid.add /dev/md0 /dev/sda1
    '''
    cmd = 'mdadm --manage {0} --add {1}'.format(name, device)
    if __salt__['cmd.retcode'](cmd) == 0:
        return True
    return False
java
public static double getRadius(Atom atom) {
    if (atom.getElement() == null) {
        logger.warn("Unrecognised atom " + atom.getName() + " with serial " + atom.getPDBserial() +
                ", assigning the default vdw radius (Nitrogen vdw radius).");
        return Element.N.getVDWRadius();
    }

    Group res = atom.getGroup();
    if (res == null) {
        logger.warn("Unknown parent residue for atom " + atom.getName() + " with serial " +
                atom.getPDBserial() + ", assigning its default vdw radius");
        return atom.getElement().getVDWRadius();
    }

    GroupType type = res.getType();
    if (type == GroupType.AMINOACID) return getRadiusForAmino(((AminoAcid) res), atom);
    if (type == GroupType.NUCLEOTIDE) return getRadiusForNucl((NucleotideImpl) res, atom);

    return atom.getElement().getVDWRadius();
}
java
@Override protected BaasStream getFromCache(BaasBox box) throws BaasException { return box.mCache.getStream(id); }
python
def init(context, reset, force):
    """Setup the database."""
    store = Store(context.obj['database'], context.obj['root'])
    existing_tables = store.engine.table_names()
    if force or reset:
        if existing_tables and not force:
            message = f"Delete existing tables? [{', '.join(existing_tables)}]"
            click.confirm(click.style(message, fg='yellow'), abort=True)
        store.drop_all()
    elif existing_tables:
        click.echo(click.style("Database already exists, use '--reset'", fg='red'))
        context.abort()

    store.create_all()
    message = f"Success! New tables: {', '.join(store.engine.table_names())}"
    click.echo(click.style(message, fg='green'))
java
public static <T> T[] validIndex(final T[] array, final int index) { return validIndex(array, index, DEFAULT_VALID_INDEX_ARRAY_EX_MESSAGE, Integer.valueOf(index)); }
java
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    String streamId = getOutputStreamId();
    Fields names = new Fields(outputFields);
    logger.info("{} declares {} for stream '{}'", new Object[] {this, names, streamId});
    declarer.declareStream(streamId, names);
}
java
@XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "geometryMember") public JAXBElement<GeometryPropertyType> createGeometryMember(GeometryPropertyType value) { return new JAXBElement<GeometryPropertyType>(_GeometryMember_QNAME, GeometryPropertyType.class, null, value); }
python
def create(cls, cli, management_address, local_username=None, local_password=None,
           remote_username=None, remote_password=None, connection_type=None):
    """
    Configures a remote system for remote replication.

    :param cls: this class.
    :param cli: the rest client.
    :param management_address: the management IP address of the remote
        system.
    :param local_username: administrative username of local system.
    :param local_password: administrative password of local system.
    :param remote_username: administrative username of remote system.
    :param remote_password: administrative password of remote system.
    :param connection_type: `ReplicationCapabilityEnum`. Replication
        connection type to the remote system.
    :return: the newly created remote system.
    """
    req_body = cli.make_body(
        managementAddress=management_address, localUsername=local_username,
        localPassword=local_password, remoteUsername=remote_username,
        remotePassword=remote_password, connectionType=connection_type)
    resp = cli.post(cls().resource_class, **req_body)
    resp.raise_if_err()
    return cls.get(cli, resp.resource_id)
java
public static String getURLString(String relativeToBase) {
    if (relativeToBase.startsWith("/")) {
        relativeToBase = relativeToBase.substring(1);
    }
    return baseURL + relativeToBase;
}
python
def import_data(self, data):
    """Import additional data for tuning.

    Parameters
    ----------
    data:
        a list of dictionaries, each of which has at least two keys,
        'parameter' and 'value'
    """
    _completed_num = 0
    for trial_info in data:
        logger.info("Importing data, current processing progress %s / %s"
                    % (_completed_num, len(data)))
        _completed_num += 1
        assert "parameter" in trial_info
        _params = trial_info["parameter"]
        assert "value" in trial_info
        _value = trial_info['value']
        if not _value:
            logger.info("Useless trial data, value is %s, skip this trial data." % _value)
            continue
        _params_tuple = convert_dict2tuple(_params)
        self.supplement_data[_params_tuple] = True
    logger.info("Successfully import data to grid search tuner.")
java
public static void addSoftEvidence(Network bn, String nodeName,
        HashMap<String, Double> softEvidence) throws ShanksException {
    String auxNodeName = softEvidenceNodePrefix + nodeName;
    int targetNode = bn.getNode(nodeName);
    boolean found = false;
    int[] children = bn.getChildren(targetNode);
    for (int child : children) {
        if (bn.getNodeName(child).equals(auxNodeName)) {
            if (bn.getOutcomeCount(child) == 2
                    && bn.getOutcomeId(child, 0).equals(triggerState)) {
                found = true;
                break;
            }
        }
    }
    if (!found) {
        // Create soft-evidence node
        bn.addNode(NodeType.Cpt, auxNodeName);
        bn.setNodeName(auxNodeName, auxNodeName);
        int node = bn.getNode(auxNodeName);
        bn.setOutcomeId(node, 0, triggerState);
        bn.setOutcomeId(node, 1, "NON" + triggerState);
        // bn.insertOutcome(auxNode, 0, triggerState);
        // bn.insertOutcome(auxNode, 1, "NON" + triggerState);
        double[] cpt = bn.getNodeDefinition(auxNodeName);
        for (int i = 0; i < cpt.length; i++) {
            cpt[i] = (float) 0.5;
        }
        bn.addArc(targetNode, bn.getNode(auxNodeName));
        cpt = bn.getNodeDefinition(auxNodeName);
        for (int i = 0; i < cpt.length; i++) {
            cpt[i] = (float) 0.5;
        }
    }
    ShanksAgentBayesianReasoningCapability
            .updateSoftEvidenceAuxiliaryNodeCPT(bn, nodeName, softEvidence);
    ShanksAgentBayesianReasoningCapability.addEvidence(bn,
            softEvidenceNodePrefix + nodeName, triggerState);
}
python
def update(dest, variation, path=None):
    """
    Deep merges dictionary object variation into dest; dest keys present
    in variation will be assigned new values from variation.

    :param dest:
    :param variation:
    :param path:
    :return:
    """
    if dest is None:
        return None
    if variation is None:
        return dest
    if path is None:
        path = []
    for key in variation:
        if key in dest:
            if isinstance(dest[key], dict) and isinstance(variation[key], dict):
                EBUtils.update(dest[key], variation[key], path + [str(key)])
            elif dest[key] == variation[key]:
                pass  # same leaf value
            else:
                dest[key] = variation[key]
        else:
            dest[key] = variation[key]
    return dest
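The same deep-merge behavior in a standalone sketch, to show what "variation wins" means for nested keys:

def deep_update(dest, variation):
    for key, val in variation.items():
        if isinstance(dest.get(key), dict) and isinstance(val, dict):
            deep_update(dest[key], val)  # recurse into shared dict values
        else:
            dest[key] = val              # variation overwrites the leaf
    return dest

base = {"db": {"host": "localhost", "port": 5432}, "debug": False}
print(deep_update(base, {"db": {"port": 6432}}))
# {'db': {'host': 'localhost', 'port': 6432}, 'debug': False}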
java
protected IStyleAppendable appendCluster(IStyleAppendable it, String element0, String... elements) { return appendCluster(it, true, element0, elements); }
java
public String getConfigurationOptionValue (String optionName, String defaultValue) {
    String optionValue;
    Node configurationOption = this.getConfigurationOption (optionName);
    if (configurationOption != null) {
        optionValue = configurationOption.getTextContent ();
    } else {
        optionValue = defaultValue;
    }
    return optionValue;
}
java
public boolean match(String str, String regex) {
    if (str == null || regex == null)
        return false;
    if (regex.trim().isEmpty())
        return true;
    return Pattern.compile(regex.trim()).matcher(str).find();
}
python
def get_free_shipping_promotion_by_id(cls, free_shipping_promotion_id, **kwargs):
    """Find FreeShippingPromotion

    Return single instance of FreeShippingPromotion by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.get_free_shipping_promotion_by_id(free_shipping_promotion_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param str free_shipping_promotion_id: ID of freeShippingPromotion to return (required)
    :return: FreeShippingPromotion
             If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._get_free_shipping_promotion_by_id_with_http_info(free_shipping_promotion_id, **kwargs)
    else:
        (data) = cls._get_free_shipping_promotion_by_id_with_http_info(free_shipping_promotion_id, **kwargs)
        return data
python
def getBuffer(x):
    """Copy @x into a (modifiable) ctypes byte array"""
    b = bytes(x)
    return (c_ubyte * len(b)).from_buffer_copy(b)
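Why the copy matters: the resulting ctypes array is writable even when the source is an immutable `bytes` object:

from ctypes import c_ubyte

src = b"abc"                                    # immutable
buf = (c_ubyte * len(src)).from_buffer_copy(src)
buf[0] = 0x7A                                   # fine: we mutate the copy
print(bytes(buf))                               # b'zbc'; src is untouched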
python
def select(self, names):
    """return the named subset of policies"""
    return PolicyCollection(
        [p for p in self.policies if p.name in names],
        self.options)
java
public static Workbook createWorkbook(FileFormat format, OutputStream os) throws IOException { return createWorkbook(format, os, null); }
python
def remove_rule_entry(self, rule_info):
    """Remove host data object from rule_info list."""
    temp_list = list(self.rule_info)
    for rule in temp_list:
        if (rule.ip == rule_info.get('ip') and
                rule.mac == rule_info.get('mac') and
                rule.port == rule_info.get('port')):
            LOG.debug('Removed rule info %s from the list', rule_info)
            self.rule_info.remove(rule)
python
def updateHeader(self, wcsname=None, reusename=False):
    """ Update header of image with shifts computed by *perform_fit()*. """
    # Ensure filehandle is open and available...
    self.openFile()

    verbose_level = 1
    if not self.perform_update:
        verbose_level = 0
    # Create WCSCORR table to keep track of WCS revisions anyway
    if self.perform_update:
        wcscorr.init_wcscorr(self._im.hdu)

    extlist = []
    wcscorr_extname = self.ext_name
    if self.ext_name == "PRIMARY":
        extlist = [0]
    else:
        for ext in range(1, self.nvers + 1):
            extlist.append((self.ext_name, ext))
            # add WCSNAME to SCI headers, if not provided (such as for
            # drizzled images directly obtained from the archive pre-AD)
            if ('wcsname' not in self._im.hdu[self.ext_name, ext].header and
                    self._im.hdu.fileinfo(0)['filemode'] == 'update'):
                self._im.hdu[self.ext_name, ext].header['wcsname'] = 'Default'

    # Was "self.fit['offset'][0] != np.nan", which is always True;
    # NaN must be tested with np.isnan().
    if not self.identityfit and self.goodmatch and \
            not np.isnan(self.fit['offset'][0]):
        updatehdr.updatewcs_with_shift(self._im.hdu, self.refWCS,
                                       wcsname=wcsname, reusename=reusename,
                                       fitgeom=self.fit_pars['fitgeometry'],
                                       xsh=self.fit['offset'][0], ysh=self.fit['offset'][1],
                                       rot=self.fit['rot'], scale=self.fit['scale'][0],
                                       fit=self.fit['fit_matrix'], verbose=verbose_level,
                                       xrms=self.fit['rms_keys']['RMS_RA'],
                                       yrms=self.fit['rms_keys']['RMS_DEC'])

        wnames = altwcs.wcsnames(self._im.hdu, ext=extlist[0])
        altkeys = []
        for k in wnames:
            if wnames[k] == wcsname:
                altkeys.append(k)
        if len(altkeys) > 1 and ' ' in altkeys:
            altkeys.remove(' ')
        if len(altkeys) == 0:
            next_key = ' '
        else:
            next_key = altkeys[-1]

        if self.perform_update:
            log.info(' Writing out new WCS to alternate WCS: "%s"' % next_key)

        self.next_key = next_key
    else:  # if self.identityfit or not self.goodmatch:
        if reusename:
            # Look for key of WCS with this name
            next_key = altwcs.getKeyFromName(self._im.hdu[extlist[0]].header, wcsname)
            # This wcsname is new, so start fresh
            if next_key is None:
                next_key = altwcs.next_wcskey(self._im.hdu[extlist[0]].header)
        else:
            # Find key for next WCS and save again to replicate an updated solution
            next_key = altwcs.next_wcskey(self._im.hdu[extlist[0]].header)

        if self.perform_update:
            # archive current WCS as alternate WCS with specified WCSNAME
            # Start by archiving original PRIMARY WCS
            wnames = altwcs.wcsnames(self._im.hdu, ext=extlist[0])
            # Define a default WCSNAME in the case that the file to be
            # updated did not have the WCSNAME keyword defined already
            # (as will happen when updating images that have not been
            # updated using updatewcs).
            if len(wnames) == 0:
                pri_wcsname = None
            else:
                # Safeguard against headers not having WCSNAME defined
                # This would occur if they were written out by something
                # other than stwcs.updatewcs
                if ' ' not in wnames:
                    # keyword was misspelled 'wscname' in the original
                    self._im.hdu[extlist[0]].header['wcsname'] = ''
                    wnames[' '] = ''
                pri_wcsname = wnames[' ']

            next_pkey = altwcs.getKeyFromName(
                fits.getheader(self.name, extlist[0], memmap=False), pri_wcsname)
            log.info(' Saving Primary WCS to alternate WCS: "%s"' % next_pkey)
            altwcs.archiveWCS(self._im.hdu, extlist, wcskey=next_pkey,
                              wcsname=pri_wcsname, reusekey=True)

            if reusename:
                # Look for key of WCS with this name
                next_key = altwcs.getKeyFromName(self._im.hdu[extlist[0]].header, wcsname)
                # This wcsname is new, so start fresh
                if next_key is None:
                    next_key = altwcs.next_wcskey(self._im.hdu[extlist[0]].header)
            else:
                # Find key for next WCS and save again to replicate an updated solution
                next_key = altwcs.next_wcskey(self._im.hdu[extlist[0]].header)

            # update WCSNAME to be the new name
            for ext in extlist:
                self._im.hdu[ext].header['WCSNAME'] = wcsname
            # save again using new WCSNAME
            altwcs.archiveWCS(self._im.hdu, extlist, wcskey=next_key,
                              wcsname=wcsname, reusekey=reusename)

        self.next_key = ' '

    # add FIT values to image's PRIMARY header
    fimg = self._im.hdu

    if wcsname in ['', ' ', None, "INDEF"]:
        wcsname = 'TWEAK'
    # Record values for the fit with both the PRIMARY WCS being updated
    # and the alternate WCS which will be created.
    assert (not self._im.closed)

    for ext in extlist:
        self._im.hdu[ext].header['FITNAME' + next_key] = wcsname
        for kw in self.fit['rms_keys']:
            self._im.hdu[ext].header.set(kw + next_key,
                                         self.fit['rms_keys'][kw],
                                         after='FITNAME' + next_key)

    if self.perform_update:
        log.info('Updating WCSCORR table with new WCS solution "%s"' % wcsname)
        wcscorr.update_wcscorr(self._im.hdu, wcs_id=wcsname, extname=self.ext_name)
java
private void updateAdd() {
    if (willAdd) {
        for (final Featurable featurable : toAdd) {
            featurables.add(featurable);
            for (final HandlerListener listener : listeners) {
                listener.notifyHandlableAdded(featurable);
            }
            if (featurable.hasFeature(Transformable.class)) {
                final Transformable transformable = featurable.getFeature(Transformable.class);
                transformable.teleport(transformable.getX(), transformable.getY());
            }
        }
        toAdd.clear();
        willAdd = false;
    }
}
python
def classname(self):
    """
    Returns the Java classname in dot-notation.

    :return: the Java classname
    :rtype: str
    """
    cls = javabridge.call(self.jobject, "getClass", "()Ljava/lang/Class;")
    return javabridge.call(cls, "getName", "()Ljava/lang/String;")
java
@Override @Nonnull public JQueryInvocation jqinvoke (@Nonnull @Nonempty final String sMethod) { return new JQueryInvocation (this, sMethod); }
java
public static <K,V> CacheLoader<K,V> loader(Function<K,V> function) { return new FunctionCacheLoader<>(function); }
python
def get_stp_mst_detail_output_cist_port_interface_type(self, **kwargs):
    """Auto Generated Code"""
    config = ET.Element("config")
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    cist = ET.SubElement(output, "cist")
    port = ET.SubElement(cist, "port")
    interface_type = ET.SubElement(port, "interface-type")
    interface_type.text = kwargs.pop('interface_type')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
public Converter getConverter() {
    if (this.converter != null) {
        return (this.converter);
    }
    return (Converter) getStateHelper().eval(PropertyKeys.converter);
}
java
protected void setMessageListener(MessageListener messageListener) {
    this.messageListener = messageListener;
    if (messageListener == null || isClosed()) {
        return;
    }
    synchronized (stateLock) {
        if (!running || isClosed()) {
            return;
        }
        List<MessageManager> allPrefetchedMessages = new ArrayList<MessageManager>(messageQueue);
        sqsSessionRunnable.scheduleCallBacks(messageListener, allPrefetchedMessages);
        messageQueue.clear();

        // This will request the first message if necessary.
        // TODO: This may overfetch if setMessageListener is being called multiple
        // times, as the session callback scheduler may already have entries for this consumer.
        messageListenerReady();
    }
}
java
public static String getVersionString() {
    String versionString = "UNKNOWN";

    Properties propeties = getProperites();
    if (propeties != null) {
        versionString = propeties.getProperty("finmath-lib.version");
    }

    return versionString;
}
python
def report(policies, start_date, options, output_fh, raw_output_fh=None):
    """Format a policy's extant records into a report."""
    regions = set([p.options.region for p in policies])
    policy_names = set([p.name for p in policies])
    formatter = Formatter(
        policies[0].resource_manager.resource_type,
        extra_fields=options.field,
        include_default_fields=not options.no_default_fields,
        include_region=len(regions) > 1,
        include_policy=len(policy_names) > 1
    )

    records = []
    for policy in policies:
        # initialize policy execution context for output access
        policy.ctx.initialize()
        if policy.ctx.output.type == 's3':
            policy_records = record_set(
                policy.session_factory,
                policy.ctx.output.config['netloc'],
                policy.ctx.output.config['path'].strip('/'),
                start_date)
        else:
            policy_records = fs_record_set(policy.ctx.log_dir, policy.name)

        log.debug("Found %d records for region %s",
                  len(policy_records), policy.options.region)

        for record in policy_records:
            record['policy'] = policy.name
            record['region'] = policy.options.region

        records += policy_records

    rows = formatter.to_csv(records)
    if options.format == 'csv':
        writer = UnicodeWriter(output_fh, formatter.headers())
        writer.writerow(formatter.headers())
        writer.writerows(rows)
    elif options.format == 'json':
        print(dumps(records, indent=2))
    else:
        # We special case CSV, and for other formats we pass to tabulate
        print(tabulate(rows, formatter.headers(), tablefmt=options.format))

    if raw_output_fh is not None:
        dumps(records, raw_output_fh, indent=2)
java
public static JSONObject toJSONObject(String string) throws JSONException {
    JSONObject jo = new JSONObject();
    XMLTokener x = new XMLTokener(string);
    while (x.more() && x.skipPast("<")) {
        parse(x, jo, null);
    }
    return jo;
}
java
@JsonProperty("dq") @JsonSerialize(using = Base64UrlJsonSerializer.class) @JsonDeserialize(using = Base64UrlJsonDeserializer.class) public byte[] dq() { return ByteExtensions.clone(this.dq); }
python
def git_add_commit_push_all_repos(cat):
    """Add all files in each data repository tree, commit, push.

    Creates a commit message based on the current catalog version info.

    If either the `git add` or `git push` commands fail, an error will be
    raised.  Currently, if `commit` fails an error *WILL NOT* be raised
    because the `commit` command will return a nonzero exit status if
    there are no files to add... which we don't want to raise an error.
    FIX: improve the error checking on this.
    """
    log = cat.log
    log.debug("gitter.git_add_commit_push_all_repos()")

    # Do not commit/push private repos
    all_repos = cat.PATHS.get_all_repo_folders(private=False)
    for repo in all_repos:
        log.info("Repo in: '{}'".format(repo))
        # Get the initial git SHA
        sha_beg = get_sha(repo)
        log.debug("Current SHA: '{}'".format(sha_beg))

        # Get files that should be added, compress and check sizes
        add_files = cat._prep_git_add_file_list(repo, cat.COMPRESS_ABOVE_FILESIZE)
        log.info("Found {} Files to add.".format(len(add_files)))
        if len(add_files) == 0:
            continue

        try:
            # Add all files in the repository directory tree
            git_comm = ["git", "add"]
            if cat.args.travis:
                git_comm.append("-f")
            git_comm.extend(add_files)
            _call_command_in_repo(
                git_comm, repo, cat.log, fail=True, log_flag=False)

            # Commit these files
            commit_msg = "'push' - adding all files."
            commit_msg = "{} : {}".format(cat._version_long, commit_msg)
            log.info(commit_msg)
            git_comm = ["git", "commit", "-am", commit_msg]
            _call_command_in_repo(git_comm, repo, cat.log)

            # Push the commit to the remote
            git_comm = ["git", "push"]
            if not cat.args.travis:
                _call_command_in_repo(git_comm, repo, cat.log, fail=True)
        except Exception as err:
            try:
                git_comm = ["git", "reset", "HEAD"]
                _call_command_in_repo(git_comm, repo, cat.log, fail=True)
            except Exception:
                pass
            raise err

    return
java
@BetaApi
public final Operation setTargetPoolsRegionInstanceGroupManager(
        String instanceGroupManager,
        RegionInstanceGroupManagersSetTargetPoolsRequest
                regionInstanceGroupManagersSetTargetPoolsRequestResource) {
    SetTargetPoolsRegionInstanceGroupManagerHttpRequest request =
            SetTargetPoolsRegionInstanceGroupManagerHttpRequest.newBuilder()
                    .setInstanceGroupManager(instanceGroupManager)
                    .setRegionInstanceGroupManagersSetTargetPoolsRequestResource(
                            regionInstanceGroupManagersSetTargetPoolsRequestResource)
                    .build();
    return setTargetPoolsRegionInstanceGroupManager(request);
}
java
private int getPageOffset(int page) {
    if (pageSize <= 0) {
        pageSize = frame.getVdmThread().getPropertyPageSize();
    }
    if (pageSize <= 0) {
        return 0;
    }
    return page * pageSize;
}
python
def list_repos(self, envs=[], query='/repositories/'):
    """
    List repositories in specified environments
    """
    juicer.utils.Log.log_debug(
        "List Repos In: %s", ", ".join(envs))

    repo_lists = {}
    for env in envs:
        repo_lists[env] = []

    for env in envs:
        _r = self.connectors[env].get(query)
        if _r.status_code == Constants.PULP_GET_OK:
            for repo in juicer.utils.load_json_str(_r.content):
                if re.match(".*-{0}$".format(env), repo['id']):
                    repo_lists[env].append(repo['display_name'])
        else:
            _r.raise_for_status()
    return repo_lists
python
def get(self, key):
    """Retrieve a previously stored key from the storage.

    :return: the value stored in the storage
    """
    if key not in self._keystore:
        return None
    rec = self._keystore[key]
    """:type rec InMemoryItemValue"""
    if rec.is_expired:
        self.delete(key)
        return None
    return rec.value
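A self-contained version of the same expire-on-read pattern (time-based expiry is assumed here, since `InMemoryItemValue` is not shown in the snippet):

import time

class ExpiringStore:
    def __init__(self):
        self._store = {}  # key -> (value, expires_at)

    def set(self, key, value, ttl=60.0):
        self._store[key] = (value, time.monotonic() + ttl)

    def get(self, key):
        if key not in self._store:
            return None
        value, expires_at = self._store[key]
        if time.monotonic() >= expires_at:
            del self._store[key]  # lazily evict on read
            return None
        return value

store = ExpiringStore()
store.set("token", "abc123", ttl=0.1)
print(store.get("token"))  # 'abc123'
time.sleep(0.2)
print(store.get("token"))  # None: expired and evicted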
python
def login(self, username, passwd_hash, webapi_key, country_code=1):
    """Log in (sets self.token). Returns token (session_handle)."""
    self.client = Client(self.webapi_url)
    self.ArrayOfLong = self.client.get_type('ns0:ArrayOfLong')

    # this should be done by zeep...
    ver_key = self.client.service.doQuerySysStatus(1, 1, webapi_key)['verKey']
    return self.client.service.doLoginEnc(username, passwd_hash,
                                          country_code, webapi_key,
                                          ver_key)['sessionHandlePart']
python
def _logout(creds_file=None):
    """
    Logout main function, just rm ~/.onecodex more or less
    """
    if _remove_creds(creds_file=creds_file):
        click.echo("Successfully removed One Codex credentials.", err=True)
        sys.exit(0)
    else:
        click.echo("No One Codex API keys found.", err=True)
        sys.exit(1)
python
def _cast_to_frameset(cls, other):
    """
    Private method to simplify comparison operations.

    Args:
        other (:class:`FrameSet` or set or frozenset or iterable): item to
            be compared

    Returns:
        :class:`FrameSet`

    Raises:
        :class:`NotImplemented`: if a comparison is impossible
    """
    if isinstance(other, FrameSet):
        return other
    try:
        return FrameSet(other)
    except Exception:
        return NotImplemented
python
def find_one(self, filter_=None, *args, **kwargs):
    """find_one method"""
    wrapper = kwargs.pop('wrapper', False)
    if wrapper is True:
        return self._wrapper_find_one(filter_, *args, **kwargs)
    return self.__collect.find_one(filter_, *args, **kwargs)
java
public ResponseEntity handleWebFingerDiscoveryRequest(final String resource, final String rel) {
    if (StringUtils.isNotBlank(rel) && !OidcConstants.WEBFINGER_REL.equalsIgnoreCase(rel)) {
        LOGGER.warn("Handling discovery request for a non-standard OIDC relation [{}]", rel);
    }

    val issuer = this.discovery.getIssuer();
    if (!StringUtils.equalsIgnoreCase(resource, issuer)) {
        val resourceUri = normalize(resource);
        if (resourceUri == null) {
            LOGGER.error("Unable to parse and normalize resource: [{}]", resource);
            return buildNotFoundResponseEntity("Unable to normalize provided resource");
        }
        val issuerUri = normalize(issuer);
        if (issuerUri == null) {
            LOGGER.error("Unable to parse and normalize issuer: [{}]", issuer);
            return buildNotFoundResponseEntity("Unable to normalize issuer");
        }

        if (!"acct".equals(resourceUri.getScheme())) {
            LOGGER.error("Unable to accept resource scheme: [{}]", resourceUri.toUriString());
            return buildNotFoundResponseEntity("Unable to recognize/accept resource scheme " + resourceUri.getScheme());
        }

        var user = userInfoRepository.findByEmailAddress(resourceUri.getUserInfo() + '@' + resourceUri.getHost());
        if (user.isEmpty()) {
            user = userInfoRepository.findByUsername(resourceUri.getUserInfo());
        }
        if (user.isEmpty()) {
            LOGGER.info("User/Resource not found: [{}]", resource);
            return buildNotFoundResponseEntity("Unable to find resource");
        }

        if (!StringUtils.equalsIgnoreCase(issuerUri.getHost(), resourceUri.getHost())) {
            LOGGER.info("Host mismatch for resource [{}]: expected [{}] and yet received [{}]",
                    resource, issuerUri.getHost(), resourceUri.getHost());
            return buildNotFoundResponseEntity("Unable to match resource host");
        }
    }

    val body = new LinkedHashMap<String, Object>();
    body.put("subject", resource);

    val links = new ArrayList<>();
    links.add(CollectionUtils.wrap("rel", OidcConstants.WEBFINGER_REL, "href", issuer));
    body.put("links", links);

    return new ResponseEntity<>(body, HttpStatus.OK);
}
java
public static base_responses expire(nitro_service client, cachecontentgroup resources[]) throws Exception {
    base_responses result = null;
    if (resources != null && resources.length > 0) {
        cachecontentgroup expireresources[] = new cachecontentgroup[resources.length];
        for (int i = 0; i < resources.length; i++) {
            expireresources[i] = new cachecontentgroup();
            expireresources[i].name = resources[i].name;
        }
        result = perform_operation_bulk_request(client, expireresources, "expire");
    }
    return result;
}
python
def generate_supremacy_circuit_google_v2(qubits: Iterable[devices.GridQubit],
                                         cz_depth: int,
                                         seed: int) -> circuits.Circuit:
    """
    Generates Google Random Circuits v2 as in github.com/sboixo/GRCS cz_v2.
    See also https://arxiv.org/abs/1807.10749

    Args:
        qubits: qubit grid in which to generate the circuit.
        cz_depth: number of layers with CZ gates.
        seed: seed for the random instance.

    Returns:
        A circuit corresponding to instance
        inst_{n_rows}x{n_cols}_{cz_depth+1}_{seed}

        The mapping of qubits is cirq.GridQubit(j,k) -> q[j*n_cols+k]
        (as in the QASM mapping)
    """
    non_diagonal_gates = [ops.pauli_gates.X**(1/2), ops.pauli_gates.Y**(1/2)]
    rand_gen = random.Random(seed).random

    circuit = circuits.Circuit()

    # Add an initial moment of Hadamards
    circuit.append(ops.common_gates.H(qubit) for qubit in qubits)

    layer_index = 0
    if cz_depth:
        layer_index = _add_cz_layer(layer_index, circuit)
        # In the first moment, add T gates when possible
        for qubit in qubits:
            if not circuit.operation_at(qubit, 1):
                circuit.append(ops.common_gates.T(qubit),
                               strategy=InsertStrategy.EARLIEST)

    for moment_index in range(2, cz_depth + 1):
        layer_index = _add_cz_layer(layer_index, circuit)
        # Add single qubit gates in the same moment
        for qubit in qubits:
            if not circuit.operation_at(qubit, moment_index):
                last_op = circuit.operation_at(qubit, moment_index - 1)
                if last_op:
                    gate = cast(ops.GateOperation, last_op).gate
                    # Add a random non diagonal gate after a CZ
                    if gate == ops.CZ:
                        circuit.append(_choice(rand_gen, non_diagonal_gates).on(qubit),
                                       strategy=InsertStrategy.EARLIEST)
                    # Add a T gate after a non diagonal gate
                    elif not gate == ops.T:
                        circuit.append(ops.common_gates.T(qubit),
                                       strategy=InsertStrategy.EARLIEST)

    # Add a final moment of Hadamards
    circuit.append([ops.common_gates.H(qubit) for qubit in qubits],
                   strategy=InsertStrategy.NEW_THEN_INLINE)

    return circuit
java
@Override
public RandomVariable getValue(double evaluationTime, AssetModelMonteCarloSimulationModel model) throws CalculationException {
    /*
     * The following valuation code requires in-depth knowledge of the model to calculate the density analytically.
     */
    BlackScholesModel blackScholesModel = null;
    if(model instanceof MonteCarloAssetModel) {
        try {
            blackScholesModel = (BlackScholesModel)((MonteCarloAssetModel)model).getModel();
        } catch(Exception e) {}
    }
    else if(model instanceof MonteCarloBlackScholesModel) {
        blackScholesModel = ((MonteCarloBlackScholesModel)model).getModel();
    }
    // Was 'model == null', which can never be true here; the cast result is what needs checking.
    if(blackScholesModel == null) {
        throw new ClassCastException("This method requires a Black-Scholes type model (MonteCarloBlackScholesModel).");
    }

    // Get underlying and numeraire
    RandomVariable underlyingAtMaturity = model.getAssetValue(maturity, 0);
    RandomVariable underlyingAtToday = model.getAssetValue(0.0, 0);

    // Get some model parameters
    double T = maturity - evaluationTime;
    double r = blackScholesModel.getRiskFreeRate().doubleValue();
    double sigma = blackScholesModel.getVolatility().doubleValue();

    RandomVariable lr = underlyingAtMaturity.log().sub(underlyingAtToday.log()).sub(r * T - 0.5 * sigma*sigma * T).div(sigma * sigma * T).div(underlyingAtToday);
    RandomVariable payoff = underlyingAtMaturity.sub(strike).choose(new Scalar(1.0), new Scalar(0.0));
    RandomVariable modifiedPayoff = payoff.mult(lr);

    RandomVariable numeraireAtMaturity = model.getNumeraire(maturity);
    RandomVariable numeraireAtToday = model.getNumeraire(0);
    RandomVariable monteCarloWeightsAtMaturity = model.getMonteCarloWeights(maturity);
    RandomVariable monteCarloWeightsAtToday = model.getMonteCarloWeights(maturity);

    return modifiedPayoff.div(numeraireAtMaturity).mult(numeraireAtToday).mult(monteCarloWeightsAtMaturity).div(monteCarloWeightsAtToday);
}
java
public Nfs3RemoveRequest makeRemoveRequest(byte[] parentDirectoryFileHandle, String name) throws FileNotFoundException { return new Nfs3RemoveRequest(parentDirectoryFileHandle, name, _credential); }
python
def create_summary_tear_sheet(factor_data, long_short=True, group_neutral=False):
    """
    Creates a small summary tear sheet with returns, information, and
    turnover analysis.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset
        (level 1), containing the values for a single alpha factor,
        forward returns for each period, the factor quantile/bin that
        factor value belongs to, and (optionally) the group the asset
        belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    long_short : bool
        Should this computation happen on a long short portfolio? if so,
        then mean quantile returns will be demeaned across the factor
        universe.
    group_neutral : bool
        Should this computation happen on a group neutral portfolio?
        if so, returns demeaning will occur on the group level.
    """
    # Returns Analysis
    mean_quant_ret, std_quantile = \
        perf.mean_return_by_quantile(factor_data,
                                     by_group=False,
                                     demeaned=long_short,
                                     group_adjust=group_neutral)

    mean_quant_rateret = \
        mean_quant_ret.apply(utils.rate_of_return, axis=0,
                             base_period=mean_quant_ret.columns[0])

    mean_quant_ret_bydate, std_quant_daily = \
        perf.mean_return_by_quantile(factor_data,
                                     by_date=True,
                                     by_group=False,
                                     demeaned=long_short,
                                     group_adjust=group_neutral)

    mean_quant_rateret_bydate = mean_quant_ret_bydate.apply(
        utils.rate_of_return,
        axis=0,
        base_period=mean_quant_ret_bydate.columns[0]
    )

    compstd_quant_daily = std_quant_daily.apply(
        utils.std_conversion, axis=0,
        base_period=std_quant_daily.columns[0]
    )

    alpha_beta = perf.factor_alpha_beta(factor_data,
                                        demeaned=long_short,
                                        group_adjust=group_neutral)

    mean_ret_spread_quant, std_spread_quant = perf.compute_mean_returns_spread(
        mean_quant_rateret_bydate,
        factor_data['factor_quantile'].max(),
        factor_data['factor_quantile'].min(),
        std_err=compstd_quant_daily)

    periods = utils.get_forward_returns_columns(factor_data.columns)

    fr_cols = len(periods)
    vertical_sections = 2 + fr_cols * 3
    gf = GridFigure(rows=vertical_sections, cols=1)

    plotting.plot_quantile_statistics_table(factor_data)

    plotting.plot_returns_table(alpha_beta,
                                mean_quant_rateret,
                                mean_ret_spread_quant)

    plotting.plot_quantile_returns_bar(mean_quant_rateret,
                                       by_group=False,
                                       ylim_percentiles=None,
                                       ax=gf.next_row())

    # Information Analysis
    ic = perf.factor_information_coefficient(factor_data)
    plotting.plot_information_table(ic)

    # Turnover Analysis
    quantile_factor = factor_data['factor_quantile']

    quantile_turnover = \
        {p: pd.concat([perf.quantile_turnover(quantile_factor, q, p)
                       for q in range(1, int(quantile_factor.max()) + 1)],
                      axis=1)
         for p in periods}

    autocorrelation = pd.concat(
        [perf.factor_rank_autocorrelation(factor_data, period)
         for period in periods], axis=1)

    plotting.plot_turnover_table(autocorrelation, quantile_turnover)

    plt.show()
    gf.close()
java
public HttpHandler findHandler(Class handlerClass, String uri, String[] vhosts) {
    uri = URI.stripPath(uri);

    if (vhosts == null || vhosts.length == 0)
        vhosts = __noVirtualHost;

    for (int h = 0; h < vhosts.length; h++) {
        String host = vhosts[h];

        PathMap contextMap = (PathMap) _virtualHostMap.get(host);
        if (contextMap != null) {
            List contextLists = contextMap.getMatches(uri);
            if (contextLists != null) {
                for (int i = 0; i < contextLists.size(); i++) {
                    Map.Entry entry = (Map.Entry) contextLists.get(i);
                    List contextList = (List) entry.getValue();

                    for (int j = 0; j < contextList.size(); j++) {
                        HttpContext context = (HttpContext) contextList.get(j);
                        HttpHandler handler = context.getHandler(handlerClass);
                        if (handler != null)
                            return handler;
                    }
                }
            }
        }
    }
    return null;
}
java
private void addCoord(Chunk chunk, BlockPos pos) { chunks(chunk).add(chunk, pos); }
java
public RunT getBuild(String id) {
    for (RunT r : _getRuns().values()) {
        if (r.getId().equals(id))
            return r;
    }
    return null;
}