language: stringclasses, 2 values
func_code_string: stringlengths, 63 to 466k
python
def _is_excluded(filename, exclusions): """Return true if filename matches any of exclusions.""" for exclusion in exclusions: if fnmatch(filename, exclusion): return True return False
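A quick usage sketch for the exclusion helper above (hedged: it assumes fnmatch is imported from the standard-library fnmatch module):

from fnmatch import fnmatch  # _is_excluded above relies on this import

print(_is_excluded("build/temp.log", ["*.log", "*.tmp"]))  # True
print(_is_excluded("src/main.py", ["*.log", "*.tmp"]))     # False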
java
public static IndianCalendar of( int iyear, IndianMonth imonth, int idom ) { return IndianCalendar.of(iyear, imonth.getValue(), idom); }
java
private void search(final long pMapTileIndex) { for (final MapTileModuleProviderBase provider : mProviders) { try { if (provider instanceof MapTileDownloader) { final ITileSource tileSource = ((MapTileDownloader) provider).getTileSource(); if (tileSource instanceof OnlineTileSourceBase) { if (!((OnlineTileSourceBase)tileSource).getTileSourcePolicy().acceptsPreventive()) { continue; } } } final Drawable drawable = provider.getTileLoader().loadTile(pMapTileIndex); if (drawable == null) { continue; } mCache.putTile(pMapTileIndex, drawable); return; } catch (CantContinueException exception) { // just dismiss that lazily: we don't need to be severe here } } }
java
private static String getPomText(ReleaseId releaseId, ReleaseId... dependencies) { String pom = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" + " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + "\n" + " <groupId>" + releaseId.getGroupId() + "</groupId>\n" + " <artifactId>" + releaseId.getArtifactId() + "</artifactId>\n" + " <version>" + releaseId.getVersion() + "</version>\n" + "\n"; if (dependencies != null && dependencies.length > 0) { pom += "<dependencies>\n"; for (ReleaseId dep : dependencies) { pom += "<dependency>\n"; pom += " <groupId>" + dep.getGroupId() + "</groupId>\n"; pom += " <artifactId>" + dep.getArtifactId() + "</artifactId>\n"; pom += " <version>" + dep.getVersion() + "</version>\n"; pom += "</dependency>\n"; } pom += "</dependencies>\n"; } pom += "</project>"; return pom; }
python
def estimate_supercell_matrix(spglib_dataset, max_num_atoms=120): """Estimate supercell matrix from conventional cell Diagonal supercell matrix is estimated from basis vector lengths and maximum number of atoms to be accepted. Supercell is assumed to be made from the standardized cell and to be closest to sphere under keeping lattice symmetry. For triclinic, monoclinic, and orthorhombic cells, multiplicities for a, b, c are not constrained by symmetry. For tetragonal and hexagonal cells, multiplicities for a and b are chosen to be the same, and for cubic cell, those of a, b, c are the same. Parameters ---------- spglib_dataset : tuple Spglib symmetry dataset max_num_atoms : int, optional Maximum number of atoms in created supercell to be tolerated. Returns ------- list of three integer numbers Multiplicities for a, b, c basis vectors, respectively. """ spg_num = spglib_dataset['number'] num_atoms = len(spglib_dataset['std_types']) lengths = _get_lattice_parameters(spglib_dataset['std_lattice']) if spg_num <= 74: # Triclinic, monoclinic, and orthorhombic multi = _get_multiplicity_abc(num_atoms, lengths, max_num_atoms) elif spg_num <= 194: # Tetragonal and hexagonal multi = _get_multiplicity_ac(num_atoms, lengths, max_num_atoms) else: # Cubic multi = _get_multiplicity_a(num_atoms, lengths, max_num_atoms) return multi
java
public <T> T getPrototypeBean(final Function<BeanAccessor, T> creator) { final PrototypeProvider provider = new PrototypeProvider(name, creator); return provider.getBean(this, dryRun); }
python
def blackbody_wn_rad2temp(wavenumber, radiance): """Derive brightness temperatures from radiance using the Planck function. Wavenumber space""" if np.isscalar(radiance): rad = np.array([radiance, ], dtype='float64') else: rad = np.array(radiance, dtype='float64') if np.isscalar(wavenumber): wavnum = np.array([wavenumber, ], dtype='float64') else: wavnum = np.array(wavenumber, dtype='float64') const1 = H_PLANCK * C_SPEED / K_BOLTZMANN const2 = 2 * H_PLANCK * C_SPEED**2 res = const1 * wavnum / np.log(np.divide(const2 * wavnum**3, rad) + 1.0) shape = rad.shape resshape = res.shape if wavnum.shape[0] == 1: if rad.shape[0] == 1: return res[0] else: return res[::].reshape(shape) else: if rad.shape[0] == 1: return res[0, :] else: if len(shape) == 1: return np.reshape(res, (shape[0], resshape[1])) else: return np.reshape(res, (shape[0], shape[1], resshape[1]))
python
def luhn(base, num_only=False, allow_lower_case=False): """Return the Luhn check digit for the given string. Args: base(str): string for which to calculate the check digit num_only(bool): allow only digits in `base` (default: False) allow_lower_case(bool): allow lower case letters in `base` (default: False) Returns: int: Luhn check digit Raises: ValueError: given `base` contains an unallowed character """ if num_only: alphabet = _ALPHABET[:10] else: alphabet = _ALPHABET if allow_lower_case: base = base.upper() try: pre_calc = (_PRE_CALC[alphabet.index(c)] for c in reversed(base)) cum = 0 parity = 1 for elem in pre_calc: val, parity = elem[parity] cum += val except ValueError: pass # fall through else: return 10 - cum % 10 # unallowed character detected if num_only: msg = 'The string given must only contain digits.' elif allow_lower_case: msg = 'The string given must only contain digits and ascii letters.' else: msg = 'The string given must only contain digits and upper case ' \ 'ascii letters.' raise ValueError(msg)
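A hedged usage sketch with the classic Luhn test value (assumes the module-level _ALPHABET and _PRE_CALC tables that the function above indexes into): the check digit for the payload '7992739871' is 3, so '79927398713' is the complete, valid number.

print(luhn("7992739871", num_only=True))  # 3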
python
def get_links(self, request=None): """ Return a dictionary containing all the links that should be included in the API schema. """ links = LinkNode() # Generate (path, method, view) given (path, method, callback). paths = [] view_endpoints = [] for path, method, callback in self.endpoints: view = self.create_view(callback, method, request) if getattr(view, 'exclude_from_schema', False): continue path = self.coerce_path(path, method, view) paths.append(path) view_endpoints.append((path, method, view)) # Only generate the path prefix for paths that will be included if not paths: return None prefix = self.determine_path_prefix(paths) for path, method, view in view_endpoints: if not self.has_view_permissions(path, method, view): continue link = self.get_link(path, method, view, version=getattr(request, 'version', None)) subpath = path[len(prefix):] keys = self.get_keys(subpath, method, view) try: insert_into(links, keys, link) except Exception: continue return links
java
public static synchronized void destroyManagedConnectionPool(String poolName, Object mcp) { log.tracef("%s", new TraceEvent(poolName, Integer.toHexString(System.identityHashCode(mcp)), TraceEvent.MANAGED_CONNECTION_POOL_DESTROY, "NONE")); }
java
public static WidgetLib init(GVRContext gvrContext, String customPropertiesAsset) throws InterruptedException, JSONException, NoSuchMethodException { if (mInstance == null) { // Constructor sets mInstance to ensure the initialization order new WidgetLib(gvrContext, customPropertiesAsset); } return mInstance.get(); }
python
def _encode_dbref(name, value, check_keys, opts): """Encode bson.dbref.DBRef.""" buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00") begin = len(buf) - 4 buf += _name_value_to_bson(b"$ref\x00", value.collection, check_keys, opts) buf += _name_value_to_bson(b"$id\x00", value.id, check_keys, opts) if value.database is not None: buf += _name_value_to_bson( b"$db\x00", value.database, check_keys, opts) for key, val in iteritems(value._DBRef__kwargs): buf += _element_to_bson(key, val, check_keys, opts) buf += b"\x00" buf[begin:begin + 4] = _PACK_INT(len(buf) - begin) return bytes(buf)
python
def from_out_edges(cls, vertices, edge_mapper): """ Create a DirectedGraph from a collection of vertices and a mapping giving the vertices that each vertex is connected to. """ vertices = set(vertices) edges = set() heads = {} tails = {} # Number the edges arbitrarily. edge_identifier = itertools.count() for tail in vertices: for head in edge_mapper[tail]: edge = next(edge_identifier) edges.add(edge) heads[edge] = head tails[edge] = tail return cls._raw( vertices=vertices, edges=edges, heads=heads, tails=tails, )
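A hedged construction sketch for the classmethod above, using a hypothetical three-vertex graph (assumes the surrounding DirectedGraph class and the _raw constructor it delegates to):

graph = DirectedGraph.from_out_edges(
    vertices={"a", "b", "c"},
    edge_mapper={"a": ["b", "c"], "b": ["c"], "c": []},
)
# Each of the three edges gets an arbitrary integer id; heads maps
# edge id -> head vertex and tails maps edge id -> tail vertex.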
java
@Override @Trivial public Enumeration<URL> getResources(String name) throws IOException { /* * The default implementation of getResources never calls getResources on its parent, instead it just calls findResources on all of the loader's parents. We know that our * parent will be a gateway class loader that changes the order that resources are loaded but it does this in getResources (as that is where the order *should* be changed * according to the JavaDoc). Therefore call getResources on our parent and then findResources on ourselves. */ // Note we don't need to worry about getSystemResources as our parent will do that for us try { final String f_name = name; final ClassLoader f_parent = parent; Enumeration<URL> eURL = AccessController.doPrivileged(new java.security.PrivilegedExceptionAction<Enumeration<URL>>() { @Override public Enumeration<URL> run() throws Exception { return f_parent.getResources(f_name); } }); return new CompositeEnumeration<URL>(eURL).add(this.findResources(name)); } catch (PrivilegedActionException pae) { return null; } }
java
@Override public Object invoke(final MethodInvocation invocation) throws Throwable { final Method method = invocation.getMethod(); final Transactional annotation = method.getAnnotation(Transactional.class); Object result; if (null == annotation) { result = invocation.proceed(); } else { result = AbstractDao.withTransaction(new TransFunc<Object>() { @Override public Object apply() throws IOException { try { return invocation.proceed(); } catch (Throwable throwable) { throw new IOException("withTransaction invocation.proceed()", throwable); } } }, true, supplier.get()); } return result; }
java
private CloseableHttpClient buildClient(boolean trustSelfSigned) { try { // if required, define custom SSL context allowing self-signed certs SSLContext sslContext = !trustSelfSigned ? SSLContexts.createSystemDefault() : SSLContexts.custom().loadTrustMaterial(null, new TrustSelfSignedStrategy()).build(); // set timeouts for the HTTP client int globalTimeout = readFromProperty("bdTimeout", 100000); int connectTimeout = readFromProperty("bdConnectTimeout", globalTimeout); int connectionRequestTimeout = readFromProperty("bdConnectionRequestTimeout", globalTimeout); int socketTimeout = readFromProperty("bdSocketTimeout", globalTimeout); RequestConfig requestConfig = RequestConfig.copy(RequestConfig.DEFAULT).setConnectTimeout(connectTimeout) .setSocketTimeout(socketTimeout).setConnectionRequestTimeout(connectionRequestTimeout).build(); // configure caching CacheConfig cacheConfig = CacheConfig.copy(CacheConfig.DEFAULT).setSharedCache(false).setMaxCacheEntries(1000) .setMaxObjectSize(2 * 1024 * 1024).build(); // configure connection pooling PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager(RegistryBuilder .<ConnectionSocketFactory> create().register("http", PlainConnectionSocketFactory.getSocketFactory()) .register("https", new SSLConnectionSocketFactory(sslContext)).build()); int connectionLimit = readFromProperty("bdMaxConnections", 40); // there's only one server to connect to, so max per route matters connManager.setMaxTotal(connectionLimit); connManager.setDefaultMaxPerRoute(connectionLimit); // create the HTTP client return CachingHttpClientBuilder.create().setCacheConfig(cacheConfig).setDefaultRequestConfig(requestConfig) .setConnectionManager(connManager).build(); } catch (GeneralSecurityException e) { throw new InternalConfigurationException("Failed to set up SSL context", e); } }
java
static SettingsCommand get(Map<String, String[]> clArgs) { for (Command cmd : Command.values()) { if (cmd.category == null) continue; for (String name : cmd.names) { final String[] params = clArgs.remove(name); if (params == null) continue; switch (cmd.category) { case BASIC: return SettingsCommandPreDefined.build(cmd, params); case MIXED: return SettingsCommandPreDefinedMixed.build(params); case USER: return SettingsCommandUser.build(params); } } } return null; }
python
def finalize(self, **kwargs): """ Finalize the plot with ticks, labels, and title Parameters ---------- kwargs: dict generic keyword arguments. """ # NOTE: not deduping here, so this is total, not unique self.set_title( "PosTag plot for {}-token corpus".format( (sum(self.pos_tag_counts_.values())) ) ) self.ax.set_xticks(range(len(self.pos_tag_counts_))) self.ax.set_xticklabels(list(self.pos_tag_counts_.keys()), rotation=90) # Set the axis labels if self.frequency: self.ax.set_xlabel( "{} part-of-speech tags, sorted by frequency".format(self.tagset_names[self.tagset]) ) else: self.ax.set_xlabel( "{} part-of-speech tags".format(self.tagset_names[self.tagset]) ) self.ax.set_ylabel("Count")
python
def define_ppo_epoch(memory, hparams, action_space, batch_size): """PPO epoch.""" observation, reward, done, action, old_pdf, value = memory # This is to avoid propagating gradients through simulated environment. observation = tf.stop_gradient(observation) action = tf.stop_gradient(action) reward = tf.stop_gradient(reward) if hasattr(hparams, "rewards_preprocessing_fun"): reward = hparams.rewards_preprocessing_fun(reward) done = tf.stop_gradient(done) value = tf.stop_gradient(value) old_pdf = tf.stop_gradient(old_pdf) advantage = calculate_generalized_advantage_estimator( reward, value, done, hparams.gae_gamma, hparams.gae_lambda) discounted_reward = tf.stop_gradient(advantage + value[:-1]) advantage_mean, advantage_variance = tf.nn.moments(advantage, axes=[0, 1], keep_dims=True) advantage_normalized = tf.stop_gradient( (advantage - advantage_mean)/(tf.sqrt(advantage_variance) + 1e-8)) add_lists_elementwise = lambda l1, l2: [x + y for x, y in zip(l1, l2)] number_of_batches = ((hparams.epoch_length-1) * hparams.optimization_epochs // hparams.optimization_batch_size) epoch_length = hparams.epoch_length if hparams.effective_num_agents is not None: number_of_batches *= batch_size number_of_batches //= hparams.effective_num_agents epoch_length //= hparams.effective_num_agents assert number_of_batches > 0, "Set the parameters so that number_of_batches > 0" lr = learning_rate.learning_rate_schedule(hparams) shuffled_indices = [tf.random.shuffle(tf.range(epoch_length - 1)) for _ in range(hparams.optimization_epochs)] shuffled_indices = tf.concat(shuffled_indices, axis=0) shuffled_indices = shuffled_indices[:number_of_batches * hparams.optimization_batch_size] indices_of_batches = tf.reshape(shuffled_indices, shape=(-1, hparams.optimization_batch_size)) input_tensors = [observation, action, discounted_reward, advantage_normalized, old_pdf] ppo_step_rets = tf.scan( lambda a, i: add_lists_elementwise( # pylint: disable=g-long-lambda a, define_ppo_step([tf.gather(t, indices_of_batches[i, :]) for t in input_tensors], hparams, action_space, lr )), tf.range(number_of_batches), [0., 0., 0.], parallel_iterations=1) ppo_summaries = [tf.reduce_mean(ret) / number_of_batches for ret in ppo_step_rets] ppo_summaries.append(lr) summaries_names = [ "policy_loss", "value_loss", "entropy_loss", "learning_rate" ] summaries = [tf.summary.scalar(summary_name, summary) for summary_name, summary in zip(summaries_names, ppo_summaries)] losses_summary = tf.summary.merge(summaries) for summary_name, summary in zip(summaries_names, ppo_summaries): losses_summary = tf.Print(losses_summary, [summary], summary_name + ": ") return losses_summary
python
def restore_cmd(argv): """Try to restore a broken virtualenv by reinstalling the same python version on top of it""" if len(argv) < 1: sys.exit('You must provide a valid virtualenv to target') env = argv[0] path = workon_home / env py = path / env_bin_dir / ('python.exe' if windows else 'python') exact_py = py.resolve().name return check_call([sys.executable, "-m", "virtualenv", str(path.absolute()), "--python=%s" % exact_py])
python
def do_OP_RIGHT(vm): """ >>> s = [b'abcdef', b'\\3'] >>> do_OP_RIGHT(s, require_minimal=True) >>> print(s==[b'def']) True >>> s = [b'abcdef', b'\\0'] >>> do_OP_RIGHT(s, require_minimal=False) >>> print(s==[b'']) True """ pos = vm.pop_nonnegative() if pos > 0: vm.append(vm.pop()[-pos:]) else: vm.pop() vm.append(b'')
java
public static Document parse(File file) throws IOException, SAXException, IllegalArgumentException { return parse(file, true); }
java
@Override public ThreadContext createDefaultThreadContext(Map<String, String> execProps) { return new SecurityContextImpl(false, null); }
java
private static Class<?> getProxyClass0(ClassLoader loader, Class<?>... interfaces) { if (interfaces.length > 65535) { throw new IllegalArgumentException("interface limit exceeded"); } Class<?> proxyClass = null; /* collect interface names to use as key for proxy class cache */ String[] interfaceNames = new String[interfaces.length]; // for detecting duplicates Set<Class<?>> interfaceSet = new HashSet<>(); for (int i = 0; i < interfaces.length; i++) { /* * Verify that the class loader resolves the name of this * interface to the same Class object. */ String interfaceName = interfaces[i].getName(); Class<?> interfaceClass = null; try { interfaceClass = Class.forName(interfaceName, false, loader); } catch (ClassNotFoundException e) { } if (interfaceClass != interfaces[i]) { throw new IllegalArgumentException( interfaces[i] + " is not visible from class loader"); } /* * Verify that the Class object actually represents an * interface. */ if (!interfaceClass.isInterface()) { throw new IllegalArgumentException( interfaceClass.getName() + " is not an interface"); } /* * Verify that this interface is not a duplicate. */ if (interfaceSet.contains(interfaceClass)) { throw new IllegalArgumentException( "repeated interface: " + interfaceClass.getName()); } interfaceSet.add(interfaceClass); interfaceNames[i] = interfaceName; } /* * Using string representations of the proxy interfaces as * keys in the proxy class cache (instead of their Class * objects) is sufficient because we require the proxy * interfaces to be resolvable by name through the supplied * class loader, and it has the advantage that using a string * representation of a class makes for an implicit weak * reference to the class. */ List<String> key = Arrays.asList(interfaceNames); /* * Find or create the proxy class cache for the class loader. */ Map<List<String>, Object> cache; synchronized (loaderToCache) { cache = loaderToCache.get(loader); if (cache == null) { cache = new HashMap<>(); loaderToCache.put(loader, cache); } /* * This mapping will remain valid for the duration of this * method, without further synchronization, because the mapping * will only be removed if the class loader becomes unreachable. */ } /* * Look up the list of interfaces in the proxy class cache using * the key. This lookup will result in one of three possible * kinds of values: * null, if there is currently no proxy class for the list of * interfaces in the class loader, * the pendingGenerationMarker object, if a proxy class for the * list of interfaces is currently being generated, * or a weak reference to a Class object, if a proxy class for * the list of interfaces has already been generated. */ synchronized (cache) { /* * Note that we need not worry about reaping the cache for * entries with cleared weak references because if a proxy class * has been garbage collected, its class loader will have been * garbage collected as well, so the entire cache will be reaped * from the loaderToCache map. */ do { Object value = cache.get(key); if (value instanceof Reference) { proxyClass = (Class<?>) ((Reference<?>) value).get(); } if (proxyClass != null) { // proxy class already generated: return it return proxyClass; } else if (value == pendingGenerationMarker) { // proxy class being generated: wait for it try { cache.wait(); } catch (InterruptedException e) { /* * The class generation that we are waiting for should * take a small, bounded time, so we can safely ignore * thread interrupts here. */ } continue; } else { /* * No proxy class for this list of interfaces has been * generated or is being generated, so we will go and * generate it now. Mark it as pending generation. */ cache.put(key, pendingGenerationMarker); break; } } while (true); } try { String proxyPkg = null; // package to define proxy class in /* * Record the package of a non-public proxy interface so that the * proxy class will be defined in the same package. Verify that * all non-public proxy interfaces are in the same package. */ for (int i = 0; i < interfaces.length; i++) { int flags = interfaces[i].getModifiers(); if (!Modifier.isPublic(flags)) { String name = interfaces[i].getName(); int n = name.lastIndexOf('.'); String pkg = ((n == -1) ? "" : name.substring(0, n + 1)); if (proxyPkg == null) { proxyPkg = pkg; } else if (!pkg.equals(proxyPkg)) { throw new IllegalArgumentException( "non-public interfaces from different packages"); } } } if (proxyPkg == null) { // if no non-public proxy interfaces, use the default package. proxyPkg = ""; } { // Android-changed: Generate the proxy directly instead of calling // through to ProxyGenerator. /* J2ObjC removed. List<Method> methods = getMethods(interfaces); Collections.sort(methods, ORDER_BY_SIGNATURE_AND_SUBTYPE); validateReturnTypes(methods); List<Class<?>[]> exceptions = deduplicateAndGetExceptions(methods); Method[] methodsArray = methods.toArray(new Method[methods.size()]); Class<?>[][] exceptionsArray = exceptions.toArray(new Class<?>[exceptions.size()][]); */ /* * Choose a name for the proxy class to generate. */ final long num; synchronized (nextUniqueNumberLock) { num = nextUniqueNumber++; } String proxyName = proxyPkg + proxyClassNamePrefix + num; proxyClass = generateProxy(proxyName, interfaces, loader); } // add to set of all generated proxy classes, for isProxyClass proxyClasses.put(proxyClass, null); } finally { /* * We must clean up the "pending generation" state of the proxy * class cache entry somehow. If a proxy class was successfully * generated, store it in the cache (with a weak reference); * otherwise, remove the reserved entry. In all cases, notify * all waiters on reserved entries in this cache. */ synchronized (cache) { if (proxyClass != null) { cache.put(key, new WeakReference<Class<?>>(proxyClass)); } else { cache.remove(key); } cache.notifyAll(); } } return proxyClass; }
java
public static <T extends Number> WindowOver<T> stddevSamp(Expression<T> expr) { return new WindowOver<T>(expr.getType(), SQLOps.STDDEVSAMP, expr); }
java
public List<FileInfo> listAllFileInfoForPrefix(URI prefix) throws IOException { logger.atFine().log("listAllFileInfoForPrefixPage(%s)", prefix); StorageResourceId prefixId = getPrefixId(prefix); List<GoogleCloudStorageItemInfo> itemInfos = gcs.listObjectInfo( prefixId.getBucketName(), prefixId.getObjectName(), /* delimiter= */ null); List<FileInfo> fileInfos = FileInfo.fromItemInfos(pathCodec, itemInfos); fileInfos.sort(FILE_INFO_PATH_COMPARATOR); return fileInfos; }
java
@Order @Bean ArmeriaServerConfigurator corsConfigurator( @Value("${zipkin.query.allowed-origins:*}") String allowedOrigins) { CorsServiceBuilder corsBuilder = CorsServiceBuilder.forOrigins(allowedOrigins.split(",")) // NOTE: The property says query, and the UI does not use POST, but we allow POST? // // The reason is that our former CORS implementation accidentally allowed POST. People doing // browser-based tracing relied on this, so we can't remove it by default. In the future, we // could split the collector's CORS policy into a different property, still allowing POST // with content-type by default. .allowRequestMethods(HttpMethod.GET, HttpMethod.POST) .allowRequestHeaders(HttpHeaderNames.CONTENT_TYPE, // Use literals to avoid a runtime dependency on armeria-grpc types HttpHeaderNames.of("X-GRPC-WEB")) .exposeHeaders("grpc-status", "grpc-message", "armeria.grpc.ThrowableProto-bin"); return builder -> builder.decorator(corsBuilder::build); }
java
protected Response buildPost(final WebApplicationService service, final Map<String, String> parameters) { return DefaultResponse.getPostResponse(service.getOriginalUrl(), parameters); }
java
public <T> List<List<T>> queryTypedResults(String sql, String[] args) { return db.queryTypedResults(sql, args); }
java
@Override public <U> CollectionX<U> unitIterable(final Iterable<U> u) { return ListX.fromIterable(u); }
java
public static DataResource fromName(String name) { String[] parts = StringUtils.split(name, '/'); if (!parts[0].equals(ROOT_NAME) || parts.length > 3) throw new IllegalArgumentException(String.format("%s is not a valid data resource name", name)); if (parts.length == 1) return root(); if (parts.length == 2) return keyspace(parts[1]); return columnFamily(parts[1], parts[2]); }
python
def train( self, X_train, Y_train, n_epochs=25, lr=0.01, batch_size=256, shuffle=True, X_dev=None, Y_dev=None, print_freq=5, dev_ckpt=True, dev_ckpt_delay=0.75, b=0.5, pos_label=1, seed=1234, host_device="CPU", ): """ Generic training procedure for PyTorch model :param X_train: The training data which is a (list of Candidate objects, a sparse matrix of corresponding features) pair. :type X_train: pair :param Y_train: Array of marginal probabilities for each Candidate. :type Y_train: list or numpy.array :param n_epochs: Number of training epochs. :type n_epochs: int :param lr: Learning rate. :type lr: float :param batch_size: Batch size for learning model. :type batch_size: int :param shuffle: If True, shuffle training data every epoch. :type shuffle: bool :param X_dev: Candidates for evaluation, same format as X_train. :param Y_dev: Labels for evaluation, same format as Y_train. :param print_freq: number of epochs at which to print status, and if present, evaluate the dev set (X_dev, Y_dev). :type print_freq: int :param dev_ckpt: If True, save a checkpoint whenever highest score on (X_dev, Y_dev) reached. Note: currently only evaluates at every @print_freq epochs. :param dev_ckpt_delay: Start dev checkpointing after this portion of n_epochs. :type dev_ckpt_delay: float :param b: Decision boundary *for binary setting only*. :type b: float :param pos_label: Positive class index *for binary setting only*. Default: 1 :type pos_label: int :param seed: Random seed :type seed: int :param host_device: Host device :type host_device: str """ # Update training parameters self.settings.update( { "n_epochs": n_epochs, "lr": lr, "batch_size": batch_size, "shuffle": shuffle, "seed": seed, "host_device": host_device, } ) # Set random seed self._set_random_seed(self.settings["seed"]) self._check_input(X_train) verbose = print_freq > 0 # Update cardinality of the model with training marginals self.cardinality = Y_train.shape[1] # Make sure marginals are in [0,1] (vs. e.g. [-1, 1]) if not np.all(Y_train.sum(axis=1) - 1 < 1e-10): raise ValueError("Y_train must be row-stochastic (rows sum to 1).") if not np.all(Y_train >= 0): raise ValueError("Y_train must have values in [0,1].") # Remove unlabeled examples diffs = Y_train.max(axis=1) - Y_train.min(axis=1) train_idxs = np.where(diffs > 1e-6)[0] self._update_settings(X_train) _X_train, _Y_train = self._preprocess_data( X_train, Y_train, idxs=train_idxs, train=True ) train_dataloader = DataLoader( MultiModalDataset(_X_train, _Y_train), batch_size=self.settings["batch_size"], collate_fn=self._collate_fn(), shuffle=self.settings["shuffle"], ) if X_dev is not None: _X_dev, _Y_dev = self._preprocess_data(X_dev, Y_dev) if self.settings["host_device"] in self._gpu: if not torch.cuda.is_available(): self.settings["host_device"] = "CPU" self.logger.info("GPU is not available, switching to CPU...") else: self.logger.info("Using GPU...") self.logger.info(f"Settings: {self.settings}") # Build network self._build_model() self._setup_model_loss(self.settings["lr"]) # Set up GPU if necessary if self.settings["host_device"] in self._gpu: nn.Module.cuda(self) # Run mini-batch SGD n = len(_X_train) if self.settings["batch_size"] > n: self.logger.info(f"Switching batch size to {n} for training.") batch_size = min(self.settings["batch_size"], n) if verbose: st = time() self.logger.info(f"[{self.name}] Training model") self.logger.info( f"[{self.name}] " f"n_train={n} " f"#epochs={self.settings['n_epochs']} " f"batch size={batch_size}" ) dev_score_opt = 0.0 for epoch in range(self.settings["n_epochs"]): iteration_losses = [] nn.Module.train(self, True) for X_batch, Y_batch in train_dataloader: # zero gradients for each batch self.optimizer.zero_grad() output = self._calc_logits(X_batch) loss = self.loss(output, Y_batch) # Compute gradient loss.backward() # Update the parameters self.optimizer.step() iteration_losses.append(self._non_cuda(loss)) # Print training stats and optionally checkpoint model if ( verbose and (epoch + 1) % print_freq == 0 ) or epoch + 1 == self.settings["n_epochs"]: # Log the training loss into tensorboard self.tensorboard_logger.add_scalar("loss", loss.item(), epoch + 1) msg = ( f"[{self.name}] " f"Epoch {epoch + 1} ({time() - st:.2f}s)\t" f"Average loss={torch.stack(iteration_losses).mean():.6f}" ) if X_dev is not None: scores = self.score(_X_dev, _Y_dev, b=b, pos_label=pos_label) score = scores["accuracy"] if self.cardinality > 2 else scores["f1"] score_label = "Acc." if self.cardinality > 2 else "F1" msg += f"\tDev {score_label}={100.0 * score:.2f}" # Log the evaluation score on dev set into tensorboard for metric in scores.keys(): self.tensorboard_logger.add_scalar( metric, scores[metric], epoch + 1 ) self.logger.info(msg) # Save checkpoint model_file = f"checkpoint_epoch_{epoch + 1}.pt" self.save(model_file=model_file, save_dir=self.settings["log_dir"]) # If best score on dev set so far and dev checkpointing is # active, save best checkpoint if ( X_dev is not None and dev_ckpt and epoch > dev_ckpt_delay * self.settings["n_epochs"] and score > dev_score_opt ): dev_score_opt = score self.logger.info( f"Saving best checkpoint " f'{self.settings["log_dir"]}/{model_file}.' ) copyfile( f'{self.settings["log_dir"]}/{model_file}', f'{self.settings["log_dir"]}/best_model.pt', ) if (X_dev is None or dev_ckpt is False) and epoch + 1 == self.settings[ "n_epochs" ]: self.logger.info( f"Saving final model as best checkpoint " f'{self.settings["log_dir"]}/{model_file}.' ) copyfile( f'{self.settings["log_dir"]}/{model_file}', f'{self.settings["log_dir"]}/best_model.pt', ) # Conclude training if verbose: self.logger.info(f"[{self.name}] Training done ({time() - st:.2f}s)") # Load the best checkpoint (i.e. best on dev set) self.logger.info("Loading best checkpoint") self.load(model_file="best_model.pt", save_dir=self.settings["log_dir"])
java
private void actionProduced() { for (final ProducerListener listener : listeners) { listener.notifyProduced(currentObject); } for (final ProducibleListener listener : current.getFeature(Producible.class).getListeners()) { listener.notifyProductionEnded(this); } currentObject = null; progress = -1; // Next production if (!productions.isEmpty()) { state = ProducerState.CHECK; } else { state = ProducerState.NONE; } }
java
private List<Bucket> lookup(Record record) { List<Bucket> buckets = new ArrayList<>(); for (Property p : config.getLookupProperties()) { String propname = p.getName(); Collection<String> values = record.getValues(propname); if (values == null) continue; for (String value : values) { String[] tokens = StringUtils.split(value); for (int ix = 0; ix < tokens.length; ix++) { Bucket b = store.lookupToken(propname, tokens[ix]); if (b == null || b.records == null) continue; long[] ids = b.records; if (DEBUG) System.out.println(propname + ", " + tokens[ix] + ": " + b.nextfree + " (" + b.getScore() + ")"); buckets.add(b); } } } return buckets; }
java
protected void _generate(SarlSkill skill, IExtraLanguageGeneratorContext context) { final JvmDeclaredType jvmType = getJvmModelAssociations().getInferredType(skill); final PyAppendable appendable = createAppendable(jvmType, context); List<JvmTypeReference> superTypes = getSuperTypes(skill.getExtends(), skill.getImplements()); if (superTypes.isEmpty()) { superTypes = Collections.singletonList(getTypeReferences().getTypeForName(Skill.class, skill)); } final String qualifiedName = this.qualifiedNameProvider.getFullyQualifiedName(skill).toString(); if (generateTypeDeclaration( qualifiedName, skill.getName(), skill.isAbstract(), superTypes, getTypeBuilder().getDocumentation(skill), true, skill.getMembers(), appendable, context, (it, context2) -> { generateGuardEvaluators(qualifiedName, it, context2); })) { final QualifiedName name = getQualifiedNameProvider().getFullyQualifiedName(skill); writeFile(name, appendable, context); } }
java
public void marshall(LoggingOptionsPayload loggingOptionsPayload, ProtocolMarshaller protocolMarshaller) { if (loggingOptionsPayload == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(loggingOptionsPayload.getRoleArn(), ROLEARN_BINDING); protocolMarshaller.marshall(loggingOptionsPayload.getLogLevel(), LOGLEVEL_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def describe(self): """ describes a Symbol, returns a string """ lines = [] lines.append("Symbol = {}".format(self.name)) if len(self.tags): tgs = ", ".join(x.tag for x in self.tags) lines.append(" tagged = {}".format(tgs)) if len(self.aliases): als = ", ".join(x.alias for x in self.aliases) lines.append(" aliased = {}".format(als)) if len(self.feeds): lines.append(" feeds:") for fed in self.feeds: lines.append(" {}. {}".format(fed.fnum, fed.ftype)) return "\n".join(lines)
java
public static String getText(final Node root) { if (root == null) { return ""; } else { final StringBuilder result = new StringBuilder(1024); if (root.hasChildNodes()) { final NodeList list = root.getChildNodes(); for (int i = 0; i < list.getLength(); i++) { final Node childNode = list.item(i); if (childNode.getNodeType() == Node.ELEMENT_NODE) { final Element e = (Element) childNode; final String value = e.getAttribute(ATTRIBUTE_NAME_CLASS); if (!excludeList.contains(value)) { final String s = getText(e); result.append(s); } } else if (childNode.getNodeType() == Node.TEXT_NODE) { result.append(childNode.getNodeValue()); } } } else if (root.getNodeType() == Node.TEXT_NODE) { result.append(root.getNodeValue()); } return result.toString(); } }
java
public PathImpl lookupImpl(String userPath, Map<String,Object> newAttributes, boolean isAllowRoot) { if (userPath == null) { return lookupImpl(getPath(), newAttributes, isAllowRoot); } if (! isAllowRoot) { return schemeWalk(userPath, newAttributes, userPath, 0); } String scheme = scanScheme(userPath); if (scheme == null) { return schemeWalk(userPath, newAttributes, userPath, 0); } PathImpl path; SchemeMap schemeMap = _schemeMap; // Special case to handle the windows special schemes // c:xxx -> file:/c:xxx if (isWindows()) { int length = scheme.length(); char ch; if (length == 1 && ('a' <= (ch = scheme.charAt(0)) && ch <= 'z' || 'A' <= ch && ch <= 'Z')) { userPath = Character.toLowerCase(ch) + userPath.substring(1); if (_isTestWindows) return schemeWalk(userPath, newAttributes, "/" + userPath, 0); path = schemeMap.get("file"); if (path != null) return path.schemeWalk(userPath, newAttributes, "/" + userPath, 0); else return schemeWalk(userPath, newAttributes, "/" + userPath, 0); } } path = schemeMap.get(scheme); // assume the foo:bar is a subfile if (path == null) { return schemeWalk(userPath, newAttributes, userPath, 0); } else { return path.schemeWalk(userPath, newAttributes, userPath, scheme.length() + 1); } }
java
protected void addOccupantInfo (BodyObject body, OccupantInfo info) { // clone the canonical copy and insert it into the DSet _plobj.addToOccupantInfo(info); // add the body oid to our place object's occupant list _plobj.addToOccupants(body.getOid()); }
java
public Set<T> outEdges(int vertex) { // REMINDER: this is probably best wrapped with yet another // decorator class to avoid the O(n) penalty of iteration over all // the edges Set<T> edges = getAdjacencyList(vertex); if (edges.isEmpty()) return Collections.<T>emptySet(); Set<T> out = new HashSet<T>(); for (T e : edges) { if (e.from() == vertex) out.add(e); } return out; }
python
def get_ref_favorites(self, project, repository_id=None, identity_id=None): """GetRefFavorites. [Preview API] Gets the refs favorites for a repo and an identity. :param str project: Project ID or project name :param str repository_id: The id of the repository. :param str identity_id: The id of the identity whose favorites are to be retrieved. If null, the requesting identity is used. :rtype: [GitRefFavorite] """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') query_parameters = {} if repository_id is not None: query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str') if identity_id is not None: query_parameters['identityId'] = self._serialize.query('identity_id', identity_id, 'str') response = self._send(http_method='GET', location_id='876f70af-5792-485a-a1c7-d0a7b2f42bbb', version='5.1-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('[GitRefFavorite]', self._unwrap_collection(response))
python
def html_table_from_query(rows: Iterable[Iterable[Optional[str]]], descriptions: Iterable[Optional[str]]) -> str: """ Converts rows from an SQL query result to an HTML table. Suitable for processing output from the defunct function ``rnc_db.fetchall_with_fieldnames(sql)``. """ html = u"<table>\n" # Header row html += u"<tr>" for x in descriptions: if x is None: x = u"" html += u"<th>{}</th>".format(webify(x)) html += u"</tr>\n" # Data rows for row in rows: html += u"<tr>" for x in row: if x is None: x = u"" html += u"<td>{}</td>".format(webify(x)) html += u"</tr>\n" html += u"</table>\n" return html
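A usage sketch for the function above (hedged: webify is assumed to be an HTML-escaping helper from the same module; None cells render as empty strings):

rows = [("alice", "30"), (None, "25")]
print(html_table_from_query(rows, ["name", "age"]))
# <table> with a <th>name</th><th>age</th> header row and one <tr> per data row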
java
private static Map<State, StateStrategy> createStrategyMap() { final Map<State, StateStrategy> map = new EnumMap<>(State.class); map.put(State.CLOSED, new StateStrategyClosed()); map.put(State.OPEN, new StateStrategyOpen()); return map; }
python
def preferred_format(incomplete_format, preferred_formats): """Return the preferred format for the given extension""" incomplete_format = long_form_one_format(incomplete_format) if 'format_name' in incomplete_format: return incomplete_format for fmt in long_form_multiple_formats(preferred_formats): if ((incomplete_format['extension'] == fmt['extension'] or ( fmt['extension'] == '.auto' and incomplete_format['extension'] not in ['.md', '.Rmd', '.ipynb'])) and incomplete_format.get('suffix') == fmt.get('suffix', incomplete_format.get('suffix')) and incomplete_format.get('prefix') == fmt.get('prefix', incomplete_format.get('prefix'))): fmt.update(incomplete_format) return fmt return incomplete_format
python
def get_string_offset(edge): """return the offset (int) of a string""" onset_label = edge.find('labels[@name="SEND"]') onset_str = onset_label.xpath('@valueString')[0] return int(onset_str)
java
public void setSecurityPolicy(com.google.api.ads.admanager.axis.v201805.SecurityPolicySettings securityPolicy) { this.securityPolicy = securityPolicy; }
java
private void _RemoveValidator( Row item, int column ) { // already clean ? if( m_currentEditedItem == null && m_currentEditedColumn < 0 ) return; // touch the table if( m_callback != null ) m_callback.onTouchCellContent( item, column ); if( m_currentEditor != null ) m_currentEditor.removeFromParent(); m_currentEditor = null; m_currentEditedItem = null; m_currentEditedColumn = -1; }
java
@Nullable public static String getFromFirstExcl (@Nullable final String sStr, @Nullable final String sSearch) { return _getFromFirst (sStr, sSearch, false); }
java
public static boolean isEmpty(File file) { if (null == file) { return true; } if (file.isDirectory()) { String[] subFiles = file.list(); if (ArrayUtil.isEmpty(subFiles)) { return true; } } else if (file.isFile()) { return file.length() <= 0; } return false; }
python
def conditional_probability_alive_matrix(self, max_frequency=None, max_recency=None): """ Compute the probability alive matrix. Parameters ---------- max_frequency: float, optional the maximum frequency to plot. Default is max observed frequency. max_recency: float, optional the maximum recency to plot. This also determines the age of the customer. Default to max observed age. Returns ------- matrix: A matrix of the form [t_x: historical recency, x: historical frequency] """ max_frequency = max_frequency or int(self.data["frequency"].max()) max_recency = max_recency or int(self.data["T"].max()) return np.fromfunction( self.conditional_probability_alive, (max_frequency + 1, max_recency + 1), T=max_recency ).T
python
def on_IOError(self, e): """ Handle an IOError exception. """ sys.stderr.write("Error: %s: \"%s\"\n" % (e.strerror, e.filename))
java
public void addSummaryLinkComment(AbstractMemberWriter mw, ProgramElementDoc member, Tag[] firstSentenceTags, Content tdSummary) { addIndexComment(member, firstSentenceTags, tdSummary); }
java
public RestRequestInformation asRestRequestInformation() { try { return new RestRequestInformationImpl( api, new URL(endpoint.getFullUrl(urlParameters)), queryParameters, headers, body); } catch (MalformedURLException e) { throw new AssertionError(e); } }
python
def wherein(self, fieldname, collection, negate=False): """ .wherein(fieldname, collection, negate=False) Returns a new DataTable with rows only where the value at `fieldname` is contained within `collection`. """ if negate: return self.mask([elem not in collection for elem in self[fieldname]]) else: return self.mask([elem in collection for elem in self[fieldname]])
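A hedged usage sketch, assuming a DataTable instance named table whose mask() method and column indexing behave as the method above expects:

open_or_pending = table.wherein("status", {"open", "pending"})
everything_else = table.wherein("status", {"open", "pending"}, negate=True)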
python
def _interpolate(self, kind='linear'): """Apply scipy.interpolate.interp1d along resampling dimension.""" # drop any existing non-dimension coordinates along the resampling # dimension dummy = self._obj.copy() for k, v in self._obj.coords.items(): if k != self._dim and self._dim in v.dims: dummy = dummy.drop(k) return dummy.interp(assume_sorted=True, method=kind, kwargs={'bounds_error': False}, **{self._dim: self._full_index})
python
def run(command, options, args): """Run the requested command. args is either a list of descriptions or a list of strings to filter by""" if command == "backend": subprocess.call(("sqlite3", db_path)) if command == "add": dp = pdt.Calendar() due = mktime(dp.parse(options.due)[0]) if options.due else None print "added tasks..." [Task(desc, due).add() for desc in args] return filters = args if len(args) else None rows = Query(filters, options).find() tasks = [Task(r["desc"], r["due"]) for r in rows] if command == "list": for t in tasks: print "\t *", t if command == "done": print "done with..." finished_tasks = [] for t in tasks: finished = t.done() if finished: finished_tasks.append(t) if not finished_tasks: return print "" print "finished tasks:" for t in finished_tasks: print "\t X", t if command == "remove": print "remove..." removed_tasks = [] for t in tasks: removed = t.remove() if removed: removed_tasks.append(t) if not removed_tasks: return print "" print "removed tasks:" for t in removed_tasks: print "\t RM", t
java
private void assignInstancesToContainers(PackingPlanBuilder planBuilder, Map<String, Integer> parallelismMap, PolicyType policyType) throws ConstraintViolationException { for (String componentName : parallelismMap.keySet()) { int numInstance = parallelismMap.get(componentName); for (int i = 0; i < numInstance; ++i) { policyType.assignInstance(planBuilder, componentName, this); } } }
python
def load_x509_cert(url, httpc, spec2key, **get_args): """ Get and transform a X509 cert into a key. :param url: Where the X509 cert can be found :param httpc: HTTP client to use for fetching :param spec2key: A dictionary over keys already seen :param get_args: Extra key word arguments to the HTTP GET request :return: List of 2-tuples (keytype, key) """ try: r = httpc('GET', url, allow_redirects=True, **get_args) if r.status_code == 200: cert = str(r.text) try: public_key = spec2key[cert] # If I've already seen it except KeyError: public_key = import_rsa_key(cert) spec2key[cert] = public_key if isinstance(public_key, rsa.RSAPublicKey): return {"rsa": public_key} else: raise Exception("HTTP Get error: %s" % r.status_code) except Exception as err: # not a RSA key logger.warning("Can't load key: %s" % err) return []
java
public String mailbox(ImapRequestLineReader request) throws ProtocolException { String mailbox = astring(request); if (mailbox.equalsIgnoreCase(ImapConstants.INBOX_NAME)) { return ImapConstants.INBOX_NAME; } else { return BASE64MailboxDecoder.decode(mailbox); } }
python
def Append(self, item): """Add an item to the list. Args: item (object): item. """ if self._index >= self._size: self._index = self._index % self._size try: self._list[self._index] = item except IndexError: self._list.append(item) self._index += 1
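The method above implements a fixed-size ring buffer: once _index reaches _size it wraps around and overwrites the oldest slots in place. A hedged sketch, assuming a hypothetical CircularList class exposing this method with _size=3 and _index starting at 0:

buf = CircularList(size=3)  # hypothetical constructor
for item in range(5):
    buf.Append(item)
# The underlying list is now [3, 4, 2]: items 0 and 1 were overwritten in place.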
java
public com.google.api.ads.admanager.axis.v201808.ProgressStep[] getSteps() { return steps; }
python
def set_window_geometry(geometry): """Set window geometry. Parameters ========== geometry : tuple (4 integers) or None x, y, dx, dy values employed to set the Qt backend geometry. """ if geometry is not None: x_geom, y_geom, dx_geom, dy_geom = geometry mngr = plt.get_current_fig_manager() if 'window' in dir(mngr): try: mngr.window.setGeometry(x_geom, y_geom, dx_geom, dy_geom) except AttributeError: pass else: pass
java
public ActivityImpl parseSubProcess(Element subProcessElement, ScopeImpl scope) { ActivityImpl subProcessActivity = createActivityOnScope(subProcessElement, scope); subProcessActivity.setSubProcessScope(true); parseAsynchronousContinuationForActivity(subProcessElement, subProcessActivity); Boolean isTriggeredByEvent = parseBooleanAttribute(subProcessElement.attribute("triggeredByEvent"), false); subProcessActivity.getProperties().set(BpmnProperties.TRIGGERED_BY_EVENT, isTriggeredByEvent); subProcessActivity.setProperty(PROPERTYNAME_CONSUMES_COMPENSATION, !isTriggeredByEvent); subProcessActivity.setScope(true); if (isTriggeredByEvent) { subProcessActivity.setActivityBehavior(new EventSubProcessActivityBehavior()); subProcessActivity.setEventScope(scope); } else { subProcessActivity.setActivityBehavior(new SubProcessActivityBehavior()); } parseScope(subProcessElement, subProcessActivity); for (BpmnParseListener parseListener : parseListeners) { parseListener.parseSubProcess(subProcessElement, scope, subProcessActivity); } return subProcessActivity; }
python
def _set_pim(self, v, load=False): """ Setter method for pim, mapped from YANG variable /rbridge_id/router/hide_pim_holder/pim (container) If this variable is read-only (config: false) in the source YANG file, then _set_pim is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_pim() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=pim.pim, is_container='container', presence=True, yang_name="pim", rest_name="pim", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-add-mode': None, u'cli-full-command': None, u'callpoint': u'PimRtrCfgCallpoint', u'info': u'Enable PIM (Protocol Independent Multicast)', u'cli-mode-name': u'config-pim-router'}}, namespace='urn:brocade.com:mgmt:brocade-pim', defining_module='brocade-pim', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """pim must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=pim.pim, is_container='container', presence=True, yang_name="pim", rest_name="pim", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-add-mode': None, u'cli-full-command': None, u'callpoint': u'PimRtrCfgCallpoint', u'info': u'Enable PIM (Protocol Independent Multicast)', u'cli-mode-name': u'config-pim-router'}}, namespace='urn:brocade.com:mgmt:brocade-pim', defining_module='brocade-pim', yang_type='container', is_config=True)""", }) self.__pim = t if hasattr(self, '_set'): self._set()
java
public void setMaxNumberOfThreadsPerTask(Integer maxNumberOfThreadsPerTask) { if(maxNumberOfThreadsPerTask<0) { throw new IllegalArgumentException("The max number of threads can not be negative."); } else if(maxNumberOfThreadsPerTask==0) { this.maxNumberOfThreadsPerTask = AVAILABLE_PROCESSORS; } else { this.maxNumberOfThreadsPerTask = Math.min(maxNumberOfThreadsPerTask, 4*AVAILABLE_PROCESSORS); } }
python
def block_signals(self): """Prevent the combos and dock listening for event changes.""" self.disconnect_layer_listener() self.aggregation_layer_combo.blockSignals(True) self.exposure_layer_combo.blockSignals(True) self.hazard_layer_combo.blockSignals(True)
java
public R fetchQuery(String properties) { ((TQRootBean) _root).query().fetchQuery(_name, properties); return _root; }
python
def render_and_write(template_dir, path, context): """Renders the specified template into the file. :param template_dir: the directory to load the template from :param path: the path to write the templated contents to :param context: the parameters to pass to the rendering engine """ env = Environment(loader=FileSystemLoader(template_dir)) template_file = os.path.basename(path) template = env.get_template(template_file) log('Rendering from template: %s' % template.name, level=DEBUG) rendered_content = template.render(context) if not rendered_content: log("Render returned None - skipping '%s'" % path, level=WARNING) return write(path, rendered_content.encode('utf-8').strip()) log('Wrote template %s' % path, level=DEBUG)
python
def _set_add_options(cls, checked_codes, options): """Set `checked_codes` by the `add_ignore` or `add_select` options.""" checked_codes |= cls._expand_error_codes(options.add_select) checked_codes -= cls._expand_error_codes(options.add_ignore)
python
def longest_lines(shape): """ Creates lines from shape :param shape: :return: list of dictionaries with col,row,len fields """ lines = [] for level in set(shape): count = 0 for i in range(len(shape)): if shape[i] <= level: count += 1 elif count: lines.append({'row': level, 'col': i - count + 1, 'len': count}) count = 0 if count: lines.append({'row': level, 'col': i - count + 2, 'len': count}) return sorted(lines, key=lambda l: l['row'])
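A worked example of the function above; note that the returned col values are 1-based:

print(longest_lines([2, 1, 1, 2]))
# [{'row': 1, 'col': 2, 'len': 2}, {'row': 2, 'col': 1, 'len': 4}]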
java
public Collection<Tag> getListUser(String userId) throws FlickrException { Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("method", METHOD_GET_LIST_USER); parameters.put("user_id", userId); Response response = transportAPI.get(transportAPI.getPath(), parameters, apiKey, sharedSecret); if (response.isError()) { throw new FlickrException(response.getErrorCode(), response.getErrorMessage()); } Element whoElement = response.getPayload(); List<Tag> tags = new ArrayList<Tag>(); Element tagsElement = (Element) whoElement.getElementsByTagName("tags").item(0); NodeList tagElements = tagsElement.getElementsByTagName("tag"); for (int i = 0; i < tagElements.getLength(); i++) { Element tagElement = (Element) tagElements.item(i); Tag tag = new Tag(); tag.setValue(((Text) tagElement.getFirstChild()).getData()); tags.add(tag); } return tags; }
java
public Set<HttpMethod> allAllowedMethods() { if (anyMethodRouter.size() > 0) { Set<HttpMethod> ret = new HashSet<HttpMethod>(9); ret.add(HttpMethod.CONNECT); ret.add(HttpMethod.DELETE); ret.add(HttpMethod.GET); ret.add(HttpMethod.HEAD); ret.add(HttpMethod.OPTIONS); ret.add(HttpMethod.PATCH); ret.add(HttpMethod.POST); ret.add(HttpMethod.PUT); ret.add(HttpMethod.TRACE); return ret; } else { return new HashSet<HttpMethod>(routers.keySet()); } }
java
static void splitHeaders(ClassList classes) { Set<String> ctVersions = new HashSet<>(); for (ClassDescription cd : classes) { for (ClassHeaderDescription header : cd.header) { for (char c : header.versions.toCharArray()) { ctVersions.add("" + c); } } } classes.sort(); for (ClassDescription cd : classes) { Map<String, String> outerSignatures2Version = new HashMap<>(); for (String version : ctVersions) { //XXX ClassDescription outer = cd; String outerSignatures = ""; while ((outer = classes.enclosingClass(outer)) != null) { for (ClassHeaderDescription outerHeader : outer.header) { if (outerHeader.versions.contains(version)) { outerSignatures += outerHeader.signature; } } } outerSignatures2Version.compute(outerSignatures, (key, value) -> value != null ? value + version : version); } List<ClassHeaderDescription> newHeaders = new ArrayList<>(); HEADER_LOOP: for (ClassHeaderDescription header : cd.header) { for (String versions : outerSignatures2Version.values()) { if (containsAll(versions, header.versions)) { newHeaders.add(header); continue HEADER_LOOP; } if (disjoint(versions, header.versions)) { continue; } ClassHeaderDescription newHeader = new ClassHeaderDescription(); newHeader.classAnnotations = header.classAnnotations; newHeader.deprecated = header.deprecated; newHeader.extendsAttr = header.extendsAttr; newHeader.flags = header.flags; newHeader.implementsAttr = header.implementsAttr; newHeader.innerClasses = header.innerClasses; newHeader.runtimeAnnotations = header.runtimeAnnotations; newHeader.signature = header.signature; newHeader.versions = reduce(versions, header.versions); newHeaders.add(newHeader); } } cd.header = newHeaders; } }
python
def frange(x, y, jump=1): """ range for floats """ precision = get_sig_digits(jump) while x < y: yield round(x, precision) x += jump
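A hedged usage sketch, assuming get_sig_digits(0.25) reports two decimal places so each yielded value is rounded to two digits:

print(list(frange(0.0, 1.0, 0.25)))  # [0.0, 0.25, 0.5, 0.75]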
java
public void jump(int index) { if (index >= byteBuffer.capacity()) { throw new IllegalArgumentException("cannot jump past " + "end of input"); } byteBuffer.position(index); byteBuffer.limit(byteBuffer.capacity()); }
java
private void serverHello(ServerHello mesg) throws IOException { serverKeyExchangeReceived = false; if (debug != null && Debug.isOn("handshake")) { mesg.print(System.out); } // check if the server selected protocol version is OK for us ProtocolVersion mesgVersion = mesg.protocolVersion; if (!isNegotiable(mesgVersion)) { throw new SSLHandshakeException( "Server chose " + mesgVersion + ", but that protocol version is not enabled or not supported " + "by the client."); } handshakeHash.protocolDetermined(mesgVersion); // Set protocolVersion and propagate to SSLSocket and the // Handshake streams setVersion(mesgVersion); // check the "renegotiation_info" extension RenegotiationInfoExtension serverHelloRI = (RenegotiationInfoExtension) mesg.extensions.get(ExtensionType.EXT_RENEGOTIATION_INFO); if (serverHelloRI != null) { if (isInitialHandshake) { // verify the length of the "renegotiated_connection" field if (!serverHelloRI.isEmpty()) { // abort the handshake with a fatal handshake_failure alert fatalSE(Alerts.alert_handshake_failure, "The renegotiation_info field is not empty"); } secureRenegotiation = true; } else { // For a legacy renegotiation, the client MUST verify that // it does not contain the "renegotiation_info" extension. if (!secureRenegotiation) { fatalSE(Alerts.alert_handshake_failure, "Unexpected renegotiation indication extension"); } // verify the client_verify_data and server_verify_data values byte[] verifyData = new byte[clientVerifyData.length + serverVerifyData.length]; System.arraycopy(clientVerifyData, 0, verifyData, 0, clientVerifyData.length); System.arraycopy(serverVerifyData, 0, verifyData, clientVerifyData.length, serverVerifyData.length); if (!Arrays.equals(verifyData, serverHelloRI.getRenegotiatedConnection())) { fatalSE(Alerts.alert_handshake_failure, "Incorrect verify data in ServerHello " + "renegotiation_info message"); } } } else { // no renegotiation indication extension if (isInitialHandshake) { if (!allowLegacyHelloMessages) { // abort the handshake with a fatal handshake_failure alert fatalSE(Alerts.alert_handshake_failure, "Failed to negotiate the use of secure renegotiation"); } secureRenegotiation = false; if (debug != null && Debug.isOn("handshake")) { System.out.println("Warning: No renegotiation " + "indication extension in ServerHello"); } } else { // For a secure renegotiation, the client must abort the // handshake if no "renegotiation_info" extension is present. if (secureRenegotiation) { fatalSE(Alerts.alert_handshake_failure, "No renegotiation indication extension"); } // we have already allowed unsafe renegotiation before requesting // the renegotiation. } } // // Save server nonce, we always use it to compute connection // keys and it's also used to create the master secret if we're // creating a new session (i.e. in the full handshake). // svr_random = mesg.svr_random; if (isNegotiable(mesg.cipherSuite) == false) { fatalSE(Alerts.alert_illegal_parameter, "Server selected improper ciphersuite " + mesg.cipherSuite); } setCipherSuite(mesg.cipherSuite); if (protocolVersion.v >= ProtocolVersion.TLS12.v) { handshakeHash.setFinishedAlg(cipherSuite.prfAlg.getPRFHashAlg()); } if (mesg.compression_method != 0) { fatalSE(Alerts.alert_illegal_parameter, "compression type not supported, " + mesg.compression_method); // NOTREACHED } // so far so good, let's look at the session if (session != null) { // we tried to resume, let's see what the server decided if (session.getSessionId().equals(mesg.sessionId)) { // server resumed the session, let's make sure everything // checks out // Verify that the session ciphers are unchanged. CipherSuite sessionSuite = session.getSuite(); if (cipherSuite != sessionSuite) { throw new SSLProtocolException ("Server returned wrong cipher suite for session"); } // verify protocol version match ProtocolVersion sessionVersion = session.getProtocolVersion(); if (protocolVersion != sessionVersion) { throw new SSLProtocolException ("Server resumed session with wrong protocol version"); } // validate subject identity if (sessionSuite.keyExchange == K_KRB5 || sessionSuite.keyExchange == K_KRB5_EXPORT) { Principal localPrincipal = session.getLocalPrincipal(); Subject subject = null; try { subject = AccessController.doPrivileged( new PrivilegedExceptionAction<Subject>() { public Subject run() throws Exception { return Krb5Helper.getClientSubject(getAccSE()); }}); } catch (PrivilegedActionException e) { subject = null; if (debug != null && Debug.isOn("session")) { System.out.println("Attempt to obtain" + " subject failed!"); } } if (subject != null) { // Eliminate dependency on KerberosPrincipal Set<Principal> principals = subject.getPrincipals(Principal.class); if (!principals.contains(localPrincipal)) { throw new SSLProtocolException("Server resumed" + " session with wrong subject identity"); } else { if (debug != null && Debug.isOn("session")) System.out.println("Subject identity is same"); } } else { if (debug != null && Debug.isOn("session")) System.out.println("Kerberos credentials are not" + " present in the current Subject; check if " + " javax.security.auth.useSubjectAsCreds" + " system property has been set to false"); throw new SSLProtocolException ("Server resumed session with no subject"); } } // looks fine; resume it, and update the state machine. resumingSession = true; state = HandshakeMessage.ht_finished - 1; calculateConnectionKeys(session.getMasterSecret()); if (debug != null && Debug.isOn("session")) { System.out.println("%% Server resumed " + session); } } else { // we wanted to resume, but the server refused session = null; if (!enableNewSession) { throw new SSLException("New session creation is disabled"); } } } if (resumingSession && session != null) { if (protocolVersion.v >= ProtocolVersion.TLS12.v) { handshakeHash.setCertificateVerifyAlg(null); } setHandshakeSessionSE(session); // Reserve the handshake state if this is a session-resumption // abbreviated initial handshake. if (isInitialHandshake) { session.setAsSessionResumption(true); // NPN_CHANGES_BEGIN npnReceived(mesg); // NPN_CHANGES_END } return; } // check extensions for (HelloExtension ext : mesg.extensions.list()) { ExtensionType type = ext.type; if ((type != ExtensionType.EXT_ELLIPTIC_CURVES) && (type != ExtensionType.EXT_EC_POINT_FORMATS) && (type != ExtensionType.EXT_SERVER_NAME) // NPN_CHANGES_BEGIN && (type != ExtensionType.EXT_NEXT_PROTOCOL_NEGOTIATION) // NPN_CHANGES_END && (type != ExtensionType.EXT_RENEGOTIATION_INFO)) { fatalSE(Alerts.alert_unsupported_extension, "Server sent an unsupported extension: " + type); } } // Create a new session, we need to do the full handshake session = new SSLSessionImpl(protocolVersion, cipherSuite, getLocalSupportedSignAlgs(), mesg.sessionId, getHostSE(), getPortSE()); setHandshakeSessionSE(session); if (debug != null && Debug.isOn("handshake")) { System.out.println("** " + cipherSuite); } // NPN_CHANGES_BEGIN if (isInitialHandshake) npnReceived(mesg); // NPN_CHANGES_END }
java
@Override
public Request<DescribePlacementGroupsRequest> getDryRunRequest() {
    Request<DescribePlacementGroupsRequest> request =
            new DescribePlacementGroupsRequestMarshaller().marshall(this);
    request.addParameter("DryRun", Boolean.toString(true));
    return request;
}
python
def set_world(self, grd, start_y_x, y_x):
    """
    tell the agent to move to location y,x
    Why is there another grd object in the agent? Because
    this is NOT the main grid, rather a copy for the agent
    to overwrite with planning routes, etc.
    The real grid is initialised in the World.__init__() class
    """
    self.grd = grd
    self.start_y = start_y_x[0]
    self.start_x = start_y_x[1]
    self.current_y = start_y_x[0]
    self.current_x = start_y_x[1]
    self.target_y = y_x[0]
    self.target_x = y_x[1]
    self.backtrack = [0, 0]  # set only if blocked and agent needs to go back
    self.prefer_x = 0        # set only if backtracked as preferred direction x
    self.prefer_y = 0
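A minimal runnable sketch of how set_world might be called. StubAgent and the list-of-lists grid are stand-ins invented here, since the real Agent and World classes are not part of this entry; the sketch assumes the def above is importable as a plain function.

# Hypothetical usage sketch; StubAgent borrows only the bookkeeping above.
class StubAgent:
    set_world = set_world  # reuse the function defined above as a method

agent = StubAgent()
grid = [[0] * 5 for _ in range(5)]       # stand-in for the agent's planning copy
agent.set_world(grid, (0, 0), (4, 3))    # start at (0, 0), target (4, 3)
print(agent.current_y, agent.current_x)  # 0 0
print(agent.target_y, agent.target_x)    # 4 3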
java
public Class parseClass(File file) throws CompilationFailedException, IOException {
    return parseClass(new GroovyCodeSource(file, config.getSourceEncoding()));
}
python
def compress(self):
    """Main function of compression."""
    for ast_token in self.ast_tokens:
        if type(ast_token) in self.dispatcher:  # pylint: disable=unidiomatic-typecheck
            self.dispatcher[type(ast_token)](ast_token)
        else:
            self.dispatcher['default'](ast_token)
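The dispatcher pattern above (a dict keyed on node types with a 'default' fallback) can be exercised standalone. The example below is illustrative only and independent of the class this method belongs to; it reuses the same type-keyed lookup over standard-library ast nodes.

import ast

# Stand-alone version of the same type-keyed dispatch (Python 3.8+ for ast.Constant).
dispatcher = {
    ast.Name: lambda node: print('name:', node.id),
    ast.Constant: lambda node: print('constant:', node.value),
    'default': lambda node: None,  # ignore every other node type
}

for node in ast.walk(ast.parse('x = 1')):
    if type(node) in dispatcher:  # same membership check as compress() above
        dispatcher[type(node)](node)
    else:
        dispatcher['default'](node)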
python
def find_source_files_from_list(self, file_names):
    """
    Finds all source files that actually exist from a list of file names.

    :param list[str] file_names: The list of file names.
    """
    for file_name in file_names:
        if os.path.exists(file_name):
            routine_name = os.path.splitext(os.path.basename(file_name))[0]
            if routine_name not in self._source_file_names:
                self._source_file_names[routine_name] = file_name
            else:
                self._io.error("Files '{0}' and '{1}' have the same basename.".
                               format(self._source_file_names[routine_name], file_name))
                self.error_file_names.add(file_name)
        else:
            self._io.error("File does not exist: '{0}'".format(file_name))
            self.error_file_names.add(file_name)
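The basename-collision logic is easy to check in isolation. The toy below reproduces it with plain containers and skips the os.path.exists() filter, so the two file paths are hypothetical:

import os

source_file_names, error_file_names = {}, set()
for file_name in ['sql/get_user.sql', 'psql/get_user.sql']:  # hypothetical paths
    routine_name = os.path.splitext(os.path.basename(file_name))[0]
    if routine_name not in source_file_names:
        source_file_names[routine_name] = file_name
    else:
        error_file_names.add(file_name)  # both resolve to basename 'get_user'

print(source_file_names)  # {'get_user': 'sql/get_user.sql'}
print(error_file_names)   # {'psql/get_user.sql'}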
python
def call_fn_name(token):
    """Customize CALL_FUNCTION to add the number of positional arguments"""
    if token.attr is not None:
        return '%s_%i' % (token.kind, token.attr)
    else:
        return '%s_0' % (token.kind)
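A quick check of the naming scheme, using a throwaway namedtuple in place of the real parser token class (assumed to expose .kind and .attr, as the function above requires):

from collections import namedtuple

Token = namedtuple('Token', 'kind attr')  # stand-in for the real token type

print(call_fn_name(Token('CALL_FUNCTION', 2)))     # CALL_FUNCTION_2
print(call_fn_name(Token('CALL_FUNCTION', None)))  # CALL_FUNCTION_0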
python
def complete(self, table_name, key):
    """
    Log a completed job. When a job is completed, its reservation entry is deleted.

    :param table_name: `database`.`table_name`
    :param key: the dict of the job's primary key
    """
    job_key = dict(table_name=table_name, key_hash=key_hash(key))
    (self & job_key).delete_quick()
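The reservation row is addressed by a hash of the primary key. The sketch below shows the shape of that lookup key; key_hash here is a hypothetical stand-in (an MD5 over the sorted key items), not necessarily the library's exact implementation.

import hashlib

def key_hash(key):  # hypothetical stand-in for the real key_hash helper
    return hashlib.md5(
        ''.join('{}={}'.format(k, key[k]) for k in sorted(key)).encode()
    ).hexdigest()

job_key = dict(table_name='mydb.session', key_hash=key_hash({'subject_id': 1}))
print(job_key['table_name'], job_key['key_hash'][:8])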
java
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    super.onLayout(changed, l, t, r, b);

    if (getOrientation() == HORIZONTAL) {
        int N = getChildCount();
        for (int i = 0; i < N; i++) {
            View child = getChildAt(i);
            int width = child.getWidth();
            if (width > MAX_BUTTON_WIDTH || (N >= 3 && width > MIN_BUTTON_WIDTH)) {

                // Clear out the children list in preparation for new manipulation
                children.clear();

                // Update the children's params
                for (int j = 0; j < N; j++) {
                    RippleView chd = (RippleView) getChildAt(j);
                    Button btn = (Button) chd.getChildAt(0);
                    btn.setGravity(Gravity.END | Gravity.CENTER_VERTICAL);
                    children.add(chd);
                }

                // Clear out the children
                removeAllViews();

                // Sort buttons properly
                Collections.sort(children, mButtonComparator);

                // Re-add all the views
                for (int j = 0; j < children.size(); j++) {
                    View chd = children.get(j);
                    addView(chd);
                }

                // Switch orientation
                setOrientation(VERTICAL);
                requestLayout();
                return;
            }
        }
    }
}
python
def set_auto_discard_for_device(self, name, controller_port, device, discard):
    """Sets a flag in the device information which indicates that the medium
    supports discarding unused blocks (called trimming for SATA
    or unmap for SCSI devices). This may or may not be supported by a
    particular drive, and is silently ignored in the latter case. At the
    moment only hard disks (which is a misnomer in this context) accept
    this setting. Changing the setting while the VM is running is forbidden.
    The device must already exist; see :py:func:`IMachine.attach_device`
    for how to attach a new device.

    The @a controllerPort and @a device parameters specify the device slot
    and have the same meaning as with :py:func:`IMachine.attach_device`.

    in name of type str
        Name of the storage controller.

    in controller_port of type int
        Storage controller port.

    in device of type int
        Device slot in the given port.

    in discard of type bool
        New value for the discard device flag.

    raises :class:`OleErrorInvalidarg`
        SATA device, SATA port, SCSI port out of range.

    raises :class:`VBoxErrorInvalidObjectState`
        Attempt to modify an unregistered virtual machine.

    raises :class:`VBoxErrorInvalidVmState`
        Invalid machine state.

    """
    if not isinstance(name, basestring):
        raise TypeError("name can only be an instance of type basestring")
    if not isinstance(controller_port, baseinteger):
        raise TypeError("controller_port can only be an instance of type baseinteger")
    if not isinstance(device, baseinteger):
        raise TypeError("device can only be an instance of type baseinteger")
    if not isinstance(discard, bool):
        raise TypeError("discard can only be an instance of type bool")
    self._call("setAutoDiscardForDevice",
               in_p=[name, controller_port, device, discard])
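The method is essentially defensive argument validation in front of a remote call. A stand-alone illustration of the same checking idiom, using Python 3 built-ins in place of the py2-compat basestring/baseinteger aliases above:

# Sketch of the same validation pattern; names and types are illustrative.
def _validate(name, controller_port, device, discard):
    if not isinstance(name, str):
        raise TypeError("name can only be an instance of type str")
    if not isinstance(controller_port, int):
        raise TypeError("controller_port can only be an instance of type int")
    if not isinstance(device, int):
        raise TypeError("device can only be an instance of type int")
    if not isinstance(discard, bool):
        raise TypeError("discard can only be an instance of type bool")

_validate("SATA", 0, 0, True)        # passes silently
try:
    _validate("SATA", "0", 0, True)  # wrong type for controller_port
except TypeError as exc:
    print(exc)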
java
public INDArray loadSingleSentence(String sentence) {
    List<String> tokens = tokenizeSentence(sentence);
    if (format == Format.CNN1D || format == Format.RNN) {
        int[] featuresShape = new int[] {1, wordVectorSize, Math.min(maxSentenceLength, tokens.size())};
        INDArray features = Nd4j.create(featuresShape, (format == Format.CNN1D ? 'c' : 'f'));
        INDArrayIndex[] indices = new INDArrayIndex[3];
        indices[0] = NDArrayIndex.point(0);
        for (int i = 0; i < featuresShape[2]; i++) {
            INDArray vector = getVector(tokens.get(i));
            indices[1] = NDArrayIndex.all();
            indices[2] = NDArrayIndex.point(i);
            features.put(indices, vector);
        }
        return features;
    } else {
        int[] featuresShape = new int[] {1, 1, 0, 0};
        if (sentencesAlongHeight) {
            featuresShape[2] = Math.min(maxSentenceLength, tokens.size());
            featuresShape[3] = wordVectorSize;
        } else {
            featuresShape[2] = wordVectorSize;
            featuresShape[3] = Math.min(maxSentenceLength, tokens.size());
        }
        INDArray features = Nd4j.create(featuresShape);
        int length = (sentencesAlongHeight ? featuresShape[2] : featuresShape[3]);
        INDArrayIndex[] indices = new INDArrayIndex[4];
        indices[0] = NDArrayIndex.point(0);
        indices[1] = NDArrayIndex.point(0);
        for (int i = 0; i < length; i++) {
            INDArray vector = getVector(tokens.get(i));
            if (sentencesAlongHeight) {
                indices[2] = NDArrayIndex.point(i);
                indices[3] = NDArrayIndex.all();
            } else {
                indices[2] = NDArrayIndex.all();
                indices[3] = NDArrayIndex.point(i);
            }
            features.put(indices, vector);
        }
        return features;
    }
}
java
@Nonnull
public Process exec() throws IOException {
    List<String> commandWords = toCommandWords();
    ProcessBuilder builder = new ProcessBuilder(commandWords);
    // TODO: Use Redirect to send the I/O somewhere useful.
    QEmuCommandLineUtils.redirectIO(builder);
    return builder.start();
}
python
def main():
    """Main ShutIt function.

    Handles the configured actions:
        - skeleton     - create skeleton module
        - list_configs - output computed configuration
        - depgraph     - output digraph of module dependencies
    """
    # Create base shutit object.
    shutit = shutit_global.shutit_global_object.shutit_objects[0]
    if sys.version_info[0] == 2:
        if sys.version_info[1] < 7:
            shutit.fail('Python version must be 2.7+')  # pragma: no cover
    try:
        shutit.setup_shutit_obj()
    except KeyboardInterrupt:
        shutit_util.print_debug(sys.exc_info())
        shutit_global.shutit_global_object.shutit_print('Keyboard interrupt caught, exiting with status 1')
        sys.exit(1)
python
def shared_otuids(groups):
    """
    Get shared OTUIDs between all unique combinations of groups.

    :type groups: dict
    :param groups: {Category name: OTUIDs in category}

    :rtype: dict
    :return: Dict keyed on group combination and their shared OTUIDs as values.
    """
    for g in sorted(groups):
        print("Number of OTUs in {0}: {1}".format(g, len(groups[g].results["otuids"])))
    number_of_categories = len(groups)
    shared = defaultdict()
    for i in range(2, number_of_categories + 1):
        for j in combinations(sorted(groups), i):
            combo_name = " & ".join(list(j))
            # initialize the combo with a copy of the first group's OTUIDs
            shared[combo_name] = groups[j[0]].results["otuids"].copy()
            # iterate through the remaining groups, narrowing the combo's
            # OTUIDs with set intersection_update
            for grp in j[1:]:
                shared[combo_name].intersection_update(groups[grp].results["otuids"])
    return shared
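The combinations-plus-intersection core is worth seeing with plain sets. In the toy below the group contents are made up and bare sets stand in for the .results["otuids"] attribute used above:

from itertools import combinations

groups = {'A': {1, 2, 3}, 'B': {2, 3, 4}, 'C': {3, 4, 5}}  # made-up OTUIDs
shared = {}
for i in range(2, len(groups) + 1):
    for combo in combinations(sorted(groups), i):
        common = set(groups[combo[0]])       # start from the first group
        for grp in combo[1:]:
            common.intersection_update(groups[grp])
        shared[' & '.join(combo)] = common

print(shared['A & B'])      # {2, 3}
print(shared['A & B & C'])  # {3}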
python
def sectionsPDF(self, walkTrace=tuple(), case=None, element=None, doc=None):
    """Prepares section for PDF output."""
    import pylatex as pl
    if case == 'sectionmain':
        if self.settings['clearpage']:
            doc.append(pl.utils.NoEscape(r'\clearpage'))
        with doc.create(pl.Section(self.title) if len(walkTrace) == 1 else
                        pl.Subsection(self.title) if len(walkTrace) == 2 else
                        pl.Subsubsection(self.title)):
            text = (self.p.replace('\n', ' ').replace('//', '\n')
                    if self.settings['doubleslashnewline'] else renewliner(self.p))
            if r'\ref' not in text:
                doc.append(text)
            else:
                figrefs = re.compile(r'\\ref\{figref\d+\}')
                #latexcode = re.compile(r'&@\\.+')
                lastpos = 0
                for fr in figrefs.finditer(text):
                    doc.append(text[lastpos:fr.start()])
                    doc.append(pl.utils.NoEscape(text[fr.start():fr.end()]))
                    lastpos = fr.end()
                doc.append(text[lastpos:])
    if case == 'figure':
        width = r'1\textwidth'
        figtitle, fig = element
        #if fig._suptitle: fig.suptitle('Figure {}: {}'.format(fig.number, fig._suptitle.get_text()))
        #figtitle = fig._suptitle.get_text() if fig._suptitle else ''
        #fig.suptitle('')
        with doc.create(pl.Figure(position='htbp')) as plot:
            plt.figure(fig.number)
            plot.add_plot(width=pl.NoEscape(width))
            plot.add_caption(figtitle)
            plot.append(pl.utils.NoEscape(r'\label{figref' + str(fig.number) + r'}'))
        #fig.suptitle(figtitle if figtitle else None)
    if case == 'table':
        caption, t = element
        t = pdSeriesToFrame(t) if type(t) == pd.Series else t
        if self.settings['tablehead']:
            t = t.head(self.settings['tablehead'])
        if self.settings['tablecolumns']:
            t = t[self.settings['tablecolumns']]
        with doc.create(pl.Table(position='ht')) as tablenv:
            tablenv.add_caption(caption)
            with doc.create(pl.Tabular('r|' + 'l' * len(t.columns))) as table:
                table.add_hline()
                table.add_row(('',) + tuple(t.columns))
                for row in t.to_records():
                    table.add_row(row)
                table.add_hline(1)
python
def _validate_lattice_vectors(self, lattice_vectors):
    """Ensure that the lattice_vectors are reasonable inputs."""
    dataType = np.float64
    if lattice_vectors is None:
        lattice_vectors = np.identity(self.dimension, dtype=dataType)
    else:
        lattice_vectors = np.asarray(lattice_vectors, dtype=dataType)

        if (self.dimension, self.dimension) != np.shape(lattice_vectors):
            raise ValueError('Dimensionality of lattice_vectors is '
                             'of shape {} not {}.'
                             .format(np.shape(lattice_vectors),
                                     (self.dimension, self.dimension)))

        det = np.linalg.det(lattice_vectors)
        if abs(det) == 0.0:
            raise ValueError('Co-linear vectors: {} '
                             'have a determinant of 0.0. Does not '
                             'define a unit cell.'
                             .format(lattice_vectors))

        if det <= 0.0:
            raise ValueError('Negative determinant: the determinant '
                             'of {} is negative, indicating a left-'
                             'handed system.'
                             .format(det))

    self.lattice_vectors = lattice_vectors
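Three quick inputs show what the determinant checks catch; numpy is the only dependency, and the 2x2 matrices are invented for illustration:

import numpy as np

good = np.asarray([[1.0, 0.0], [0.0, 1.0]])      # det = 1.0: valid right-handed cell
colinear = np.asarray([[1.0, 2.0], [2.0, 4.0]])  # det = 0.0: rows are co-linear, no unit cell
flipped = np.asarray([[0.0, 1.0], [1.0, 0.0]])   # det = -1.0: left-handed system

for vecs in (good, colinear, flipped):
    print(np.linalg.det(vecs))  # 1.0, 0.0, -1.0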
python
def parse_union_type_extension(lexer: Lexer) -> UnionTypeExtensionNode:
    """UnionTypeExtension"""
    start = lexer.token
    expect_keyword(lexer, "extend")
    expect_keyword(lexer, "union")
    name = parse_name(lexer)
    directives = parse_directives(lexer, True)
    types = parse_union_member_types(lexer)
    if not (directives or types):
        raise unexpected(lexer)
    return UnionTypeExtensionNode(
        name=name, directives=directives, types=types, loc=loc(lexer, start)
    )
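This production corresponds to SDL of the form "extend union X = A | B". A hedged sketch of exercising it through graphql-core's public parse() (an assumed entry point; the helper above is internal and driven by it):

# Assumes graphql-core 3 is installed and that parse() accepts
# type-system extension documents, as its public docs describe.
from graphql import parse

doc = parse("extend union SearchResult = Photo | Person")
ext = doc.definitions[0]
print(type(ext).__name__)                 # UnionTypeExtensionNode
print([t.name.value for t in ext.types])  # ['Photo', 'Person']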
python
def to_array(self):
    """
    Serializes this GameMessage to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(GameMessage, self).to_array()
    array['game_short_name'] = u(self.game_short_name)  # py2: type unicode, py3: type str
    if self.receiver is not None:
        # None is already excluded by the guard above, so only the str and
        # int variants of the union need to be serialized here.
        if isinstance(self.receiver, str):
            array['chat_id'] = u(self.receiver)  # py2: type unicode, py3: type str
        elif isinstance(self.receiver, int):
            array['chat_id'] = int(self.receiver)  # type int
        else:
            raise TypeError('Unknown type, must be one of None, str, int.')
        # end if
    if self.reply_id is not None:
        if isinstance(self.reply_id, DEFAULT_MESSAGE_ID):
            array['reply_to_message_id'] = DEFAULT_MESSAGE_ID(self.reply_id)  # type DEFAULT_MESSAGE_ID
        elif isinstance(self.reply_id, int):
            array['reply_to_message_id'] = int(self.reply_id)  # type int
        else:
            raise TypeError('Unknown type, must be one of DEFAULT_MESSAGE_ID, int.')
        # end if
    if self.disable_notification is not None:
        array['disable_notification'] = bool(self.disable_notification)  # type bool
    if self.reply_markup is not None:
        array['reply_markup'] = self.reply_markup.to_array()  # type InlineKeyboardMarkup
    return array
python
def remove(self, child):
    '''Remove a ``child`` from the list of :attr:`children`.'''
    try:
        self.children.remove(child)
        if isinstance(child, String):
            child._parent = None
    except ValueError:
        pass
python
def get_vars(n):
    """
    extract variables from expression node
    defined by tuple-pair: (_var_, [variable name])
    """
    op = n[0]
    if op.startswith('_') and op.endswith('_'):
        op = op.strip('_')
        if op == 'var':
            return [n[1]]
        return []
    else:
        ret = []
        for c in n[1:]:
            vs = get_vars(c)
            if vs:
                ret.extend(vs)
        return ret
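Assuming operator nodes are plain tuples of the form ('op', child, ...) and only leaves use the underscore-wrapped _var_ tag (the convention the docstring implies), a call looks like this; the expression tree is hypothetical:

expr = ('add',
        ('_var_', 'x'),
        ('mul', ('_var_', 'y'), ('_var_', 'z')))
print(get_vars(expr))  # ['x', 'y', 'z']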
java
public static INDArray conv2d(INDArray input, INDArray kernel, Type type) {
    return Nd4j.getConvolution().conv2d(input, kernel, type);
}
python
def fire_failed_msisdn_lookup(self, to_identity):
    """
    Fires a webhook in the event of a None to_addr.
    """
    payload = {"to_identity": to_identity}
    hooks = Hook.objects.filter(event="identity.no_address")
    for hook in hooks:
        hook.deliver_hook(
            None, payload_override={"hook": hook.dict(), "data": payload}
        )
python
def _update_model(self, normalization_type='stats'):
    """
    Updates the model (when more than one observation is available) and saves
    the parameters (if available).
    """
    if self.num_acquisitions % self.model_update_interval == 0:
        # input that goes into the model (is unzipped in case there are
        # categorical variables)
        X_inmodel = self.space.unzip_inputs(self.X)
        # Y_inmodel is the output that goes into the model
        if self.normalize_Y:
            Y_inmodel = normalize(self.Y, normalization_type)
        else:
            Y_inmodel = self.Y
        self.model.updateModel(X_inmodel, Y_inmodel, None, None)
    # Save parameters of the model
    self._save_model_parameter_values()
python
def scatter(x, y, xlabel=LABEL_DEFAULT, ylabel=LABEL_DEFAULT, title=LABEL_DEFAULT):
    """
    Plots the data in `x` on the X axis and the data in `y` on the Y axis
    in a 2d scatter plot, and returns the resulting Plot object.

    The function supports SArrays of dtypes: int, float.

    Parameters
    ----------
    x : SArray
      The data to plot on the X axis of the scatter plot.
      Must be numeric (int/float).
    y : SArray
      The data to plot on the Y axis of the scatter plot.
      Must be the same length as `x`.
      Must be numeric (int/float).
    xlabel : str (optional)
      The text label for the X axis. Defaults to "X".
    ylabel : str (optional)
      The text label for the Y axis. Defaults to "Y".
    title : str (optional)
      The title of the plot. Defaults to LABEL_DEFAULT. If the value is
      LABEL_DEFAULT, the title will be "<xlabel> vs. <ylabel>". If the value
      is None, the title will be omitted. Otherwise, the string passed in as
      the title will be used as the plot title.

    Returns
    -------
    out : Plot
      A :class: Plot object that is the scatter plot.

    Examples
    --------
    Make a scatter plot.

    >>> x = turicreate.SArray([1,2,3,4,5])
    >>> y = x * 2
    >>> scplt = turicreate.visualization.scatter(x, y)
    """
    if (not isinstance(x, tc.data_structures.sarray.SArray) or
            not isinstance(y, tc.data_structures.sarray.SArray) or
            x.dtype not in [int, float] or
            y.dtype not in [int, float]):
        raise ValueError("turicreate.visualization.scatter supports " +
                         "SArrays of dtypes: int, float")
    # legit input
    title = _get_title(title)
    plt_ref = tc.extensions.plot_scatter(x, y, xlabel, ylabel, title)
    return Plot(plt_ref)