max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
841
package org.jboss.resteasy.test.asyncio; public class AsyncThrowingWriterData { public boolean throwNow; public AsyncThrowingWriterData(final boolean throwNow){ this.throwNow = throwNow; } }
72
1,438
<gh_stars>1000+ package com.dexvis.dex; import java.io.File; import java.util.List; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import javafx.application.Application; import javafx.application.Platform; import javafx.event.ActionEvent; import javafx.scene.Scene; import javafx.stage.Stage; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.tbee.javafx.scene.layout.MigPane; import com.dexvis.dex.wf.DexEnvironment; import com.dexvis.dex.wf.DexTaskState; import com.dexvis.javafx.scene.control.DexTaskItem; public class DexCLI extends Application { // Thread factory for executing task serially. private final static BasicThreadFactory serialThreadFactory = new BasicThreadFactory.Builder() .namingPattern("Dex-Serial-Task-%d").daemon(true) .priority(Thread.MAX_PRIORITY).build(); // Executor for executing task serially. public final static ExecutorService serialExecutor = Executors .newSingleThreadExecutor(serialThreadFactory); // Thread factory for concurrent task execution. Such task may not update the // UI. private final static BasicThreadFactory concurrentThreadFactory = new BasicThreadFactory.Builder() .namingPattern("Dex-Concurrent-Task-%d").daemon(true) .priority(Thread.MAX_PRIORITY).build(); // Executor for parallel task execution. public final static ExecutorService concurrentExecutor = Executors .newFixedThreadPool( Math.max(1, Runtime.getRuntime().availableProcessors() - 1), concurrentThreadFactory); // Service for task completion notification. 
public final static CompletionService<Object> CCS = new ExecutorCompletionService( concurrentExecutor); // Main stage private Stage stage = null; // Main scene. private Scene scene; private static String[] arguments; private void init(Stage stage) { try { this.stage = stage; stage.setTitle("Data Explorer"); MigPane rootLayout = new MigPane("", "[grow]", "[][grow]"); scene = new Scene(rootLayout, 1600, 900); stage.setScene(scene); } catch(Exception ex) { ex.printStackTrace(); } } public void start(Stage stage) throws Exception { init(stage); stage.show(); Options options = new Options(); options.addOption("p", "project", true, "The project to be run."); options.addOption("e", "env", true, "The project environment."); CommandLineParser parser = new DefaultParser(); CommandLine cmd = parser.parse(options, arguments); System.out.println("Running: " + cmd.getOptionValue("project")); if (cmd.hasOption("e") || cmd.hasOption("env")) { System.out.println("*** ENABLING HEADLESS MODE ***"); DexEnvironment env = DexEnvironment.getInstance(); env.setVariable("HEADLESS", "true"); String envStr = cmd.getOptionValue("env"); String envVars[] = StringUtils.split(envStr, ';'); if (envVars != null) { for (String envVar: envVars) { if (envVar != null && envVar.indexOf('=') > -1) { int ei = envVar.indexOf('='); env.setVariable(envVar.substring(0, ei), envVar.substring(ei+1)); System.out.println("Setting Env Var: '" + envVar.substring(0, ei) + "'='" + envVar.substring(ei+1) + "'"); } } } // Split by semicolon. // Split by equals. 
} DexProject project = DexProject.readProject(stage, new File(cmd.getOptionValue("project"))); List<DexTaskItem> tasks = project.getTaskItems(); int taskNum = 1; DexTaskState state = new DexTaskState(); long projectStartTime = System.currentTimeMillis(); long taskStartTime; for (DexTaskItem task : tasks) { taskStartTime = System.currentTimeMillis(); System.out.println(" TASK[" + taskNum + "]: '" + task.getName().getValue() + "'"); // long startTime = System.currentTimeMillis(); if (task.getActive().getValue()) { state = task.getTask().getValue().execute(state); } System.out.println(" " + (System.currentTimeMillis() - taskStartTime) + " ms"); taskNum++; } serialExecutor.shutdown(); concurrentExecutor.shutdown(); if (!serialExecutor.awaitTermination(3600, TimeUnit.SECONDS)) { serialExecutor.shutdownNow(); } if (!concurrentExecutor.awaitTermination(3600, TimeUnit.SECONDS)) { concurrentExecutor.shutdownNow(); } System.out.println("Execution Completed In: " + (System.currentTimeMillis() - projectStartTime) + " ms"); Platform.exit(); } public void exit(ActionEvent evt) { System.exit(0); } private static void setDefault(String propertyName, String propertyValue) { if (!System.getProperties().containsKey(propertyName)) { System.setProperty(propertyName, propertyValue); } System.out.println(propertyName + "='" + System.getProperty(propertyName) + "'"); } public static void main(String[] args) { arguments = args; Platform.setImplicitExit(false); // Headless params, overridable from command line. setDefault("glass.platform", "Monocle"); setDefault("monocle.platform", "Headless"); setDefault("prism.order", "sw"); setDefault("prism.text", "t2k"); setDefault("headless.geometry", "1600x1200-32"); launch(args); } }
2,182
722
<gh_stars>100-1000 from typing import Union import torch import collections from pathlib import Path from catalyst.dl import utils from catalyst.dl.runner import SupervisedRunner from catalyst.dl.callbacks import EarlyStoppingCallback from catalyst.dl.callbacks.scheduler import SchedulerCallback from catalyst.dl.callbacks.checkpoint import IterationCheckpointCallback from train import ParamConfig from train import SNRCallback def train( model: torch.nn.Module, dataset: torch.utils.data.Dataset, optimizer: torch.optim.Optimizer, criterion: torch.nn.Module, config: ParamConfig, val_dataset: torch.utils.data.Dataset = None, logdir: str = "./logdir", resume: Union[str, None] = "logdir/checkpoints/best_full.pth", ) -> None: """ train the model with specified paremeters Args: model: neural network model dataset: training dataset optimizer: optimizer criterion: loss function val_dataset: validation dataset logdir: logdir location to save checkpoints resume: path where the partially trained model is stored """ loaders = collections.OrderedDict() train_loader = utils.get_loader( dataset, open_fn=lambda x: {"input_audio": x[-1], "input_video": x[1], "targets": x[0]}, batch_size=config.batch_size, num_workers=config.workers, shuffle=True, ) val_loader = utils.get_loader( val_dataset, open_fn=lambda x: {"input_audio": x[-1], "input_video": x[1], "targets": x[0]}, batch_size=config.batch_size, num_workers=config.workers, shuffle=True, ) loaders = {"train": train_loader, "valid": val_loader} scheduler = torch.optim.lr_scheduler.CyclicLR( optimizer, base_lr=config.learning_rate, max_lr=config.learning_rate * 10, step_size_up=4 * len(train_loader), mode="triangular", cycle_momentum=False, ) runner = SupervisedRunner(input_key=["input_audio", "input_video"]) runner.train( model=model, criterion=criterion, optimizer=optimizer, scheduler=scheduler, loaders=loaders, logdir=logdir, verbose=True, num_epochs=config.epochs, resume=resume, callbacks=collections.OrderedDict( { 
"iteration_checkpoint": IterationCheckpointCallback( save_n_last=1, num_iters=10_000 ), "snr_callback": SNRCallback(), "sched_callback": SchedulerCallback(mode="batch"), } ), )
1,139
609
<reponame>RC0D3/gramado #ifndef __LT8X8_H #define __LT8X8_H 1 #define CH_CTRLA 0x01 #define CH_CTRLB 0x02 #define CH_CTRLC 0x03 #define CH_CTRLD 0x04 #define CH_CTRLE 0x05 #define CH_CTRLF 0x06 #define CH_CTRLG 0x07 #define CH_CTRLH 0x08 #define CH_CTRLI 0x09 #define CH_CTRLJ 0x0A #define CH_CTRLK 0x0B #define CH_CTRLL 0x0C #define CH_CTRLM 0x0D #define CH_CTRLN 0x0E #define CH_CTRLO 0x0F #define CH_CTRLP 0x10 #define CH_CTRLQ 0x11 #define CH_CTRLR 0x12 #define CH_CTRLS 0x13 #define CH_CTRLT 0x14 #define CH_CTRLU 0x15 #define CH_CTRLV 0x16 #define CH_CTRLW 0x17 #define CH_CTRLX 0x18 #define CH_CTRLY 0x19 #define CH_CTRLZ 0x1A #define CH_CURSOR 0x05 #define CH_BACKSPACE 0x08 #define CH_TAB 0x09 #define CH_NEW_LINE 0x0A #define CH_FORM_FEED 0x0C #define CH_CARRIAGE_RETURN 0x0D #define CH_ESC 0x1B #define CH_SHIFT_ESC 0x1C #define CH_SHIFT_SPACE 0x1F #define CH_SPACE 0x20 #define CH_SINGLE_VERT 0xB3 #define CH_DBL_VERT 0xBA #define CH_SINGLE_HORZ 0xC4 #define CH_DBL_HORZ 0xCD #define CH_SINGLE_TL 0xDA #define CH_DBL_TL 0xC9 #define CH_SINGLE_TR 0xBF #define CH_DBL_TR 0xBB #define CH_DBL_BR 0xBC #define CH_SINGLE_BL 0xC0 #define CH_DBL_BL 0xC8 #define CH_SINGLE_BR 0xD9 #define CH_SOLID 0xDB static unsigned char font_lt8x8[256*8] = { // 0 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, // 10 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, // 20 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, // 30 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x08,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, // 32 = ' ' Space! 0x18,0x3C,0x3C,0x18,0x18,0x00,0x18,0x00, 0x36,0x36,0x36,0x00,0x00,0x00,0x00,0x00, 0x6C,0x6C,0xFE,0x6C,0xFE,0x6C,0x6C,0x00, 0x30,0xFC,0x16,0x7C,0xD0,0x7E,0x18,0x00, 0x06,0x66,0x30,0x18,0x0C,0x66,0x60,0x00, 0x1C,0x36,0x36,0x1C,0xB6,0x66,0xDC,0x00, 0x18,0x18,0x18,0x00,0x00,0x00,0x00,0x00, // 40 0x30,0x18,0x0C,0x0C,0x0C,0x18,0x30,0x00, 0x0C,0x18,0x30,0x30,0x30,0x18,0x0C,0x00, 0x00,0x18,0x7E,0x3C,0x7E,0x18,0x00,0x00, 0x00,0x18,0x18,0x7E,0x18,0x18,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x18,0x18,0x0C, 0x00,0x00,0x00,0x7E,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x18,0x18,0x00, 0x00,0x60,0x30,0x18,0x0C,0x06,0x00,0x00, 0x3C,0x66,0x76,0x7E,0x6E,0x66,0x3C,0x00, 0x18,0x1C,0x18,0x18,0x18,0x18,0x7E,0x00, // 50 0x3C,0x66,0x60,0x30,0x18,0x0C,0x7E,0x00, 0x3C,0x66,0x60,0x38,0x60,0x66,0x3C,0x00, 0x30,0x38,0x3C,0x36,0x7E,0x30,0x30,0x00, 0x7E,0x06,0x3E,0x60,0x60,0x66,0x3C,0x00, 0x38,0x0C,0x06,0x3E,0x66,0x66,0x3C,0x00, 0x7E,0x60,0x30,0x18,0x0C,0x0C,0x0C,0x00, 0x3C,0x66,0x66,0x3C,0x66,0x66,0x3C,0x00, 0x3C,0x66,0x66,0x7C,0x60,0x30,0x1C,0x00, 0x00,0x00,0x18,0x18,0x00,0x18,0x18,0x00, 0x00,0x00,0x18,0x18,0x00,0x18,0x18,0x0C, // 60 0x30,0x18,0x0C,0x06,0x0C,0x18,0x30,0x00, 0x00,0x00,0x7E,0x00,0x7E,0x00,0x00,0x00, 0x0C,0x18,0x30,0x60,0x30,0x18,0x0C,0x00, 0x3C,0x66,0x30,0x18,0x18,0x00,0x18,0x00, 0x3C,0x66,0x76,0x56,0x76,0x06,0x3C,0x00, 
0x3C,0x66,0x66,0x7E,0x66,0x66,0x66,0x00, //65 'A' (inverted?) 0x3E,0x66,0x66,0x3E,0x66,0x66,0x3E,0x00, 0x3C,0x66,0x06,0x06,0x06,0x66,0x3C,0x00, 0x1E,0x36,0x66,0x66,0x66,0x36,0x1E,0x00, 0x7E,0x06,0x06,0x3E,0x06,0x06,0x7E,0x00, 0x7E,0x06,0x06,0x3E,0x06,0x06,0x06,0x00, 0x3C,0x66,0x06,0x76,0x66,0x66,0x3C,0x00, 0x66,0x66,0x66,0x7E,0x66,0x66,0x66,0x00, 0x7E,0x18,0x18,0x18,0x18,0x18,0x7E,0x00, 0x7C,0x30,0x30,0x30,0x30,0x36,0x1C,0x00, 0x66,0x36,0x1E,0x0E,0x1E,0x36,0x66,0x00, 0x06,0x06,0x06,0x06,0x06,0x06,0x7E,0x00, 0xC6,0xEE,0xFE,0xD6,0xD6,0xC6,0xC6,0x00, 0x66,0x66,0x6E,0x7E,0x76,0x66,0x66,0x00, 0x3C,0x66,0x66,0x66,0x66,0x66,0x3C,0x00, 0x3E,0x66,0x66,0x3E,0x06,0x06,0x06,0x00, 0x3C,0x66,0x66,0x66,0x56,0x36,0x6C,0x00, 0x3E,0x66,0x66,0x3E,0x36,0x66,0x66,0x00, 0x3C,0x66,0x06,0x3C,0x60,0x66,0x3C,0x00, 0x7E,0x18,0x18,0x18,0x18,0x18,0x18,0x00, 0x66,0x66,0x66,0x66,0x66,0x66,0x3C,0x00, 0x66,0x66,0x66,0x66,0x66,0x3C,0x18,0x00, 0xC6,0xC6,0xD6,0xD6,0xFE,0xEE,0xC6,0x00, 0x66,0x66,0x3C,0x18,0x3C,0x66,0x66,0x00, 0x66,0x66,0x66,0x3C,0x18,0x18,0x18,0x00, 0x7E,0x60,0x30,0x18,0x0C,0x06,0x7E,0x00, 0x3E,0x06,0x06,0x06,0x06,0x06,0x3E,0x00, 0x00,0x06,0x0C,0x18,0x30,0x60,0x00,0x00, 0x7C,0x60,0x60,0x60,0x60,0x60,0x7C,0x00, 0x3C,0x66,0x00,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF, 0x0C,0x18,0x30,0x00,0x00,0x00,0x00,0x00, 0x00,0x00,0x3C,0x60,0x7C,0x66,0x7C,0x00, 0x06,0x06,0x3E,0x66,0x66,0x66,0x3E,0x00, 0x00,0x00,0x3C,0x66,0x06,0x66,0x3C,0x00, 0x60,0x60,0x7C,0x66,0x66,0x66,0x7C,0x00, 0x00,0x00,0x3C,0x66,0x7E,0x06,0x3C,0x00, 0x38,0x0C,0x0C,0x3E,0x0C,0x0C,0x0C,0x00, 0x00,0x00,0x7C,0x66,0x66,0x7C,0x60,0x3C, 0x06,0x06,0x3E,0x66,0x66,0x66,0x66,0x00, 0x18,0x00,0x1C,0x18,0x18,0x18,0x3C,0x00, 0x18,0x00,0x1C,0x18,0x18,0x18,0x18,0x0E, 0x06,0x06,0x66,0x36,0x1E,0x36,0x66,0x00, 0x1C,0x18,0x18,0x18,0x18,0x18,0x3C,0x00, 0x00,0x00,0x6C,0xFE,0xD6,0xD6,0xC6,0x00, 0x00,0x00,0x3E,0x66,0x66,0x66,0x66,0x00, 0x00,0x00,0x3C,0x66,0x66,0x66,0x3C,0x00, 0x00,0x00,0x3E,0x66,0x66,0x3E,0x06,0x06, 
0x00,0x00,0x7C,0x66,0x66,0x7C,0x60,0xE0, 0x00,0x00,0x36,0x6E,0x06,0x06,0x06,0x00, 0x00,0x00,0x7C,0x06,0x3C,0x60,0x3E,0x00, 0x0C,0x0C,0x3E,0x0C,0x0C,0x0C,0x38,0x00, 0x00,0x00,0x66,0x66,0x66,0x66,0x7C,0x00, 0x00,0x00,0x66,0x66,0x66,0x3C,0x18,0x00, 0x00,0x00,0xC6,0xD6,0xD6,0xFE,0x6C,0x00, 0x00,0x00,0x66,0x3C,0x18,0x3C,0x66,0x00, 0x00,0x00,0x66,0x66,0x66,0x7C,0x60,0x3C, 0x00,0x00,0x7E,0x30,0x18,0x0C,0x7E,0x00, 0x30,0x18,0x18,0x0E,0x18,0x18,0x30,0x00, 0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x00, 0x0C,0x18,0x18,0x70,0x18,0x18,0x0C,0x00, 0x8C,0xD6,0x62,0x00,0x00,0x00,0x00,0x00, 0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF, 0x1E,0x33,0x03,0x33,0x1E,0x18,0x30,0x1E, 0x00,0x33,0x00,0x33,0x33,0x33,0x7E,0x00, 0x38,0x00,0x1E,0x33,0x3F,0x03,0x1E,0x00, 0x7E,0xC3,0x3C,0x60,0x7C,0x66,0xFC,0x00, 0x33,0x00,0x1E,0x30,0x3E,0x33,0x7E,0x00, 0x07,0x00,0x1E,0x30,0x3E,0x33,0x7E,0x00, 0x0C,0x0C,0x1E,0x30,0x3E,0x33,0x7E,0x00, 0x00,0x00,0x3E,0x03,0x03,0x3E,0x60,0x3C, 0x7E,0xC3,0x3C,0x66,0x7E,0x06,0x3C,0x00, 0x33,0x00,0x1E,0x33,0x3F,0x03,0x1E,0x00, 0x07,0x00,0x1E,0x33,0x3F,0x03,0x1E,0x00, 0x33,0x00,0x0E,0x0C,0x0C,0x0C,0x1E,0x00, 0x3E,0x63,0x1C,0x18,0x18,0x18,0x3C,0x00, 0x07,0x00,0x0E,0x0C,0x0C,0x0C,0x1E,0x00, 0x33,0x0C,0x1E,0x33,0x33,0x3F,0x33,0x00, 0x0C,0x0C,0x00,0x1E,0x33,0x3F,0x33,0x00, 0x38,0x00,0x3F,0x06,0x1E,0x06,0x3F,0x00, 0x00,0x00,0xFE,0x30,0xFE,0x33,0xFE,0x00, 0x7C,0x36,0x33,0x7F,0x33,0x33,0x73,0x00, 0x1E,0x33,0x00,0x1E,0x33,0x33,0x1E,0x00, 0x00,0x33,0x00,0x1E,0x33,0x33,0x1E,0x00, 0x00,0x07,0x00,0x1E,0x33,0x33,0x1E,0x00, 0x1E,0x33,0x00,0x33,0x33,0x33,0x7E,0x00, 0x00,0x07,0x00,0x33,0x33,0x33,0x7E,0x00, 0x00,0x33,0x00,0x33,0x33,0x3F,0x30,0x1F, 0x63,0x1C,0x3E,0x63,0x63,0x3E,0x1C,0x00, 0x33,0x00,0x33,0x33,0x33,0x33,0x1E,0x00, 0x18,0x18,0x7E,0x03,0x03,0x7E,0x18,0x18, 0x1C,0x36,0x26,0x0F,0x06,0x67,0x3F,0x00, 0x33,0x33,0x1E,0x3F,0x0C,0x3F,0x0C,0x00, 0x0F,0x1B,0x1B,0x2F,0x33,0x7B,0x33,0x70, 0x70,0xD8,0x18,0x7E,0x18,0x18,0x1B,0x0E, 0x38,0x00,0x1E,0x30,0x3E,0x33,0x7E,0x00, 
0x1C,0x00,0x0E,0x0C,0x0C,0x0C,0x1E,0x00, 0x00,0x38,0x00,0x1E,0x33,0x33,0x1E,0x00, 0x00,0x38,0x00,0x33,0x33,0x33,0x7E,0x00, 0x00,0x1F,0x00,0x1F,0x33,0x33,0x33,0x00, 0x3F,0x00,0x33,0x37,0x3F,0x3B,0x33,0x00, 0x3C,0x36,0x36,0x7C,0x00,0x7E,0x00,0x00, 0x3C,0x66,0x66,0x3C,0x00,0x7E,0x00,0x00, 0x0C,0x00,0x0C,0x06,0x03,0x33,0x1E,0x00, 0x00,0x00,0x00,0x3F,0x03,0x03,0x00,0x00, 0x00,0x00,0x00,0x3F,0x30,0x30,0x00,0x00, 0x63,0x33,0x1B,0x7C,0xC6,0x73,0x19,0xF8, 0x63,0x33,0x1B,0xCF,0xE6,0xF3,0xF9,0xC0, 0x00,0x18,0x00,0x18,0x18,0x3C,0x3C,0x18, 0x00,0xCC,0x66,0x33,0x66,0xCC,0x00,0x00, 0x00,0x33,0x66,0xCC,0x66,0x33,0x00,0x00, 0x44,0x11,0x44,0x11,0x44,0x11,0x44,0x11, 0xAA,0x55,0xAA,0x55,0xAA,0x55,0xAA,0x55, 0xBB,0xEE,0xBB,0xEE,0xBB,0xEE,0xBB,0xEE, 0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18, 0x18,0x18,0x18,0x18,0x1F,0x18,0x18,0x18, 0x18,0x18,0x1F,0x18,0x1F,0x18,0x18,0x18, 0x6C,0x6C,0x6C,0x6C,0x6F,0x6C,0x6C,0x6C, 0x00,0x00,0x00,0x00,0x7F,0x6C,0x6C,0x6C, 0x00,0x00,0x1F,0x18,0x1F,0x18,0x18,0x18, 0x6C,0x6C,0x6F,0x60,0x6F,0x6C,0x6C,0x6C, 0x6C,0x6C,0x6C,0x6C,0x6C,0x6C,0x6C,0x6C, 0x00,0x00,0x7F,0x60,0x6F,0x6C,0x6C,0x6C, 0x6C,0x6C,0x6F,0x60,0x7F,0x00,0x00,0x00, 0x6C,0x6C,0x6C,0x6C,0x7F,0x00,0x00,0x00, 0x18,0x18,0x1F,0x18,0x1F,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x1F,0x18,0x18,0x18, 0x18,0x18,0x18,0x18,0xF8,0x00,0x00,0x00, 0x18,0x18,0x18,0x18,0xFF,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xFF,0x18,0x18,0x18, 0x18,0x18,0x18,0x18,0xF8,0x18,0x18,0x18, 0x00,0x00,0x00,0x00,0xFF,0x00,0x00,0x00, 0x18,0x18,0x18,0x18,0xFF,0x18,0x18,0x18, 0x18,0x18,0xF8,0x18,0xF8,0x18,0x18,0x18, 0x6C,0x6C,0x6C,0x6C,0xEC,0x6C,0x6C,0x6C, 0x6C,0x6C,0xEC,0x0C,0xFC,0x00,0x00,0x00, 0x00,0x00,0xFC,0x0C,0xEC,0x6C,0x6C,0x6C, 0x6C,0x6C,0xEF,0x00,0xFF,0x00,0x00,0x00, 0x00,0x00,0xFF,0x00,0xEF,0x6C,0x6C,0x6C, 0x6C,0x6C,0xEC,0x0C,0xEC,0x6C,0x6C,0x6C, 0x00,0x00,0xFF,0x00,0xFF,0x00,0x00,0x00, 0x6C,0x6C,0xEF,0x00,0xEF,0x6C,0x6C,0x6C, 0x18,0x18,0xFF,0x00,0xFF,0x00,0x00,0x00, 0x6C,0x6C,0x6C,0x6C,0xFF,0x00,0x00,0x00, 
0x00,0x00,0xFF,0x00,0xFF,0x18,0x18,0x18, 0x00,0x00,0x00,0x00,0xFF,0x6C,0x6C,0x6C, 0x6C,0x6C,0x6C,0x6C,0xFC,0x00,0x00,0x00, 0x18,0x18,0xF8,0x18,0xF8,0x00,0x00,0x00, 0x00,0x00,0xF8,0x18,0xF8,0x18,0x18,0x18, 0x00,0x00,0x00,0x00,0xFC,0x6C,0x6C,0x6C, 0x6C,0x6C,0x6C,0x6C,0xEF,0x6C,0x6C,0x6C, 0x18,0x18,0xFF,0x00,0xFF,0x18,0x18,0x18, 0x18,0x18,0x18,0x18,0x1F,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xF8,0x18,0x18,0x18, 0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF, 0x00,0x00,0x00,0x00,0xFF,0xFF,0xFF,0xFF, 0x0F,0x0F,0x0F,0x0F,0x0F,0x0F,0x0F,0x0F, 0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0, 0xFF,0xFF,0xFF,0xFF,0x00,0x00,0x00,0x00, 0x00,0x00,0x6E,0x3B,0x13,0x3B,0x6E,0x00, 0x00,0x1E,0x33,0x1F,0x33,0x1F,0x03,0x03, 0x00,0x7F,0x63,0x03,0x03,0x03,0x03,0x00, 0x00,0x7F,0x36,0x36,0x36,0x36,0x36,0x00, 0x7F,0x66,0x0C,0x18,0x0C,0x66,0x7F,0x00, 0x00,0x00,0x7E,0x33,0x33,0x33,0x1E,0x00, 0x00,0x66,0x66,0x66,0x66,0x3E,0x06,0x03, 0x00,0x6E,0x3B,0x18,0x18,0x18,0x18,0x00, 0x3F,0x0C,0x1E,0x33,0x33,0x1E,0x0C,0x3F, 0x1C,0x36,0x63,0x7F,0x63,0x36,0x1C,0x00, 0x1C,0x36,0x63,0x63,0x36,0x36,0x77,0x00, 0x38,0x0C,0x18,0x3E,0x33,0x33,0x1E,0x00, 0x00,0x00,0x7E,0xDB,0xDB,0x7E,0x00,0x00, 0x60,0x30,0x7E,0xDB,0xDB,0x7E,0x06,0x03, 0x3C,0x06,0x03,0x3F,0x03,0x06,0x3C,0x00, 0x1E,0x33,0x33,0x33,0x33,0x33,0x33,0x00, 0x00,0x3F,0x00,0x3F,0x00,0x3F,0x00,0x00, //240 0x0C,0x0C,0x3F,0x0C,0x0C,0x00,0x3F,0x00, 0x06,0x0C,0x18,0x0C,0x06,0x00,0x3F,0x00, 0x18,0x0C,0x06,0x0C,0x18,0x00,0x3F,0x00, 0x70,0xD8,0xD8,0x18,0x18,0x18,0x18,0x18, 0x18,0x18,0x18,0x18,0x18,0x1B,0x1B,0x0E, 0x0C,0x0C,0x00,0x3F,0x00,0x0C,0x0C,0x00, 0x00,0x4E,0x39,0x00,0x4E,0x39,0x00,0x00, 0x1C,0x36,0x36,0x1C,0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x18,0x18,0x00,0x00,0x00, //249 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, //250 0xF0,0x30,0x30,0x30,0x37,0x36,0x3C,0x38, 0x1E,0x36,0x36,0x36,0x36,0x00,0x00,0x00, 0x1E,0x30,0x1C,0x06,0x3E,0x00,0x00,0x00, 0x00,0x00,0x3C,0x3C,0x3C,0x3C,0x00,0x00, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 //255 }; #endif
9,843
19,529
<filename>vnpy/api/oes/include/oes/oes_api/samples/c_sample/02_oes_client_option_sample.c /* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file oes_client_sample.c * * OES API接口库的示例程序 * * @version 1.0 2016/10/21 * @since 2016/10/21 */ #include <oes_api/oes_api.h> #include <sutil/logger/spk_log.h> /* =================================================================== * 常量定义 * =================================================================== */ /* 待交易的上海期权产品代码 */ #define _SH_OPT_SECURITY_ID "10001229" /* 待交易的期权产品对应的标的证券代码 */ #define _SH_OPT_UNDERLYING_SECURITY_ID "510050" /* ------------------------- */ /** * 发送委托请求 * * 提示: * - 可以通过 OesApi_GetClEnvId() 方法获得到当前通道所使用的客户端环境号(clEnvId), 如: * <code>int8 clEnvId = OesApi_GetClEnvId(pOrdChannel);</code> * * @param pOrdChannel 委托通道的会话信息 * @param mktId 市场代码 (必填) @see eOesMarketIdT * @param pSecurityId 股票代码 (必填) * @param pInvAcctId 股东账户代码 (可不填) * @param ordType 委托类型 (必填) @see eOesOrdTypeT, eOesOrdTypeShT, eOesOrdTypeSzT * @param bsType 买卖类型 (必填) @see eOesBuySellTypeT * @param ordQty 委托数量 (必填, 单位为股/张) * @param ordPrice 委托价格 (必填, 单位精确到元后四位,即1元 = 10000) * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_SendOrderReq(OesApiSessionInfoT *pOrdChannel, uint8 mktId, const char *pSecurityId, const char *pInvAcctId, uint8 ordType, uint8 bsType, int32 ordQty, int32 ordPrice) { OesOrdReqT ordReq = {NULLOBJ_OES_ORD_REQ}; SLOG_ASSERT2(pOrdChannel && mktId > 0 && mktId < 
__OES_MKT_ID_MAX && pSecurityId && ordType < __OES_ORD_TYPE_FOK_MAX && bsType > 0 && bsType < __OES_BS_TYPE_MAX_TRADING && ordQty > 0 && ordPrice >= 0, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u], " \ "pSecurityId[%s], ordType[%" __SPK_FMT_HH__ "u], " \ "bsType[%" __SPK_FMT_HH__ "u], ordQty[%d], ordPrice[%d]", pOrdChannel, mktId, pSecurityId ? pSecurityId : "NULL", ordType, bsType, ordQty, ordPrice); ordReq.clSeqNo = (int32) ++pOrdChannel->lastOutMsgSeq; ordReq.mktId = mktId; ordReq.ordType = ordType; ordReq.bsType = bsType; strncpy(ordReq.securityId, pSecurityId, sizeof(ordReq.securityId) - 1); if (pInvAcctId) { /* 股东账户可不填 */ strncpy(ordReq.invAcctId, pInvAcctId, sizeof(ordReq.invAcctId) - 1); } ordReq.ordQty = ordQty; ordReq.ordPrice = ordPrice; return OesApi_SendOrderReq(pOrdChannel, &ordReq); } /** * 发送撤单请求 * * @param pOrdChannel 委托通道的会话信息 * @param mktId 被撤委托的市场代码 (必填) @see eOesMarketIdT * @param pSecurityId 被撤委托的股票代码 (选填, 若不为空则校验待撤订单是否匹配) * @param pInvAcctId 被撤委托的股东账户代码 (选填, 若不为空则校验待撤订单是否匹配) * @param origClSeqNo 被撤委托的流水号 (若使用 origClOrdId, 则不必填充该字段) * @param origClEnvId 被撤委托的客户端环境号 (小于等于0, 则使用当前会话的 clEnvId) * @param origClOrdId 被撤委托的客户订单编号 (若使用 origClSeqNo, 则不必填充该字段) * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_SendOrderCancelReq(OesApiSessionInfoT *pOrdChannel, uint8 mktId, const char *pSecurityId, const char *pInvAcctId, int32 origClSeqNo, int8 origClEnvId, int64 origClOrdId) { OesOrdCancelReqT cancelReq = {NULLOBJ_OES_ORD_CANCEL_REQ}; SLOG_ASSERT2(pOrdChannel && mktId > 0 && mktId < __OES_MKT_ID_MAX, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u]", pOrdChannel, mktId); cancelReq.clSeqNo = (int32) ++pOrdChannel->lastOutMsgSeq; cancelReq.mktId = mktId; if (pSecurityId) { /* 撤单时被撤委托的股票代码可不填 */ strncpy(cancelReq.securityId, pSecurityId, sizeof(cancelReq.securityId) - 1); } if (pInvAcctId) { /* 撤单时被撤委托的股东账户可不填 */ strncpy(cancelReq.invAcctId, pInvAcctId, sizeof(cancelReq.invAcctId) - 1); } cancelReq.origClSeqNo = origClSeqNo; 
cancelReq.origClEnvId = origClEnvId; cancelReq.origClOrdId = origClOrdId; return OesApi_SendOrderCancelReq(pOrdChannel, &cancelReq); } /** * 期权结算单确认 * * @param pOrdChannel 委托通道的会话信息 * @param pClientName 客户端名称 * @param pCustId 客户代码 * * @retval 0 成功 * @retval <0 API调用失败 (负的错误号) * @retval >0 服务端业务处理失败 (OES错误号) */ static inline int32 _OesApiSample_SendOptSettlementConfirmReq(OesApiSessionInfoT *pOrdChannel, const char *pCustId) { OesOptSettlementConfirmReqT optSettleCnfmReq = { NULLOBJ_OES_OPT_SETTLEMENT_CONFIRM_REQ }; OesOptSettlementConfirmReqT *pOptSettleCnfmReq = (OesOptSettlementConfirmReqT *) NULL; OesOptSettlementConfirmRspT optSettleCnfmRsp = { NULLOBJ_OES_OPT_SETTLEMENT_CONFIRM_RSP}; SLOG_ASSERT2(pOrdChannel, "pOrdChannel[%p], pCustId[%p]", pOrdChannel, pCustId); if (pCustId != NULL) { strncpy(optSettleCnfmReq.custId, pCustId, sizeof(optSettleCnfmReq.custId) - 1); pOptSettleCnfmReq = &optSettleCnfmReq; } return OesApi_SendOptSettlementConfirmReq(pOrdChannel, pOptSettleCnfmReq, &optSettleCnfmRsp); } /** * 查询客户端总览信息 * * @param pSessionInfo 会话信息 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_QueryClientOverview(OesApiSessionInfoT *pSessionInfo) { OesClientOverviewT clientOverview = {NULLOBJ_OES_CLIENT_OVERVIEW}; int32 ret = 0; int32 i = 0; ret = OesApi_GetClientOverview(pSessionInfo, &clientOverview); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query client overview failure! ret[%d]", ret); return ret; } else { SLOG_DEBUG("Query client overview success! 
"); } printf(">>> Client Overview: {clientId[%d], " \ "clientType[%" __SPK_FMT_HH__ "u], " \ "clientStatus[%" __SPK_FMT_HH__ "u], " \ "clientName[%s], businessScope[%" __SPK_FMT_HH__ "u], " \ "sseStkPbuId[%d], szseStkPbuId[%d], ordTrafficLimit[%d], " \ "qryTrafficLimit[%d], associatedCustCnt[%d]}\n", clientOverview.clientId, clientOverview.clientType, clientOverview.clientStatus, clientOverview.clientName, clientOverview.businessScope, clientOverview.sseStkPbuId, clientOverview.szseStkPbuId, clientOverview.ordTrafficLimit, clientOverview.qryTrafficLimit, clientOverview.associatedCustCnt); for (i = 0; i < clientOverview.associatedCustCnt; i++) { printf(" >>> Cust Overview: {custId[%s], " \ "status[%" __SPK_FMT_HH__ "u], " \ "riskLevel[%" __SPK_FMT_HH__ "u], branchId[%d], " \ "custName[%s]}\n", clientOverview.custItems[i].custId, clientOverview.custItems[i].status, clientOverview.custItems[i].riskLevel, clientOverview.custItems[i].branchId, clientOverview.custItems[i].custName); if (clientOverview.custItems[i].spotCashAcct.isValid) { printf(" >>> CashAcct Overview: {cashAcctId[%s], " \ "cashType[%" __SPK_FMT_HH__ "u], " \ "cashAcctStatus[%" __SPK_FMT_HH__ "u], " \ "isFundTrsfDisabled[%" __SPK_FMT_HH__ "u]}\n", clientOverview.custItems[i].spotCashAcct.cashAcctId, clientOverview.custItems[i].spotCashAcct.cashType, clientOverview.custItems[i].spotCashAcct.cashAcctStatus, clientOverview.custItems[i].spotCashAcct.isFundTrsfDisabled); } if (clientOverview.custItems[i].shSpotInvAcct.isValid) { printf(" >>> InvAcct Overview: {invAcctId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u], " \ "status[%" __SPK_FMT_HH__ "u], " \ "isTradeDisabled[%" __SPK_FMT_HH__ "u], " \ "pbuId[%d], trdOrdCnt[%d], " \ "nonTrdOrdCnt[%d], cancelOrdCnt[%d], " \ "oesRejectOrdCnt[%d], exchRejectOrdCnt[%d], trdCnt[%d]}\n", clientOverview.custItems[i].shSpotInvAcct.invAcctId, clientOverview.custItems[i].shSpotInvAcct.mktId, clientOverview.custItems[i].shSpotInvAcct.status, 
clientOverview.custItems[i].shSpotInvAcct.isTradeDisabled, clientOverview.custItems[i].shSpotInvAcct.pbuId, clientOverview.custItems[i].shSpotInvAcct.trdOrdCnt, clientOverview.custItems[i].shSpotInvAcct.nonTrdOrdCnt, clientOverview.custItems[i].shSpotInvAcct.cancelOrdCnt, clientOverview.custItems[i].shSpotInvAcct.oesRejectOrdCnt, clientOverview.custItems[i].shSpotInvAcct.exchRejectOrdCnt, clientOverview.custItems[i].shSpotInvAcct.trdCnt); } if (clientOverview.custItems[i].szSpotInvAcct.isValid) { printf(" >>> InvAcct Overview: {invAcctId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u], " \ "status[%" __SPK_FMT_HH__ "u], " \ "isTradeDisabled[%" __SPK_FMT_HH__ "u], " \ "pbuId[%d], trdOrdCnt[%d], " \ "nonTrdOrdCnt[%d], cancelOrdCnt[%d], " \ "oesRejectOrdCnt[%d], exchRejectOrdCnt[%d], trdCnt[%d]}\n", clientOverview.custItems[i].szSpotInvAcct.invAcctId, clientOverview.custItems[i].szSpotInvAcct.mktId, clientOverview.custItems[i].szSpotInvAcct.status, clientOverview.custItems[i].szSpotInvAcct.isTradeDisabled, clientOverview.custItems[i].szSpotInvAcct.pbuId, clientOverview.custItems[i].szSpotInvAcct.trdOrdCnt, clientOverview.custItems[i].szSpotInvAcct.nonTrdOrdCnt, clientOverview.custItems[i].szSpotInvAcct.cancelOrdCnt, clientOverview.custItems[i].szSpotInvAcct.oesRejectOrdCnt, clientOverview.custItems[i].szSpotInvAcct.exchRejectOrdCnt, clientOverview.custItems[i].szSpotInvAcct.trdCnt); } } return 0; } /** * 对期权产品查询返回的产品信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesOptionItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryOptionCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesOptionItemT *pOptionItem = (OesOptionItemT *) pMsgBody; printf(">>> Recv QryOptionRsp: {index[%d], isEnd[%c], " \ "securityId[%s], contractId[%s], securityName[%s], " \ "underlyingSecurityId[%s], mktId[%" __SPK_FMT_HH__ "u], 
" "securityType[%" __SPK_FMT_HH__ "u], " \ "subSecurityType[%" __SPK_FMT_HH__ "u], " \ "contractType[%" __SPK_FMT_HH__ "u], " \ "exerciseType[%" __SPK_FMT_HH__ "u], " \ "deliveryType[%" __SPK_FMT_HH__ "u], " \ "isDayTrading[%" __SPK_FMT_HH__ "u], " \ "limitOpenFlag[%" __SPK_FMT_HH__ "u], " \ "suspFlag[%" __SPK_FMT_HH__ "u], " \ "temporarySuspFlag[%" __SPK_FMT_HH__ "u], " \ "contractUnit[%d], exercisePrice[%d], " \ "exerciseBeginDate[%08d], exerciseEndDate[%08d], " \ "prevSettlPrice[%d], prevClosePrice[%d], " \ "upperLimitPrice[%d], lowerLimitPrice[%d], " \ "sellMargin[%" __SPK_FMT_LL__ "d], " \ "increasedMarginRatio[%d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 'Y' : 'N', pOptionItem->securityId, pOptionItem->contractId, pOptionItem->securityName, pOptionItem->underlyingSecurityId, pOptionItem->mktId, pOptionItem->securityType, pOptionItem->subSecurityType, pOptionItem->contractType, pOptionItem->exerciseType, pOptionItem->deliveryType, pOptionItem->isDayTrading, pOptionItem->limitOpenFlag, pOptionItem->suspFlag, pOptionItem->temporarySuspFlag, pOptionItem->contractUnit, pOptionItem->exercisePrice, pOptionItem->exerciseBeginDate, pOptionItem->exerciseEndDate, pOptionItem->prevSettlPrice, pOptionItem->prevClosePrice, pOptionItem->upperLimitPrice, pOptionItem->lowerLimitPrice, pOptionItem->sellMargin, pOptionItem->increasedMarginRatio); return 0; } /** * 查询期权产品信息 * * @param pQryChannel 查询通道的会话信息 * @param pSecurityId 产品代码 * @param mktId 市场代码 * @param securityType 证券类别 * @param subSecurityType 证券子类别 * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOption(OesApiSessionInfoT *pQryChannel, const char *pSecurityId, uint8 mktId) { OesQryOptionFilterT qryFilter = {NULLOBJ_OES_QRY_OPTION_FILTER}; int32 ret = 0; SLOG_ASSERT(pQryChannel); if (pSecurityId) { strncpy(qryFilter.securityId, pSecurityId, sizeof(qryFilter.securityId) - 1); } qryFilter.mktId = mktId; ret = OesApi_QueryOption(pQryChannel, &qryFilter, _OesApiSample_OnQryOptionCallback, NULL); 
if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query option failure! ret[%d], pSecurityId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u]", ret, pSecurityId ? pSecurityId : "NULL", mktId); return ret; } else { SLOG_DEBUG("Query option success! total count: [%d]", ret); } return 0; } /** * 对资金查询返回的资金信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesCashAssetItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryCashAssetCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesCashAssetItemT *pCashAssetItem = (OesCashAssetItemT *) pMsgBody; printf(">>> Recv QryCashRsp: {index[%d], isEnd[%c], " \ "cashAcctId[%s], custId[%s], " \ "cashType[%" __SPK_FMT_HH__ "u], " \ "beginningBal[%" __SPK_FMT_LL__ "d], " \ "beginningAvailableBal[%" __SPK_FMT_LL__ "d], " \ "beginningDrawableBal[%" __SPK_FMT_LL__ "d], " \ "disableBal[%" __SPK_FMT_LL__ "d], " \ "totalDepositAmt[%" __SPK_FMT_LL__ "d], " \ "totalWithdrawAmt[%" __SPK_FMT_LL__ "d], " \ "withdrawFrzAmt[%" __SPK_FMT_LL__ "d], " \ "totalSellAmt[%" __SPK_FMT_LL__ "d], " \ "totalBuyAmt[%" __SPK_FMT_LL__ "d], " \ "buyFrzAmt[%" __SPK_FMT_LL__ "d], " \ "totalFeeAmt[%" __SPK_FMT_LL__ "d], " \ "feeFrzAmt[%" __SPK_FMT_LL__ "d], " \ "initialMargin[%" __SPK_FMT_LL__ "d], " \ "totalExerciseFrzAmt[%" __SPK_FMT_LL__ "d], " \ "marginAmt[%" __SPK_FMT_LL__ "d], " \ "marginFrzAmt[%" __SPK_FMT_LL__ "d], " \ "totalMarketMargin[%" __SPK_FMT_LL__ "d], " \ "totalNetMargin[%" __SPK_FMT_LL__ "d], " \ "pendingSupplMargin[%" __SPK_FMT_LL__ "d], " \ "currentTotalBal[%" __SPK_FMT_LL__ "d], " \ "currentAvailableBal[%" __SPK_FMT_LL__ "d], " \ "currentDrawableBal[%" __SPK_FMT_LL__ "d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 
'Y' : 'N', pCashAssetItem->cashAcctId, pCashAssetItem->custId, pCashAssetItem->cashType, pCashAssetItem->beginningBal, pCashAssetItem->beginningAvailableBal, pCashAssetItem->beginningDrawableBal, pCashAssetItem->disableBal, pCashAssetItem->totalDepositAmt, pCashAssetItem->totalWithdrawAmt, pCashAssetItem->withdrawFrzAmt, pCashAssetItem->totalSellAmt, pCashAssetItem->totalBuyAmt, pCashAssetItem->buyFrzAmt, pCashAssetItem->totalFeeAmt, pCashAssetItem->feeFrzAmt, pCashAssetItem->optionExt.initialMargin, pCashAssetItem->optionExt.totalExerciseFrzAmt, pCashAssetItem->marginAmt, pCashAssetItem->marginFrzAmt, pCashAssetItem->optionExt.totalMarketMargin, pCashAssetItem->optionExt.totalNetMargin, pCashAssetItem->optionExt.pendingSupplMargin, pCashAssetItem->currentTotalBal, pCashAssetItem->currentAvailableBal, pCashAssetItem->currentDrawableBal); return 0; } /** * 查询资金 * * @param pQryChannel 查询通道的会话信息 * @param pCashAcctId 资金账户代码 * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryCashAsset(OesApiSessionInfoT *pQryChannel, const char *pCashAcctId) { OesQryCashAssetFilterT qryFilter = {NULLOBJ_OES_QRY_CASH_ASSET_FILTER}; int32 ret = 0; SLOG_ASSERT(pQryChannel); if (pCashAcctId) { strncpy(qryFilter.cashAcctId, pCashAcctId, sizeof(qryFilter.cashAcctId) - 1); } ret = OesApi_QueryCashAsset(pQryChannel, &qryFilter, _OesApiSample_OnQryCashAssetCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query cash asset failure! " \ "ret[%d], pCashAcctId[%s]", ret, pCashAcctId ? pCashAcctId : "NULL"); return ret; } else { SLOG_DEBUG("Query cash asset success! 
total count: [%d]", ret); } return 0; } /** * 对期权持仓查询返回的持仓信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesOptHoldingItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryOptHoldingCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesOptHoldingItemT *pHoldingItem = (OesOptHoldingItemT *) pMsgBody; printf(">>> Recv QryOptHoldingRsp: {index[%d], isEnd[%c], " \ "invAcctId[%s], securityId[%s], mktId[%" __SPK_FMT_HH__ "u], " \ "positionType[%" __SPK_FMT_HH__ "u], " \ "positionMargin[%" __SPK_FMT_LL__ "d], " \ "closeAvlQty[%" __SPK_FMT_LL__ "d], " \ "exerciseAvlQty[%" __SPK_FMT_LL__ "d], " \ "coveredAvlUnderlyingQty[%" __SPK_FMT_LL__ "d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 'Y' : 'N', pHoldingItem->invAcctId, pHoldingItem->securityId, pHoldingItem->mktId, pHoldingItem->positionType, pHoldingItem->positionMargin, pHoldingItem->closeAvlQty, pHoldingItem->exerciseAvlQty, pHoldingItem->coveredAvlUnderlyingQty); return 0; } /** * 查询期权持仓 * * @param pQryChannel 查询通道的会话信息 * @param mktId 市场代码 @see eOesMarketIdT * @param positionType 持仓类型 @see eOesOptPositionTypeT * @param pSecurityId 股票代码 (char[6]/char[8]) * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOptHolding(OesApiSessionInfoT *pQryChannel, uint8 mktId, uint8 positionType, const char *pSecurityId) { OesQryOptHoldingFilterT qryFilter = {NULLOBJ_OES_QRY_OPT_HOLDING_FILTER}; int32 ret = 0; SLOG_ASSERT2(pQryChannel && mktId < __OES_MKT_ID_MAX, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u]", pQryChannel, mktId); qryFilter.mktId = mktId; qryFilter.positionType = positionType; if (pSecurityId) { strncpy(qryFilter.securityId, pSecurityId, sizeof(qryFilter.securityId) - 1); } ret = OesApi_QueryOptHolding(pQryChannel, &qryFilter, _OesApiSample_OnQryOptHoldingCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query 
option holding failure! " \ "ret[%d], mktId[%" __SPK_FMT_HH__ "u], " \ "positionType[%" __SPK_FMT_HH__ "u], pSecurityId[%s]", ret, mktId, positionType, pSecurityId ? pSecurityId : "NULL"); return ret; } else { SLOG_DEBUG("Query option holding success! total count: [%d]", ret); } return 0; } /** * 对期权标的持仓查询返回的标的持仓信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesOptUnderlyingHoldingItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryOptUnderlyingHoldingCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesOptUnderlyingHoldingItemT *pUnderlyingItem = (OesOptUnderlyingHoldingItemT *) pMsgBody; printf(">>> Recv QryOptUnderlyingHoldingRsp: {index[%d], isEnd[%c], " \ "invAcctId[%s], underlyingSecurityId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u], " \ "underlyingMktId[%" __SPK_FMT_HH__ "u], " \ "underlyingSecurityType[%" __SPK_FMT_HH__ "u], " \ "underlyingSubSecurityType[%" __SPK_FMT_HH__ "u], " \ "originalHld[%" __SPK_FMT_LL__ "d], " \ "originalAvlHld[%" __SPK_FMT_LL__ "d], " \ "originalCoveredQty[%" __SPK_FMT_LL__ "d], " \ "initialCoveredQty[%" __SPK_FMT_LL__ "d], " \ "coveredQty[%" __SPK_FMT_LL__ "d], " \ "coveredGapQty[%" __SPK_FMT_LL__ "d], " \ "coveredAvlQty[%" __SPK_FMT_LL__ "d], " \ "lockAvlQty[%" __SPK_FMT_LL__ "d], " \ "sumHld[%" __SPK_FMT_LL__ "d], " \ "maxReduceQuota[%" __SPK_FMT_LL__ "d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 
'Y' : 'N', pUnderlyingItem->invAcctId, pUnderlyingItem->underlyingSecurityId, pUnderlyingItem->mktId, pUnderlyingItem->underlyingMktId, pUnderlyingItem->underlyingSecurityType, pUnderlyingItem->underlyingSubSecurityType, pUnderlyingItem->originalHld, pUnderlyingItem->originalAvlHld, pUnderlyingItem->originalCoveredQty, pUnderlyingItem->initialCoveredQty, pUnderlyingItem->coveredQty, pUnderlyingItem->coveredGapQty, pUnderlyingItem->coveredAvlQty, pUnderlyingItem->lockAvlQty, pUnderlyingItem->sumHld, pUnderlyingItem->maxReduceQuota); return 0; } /** * 查询期权标的持仓 * * @param pQryChannel 查询通道的会话信息 * @param mktId 市场代码 @see eOesMarketIdT * @param pSecurityId 股票代码 (char[6]/char[8]) * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOptUnderlyingHolding(OesApiSessionInfoT *pQryChannel, uint8 mktId, const char *pUnderlyingSecurityId) { OesQryOptUnderlyingHoldingFilterT qryFilter = {NULLOBJ_OES_QRY_OPT_UNDERLYING_HOLDING_FILTER}; int32 ret = 0; SLOG_ASSERT2(pQryChannel && mktId < __OES_MKT_ID_MAX, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u]", pQryChannel, mktId); qryFilter.mktId = mktId; if (pUnderlyingSecurityId) { strncpy(qryFilter.underlyingSecurityId, pUnderlyingSecurityId, sizeof(qryFilter.underlyingSecurityId) - 1); } ret = OesApi_QueryOptUnderlyingHolding(pQryChannel, &qryFilter, _OesApiSample_OnQryOptUnderlyingHoldingCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query option underlying holding failure! " \ "ret[%d], mktId[%" __SPK_FMT_HH__ "u], pUnderlyingSecurityId[%s]", ret, mktId, pUnderlyingSecurityId ? pUnderlyingSecurityId : "NULL"); return ret; } else { SLOG_DEBUG("Query option underlying holding success! 
total count: [%d]", ret); } return 0; } /** * 对期权限仓额度查询返回的限仓信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesOptPositionLimitItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryOptPositionLimitCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesOptPositionLimitItemT *pPositionLimitItem = (OesOptPositionLimitItemT *) pMsgBody; printf(">>> Recv QryOptPositionLimitRsp: {index[%d], isEnd[%c], " \ "invAcctId[%s], underlyingSecurityId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u], " \ "underlyingMktId[%" __SPK_FMT_HH__ "u], " \ "underlyingSecurityType[%" __SPK_FMT_HH__ "u], " \ "underlyingSubSecurityType[%" __SPK_FMT_HH__ "u], " \ "longPositionLimit[%d], " \ "totalPositionLimit[%d], " \ "dailyBuyOpenLimit[%d], " \ "originalLongQty[%d], " \ "originalShortQty[%d], " \ "originalCoveredQty[%d], " \ "availableLongPositionLimit[%d], " \ "availableTotalPositionLimit[%d], " \ "availableDailyBuyOpenLimit[%d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 
'Y' : 'N', pPositionLimitItem->invAcctId, pPositionLimitItem->underlyingSecurityId, pPositionLimitItem->mktId, pPositionLimitItem->underlyingMktId, pPositionLimitItem->underlyingSecurityType, pPositionLimitItem->underlyingSubSecurityType, pPositionLimitItem->longPositionLimit, pPositionLimitItem->totalPositionLimit, pPositionLimitItem->dailyBuyOpenLimit, pPositionLimitItem->originalLongQty, pPositionLimitItem->originalShortQty, pPositionLimitItem->originalCoveredQty, pPositionLimitItem->availableLongPositionLimit, pPositionLimitItem->availableTotalPositionLimit, pPositionLimitItem->availableDailyBuyOpenLimit); return 0; } /** * 查询期权限仓额度 * * @param pQryChannel 查询通道的会话信息 * @param mktId 市场代码 @see eOesMarketIdT * @param pSecurityId 股票代码 (char[6]/char[8]) * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOptPositionLimit(OesApiSessionInfoT *pQryChannel, uint8 mktId, const char *pUnderlyingSecurityId) { OesQryOptPositionLimitFilterT qryFilter = {NULLOBJ_OES_QRY_OPT_POSITION_LIMIT_FILTER}; int32 ret = 0; SLOG_ASSERT2(pQryChannel && mktId < __OES_MKT_ID_MAX, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u]", pQryChannel, mktId); qryFilter.mktId = mktId; if (pUnderlyingSecurityId) { strncpy(qryFilter.underlyingSecurityId, pUnderlyingSecurityId, sizeof(qryFilter.underlyingSecurityId) - 1); } ret = OesApi_QueryOptPositionLimit(pQryChannel, &qryFilter, _OesApiSample_OnQryOptPositionLimitCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query option position limit failure! " \ "ret[%d], mktId[%" __SPK_FMT_HH__ "u], pUnderlyingSecurityId[%s]", ret, mktId, pUnderlyingSecurityId ? pUnderlyingSecurityId : "NULL"); return ret; } else { SLOG_DEBUG("Query option position limit success! 
total count: [%d]", ret); } return 0; } /** * 对期权限购额度查询返回的限购信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesOptPurchaseLimitItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryOptPurchaseLimitCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesOptPurchaseLimitItemT *pPurchaseLimitItem = (OesOptPurchaseLimitItemT *) pMsgBody; printf(">>> Recv OesOptPurchaseLimitRsp: {index[%d], isEnd[%c], " \ "custId[%s], cashAcctId[%s], invAcctId[%s], " \ "mktId[%" __SPK_FMT_HH__ "u], " \ "custType[%" __SPK_FMT_HH__ "u], " \ "purchaseLimit[%" __SPK_FMT_LL__ "d], " \ "originalUsedPurchaseAmt[%" __SPK_FMT_LL__ "d], " \ "totalOpenPurchaseAmt[%" __SPK_FMT_LL__ "d], " \ "frzPurchaseAmt[%" __SPK_FMT_LL__ "d], " \ "totalClosePurchaseAmt[%" __SPK_FMT_LL__ "d], " \ "availablePurchaseLimit[%" __SPK_FMT_LL__ "d]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 
'Y' : 'N', pPurchaseLimitItem->custId, pPurchaseLimitItem->cashAcctId, pPurchaseLimitItem->invAcctId, pPurchaseLimitItem->mktId, pPurchaseLimitItem->custType, pPurchaseLimitItem->purchaseLimit, pPurchaseLimitItem->originalUsedPurchaseAmt, pPurchaseLimitItem->totalOpenPurchaseAmt, pPurchaseLimitItem->frzPurchaseAmt, pPurchaseLimitItem->totalClosePurchaseAmt, pPurchaseLimitItem->availablePurchaseLimit); return 0; } /** * 查询期权限购额度 * * @param pQryChannel 查询通道的会话信息 * @param mktId 市场代码 @see eOesMarketIdT * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOptPurchaseLimit( OesApiSessionInfoT *pQryChannel, uint8 mktId) { OesQryOptPurchaseLimitFilterT qryFilter = {NULLOBJ_OES_QRY_OPT_PURCHASE_LIMIT_FILTER}; int32 ret = 0; SLOG_ASSERT2(pQryChannel && mktId < __OES_MKT_ID_MAX, "pOrdChannel[%p], mktId[%" __SPK_FMT_HH__ "u]", pQryChannel, mktId); qryFilter.mktId = mktId; ret = OesApi_QueryOptPurchaseLimit(pQryChannel, &qryFilter, _OesApiSample_OnQryOptPurchaseLimitCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query option purchase limit failure! " \ "ret[%d], mktId[%" __SPK_FMT_HH__ "u]", ret, mktId); return ret; } else { SLOG_DEBUG("Query option purchase limit success! 
total count: [%d]", ret); } return 0; } /** * 查询期权结算单信息 * * @param pQryChannel 查询通道的会话信息 * @param pCustId 客户代码 * @param[out] pSettlementInfo 用于输出结算单信息的缓存区 * @param settlementInfoSize 结算单缓存区大小 * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryOptSettlementStatement(OesApiSessionInfoT *pQryChannel, const char *pCustId) { char settlementInfo[32 * 1024] = {0}; int32 ret = 0; SLOG_ASSERT2(pQryChannel, "pOrdChannel[%p]", pQryChannel); ret = OesApi_QueryOptSettlementStatement(pQryChannel, pCustId, settlementInfo, sizeof(settlementInfo)); if (ret > 0) { fprintf(stdout, "%s\n", settlementInfo); } return 0; } /** * 对通知消息查询返回的信息进行处理的回调函数 * * @param pSessionInfo 会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 @see OesMarketStateItemT * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static int32 _OesApiSample_OnQryNotifyInfoCallback(OesApiSessionInfoT *pSessionInfo, SMsgHeadT *pMsgHead, void *pMsgBody, OesQryCursorT *pQryCursor, void *pCallbackParams) { OesNotifyInfoItemT *pNotifyInfoItem = (OesNotifyInfoItemT *) pMsgBody; printf(">>> Recv QryNotifyInfoRsp: {index[%d], isEnd[%c], " "notifySeqNo[%d], " \ "notifyLevel[%" __SPK_FMT_HH__ "u], " \ "notifyScope[%" __SPK_FMT_HH__ "u], " \ "notifySource[%" __SPK_FMT_HH__ "u], " \ "notifyType[%" __SPK_FMT_HH__ "u], " \ "tranTime[%d], custId[%s], content[%s]}\n", pQryCursor->seqNo, pQryCursor->isEnd ? 
'Y' : 'N', pNotifyInfoItem->notifySeqNo, pNotifyInfoItem->notifyLevel, pNotifyInfoItem->notifyScope, pNotifyInfoItem->notifySource, pNotifyInfoItem->notifyType, pNotifyInfoItem->tranTime, pNotifyInfoItem->custId, pNotifyInfoItem->content); return 0; } /** * 查询通知消息 * * @param pQryChannel 查询通道的会话信息 * @param pCustId 客户代码 * @param notifyLevel 通知消息等级 @see eOesNotifyLevelT * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_QueryNotifyInfo(OesApiSessionInfoT *pQryChannel, char *pCustId, uint8 notifyLevel) { OesQryNotifyInfoFilterT qryFilter = {NULLOBJ_OES_QRY_NOTIFY_INFO_FILTER}; int32 ret = 0; SLOG_ASSERT(pQryChannel); qryFilter.notifyLevel = notifyLevel; if (pCustId) { strncpy(qryFilter.custId, pCustId, sizeof(qryFilter.custId) - 1); } ret = OesApi_QueryNotifyInfo(pQryChannel, &qryFilter, _OesApiSample_OnQryNotifyInfoCallback, NULL); if (__spk_unlikely(ret < 0)) { SLOG_ERROR("Query notify info failure! " \ "ret[%d], custId[%s]", ret, pCustId ? pCustId : "NULL"); return ret; } else { SLOG_DEBUG("Query notify info success! 
total count: [%d]", ret); } return 0; } /** * 对执行报告消息进行处理的回调函数 * * @param pRptChannel 回报通道的会话信息 * @param pMsgHead 消息头 * @param pMsgBody 消息体数据 * @param pCallbackParams 外部传入的参数 * @return 大于等于0,成功;小于0,失败(错误号) */ static inline int32 _OesApiSample_HandleReportMsg(OesApiSessionInfoT *pRptChannel, SMsgHeadT *pMsgHead, void *pMsgBody, void *pCallbackParams) { OesRspMsgBodyT *pRspMsg = (OesRspMsgBodyT *) pMsgBody; OesRptMsgT *pRptMsg = &pRspMsg->rptMsg; assert(pRptChannel && pMsgHead && pRspMsg); switch (pMsgHead->msgId) { case OESMSG_RPT_ORDER_INSERT: /* OES委托已生成 (已通过风控检查) @see OesOrdCnfmT */ printf(">>> Recv OrdInsertRsp: {clSeqNo: %d, " \ "clOrdId: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.ordInsertRsp.clSeqNo, pRptMsg->rptBody.ordInsertRsp.clOrdId); break; case OESMSG_RPT_BUSINESS_REJECT: /* OES业务拒绝 (未通过风控检查等) @see OesOrdRejectT */ printf(">>> Recv OrdRejectRsp: {clSeqNo: %d, ordRejReason: %d}\n", pRptMsg->rptBody.ordRejectRsp.clSeqNo, pRptMsg->rptHead.ordRejReason); break; case OESMSG_RPT_ORDER_REPORT: /* 交易所委托回报 (包括交易所委托拒绝、委托确认和撤单完成通知) @see OesOrdCnfmT */ printf(">>> Recv OrdCnfm: {clSeqNo: %d, " \ "clOrdId: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.ordCnfm.clSeqNo, pRptMsg->rptBody.ordCnfm.clOrdId); break; case OESMSG_RPT_TRADE_REPORT: /* 交易所成交回报 @see OesTrdCnfmT */ printf(">>> Recv TrdCnfm: {clSeqNo: %d, " \ "clOrdId: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.trdCnfm.clSeqNo, pRptMsg->rptBody.trdCnfm.clOrdId); /* 如果全部成交, 则提取和构造相应的全部成交委托回报信息 if (pRptMsg->rptBody.trdCnfm.ordStatus == OES_ORD_STATUS_FILLED) { OesOrdCnfmT ordReport = {NULLOBJ_OES_ORD_CNFM}; OesHelper_ExtractOrdReportFromTrd( &pRptMsg->rptBody.trdCnfm, &ordReport); } */ break; case OESMSG_RPT_CASH_ASSET_VARIATION: /* 资金变动信息 @see OesCashAssetItemT */ printf(">>> Recv CashAsset: {cashAcctId: %s, " \ "currentAvailableBal: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.cashAssetRpt.cashAcctId, pRptMsg->rptBody.cashAssetRpt.currentAvailableBal); break; case OESMSG_RPT_STOCK_HOLDING_VARIATION: /* 持仓变动信息 (股票) 
@see OesStkHoldingItemT */ printf(">>> Recv StkHolding: {invAcctId: %s, securityId: %s, " \ "mktId: %" __SPK_FMT_HH__ "u, " \ "sellAvlHld: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.stkHoldingRpt.invAcctId, pRptMsg->rptBody.stkHoldingRpt.securityId, pRptMsg->rptBody.stkHoldingRpt.mktId, pRptMsg->rptBody.stkHoldingRpt.sellAvlHld); break; case OESMSG_RPT_OPTION_HOLDING_VARIATION: /* 持仓变动信息 (期权) @see OesOptHoldingItemT */ printf(">>> Recv OptHolding: {invAcctId: %s, securityId: %s, " \ "mktId: %" __SPK_FMT_HH__ "u, " \ "positionType: %" __SPK_FMT_HH__ "u, " \ "closeAvlQty: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.optHoldingRpt.invAcctId, pRptMsg->rptBody.optHoldingRpt.securityId, pRptMsg->rptBody.optHoldingRpt.mktId, pRptMsg->rptBody.optHoldingRpt.positionType, pRptMsg->rptBody.optHoldingRpt.closeAvlQty); break; case OESMSG_RPT_OPTION_UNDERLYING_HOLDING_VARIATION: /* 期权标的持仓变动信息 @see OesOptUnderlyingHoldingItemT */ printf(">>> Recv OptUnderlyingHolding: {invAcctId: %s, " \ "underlyingSecurityId: %s, mktId: %" __SPK_FMT_HH__ "u, " \ "coveredAvlQty: %" __SPK_FMT_LL__ "d, " \ "coveredGapQty: %" __SPK_FMT_LL__ "d}\n", pRptMsg->rptBody.optUnderlyingHoldingRpt.invAcctId, pRptMsg->rptBody.optUnderlyingHoldingRpt.underlyingSecurityId, pRptMsg->rptBody.optUnderlyingHoldingRpt.mktId, pRptMsg->rptBody.optUnderlyingHoldingRpt.coveredAvlQty, pRptMsg->rptBody.optUnderlyingHoldingRpt.coveredGapQty); break; case OESMSG_RPT_FUND_TRSF_REJECT: /* 出入金委托响应-业务拒绝 @see OesFundTrsfRejectT */ printf(">>> Recv FundTrsfReject: {cashAcctId: %s, rejReason: %d}\n", pRptMsg->rptBody.fundTrsfRejectRsp.cashAcctId, pRptMsg->rptBody.fundTrsfRejectRsp.rejReason); break; case OESMSG_RPT_FUND_TRSF_REPORT: /* 出入金委托执行报告 @see OesFundTrsfReportT */ printf(">>> Recv FundTrsfReport: {cashAcctId: %s, " \ "trsfStatus: %" __SPK_FMT_HH__ "u}\n", pRptMsg->rptBody.fundTrsfCnfm.cashAcctId, pRptMsg->rptBody.fundTrsfCnfm.trsfStatus); break; case OESMSG_RPT_REPORT_SYNCHRONIZATION: /* 回报同步响应 @see 
OesReportSynchronizationRspT */ printf(">>> Recv report synchronization: " \ "{subscribeEnvId: %" __SPK_FMT_HH__ "d, " \ "subscribeRptTypes: %d, " \ "lastRptSeqNum: %" __SPK_FMT_LL__ "d}\n", pRspMsg->reportSynchronizationRsp.subscribeEnvId, pRspMsg->reportSynchronizationRsp.subscribeRptTypes, pRspMsg->reportSynchronizationRsp.lastRptSeqNum); break; case OESMSG_RPT_MARKET_STATE: /* 市场状态信息 @see OesMarketStateInfoT */ printf(">>> Recv MktStatusReport: " \ "{exchId: %" __SPK_FMT_HH__ "u, " \ "platformId: %" __SPK_FMT_HH__ "u, " \ "mktId: %" __SPK_FMT_HH__ "u, " \ "mktState: %" __SPK_FMT_HH__ "u}\n", pRspMsg->mktStateRpt.exchId, pRspMsg->mktStateRpt.platformId, pRspMsg->mktStateRpt.mktId, pRspMsg->mktStateRpt.mktState); break; case OESMSG_RPT_NOTIFY_INFO: /* 通知消息回报 @see OesNotifyInfoItemT */ printf(">>> Recv NotifyInfoReport: " \ "{notifySeqNo: %d, " \ "notifyLevel: %" __SPK_FMT_HH__ "u, " \ "notifyScope: %" __SPK_FMT_HH__ "u, " \ "notifySource: %" __SPK_FMT_HH__ "u, " \ "notifyType: %" __SPK_FMT_HH__ "u, " \ "tranTime: %d, " \ "custId: %s, " \ "content: %s}\n", pRptMsg->rptBody.notifyInfoRpt.notifySeqNo, pRptMsg->rptBody.notifyInfoRpt.notifyLevel, pRptMsg->rptBody.notifyInfoRpt.notifyScope, pRptMsg->rptBody.notifyInfoRpt.notifySource, pRptMsg->rptBody.notifyInfoRpt.notifyType, pRptMsg->rptBody.notifyInfoRpt.tranTime, pRptMsg->rptBody.notifyInfoRpt.custId, pRptMsg->rptBody.notifyInfoRpt.content); break; case OESMSG_RPT_OPTION_SETTLEMENT_CONFIRMED: /* 期权账户结算单确认消息 @see OesOptSettlementConfirmRspT */ printf(">>> Recv SettlementConfirmedRsp: " \ "{custId: %s, " \ "clientId: %" __SPK_FMT_SHORT__ "d, " \ "clEnvId: %" __SPK_FMT_HH__ "u, " \ "transDate: %d, " \ "transTime: %d, " \ "rejReason: %d}\n", pRptMsg->rptBody.optSettlementConfirmRpt.custId, pRptMsg->rptBody.optSettlementConfirmRpt.clientId, pRptMsg->rptBody.optSettlementConfirmRpt.clEnvId, pRptMsg->rptBody.optSettlementConfirmRpt.transDate, pRptMsg->rptBody.optSettlementConfirmRpt.transTime, 
pRptMsg->rptBody.optSettlementConfirmRpt.rejReason); break; case OESMSG_SESS_HEARTBEAT: printf(">>> Recv heartbeat message.\n"); break; default: fprintf(stderr, "Invalid message type! msgId[0x%02X]\n", pMsgHead->msgId); break; } return 0; } /** * 超时检查处理 * * @param pRptChannel 回报通道的会话信息 * @return 等于0,运行正常,未超时;大于0,已超时,需要重建连接;小于0,失败(错误号) */ static inline int32 _OesApiSample_OnTimeout(OesApiClientEnvT *pClientEnv) { OesApiSessionInfoT *pRptChannel = &pClientEnv->rptChannel; int64 recvInterval = 0; if (pRptChannel->heartBtInt > 0) { recvInterval = time((time_t *) NULL) - OesApi_GetLastRecvTime(pRptChannel); if (recvInterval > pRptChannel->heartBtInt * 2) { SLOG_ERROR("会话已超时, 将主动断开与服务器[%s:%d]的连接! " \ "lastRecvTime: [%" __SPK_FMT_LL__ "d], " \ "lastSendTime: [%" __SPK_FMT_LL__ "d], " \ "heartBtInt: [%d], recvInterval: [%" __SPK_FMT_LL__ "d]", pRptChannel->channel.remoteAddr, pRptChannel->channel.remotePort, (int64) pRptChannel->lastRecvTime.tv_sec, (int64) pRptChannel->lastSendTime.tv_sec, pRptChannel->heartBtInt, recvInterval); return ETIMEDOUT; } } return 0; } /** * 回报采集处理 (可以做为线程的主函数运行) * * @param pRptChannel 回报通道的会话信息 * @return TRUE 处理成功; FALSE 处理失败 */ void* OesApiSample_ReportThreadMain(OesApiClientEnvT *pClientEnv) { static const int32 THE_TIMEOUT_MS = 1000; OesApiSessionInfoT *pRptChannel = &pClientEnv->rptChannel; volatile int32 *pThreadTerminatedFlag = &pRptChannel->__customFlag; int32 ret = 0; while (! 
*pThreadTerminatedFlag) { /* 等待回报消息到达, 并通过回调函数对消息进行处理 */ ret = OesApi_WaitReportMsg(pRptChannel, THE_TIMEOUT_MS, _OesApiSample_HandleReportMsg, NULL); if (__spk_unlikely(ret < 0)) { if (__spk_likely(SPK_IS_NEG_ETIMEDOUT(ret))) { /* 执行超时检查 (检查会话是否已超时) */ if (__spk_likely(_OesApiSample_OnTimeout(pClientEnv) == 0)) { continue; } /* 会话已超时 */ goto ON_ERROR; } if (SPK_IS_NEG_EPIPE(ret)) { /* 连接已断开 */ } goto ON_ERROR; } } *pThreadTerminatedFlag = -1; return (void *) TRUE; ON_ERROR: *pThreadTerminatedFlag = -1; return (void *) FALSE; } /** * API接口库示例程序的主函数 (期权交易) */ int32 OesApiSample_Main() { static const char THE_CONFIG_FILE_NAME[] = "oes_client_sample.conf"; OesApiClientEnvT cliEnv = {NULLOBJ_OESAPI_CLIENT_ENV}; int32 retCode = 0; /* 因为样例代码会将信息输出到控制台, 所以此处先关闭标准输出缓存 */ setvbuf(stdout, (char *) NULL, _IONBF, 0); /* * 1. 设置 线程私有变量 以及 自定义参数信息 * - 可设置的线程私有变量包括: 登录用户名、登录密码、客户端环境号以及订阅回报使用的客户端环境号 * - 登录时会优先使用设置的线程变量值替换配置文件中的配置信息 * - 自定义信息包括: IP地址、MAC地址、设备序列号 * - IP和MAC地址在登录时会尝试自动获取, 自动获取失败时会使用自设置 * - 设备序列号目前不会自动获取, 需要主动设置以防止券商控制导致的登录失败, 同时满足监管需求 */ { /* 设置当前线程使用的登录用户名 */ // OesApi_SetThreadUsername("customer1"); /* * 设置当前线程使用的登录密码 * @note 如通过API接口设置,则可以不在配置文件中配置; * - 支持通过前缀指定密码类型, 如 md5:PASSWORD, txt:PASSWORD */ // OesApi_SetThreadPassword("<PASSWORD>"); // OesApi_SetThreadPassword("<PASSWORD>"); /* 设置客户端本地的设备序列号 */ OesApi_SetCustomizedDriverId("ABCDEFGHIJKLMN"); } /* * 2. 初始化客户端环境 * - 一次性初始化多个通道时, 可通过如下接口完成初始化: * - OesApi_InitAll * - OesApi_InitAllByConvention * - OesApi_InitAllByCfgStruct * - 对单个通道进行初始化时, 可通过如下接口完成初始化: * - OesApi_InitOrdChannel / OesApi_InitOrdChannel2 * - OesApi_InitRptChannel / OesApi_InitRptChannel2 * - OesApi_InitQryChannel / OesApi_InitQryChannel2 */ { /* 初始化客户端环境 (配置文件参见: oes_client_sample.conf) */ if (! OesApi_InitAll(&cliEnv, THE_CONFIG_FILE_NAME, OESAPI_CFG_DEFAULT_SECTION_LOGGER, OESAPI_CFG_DEFAULT_SECTION, OESAPI_CFG_DEFAULT_KEY_ORD_ADDR, OESAPI_CFG_DEFAULT_KEY_RPT_ADDR, OESAPI_CFG_DEFAULT_KEY_QRY_ADDR, 0, (int32 *) NULL)) { return -1; } } /* 3. 
创建回报接收进程 */ #if ! (defined (__WINDOWS__) || defined (__MINGW__)) { pthread_t rptThreadId; retCode = pthread_create(&rptThreadId, NULL, (void* (*)(void *)) OesApiSample_ReportThreadMain, &cliEnv); if (retCode != 0) { fprintf(stderr, "创建回报接收线程失败! error[%d]\n", retCode); goto ON_ERROR; } } #else { HANDLE rptThreadId; rptThreadId = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE) OesApiSample_ReportThreadMain, (LPVOID) &cliEnv, 0, NULL); if (rptThreadId == NULL) { fprintf(stderr, "创建回报接收线程失败! error[%lu]\n", GetLastError()); goto ON_ERROR; } } #endif /* * 4. 查询接口使用样例 * - 查询接口分为单条查询和批量查询两类 * - 单条查询直接返回查询结果(返回值标识查询是否成功) * - 批量查询以回调方式返回查询结果(返回值除标识是否成功外, 还代表查询到的总条数) * - 查询到的总条数为0时不会触发回调 */ { /* 查询 客户端总览信息 */ _OesApiSample_QueryClientOverview(&cliEnv.qryChannel); /* 查询 期权结算单信息 */ _OesApiSample_QueryOptSettlementStatement(&cliEnv.qryChannel, NULL); /* 查询 所有关联资金账户的资金信息 */ _OesApiSample_QueryCashAsset(&cliEnv.qryChannel, NULL); /* 查询 指定资金账户的资金信息 */ /* _OesApiSample_QueryCashAsset(&cliEnv.qryChannel, "指定资金账户"); */ /* 查询 通知消息 */ _OesApiSample_QueryNotifyInfo(&cliEnv.qryChannel, NULL, 0); /* 查询 期权标的持仓 */ _OesApiSample_QueryOptUnderlyingHolding(&cliEnv.qryChannel, OES_MKT_ID_UNDEFINE, NULL); /* 查询 期权限仓额度 */ _OesApiSample_QueryOptPositionLimit(&cliEnv.qryChannel, OES_MKT_ID_UNDEFINE, NULL); /* 查询 期权限购额度 */ _OesApiSample_QueryOptPurchaseLimit(&cliEnv.qryChannel, OES_MKT_ID_UNDEFINE); /* 查询 上海期权市场 指定期权产品(_SH_OPT_SECURITY_ID) 的产品信息 */ _OesApiSample_QueryOption(&cliEnv.qryChannel, _SH_OPT_SECURITY_ID, OES_MKT_ID_UNDEFINE); /* 查询 上海期权市场 全部 的产品信息 */ //_OesApiSample_QueryOption(&cliEnv.qryChannel, NULL, OES_MKT_SH_OPTION); /* 查询 上海期权市场 指定期权产品(_SH_OPT_SECURITY_ID) 的权利仓持仓 */ _OesApiSample_QueryOptHolding(&cliEnv.qryChannel, OES_MKT_SH_OPTION, OES_OPT_POSITION_TYPE_LONG, _SH_OPT_SECURITY_ID); /* 查询 上海期权市场 指定期权产品(_SH_OPT_SECURITY_ID) 的所有持仓 */ _OesApiSample_QueryOptHolding(&cliEnv.qryChannel, OES_MKT_SH_OPTION, OES_OPT_POSITION_TYPE_UNDEFINE, _SH_OPT_SECURITY_ID); /* 查询 上海期权市场的所有持仓 */ 
_OesApiSample_QueryOptHolding(&cliEnv.qryChannel, OES_MKT_SH_OPTION, OES_OPT_POSITION_TYPE_UNDEFINE, NULL); } /* * 5. 委托接口使用样例 * - 委托接口分为单笔委托申报和批量委托申报 * - 委托申报为单向异步方式发送, 申报处理结果将通过回报数据返回 */ { /* * 期权结算单确认 * - 期权客户结算单确认后, 方可进行委托申报和出入金请求 * - 期权结算单只需确认一次, 不需要重复确认 (需自行识别重复确认的错误码) * - 客户端仅关联一个客户时, 可不指定客户代码; 否则需指定待确认的客户代码 */ retCode = _OesApiSample_SendOptSettlementConfirmReq(&cliEnv.ordChannel, NULL); if (retCode != 0) { if (OesApi_IsErrorOf(retCode, OESERR_SETTLEMENT_REPEAT_CONFIRM)) { fprintf(stdout, "期权结算单已经确认, 无需重复确认!\n"); } else { /* 结算单确认失败时直接退出 */ fprintf(stderr, "期权结算单确认失败, 退出程序! errCode[%d], errMsg[%s]\n", retCode, OesApi_GetErrorMsg(retCode)); goto END; } } /* * 上海期权市场的买开 * - 以 0.5元 买开 指定期权产品(_SH_OPT_SECURITY_ID) 1张 * - 此处需自行配置交易的期权产品代码和对应的价格 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_SECURITY_ID, NULL, OES_ORD_TYPE_LMT, OES_BS_TYPE_BUY_OPEN, 1, 5000); /* * 上海期权市场的卖平 * - 以 市价 卖平 指定期权产品(_SH_OPT_SECURITY_ID) 1张 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_SECURITY_ID, NULL, OES_ORD_TYPE_SHOPT_FOK, OES_BS_TYPE_SELL_CLOSE, 1, 0); /* * 上海期权市场的标的锁定 * - 锁定 期权产品(_SH_OPT_SECURITY_ID) 对应的标的证券(_SH_OPT_UNDERLYING_SECURITY_ID) 10000 股 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_UNDERLYING_SECURITY_ID, NULL, OES_ORD_TYPE_LMT, OES_BS_TYPE_UNDERLYING_FREEZE, 10000, 0); /* * 上海期权市场的备兑开仓 * - 以 市价 备兑开仓 指定期权产品(_SH_OPT_SECURITY_ID) 1张 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_SECURITY_ID, NULL, OES_ORD_TYPE_SHOPT_FOK, OES_BS_TYPE_COVERED_OPEN, 1, 0); /* * 上海期权市场的备兑平仓 * - 以 市价 备兑平仓 指定期权产品(_SH_OPT_SECURITY_ID) 1张 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_SECURITY_ID, NULL, OES_ORD_TYPE_SHOPT_FOK, OES_BS_TYPE_COVERED_CLOSE, 1, 0); /* * 上海期权市场的标的解锁 * - 解锁 期权产品(_SH_OPT_SECURITY_ID) 对应的的标的证券(_SH_OPT_UNDERLYING_SECURITY_ID) 10000 股 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, 
_SH_OPT_UNDERLYING_SECURITY_ID, NULL, OES_ORD_TYPE_LMT, OES_BS_TYPE_UNDERLYING_UNFREEZE, 10000, 0); /* * 上海期权市场的期权行权 * - 行权 指定期权产品(_SH_OPT_SECURITY_ID) 1 张 */ _OesApiSample_SendOrderReq(&cliEnv.ordChannel, OES_MKT_SH_OPTION, _SH_OPT_SECURITY_ID, NULL, OES_ORD_TYPE_LMT, OES_BS_TYPE_OPTION_EXERCISE, 1, 0); } /* * 6. 撤单接口使用样例 * - 可以通过指定"待撤订单的客户订单编号(origClOrdId)"予以撤单 * - 可以通过指定"待撤订单的客户委托流水号(origClSeqNo)"予以撤单 * - 需结合"待撤订单的客户端环境号(origClEnvId)", 不指定时使用当前会话的clEnvId * - 如下交易类型不支持撤单: * - 上海期权市场的标的锁定/解锁 */ { /* 定义 origOrder 作为模拟的待撤委托 */ OesOrdCnfmT origOrder = {NULLOBJ_OES_ORD_CNFM}; origOrder.mktId = OES_MKT_SH_OPTION; origOrder.clEnvId = 0; origOrder.clSeqNo = 11; origOrder.clOrdId = 111; /* 真实场景中,待撤委托的clOrdId需要通过回报消息获取 */ /* 通过待撤委托的 clOrdId 进行撤单 */ _OesApiSample_SendOrderCancelReq(&cliEnv.ordChannel, origOrder.mktId, NULL, NULL, 0, 0, origOrder.clOrdId); /* 通过待撤委托的 clSeqNo 进行撤单 */ _OesApiSample_SendOrderCancelReq(&cliEnv.ordChannel, origOrder.mktId, NULL, NULL, origOrder.clSeqNo, origOrder.clEnvId, 0); } END: /* 7. 通知并等待回报线程退出 (实际场景中请勿参考此部分代码) */ { /* 等待回报消息接收完成 */ SPK_SLEEP_MS(1000); /* 设置回报线程退出标志 */ *((volatile int32 *) &cliEnv.rptChannel.__customFlag) = 1; /* 回报线程将标志设置为-1后退出, 父进程再释放资源 */ while(*((volatile int32 *) &cliEnv.rptChannel.__customFlag) != -1) { SPK_SLEEP_MS(1000); } } /* 8. 发送注销消息, 并释放会话数据 */ fprintf(stdout, "\n运行结束, 即将退出...\n\n"); OesApi_LogoutAll(&cliEnv, TRUE); return 0; ON_ERROR: /* 直接关闭连接, 并释放会话数据 */ OesApi_DestoryAll(&cliEnv); return -1; } int main(int argc, char *argv[]) { return OesApiSample_Main(); }
33,723
384
package org.holoeverywhere.app;

import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v4.app._HoloActivity;
import android.view.KeyEvent;
import android.view.View;

import org.holoeverywhere.ThemeManager;
import org.holoeverywhere.addon.Addons;
import org.holoeverywhere.addon.IAddon;
import org.holoeverywhere.addon.IAddonActivity;
import org.holoeverywhere.addon.IAddonBasicAttacher;

import java.util.Arrays;
import java.util.Collection;

/**
 * Base activity that routes every lifecycle event and window interaction
 * through the attached {@link IAddonActivity} addons before (or after)
 * delegating to the framework.
 * <p>
 * The ordering of {@code super.*} calls relative to the addon callbacks is
 * deliberate and asymmetric: "opening" events (onResume, onStart, ...) call
 * super first and notify addons afterwards, while "closing" events (onPause,
 * onStop, onDestroy, ...) notify addons first and call super last.
 */
public abstract class Activity extends _HoloActivity {
    // Maintains the set of attached addons and dispatches actions to them.
    private final IAddonBasicAttacher<IAddonActivity, Activity> mAttacher =
            new IAddonBasicAttacher<IAddonActivity, Activity>(this);
    // Reused callback objects for the two hot paths (findViewById /
    // dispatchKeyEvent) to avoid allocating a new callback per call.
    private final FindViewAction mFindViewAction = new FindViewAction();
    private final KeyEventAction mKeyEventAction = new KeyEventAction();
    // True when this instance was restarted by ThemeManager (theme switch).
    private boolean mCreatedByThemeManager = false;

    @Override
    public <T extends IAddonActivity> T addon(Class<? extends IAddon> clazz) {
        return mAttacher.addon(clazz);
    }

    @Override
    public void addon(Collection<Class<? extends IAddon>> classes) {
        mAttacher.addon(classes);
    }

    @Override
    public <T extends IAddonActivity> T addon(String classname) {
        return mAttacher.addon(classname);
    }

    @Override
    public void closeOptionsMenu() {
        // Let an addon consume the request; fall back to the framework
        // implementation only if none does.
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.closeOptionsMenu();
            }

            @Override
            public void justPost() {
                Activity.super.closeOptionsMenu();
            }
        });
    }

    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
        // Reuses mKeyEventAction; addons get the event first, the framework
        // handles it via post() when no addon consumes it.
        mKeyEventAction.mEvent = event;
        return performAddonAction(mKeyEventAction);
    }

    @Override
    public View findViewById(int id) {
        // Addons may host their own view hierarchies (e.g. drawers), so they
        // get the first chance to resolve the id; the window is the fallback.
        requestDecorView();
        mFindViewAction.mView = null;
        mFindViewAction.mId = id;
        performAddonAction(mFindViewAction);
        return mFindViewAction.mView;
    }

    /**
     * Resolves the instance state to restore from: the supplied saved state
     * if present, otherwise a state bundle smuggled through the launch intent
     * by ThemeManager (used across theme-change restarts), otherwise null.
     */
    public Bundle instanceState(Bundle savedInstanceState) {
        if (savedInstanceState != null) {
            return savedInstanceState;
        }
        Bundle extras = getIntent().getExtras();
        if (extras != null && extras.containsKey(ThemeManager.KEY_INSTANCE_STATE)) {
            return extras.getBundle(ThemeManager.KEY_INSTANCE_STATE);
        }
        return null;
    }

    @Override
    public boolean isAddonAttached(Class<? extends IAddon> clazz) {
        return mAttacher.isAddonAttached(clazz);
    }

    public boolean isCreatedByThemeManager() {
        return mCreatedByThemeManager;
    }

    @Override
    public void lockAttaching() {
        mAttacher.lockAttaching();
    }

    @Override
    public Collection<Class<? extends IAddon>> obtainAddonsList() {
        return mAttacher.obtainAddonsList();
    }

    @Override
    protected void onActivityResult(final int requestCode, final int resultCode,
            final Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onActivityResult(requestCode, resultCode, data);
            }
        });
    }

    @Override
    public void onConfigurationChanged(final Configuration newConfig) {
        // Capture the previous configuration BEFORE super updates resources,
        // so addons receive both old and new configs.
        final Configuration oldConfig = getResources().getConfiguration();
        super.onConfigurationChanged(newConfig);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onConfigurationChanged(oldConfig, newConfig);
            }
        });
    }

    @Override
    public void onSupportContentChanged() {
        super.onSupportContentChanged();
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onContentChanged();
            }
        });
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Restore either the real saved state or the one passed through the
        // intent by ThemeManager (theme-switch restart).
        final Bundle state = instanceState(savedInstanceState);
        mCreatedByThemeManager = getIntent().getBooleanExtra(
                ThemeManager.KEY_CREATED_BY_THEME_MANAGER, false);
        // Inherit application-level addons, then run addon onPreCreate before
        // super.onCreate and addon onCreate after it.
        mAttacher.inhert(getSupportApplication());
        forceInit(state);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onPreCreate(state);
            }
        });
        super.onCreate(state);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onCreate(state);
            }
        });
    }

    @Override
    protected void onDestroy() {
        // Addons are torn down before the framework destroys the activity.
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onDestroy();
            }
        });
        super.onDestroy();
    }

    @Override
    public boolean onSupportNavigateUp() {
        return performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.onNavigateUp();
            }

            @Override
            public boolean post() {
                return Activity.super.onSupportNavigateUp();
            }
        });
    }

    @Override
    protected void onNewIntent(final Intent intent) {
        super.onNewIntent(intent);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onNewIntent(intent);
            }
        });
    }

    @Override
    public void onPanelClosed(final int featureId, final android.view.Menu menu) {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onPanelClosed(featureId, menu);
            }
        });
        super.onPanelClosed(featureId, menu);
    }

    @Override
    protected void onPause() {
        // Closing event: addons first, then super.
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onPause();
            }
        });
        super.onPause();
    }

    @Override
    protected void onPostCreate(Bundle sSavedInstanceState) {
        // Same state-resolution rules as onCreate (see instanceState).
        final Bundle savedInstanceState = instanceState(sSavedInstanceState);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onPostCreate(savedInstanceState);
            }
        });
        super.onPostCreate(savedInstanceState);
    }

    @Override
    protected void onPostInit(Holo config, Bundle savedInstanceState) {
        // After init completes no further addons may be attached.
        lockAttaching();
    }

    @Override
    protected void onPostResume() {
        super.onPostResume();
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onPostResume();
            }
        });
    }

    @Override
    protected void onPreInit(Holo config, Bundle savedInstanceState) {
        // Attach addons declared via @Addons-style annotations on the class.
        IAddonBasicAttacher.attachAnnotations(this);
    }

    @Override
    public boolean onPreparePanel(final int featureId, final View view,
            final android.view.Menu menu) {
        return performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.onPreparePanel(featureId, view, menu);
            }

            @Override
            public boolean post() {
                return Activity.super.onPreparePanel(featureId, view, menu);
            }
        });
    }

    @Override
    protected void onRestart() {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onRestart();
            }
        });
        super.onRestart();
    }

    @Override
    protected void onResume() {
        // Opening event: super first, then addons.
        super.onResume();
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onResume();
            }
        });
    }

    @Override
    protected void onSaveInstanceState(final Bundle outState) {
        super.onSaveInstanceState(outState);
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onSaveInstanceState(outState);
            }
        });
    }

    @Override
    protected void onStart() {
        super.onStart();
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onStart();
            }
        });
    }

    @Override
    protected void onStop() {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onStop();
            }
        });
        super.onStop();
    }

    @Override
    protected void onTitleChanged(final CharSequence title, final int color) {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public void justAction(IAddonActivity addon) {
                addon.onTitleChanged(title, color);
            }
        });
        super.onTitleChanged(title, color);
    }

    @Override
    public void openOptionsMenu() {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.openOptionsMenu();
            }

            @Override
            public void justPost() {
                Activity.super.openOptionsMenu();
            }
        });
    }

    @Override
    public boolean performAddonAction(AddonCallback<IAddonActivity> callback) {
        return mAttacher.performAddonAction(callback);
    }

    @Override
    public void requestWindowFeature(long featureIdLong) {
        super.requestWindowFeature(featureIdLong);
        final int featureId = (int) featureIdLong;
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.requestWindowFeature(featureId);
            }

            @Override
            public void justPost() {
                // NOTE(review): this re-invokes requestWindowFeature with an
                // int argument — presumably resolving to the framework's
                // requestWindowFeature(int) overload rather than recursing
                // into this long-parameter override; confirm against
                // _HoloActivity's overload set.
                requestWindowFeature(featureId);
            }
        });
    }

    /**
     * Snapshots the current instance state into a fresh Bundle (as used by
     * ThemeManager when restarting the activity); returns null when empty.
     */
    public Bundle saveInstanceState() {
        Bundle bundle = new Bundle(getClassLoader());
        onSaveInstanceState(bundle);
        return bundle.size() > 0 ? bundle : null;
    }

    @Override
    public void supportInvalidateOptionsMenu() {
        performAddonAction(new AddonCallback<IAddonActivity>() {
            @Override
            public boolean action(IAddonActivity addon) {
                return addon.invalidateOptionsMenu();
            }

            @Override
            public void justPost() {
                Activity.super.supportInvalidateOptionsMenu();
            }
        });
    }

    /** Reusable callback that resolves a view id via addons, then the window. */
    private final class FindViewAction extends AddonCallback<IAddonActivity> {
        private int mId;
        private View mView;

        @Override
        public boolean action(IAddonActivity addon) {
            // Stop at the first addon that can resolve the id.
            return (mView = addon.findViewById(mId)) != null;
        }

        @Override
        public boolean post() {
            return (mView = getWindow().findViewById(mId)) != null;
        }
    }

    /** Reusable callback that offers a key event to addons before the super. */
    private final class KeyEventAction extends AddonCallback<IAddonActivity> {
        private KeyEvent mEvent;

        @Override
        public boolean action(IAddonActivity addon) {
            return addon.dispatchKeyEvent(mEvent);
        }

        @Override
        public boolean post() {
            return Activity.super.dispatchKeyEvent(mEvent);
        }
    }
}
5,508
408
from collections import defaultdict

import numpy as np
import tempfile


def exprsco_to_midi(exprsco):
  """Converts an expressive score to a binary MIDI blob.

  Args:
    exprsco: Tuple of (rate, nsamps, score) where score is a
      (time, 4, 3) uint8 array of (note, velocity, timbre) rows for the
      four NES channels p1/p2/tr/no. (Shape inferred from the
      np.split(..., 4, axis=1) below -- confirm against callers.)

  Returns:
    The raw bytes of a MIDI file encoding the score. Note-on/off events
    carry pitch and initial velocity; mid-note velocity and timbre
    changes are encoded as CC 11 and CC 12 respectively.
  """
  import pretty_midi

  rate, nsamps, exprsco = exprsco

  # Create one MIDI instrument per NES channel; noise goes on a drum track.
  p1_prog = pretty_midi.instrument_name_to_program('Lead 1 (square)')
  p2_prog = pretty_midi.instrument_name_to_program('Lead 2 (sawtooth)')
  tr_prog = pretty_midi.instrument_name_to_program('Synth Bass 1')
  no_prog = pretty_midi.instrument_name_to_program('Breath Noise')
  p1 = pretty_midi.Instrument(program=p1_prog, name='p1', is_drum=False)
  p2 = pretty_midi.Instrument(program=p2_prog, name='p2', is_drum=False)
  tr = pretty_midi.Instrument(program=tr_prog, name='tr', is_drum=False)
  no = pretty_midi.Instrument(program=no_prog, name='no', is_drum=True)

  # Iterate through score to extract per-channel note intervals and CCs.
  notes = {}
  ccs = {}
  for i, ch in enumerate(np.split(exprsco, 4, axis=1)):
    ch = ch[:, 0, :]

    # MIDI doesn't allow velocity 0 messages so set tr (channel 2) velocity
    # to 1; the triangle channel has no volume control on the NES anyway.
    if i == 2:
      ch[:, 1] = 1
      last_velocity = 1
    else:
      last_velocity = 0

    last_note = 0
    last_timbre = 0
    note_starts = []
    note_ends = []
    ch_ccs = []
    for s, (note, velocity, timbre) in enumerate(ch):
      if note != last_note:
        # Note boundary: close the previous note and/or open a new one.
        if note == 0:
          note_ends.append(s)
        else:
          if last_note == 0:
            note_starts.append((s, note, velocity))
          else:
            note_ends.append(s)
            note_starts.append((s, note, velocity))
      else:
        # Same note held: express parameter changes as control changes.
        if velocity != last_velocity:
          ch_ccs.append((s, 11, velocity))
        if timbre != last_timbre:
          ch_ccs.append((s, 12, timbre))
      last_note = note
      last_velocity = velocity
      last_timbre = timbre
    # Close a note still sounding at the end of the score. NOTE(review):
    # this reuses loop variable s, so the score must be non-empty.
    if last_note != 0:
      note_ends.append(s + 1)
    assert len(note_starts) == len(note_ends)
    # Materialize as a list: in Python 3, zip() is a one-shot iterator.
    # (Was bare zip() -- fine in Python 2, fragile in Python 3.)
    notes[i] = list(zip(note_starts, note_ends))
    ccs[i] = ch_ccs

  # Add notes and control changes to the MIDI instruments.
  for i, ins in enumerate([p1, p2, tr, no]):
    for (start_samp, note, velocity), end_samp in notes[i]:
      assert end_samp > start_samp
      start_t, end_t = start_samp / 44100., end_samp / 44100.
      midi_note = pretty_midi.Note(velocity=velocity, pitch=note,
                                   start=start_t, end=end_t)
      ins.notes.append(midi_note)
    for samp, cc_num, arg in ccs[i]:
      cc = pretty_midi.ControlChange(cc_num, arg, samp / 44100.)
      ins.control_changes.append(cc)

  # Add instruments to MIDI file.
  midi = pretty_midi.PrettyMIDI(initial_tempo=120, resolution=22050)
  midi.instruments.extend([p1, p2, tr, no])

  # A 1/1 time signature at t = nsamps/44100 marks the end of the song so
  # that midi_to_exprsco can recover nsamps exactly.
  eos = pretty_midi.TimeSignature(1, 1, nsamps / 44100.)
  midi.time_signature_changes.append(eos)

  # Round-trip through a temp file because pretty_midi only writes to paths.
  mf = tempfile.NamedTemporaryFile('rb')
  midi.write(mf.name)
  midi = mf.read()
  mf.close()

  return midi


def midi_to_exprsco(midi):
  """Inverse of exprsco_to_midi: decodes a MIDI blob back to a score.

  Args:
    midi: Raw bytes of a MIDI file produced by exprsco_to_midi.

  Returns:
    Tuple of (44100, nsamps, score) where score is a (nsamps, 4, 3)
    uint8 array of (note, velocity, timbre) per channel.
  """
  import pretty_midi

  # pretty_midi only reads from paths, so round-trip through a temp file.
  mf = tempfile.NamedTemporaryFile('wb')
  mf.write(midi)
  mf.seek(0)
  midi = pretty_midi.PrettyMIDI(mf.name)
  mf.close()

  # Recover number of samples from the end-of-song time signature marker.
  assert len(midi.time_signature_changes) == 2
  nsamps = int(np.round(midi.time_signature_changes[1].time * 44100))

  # Find voices in MIDI by instrument name.
  exprsco = np.zeros((nsamps, 4, 3), dtype=np.uint8)
  ins_names = ['p1', 'p2', 'tr', 'no']
  for ins in midi.instruments:
    ch = ins_names.index(ins.name)

    # Gather note on/off commands indexed by sample offset.
    comms = defaultdict(list)
    for note in ins.notes:
      start = int(np.round(note.start * 44100))
      end = int(np.round(note.end * 44100))
      # Triangle (ch 2) velocity was forced to 1 on encode; restore 0.
      velocity = note.velocity if ch != 2 else 0
      note = note.pitch
      comms[start].append(('note_on', note, velocity))
      comms[end].append(('note_off',))

    # Gather CC commands: 11 = velocity change, 12 = timbre change.
    for cc in ins.control_changes:
      samp = int(np.round(cc.time * 44100))
      if cc.number == 11:
        velocity = cc.value
        assert velocity > 0
        comms[samp].append(('cc_velo', velocity))
      elif cc.number == 12:
        timbre = cc.value
        comms[samp].append(('cc_timbre', timbre))
      else:
        assert False

    # Replay commands sample-by-sample to reconstruct the channel state.
    # (Was xrange -- Python 2 only; range is equivalent here and works on
    # both Python 2 and 3.)
    note = 0
    velocity = 0
    timbre = 0
    for i in range(nsamps):
      for comm in comms[i]:
        if comm[0] == 'note_on':
          note = comm[1]
          velocity = comm[2]
        elif comm[0] == 'note_off':
          note = 0
          velocity = 0
        elif comm[0] == 'cc_velo':
          velocity = comm[1]
        elif comm[0] == 'cc_timbre':
          timbre = comm[1]
        else:
          assert False
      exprsco[i, ch] = (note, velocity, timbre)

  return 44100, nsamps, exprsco
2,073
4,812
//===-- llvm/Support/CRC.h - Cyclic Redundancy Check-------------*- C++ -*-===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This file contains implementations of CRC functions. // //===----------------------------------------------------------------------===// #ifndef LLVM_SUPPORT_CRC_H #define LLVM_SUPPORT_CRC_H #include "llvm/Support/DataTypes.h" namespace llvm { template <typename T> class ArrayRef; // Compute the CRC-32 of Data. uint32_t crc32(ArrayRef<uint8_t> Data); // Compute the running CRC-32 of Data, with CRC being the previous value of the // checksum. uint32_t crc32(uint32_t CRC, ArrayRef<uint8_t> Data); // Class for computing the JamCRC. // // We will use the "Rocksoft^tm Model CRC Algorithm" to describe the properties // of this CRC: // Width : 32 // Poly : 04C11DB7 // Init : FFFFFFFF // RefIn : True // RefOut : True // XorOut : 00000000 // Check : 340BC6D9 (result of CRC for "123456789") // // In other words, this is the same as CRC-32, except that XorOut is 0 instead // of FFFFFFFF. // // N.B. We permit flexibility of the "Init" value. Some consumers of this need // it to be zero. class JamCRC { public: JamCRC(uint32_t Init = 0xFFFFFFFFU) : CRC(Init) {} // Update the CRC calculation with Data. void update(ArrayRef<uint8_t> Data); uint32_t getCRC() const { return CRC; } private: uint32_t CRC; }; } // end namespace llvm #endif
562
575
// Copyright (c) 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_PARTITION_ALLOC_HOOKS_H_ #define BASE_ALLOCATOR_PARTITION_ALLOCATOR_PARTITION_ALLOC_HOOKS_H_ #include <atomic> #include <cstddef> #include "base/allocator/partition_allocator/partition_alloc_check.h" #include "base/base_export.h" namespace base { // PartitionAlloc supports setting hooks to observe allocations/frees as they // occur as well as 'override' hooks that allow overriding those operations. class BASE_EXPORT PartitionAllocHooks { public: // Log allocation and free events. typedef void AllocationObserverHook(void* address, size_t size, const char* type_name); typedef void FreeObserverHook(void* address); // If it returns true, the allocation has been overridden with the pointer in // *out. typedef bool AllocationOverrideHook(void** out, int flags, size_t size, const char* type_name); // If it returns true, then the allocation was overridden and has been freed. typedef bool FreeOverrideHook(void* address); // If it returns true, the underlying allocation is overridden and *out holds // the size of the underlying allocation. typedef bool ReallocOverrideHook(size_t* out, void* address); // To unhook, call Set*Hooks with nullptrs. static void SetObserverHooks(AllocationObserverHook* alloc_hook, FreeObserverHook* free_hook); static void SetOverrideHooks(AllocationOverrideHook* alloc_hook, FreeOverrideHook* free_hook, ReallocOverrideHook realloc_hook); // Helper method to check whether hooks are enabled. This is an optimization // so that if a function needs to call observer and override hooks in two // different places this value can be cached and only loaded once. 
static bool AreHooksEnabled() { return hooks_enabled_.load(std::memory_order_relaxed); } static void AllocationObserverHookIfEnabled(void* address, size_t size, const char* type_name); static bool AllocationOverrideHookIfEnabled(void** out, int flags, size_t size, const char* type_name); static void FreeObserverHookIfEnabled(void* address); static bool FreeOverrideHookIfEnabled(void* address); static void ReallocObserverHookIfEnabled(void* old_address, void* new_address, size_t size, const char* type_name); static bool ReallocOverrideHookIfEnabled(size_t* out, void* address); private: // Single bool that is used to indicate whether observer or allocation hooks // are set to reduce the numbers of loads required to check whether hooking is // enabled. static std::atomic<bool> hooks_enabled_; // Lock used to synchronize Set*Hooks calls. static std::atomic<AllocationObserverHook*> allocation_observer_hook_; static std::atomic<FreeObserverHook*> free_observer_hook_; static std::atomic<AllocationOverrideHook*> allocation_override_hook_; static std::atomic<FreeOverrideHook*> free_override_hook_; static std::atomic<ReallocOverrideHook*> realloc_override_hook_; }; } // namespace base #endif // BASE_ALLOCATOR_PARTITION_ALLOCATOR_PARTITION_ALLOC_HOOKS_H_
1,582
336
<filename>fork-runner/src/main/java/com/shazam/fork/runner/ReporterBasedFailedTestScheduler.java package com.shazam.fork.runner; import com.shazam.fork.model.Pool; import com.shazam.fork.model.TestCaseEvent; import java.util.Queue; public class ReporterBasedFailedTestScheduler implements FailedTestScheduler { private final ProgressReporter progressReporter; private final Pool pool; private final Queue<TestCaseEvent> queueOfTestsInPool; public ReporterBasedFailedTestScheduler(ProgressReporter progressReporter, Pool pool, Queue<TestCaseEvent> queueOfTestsInPool) { this.progressReporter = progressReporter; this.pool = pool; this.queueOfTestsInPool = queueOfTestsInPool; } @Override public boolean rescheduleTestExecution(TestCaseEvent testCaseEvent) { progressReporter.recordFailedTestCase(pool, testCaseEvent); if (progressReporter.requestRetry(pool, testCaseEvent)) { queueOfTestsInPool.add(testCaseEvent); return true; } return false; } }
479
317
<reponame>philippQubit/hbase-indexer<filename>hbase-indexer-engine/src/test/java/com/ngdata/hbaseindexer/parse/ByteArrayValueMappersTest.java /* * Copyright 2013 NGDATA nv * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ngdata.hbaseindexer.parse; import static org.junit.Assert.assertEquals; import java.math.BigDecimal; import java.util.Collection; import java.util.Collections; import com.google.common.collect.Lists; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; public class ByteArrayValueMappersTest { @Test public void testGetValueMapper_Int() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("int"); assertEquals(Lists.newArrayList(42), mapper.map(Bytes.toBytes(42))); } @Test public void testMapValue_InvalidEncoding() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("long"); assertEquals(Collections.emptyList(), mapper.map(Bytes.toBytes(42))); } @Test public void testGetValueMapper_Long() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("long"); assertEquals(Lists.newArrayList(Long.MAX_VALUE), mapper.map(Bytes.toBytes(Long.MAX_VALUE))); } @Test public void testGetValueMapper_String() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("string"); assertEquals(Lists.newArrayList("forty-two"), mapper.map(Bytes.toBytes("forty-two"))); } @Test public void testGetValueMapper_Boolean() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("boolean"); assertEquals(Lists.newArrayList(true), 
mapper.map(Bytes.toBytes(true))); } @Test public void testGetValueMapper_Float() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("float"); assertEquals(Lists.newArrayList(4.2f), mapper.map(Bytes.toBytes(4.2f))); } @Test public void testGetValueMapper_Double() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("double"); assertEquals(Lists.newArrayList(Math.PI), mapper.map(Bytes.toBytes(Math.PI))); } @Test public void testGetValueMapper_Short() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("short"); assertEquals(Lists.newArrayList((short)42), mapper.map(Bytes.toBytes((short)42))); } @Test public void testGetValueMapper_BigDecimal() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper("bigdecimal"); assertEquals(Lists.newArrayList(new BigDecimal("1.234")), mapper.map(Bytes.toBytes(new BigDecimal("1.234")))); } @Test public void testGetValueMapper_CustomMapperClass() { ByteArrayValueMapper mapper = ByteArrayValueMappers.getMapper(MockValueMapper.class.getName()); assertEquals(Lists.newArrayList("A", "B", "C"), mapper.map(Bytes.toBytes("dummy value"))); } @Test(expected = IllegalArgumentException.class) public void testGetValueMapper_CustomMapperClass_NotMapperImplementation() { ByteArrayValueMappers.getMapper(String.class.getName()); } @Test(expected = IllegalArgumentException.class) public void testGetValueMapper_InvalidName() { ByteArrayValueMappers.getMapper("not.a.classname.or.primitive.Name"); } public static class MockValueMapper implements ByteArrayValueMapper { @Override public Collection<Object> map(byte[] input) { return Lists.<Object> newArrayList("A", "B", "C"); } } }
1,496
505
<filename>FAP_modules/io_board/stm32_code/Inc/helpers.h #ifndef __HELPERS_H #define __HELPERS_H #ifdef __cplusplus extern "C" { #endif #include "stm32f0xx_hal.h" #define LB_SIZE 1024 #define latch1_activate() HAL_GPIO_WritePin(LATCH1_GPIO_Port, LATCH1_Pin, GPIO_PIN_RESET) #define latch1_deactivate() HAL_GPIO_WritePin(LATCH1_GPIO_Port, LATCH1_Pin, GPIO_PIN_SET) #define latch16_activate() HAL_GPIO_WritePin(LATCH16_GPIO_Port, LATCH16_Pin, GPIO_PIN_SET) #define latch16_deactivate() HAL_GPIO_WritePin(LATCH16_GPIO_Port, LATCH16_Pin, GPIO_PIN_RESET) #define interrupt_deactivate() HAL_GPIO_WritePin(Z80_INT_GPIO_Port, Z80_INT_Pin, GPIO_PIN_SET) #define vect_load_activate() HAL_GPIO_WritePin(INTVECT_LOAD_GPIO_Port, INTVECT_LOAD_Pin, GPIO_PIN_SET) #define vect_load_deactivate() HAL_GPIO_WritePin(INTVECT_LOAD_GPIO_Port, INTVECT_LOAD_Pin, GPIO_PIN_RESET) typedef struct { int32_t last_recv; int32_t curr_index; char buf[LB_SIZE]; } linear_buf; int32_t linear_buf_init(linear_buf *lb); int32_t linear_buf_reset(linear_buf *lb); int32_t linear_buf_idle(linear_buf *lb); int32_t linear_buf_line_available(linear_buf *lb); int32_t linear_buf_add(linear_buf *lb, char c); int32_t linear_buf_add_str(linear_buf *lb, char *s, uint32_t len); int32_t get_arg(char* argstr, uint32_t index); void rtc_update(char* argstr); void print_rtc(void); void data_input(void); void addr_input(void); void data_output(void); void addr_output(void); void interrupt_activate(uint8_t vector); void preload16(uint8_t address, uint8_t data); #ifdef __cplusplus } #endif #endif
719
377
package at.favre.app.blurbenchmark.models;

import com.fasterxml.jackson.annotation.JsonIgnore;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import at.favre.app.blurbenchmark.blur.EBlurAlgorithm;

/**
 * This is the main logic on retrieving specific data from the benchmark database
 *
 * @author pfavre
 */
public class BenchmarkResultDatabase {
    private List<BenchmarkEntry> entryList = new ArrayList<>();

    public List<BenchmarkEntry> getEntryList() {
        return entryList;
    }

    public void setEntryList(List<BenchmarkEntry> entryList) {
        this.entryList = entryList;
    }

    /** Returns the first entry with the given name, or null if none matches. */
    @JsonIgnore
    public BenchmarkEntry getByName(String name) {
        for (BenchmarkEntry benchmarkEntry : entryList) {
            if (benchmarkEntry.getName().equals(name)) {
                return benchmarkEntry;
            }
        }
        return null;
    }

    /** Returns all entries whose category string equals the given category. */
    @JsonIgnore
    public List<BenchmarkEntry> getAllByCategory(String category) {
        List<BenchmarkEntry> list = new ArrayList<>();
        for (BenchmarkEntry benchmarkEntry : entryList) {
            if (benchmarkEntry.getCategory().equals(category)) {
                list.add(benchmarkEntry);
            }
        }
        return list;
    }

    /** Returns all entries measured with the given blur radius. */
    @JsonIgnore
    public List<BenchmarkEntry> getAllByBlurRadius(int radius) {
        List<BenchmarkEntry> list = new ArrayList<>();
        for (BenchmarkEntry benchmarkEntry : entryList) {
            if (benchmarkEntry.getRadius() == radius) {
                list.add(benchmarkEntry);
            }
        }
        return list;
    }

    /** Returns the distinct image sizes present in the database, sorted by resolution. */
    @JsonIgnore
    public TreeSet<ImageSize> getAllImageSizes() {
        TreeSet<ImageSize> set = new TreeSet<>();
        for (BenchmarkEntry benchmarkEntry : entryList) {
            set.add(benchmarkEntry.getAsImageSize());
        }
        return set;
    }

    /**
     * Returns the first entry matching image size, radius and algorithm
     * (the algorithm is read from the entry's first wrapper), or null.
     */
    @JsonIgnore
    public BenchmarkEntry getByImageSizeAndRadiusAndAlgorithm(String imageSize, int radius, EBlurAlgorithm algorithm) {
        // Note: the previous implementation allocated an unused list here.
        for (BenchmarkEntry benchmarkEntry : entryList) {
            if (benchmarkEntry.getImageSizeString().equals(imageSize) && benchmarkEntry.getRadius() == radius &&
                    !benchmarkEntry.getWrapper().isEmpty() && benchmarkEntry.getWrapper().get(0).getStatInfo().getAlgorithm().equals(algorithm)) {
                return benchmarkEntry;
            }
        }
        return null;
    }

    /** Returns the distinct blur radii present in the database, in ascending order. */
    @JsonIgnore
    public Set<Integer> getAllBlurRadii() {
        TreeSet<Integer> radii = new TreeSet<>();
        for (BenchmarkEntry benchmarkEntry : entryList) {
            radii.add(benchmarkEntry.getRadius());
        }
        return radii;
    }

    /**
     * Returns the first entry in the given category whose first wrapper was
     * produced by the given algorithm, or null.
     */
    @JsonIgnore
    public BenchmarkEntry getByCategoryAndAlgorithm(String category, EBlurAlgorithm algorithm) {
        for (BenchmarkEntry benchmarkEntry : entryList) {
            if (benchmarkEntry.getCategory().equals(category)) {
                if (!benchmarkEntry.getWrapper().isEmpty() && benchmarkEntry.getWrapper().get(0).getStatInfo().getAlgorithm().equals(algorithm)) {
                    return benchmarkEntry;
                }
            }
        }
        return null;
    }

    /**
     * Returns the most recent wrapper of the entry, or null if the entry is
     * null or empty. NOTE: sorts the entry's wrapper list in place as a side
     * effect (order defined by BenchmarkWrapper's Comparable implementation).
     */
    @JsonIgnore
    public static BenchmarkWrapper getRecentWrapper(BenchmarkEntry entry) {
        if (entry != null && !entry.getWrapper().isEmpty()) {
            Collections.sort(entry.getWrapper());
            return entry.getWrapper().get(0);
        } else {
            return null;
        }
    }

    /**
     * A single benchmark case (image size + radius + category), holding the
     * measured runs as wrappers. Identity is defined by {@link #getName()};
     * natural order is by image resolution.
     */
    public static class BenchmarkEntry implements Comparable<BenchmarkEntry> {
        private String name;
        private String category;
        private int radius;
        private int height;
        private int width;
        private List<BenchmarkWrapper> wrapper = new ArrayList<>();

        public BenchmarkEntry() {
        }

        public BenchmarkEntry(String name, String category, int radius, int height, int width, List<BenchmarkWrapper> wrapper) {
            this.name = name;
            this.category = category;
            this.wrapper = wrapper;
            this.radius = radius;
            this.height = height;
            this.width = width;
        }

        public BenchmarkEntry(BenchmarkWrapper benchmarkWrapper) {
            this(benchmarkWrapper.getStatInfo().getKeyString(), benchmarkWrapper.getStatInfo().getCategoryString(),
                    benchmarkWrapper.getStatInfo().getBlurRadius(), benchmarkWrapper.getStatInfo().getBitmapHeight(),
                    benchmarkWrapper.getStatInfo().getBitmapWidth(), new ArrayList<BenchmarkWrapper>());
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public List<BenchmarkWrapper> getWrapper() {
            return wrapper;
        }

        public void setWrapper(List<BenchmarkWrapper> wrapper) {
            this.wrapper = wrapper;
        }

        public String getCategory() {
            return category;
        }

        public void setCategory(String category) {
            this.category = category;
        }

        public int getRadius() {
            return radius;
        }

        public void setRadius(int radius) {
            this.radius = radius;
        }

        public int getHeight() {
            return height;
        }

        public void setHeight(int height) {
            this.height = height;
        }

        public int getWidth() {
            return width;
        }

        public void setWidth(int width) {
            this.width = width;
        }

        @JsonIgnore
        public Category getCategoryObj() {
            return new Category(getAsImageSize(), radius, category);
        }

        @JsonIgnore
        public Integer getResolution() {
            // Autoboxing replaces the deprecated `new Integer(...)` constructor.
            return height * width;
        }

        @JsonIgnore
        public String getImageSizeString() {
            return height + "x" + width;
        }

        @JsonIgnore
        public ImageSize getAsImageSize() {
            return new ImageSize(height, width, getImageSizeString());
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            BenchmarkEntry that = (BenchmarkEntry) o;

            return name != null ? name.equals(that.name) : that.name == null;
        }

        @Override
        public int hashCode() {
            return name != null ? name.hashCode() : 0;
        }

        @Override
        public int compareTo(BenchmarkEntry benchmarkEntry) {
            return getResolution().compareTo(benchmarkEntry.getResolution());
        }
    }

    /**
     * Value object keying a benchmark group: image size + radius + category
     * string. Ordered by resolution, then radius.
     */
    public static class Category implements Comparable<Category> {
        public final ImageSize imageSize;
        public final int radius;
        public final String category;

        public Category(ImageSize imageSize, int radius, String category) {
            this.imageSize = imageSize;
            this.radius = radius;
            this.category = category;
        }

        public ImageSize getImageSize() {
            return imageSize;
        }

        public Integer getRadius() {
            return radius;
        }

        public String getCategory() {
            return category;
        }

        @Override
        public int compareTo(Category category) {
            int resultResolution = imageSize.getResolution().compareTo(category.getImageSize().getResolution());
            if (resultResolution == 0) {
                return getRadius().compareTo(category.getRadius());
            } else {
                return resultResolution;
            }
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Category category = (Category) o;

            if (radius != category.radius) return false;
            return imageSize.equals(category.imageSize);
        }

        @Override
        public int hashCode() {
            int result = imageSize.hashCode();
            result = 31 * result + radius;
            return result;
        }
    }

    /**
     * Immutable image dimensions plus their display string. Equality is by
     * height and width; natural order is by resolution (height * width).
     */
    public static class ImageSize implements Comparable<ImageSize> {
        private final int height;
        private final int width;
        private final String imageSizeString;

        public ImageSize(int height, int width, String imageSizeString) {
            this.height = height;
            this.width = width;
            this.imageSizeString = imageSizeString;
        }

        public int getHeight() {
            return height;
        }

        public int getWidth() {
            return width;
        }

        public String getImageSizeString() {
            return imageSizeString;
        }

        public Integer getResolution() {
            return height * width;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            ImageSize imageSize = (ImageSize) o;

            if (height != imageSize.height) return false;
            return width == imageSize.width;
        }

        @Override
        public int hashCode() {
            int result = height;
            result = 31 * result + width;
            return result;
        }

        @Override
        public int compareTo(ImageSize imageSize) {
            return getResolution().compareTo(imageSize.getResolution());
        }
    }
}
4,254
2,151
<gh_stars>1000+ // Copyright 2018 The Feed Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.android.libraries.feed.basicstream.internal.drivers; import static com.google.common.truth.Truth.assertThat; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import com.google.android.libraries.feed.api.common.ThreadUtils; import com.google.android.libraries.feed.api.modelprovider.FeatureChange; import com.google.android.libraries.feed.api.modelprovider.ModelChild; import com.google.android.libraries.feed.api.modelprovider.ModelFeature; import com.google.android.libraries.feed.api.modelprovider.ModelProvider; import com.google.android.libraries.feed.api.modelprovider.ModelToken; import com.google.android.libraries.feed.api.modelprovider.TokenCompleted; import com.google.android.libraries.feed.api.modelprovider.TokenCompletedObserver; import com.google.android.libraries.feed.basicstream.internal.drivers.StreamDriver.StreamContentListener; import com.google.android.libraries.feed.basicstream.internal.drivers.testing.FakeFeatureDriver; import com.google.android.libraries.feed.host.config.Configuration; import com.google.android.libraries.feed.testing.modelprovider.FakeModelChild; import com.google.android.libraries.feed.testing.modelprovider.FakeModelCursor; import 
com.google.android.libraries.feed.testing.modelprovider.FakeModelFeature; import com.google.android.libraries.feed.testing.modelprovider.FakeModelToken; import com.google.common.collect.Lists; import com.google.search.now.feed.client.StreamDataProto.StreamFeature; import com.google.search.now.ui.stream.StreamStructureProto.Card; import com.google.search.now.ui.stream.StreamStructureProto.Cluster; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.robolectric.RobolectricTestRunner; /** Tests for {@link StreamDriver}. */ @RunWith(RobolectricTestRunner.class) public class StreamDriverTest { private StreamDriverForTest streamDriver; @Mock private ModelFeature streamFeature; @Mock private ModelProvider modelProvider; @Mock private ContinuationDriver continuationDriver; @Mock private StreamContentListener contentListener; private Configuration configuration = new Configuration.Builder().build(); @Before public void setup() { initMocks(this); ThreadUtils threadUtils = new ThreadUtils(); when(continuationDriver.getLeafFeatureDriver()).thenReturn(continuationDriver); when(modelProvider.getRootFeature()).thenReturn(streamFeature); streamDriver = new StreamDriverForTest(modelProvider, threadUtils); streamDriver.setStreamContentListener(contentListener); } @Test public void testBuildChildren() { when(streamFeature.getCursor()) .thenReturn(new FakeCursorBuilder().addCard().addCluster().addToken().build()); // Causes StreamDriver to build a list of children based on the children from the cursor. 
List<LeafFeatureDriver> leafFeatureDrivers = streamDriver.getLeafFeatureDrivers(); assertThat(leafFeatureDrivers).hasSize(3); assertThat(leafFeatureDrivers.get(0)).isEqualTo(getLeafFeatureDriverFromCard(0)); assertThat(leafFeatureDrivers.get(1)).isEqualTo(getLeafFeatureDriverFromCluster(1)); assertThat(leafFeatureDrivers.get(2)).isEqualTo(continuationDriver); } @Test public void testContinuationToken_createsContinuationContentModel() { when(streamFeature.getCursor()).thenReturn(new FakeCursorBuilder().addToken().build()); List<LeafFeatureDriver> leafFeatureDrivers = streamDriver.getLeafFeatureDrivers(); assertThat(leafFeatureDrivers).hasSize(1); assertThat(leafFeatureDrivers.get(0)).isEqualTo(continuationDriver); } @Test public void testContinuationToken_tokenHandling() { List<LeafFeatureDriver> finalContentModels = new TokenHandlingTestBuilder() .setInitialCursor(new FakeCursorBuilder().addToken().build()) .setTokenPayloadCursor(new FakeCursorBuilder().addCluster().build()) .setTokenIndex(0) .run(); assertThat(finalContentModels).hasSize(1); assertThat(finalContentModels.get(0)).isEqualTo(getLeafFeatureDriverFromCluster(1)); // If the above two assertions pass, this is also guaranteed to pass. This is just to explicitly // check that the ContinuationDriver has been removed. 
assertThat(finalContentModels).doesNotContain(continuationDriver); } @Test public void testContinuationToken_tokenHandling_notifiesObservers() { new TokenHandlingTestBuilder() .setInitialCursor(new FakeCursorBuilder().addToken().build()) .setTokenPayloadCursor(new FakeCursorBuilder().addCluster().build()) .setTokenIndex(0) .run(); verify(contentListener).notifyContentRemoved(0); verify(contentListener) .notifyContentsAdded(0, Lists.newArrayList(getLeafFeatureDriverFromCluster(1))); } @Test public void testContinuationToken_tokenChildrenAddedAtTokenPosition() { List<LeafFeatureDriver> finalContentModels = new TokenHandlingTestBuilder() .setInitialCursor(new FakeCursorBuilder().addCluster().addToken().build()) .setTokenPayloadCursor(new FakeCursorBuilder().addCluster().addToken().build()) .setTokenIndex(1) .run(); assertThat(finalContentModels).hasSize(3); assertThat(finalContentModels) .containsExactly( getLeafFeatureDriverFromCluster(0), getLeafFeatureDriverFromCluster(2), continuationDriver); } @Test public void testContinuationToken_tokenChildrenAddedAtTokenPosition_tokenNotAtEnd() { List<LeafFeatureDriver> finalContentModels = new TokenHandlingTestBuilder() .setInitialCursor(new FakeCursorBuilder().addCluster().addToken().addCluster().build()) .setTokenIndex(1) .setTokenPayloadCursor( new FakeCursorBuilder().addCluster().addCard().addCluster().build()) .run(); assertThat(finalContentModels).hasSize(5); assertThat(finalContentModels) .containsExactly( getLeafFeatureDriverFromCluster(0), getLeafFeatureDriverFromCluster(2), getLeafFeatureDriverFromCluster(3), getLeafFeatureDriverFromCard(4), getLeafFeatureDriverFromCluster(5)); } @Test public void testOnChange_remove() { FakeModelCursor fakeModelCursor = new FakeCursorBuilder().addCard().addCard().addCard().addCard().build(); when(streamFeature.getCursor()).thenReturn(fakeModelCursor); List<LeafFeatureDriver> leafFeatureDrivers = streamDriver.getLeafFeatureDrivers(); assertThat(leafFeatureDrivers).hasSize(4); 
streamDriver.onChange( new ChildRemoveBuilder() .addChildForRemoval(fakeModelCursor.getChildAt(1)) .addChildForRemoval(fakeModelCursor.getChildAt(2)) .build()); assertThat(streamDriver.getLeafFeatureDrivers()) .containsExactly(leafFeatureDrivers.get(0), leafFeatureDrivers.get(3)); } @Test public void testOnChange_remove_notifiesListener() { FakeModelCursor fakeModelCursor = new FakeCursorBuilder().addCard().addCard().build(); when(streamFeature.getCursor()).thenReturn(fakeModelCursor); // Causes StreamDriver to build a list of children based on the children from the cursor. streamDriver.getLeafFeatureDrivers(); streamDriver.onChange( new ChildRemoveBuilder().addChildForRemoval(fakeModelCursor.getChildAt(0)).build()); verify(contentListener).notifyContentRemoved(0); verifyNoMoreInteractions(contentListener); } // TODO: Instead of just checking that the ModelFeature is of the correct type, check // that it is the one created by the FakeCursorBuilder. private LeafFeatureDriver getLeafFeatureDriverFromCard(int i) { FakeFeatureDriver featureDriver = (FakeFeatureDriver) streamDriver.childrenCreated.get(i); assertThat(featureDriver.getModelFeature().getStreamFeature().hasCard()).isTrue(); return streamDriver.childrenCreated.get(i).getLeafFeatureDriver(); } // TODO: Instead of just checking that the ModelFeature is of the correct type, check // that it is the one created by the FakeCursorBuilder. 
private LeafFeatureDriver getLeafFeatureDriverFromCluster(int i) { FakeFeatureDriver featureDriver = (FakeFeatureDriver) streamDriver.childrenCreated.get(i); assertThat(featureDriver.getModelFeature().getStreamFeature().hasCluster()).isTrue(); return streamDriver.childrenCreated.get(i).getLeafFeatureDriver(); } private class StreamDriverForTest extends StreamDriver { // TODO: create a fake for ContinuationDriver so that this can be // List<FakeFeatureDriver> private List<FeatureDriver> childrenCreated; StreamDriverForTest(ModelProvider modelProvider, ThreadUtils threadUtils) { super(modelProvider, threadUtils, configuration); childrenCreated = new ArrayList<>(); } @Override ContinuationDriver createContinuationDriver( ModelProvider modelProvider, ModelToken modelToken, Configuration configuration) { childrenCreated.add(continuationDriver); return continuationDriver; } @Override FeatureDriver createClusterDriver(ModelFeature modelFeature) { FeatureDriver featureDriver = new FakeFeatureDriver.Builder().setModelFeature(modelFeature).build(); childrenCreated.add(featureDriver); return featureDriver; } @Override FeatureDriver createCardDriver(ModelFeature modelFeature) { FeatureDriver featureDriver = new FakeFeatureDriver.Builder().setModelFeature(modelFeature).build(); childrenCreated.add(featureDriver); return featureDriver; } } /** Sets up a {@link FakeModelCursor}. 
*/ private static class FakeCursorBuilder { List<ModelChild> cursorChildren = new ArrayList<>(); FakeCursorBuilder addCard() { ModelChild cardChild = new FakeModelChild.Builder() .setModelFeature( new FakeModelFeature.Builder() .setStreamFeature( StreamFeature.newBuilder().setCard(Card.getDefaultInstance()).build()) .build()) .build(); cursorChildren.add(cardChild); return this; } FakeCursorBuilder addCluster() { ModelChild clusterChild = new FakeModelChild.Builder() .setModelFeature( new FakeModelFeature.Builder() .setStreamFeature( StreamFeature.newBuilder() .setCluster(Cluster.getDefaultInstance()) .build()) .build()) .build(); cursorChildren.add(clusterChild); return this; } FakeCursorBuilder addToken() { ModelChild tokenChild = new FakeModelChild.Builder().setModelToken(new FakeModelToken.Builder().build()).build(); cursorChildren.add(tokenChild); return this; } public FakeModelCursor build() { return new FakeModelCursor(cursorChildren); } } /** * Sets up the interactions between the {@link StreamDriver} and {@link ModelProvider} to handle a * cursor that has one {@link ModelToken}. Then simulates the clicking of the {@link ModelToken}. */ private class TokenHandlingTestBuilder { private FakeModelCursor initialCursor; private FakeModelCursor payloadCursor; private int tokenIndex; TokenHandlingTestBuilder setInitialCursor(FakeModelCursor initialCursor) { this.initialCursor = initialCursor; return this; } TokenHandlingTestBuilder setTokenPayloadCursor(FakeModelCursor initialCursor) { this.payloadCursor = initialCursor; return this; } TokenHandlingTestBuilder setTokenIndex(int tokenIndex) { this.tokenIndex = tokenIndex; return this; } List<LeafFeatureDriver> run() { when(streamFeature.getCursor()).thenReturn(initialCursor); // Causes StreamDriver to build a list of children based on the children from the cursor. 
streamDriver.getLeafFeatureDrivers(); FakeModelToken token = (FakeModelToken) initialCursor.getChildAt(tokenIndex).getModelToken(); HashSet<TokenCompletedObserver> tokenCompletedObservers = token.getObservers(); assertThat(tokenCompletedObservers).hasSize(1); for (TokenCompletedObserver tokenCompletedObserver : tokenCompletedObservers) { tokenCompletedObserver.onTokenCompleted(new TokenCompleted(payloadCursor)); } return streamDriver.getLeafFeatureDrivers(); } } /** * Builds a {@link FeatureChange} representing the removal of given {@link FakeFeatureDriver} * instances. */ private static class ChildRemoveBuilder { private List<ModelChild> removedChildren = new ArrayList<>(); ChildRemoveBuilder addChildForRemoval(ModelChild modelChild) { removedChildren.add(modelChild); return this; } FeatureChange build() { return new FeatureChange() { @Override public String getContentId() { return null; } @Override public boolean isFeatureChanged() { return false; } @Override public ModelFeature getModelFeature() { return null; } @Override public ChildChanges getChildChanges() { return new ChildChanges() { @Override public List<ModelChild> getAppendedChildren() { return null; } @Override public List<ModelChild> getRemovedChildren() { return removedChildren; } }; } }; } } }
5,100
1,558
{
  "author": "Microsoft Community",
  "name": "3D App Launcher",
  "description": "Inclut un lanceur d'applications 3D lorsque l'application est utilisée dans un environnement de réalité mixte.",
  "identity": "ts.Feat.3DLauncher"
}
98
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.html.editor.completion; import java.net.URL; import java.util.Collection; import java.util.Collections; import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.Action; import javax.swing.text.BadLocationException; import javax.swing.text.Document; import javax.swing.text.JTextComponent; import org.netbeans.api.editor.completion.Completion; import org.netbeans.api.html.lexer.HTMLTokenId; import org.netbeans.api.lexer.Token; import org.netbeans.api.lexer.TokenHierarchy; import org.netbeans.api.lexer.TokenSequence; import org.netbeans.editor.BaseDocument; import org.netbeans.editor.Utilities; import org.netbeans.modules.html.editor.lib.api.HelpItem; import org.netbeans.modules.html.editor.lib.api.HelpResolver; import org.netbeans.lib.editor.util.CharSequenceUtilities; import org.netbeans.modules.editor.indent.api.Indent; import org.netbeans.modules.editor.indent.api.IndentUtils; import org.netbeans.modules.html.editor.HtmlPreferences; import org.netbeans.modules.html.editor.api.Utils; import org.netbeans.modules.html.editor.api.completion.HtmlCompletionItem; import 
org.netbeans.modules.html.editor.javadoc.HelpManager; import org.netbeans.modules.parsing.spi.ParseException; import org.netbeans.modules.web.common.api.LexerUtils; import org.netbeans.spi.editor.completion.*; import org.netbeans.spi.editor.completion.support.AsyncCompletionQuery; import org.netbeans.spi.editor.completion.support.AsyncCompletionTask; import org.openide.util.Exceptions; import org.openide.util.NbBundle; /** * Implementation of {@link CompletionProvider} for Html documents. * * @author <NAME> */ public class HtmlCompletionProvider implements CompletionProvider { private static final Logger LOG = Logger.getLogger(HtmlCompletionProvider.class.getName()); private final AtomicBoolean AUTO_QUERY = new AtomicBoolean(); @Override public int getAutoQueryTypes(JTextComponent component, String typedText) { AUTO_QUERY.set(true); Document doc = component.getDocument(); int dotPos = component.getCaret().getDot(); return HtmlPreferences.autoPopupCompletionWindow() && checkOpenCompletion(doc, dotPos, typedText) ? COMPLETION_QUERY_TYPE + DOCUMENTATION_QUERY_TYPE : 0; } @Override public CompletionTask createTask(int queryType, JTextComponent component) { AsyncCompletionTask task = null; boolean triggeredByAutocompletion = AUTO_QUERY.getAndSet(false); if ((queryType & COMPLETION_QUERY_TYPE & COMPLETION_ALL_QUERY_TYPE) != 0) { task = new AsyncCompletionTask(new Query(triggeredByAutocompletion), component); } else if (queryType == DOCUMENTATION_QUERY_TYPE) { task = new AsyncCompletionTask(new DocQuery(null, triggeredByAutocompletion), component); } return task; } private static class Query extends AbstractQuery { private int anchor; private volatile Collection<? 
extends CompletionItem> items = Collections.<CompletionItem>emptyList(); private JTextComponent component; public Query(boolean triggeredByAutocompletion) { super(triggeredByAutocompletion); } @Override protected void prepareQuery(JTextComponent component) { this.component = component; } @Override protected void doQuery(CompletionResultSet resultSet, Document doc, int caretOffset) { try { HtmlCompletionQuery.CompletionResult result = new HtmlCompletionQuery(doc, caretOffset, triggeredByAutocompletion).query(); if (result != null) { items = result.getItems(); anchor = result.getAnchor(); } else { items = Collections.emptyList(); anchor = 0; } resultSet.addAllItems(items); resultSet.setAnchorOffset(anchor); } catch (ParseException ex) { Exceptions.printStackTrace(ex); } } @Override protected boolean canFilter(JTextComponent component) { try { if (component.getCaret() == null || component.getCaretPosition() < anchor) { return false; } Document doc = component.getDocument(); int offset = component.getCaretPosition(); String prefix = doc.getText(anchor, offset - anchor); //check the items for (CompletionItem item : items) { if (item instanceof HtmlCompletionItem) { String itemText = ((HtmlCompletionItem) item).getItemText(); if(itemText != null) { //http://netbeans.org/bugzilla/show_bug.cgi?id=222234 if (startsWithIgnoreCase(itemText, prefix)) { return true; //at least one item will remain } } else { LOG.log(Level.WARNING, "CompletionItem {0} returned null from getItemText()!", item); } } } } catch (BadLocationException ex) { Exceptions.printStackTrace(ex); } return false; } @Override protected void filter(CompletionResultSet resultSet) { try { Document doc = component.getDocument(); int offset = component.getCaretPosition(); String prefix = doc.getText(anchor, offset - anchor); //check the items for (CompletionItem item : items) { if (item instanceof HtmlCompletionItem) { if (startsWithIgnoreCase(((HtmlCompletionItem) item).getItemText(), prefix)) { 
resultSet.addItem(item); } } } } catch (BadLocationException ex) { Exceptions.printStackTrace(ex); } finally { resultSet.setAnchorOffset(anchor); resultSet.finish(); } } private static boolean startsWithIgnoreCase(String text, String prefix) { return text.toLowerCase(Locale.ENGLISH).startsWith(prefix.toLowerCase(Locale.ENGLISH)); } } private static boolean assertsEnabled; static { assertsEnabled = false; assert assertsEnabled = true; } public static class DocQuery extends AbstractQuery { private CompletionItem item; public DocQuery(HtmlCompletionItem item, boolean triggeredByAutocompletion) { super(triggeredByAutocompletion); this.item = item; } @Override protected void doQuery(CompletionResultSet resultSet, Document doc, int caretOffset) { if (item == null) { try { //item == null means that the DocQuery is invoked //based on the explicit documentation opening request //(not ivoked by selecting a completion item in the list) HtmlCompletionQuery.CompletionResult result = new HtmlCompletionQuery(doc, caretOffset, false).query(); if (result == null) { // Query method returned no CompletionResult. return; } try { int rowEnd = Utilities.getRowEnd((BaseDocument)doc, caretOffset); final String documentText = doc.getText(result.getAnchor(), rowEnd - result.getAnchor()); // Go through result items and select completionItem // with same tag document cursor is on. 
for (CompletionItem completionItem : result.getItems()) { if (LexerUtils.startsWith(documentText, completionItem.getInsertPrefix(), true, false)) { if(item == null) { item = completionItem; if(!assertsEnabled) { break; //be quick in production, the list of items can be really long } } else { // only warning LOG.log(Level.WARNING, "More than one CompletionItem found with InsertPrefix {0}, item.insertPrefix={1}", new Object[]{completionItem.getInsertPrefix(), item.getInsertPrefix()}); } } } } catch (BadLocationException ex) { Exceptions.printStackTrace(ex); } } catch (ParseException ex) { Exceptions.printStackTrace(ex); } } HtmlCompletionItem htmlItem = (HtmlCompletionItem) item; if (htmlItem != null && htmlItem.hasHelp()) { resultSet.setDocumentation(createCompletionDocumentation(htmlItem)); } } } private static abstract class AbstractQuery extends AsyncCompletionQuery { protected final boolean triggeredByAutocompletion; public AbstractQuery(boolean triggeredByAutocompletion) { this.triggeredByAutocompletion = triggeredByAutocompletion; } @Override protected void preQueryUpdate(JTextComponent component) { checkHideCompletion((BaseDocument) component.getDocument(), component.getCaretPosition()); } @Override protected void query(CompletionResultSet resultSet, Document doc, int caretOffset) { try { doQuery(resultSet, doc, caretOffset); } finally { resultSet.finish(); } } abstract void doQuery(CompletionResultSet resultSet, Document doc, int caretOffset); } private static void checkHideCompletion(final BaseDocument doc, final int caretOffset) { //test whether we are just in text and eventually close the opened completion //this is handy after end tag autocompletion when user doesn't complete the //end tag and just types a text //test whether the user typed an ending quotation in the attribute value doc.render(new Runnable() { @Override public void run() { TokenHierarchy tokenHierarchy = TokenHierarchy.get(doc); TokenSequence tokenSequence = 
tokenHierarchy.tokenSequence(); tokenSequence.move(caretOffset == 0 ? 0 : caretOffset - 1); if (!tokenSequence.moveNext()) { return; } Token tokenItem = tokenSequence.token(); if (tokenItem.id() == HTMLTokenId.TEXT && !tokenItem.text().toString().startsWith("<") && !tokenItem.text().toString().startsWith("&")) { hideCompletion(); } } }); } static boolean checkOpenCompletion(Document document, final int dotPos, String typedText) { final BaseDocument doc = (BaseDocument) document; switch (typedText.charAt(typedText.length() - 1)) { case '/': if (dotPos >= 2) { // last char before inserted slash try { String txtBeforeSpace = doc.getText(dotPos - 2, 2); if (txtBeforeSpace.equals("</")) // NOI18N { return true; } } catch (BadLocationException e) { //no action } } break; case ' ': //Bug 235048 - second tab activates suggestion in html editor //trigger the completion window only if the user types space //char, not upon tab expand or enter + indentation. // //in theory one could set tab size to 1 and then the issue would //reappear, but it's not worth adding a new condition :-) if(typedText.length() > 1) { return false; } final AtomicBoolean value = new AtomicBoolean(); doc.render(new Runnable() { @Override public void run() { TokenSequence ts = Utils.getJoinedHtmlSequence(doc, dotPos); if (ts == null) { //no suitable token sequence found value.set(false); return; } int diff = ts.move(dotPos); if (diff == 0) { //just after a token if (ts.movePrevious()) { if(ts.token().id() == HTMLTokenId.WS) { //just after a whitespace if(ts.movePrevious()) { //test what precedes the WS -- are we inside a tag? 
value.set(ts.token().id() == HTMLTokenId.TAG_OPEN || ts.token().id() == HTMLTokenId.VALUE || ts.token().id() == HTMLTokenId.VALUE_CSS || ts.token().id() == HTMLTokenId.VALUE_JAVASCRIPT); } } } } else if (diff > 0) { //after first char of the token if (ts.moveNext()) { if (ts.token().id() == HTMLTokenId.WS) { if (ts.movePrevious()) { value.set(ts.token().id() == HTMLTokenId.TAG_OPEN || ts.token().id() == HTMLTokenId.VALUE || ts.token().id() == HTMLTokenId.VALUE_CSS || ts.token().id() == HTMLTokenId.VALUE_JAVASCRIPT); } } } } } }); return value.get(); case '<': case '&': return true; case '>': //handle tag autocomplete if(HtmlPreferences.autoPopupEndTagAutoCompletion()) { final boolean[] ret = new boolean[1]; doc.runAtomic(new Runnable() { @Override public void run() { TokenSequence ts = Utils.getJoinedHtmlSequence(doc, dotPos); if (ts == null) { //no suitable token sequence found ret[0] = false; } else { ts.move(dotPos - 1); if (ts.moveNext() || ts.movePrevious()) { if (ts.token().id() == HTMLTokenId.TAG_CLOSE_SYMBOL && !CharSequenceUtilities.equals("/>", ts.token().text())) { ret[0] = true; } } } } }); return ret[0]; } } return false; } private static void hideCompletion() { Completion.get().hideCompletion(); Completion.get().hideDocumentation(); } private static class LegacyLinkDocItem implements CompletionDocumentation { private URL url; public LegacyLinkDocItem(URL url) { this.url = url; } @Override public String getText() { return null; /* String anchor = HelpManager.getDefault().getAnchorText(url); if(anchor != null) return HelpManager.getDefault().getHelpText(url, anchor); else return HelpManager.getDefault().getHelpText(url); */ } @Override public URL getURL() { return url; } @Override public CompletionDocumentation resolveLink(String link) { return new LegacyLinkDocItem(HelpManager.getDefault().getRelativeURL(url, link)); } @Override public Action getGotoSourceAction() { return null; } } private static class LinkDocItem implements CompletionDocumentation { 
private URL url; private HelpResolver resolver; public LinkDocItem(HelpResolver resolver, URL url) { this.url = url; this.resolver = resolver; } @Override public String getText() { //normally it should be enough to return null here //and the documentation would be loaded from the URL. //However it seems that the html5 anchor navigation doesn't //properly work in the embedded swing browser so I need to //strip the begginning of the file to the anchor manually return resolver.getHelpContent(getURL()); } @Override public URL getURL() { return url; } @Override public CompletionDocumentation resolveLink(String link) { return new LinkDocItem(resolver, resolver.resolveLink(getURL(), link)); } @Override public Action getGotoSourceAction() { return null; } } private static class NoDocItem implements CompletionDocumentation { @Override public String getText() { return NbBundle.getMessage(HtmlCompletionProvider.class, "MSG_No_Doc_For_Target"); //NOI18N } @Override public URL getURL() { return null; } @Override public CompletionDocumentation resolveLink(String link) { return null; } @Override public Action getGotoSourceAction() { return null; } } private static CompletionDocumentation createCompletionDocumentation(HtmlCompletionItem item) { //fork for the new and old help approach, legacy html4 not migrated yet HelpItem helpItem = item.getHelpItem(); if (helpItem != null) { return new HtmlTagDocumetationItem(item); } //else legacy approach return new DocItem(item); } private static class HtmlTagDocumetationItem implements CompletionDocumentation { private final HtmlCompletionItem item; private final String documentationText; public HtmlTagDocumetationItem(HtmlCompletionItem ri) { this.item = ri; //initialize the text in constructor as it is not called from EDT //in contrary to the {@link #getText()} method. 
this.documentationText = loadDocText(); } private HelpItem getHelpItem() { return item.getHelpItem(); } private String loadDocText() { //normally it should be enough to return null here //and the documentation would be loaded from the URL. //However it seems that the html5 anchor navigation doesn't //properly work in the embedded swing browser so I need to //strip the begginning of the file to the anchor manually //now the statement above is not fully true since I need to add //the header before the URL content StringBuilder sb = new StringBuilder(); String header = getHelpItem().getHelpHeader(); if (header != null) { sb.append(header); } String helpContent = getHelpItem().getHelpContent() != null ? getHelpItem().getHelpContent() : getHelpItem().getHelpResolver().getHelpContent(getURL()); sb.append(helpContent); return sb.toString(); } @Override public String getText() { return documentationText; } @Override public URL getURL() { return getHelpItem().getHelpURL(); } @Override public CompletionDocumentation resolveLink(String link) { URL itemUrl = getHelpItem().getHelpResolver().resolveLink(getURL(), link); return itemUrl != null ? new LinkDocItem(getHelpItem().getHelpResolver(), itemUrl) : new NoDocItem(); } @Override public Action getGotoSourceAction() { return null; } } private static class DocItem implements CompletionDocumentation { HtmlCompletionItem item; public DocItem(HtmlCompletionItem ri) { this.item = ri; ri.prepareHelp(); } @Override public String getText() { return item.getHelp(); } @Override public URL getURL() { return item.getHelpURL(); } @Override public CompletionDocumentation resolveLink(String link) { URL itemUrl = HelpManager.getDefault().getHelpURL(item.getHelpId()); return itemUrl != null ? new LegacyLinkDocItem(HelpManager.getDefault().getRelativeURL(itemUrl, link)) : new NoDocItem(); } @Override public Action getGotoSourceAction() { return null; } } }
11,475
2,728
<filename>sdk/machinelearning/azure-mgmt-guestconfig/azure/mgmt/guestconfig/models/_models_py3.py<gh_stars>1000+ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization from ._guest_configuration_client_enums import * class AssignmentInfo(msrest.serialization.Model): """Information about the guest configuration assignment. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: Name of the guest configuration assignment. :vartype name: str :param configuration: Information about the configuration. :type configuration: ~azure.mgmt.guestconfig.models.ConfigurationInfo """ _validation = { 'name': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'configuration': {'key': 'configuration', 'type': 'ConfigurationInfo'}, } def __init__( self, *, configuration: Optional["ConfigurationInfo"] = None, **kwargs ): super(AssignmentInfo, self).__init__(**kwargs) self.name = None self.configuration = configuration class AssignmentReport(msrest.serialization.Model): """AssignmentReport. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: ARM resource id of the report for the guest configuration assignment. :vartype id: str :ivar report_id: GUID that identifies the guest configuration assignment report under a subscription, resource group. :vartype report_id: str :param assignment: Configuration details of the guest configuration assignment. 
:type assignment: ~azure.mgmt.guestconfig.models.AssignmentInfo :param vm: Information about the VM. :type vm: ~azure.mgmt.guestconfig.models.VMInfo :ivar start_time: Start date and time of the guest configuration assignment compliance status check. :vartype start_time: ~datetime.datetime :ivar end_time: End date and time of the guest configuration assignment compliance status check. :vartype end_time: ~datetime.datetime :ivar compliance_status: A value indicating compliance status of the machine for the assigned guest configuration. Possible values include: "Compliant", "NonCompliant", "Pending". :vartype compliance_status: str or ~azure.mgmt.guestconfig.models.ComplianceStatus :ivar operation_type: Type of report, Consistency or Initial. Possible values include: "Consistency", "Initial". :vartype operation_type: str or ~azure.mgmt.guestconfig.models.Type :param resources: The list of resources for which guest configuration assignment compliance is checked. :type resources: list[~azure.mgmt.guestconfig.models.AssignmentReportResource] """ _validation = { 'id': {'readonly': True}, 'report_id': {'readonly': True}, 'start_time': {'readonly': True}, 'end_time': {'readonly': True}, 'compliance_status': {'readonly': True}, 'operation_type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'report_id': {'key': 'reportId', 'type': 'str'}, 'assignment': {'key': 'assignment', 'type': 'AssignmentInfo'}, 'vm': {'key': 'vm', 'type': 'VMInfo'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'compliance_status': {'key': 'complianceStatus', 'type': 'str'}, 'operation_type': {'key': 'operationType', 'type': 'str'}, 'resources': {'key': 'resources', 'type': '[AssignmentReportResource]'}, } def __init__( self, *, assignment: Optional["AssignmentInfo"] = None, vm: Optional["VMInfo"] = None, resources: Optional[List["AssignmentReportResource"]] = None, **kwargs ): super(AssignmentReport, 
self).__init__(**kwargs) self.id = None self.report_id = None self.assignment = assignment self.vm = vm self.start_time = None self.end_time = None self.compliance_status = None self.operation_type = None self.resources = resources class AssignmentReportDetails(msrest.serialization.Model): """Details of the guest configuration assignment report. Variables are only populated by the server, and will be ignored when sending a request. :ivar compliance_status: A value indicating compliance status of the machine for the assigned guest configuration. Possible values include: "Compliant", "NonCompliant", "Pending". :vartype compliance_status: str or ~azure.mgmt.guestconfig.models.ComplianceStatus :ivar start_time: Start date and time of the guest configuration assignment compliance status check. :vartype start_time: ~datetime.datetime :ivar end_time: End date and time of the guest configuration assignment compliance status check. :vartype end_time: ~datetime.datetime :ivar job_id: GUID of the report. :vartype job_id: str :ivar operation_type: Type of report, Consistency or Initial. Possible values include: "Consistency", "Initial". :vartype operation_type: str or ~azure.mgmt.guestconfig.models.Type :param resources: The list of resources for which guest configuration assignment compliance is checked. 
:type resources: list[~azure.mgmt.guestconfig.models.AssignmentReportResource] """ _validation = { 'compliance_status': {'readonly': True}, 'start_time': {'readonly': True}, 'end_time': {'readonly': True}, 'job_id': {'readonly': True}, 'operation_type': {'readonly': True}, } _attribute_map = { 'compliance_status': {'key': 'complianceStatus', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'job_id': {'key': 'jobId', 'type': 'str'}, 'operation_type': {'key': 'operationType', 'type': 'str'}, 'resources': {'key': 'resources', 'type': '[AssignmentReportResource]'}, } def __init__( self, *, resources: Optional[List["AssignmentReportResource"]] = None, **kwargs ): super(AssignmentReportDetails, self).__init__(**kwargs) self.compliance_status = None self.start_time = None self.end_time = None self.job_id = None self.operation_type = None self.resources = resources class AssignmentReportResource(msrest.serialization.Model): """The guest configuration assignment resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar compliance_status: A value indicating compliance status of the machine for the assigned guest configuration. Possible values include: "Compliant", "NonCompliant", "Pending". :vartype compliance_status: str or ~azure.mgmt.guestconfig.models.ComplianceStatus :ivar resource_id: Name of the guest configuration assignment resource setting. :vartype resource_id: str :param reasons: Compliance reason and reason code for a resource. :type reasons: list[~azure.mgmt.guestconfig.models.AssignmentReportResourceComplianceReason] :ivar properties: Properties of a guest configuration assignment resource. 
:vartype properties: any """ _validation = { 'compliance_status': {'readonly': True}, 'resource_id': {'readonly': True}, 'properties': {'readonly': True}, } _attribute_map = { 'compliance_status': {'key': 'complianceStatus', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'reasons': {'key': 'reasons', 'type': '[AssignmentReportResourceComplianceReason]'}, 'properties': {'key': 'properties', 'type': 'object'}, } def __init__( self, *, reasons: Optional[List["AssignmentReportResourceComplianceReason"]] = None, **kwargs ): super(AssignmentReportResource, self).__init__(**kwargs) self.compliance_status = None self.resource_id = None self.reasons = reasons self.properties = None class AssignmentReportResourceComplianceReason(msrest.serialization.Model): """Reason and code for the compliance of the guest configuration assignment resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar phrase: Reason for the compliance of the guest configuration assignment resource. :vartype phrase: str :ivar code: Code for the compliance of the guest configuration assignment resource. :vartype code: str """ _validation = { 'phrase': {'readonly': True}, 'code': {'readonly': True}, } _attribute_map = { 'phrase': {'key': 'phrase', 'type': 'str'}, 'code': {'key': 'code', 'type': 'str'}, } def __init__( self, **kwargs ): super(AssignmentReportResourceComplianceReason, self).__init__(**kwargs) self.phrase = None self.code = None class ConfigurationInfo(msrest.serialization.Model): """Information about the configuration. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: Name of the configuration. :vartype name: str :ivar version: Version of the configuration. 
:vartype version: str """ _validation = { 'name': {'readonly': True}, 'version': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, **kwargs ): super(ConfigurationInfo, self).__init__(**kwargs) self.name = None self.version = None class ConfigurationParameter(msrest.serialization.Model): """Represents a configuration parameter. :param name: Name of the configuration parameter. :type name: str :param value: Value of the configuration parameter. :type value: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, value: Optional[str] = None, **kwargs ): super(ConfigurationParameter, self).__init__(**kwargs) self.name = name self.value = value class ConfigurationSetting(msrest.serialization.Model): """Configuration setting of LCM (Local Configuration Manager). :param configuration_mode: Specifies how the LCM(Local Configuration Manager) actually applies the configuration to the target nodes. Possible values are ApplyOnly, ApplyAndMonitor, and ApplyAndAutoCorrect. Possible values include: "ApplyOnly", "ApplyAndMonitor", "ApplyAndAutoCorrect". :type configuration_mode: str or ~azure.mgmt.guestconfig.models.ConfigurationMode :param allow_module_overwrite: If true - new configurations downloaded from the pull service are allowed to overwrite the old ones on the target node. Otherwise, false. :type allow_module_overwrite: bool :param action_after_reboot: Specifies what happens after a reboot during the application of a configuration. The possible values are ContinueConfiguration and StopConfiguration. Possible values include: "ContinueConfiguration", "StopConfiguration". 
:type action_after_reboot: str or ~azure.mgmt.guestconfig.models.ActionAfterReboot :param refresh_frequency_mins: The time interval, in minutes, at which the LCM checks a pull service to get updated configurations. This value is ignored if the LCM is not configured in pull mode. The default value is 30. :type refresh_frequency_mins: float :param reboot_if_needed: Set this to true to automatically reboot the node after a configuration that requires reboot is applied. Otherwise, you will have to manually reboot the node for any configuration that requires it. The default value is false. To use this setting when a reboot condition is enacted by something other than DSC (such as Windows Installer), combine this setting with the xPendingReboot module. :type reboot_if_needed: bool :param configuration_mode_frequency_mins: How often, in minutes, the current configuration is checked and applied. This property is ignored if the ConfigurationMode property is set to ApplyOnly. The default value is 15. 
:type configuration_mode_frequency_mins: float """ _attribute_map = { 'configuration_mode': {'key': 'configurationMode', 'type': 'str'}, 'allow_module_overwrite': {'key': 'allowModuleOverwrite', 'type': 'bool'}, 'action_after_reboot': {'key': 'actionAfterReboot', 'type': 'str'}, 'refresh_frequency_mins': {'key': 'refreshFrequencyMins', 'type': 'float'}, 'reboot_if_needed': {'key': 'rebootIfNeeded', 'type': 'bool'}, 'configuration_mode_frequency_mins': {'key': 'configurationModeFrequencyMins', 'type': 'float'}, } def __init__( self, *, configuration_mode: Optional[Union[str, "ConfigurationMode"]] = None, allow_module_overwrite: Optional[bool] = None, action_after_reboot: Optional[Union[str, "ActionAfterReboot"]] = None, refresh_frequency_mins: Optional[float] = 30, reboot_if_needed: Optional[bool] = None, configuration_mode_frequency_mins: Optional[float] = 15, **kwargs ): super(ConfigurationSetting, self).__init__(**kwargs) self.configuration_mode = configuration_mode self.allow_module_overwrite = allow_module_overwrite self.action_after_reboot = action_after_reboot self.refresh_frequency_mins = refresh_frequency_mins self.reboot_if_needed = reboot_if_needed self.configuration_mode_frequency_mins = configuration_mode_frequency_mins class ErrorResponse(msrest.serialization.Model): """Error response of an operation failure. :param error: :type error: ~azure.mgmt.guestconfig.models.ErrorResponseError """ _attribute_map = { 'error': {'key': 'error', 'type': 'ErrorResponseError'}, } def __init__( self, *, error: Optional["ErrorResponseError"] = None, **kwargs ): super(ErrorResponse, self).__init__(**kwargs) self.error = error class ErrorResponseError(msrest.serialization.Model): """ErrorResponseError. :param code: Error code. :type code: str :param message: Detail error message indicating why the operation failed. 
:type message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, message: Optional[str] = None, **kwargs ): super(ErrorResponseError, self).__init__(**kwargs) self.code = code self.message = message class Resource(msrest.serialization.Model): """The core properties of ARM resources. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: ARM resource id of the guest configuration assignment. :vartype id: str :param name: Name of the guest configuration assignment. :type name: str :param location: Region where the VM is located. :type location: str :ivar type: The type of the resource. :vartype type: str """ _validation = { 'id': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, location: Optional[str] = None, **kwargs ): super(Resource, self).__init__(**kwargs) self.id = None self.name = name self.location = location self.type = None class ProxyResource(Resource): """ARM proxy resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: ARM resource id of the guest configuration assignment. :vartype id: str :param name: Name of the guest configuration assignment. :type name: str :param location: Region where the VM is located. :type location: str :ivar type: The type of the resource. 
:vartype type: str """ _validation = { 'id': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, location: Optional[str] = None, **kwargs ): super(ProxyResource, self).__init__(name=name, location=location, **kwargs) class GuestConfigurationAssignment(ProxyResource): """Guest configuration assignment is an association between a machine and guest configuration. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: ARM resource id of the guest configuration assignment. :vartype id: str :param name: Name of the guest configuration assignment. :type name: str :param location: Region where the VM is located. :type location: str :ivar type: The type of the resource. :vartype type: str :param properties: Properties of the Guest configuration assignment. :type properties: ~azure.mgmt.guestconfig.models.GuestConfigurationAssignmentProperties """ _validation = { 'id': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'GuestConfigurationAssignmentProperties'}, } def __init__( self, *, name: Optional[str] = None, location: Optional[str] = None, properties: Optional["GuestConfigurationAssignmentProperties"] = None, **kwargs ): super(GuestConfigurationAssignment, self).__init__(name=name, location=location, **kwargs) self.properties = properties class GuestConfigurationAssignmentList(msrest.serialization.Model): """The response of the list guest configuration assignment operation. :param value: Result of the list guest configuration assignment operation. 
:type value: list[~azure.mgmt.guestconfig.models.GuestConfigurationAssignment] """ _attribute_map = { 'value': {'key': 'value', 'type': '[GuestConfigurationAssignment]'}, } def __init__( self, *, value: Optional[List["GuestConfigurationAssignment"]] = None, **kwargs ): super(GuestConfigurationAssignmentList, self).__init__(**kwargs) self.value = value class GuestConfigurationAssignmentProperties(msrest.serialization.Model): """Guest configuration assignment properties. Variables are only populated by the server, and will be ignored when sending a request. :ivar target_resource_id: VM resource Id. :vartype target_resource_id: str :param guest_configuration: The guest configuration to assign. :type guest_configuration: ~azure.mgmt.guestconfig.models.GuestConfigurationNavigation :ivar compliance_status: A value indicating compliance status of the machine for the assigned guest configuration. Possible values include: "Compliant", "NonCompliant", "Pending". :vartype compliance_status: str or ~azure.mgmt.guestconfig.models.ComplianceStatus :ivar last_compliance_status_checked: Date and time when last compliance status was checked. :vartype last_compliance_status_checked: ~datetime.datetime :ivar latest_report_id: Id of the latest report for the guest configuration assignment. :vartype latest_report_id: str :param latest_assignment_report: Last reported guest configuration assignment report. :type latest_assignment_report: ~azure.mgmt.guestconfig.models.AssignmentReport :param context: The source which initiated the guest configuration assignment. Ex: Azure Policy. :type context: str :ivar assignment_hash: Combined hash of the configuration package and parameters. :vartype assignment_hash: str :ivar provisioning_state: The provisioning state, which only appears in the response. Possible values include: "Succeeded", "Failed", "Canceled", "Created". 
:vartype provisioning_state: str or ~azure.mgmt.guestconfig.models.ProvisioningState """ _validation = { 'target_resource_id': {'readonly': True}, 'compliance_status': {'readonly': True}, 'last_compliance_status_checked': {'readonly': True}, 'latest_report_id': {'readonly': True}, 'assignment_hash': {'readonly': True}, 'provisioning_state': {'readonly': True}, } _attribute_map = { 'target_resource_id': {'key': 'targetResourceId', 'type': 'str'}, 'guest_configuration': {'key': 'guestConfiguration', 'type': 'GuestConfigurationNavigation'}, 'compliance_status': {'key': 'complianceStatus', 'type': 'str'}, 'last_compliance_status_checked': {'key': 'lastComplianceStatusChecked', 'type': 'iso-8601'}, 'latest_report_id': {'key': 'latestReportId', 'type': 'str'}, 'latest_assignment_report': {'key': 'latestAssignmentReport', 'type': 'AssignmentReport'}, 'context': {'key': 'context', 'type': 'str'}, 'assignment_hash': {'key': 'assignmentHash', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, } def __init__( self, *, guest_configuration: Optional["GuestConfigurationNavigation"] = None, latest_assignment_report: Optional["AssignmentReport"] = None, context: Optional[str] = None, **kwargs ): super(GuestConfigurationAssignmentProperties, self).__init__(**kwargs) self.target_resource_id = None self.guest_configuration = guest_configuration self.compliance_status = None self.last_compliance_status_checked = None self.latest_report_id = None self.latest_assignment_report = latest_assignment_report self.context = context self.assignment_hash = None self.provisioning_state = None class GuestConfigurationAssignmentReport(msrest.serialization.Model): """Report for the guest configuration assignment. Report contains information such as compliance status, reason, and more. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: ARM resource id of the report for the guest configuration assignment. 
:vartype id: str :ivar name: GUID that identifies the guest configuration assignment report under a subscription, resource group. :vartype name: str :param properties: Properties of the guest configuration report. :type properties: ~azure.mgmt.guestconfig.models.GuestConfigurationAssignmentReportProperties """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'GuestConfigurationAssignmentReportProperties'}, } def __init__( self, *, properties: Optional["GuestConfigurationAssignmentReportProperties"] = None, **kwargs ): super(GuestConfigurationAssignmentReport, self).__init__(**kwargs) self.id = None self.name = None self.properties = properties class GuestConfigurationAssignmentReportList(msrest.serialization.Model): """List of guest configuration assignment reports. :param value: List of reports for the guest configuration. Report contains information such as compliance status, reason and more. :type value: list[~azure.mgmt.guestconfig.models.GuestConfigurationAssignmentReport] """ _attribute_map = { 'value': {'key': 'value', 'type': '[GuestConfigurationAssignmentReport]'}, } def __init__( self, *, value: Optional[List["GuestConfigurationAssignmentReport"]] = None, **kwargs ): super(GuestConfigurationAssignmentReportList, self).__init__(**kwargs) self.value = value class GuestConfigurationAssignmentReportProperties(msrest.serialization.Model): """Report for the guest configuration assignment. Report contains information such as compliance status, reason, and more. Variables are only populated by the server, and will be ignored when sending a request. :ivar compliance_status: A value indicating compliance status of the machine for the assigned guest configuration. Possible values include: "Compliant", "NonCompliant", "Pending". 
:vartype compliance_status: str or ~azure.mgmt.guestconfig.models.ComplianceStatus :ivar report_id: GUID that identifies the guest configuration assignment report under a subscription, resource group. :vartype report_id: str :param assignment: Configuration details of the guest configuration assignment. :type assignment: ~azure.mgmt.guestconfig.models.AssignmentInfo :param vm: Information about the VM. :type vm: ~azure.mgmt.guestconfig.models.VMInfo :ivar start_time: Start date and time of the guest configuration assignment compliance status check. :vartype start_time: ~datetime.datetime :ivar end_time: End date and time of the guest configuration assignment compliance status check. :vartype end_time: ~datetime.datetime :param details: Details of the assignment report. :type details: ~azure.mgmt.guestconfig.models.AssignmentReportDetails """ _validation = { 'compliance_status': {'readonly': True}, 'report_id': {'readonly': True}, 'start_time': {'readonly': True}, 'end_time': {'readonly': True}, } _attribute_map = { 'compliance_status': {'key': 'complianceStatus', 'type': 'str'}, 'report_id': {'key': 'reportId', 'type': 'str'}, 'assignment': {'key': 'assignment', 'type': 'AssignmentInfo'}, 'vm': {'key': 'vm', 'type': 'VMInfo'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'details': {'key': 'details', 'type': 'AssignmentReportDetails'}, } def __init__( self, *, assignment: Optional["AssignmentInfo"] = None, vm: Optional["VMInfo"] = None, details: Optional["AssignmentReportDetails"] = None, **kwargs ): super(GuestConfigurationAssignmentReportProperties, self).__init__(**kwargs) self.compliance_status = None self.report_id = None self.assignment = assignment self.vm = vm self.start_time = None self.end_time = None self.details = details class GuestConfigurationNavigation(msrest.serialization.Model): """Guest configuration is an artifact that encapsulates DSC configuration and its dependencies. 
The artifact is a zip file containing DSC configuration (as MOF) and dependent resources and other dependencies like modules. :param kind: Kind of the guest configuration. For example:DSC. Possible values include: "DSC". :type kind: str or ~azure.mgmt.guestconfig.models.Kind :param name: Name of the guest configuration. :type name: str :param version: Version of the guest configuration. :type version: str :param content_uri: Uri of the storage where guest configuration package is uploaded. :type content_uri: str :param content_hash: Combined hash of the guest configuration package and configuration parameters. :type content_hash: str :param assignment_type: Specifies the assignment type and execution of the configuration. Possible values are Audit, DeployAndAutoCorrect, ApplyAndAutoCorrect and ApplyAndMonitor. Possible values include: "Audit", "DeployAndAutoCorrect", "ApplyAndAutoCorrect", "ApplyAndMonitor". :type assignment_type: str or ~azure.mgmt.guestconfig.models.AssignmentType :param configuration_parameter: The configuration parameters for the guest configuration. :type configuration_parameter: list[~azure.mgmt.guestconfig.models.ConfigurationParameter] :param configuration_setting: The configuration setting for the guest configuration. 
:type configuration_setting: ~azure.mgmt.guestconfig.models.ConfigurationSetting """ _attribute_map = { 'kind': {'key': 'kind', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'content_uri': {'key': 'contentUri', 'type': 'str'}, 'content_hash': {'key': 'contentHash', 'type': 'str'}, 'assignment_type': {'key': 'assignmentType', 'type': 'str'}, 'configuration_parameter': {'key': 'configurationParameter', 'type': '[ConfigurationParameter]'}, 'configuration_setting': {'key': 'configurationSetting', 'type': 'ConfigurationSetting'}, } def __init__( self, *, kind: Optional[Union[str, "Kind"]] = None, name: Optional[str] = None, version: Optional[str] = None, content_uri: Optional[str] = None, content_hash: Optional[str] = None, assignment_type: Optional[Union[str, "AssignmentType"]] = None, configuration_parameter: Optional[List["ConfigurationParameter"]] = None, configuration_setting: Optional["ConfigurationSetting"] = None, **kwargs ): super(GuestConfigurationNavigation, self).__init__(**kwargs) self.kind = kind self.name = name self.version = version self.content_uri = content_uri self.content_hash = content_hash self.assignment_type = assignment_type self.configuration_parameter = configuration_parameter self.configuration_setting = configuration_setting class Operation(msrest.serialization.Model): """GuestConfiguration REST API operation. :param name: Operation name: For ex. providers/Microsoft.GuestConfiguration/guestConfigurationAssignments/write or read. :type name: str :param display: Provider, Resource, Operation and description values. :type display: ~azure.mgmt.guestconfig.models.OperationDisplay :param status_code: Service provider: Microsoft.GuestConfiguration. 
:type status_code: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'status_code': {'key': 'properties.statusCode', 'type': 'str'}, } def __init__( self, *, name: Optional[str] = None, display: Optional["OperationDisplay"] = None, status_code: Optional[str] = None, **kwargs ): super(Operation, self).__init__(**kwargs) self.name = name self.display = display self.status_code = status_code class OperationDisplay(msrest.serialization.Model): """Provider, Resource, Operation and description values. :param provider: Service provider: Microsoft.GuestConfiguration. :type provider: str :param resource: Resource on which the operation is performed: For ex. :type resource: str :param operation: Operation type: Read, write, delete, etc. :type operation: str :param description: Description about operation. :type description: str """ _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__( self, *, provider: Optional[str] = None, resource: Optional[str] = None, operation: Optional[str] = None, description: Optional[str] = None, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider = provider self.resource = resource self.operation = operation self.description = description class OperationList(msrest.serialization.Model): """The response model for the list of Automation operations. :param value: List of Automation operations supported by the Automation resource provider. 
:type value: list[~azure.mgmt.guestconfig.models.Operation] """ _attribute_map = { 'value': {'key': 'value', 'type': '[Operation]'}, } def __init__( self, *, value: Optional[List["Operation"]] = None, **kwargs ): super(OperationList, self).__init__(**kwargs) self.value = value class VMInfo(msrest.serialization.Model): """Information about the VM. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Azure resource Id of the VM. :vartype id: str :ivar uuid: UUID(Universally Unique Identifier) of the VM. :vartype uuid: str """ _validation = { 'id': {'readonly': True}, 'uuid': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'uuid': {'key': 'uuid', 'type': 'str'}, } def __init__( self, **kwargs ): super(VMInfo, self).__init__(**kwargs) self.id = None self.uuid = None
12,587
2,151
package com.google.typography.font.sfntly.table.opentype.component; import com.google.typography.font.sfntly.data.ReadableFontData; public class NumRecordTable extends RecordsTable<NumRecord> { public NumRecordTable(ReadableFontData data, int base, boolean dataIsCanonical) { super(data, base, dataIsCanonical); } public NumRecordTable(NumRecordList records) { super(records); } @Override protected RecordList<NumRecord> createRecordList(ReadableFontData data) { return new NumRecordList(data); } @Override public int fieldCount() { return 0; } public static class Builder extends RecordsTable.Builder<NumRecordTable, NumRecord> { public Builder() { super(); } public Builder(ReadableFontData data, int base, boolean dataIsCanonical) { super(data, base, dataIsCanonical); } public Builder(NumRecordTable table) { super(table); } @Override protected NumRecordTable readTable(ReadableFontData data, int base, boolean dataIsCanonical) { if (base != 0) { throw new UnsupportedOperationException(); } return new NumRecordTable(data, base, dataIsCanonical); } @Override protected RecordList<NumRecord> readRecordList(ReadableFontData data, int base) { if (base != 0) { throw new UnsupportedOperationException(); } return new NumRecordList(data); } @Override protected int fieldCount() { return 0; } @Override protected void initFields() { } } }
544
347
import sys
from dataclasses import dataclass
from pathlib import Path

import numpy as np
import pyrallis
from tqdm import tqdm

sys.path.append(".")
sys.path.append("..")

from editing.interfacegan.helpers.anycostgan import attr_list
from editing.interfacegan.helpers.manipulator import train_boundary


@dataclass
class TrainConfig:
    # Path to the `npy` batches saved from the script `generate_latents_and_attribute_scores.py`
    input_path: Path = Path("./latents")
    # Where to save the boundary `npy` files to
    output_path: Path = Path("./boundaries")


@pyrallis.wrap()
def main(opts: TrainConfig):
    """Trains InterFaceGAN separation boundaries for every classifier attribute,
    plus dedicated age and pose boundaries, and saves each as an `npy` file."""
    all_latent_codes, all_attribute_scores, all_ages, all_poses = [], [], [], []
    for batch_dir in tqdm(opts.input_path.glob("*")):
        # Only directories named `id_*` hold generated batches.
        if not str(batch_dir.name).startswith("id_"):
            continue
        # NOTE(review): `batch_dir` already is the full path yielded by glob();
        # the previous `opts.input_path / batch_dir` join duplicated the prefix
        # for relative input paths (it only worked for absolute ones).
        # load batch latents
        latent_codes = np.load(batch_dir / 'ws.npy', allow_pickle=True)
        all_latent_codes.extend(latent_codes.tolist())
        # load batch attribute scores
        scores = np.load(batch_dir / 'scores.npy', allow_pickle=True)
        all_attribute_scores.extend(scores.tolist())
        # load batch ages
        ages = np.load(batch_dir / 'ages.npy', allow_pickle=True)
        all_ages.extend(ages.tolist())
        # load batch poses
        poses = np.load(batch_dir / 'poses.npy', allow_pickle=True)
        all_poses.extend(poses.tolist())

    opts.output_path.mkdir(exist_ok=True, parents=True)

    print(f"Obtained a total of {len(all_latent_codes)} latent codes!")
    all_latent_codes = np.array(all_latent_codes)
    # Each entry is saved as a 1-element batch; unwrap to get the latent itself.
    all_latent_codes = np.array([l[0] for l in all_latent_codes])

    # train all boundaries for all attributes predicted from the AnyCostGAN classifier
    for attribute_name in attr_list:
        # Fixed bug: this print was missing its f-string prefix.
        print(f"Training boundary for: {attribute_name}")
        # Score index 1 is used per attribute; presumably the positive-class
        # probability — TODO confirm against the AnyCostGAN classifier output.
        attr_scores = [s[attr_list.index(attribute_name)][1] for s in all_attribute_scores]
        attr_scores = np.array(attr_scores)[:, np.newaxis]
        boundary = train_boundary(latent_codes=np.array(all_latent_codes),
                                  scores=attr_scores,
                                  chosen_num_or_ratio=0.02,
                                  split_ratio=0.7,
                                  invalid_value=None)
        np.save(opts.output_path / f'{attribute_name}_boundary.npy', boundary)

    # train the age boundary
    boundary = train_boundary(latent_codes=np.array(all_latent_codes),
                              scores=np.array(all_ages),
                              chosen_num_or_ratio=0.02,
                              split_ratio=0.7,
                              invalid_value=None)
    np.save(opts.output_path / f'age_boundary.npy', boundary)

    # train the pose boundary
    boundary = train_boundary(latent_codes=np.array(all_latent_codes),
                              scores=np.array(all_poses),
                              chosen_num_or_ratio=0.02,
                              split_ratio=0.7,
                              invalid_value=None)
    np.save(opts.output_path / f'pose_boundary.npy', boundary)


if __name__ == '__main__':
    main()
1,519
2,541
//
//  WAPageRouteDelegate-Protocol.h
//  WeAppExample
//
//  Created by lionvoom on 2020/12/3.
//  Copyright © 2020 wept. All rights reserved.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Page-routing operations for the mini-app container. Mirrors the usual
/// mini-program navigation API (switchTab / navigateTo / redirectTo / reLaunch).
@protocol WAPageRouteDelegate <NSObject>

/// Launches the home page.
- (void)launchHome;
/// Navigates back to the home page.
- (void)backToHome;
/// Switches to the tab identified by the given page path.
- (void)switchTab:(NSString *)pagePath;
/// Pops the given number of pages off the navigation stack, counted from the top.
- (void)navigateBack:(NSUInteger)atIndexFromTop;
/// Pushes the page at the given path onto the navigation stack.
- (void)navigateTo:(NSString *)pagePath;
/// Replaces the current page with the page at the given path.
- (void)redirectTo:(NSString *)pagePath;
/// Closes all pages and opens the page at the given path.
- (void)reLaunch:(NSString *)pagePath;
/// Restarts the mini app.
- (void)restart;

@end

NS_ASSUME_NONNULL_END
228
326
<gh_stars>100-1000 // Boost.Geometry // Copyright (c) 2021, Oracle and/or its affiliates. // Contributed and/or modified by <NAME>, on behalf of Oracle // Licensed under the Boost Software License version 1.0. // http://www.boost.org/users/license.html #ifndef BOOST_GEOMETRY_GEOMETRIES_CONCEPTS_GEOMETRY_COLLECTION_CONCEPT_HPP #define BOOST_GEOMETRY_GEOMETRIES_CONCEPTS_GEOMETRY_COLLECTION_CONCEPT_HPP #include <utility> #include <boost/concept_check.hpp> #include <boost/range/concepts.hpp> #include <boost/geometry/core/geometry_types.hpp> #include <boost/geometry/core/mutable_range.hpp> #include <boost/geometry/core/tag.hpp> #include <boost/geometry/core/tags.hpp> #include <boost/geometry/core/visit.hpp> #include <boost/geometry/geometries/concepts/box_concept.hpp> #include <boost/geometry/geometries/concepts/concept_type.hpp> #include <boost/geometry/geometries/concepts/linestring_concept.hpp> #include <boost/geometry/geometries/concepts/multi_point_concept.hpp> #include <boost/geometry/geometries/concepts/multi_linestring_concept.hpp> #include <boost/geometry/geometries/concepts/multi_polygon_concept.hpp> #include <boost/geometry/geometries/concepts/point_concept.hpp> #include <boost/geometry/geometries/concepts/polygon_concept.hpp> #include <boost/geometry/geometries/concepts/ring_concept.hpp> #include <boost/geometry/geometries/concepts/segment_concept.hpp> #include <boost/geometry/util/sequence.hpp> #include <boost/geometry/util/type_traits.hpp> namespace boost { namespace geometry { namespace concepts { namespace detail { template < typename Geometry, typename SubGeometry, typename Tag = typename tag<Geometry>::type, bool IsSubDynamicOrCollection = util::is_dynamic_geometry<SubGeometry>::value || util::is_geometry_collection<SubGeometry>::value > struct GeometryType; // Prevent recursive concept checking template <typename Geometry, typename SubGeometry, typename Tag> struct GeometryType<Geometry, SubGeometry, Tag, true> {}; template <typename Geometry, 
typename SubGeometry, typename Tag> struct GeometryType<Geometry const, SubGeometry, Tag, true> {}; template <typename Geometry, typename SubGeometry> struct GeometryType<Geometry, SubGeometry, geometry_collection_tag, false> : concepts::concept_type<SubGeometry>::type { #ifndef DOXYGEN_NO_CONCEPT_MEMBERS BOOST_CONCEPT_USAGE(GeometryType) { Geometry* gc = nullptr; SubGeometry* sg = nullptr; traits::emplace_back<Geometry>::apply(*gc, std::move(*sg)); } #endif // DOXYGEN_NO_CONCEPT_MEMBERS }; template <typename Geometry, typename SubGeometry> struct GeometryType<Geometry const, SubGeometry, geometry_collection_tag, false> : concepts::concept_type<SubGeometry const>::type {}; template <typename Geometry, typename ...SubGeometries> struct GeometryTypesPack {}; template <typename Geometry, typename SubGeometry, typename ...SubGeometries> struct GeometryTypesPack<Geometry, SubGeometry, SubGeometries...> : GeometryTypesPack<Geometry, SubGeometries...> , GeometryType<Geometry, SubGeometry> {}; template <typename Geometry, typename SubGeometriesSequence> struct GeometryTypes; template <typename Geometry, typename ...SubGeometries> struct GeometryTypes<Geometry, util::type_sequence<SubGeometries...>> : GeometryTypesPack<Geometry, SubGeometries...> {}; } // namespace detail template <typename Geometry> struct GeometryCollection : boost::ForwardRangeConcept<Geometry> { #ifndef DOXYGEN_NO_CONCEPT_MEMBERS using sequence_t = typename traits::geometry_types<Geometry>::type; BOOST_CONCEPT_ASSERT( (detail::GeometryTypes<Geometry, sequence_t>) ); BOOST_CONCEPT_USAGE(GeometryCollection) { Geometry* gc = nullptr; traits::clear<Geometry>::apply(*gc); traits::iter_visit<Geometry>::apply([](auto &&) {}, boost::begin(*gc)); } #endif // DOXYGEN_NO_CONCEPT_MEMBERS }; template <typename Geometry> struct ConstGeometryCollection : boost::ForwardRangeConcept<Geometry> { #ifndef DOXYGEN_NO_CONCEPT_MEMBERS using sequence_t = typename traits::geometry_types<Geometry>::type; BOOST_CONCEPT_ASSERT( 
(detail::GeometryTypes<Geometry const, sequence_t>) ); BOOST_CONCEPT_USAGE(ConstGeometryCollection) { Geometry const* gc = nullptr; traits::iter_visit<Geometry>::apply([](auto &&) {}, boost::begin(*gc)); } #endif // DOXYGEN_NO_CONCEPT_MEMBERS }; template <typename Geometry> struct concept_type<Geometry, geometry_collection_tag> { using type = GeometryCollection<Geometry>; }; template <typename Geometry> struct concept_type<Geometry const, geometry_collection_tag> { using type = ConstGeometryCollection<Geometry>; }; }}} // namespace boost::geometry::concepts #endif // BOOST_GEOMETRY_GEOMETRIES_CONCEPTS_GEOMETRY_COLLECTION_CONCEPT_HPP
1,814
453
from .utils.calc_output_and_feature_size import calc_output_and_feature_size
from .utils.instance_normalization import InstanceNormalization
from keras.models import model_from_json, Model
from .utils.sn import ConvSN2D
from keras.optimizers import Adam
from keras import backend as K
from .utils.attention import Attention
from keras.utils import multi_gpu_model
# De-duplicated import list (Reshape and concatenate were listed twice).
from keras.layers import (Conv2D, Lambda, add, AvgPool2D, Activation,
                          UpSampling2D, Input, concatenate, Reshape,
                          LeakyReLU, Flatten)


class DiscriminatorFull():
    """Full Resolution Discriminator.

    Builds a PatchGAN-style discriminator with spectral-normalized convolutions
    and a self-attention layer, compiled and ready for training. The model has
    two outputs: the patch predictions and the concatenated intermediate
    feature maps (used for a feature-matching loss with zero weight here).

    # Arguments
        width: Width of image in pixels
        height: Height of image in pixels
        channels: Channels for the input image and the generated image
        gpus: The number of gpus you will be using.
        learning_rate: Learning rate
        decay_rate: The amount of learning decay for each training update
    """

    def __init__(self,
                 width=256,
                 height=256,
                 channels=3,
                 learning_rate=0.0002,
                 decay_rate=2e-6,
                 gpus=0):

        self.width = width
        self.height = height
        self.channels = channels
        self.gpus = gpus
        self.learning_rate = learning_rate
        self.decay_rate = decay_rate

        # ----------------------
        # Discriminator Fullres
        # ----------------------
        output_size_full_picture, output_size_full_features = calc_output_and_feature_size(self.width, self.height)

        discriminator_input = Input(shape=(self.height, self.width, self.channels,))

        # Downsampling stack of spectral-normalized convolutions; x_1..x_4 are
        # kept so their flattened activations can feed the features output.
        x_1 = ConvSN2D(64, 4, padding='same', strides=2)(discriminator_input)
        x = LeakyReLU(alpha=0.2)(x_1)

        x_2 = ConvSN2D(128, 4, padding='same', strides=2)(x)
        x = LeakyReLU(alpha=0.2)(x_2)
        x_2_att = Attention(128)(x)

        x_3 = ConvSN2D(256, 4, padding='same', strides=2)(x_2_att)
        x = LeakyReLU(alpha=0.2)(x_3)

        x_4 = ConvSN2D(512, 4, padding='same', strides=1)(x)
        x = LeakyReLU(alpha=0.2)(x_4)

        # 1-channel patch predictions, reshaped to a flat vector of patches.
        x = ConvSN2D(1, 4, padding='same', strides=1)(x)
        x = Reshape([output_size_full_picture, 1])(x)

        discriminator_features = concatenate([Flatten()(x_1),
                                              Flatten()(x_2),
                                              Flatten()(x_3),
                                              Flatten()(x_4)], axis=1)
        discriminator_features = Reshape([output_size_full_features, 1])(discriminator_features)

        def zero_loss(y_true, y_pred):
            # Keeps the features output in the graph without contributing to
            # the training objective (its loss weight below is also 0).
            return K.zeros_like(y_true)

        loss_d = ['mse', zero_loss]

        if self.gpus < 2:
            self.model = Model(discriminator_input, [x, discriminator_features])
            self.save_model = self.model
        else:
            # save_model holds the single-device template; model is the
            # multi-GPU replica actually used for training.
            self.save_model = Model(discriminator_input, [x, discriminator_features])
            self.model = multi_gpu_model(self.save_model, gpus=gpus)

        loss_weights_d = [1, 0]
        optimizer = Adam(self.learning_rate, 0.5, decay=self.decay_rate)
        self.model.compile(optimizer=optimizer,
                           loss_weights=loss_weights_d,
                           loss=loss_d)
1,415
1,847
// Copyright (c) 2021 The Orbit Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "DisplayFormats/DisplayFormats.h" #include <absl/strings/str_format.h> #include <absl/time/time.h> namespace orbit_display_formats { std::string GetDisplaySize(uint64_t size_bytes) { constexpr double kBytesInKb = 1024.0; constexpr double kBytesInMb = 1024.0 * kBytesInKb; constexpr double kBytesInGb = 1024.0 * kBytesInMb; constexpr double kBytesInTb = 1024.0 * kBytesInGb; if (size_bytes < kBytesInKb) return absl::StrFormat("%u B", size_bytes); if (size_bytes < kBytesInMb) return absl::StrFormat("%.2f KB", size_bytes / kBytesInKb); if (size_bytes < kBytesInGb) return absl::StrFormat("%.2f MB", size_bytes / kBytesInMb); if (size_bytes < kBytesInTb) return absl::StrFormat("%.2f GB", size_bytes / kBytesInGb); return absl::StrFormat("%.2f TB", size_bytes / kBytesInTb); } std::string GetDisplayTime(absl::Duration duration) { if (duration < absl::Microseconds(1)) { return absl::StrFormat("%.3f ns", absl::ToDoubleNanoseconds(duration)); } if (duration < absl::Milliseconds(1)) { return absl::StrFormat("%.3f us", absl::ToDoubleMicroseconds(duration)); } if (duration < absl::Seconds(1)) { return absl::StrFormat("%.3f ms", absl::ToDoubleMilliseconds(duration)); } if (duration < absl::Minutes(1)) { return absl::StrFormat("%.3f s", absl::ToDoubleSeconds(duration)); } if (duration < absl::Hours(1)) { return absl::StrFormat("%.3f min", absl::ToDoubleMinutes(duration)); } constexpr double kHoursInOneDay = 24; if (duration < absl::Hours(kHoursInOneDay)) { return absl::StrFormat("%.3f h", absl::ToDoubleHours(duration)); } return absl::StrFormat("%.3f days", absl::ToDoubleHours(duration) / kHoursInOneDay); } } // namespace orbit_display_formats
696
348
<filename>docs/data/t2/054/54417.json {"nom":"Parey-Saint-Césaire","dpt":"Meurthe-et-Moselle","inscrits":188,"abs":26,"votants":162,"blancs":11,"nuls":5,"exp":146,"res":[{"panneau":"2","voix":80},{"panneau":"1","voix":66}]}
99
345
<filename>huntbugs/src/main/java/one/util/huntbugs/detect/FieldAccess.java /* * Copyright 2016 HuntBugs contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package one.util.huntbugs.detect; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import com.strobel.assembler.metadata.FieldDefinition; import com.strobel.assembler.metadata.FieldReference; import com.strobel.assembler.metadata.Flags; import com.strobel.assembler.metadata.JvmType; import com.strobel.assembler.metadata.MethodDefinition; import com.strobel.assembler.metadata.MethodReference; import com.strobel.assembler.metadata.ParameterDefinition; import com.strobel.assembler.metadata.TypeDefinition; import com.strobel.core.ArrayUtilities; import com.strobel.decompiler.ast.AstCode; import com.strobel.decompiler.ast.Expression; import one.util.huntbugs.db.FieldStats; import one.util.huntbugs.db.Mutability; import one.util.huntbugs.flow.Inf; import one.util.huntbugs.flow.ValuesFlow; import one.util.huntbugs.registry.FieldContext; import one.util.huntbugs.registry.MethodContext; import one.util.huntbugs.registry.anno.AstNodes; import one.util.huntbugs.registry.anno.AstVisitor; import one.util.huntbugs.registry.anno.FieldVisitor; import one.util.huntbugs.registry.anno.MethodVisitor; import one.util.huntbugs.registry.anno.VisitOrder; import one.util.huntbugs.registry.anno.WarningDefinition; import 
one.util.huntbugs.util.AccessLevel; import one.util.huntbugs.util.Annotations; import one.util.huntbugs.util.Exprs; import one.util.huntbugs.util.NodeChain; import one.util.huntbugs.util.Nodes; import one.util.huntbugs.util.Types; import one.util.huntbugs.warning.Roles; import one.util.huntbugs.warning.WarningAnnotation; import one.util.huntbugs.warning.WarningAnnotation.Location; /** * @author lan * */ @WarningDefinition(category="RedundantCode", name="UnusedPrivateField", maxScore=45) @WarningDefinition(category="RedundantCode", name="UnusedPublicField", maxScore=38) @WarningDefinition(category="RedundantCode", name="UnreadPrivateField", maxScore=48) @WarningDefinition(category="RedundantCode", name="UnreadPublicField", maxScore=37) @WarningDefinition(category="Correctness", name="UnwrittenPrivateField", maxScore=60) @WarningDefinition(category="Correctness", name="UnwrittenPublicField", maxScore=45) @WarningDefinition(category="Correctness", name="FieldIsAlwaysNull", maxScore=55) @WarningDefinition(category="Performance", name="FieldShouldBeStatic", maxScore=50) @WarningDefinition(category="Performance", name="FieldUsedInSingleMethod", maxScore=55) @WarningDefinition(category="MaliciousCode", name="StaticFieldShouldBeFinal", maxScore=55) @WarningDefinition(category="MaliciousCode", name="StaticFieldShouldBeFinalAndPackagePrivate", maxScore=55) @WarningDefinition(category="MaliciousCode", name="StaticFieldCannotBeFinal", maxScore=35) @WarningDefinition(category="MaliciousCode", name="StaticFieldMutableArray", maxScore=40) @WarningDefinition(category="MaliciousCode", name="StaticFieldMutableCollection", maxScore=45) @WarningDefinition(category="MaliciousCode", name="StaticFieldMutable", maxScore=40) @WarningDefinition(category="MaliciousCode", name="StaticFieldShouldBeRefactoredToFinal", maxScore=40) @WarningDefinition(category="MaliciousCode", name="StaticFieldShouldBePackagePrivate", maxScore=55) @WarningDefinition(category="MaliciousCode", 
name="StaticFieldShouldBeNonInterfacePackagePrivate", maxScore=30) @WarningDefinition(category = "MaliciousCode", name = "ExposeMutableFieldViaReturnValue", maxScore = 35) @WarningDefinition(category = "MaliciousCode", name = "ExposeMutableStaticFieldViaReturnValue", maxScore = 50) @WarningDefinition(category = "MaliciousCode", name = "MutableEnumField", maxScore = 55) public class FieldAccess { private static final Set<String> MUTABLE_COLLECTION_CLASSES = new HashSet<>(Arrays.asList("java/util/ArrayList", "java/util/HashSet", "java/util/HashMap", "java/util/Hashtable", "java/util/IdentityHashMap", "java/util/LinkedHashSet", "java/util/LinkedList", "java/util/LinkedHashMap", "java/util/TreeSet", "java/util/TreeMap", "java/util/Properties")); static class MethodLocation { MethodDefinition md; Location loc; public MethodLocation(MethodDefinition md, Location loc) { this.md = md; this.loc = loc; } public WarningAnnotation<?>[] getAnnotations() { WarningAnnotation<?>[] anno = {Roles.METHOD.create(md), Roles.LOCATION.create(loc)}; return anno; } } static class FieldRecord { MethodLocation firstWrite, firstRead, expose; Object constant; int numWrites; boolean mutable; boolean array; boolean collection; boolean usedInSingleMethod = true; boolean hasSimpleSetter; } private final Map<String, FieldRecord> fields = new HashMap<>(); private boolean fullyAnalyzed = true; @AstVisitor(nodes=AstNodes.EXPRESSIONS) public void visitCode(Expression expr, NodeChain nc, MethodContext mc, MethodDefinition md, TypeDefinition td, Mutability m) { if(expr.getCode() == AstCode.PutField || expr.getCode() == AstCode.PutStatic || expr.getCode() == AstCode.GetField || expr.getCode() == AstCode.GetStatic) { FieldDefinition fd = ((FieldReference) expr.getOperand()).resolve(); if(fd != null && !fd.isSynthetic() && fd.getDeclaringType().isEquivalentTo(td)) { FieldRecord fieldRecord = fields.computeIfAbsent(fd.getName(), n -> new FieldRecord()); if(Nodes.isFieldRead(expr)) { if(fieldRecord.firstRead 
== null) { fieldRecord.firstRead = new MethodLocation(md, mc.getLocation(expr)); } if (Inf.BACKLINK.findTransitiveUsages(expr, true).anyMatch( e -> e.getCode() == AstCode.Return && (e.getArguments().get(0).getCode() == AstCode.GetField || !ValuesFlow.hasUpdatedSource(e .getArguments().get(0))))) { fieldRecord.expose = new MethodLocation(md, mc.getLocation(expr)); } } else { Expression value = Exprs.getChild(expr, expr.getArguments().size()-1); if(fieldRecord.firstWrite == null) { fieldRecord.firstWrite = new MethodLocation(md, mc.getLocation(expr)); fieldRecord.constant = Nodes.getConstant(value); } else { if(fieldRecord.constant != null) { Object constant = Nodes.getConstant(value); if(!Objects.equals(fieldRecord.constant, constant)) fieldRecord.constant = null; } } if (md.isPublic() && nc.getParent() == null && nc.getRoot().getBody().size() == 1 && (expr .getCode() == AstCode.PutField ^ md.isStatic()) && value .getOperand() instanceof ParameterDefinition) { fieldRecord.hasSimpleSetter = true; } if(value.getCode() == AstCode.InitObject) { String typeName = ((MethodReference) value.getOperand()).getDeclaringType().getInternalName(); if(MUTABLE_COLLECTION_CLASSES.contains(typeName)) { fieldRecord.mutable = true; fieldRecord.collection = true; } } else if(value.getCode() == AstCode.InvokeStatic) { MethodReference mr = (MethodReference) value.getOperand(); if (isMutableCollectionFactory(value, mr)) { fieldRecord.mutable = true; fieldRecord.collection = true; } } if(fd.getFieldType().isArray() || value.getInferredType() != null && value.getInferredType().isArray()) { fieldRecord.array = true; if (!isEmptyArray(value)) { fieldRecord.mutable = true; } } if(m.isKnownMutable(fd.getFieldType())) { fieldRecord.mutable = true; } fieldRecord.numWrites++; } if(fieldRecord.usedInSingleMethod) { if (md.isTypeInitializer()) { fieldRecord.usedInSingleMethod = false; } if (Nodes.isFieldRead(expr) && ValuesFlow.getSource(expr) == expr) { fieldRecord.usedInSingleMethod = false; } 
if((expr.getCode() == AstCode.PutField || expr.getCode() == AstCode.GetField) && (md.isStatic() || !Exprs.isThis(Exprs.getChild(expr, 0)))) { fieldRecord.usedInSingleMethod = false; } if(fieldRecord.firstWrite != null && fieldRecord.firstWrite.md != md || fieldRecord.firstRead != null && fieldRecord.firstRead.md != md) { fieldRecord.usedInSingleMethod = false; } } } } } private boolean isMutableCollectionFactory(Expression value, MethodReference mr) { if (mr.getName().equals("asList") && mr.getDeclaringType().getInternalName().equals("java/util/Arrays") && value.getArguments().size() == 1 && !isEmptyArray(Exprs.getChild(value, 0))) return true; if ((mr.getName().equals("newArrayList") || mr.getName().equals("newLinkedList")) && mr.getDeclaringType().getInternalName().equals("com/google/common/collect/Lists")) return true; if ((mr.getName().equals("newHashSet") || mr.getName().equals("newTreeSet")) && mr.getDeclaringType().getInternalName().equals("com/google/common/collect/Sets")) return true; return false; } private static boolean isEmptyArray(Expression value) { return value.getCode() == AstCode.NewArray && Integer.valueOf(0).equals(Nodes.getConstant(value.getArguments().get(0))); } @MethodVisitor(order=VisitOrder.AFTER) public void checkAnalyzed(MethodContext mc) { fullyAnalyzed &= mc.isFullyAnalyzed(); } @FieldVisitor public void visit(FieldContext fc, FieldDefinition fd, TypeDefinition td, FieldStats fs) { if(fd.isSynthetic() || fd.isEnumConstant()) return; int flags = fs.getFlags(fd); if(Flags.testAny(flags, FieldStats.UNRESOLVED) || Annotations.hasAnnotation(fd, false)) { return; } boolean isConstantType = fd.getFieldType().isPrimitive() || Types.isString(fd.getFieldType()); if(!Flags.testAny(flags, FieldStats.ACCESS)) { if(fd.isStatic() && fd.isFinal() && isConstantType) return; // Autogenerated by javacc if(fd.getName().equals("lengthOfMatch") && td.getName().endsWith("TokenManager")) return; fc.report(fd.isPublic() || fd.isProtected() ? 
"UnusedPublicField" : "UnusedPrivateField", fd.isPublic() ? 5 : 0); return; } FieldRecord fieldRecord = fields.get(fd.getName()); if (fieldRecord != null && !fd.isStatic() && fd.isFinal() && fieldRecord.constant != null) { fc.report("FieldShouldBeStatic", 0, fieldRecord.firstWrite.getAnnotations()); return; } if(!Flags.testAny(flags, FieldStats.READ)) { // Autogenerated by javacc if(fd.getName().startsWith("jj") && td.getName().endsWith("TokenManager")) return; if(fd.getName().equals("errorCode") && td.getSimpleName().equals("TokenMgrError")) return; int priority = 0; String warningType = fd.isPublic() || fd.isProtected() ? "UnreadPublicField" : "UnreadPrivateField"; if(fd.isPublic()) { priority += 5; if(fd.isFinal()) { priority += 5; if(fd.isStatic()) { priority += 10; } } } if(!fd.isStatic() && !fd.isPublic() && fd.getName().startsWith("ref") && fd.getFieldType().getSimpleType() == JvmType.Object) { // probably field is used to keep strong reference priority += 10; } fc.report(warningType, priority, getWriteAnnotations(fieldRecord)); } if(checkWrite(fc, fd, td, fieldRecord, flags, isConstantType)) return; if(checkNull(fc, fd, td, fieldRecord, flags)) return; checkSingleMethod(fc, fd, fieldRecord, flags); if(td.isEnum() && fieldRecord != null && !fd.isStatic()) { boolean mutable = fieldRecord.mutable; if(fd.isPublic() && (!fd.isFinal() || mutable)) { fc.report("MutableEnumField", 0, getWriteAnnotations(fieldRecord)); return; } } if(fd.isStatic() && (fd.isPublic() || fd.isProtected()) && (td.isPublic() || td.isProtected())) { boolean mutable = fieldRecord != null && fieldRecord.mutable; if(!fd.isFinal() && Flags.testAny(flags, FieldStats.WRITE_CONSTRUCTOR) && !Flags.testAny(flags, FieldStats.WRITE_CLASS | FieldStats.WRITE_PACKAGE | FieldStats.WRITE_OUTSIDE)) { String type = "StaticFieldShouldBeRefactoredToFinal"; if(fieldRecord != null && fieldRecord.numWrites == 1) { type = "StaticFieldShouldBeFinal"; if(mutable && !Flags.testAny(flags, FieldStats.READ_OUTSIDE)) { 
type = "StaticFieldShouldBeFinalAndPackagePrivate"; } } fc.report(type, AccessLevel.of(fd).select(0, 10, 100, 100), getWriteAnnotations(fieldRecord)); return; } if(mutable || !fd.isFinal()) { String type = null; WarningAnnotation<?>[] anno = ArrayUtilities.append(getWriteAnnotations(fieldRecord), Roles.FIELD_TYPE.create(fd.getFieldType())); if(!Flags.testAny(flags, FieldStats.WRITE_OUTSIDE | FieldStats.READ_OUTSIDE)) { type = td.isInterface() ? "StaticFieldShouldBeNonInterfacePackagePrivate" : "StaticFieldShouldBePackagePrivate"; } else if(!fd.isFinal()) { type = "StaticFieldCannotBeFinal"; } else if(mutable && fieldRecord.array) { type = "StaticFieldMutableArray"; } else if(mutable && fieldRecord.collection) { type = "StaticFieldMutableCollection"; } else if(mutable) { type = "StaticFieldMutable"; } if(type != null) { fc.report(type, AccessLevel.of(fd).select(0, 10, 100, 100), anno); return; } } } if(fieldRecord != null && (td.isPublic() || td.isProtected()) && (fd.isPrivate() || fd.isPackagePrivate())) { MethodLocation expose = fieldRecord.expose; if(fieldRecord.mutable && expose != null && (expose.md.isPublic() || expose.md.isProtected())) { String type = fd.isStatic() ? "ExposeMutableStaticFieldViaReturnValue" : "ExposeMutableFieldViaReturnValue"; int priority = AccessLevel.of(expose.md).select(0, 10, 100, 100); if(fieldRecord.hasSimpleSetter) priority += 15; else if(!fd.isFinal()) priority += 3; fc.report(type, priority, ArrayUtilities.append(expose .getAnnotations(), Roles.FIELD_TYPE.create(fd.getFieldType()))); } } } private boolean checkWrite(FieldContext fc, FieldDefinition fd, TypeDefinition td, FieldRecord fieldRecord, int flags, boolean isConstantType) { if(!Flags.testAny(flags, FieldStats.WRITE)) { if(fd.isStatic() && fd.isFinal() && isConstantType) return false; WarningAnnotation<?>[] anno = {}; int priority = 0; String warningType = fd.isPublic() || fd.isProtected() ? 
"UnwrittenPublicField" : "UnwrittenPrivateField"; if (fieldRecord != null && fieldRecord.firstRead != null) { anno = fieldRecord.firstRead.getAnnotations(); } if(fd.isPublic()) { priority += 5; } priority += tweakForSerialization(fd, td); if(fd.getFieldType().getSimpleType() == JvmType.Boolean) { priority += 5; } if(fd.getName().equalsIgnoreCase("debug")) { priority += 5; } fc.report(warningType, priority, anno); return true; } return false; } private int tweakForSerialization(FieldDefinition fd, TypeDefinition td) { // Probably field is kept for backwards serialization compatibility if(!fd.isStatic() && Types.isInstance(td, "java/io/Serializable")) { return 10; } if(Flags.testAny(fd.getFlags(), Flags.TRANSIENT)) { return 30; } return 0; } private boolean checkNull(FieldContext fc, FieldDefinition fd, TypeDefinition td, FieldRecord fieldRecord, int flags) { if(!Flags.testAny(flags, FieldStats.WRITE_NONNULL) && Flags.testAny(flags, FieldStats.READ)) { int priority = 0; if(fd.isFinal() && fd.isStatic()) { priority += 20; String lcName = fd.getName().toLowerCase(Locale.ENGLISH); if (lcName.contains("null") || lcName.contains("zero") || lcName.contains("empty")) { priority += 15; } } else if(fd.isPublic()) { priority += 10; } priority += tweakForSerialization(fd, td); fc.report("FieldIsAlwaysNull", priority, getWriteAnnotations(fieldRecord)); return true; } return false; } private void checkSingleMethod(FieldContext fc, FieldDefinition fd, FieldRecord fieldRecord, int flags) { if (fullyAnalyzed && Flags.testAny(flags, FieldStats.READ) && !Flags.testAny(flags, FieldStats.READ_PACKAGE | FieldStats.READ_OUTSIDE | FieldStats.WRITE_PACKAGE | FieldStats.WRITE_OUTSIDE) && fieldRecord != null && fieldRecord.usedInSingleMethod && fieldRecord.firstWrite != null) { // javacc-generated if(fd.getName().startsWith("jj_") && fd.getDeclaringType().getSimpleName().endsWith("Parser") && fieldRecord.firstWrite.md.getName().equals("generateParseException")) return; int priority = 
AccessLevel.of(fd).select(10, 3, 1, 0); if(!fd.isStatic()) priority += 5; if(fieldRecord.firstWrite.md.isConstructor()) priority += 5; if(fd.getFieldType().isPrimitive()) priority += 3; fc.report("FieldUsedInSingleMethod", priority, fieldRecord.firstWrite.getAnnotations()); } } private WarningAnnotation<?>[] getWriteAnnotations(FieldRecord fieldRecord) { if (fieldRecord != null && fieldRecord.firstWrite != null) { return fieldRecord.firstWrite.getAnnotations(); } WarningAnnotation<?>[] anno = {}; return anno; } }
9,422
1,383
// ============================================================================= // PROJECT CHRONO - http://projectchrono.org // // Copyright (c) 2014 projectchrono.org // All right reserved. // // Use of this source code is governed by a BSD-style license that can be found // in the LICENSE file at the top level of the distribution and at // http://projectchrono.org/license-chrono.txt. // // ============================================================================= // Authors: <NAME>, <NAME> // ============================================================================= // // Base class for the Sedan vehicle models // // ============================================================================= #ifndef FEDA_VEHICLE_H #define FEDA_VEHICLE_H #include <vector> #include "chrono/core/ChCoordsys.h" #include "chrono/physics/ChMaterialSurface.h" #include "chrono/physics/ChSystem.h" #include "chrono_vehicle/wheeled_vehicle/ChWheeledVehicle.h" #include "chrono_models/ChApiModels.h" #include "chrono_models/vehicle/ChVehicleModelDefs.h" namespace chrono { namespace vehicle { namespace feda { /// @addtogroup vehicle_models_feda /// @{ /// FEDA vehicle system. 
class CH_MODELS_API FEDA_Vehicle : public ChWheeledVehicle { public: FEDA_Vehicle(const bool fixed = false, BrakeType brake_type = BrakeType::SIMPLE, ChContactMethod contact_method = ChContactMethod::NSC, CollisionType chassis_collision_type = CollisionType::NONE, int ride_height = 1, int damperMode = 2); FEDA_Vehicle(ChSystem* system, const bool fixed = false, BrakeType brake_type = BrakeType::SIMPLE, CollisionType chassis_collision_type = CollisionType::NONE, int ride_height = 1, int damperMode = 2); ~FEDA_Vehicle(); virtual int GetNumberAxles() const override { return 2; } virtual double GetWheelbase() const override { return 3.302; } virtual double GetMinTurningRadius() const override { return 7.7; } virtual double GetMaxSteeringAngle() const override { return 27.05 * CH_C_DEG_TO_RAD; } void SetInitWheelAngVel(const std::vector<double>& omega) { assert(omega.size() == 4); m_omega = omega; } double GetSpringForce(int axle, VehicleSide side) const; double GetSpringLength(int axle, VehicleSide side) const; double GetSpringDeformation(int axle, VehicleSide side) const; double GetShockForce(int axle, VehicleSide side) const; double GetShockLength(int axle, VehicleSide side) const; double GetShockVelocity(int axle, VehicleSide side) const; void SetRideHeight(int theConfig) { m_ride_height = ChClamp(theConfig, 0, 2); } virtual void Initialize(const ChCoordsys<>& chassisPos, double chassisFwdVel = 0) override; // Log debugging information void LogHardpointLocations(); /// suspension hardpoints at design void DebugLog(int what); /// shock forces and lengths, constraints, etc. private: void Create(bool fixed, BrakeType brake_type, CollisionType chassis_collision_type); std::vector<double> m_omega; int m_ride_height; int m_damper_mode; }; /// @} vehicle_models_feda } // namespace feda } // end namespace vehicle } // end namespace chrono #endif
1,163
513
#include "amulet.h" #define AM_AUDIO_NODE_FLAG_MARK ((uint32_t)1) #define AM_AUDIO_NODE_FLAG_CHILDREN_DIRTY ((uint32_t)2) #define AM_AUDIO_NODE_FLAG_PENDING_PAUSE ((uint32_t)4) #define AM_AUDIO_NODE_MASK_LIVE_PAUSE_STATE ((uint32_t)(8+16)) #define node_marked(node) (node->flags & AM_AUDIO_NODE_FLAG_MARK) #define mark_node(node) node->flags |= AM_AUDIO_NODE_FLAG_MARK #define unmark_node(node) node->flags &= ~AM_AUDIO_NODE_FLAG_MARK #define children_dirty(node) (node->flags & AM_AUDIO_NODE_FLAG_CHILDREN_DIRTY) #define set_children_dirty(node) node->flags |= AM_AUDIO_NODE_FLAG_CHILDREN_DIRTY #define clear_children_dirty(node) node->flags &= ~AM_AUDIO_NODE_FLAG_CHILDREN_DIRTY #define LIVE_PAUSE_STATE_UNPAUSED (0 << 3) #define LIVE_PAUSE_STATE_BEGIN (1 << 3) #define LIVE_PAUSE_STATE_PAUSED (2 << 3) #define LIVE_PAUSE_STATE_END (3 << 3) #define live_pause_state(node) (node->flags & AM_AUDIO_NODE_MASK_LIVE_PAUSE_STATE) #define set_live_pause_state(node, state) {node->flags &= ~AM_AUDIO_NODE_MASK_LIVE_PAUSE_STATE; node->flags |= state;} #define pending_pause(node) (node->flags & AM_AUDIO_NODE_FLAG_PENDING_PAUSE) #define set_pending_pause(node) node->flags |= AM_AUDIO_NODE_FLAG_PENDING_PAUSE #define clear_pending_pause(node) node->flags &= ~AM_AUDIO_NODE_FLAG_PENDING_PAUSE static am_audio_context audio_context; // Audio Bus // all buffers in the pool have the same size // current_pool_bufsize is the size of each buffer in the // pool, in bytes. 
static std::vector<void*> buffer_pool; static int current_pool_bufsize = 0; static unsigned int bufpool_top = 0; static double audio_time_accum = 0.0; static void clear_buffer_pool() { for (unsigned int i = 0; i < buffer_pool.size(); i++) { free(buffer_pool[i]); } buffer_pool.clear(); bufpool_top = 0; current_pool_bufsize = 0; } static void* push_buffer(int size) { if (size != current_pool_bufsize) { // size of audio buffer has changed, clear pool clear_buffer_pool(); current_pool_bufsize = size; } am_always_assert(bufpool_top <= buffer_pool.size()); if (bufpool_top == buffer_pool.size()) { buffer_pool.push_back(malloc(size)); } void *buf = buffer_pool[bufpool_top++]; memset(buf, 0, size); return buf; } static void pop_buffer(void *buf) { bufpool_top--; assert(bufpool_top >= 0); assert(buffer_pool.size() > bufpool_top); assert(buffer_pool[bufpool_top] == buf); } static void setup_channels(am_audio_bus *bus) { for (int i = 0; i < bus->num_channels; i++) { bus->channel_data[i] = bus->buffer + i * bus->num_samples; } } am_audio_bus::am_audio_bus(int nchannels, int nsamples, float* buf) { num_channels = nchannels; num_samples = nsamples; buffer = buf; owns_buffer = false; setup_channels(this); } am_audio_bus::am_audio_bus(am_audio_bus *bus) { num_channels = bus->num_channels; num_samples = bus->num_samples; buffer = (float*)push_buffer(num_channels * num_samples * sizeof(float)); owns_buffer = true; setup_channels(this); } am_audio_bus::~am_audio_bus() { if (owns_buffer) { pop_buffer(buffer); } } // Audio Param static float linear_incf(am_audio_param<float> param, int samples) { return (param.target_value - param.current_value) / (float)samples; } // Audio Node am_audio_node::am_audio_node() { pending_children.owner = this; live_children.owner = this; last_sync = 0; last_render = 0; flags = 0; recursion_limit = 0; } void am_audio_node::sync_params() { } void am_audio_node::post_render(am_audio_context *context, int num_samples) { } void 
am_audio_node::render_audio(am_audio_context *context, am_audio_bus *bus) { render_children(context, bus); } bool am_audio_node::finished() { for (int i = 0; i < pending_children.size; i++) { am_audio_node_child *child = &pending_children.arr[i]; if (!child->child->finished()) return false; } return true; } static void mix_bus(am_audio_bus * AM_RESTRICT dest, am_audio_bus * AM_RESTRICT src) { for (int c = 0; c < am_min(dest->num_channels, src->num_channels); c++) { int n = am_min(dest->num_samples, src->num_samples); float * AM_RESTRICT dest_data = dest->channel_data[c]; float * AM_RESTRICT src_data = src->channel_data[c]; for (int s = 0; s < n; s++) { dest_data[s] += src_data[s]; } } } static void apply_fadein(am_audio_bus *bus) { int n = am_conf_audio_interpolate_samples; float inc = 1.0f / (float)n; for (int c = 0; c < bus->num_channels; c++) { float *data = bus->channel_data[c]; float scale = 0.0f; for (int s = 0; s < n; s++) { data[s] *= scale; scale += inc; } } } static void apply_fadeout(am_audio_bus *bus) { int n = am_conf_audio_interpolate_samples; int start = bus->num_samples - n; int end = bus->num_samples; float inc = -1.0f / (float)n; for (int c = 0; c < bus->num_channels; c++) { float *data = bus->channel_data[c]; float scale = 1.0f; for (int s = start; s < end; s++) { data[s] *= scale; scale += inc; } } } void am_audio_node::render_children(am_audio_context *context, am_audio_bus *bus) { if (recursion_limit < 0) return; recursion_limit--; for (int i = 0; i < live_children.size; i++) { am_audio_node_child *child = &live_children.arr[i]; int pause_state = live_pause_state(child->child); am_audio_node_child_state child_state = child->state; if (child_state == AM_AUDIO_NODE_CHILD_STATE_OLD && pause_state == LIVE_PAUSE_STATE_UNPAUSED) { child->child->render_audio(context, bus); } else if (child_state == AM_AUDIO_NODE_CHILD_STATE_DONE || pause_state == LIVE_PAUSE_STATE_PAUSED // also ignore if paused and added at same time... 
|| (pause_state == LIVE_PAUSE_STATE_BEGIN && child->state == AM_AUDIO_NODE_CHILD_STATE_NEW) // ...or if unpaused and removed at same time || (pause_state == LIVE_PAUSE_STATE_END && child->state == AM_AUDIO_NODE_CHILD_STATE_REMOVED)) { // ignore } else { // a fadein or fadeout is required, because // the child was recently added/removed or // paused/unpaused. am_audio_bus tmp(bus); child->child->render_audio(context, &tmp); if (child_state == AM_AUDIO_NODE_CHILD_STATE_NEW || pause_state == LIVE_PAUSE_STATE_END) { apply_fadein(&tmp); } else if (child_state == AM_AUDIO_NODE_CHILD_STATE_REMOVED || pause_state == LIVE_PAUSE_STATE_BEGIN) { apply_fadeout(&tmp); } else { assert(false); } mix_bus(bus, &tmp); } } recursion_limit++; } // Gain Node am_gain_node::am_gain_node() : gain(0) { gain.pending_value = 1.0f; } void am_gain_node::sync_params() { gain.update_target(); } void am_gain_node::post_render(am_audio_context *context, int num_samples) { gain.update_current(); } void am_gain_node::render_audio(am_audio_context *context, am_audio_bus *bus) { am_audio_bus tmp(bus); render_children(context, &tmp); int num_channels = bus->num_channels; int num_samples = bus->num_samples; for (int s = 0; s < num_samples; s++) { for (int c = 0; c < num_channels; c++) { bus->channel_data[c][s] += tmp.channel_data[c][s] * gain.interpolate_linear(s); } } } // Biquad filters. 
Most code here adapted from http://www.chromium.org/blink am_biquad_filter_node::am_biquad_filter_node() { for (int c = 0; c < AM_MAX_CHANNELS; c++) { memset(&current_state[c], 0, sizeof(am_biquad_filter_state)); next_state[c] = current_state[c]; } } void am_biquad_filter_node::post_render(am_audio_context *context, int num_samples) { for (int i = 0; i < AM_MAX_CHANNELS; i++) { current_state[i] = next_state[i]; } } void am_biquad_filter_node::render_audio(am_audio_context *context, am_audio_bus *bus) { am_audio_bus tmp(bus); render_children(context, &tmp); int num_channels = bus->num_channels; int num_samples = bus->num_samples; for (int c = 0; c < num_channels; c++) { float *source = &tmp.channel_data[c][0]; float *dest = &bus->channel_data[c][0]; int n = num_samples; double x1 = current_state[c].x1; double x2 = current_state[c].x2; double y1 = current_state[c].y1; double y2 = current_state[c].y2; double b0 = coeffs.b0; double b1 = coeffs.b1; double b2 = coeffs.b2; double a1 = coeffs.a1; double a2 = coeffs.a2; while (n--) { float x = *source++; float y = b0*x + b1*x1 + b2*x2 - a1*y1 - a2*y2; *dest++ += y; x2 = x1; x1 = x; y2 = y1; y1 = y; } next_state[c].x1 = x1; next_state[c].x2 = x2; next_state[c].y1 = y1; next_state[c].y2 = y2; } } void am_biquad_filter_node::set_lowpass_params(double cutoff, double resonance) { // normalize cutoff 0->1 double nyquist = (double)am_conf_audio_sample_rate * 0.5; cutoff = cutoff / nyquist; cutoff = am_clamp(cutoff, 0.0, 1.0); if (cutoff == 1) { // When cutoff is 1, the z-transform is 1. 
set_normalized_coeffs(1, 0, 0, 1, 0, 0); } else if (cutoff > 0) { // Compute biquad coefficients for lowpass filter resonance = am_max(0.0, resonance); // can't go negative double g = pow(10.0, 0.05 * resonance); double d = sqrt((4 - sqrt(16 - 16 / (g * g))) / 2); double theta = AM_PI * cutoff; double sn = 0.5 * d * sin(theta); double beta = 0.5 * (1 - sn) / (1 + sn); double gamma = (0.5 + beta) * cos(theta); double alpha = 0.25 * (0.5 + beta - gamma); double b0 = 2 * alpha; double b1 = 2 * 2 * alpha; double b2 = 2 * alpha; double a1 = 2 * -gamma; double a2 = 2 * beta; set_normalized_coeffs(b0, b1, b2, 1, a1, a2); } else { // When cutoff is zero, nothing gets through the filter, so set // coefficients up correctly. set_normalized_coeffs(0, 0, 0, 1, 0, 0); } } void am_biquad_filter_node::set_highpass_params(double cutoff, double resonance) { // normalize cutoff 0->1 double nyquist = (double)am_conf_audio_sample_rate * 0.5; cutoff = cutoff / nyquist; cutoff = am_clamp(cutoff, 0.0, 1.0); if (cutoff == 1) { // The z-transform is 0. set_normalized_coeffs(0, 0, 0, 1, 0, 0); } else if (cutoff > 0) { // Compute biquad coefficients for highpass filter resonance = am_max(0.0, resonance); // can't go negative double g = pow(10.0, 0.05 * resonance); double d = sqrt((4 - sqrt(16 - 16 / (g * g))) / 2); double theta = AM_PI * cutoff; double sn = 0.5 * d * sin(theta); double beta = 0.5 * (1 - sn) / (1 + sn); double gamma = (0.5 + beta) * cos(theta); double alpha = 0.25 * (0.5 + beta + gamma); double b0 = 2 * alpha; double b1 = 2 * -2 * alpha; double b2 = 2 * alpha; double a1 = 2 * -gamma; double a2 = 2 * beta; set_normalized_coeffs(b0, b1, b2, 1, a1, a2); } else { // When cutoff is zero, we need to be careful because the above // gives a quadratic divided by the same quadratic, with poles // and zeros on the unit circle in the same place. When cutoff // is zero, the z-transform is 1. 
set_normalized_coeffs(1, 0, 0, 1, 0, 0); } } void am_biquad_filter_node::set_normalized_coeffs(double b0, double b1, double b2, double a0, double a1, double a2) { double a0_inv = 1.0 / a0; coeffs.b0 = b0 * a0_inv; coeffs.b1 = b1 * a0_inv; coeffs.b2 = b2 * a0_inv; coeffs.a1 = a1 * a0_inv; coeffs.a2 = a2 * a0_inv; } am_lowpass_filter_node::am_lowpass_filter_node() : cutoff(0), resonance(0) { } void am_lowpass_filter_node::sync_params() { cutoff.update_target(); resonance.update_target(); if (cutoff.current_value != cutoff.target_value || resonance.current_value != resonance.target_value) { cutoff.update_current(); resonance.update_current(); set_lowpass_params(cutoff.current_value, resonance.current_value); } } am_highpass_filter_node::am_highpass_filter_node() : cutoff(0), resonance(0) { } void am_highpass_filter_node::sync_params() { cutoff.update_target(); resonance.update_target(); if (cutoff.current_value != cutoff.target_value || resonance.current_value != resonance.target_value) { cutoff.update_current(); resonance.update_current(); set_highpass_params(cutoff.current_value, resonance.current_value); } } // Audio track node am_audio_track_node::am_audio_track_node() : playback_speed(1.0f), gain(1.0f) { audio_buffer = NULL; audio_buffer_ref = LUA_NOREF; current_position = 0.0; next_position = 0.0; reset_position = 0.0; loop = false; needs_reset = false; done_server = false; done_client = false; } void am_audio_track_node::sync_params() { playback_speed.update_target(); gain.update_target(); if (needs_reset) { current_position = reset_position; next_position = reset_position; needs_reset = false; done_server = false; } done_client = done_server; } static bool track_resample_required(am_audio_track_node *node) { return (node->audio_buffer->sample_rate != am_conf_audio_sample_rate) || (node->playback_speed.current_value != node->playback_speed.target_value) || (fabs(node->playback_speed.current_value - 1.0f) > 0.00001f); } static bool is_too_slow(float 
playback_speed) {
    // Treat a near-zero playback speed as stopped.
    return playback_speed < 0.00001f;
}

// Mix this track's buffer into `bus`, either directly (common case) or
// via linear-interpolating resampling when rates/speeds differ.
// Channel data in the source buffer is stored planar (channel-major).
void am_audio_track_node::render_audio(am_audio_context *context, am_audio_bus *bus) {
    if (done_server) return;
    if (is_too_slow(playback_speed.current_value)) return;
    if (audio_buffer->buffer->data == NULL) return;
    am_audio_bus tmp(bus);
    int buf_num_channels = audio_buffer->num_channels;
    int buf_num_samples = audio_buffer->buffer->size / (buf_num_channels * sizeof(float));
    int bus_num_samples = tmp.num_samples;
    int bus_num_channels = tmp.num_channels;
    if (!track_resample_required(this)) {
        // optimise common case where no resampling is required
        for (int c = 0; c < bus_num_channels; c++) {
            float *bus_data = tmp.channel_data[c];
            float *buf_data = ((float*)audio_buffer->buffer->data) + c * buf_num_samples;
            if (c < buf_num_channels) {
                int buf_pos = (int)floor(current_position);
                assert(buf_pos < buf_num_samples);
                for (int bus_pos = 0; bus_pos < bus_num_samples; bus_pos++) {
                    bus_data[bus_pos] += buf_data[buf_pos++] * gain.interpolate_linear(bus_pos);
                    if (buf_pos >= buf_num_samples) {
                        if (loop) {
                            buf_pos = 0;
                        } else {
                            done_server = true;
                            break;
                        }
                    }
                }
            } else {
                // less channels in buffer than bus, so duplicate previous channels
                assert(c > 0);
                memcpy(bus_data, tmp.channel_data[c-1], bus_num_samples * sizeof(float));
            }
        }
        next_position = current_position + (double)bus_num_samples;
        if (next_position >= (double)buf_num_samples) {
            next_position = fmod(next_position, (double)buf_num_samples);
        }
    } else {
        // resample
        for (int c = 0; c < bus_num_channels; c++) {
            float *bus_data = tmp.channel_data[c];
            float *buf_data = ((float*)audio_buffer->buffer->data) + c * buf_num_samples;
            if (c < buf_num_channels) {
                double pos = current_position;
                for (int write_index = 0; write_index < bus_num_samples; write_index++) {
                    int read_index1 = (int)floor(pos);
                    int read_index2 = read_index1 + 1;
                    if (read_index2 >= buf_num_samples) {
                        if (loop) {
                            read_index2 = 0;
                        } else {
                            done_server = true;
                            break;
                        }
                    }
                    // Linear interpolation between the two surrounding samples.
                    double interpolation_factor = pos - (double)read_index1;
                    float sample1 = buf_data[read_index1];
                    float sample2 = buf_data[read_index2];
                    float interpolated_sample = (1.0 - interpolation_factor) * sample1 + interpolation_factor * sample2;
                    bus_data[write_index] += interpolated_sample * gain.interpolate_linear(write_index);
                    pos += playback_speed.interpolate_linear(write_index) * sample_rate_ratio;
                    if (pos >= (double)buf_num_samples) {
                        if (loop) {
                            pos = fmod(pos, (double)buf_num_samples);
                        } else {
                            done_server = true;
                            break;
                        }
                    }
                }
                next_position = pos;
            } else {
                // less channels in buffer than bus, so duplicate previous channels
                assert(c > 0);
                memcpy(bus_data, tmp.channel_data[c-1], bus_num_samples * sizeof(float));
            }
        }
    }
    mix_bus(bus, &tmp);
}

// Commit interpolated parameter values and advance the play head.
void am_audio_track_node::post_render(am_audio_context *context, int num_samples) {
    playback_speed.update_current();
    gain.update_current();
    current_position = next_position;
}

bool am_audio_track_node::finished() {
    return done_client;
}

// Audio stream node

am_audio_stream_node::am_audio_stream_node() : playback_speed(1.0f) {
    buffer = NULL;
    buffer_ref = LUA_NOREF;
    handle = NULL;
    num_channels = 2;
    sample_rate = 44100;
    sample_rate_ratio = 1.0f;
    loop = false;
    done_server = false;
    done_client = false;
    current_position = 0.0;
    next_position = 0.0;
}

void am_audio_stream_node::sync_params() {
    playback_speed.update_target();
    done_client = done_server;
}

// Decode ogg vorbis samples on demand into a temporary bus and mix them in.
// NOTE(review): playback_speed appears unused in this decode path — the
// stream is always played at the decoder's native pace; confirm intended.
void am_audio_stream_node::render_audio(am_audio_context *context, am_audio_bus *bus) {
    if (done_server) return;
    int bus_num_samples = bus->num_samples;
    int bus_num_channels = bus->num_channels;
    stb_vorbis *f = (stb_vorbis*)handle;
    int n = 0;
    float *channel_data[AM_MAX_CHANNELS];
    am_audio_bus tmp(bus);
    for (int i = 0; i < bus_num_channels; i++) {
        channel_data[i] = tmp.channel_data[i];
    }
    while (n < bus_num_samples) {
        int m = stb_vorbis_get_samples_float(f, bus_num_channels, channel_data, bus_num_samples - n);
        if (m == 0) {
            // end of stream: rewind if looping, otherwise finish
            if (loop) {
                stb_vorbis_seek_start(f);
            } else {
                done_server = true;
                break;
            }
        } else {
            n += m;
            for (int i = 0;
i < bus_num_channels; i++) {
                // advance write pointers past the samples just decoded
                channel_data[i] += m;
            }
        }
    }
    // fill in missing channels
    for (int c = num_channels; c < bus_num_channels; c++) {
        assert(c > 0);
        memcpy(tmp.channel_data[c], tmp.channel_data[c-1], bus_num_samples * sizeof(float));
    }
    mix_bus(bus, &tmp);
}

void am_audio_stream_node::post_render(am_audio_context *context, int num_samples) {
    playback_speed.update_current();
    current_position = next_position;
}

bool am_audio_stream_node::finished() {
    return done_client;
}

// Oscillator Node

am_oscillator_node::am_oscillator_node() : phase(0) , freq(440) , waveform(AM_WAVEFORM_SINE) {
    offset = 0; // samples rendered so far (used to derive time t)
}

void am_oscillator_node::sync_params() {
    phase.update_target();
    freq.update_target();
}

void am_oscillator_node::post_render(am_audio_context *context, int num_samples) {
    phase.update_current();
    freq.update_current();
    offset += num_samples;
}

// Additively render the waveform into every channel of the bus.
// Only AM_WAVEFORM_SINE is implemented; other waveforms fall through to the
// empty default case and produce silence. (The missing `break` after the
// SINE case is harmless because `default` is empty.)
void am_oscillator_node::render_audio(am_audio_context *context, am_audio_bus *bus) {
    double t = (double)offset / (double)context->sample_rate;
    double dt = 1.0/(double)context->sample_rate;
    int num_channels = bus->num_channels;
    int num_samples = bus->num_samples;
    float phase_inc = linear_incf(phase, num_samples);
    float freq_inc = linear_incf(freq, num_samples);
    float phase_val = phase.current_value;
    float freq_val = freq.current_value;
    switch (waveform.current_value) {
        case AM_WAVEFORM_SINE: {
            for (int i = 0; i < num_samples; i++) {
                float val = sinf(AM_PI*2.0f*freq_val*(float)t+phase_val);
                for (int c = 0; c < num_channels; c++) {
                    bus->channel_data[c][i] += val;
                }
                phase_val += phase_inc;
                freq_val += freq_inc;
                t += dt;
            }
        }
        default: {}
    }
}

bool am_oscillator_node::finished() {
    return false; // oscillators run forever
}

// Spectrum node

am_spectrum_node::am_spectrum_node() : smoothing(0.9f) {
    for (int i = 0; i < AM_MAX_FFT_BINS; i++) {
        bin_data[i] = 0.0f;
    }
    fftsize = 1024;
    num_bins = fftsize / 2 + 1;
    cfg = NULL;
    arr = NULL;
    arr_ref = LUA_NOREF;
}

// Publish the latest bin magnitudes (as decibels) into the user-supplied
// float view, then allow another FFT pass. Bin 0 (DC) is skipped.
void am_spectrum_node::sync_params() {
    if (arr->size < num_bins - 1) return;
    if (arr->type != AM_VIEW_TYPE_F32)
        return;
    if (arr->components != 1) return;
    if (arr->buffer->data == NULL) return;
    float *farr = (float*)&arr->buffer->data[arr->offset];
    for (int i = 1; i < num_bins; i++) {
        float data = bin_data[i];
        // -1000dB stands in for log10(0) = -inf
        float decibels = data == 0.0f ? -1000.0f : 20.0f * log10f(data);
        *farr = decibels;
        farr = (float*)(((uint8_t*)farr) + arr->stride);
    }
    smoothing.update_target();
    smoothing.update_current();
    done = false;
}

void am_spectrum_node::post_render(am_audio_context *context, int num_samples) {
}

// Apply a Blackman window in place to reduce spectral leakage.
static void apply_fft_window(float *p, size_t n) {
    // Blackman window, copied from http://www.chromium.org/blink
    double alpha = 0.16;
    double a0 = 0.5 * (1 - alpha);
    double a1 = 0.5;
    double a2 = 0.5 * alpha;
    for (unsigned i = 0; i < n; ++i) {
        double x = static_cast<double>(i) / static_cast<double>(n);
        double window = a0 - a1 * cos(AM_2PI * x) + a2 * cos(AM_2PI * 2.0 * x);
        p[i] *= float(window);
    }
}

// Render children into a scratch bus, pass their audio through unchanged,
// and run windowed FFTs over the mono mix to update bin magnitudes.
// `done` throttles analysis to one pass per sync_params.
void am_spectrum_node::render_audio(am_audio_context *context, am_audio_bus *bus) {
    am_audio_bus tmp(bus);
    render_children(context, &tmp);
    mix_bus(bus, &tmp);
    if (done) return;
    int num_channels = tmp.num_channels;
    int num_samples = tmp.num_samples;
    if (fftsize > num_samples) {
        am_log1("%s", "WARNING: fft size > audio buffer size, ignoring");
        return;
    }
    if (num_samples % fftsize != 0) {
        am_log1("%s", "WARNING: audio buffer size not divisible by fft size, ignoring");
        return;
    }
    if (!am_is_power_of_two(fftsize)) {
        am_log1("%s", "WARNING: fft size is not a power of 2, ignoring");
        return;
    }
    float *input = tmp.channel_data[0];
    // combine channels
    for (int c = 1; c < num_channels; c++) {
        float *channeln = tmp.channel_data[c];
        for (int s = 0; s < num_samples; s++) {
            input[s] += channeln[s];
        }
    }
    for (int i = 0; i < num_samples / fftsize; i++) {
        apply_fft_window(input, fftsize);
        kiss_fft_cpx output[AM_MAX_FFT_BINS];
        kiss_fftr(cfg, input, output);
        // Adapted from http://www.chromium.org/blink (I don't understand this)
        // Normalize so that an input sine wave at 0dBfs registers as 0dBfs (undo FFT scaling factor).
        const float magnitudeScale = 1.0f / (float)fftsize;
        // A value of 0 does no averaging with the previous result. Larger values produce slower, but smoother changes.
        float k = smoothing.current_value;
        // Convert the analysis data from complex to magnitude and average with the previous result.
        for (int j = 0; j < num_bins; j++) {
            float im = output[j].i;
            float re = output[j].r;
            double scalarMagnitude = sqrtf(re * re + im * im) * magnitudeScale;
            bin_data[j] = k * bin_data[j] + (1.0f - k) * scalarMagnitude;
        }
        input += fftsize;
    }
    done = true;
}

// Capture Node

am_capture_node::am_capture_node() {
}

void am_capture_node::sync_params() {
}

void am_capture_node::post_render(am_audio_context *context, int num_samples) {
}

// Mix microphone/capture input into the bus.
void am_capture_node::render_audio(am_audio_context *context, am_audio_bus *bus) {
    am_audio_bus tmp(bus);
    am_capture_audio(&tmp);
    mix_bus(bus, &tmp);
}

bool am_capture_node::finished() {
    return false;
}

//-------------------------------------------------------------------------
// Lua bindings.
// Look up a field in this node's uservalue table, recursing into an only
// child if not found there. Nodes are marked during the walk to avoid
// infinite recursion through cycles.
// Expects the lookup key at Lua stack index 2; the node userdata occupies
// index 1 and is replaced by each child as the walk descends.
static int search_uservalues(lua_State *L, am_audio_node *node) {
    if (node_marked(node)) return 0; // cycle
    node->pushuservalue(L); // push uservalue table of node
    lua_pushvalue(L, 2); // push field
    lua_rawget(L, -2); // lookup field in uservalue table
    if (!lua_isnil(L, -1)) {
        // found it
        lua_remove(L, -2); // remove uservalue table
        return 1;
    }
    lua_pop(L, 2); // pop nil, uservalue table
    // only follow single-child chains
    if (node->pending_children.size != 1) return 0;
    mark_node(node);
    am_audio_node *child = node->pending_children.arr[0].child;
    child->push(L);
    lua_replace(L, 1); // child is now at index 1
    int r = search_uservalues(L, child);
    unmark_node(node);
    return r;
}

// __index metamethod: try the metatable first, then aliased uservalues.
int am_audio_node_index(lua_State *L) {
    am_audio_node *node = (am_audio_node*)lua_touserdata(L, 1);
    am_default_index_func(L); // check metatable
    if (!lua_isnil(L, -1)) return 1;
    lua_pop(L, 1); // pop nil
    return search_uservalues(L, node);
}

// node:add(child) -> node
static int add_child(lua_State *L) {
    am_check_nargs(L, 2);
    am_audio_node *parent = am_get_userdata(L, am_audio_node, 1);
    am_audio_node *child = am_get_userdata(L, am_audio_node, 2);
    am_audio_node_child child_slot;
    child_slot.child = child;
    child_slot.ref = parent->ref(L, 2); // ref from parent to child
    // keep list sorted (required for sync_children_list below)
    int n = parent->pending_children.size;
    for (int i = 0; i <= n; i++) {
        if (i == n || child < parent->pending_children.arr[i].child) {
            parent->pending_children.insert(L, i, child_slot);
            break;
        }
    }
    set_children_dirty(parent);
    lua_pushvalue(L, 1); // for chaining
    return 1;
}

// node:remove(child) -> node (removes first matching occurrence only)
static int remove_child(lua_State *L) {
    am_check_nargs(L, 2);
    am_audio_node *parent = am_get_userdata(L, am_audio_node, 1);
    am_audio_node *child = am_get_userdata(L, am_audio_node, 2);
    for (int i = 0; i < parent->pending_children.size; i++) {
        if (parent->pending_children.arr[i].child == child) {
            parent->unref(L, parent->pending_children.arr[i].ref);
            parent->pending_children.remove(i);
            set_children_dirty(parent);
            break;
        }
    }
    lua_pushvalue(L, 1); // for chaining
    return 1;
}

// node:remove_all() -> node
static int remove_all_children(lua_State *L) {
    am_check_nargs(L, 1);
    am_audio_node *parent = am_get_userdata(L, am_audio_node, 1);
    for (int i = parent->pending_children.size-1; i >= 0; i--) {
        parent->unref(L, parent->pending_children.arr[i].ref);
    }
    parent->pending_children.clear();
    set_children_dirty(parent);
    lua_pushvalue(L, 1); // for chaining
    return 1;
}

// Attach the value at stack index 1 (if non-nil) as `parent`'s child.
void am_set_audio_node_child(lua_State *L, am_audio_node *parent) {
    if (lua_isnil(L, 1)) {
        return;
    }
    am_audio_node *child = am_get_userdata(L, am_audio_node, 1);
    am_audio_node_child child_slot;
    child_slot.child = child;
    child_slot.ref = parent->ref(L, 1); // ref from parent to child
    parent->pending_children.push_back(L, child_slot);
    set_children_dirty(parent);
}

// Iterator step for node:children() — returns (index+1, child) or nil.
static int child_pair_next(lua_State *L) {
    am_check_nargs(L, 2);
    am_audio_node *node = am_get_userdata(L, am_audio_node, 1);
    int i = luaL_checkinteger(L, 2);
    if (i >= 0 && i < node->pending_children.size) {
        lua_pushinteger(L, i+1);
        node->pending_children.arr[i].child->push(L);
        return 2;
    } else {
        lua_pushnil(L);
        return 1;
    }
}

// node:children() -> iterator, node, 0 (generic-for protocol)
static int child_pairs(lua_State *L) {
    lua_pushcclosure(L, child_pair_next, 0);
    lua_pushvalue(L, 1);
    lua_pushinteger(L, 0);
    return 3;
}

// node:child(i) -> i-th child (1-based), or nothing if out of range
static int get_child(lua_State *L) {
    am_check_nargs(L, 2);
    am_audio_node *node = am_get_userdata(L, am_audio_node, 1);
    int i = luaL_checkinteger(L, 2);
    if (i >= 1 && i <= node->pending_children.size) {
        node->pending_children.arr[i-1].child->push(L);
        return 1;
    } else {
        return 0;
    }
}

// Raise an error if the proposed alias name (stack index 2) already
// resolves through the metatable.
static void check_alias(lua_State *L) {
    am_default_index_func(L);
    if (!lua_isnil(L, -1)) goto error;
    lua_pop(L, 1);
    return;
error:
    luaL_error(L, "alias '%s' is already used for something else", lua_tostring(L, 2));
}

// node:alias(name [, value]) or node:alias{name = value, ...}
// Stores entries in the node's uservalue table so they are reachable via
// __index (see search_uservalues). With no value, the node itself is stored.
static int alias(lua_State *L) {
    int nargs = am_check_nargs(L, 2);
    am_audio_node *node = am_get_userdata(L, am_audio_node, 1);
    node->pushuservalue(L);
    int userval_idx = am_absindex(L, -1);
    if (lua_istable(L, 2)) {
        // create multiple aliases - one for each key/value pair
        lua_pushvalue(L, 2); // push table, as we need position 2 for check_alias
        int tbl_idx = am_absindex(L, -1);
        lua_pushnil(L);
        while (lua_next(L, tbl_idx)) {
            lua_pushvalue(L, -2); // key
            lua_replace(L, 2); // check_alias expects key in position 2
            check_alias(L);
            lua_pushvalue(L, -2); // key
            lua_pushvalue(L, -2); // value
            lua_rawset(L, userval_idx); // uservalue[key] = value
            lua_pop(L, 1); // value
        }
        lua_pop(L, 1); // table
    } else if (lua_isstring(L, 2)) {
        check_alias(L);
        lua_pushvalue(L, 2);
        if (nargs > 2) {
            lua_pushvalue(L, 3);
        } else {
            lua_pushvalue(L, 1);
        }
        lua_rawset(L, userval_idx);
    } else {
        return luaL_error(L, "expecting a string or table at position 2");
    }
    lua_pop(L, 1); // uservalue
    lua_pushvalue(L, 1);
    return 1;
}

// Gain node lua bindings

// am.gain(child, value)
static int create_gain_node(lua_State *L) {
    am_check_nargs(L, 2);
    am_gain_node *node = am_new_userdata(L, am_gain_node);
    am_set_audio_node_child(L, node);
    node->gain.set_immediate(luaL_checknumber(L, 2));
    return 1;
}

static void get_gain(lua_State *L, void *obj) {
    am_gain_node *node = (am_gain_node*)obj;
    lua_pushnumber(L, node->gain.pending_value);
}

static void set_gain(lua_State *L, void *obj) {
    am_gain_node *node = (am_gain_node*)obj;
    node->gain.pending_value = luaL_checknumber(L, 3);
}

static am_property gain_property = {get_gain, set_gain};

static void register_gain_node_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_audio_node_index, 0);
    lua_setfield(L, -2, "__index");
    am_set_default_newindex_func(L);
    am_register_property(L, "value", &gain_property);
    am_register_metatable(L, "gain", MT_am_gain_node, MT_am_audio_node);
}

// Lowpass biquad filter node lua bindings

// am.lowpass_filter(child, cutoff, resonance)
static int create_lowpass_filter_node(lua_State *L) {
    am_check_nargs(L, 3);
    am_lowpass_filter_node *node = am_new_userdata(L, am_lowpass_filter_node);
    am_set_audio_node_child(L, node);
    node->cutoff.set_immediate(luaL_checknumber(L, 2));
    node->resonance.set_immediate(luaL_checknumber(L, 3));
    node->set_lowpass_params(node->cutoff.current_value, node->resonance.current_value);
    return 1;
}
// Property accessors for the lowpass filter node.
static void get_lowpass_cutoff(lua_State *L, void *obj) {
    am_lowpass_filter_node *node = (am_lowpass_filter_node*)obj;
    lua_pushnumber(L, node->cutoff.pending_value);
}

static void set_lowpass_cutoff(lua_State *L, void *obj) {
    am_lowpass_filter_node *node = (am_lowpass_filter_node*)obj;
    // clamp to a sensible audible range (Hz)
    node->cutoff.pending_value = am_clamp(luaL_checknumber(L, 3), 1.0, 22050.0);
}

static void get_lowpass_resonance(lua_State *L, void *obj) {
    am_lowpass_filter_node *node = (am_lowpass_filter_node*)obj;
    lua_pushnumber(L, node->resonance.pending_value);
}

static void set_lowpass_resonance(lua_State *L, void *obj) {
    am_lowpass_filter_node *node = (am_lowpass_filter_node*)obj;
    node->resonance.pending_value = am_clamp(luaL_checknumber(L, 3), 0.0, 1000.0);
}

static am_property lowpass_cutoff_property = {get_lowpass_cutoff, set_lowpass_cutoff};
static am_property lowpass_resonance_property = {get_lowpass_resonance, set_lowpass_resonance};

static void register_lowpass_filter_node_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_audio_node_index, 0);
    lua_setfield(L, -2, "__index");
    am_set_default_newindex_func(L);
    am_register_property(L, "cutoff", &lowpass_cutoff_property);
    am_register_property(L, "resonance", &lowpass_resonance_property);
    am_register_metatable(L, "lowpass_filter", MT_am_lowpass_filter_node, MT_am_audio_node);
}

// Highpass biquad filter node lua bindings

// am.highpass_filter(child, cutoff, resonance)
static int create_highpass_filter_node(lua_State *L) {
    am_check_nargs(L, 3);
    am_highpass_filter_node *node = am_new_userdata(L, am_highpass_filter_node);
    am_set_audio_node_child(L, node);
    node->cutoff.set_immediate(luaL_checknumber(L, 2));
    node->resonance.set_immediate(luaL_checknumber(L, 3));
    node->set_highpass_params(node->cutoff.current_value, node->resonance.current_value);
    return 1;
}

static void get_highpass_cutoff(lua_State *L, void *obj) {
    am_highpass_filter_node *node = (am_highpass_filter_node*)obj;
    lua_pushnumber(L, node->cutoff.pending_value);
}

static void set_highpass_cutoff(lua_State *L, void *obj) {
    am_highpass_filter_node *node = (am_highpass_filter_node*)obj;
    node->cutoff.pending_value = am_clamp(luaL_checknumber(L, 3), 1.0, 22050.0);
}

static void get_highpass_resonance(lua_State *L, void *obj) {
    am_highpass_filter_node *node = (am_highpass_filter_node*)obj;
    lua_pushnumber(L, node->resonance.pending_value);
}

static void set_highpass_resonance(lua_State *L, void *obj) {
    am_highpass_filter_node *node = (am_highpass_filter_node*)obj;
    node->resonance.pending_value = am_clamp(luaL_checknumber(L, 3), 0.0, 1000.0);
}

static am_property highpass_cutoff_property = {get_highpass_cutoff, set_highpass_cutoff};
static am_property highpass_resonance_property = {get_highpass_resonance, set_highpass_resonance};

static void register_highpass_filter_node_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_audio_node_index, 0);
    lua_setfield(L, -2, "__index");
    am_set_default_newindex_func(L);
    am_register_property(L, "cutoff", &highpass_cutoff_property);
    am_register_property(L, "resonance", &highpass_resonance_property);
    am_register_metatable(L, "highpass_filter", MT_am_highpass_filter_node, MT_am_audio_node);
}

// Audio track node lua bindings

// am.track(buffer [, loop [, playback_speed [, gain]]])
static int create_audio_track_node(lua_State *L) {
    int nargs = am_check_nargs(L, 1);
    am_audio_track_node *node = am_new_userdata(L, am_audio_track_node);
    node->audio_buffer = am_get_userdata(L, am_audio_buffer, 1);
    node->audio_buffer_ref = node->ref(L, 1);
    if (nargs > 1) {
        node->loop = lua_toboolean(L, 2);
    }
    if (nargs > 2) {
        node->playback_speed.set_immediate(luaL_checknumber(L, 3));
    }
    if (nargs > 3) {
        node->gain.set_immediate(luaL_checknumber(L, 4));
    }
    node->sample_rate_ratio = (float)node->audio_buffer->sample_rate / (float)am_conf_audio_sample_rate;
    return 1;
}

// track:reset([position_seconds]) — request a seek; applied in sync_params.
static int reset_track(lua_State *L) {
    int nargs = am_check_nargs(L, 1);
    am_audio_track_node *node = am_get_userdata(L, am_audio_track_node, 1);
    node->needs_reset = true;
    if (nargs > 1) {
        int buf_num_channels = node->audio_buffer->num_channels;
        int buf_num_samples =
            node->audio_buffer->buffer->size / (buf_num_channels * sizeof(float));
        // convert seconds to a sample position, clamped inside the buffer
        node->reset_position = am_min(luaL_checknumber(L, 2) * node->audio_buffer->sample_rate, (double)(buf_num_samples-1));
    } else {
        node->reset_position = 0.0;
    }
    return 0;
}

static void get_track_playback_speed(lua_State *L, void *obj) {
    am_audio_track_node *node = (am_audio_track_node*)obj;
    lua_pushnumber(L, node->playback_speed.pending_value);
}

static void set_track_playback_speed(lua_State *L, void *obj) {
    am_audio_track_node *node = (am_audio_track_node*)obj;
    node->playback_speed.pending_value = luaL_checknumber(L, 3);
}

static am_property track_playback_speed_property = {get_track_playback_speed, set_track_playback_speed};

static void get_track_volume(lua_State *L, void *obj) {
    am_audio_track_node *node = (am_audio_track_node*)obj;
    lua_pushnumber(L, node->gain.pending_value);
}

static void set_track_volume(lua_State *L, void *obj) {
    am_audio_track_node *node = (am_audio_track_node*)obj;
    node->gain.pending_value = luaL_checknumber(L, 3);
}

static am_property track_volume_property = {get_track_volume, set_track_volume};

static void register_audio_track_node_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_audio_node_index, 0);
    lua_setfield(L, -2, "__index");
    am_set_default_newindex_func(L);
    lua_pushcclosure(L, reset_track, 0);
    lua_setfield(L, -2, "reset");
    am_register_property(L, "playback_speed", &track_playback_speed_property);
    am_register_property(L, "volume", &track_volume_property);
    am_register_metatable(L, "track", MT_am_audio_track_node, MT_am_audio_node);
}

// Audio stream node lua bindings

// am.stream(buffer [, loop [, playback_speed]]) — ogg vorbis streaming.
static int create_audio_stream_node(lua_State *L) {
    int nargs = am_check_nargs(L, 1);
    am_audio_stream_node *node = am_new_userdata(L, am_audio_stream_node);
    node->buffer = am_check_buffer(L, 1);
    node->buffer_ref = node->ref(L, 1);
    if (nargs > 1) {
        node->loop = lua_toboolean(L, 2);
    }
    if (nargs > 2) {
        node->playback_speed.set_immediate(luaL_checknumber(L, 3));
    }
    int err = 0;
    node->handle =
stb_vorbis_open_memory( (const unsigned char *)node->buffer->data, node->buffer->size, &err, NULL); if (node->handle == NULL) { return luaL_error(L, "buffer '%s' is not valid ogg vorbis data", node->buffer->origin); } stb_vorbis_info info = stb_vorbis_get_info((stb_vorbis*)node->handle); node->sample_rate = info.sample_rate; if (node->sample_rate != am_conf_audio_sample_rate) { am_log0("WARNING: buffer '%s' has sample rate of %dHz, but will play at %dHz", node->buffer->origin, node->sample_rate, am_conf_audio_sample_rate); } node->num_channels = info.channels; node->sample_rate_ratio = (float)node->sample_rate / (float)am_conf_audio_sample_rate; return 1; } static int audio_stream_gc(lua_State *L) { am_audio_stream_node *node = (am_audio_stream_node*)lua_touserdata(L, 1); if (node->handle != NULL) { stb_vorbis_close((stb_vorbis*)node->handle); node->handle = NULL; } return 0; } static void get_stream_playback_speed(lua_State *L, void *obj) { am_audio_stream_node *node = (am_audio_stream_node*)obj; lua_pushnumber(L, node->playback_speed.pending_value); } static void set_stream_playback_speed(lua_State *L, void *obj) { am_audio_stream_node *node = (am_audio_stream_node*)obj; node->playback_speed.pending_value = luaL_checknumber(L, 3); } static am_property stream_playback_speed_property = {get_stream_playback_speed, set_stream_playback_speed}; static void register_audio_stream_node_mt(lua_State *L) { lua_newtable(L); lua_pushcclosure(L, am_audio_node_index, 0); lua_setfield(L, -2, "__index"); am_set_default_newindex_func(L); lua_pushcclosure(L, audio_stream_gc, 0); lua_setfield(L, -2, "__gc"); am_register_property(L, "playback_speed", &stream_playback_speed_property); am_register_metatable(L, "audio_stream", MT_am_audio_stream_node, MT_am_audio_node); } // Oscillator node lua bindings static int create_oscillator_node(lua_State *L) { int nargs = am_check_nargs(L, 1); am_oscillator_node *node = am_new_userdata(L, am_oscillator_node); 
node->freq.set_immediate(luaL_checknumber(L, 1)); if (nargs > 1) { node->phase.set_immediate(luaL_checknumber(L, 2)); } return 1; } static void get_phase(lua_State *L, void *obj) { am_oscillator_node *node = (am_oscillator_node*)obj; lua_pushnumber(L, node->phase.pending_value); } static void set_phase(lua_State *L, void *obj) { am_oscillator_node *node = (am_oscillator_node*)obj; node->phase.pending_value = luaL_checknumber(L, 3); } static am_property phase_property = {get_phase, set_phase}; static void get_freq(lua_State *L, void *obj) { am_oscillator_node *node = (am_oscillator_node*)obj; lua_pushnumber(L, node->freq.pending_value); } static void set_freq(lua_State *L, void *obj) { am_oscillator_node *node = (am_oscillator_node*)obj; node->freq.pending_value = luaL_checknumber(L, 3); } static am_property freq_property = {get_freq, set_freq}; static void register_oscillator_node_mt(lua_State *L) { lua_newtable(L); lua_pushcclosure(L, am_audio_node_index, 0); lua_setfield(L, -2, "__index"); am_set_default_newindex_func(L); am_register_property(L, "phase", &phase_property); am_register_property(L, "freq", &freq_property); am_register_metatable(L, "oscillator", MT_am_oscillator_node, MT_am_audio_node); } // Spectrum node lua bindings static am_spectrum_node *new_spectrum_node(lua_State *L, int fftsize) { size_t cfg_sz = 0; kiss_fftr_alloc(fftsize, 0, NULL, &cfg_sz); // allocate extra space for kissfft cfg am_spectrum_node *node = (am_spectrum_node*)am_set_metatable(L, new (lua_newuserdata(L, sizeof(am_spectrum_node) + cfg_sz)) am_spectrum_node(), MT_am_spectrum_node); node->cfg = kiss_fftr_alloc(fftsize, 0, (void*)(node + 1), &cfg_sz); assert(node->cfg); node->fftsize = fftsize; node->num_bins = fftsize / 2 + 1; return node; } static int create_spectrum_node(lua_State *L) { int nargs = am_check_nargs(L, 3); int freq_bins = luaL_checkinteger(L, 2); int fftsize = freq_bins * 2; if (fftsize < AM_MIN_FFT_SIZE) { return luaL_error(L, "number of frequency bins must be at 
least %d", AM_MIN_FFT_SIZE / 2); } if (fftsize > am_min(am_conf_audio_buffer_size, AM_MAX_FFT_SIZE)) { return luaL_error(L, "too many frequency bins (max %d)", am_min(am_conf_audio_buffer_size, AM_MAX_FFT_SIZE) / 2); } if (!am_is_power_of_two(fftsize)) { return luaL_error(L, "frequency bins must be a power of 2", freq_bins); } am_spectrum_node *node = new_spectrum_node(L, fftsize); am_set_audio_node_child(L, node); node->arr = am_get_userdata(L, am_buffer_view, 3); node->arr_ref = node->ref(L, 3); if (node->arr->size < freq_bins) { return luaL_error(L, "array must have at least %d elements", freq_bins); } if (node->arr->type != AM_VIEW_TYPE_F32) { return luaL_error(L, "array must have type float"); } if (node->arr->components != 1) { return luaL_error(L, "array must have 1 component of type float"); } if (nargs > 3) { node->smoothing.set_immediate(luaL_checknumber(L, 4)); } return 1; } static void get_smoothing(lua_State *L, void *obj) { am_spectrum_node *node = (am_spectrum_node*)obj; lua_pushnumber(L, node->smoothing.pending_value); } static void set_smoothing(lua_State *L, void *obj) { am_spectrum_node *node = (am_spectrum_node*)obj; node->smoothing.pending_value = am_clamp(luaL_checknumber(L, 3), 0.0, 1.0); } static am_property smoothing_property = {get_smoothing, set_smoothing}; static void register_spectrum_node_mt(lua_State *L) { lua_newtable(L); lua_pushcclosure(L, am_audio_node_index, 0); lua_setfield(L, -2, "__index"); am_set_default_newindex_func(L); am_register_property(L, "smoothing", &smoothing_property); am_register_metatable(L, "spectrum", MT_am_spectrum_node, MT_am_audio_node); } // Capture node lua bindings static int create_capture_node(lua_State *L) { am_new_userdata(L, am_capture_node); return 1; } static void register_capture_node_mt(lua_State *L) { lua_newtable(L); lua_pushcclosure(L, am_audio_node_index, 0); lua_setfield(L, -2, "__index"); am_set_default_newindex_func(L); am_register_metatable(L, "capture", MT_am_capture_node, 
MT_am_audio_node);
}

// Audio node lua bindings

static int create_audio_node(lua_State *L) {
    am_new_userdata(L, am_audio_node);
    return 1;
}

// Push the root audio node stored in the Lua registry.
static int get_root_audio_node(lua_State *L) {
    lua_rawgeti(L, LUA_REGISTRYINDEX, AM_ROOT_AUDIO_NODE);
    return 1;
}

static void get_finished(lua_State *L, void *obj) {
    am_audio_node *node = (am_audio_node*)obj;
    lua_pushboolean(L, node->finished());
}

static am_property finished_property = {get_finished, NULL}; // read-only

static void get_num_children(lua_State *L, void *obj) {
    am_audio_node *node = (am_audio_node*)obj;
    lua_pushinteger(L, node->pending_children.size);
}

static am_property num_children_property = {get_num_children, NULL}; // read-only

static void get_paused(lua_State *L, void *obj) {
    am_audio_node *node = (am_audio_node*)obj;
    lua_pushboolean(L, pending_pause(node));
}

static void set_paused(lua_State *L, void *obj) {
    am_audio_node *node = (am_audio_node*)obj;
    if (lua_toboolean(L, 3)) {
        set_pending_pause(node);
    } else {
        clear_pending_pause(node);
    }
}

static am_property paused_property = {get_paused, set_paused};

// Base metatable shared by all audio node types: child management,
// aliasing, node constructors and common properties.
static void register_audio_node_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_audio_node_index, 0);
    lua_setfield(L, -2, "__index");
    am_set_default_newindex_func(L);
    lua_pushcclosure(L, child_pairs, 0);
    lua_setfield(L, -2, "children");
    lua_pushcclosure(L, get_child, 0);
    lua_setfield(L, -2, "child");
    lua_pushcclosure(L, alias, 0);
    lua_setfield(L, -2, "alias");
    lua_pushcclosure(L, add_child, 0);
    lua_setfield(L, -2, "add");
    lua_pushcclosure(L, remove_child, 0);
    lua_setfield(L, -2, "remove");
    lua_pushcclosure(L, remove_all_children, 0);
    lua_setfield(L, -2, "remove_all");
    lua_pushcclosure(L, create_gain_node, 0);
    lua_setfield(L, -2, "gain");
    lua_pushcclosure(L, create_lowpass_filter_node, 0);
    lua_setfield(L, -2, "lowpass_filter");
    lua_pushcclosure(L, create_highpass_filter_node, 0);
    lua_setfield(L, -2, "highpass_filter");
    lua_pushcclosure(L, create_spectrum_node, 0);
    lua_setfield(L, -2, "spectrum");
    am_register_property(L, "finished", &finished_property);
    am_register_property(L, "num_children", &num_children_property);
    am_register_property(L, "paused", &paused_property);
    am_register_metatable(L, "audio_node", MT_am_audio_node, 0);
}

//-------------------------------------------------------------------------

// am.audio_buffer(buffer, channels, sample_rate) — wrap a raw float buffer.
static int create_audio_buffer(lua_State *L) {
    am_check_nargs(L, 3);
    am_buffer *buf = am_check_buffer(L, 1);
    int channels = lua_tointeger(L, 2);
    int sample_rate = lua_tointeger(L, 3);
    luaL_argcheck(L, channels >= 1, 2, "channels must be a positive integer");
    luaL_argcheck(L, buf->size / sizeof(float) / channels >= 1, 2, "not enough data for that many channels");
    luaL_argcheck(L, (buf->size / sizeof(float)) % channels == 0, 2, "buffer has invalid size for that many channels");
    luaL_argcheck(L, sample_rate >= 1, 3, "sample rate must be a positive integer");
    am_audio_buffer *audio_buffer = am_new_userdata(L, am_audio_buffer);
    audio_buffer->buffer = buf;
    audio_buffer->buffer_ref = audio_buffer->ref(L, 1);
    audio_buffer->num_channels = channels;
    audio_buffer->sample_rate = sample_rate;
    return 1;
}

static void get_channels(lua_State *L, void *obj) {
    am_audio_buffer *buf = (am_audio_buffer*)obj;
    lua_pushinteger(L, buf->num_channels);
}

static am_property channels_property = {get_channels, NULL};

static void get_sample_rate(lua_State *L, void *obj) {
    am_audio_buffer *buf = (am_audio_buffer*)obj;
    lua_pushinteger(L, buf->sample_rate);
}

static am_property sample_rate_property = {get_sample_rate, NULL};

static void get_samples_per_channel(lua_State *L, void *obj) {
    am_audio_buffer *buf = (am_audio_buffer*)obj;
    lua_pushinteger(L, buf->buffer->size / sizeof(float) / buf->num_channels);
}

static am_property samples_per_channel_property = {get_samples_per_channel, NULL};

// Length of the buffer in seconds.
static void get_audio_buf_length(lua_State *L, void *obj) {
    am_audio_buffer *buf = (am_audio_buffer*)obj;
    double samples = (double)(buf->buffer->size / sizeof(float) / buf->num_channels);
    double len = samples / (double)buf->sample_rate;
    lua_pushnumber(L, len);
}

static am_property audio_buf_length_property = {get_audio_buf_length, NULL};

static void get_audio_buf_buffer(lua_State *L, void *obj) {
    am_audio_buffer *buf = (am_audio_buffer*)obj;
    buf->buffer->push(L);
}

static am_property audio_buf_buffer_property = {get_audio_buf_buffer, NULL};

static void register_audio_buffer_mt(lua_State *L) {
    lua_newtable(L);
    lua_pushcclosure(L, am_default_index_func, 0);
    lua_setfield(L, -2, "__index");
    am_register_property(L, "channels", &channels_property);
    am_register_property(L, "sample_rate", &sample_rate_property);
    am_register_property(L, "samples_per_channel", &samples_per_channel_property);
    am_register_property(L, "length", &audio_buf_length_property);
    am_register_property(L, "buffer", &audio_buf_buffer_property);
    am_register_metatable(L, "audio_buffer", MT_am_audio_buffer, 0);
}

//-------------------------------------------------------------------------

// am.load_audio(filename) -> audio_buffer or nil.
// Decodes an ogg vorbis resource to planar float samples, resampling to
// the configured output rate with linear interpolation if necessary.
// Returns nil (without raising) if the resource cannot be read.
static int load_audio(lua_State *L) {
    char *errmsg;
    int len;
    const char *filename = luaL_checkstring(L, 1);
    void *data = am_read_resource(filename, &len, &errmsg);
    if (data == NULL) {
        // NOTE(review): the read error message is discarded here and nil
        // is returned silently — confirm callers expect that.
        free(errmsg);
        lua_pushnil(L);
        return 1;
    }
    int num_channels;
    int sample_rate;
    short *tmp_data;
    int num_samples = stb_vorbis_decode_memory((unsigned char*)data, len, &num_channels, &sample_rate, &tmp_data);
    free(data);
    if (num_samples <= 0) {
        return luaL_error(L, "error loading audio '%s'", filename);
    }
    am_buffer *dest_buf;
    float *dest_data;
    num_channels = am_min(num_channels, am_conf_audio_channels);
    int dest_samples;
    if (sample_rate != am_conf_audio_sample_rate) {
        // resample required
        am_log0("WARNING: resampling buffer '%s' from %dHz to %dHz", filename, sample_rate, am_conf_audio_sample_rate);
        double sample_rate_ratio = (double)sample_rate / (double)am_conf_audio_sample_rate;
        dest_samples = floor((double)num_samples / sample_rate_ratio);
        dest_buf = am_push_new_buffer_and_init(L, dest_samples * num_channels * 4);
        dest_data = (float*)dest_buf->data;
        for (int c = 0; c < num_channels; c++) {
            double pos = 0.0f;
            for (int write_index = 0; write_index < dest_samples; write_index++) {
                int read_index1 = (int)floor(pos);
                int read_index2 = read_index1 + 1;
                if (read_index2 >= num_samples) {
                    break;
                }
                // linear interpolation; also converts interleaved s16 -> planar float
                float interpolation_factor = (float)(pos - (float)read_index1);
                float sample1 = (float)tmp_data[read_index1 * num_channels + c] / (float)INT16_MAX;
                float sample2 = (float)tmp_data[read_index2 * num_channels + c] / (float)INT16_MAX;
                float interpolated_sample = (1.0f - interpolation_factor) * sample1 + interpolation_factor * sample2;
                dest_data[c * dest_samples + write_index] = interpolated_sample;
                pos += sample_rate_ratio;
                if (pos >= (double)num_samples) {
                    break;
                }
            }
        }
    } else {
        // no resample required
        dest_buf = am_push_new_buffer_and_init(L, num_samples * num_channels * 4);
        dest_data = (float*)dest_buf->data;
        dest_samples = num_samples;
        for (int c = 0; c < num_channels; c++) {
            for (int s = 0; s < num_samples; s++) {
                // de-interleave and convert s16 -> float in [-1, 1]
                dest_data[c * num_samples + s] = (float)tmp_data[s * num_channels + c] / (float)INT16_MAX;
            }
        }
    }
    free(tmp_data);
    am_audio_buffer *audio_buffer = am_new_userdata(L, am_audio_buffer);
    audio_buffer->num_channels = num_channels;
    audio_buffer->sample_rate = am_conf_audio_sample_rate;
    audio_buffer->buffer = dest_buf;
    audio_buffer->buffer_ref = audio_buffer->ref(L, -2);
    lua_remove(L, -2); // remove dest buf
    return 1;
}

//-------------------------------------------------------------------------

void am_destroy_audio() {
    audio_context.root = NULL;
    clear_buffer_pool();
}

// Advance the one-frame pause state machine (BEGIN -> PAUSED, END -> UNPAUSED).
static void update_live_pause_state(am_audio_node *node) {
    switch (live_pause_state(node)) {
        case LIVE_PAUSE_STATE_BEGIN: set_live_pause_state(node, LIVE_PAUSE_STATE_PAUSED); break;
        case LIVE_PAUSE_STATE_END: set_live_pause_state(node, LIVE_PAUSE_STATE_UNPAUSED); break;
    }
}

static void do_post_render(am_audio_context *context, int num_samples, am_audio_node *node) {
    if (node->last_render >= context->render_id) return; // already processed
    node->last_render =
context->render_id; update_live_pause_state(node); node->post_render(context, num_samples); for (int i = 0; i < node->live_children.size; i++) { am_audio_node_child *child = &node->live_children.arr[i]; if (child->state == AM_AUDIO_NODE_CHILD_STATE_REMOVED) { child->state = AM_AUDIO_NODE_CHILD_STATE_DONE; } else if (child->state == AM_AUDIO_NODE_CHILD_STATE_NEW) { child->state = AM_AUDIO_NODE_CHILD_STATE_OLD; } do_post_render(context, num_samples, child->child); } } void am_fill_audio_bus(am_audio_bus *bus) { if (audio_context.root == NULL) return; double t0 = 0.0; if (am_record_perf_timings) { t0 = am_get_current_time(); } if (!am_conf_audio_mute) { #if AM_STEAMWORKS // mute audio if steam overlay shown if (!am_steam_overlay_enabled) { #endif audio_context.root->render_audio(&audio_context, bus); #if AM_STEAMWORKS } #endif } audio_context.render_id++; do_post_render(&audio_context, bus->num_samples, audio_context.root); if (am_record_perf_timings) { audio_time_accum += am_get_current_time() - t0; } } static void sync_children_list(lua_State *L, am_audio_node *node) { int p = 0; int l = 0; am_lua_array<am_audio_node_child> *parr = &node->pending_children; am_lua_array<am_audio_node_child> *larr = &node->live_children; // remove live children that need to be removed for (l = larr->size-1; l >= 0; l--) { if (larr->arr[l].state == AM_AUDIO_NODE_CHILD_STATE_DONE) { node->unref(L, larr->arr[l].ref); larr->remove(l); } } if (children_dirty(node)) { // insert NEW children and mark REMOVED children l = 0; p = 0; while (p < parr->size && l < larr->size) { while (p < parr->size && parr->arr[p].child < larr->arr[l].child) { parr->arr[p].state = AM_AUDIO_NODE_CHILD_STATE_NEW; larr->insert(L, l, parr->arr[p]); larr->arr[l].child->push(L); larr->arr[l].ref = node->ref(L, -1); lua_pop(L, 1); p++; l++; } while (p < parr->size && l < larr->size && parr->arr[p].child == larr->arr[l].child) { p++; l++; } while (p < parr->size && l < larr->size && parr->arr[p].child > 
larr->arr[l].child) { if (larr->arr[l].state != AM_AUDIO_NODE_CHILD_STATE_DONE) { larr->arr[l].state = AM_AUDIO_NODE_CHILD_STATE_REMOVED; } l++; } } while (p < parr->size) { parr->arr[p].state = AM_AUDIO_NODE_CHILD_STATE_NEW; larr->insert(L, l, parr->arr[p]); larr->arr[l].child->push(L); larr->arr[l].ref = node->ref(L, -1); lua_pop(L, 1); p++; l++; } while (l < larr->size) { if (larr->arr[l].state != AM_AUDIO_NODE_CHILD_STATE_DONE) { larr->arr[l].state = AM_AUDIO_NODE_CHILD_STATE_REMOVED; } l++; } clear_children_dirty(node); } } static void sync_paused(am_audio_node *node) { int live_state = live_pause_state(node); if (pending_pause(node) && live_state != LIVE_PAUSE_STATE_PAUSED) { set_live_pause_state(node, LIVE_PAUSE_STATE_BEGIN); } else if (!pending_pause(node) && live_state != LIVE_PAUSE_STATE_UNPAUSED) { set_live_pause_state(node, LIVE_PAUSE_STATE_END); } } static void sync_audio_graph(lua_State *L, am_audio_context *context, am_audio_node *node) { if (node->last_sync >= context->sync_id) return; // already synced node->last_sync = context->sync_id; node->sync_params(); sync_children_list(L, node); sync_paused(node); for (int i = 0; i < node->live_children.size; i++) { sync_audio_graph(L, context, node->live_children.arr[i].child); } am_last_frame_audio_time = audio_time_accum; audio_time_accum = 0.0; } void am_sync_audio_graph(lua_State *L) { if (audio_context.root == NULL) return; audio_context.sync_id++; sync_audio_graph(L, &audio_context, audio_context.root); } //------------------------------------------------------------------------- // Backend utility functions void am_interleave_audio(float* AM_RESTRICT dest, float* AM_RESTRICT src, int num_channels, int num_samples, int sample_offset, int count) { int i, j; int k = sample_offset + count; assert(k <= num_samples); for (int c = 0; c < num_channels; c++) { i = k - count; j = c; while (i != k) { dest[j] = src[i]; i++; j += num_channels; } k += num_samples; } } void am_interleave_audio16(int16_t* 
AM_RESTRICT dest, float* AM_RESTRICT src, int num_channels, int num_samples, int sample_offset, int count) { int i, j; int k = sample_offset + count; assert(k <= num_samples); for (int c = 0; c < num_channels; c++) { i = k - count; j = c; while (i != k) { dest[j] = (int16_t)(src[i] * 32767.0); i++; j += num_channels; } k += num_samples; } } void am_uninterleave_audio(float* AM_RESTRICT dest, float* AM_RESTRICT src, int num_channels, int num_samples) { int i = 0; int j; for (int c = 0; c < num_channels; c++) { j = c; for (int k = 0; k < num_samples; k++) { dest[i] = src[j]; i++; j += num_channels; } } } //------------------------------------------------------------------------- // Module registration void am_open_audio_module(lua_State *L) { luaL_Reg funcs[] = { {"audio_buffer", create_audio_buffer}, {"audio_node", create_audio_node}, {"oscillator", create_oscillator_node}, {"capture_audio", create_capture_node}, {"track", create_audio_track_node}, {"stream", create_audio_stream_node}, {"load_audio", load_audio}, {"root_audio_node", get_root_audio_node}, {NULL, NULL} }; am_open_module(L, AMULET_LUA_MODULE_NAME, funcs); register_audio_buffer_mt(L); register_audio_node_mt(L); register_gain_node_mt(L); register_lowpass_filter_node_mt(L); register_highpass_filter_node_mt(L); register_audio_track_node_mt(L); register_audio_stream_node_mt(L); register_oscillator_node_mt(L); register_capture_node_mt(L); register_spectrum_node_mt(L); audio_context.sample_rate = am_conf_audio_sample_rate; audio_context.sync_id = 0; audio_context.render_id = 0; // Create root audio node create_audio_node(L); audio_context.root = am_get_userdata(L, am_audio_node, -1); lua_rawseti(L, LUA_REGISTRYINDEX, AM_ROOT_AUDIO_NODE); }
28,070
492
#ifndef __COMPRESSION_TYPES_H__
#define __COMPRESSION_TYPES_H__

#include "BaseTypes.h"

/*++

Routine Description:

  The compression routine.

Arguments:

  SrcBuffer - The buffer storing the source data
  SrcSize   - The size of source data
  DstBuffer - The buffer to store the compressed data
  DstSize   - On input, the size of DstBuffer; On output,
              the size of the actual compressed data.

Returns:

  EFI_BUFFER_TOO_SMALL  - The DstBuffer is too small. In this case,
                          DstSize contains the size needed.
  EFI_SUCCESS           - Compression is successful.
  EFI_OUT_OF_RESOURCES  - No resource to complete function.
  EFI_INVALID_PARAMETER - Parameter supplied is wrong.

--*/
typedef
EFI_STATUS
(*COMPRESS_FUNCTION) (
  IN char       *SrcBuffer,
  IN size_t     SrcSize,
  IN char       *DstBuffer,
  IN OUT size_t *DstSize
  );

/*++

Routine Description:

  Queries the sizes needed to decompress Source: the decompressed data size
  (DstSize) and the scratch working-buffer size (ScratchSize).
  NOTE(review): semantics inferred from the parameter list and the matching
  DECOMPRESS_FUNCTION below — confirm against the implementing module.

--*/
typedef
EFI_STATUS
(*GETINFO_FUNCTION) (
  IN VOID     *Source,
  IN size_t   SrcSize,
  OUT size_t  *DstSize,
  OUT size_t  *ScratchSize
  );

/*++

Routine Description:

  The decompression routine: decompresses Source into Destination, using the
  caller-provided Scratch working buffer (sized via GETINFO_FUNCTION).

--*/
typedef
EFI_STATUS
(*DECOMPRESS_FUNCTION) (
  IN VOID        *Source,
  IN size_t      SrcSize,
  IN OUT VOID    *Destination,
  IN size_t      DstSize,
  IN OUT VOID    *Scratch,
  IN size_t      ScratchSize
  );

#endif
570
444
from django.contrib.postgres.fields import JSONField
from django.core.exceptions import ValidationError
from jsonschema import Draft4Validator
from jsonschema.exceptions import ValidationError as SchemaValidationError
from django.utils.translation import gettext_lazy

from orchestra.utils.json_schema import DefaultValidatingDraft4Validator


class JSONField(JSONField):
    """A postgres JSONField that can validate its value against a JSON schema.

    ``schema`` is an optional JSON-schema dict checked on ``get_prep_value``;
    ``add_defaults`` selects a validator variant that also applies schema
    defaults (see ``orchestra.utils.json_schema``).
    """

    default_error_messages = {
        'schema_invalid': gettext_lazy(
            'Value failed schema validation: %(err)s'),
    }

    def __init__(self, schema=None, add_defaults=False, *args, **kwargs):
        # Record the schema options, then reject anything that isn't a dict.
        self.schema = schema
        self.add_defaults = add_defaults
        schema_ok = not self.schema or isinstance(self.schema, dict)
        if not schema_ok:
            raise ValueError('Schema must be a dict object.')
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        # Serialize for migrations; include kwargs only when non-default.
        name, path, args, kwargs = super().deconstruct()
        for attr in ('schema', 'add_defaults'):
            attr_value = getattr(self, attr)
            if attr_value:
                kwargs[attr] = attr_value
        return name, path, args, kwargs

    def get_prep_value(self, value):
        # Validate against the schema before the usual DB preparation.
        self._validate_with_schema(value)
        return super().get_prep_value(value)

    def _validate_with_schema(self, value):
        # Nothing to check when there is no value or no schema configured.
        if not (value and self.schema):
            return
        validator = self._get_validator_class()(self.schema)
        try:
            validator.validate(value)
        except SchemaValidationError as err:
            raise ValidationError(
                self.error_messages['schema_invalid'],
                code='schema_invalid',
                params={'err': err},
            )

    def _get_validator_class(self):
        # The default-applying variant is only used when requested.
        if self.add_defaults:
            return DefaultValidatingDraft4Validator
        return Draft4Validator
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_BROWSER_CACHE_STORAGE_SCOPED_WRITABLE_ENTRY_H_ #define CONTENT_BROWSER_CACHE_STORAGE_SCOPED_WRITABLE_ENTRY_H_ #include <memory> #include "net/disk_cache/disk_cache.h" namespace content { // A custom deleter that closes the entry. But if WritingCompleted() hasn't been // called, it will doom the entry before closing it. class ScopedWritableDeleter { public: ScopedWritableDeleter() = default; ScopedWritableDeleter(ScopedWritableDeleter&& other) = default; ScopedWritableDeleter& operator=(ScopedWritableDeleter&& other) = default; void operator()(disk_cache::Entry* entry) { if (!completed_) entry->Doom(); // |entry| is owned by the backend, we just need to close it as it's // ref-counted. entry->Close(); } void WritingCompleted() { completed_ = true; } private: bool completed_ = false; }; // Use this to manage disk_cache::Entry*'s that should be doomed before closing // unless told otherwise (via calling WritingCompleted on the deleter). // // Example: // ScopedWritableEntry entry(my_entry); // .. write some stuff .. // entry.get_deleter().WritingCompleted(); typedef std::unique_ptr<disk_cache::Entry, ScopedWritableDeleter> ScopedWritableEntry; } // namespace content #endif // CONTENT_BROWSER_CACHE_STORAGE_SCOPED_WRITABLE_ENTRY_H_
499
557
# Tests for mammoth.html.collapse: merging adjacent "collapsible" HTML
# elements produced during docx conversion into single elements.
from nose.tools import istest, assert_equal

from mammoth import html


@istest
def collapsing_does_nothing_to_single_text_node():
    assert_equal(
        html.collapse([html.text("Bluebells")]),
        [html.text("Bluebells")])


@istest
def consecutive_fresh_elements_are_not_collapsed():
    assert_equal(
        html.collapse([html.element("p"), html.element("p")]),
        [html.element("p"), html.element("p")])


@istest
def consecutive_collapsible_elements_are_collapsed_if_they_have_the_same_tag_and_attributes():
    assert_equal(
        [html.collapsible_element("p", {}, [html.text("One"), html.text("Two")])],
        html.collapse([
            html.collapsible_element("p", {}, [html.text("One")]),
            html.collapsible_element("p", {}, [html.text("Two")])
        ]))


@istest
def elements_with_different_tag_names_are_not_collapsed():
    assert_equal(
        [
            html.collapsible_element("p", {}, [html.text("One")]),
            html.collapsible_element("div", {}, [html.text("Two")])
        ],
        html.collapse([
            html.collapsible_element("p", {}, [html.text("One")]),
            html.collapsible_element("div", {}, [html.text("Two")])
        ]))


@istest
def elements_with_different_attributes_are_not_collapsed():
    assert_equal(
        [
            html.collapsible_element("p", {"id": "a"}, [html.text("One")]),
            html.collapsible_element("p", {}, [html.text("Two")])
        ],
        html.collapse([
            html.collapsible_element("p", {"id": "a"}, [html.text("One")]),
            html.collapsible_element("p", {}, [html.text("Two")])
        ]))


@istest
def children_of_collapsed_element_can_collapse_with_children_of_previous_element():
    # Collapsing applies recursively: the two <p>s inside the merged
    # <blockquote>s are themselves merged.
    assert_equal(
        [
            html.collapsible_element("blockquote", {}, [
                html.collapsible_element("p", {}, [
                    html.text("One"),
                    html.text("Two")
                ])
            ]),
        ],
        html.collapse([
            html.collapsible_element("blockquote", {}, [
                html.collapsible_element("p", {}, [html.text("One")])
            ]),
            html.collapsible_element("blockquote", {}, [
                html.collapsible_element("p", {}, [html.text("Two")])
            ]),
        ]))


@istest
def collapsible_element_can_collapse_into_previous_fresh_element():
    assert_equal(
        [html.element("p", {}, [html.text("One"), html.text("Two")])],
        html.collapse([
            html.element("p", {}, [html.text("One")]),
            html.collapsible_element("p", {}, [html.text("Two")])
        ]))


@istest
def element_with_choice_of_tag_names_can_collapse_into_previous_element_if_it_has_one_of_those_tag_names_as_its_main_tag_name():
    # A multi-tag collapsible element merges only when its *first* (main)
    # tag name matches the previous element.
    assert_equal(
        [html.collapsible_element(["ol"])],
        html.collapse([
            html.collapsible_element("ol"),
            html.collapsible_element(["ul", "ol"])
        ]))
    assert_equal(
        [
            html.collapsible_element(["ul", "ol"]),
            html.collapsible_element("ol")
        ],
        html.collapse([
            html.collapsible_element(["ul", "ol"]),
            html.collapsible_element("ol")
        ]))


@istest
def when_separator_is_present_then_separator_is_prepended_to_collapsed_element():
    assert_equal(
        [
            html.element("pre", collapsible=False, children=[
                html.text("Hello"),
                html.text("\n"),
                html.text(" the"),
                html.text("re")
            ])
        ],
        html.collapse([
            html.element("pre", collapsible=False, children=[html.text("Hello")]),
            html.element("pre", collapsible=True, separator="\n", children=[html.text(" the"), html.text("re")]),
        ]),
    )
1,917
1,408
package com.pancm.client;

import java.util.Date;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.pancm.protobuf.UserInfo;
import com.pancm.protobuf.UserInfo.UserMsg;

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.EventLoop;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import io.netty.util.ReferenceCountUtil;

/**
 * Client-side channel handler: reconnects when the connection drops, sends a
 * heartbeat whenever the write channel goes idle, and acknowledges protobuf
 * {@link UserInfo.UserMsg} messages received from the server.
 *
 * <p>NOTE(review): this handler is {@code @Sharable}, so {@code fcount} is a
 * plain field shared by every channel using this handler instance — confirm
 * single-connection use, or make it an {@code AtomicInteger}.
 *
 * @author pancm
 * @since 2017-10-08 (original header: 1.0.0)
 */
@Service("nettyClientHandler")
@ChannelHandler.Sharable
public class NettyClientHandler extends ChannelInboundHandlerAdapter {

    @Autowired
    private NettyClient nettyClient;

    /** Heartbeat cycle counter (used for logging only). */
    private int fcount = 1;

    /**
     * Invoked when the connection is established.
     */
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        System.out.println("建立连接时:" + new Date());
        ctx.fireChannelActive();
    }

    /**
     * Invoked when the connection is closed; schedules a reconnect on this
     * channel's event loop.
     */
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        System.out.println("关闭连接时:" + new Date());
        final EventLoop eventLoop = ctx.channel().eventLoop();
        nettyClient.doConnect(new Bootstrap(), eventLoop);
        super.channelInactive(ctx);
    }

    /**
     * Heartbeat handling: when the write channel has been idle, sends a
     * heartbeat message (state == 2). The idle interval is configured by the
     * {@code IdleStateHandler} installed in the pipeline.
     */
    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object obj) throws Exception {
        System.out.println("循环请求的时间:" + new Date() + ",次数" + fcount);
        if (obj instanceof IdleStateEvent) {
            IdleStateEvent event = (IdleStateEvent) obj;
            if (IdleState.WRITER_IDLE.equals(event.state())) {
                // Write channel idle: send a heartbeat command.
                UserMsg.Builder userState = UserMsg.newBuilder().setState(2);
                ctx.channel().writeAndFlush(userState);
                fcount++;
            }
        } else {
            // Fix: don't swallow events we don't handle — forward them to the
            // next handler in the pipeline.
            super.userEventTriggered(ctx, obj);
        }
    }

    /**
     * Business logic: logs the received user info and replies with an
     * acknowledgement (state == 1).
     */
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        // Not the protobuf message type we expect.
        if (!(msg instanceof UserMsg)) {
            System.out.println("未知数据!" + msg);
            // Fix: release unexpected messages too — the original early return
            // skipped the finally block below, leaking reference-counted buffers.
            ReferenceCountUtil.release(msg);
            return;
        }
        try {
            // Decode the protobuf payload.
            UserInfo.UserMsg userMsg = (UserInfo.UserMsg) msg;
            // Do the actual business processing; here we just log it.
            System.out.println(
                    "客户端接受到的用户信息。编号:" + userMsg.getId() + ",姓名:" + userMsg.getName() + ",年龄:" + userMsg.getAge());

            // Reply with a "received" acknowledgement.
            UserMsg.Builder userState = UserMsg.newBuilder().setState(1);
            ctx.writeAndFlush(userState);
            System.out.println("成功发送给服务端!");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ReferenceCountUtil.release(msg);
        }
    }
}
1,296
2,114
<gh_stars>1000+ package io.searchbox.cluster; import io.searchbox.action.AbstractAction; import io.searchbox.action.GenericResultAbstractAction; import io.searchbox.client.config.ElasticsearchVersion; /** * Allows to update cluster wide specific settings. Settings updated can either be persistent (applied cross restarts) * or transient (will not survive a full cluster restart). The cluster responds with the settings updated. * <br/> * <br/> * There is a specific list of settings that can be updated, please see * <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html#cluster-settings">Elasticsearch docs</a> * for more information. * * @author <NAME> */ public class UpdateSettings extends GenericResultAbstractAction { protected UpdateSettings(Builder builder) { super(builder); this.payload = builder.source; } protected String buildURI(ElasticsearchVersion elasticsearchVersion) { return super.buildURI(elasticsearchVersion) + "/_cluster/settings"; } @Override public String getRestMethodName() { return "PUT"; } public static class Builder extends AbstractAction.Builder<UpdateSettings, Builder> { private final Object source; /** * There is a specific list of settings that can be updated, please see * <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html#cluster-settings">Elasticsearch docs</a> * for more information. */ public Builder(Object source) { this.source = source; } @Override public UpdateSettings build() { return new UpdateSettings(this); } } }
589
334
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train an XGBoost regressor on a house-price CSV and export the model."""

import argparse
import logging

import joblib
import pandas as pd
from sklearn.metrics import mean_absolute_error
from sklearn.model_selection import train_test_split
try:
    from sklearn.preprocessing import Imputer
except ImportError:
    from sklearn.impute import SimpleImputer as Imputer
from xgboost import XGBRegressor


def read_input(file_name, test_size=0.25):
    """Read input data and split it into train and test.

    Args:
        file_name: list of input paths; only the first entry is read.
        test_size: fraction of rows reserved for the evaluation split.

    Returns:
        ((train_X, train_y), (test_X, test_y)) numpy arrays with missing
        numeric values imputed (imputer fit on the training split only).
    """
    data = pd.read_csv(file_name[0])
    # Rows without a label are useless for supervised training.
    data.dropna(axis=0, subset=['SalePrice'], inplace=True)

    y = data.SalePrice
    # Keep numeric feature columns only.
    X = data.drop(['SalePrice'], axis=1).select_dtypes(exclude=['object'])

    train_X, test_X, train_y, test_y = train_test_split(X.values,
                                                        y.values,
                                                        test_size=test_size,
                                                        shuffle=False)
    imputer = Imputer()
    train_X = imputer.fit_transform(train_X)
    test_X = imputer.transform(test_X)

    return (train_X, train_y), (test_X, test_y)


def train_model(train_X, train_y, test_X, test_y, n_estimators, learning_rate,
                early_stopping_rounds=40):
    """Train the model using XGBRegressor.

    ``early_stopping_rounds`` was previously hard-coded to 40; it is now a
    parameter (default unchanged) so the CLI flag actually takes effect.
    """
    model = XGBRegressor(n_estimators=n_estimators, learning_rate=learning_rate)

    model.fit(train_X,
              train_y,
              early_stopping_rounds=early_stopping_rounds,
              eval_set=[(test_X, test_y)])

    logging.info("Best RMSE on eval: %.2f with %d rounds",
                 model.best_score,
                 model.best_iteration+1)
    return model


def eval_model(model, test_X, test_y):
    """Evaluate the model performance."""
    predictions = model.predict(test_X)
    # Fix: sklearn's signature is (y_true, y_pred); MAE is symmetric so the
    # value is unchanged, but the argument order was misleading.
    logging.info("mean_absolute_error=%.2f",
                 mean_absolute_error(test_y, predictions))


def save_model(model, model_file):
    """Save XGBoost model for serving."""
    joblib.dump(model, model_file)
    logging.info("Model export success: %s", model_file)


def main(args):
    # Fix: the --test-size and --early-stopping-rounds flags were parsed but
    # never used; they are now wired through.
    (train_X, train_y), (test_X, test_y) = read_input(args.train_input,
                                                      test_size=args.test_size)
    model = train_model(train_X,
                        train_y,
                        test_X,
                        test_y,
                        args.n_estimators,
                        args.learning_rate,
                        args.early_stopping_rounds)

    eval_model(model, test_X, test_y)
    save_model(model, args.model_file)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--train-input',
        help="Input training file",
        nargs='+',
        required=True
    )
    parser.add_argument(
        '--n-estimators',
        help='Number of trees in the model',
        type=int,
        default=1000
    )
    parser.add_argument(
        '--learning-rate',
        help='Learning rate for the model',
        type=float,  # Fix: was parsed as a string when supplied on the CLI
        default=0.1
    )
    parser.add_argument(
        '--model-file',
        help='Model file location for XGBoost',
        required=True
    )
    parser.add_argument(
        '--test-size',
        help='Fraction of training data to be reserved for test',
        type=float,  # Fix: was parsed as a string when supplied on the CLI
        default=0.25
    )
    parser.add_argument(
        '--early-stopping-rounds',
        help='XGBoost argument for stopping early',
        type=int,  # Fix: was parsed as a string when supplied on the CLI
        default=50
    )

    logging.basicConfig(format='%(message)s')
    logging.getLogger().setLevel(logging.INFO)
    main_args = parser.parse_args()
    main(main_args)
1,852
8,747
/* SPI Slave Halfduplex example

   This example code is in the Public Domain (or CC0 licensed, at your option.)

   Unless required by applicable law or agreed to in writing, this
   software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
   CONDITIONS OF ANY KIND, either express or implied.
*/

#include <string.h>
#include "esp_log.h"
#include "esp_err.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "driver/spi_master.h"
#include "esp_serial_slave_link/essl_spi.h"

//Pin setting
#if !CONFIG_IDF_TARGET_ESP32C3
#define GPIO_MOSI 11
#define GPIO_MISO 13
#define GPIO_SCLK 12
#define GPIO_CS 10
#else
#define GPIO_MOSI 7
#define GPIO_MISO 2
#define GPIO_SCLK 6
#define GPIO_CS 10
#endif

#define MASTER_HOST SPI2_HOST
#define DMA_CHAN    SPI_DMA_CH_AUTO
// Minimum size of a randomly-sized TX transaction (bytes).
#define TX_SIZE_MIN 40

/**
 * Helper Macros for Master-Slave synchronization, each setting is 4-byte-width
 *
 * The address and value should be negotiated with Master beforehand
 */
//----------------------General Settings---------------------//
//Indicate Slave General Settings are ready
#define SLAVE_READY_FLAG_REG        0
#define SLAVE_READY_FLAG            0xEE
//Value in these 4 registers (Byte 4, 5, 6, 7) indicates the MAX Slave TX buffer length
#define SLAVE_MAX_TX_BUF_LEN_REG    4
//Value in these 4 registers indicates the MAX Slave RX buffer length
#define SLAVE_MAX_RX_BUF_LEN_REG    8

//----------------------Updating Info------------------------//
//Value in these 4 registers indicates size of the TX buffer that Slave has loaded to the DMA
#define SLAVE_TX_READY_BUF_SIZE_REG 12
//Value in these 4 registers indicates number of the RX buffer that Slave has loaded to the DMA
#define SLAVE_RX_READY_BUF_NUM_REG  16

static const char TAG[] = "SEG_MASTER";

// Fill |bus_cfg| with the default SPI bus configuration for this example.
static void get_spi_bus_default_config(spi_bus_config_t *bus_cfg)
{
    memset(bus_cfg, 0x0, sizeof(spi_bus_config_t));
    bus_cfg->mosi_io_num = GPIO_MOSI;
    bus_cfg->miso_io_num = GPIO_MISO;
    bus_cfg->sclk_io_num = GPIO_SCLK;
    bus_cfg->quadwp_io_num = -1;
    bus_cfg->quadhd_io_num = -1;
    bus_cfg->max_transfer_sz = 14000;
    bus_cfg->flags = 0;
    bus_cfg->intr_flags = 0;
}

// Fill |dev_cfg| with the default device config: 10 MHz, mode 0, half-duplex,
// 8 command bits + 8 address bits + 8 dummy bits (the ESSL SPI protocol frame).
static void get_spi_device_default_config(spi_device_interface_config_t *dev_cfg)
{
    memset(dev_cfg, 0x0, sizeof(spi_device_interface_config_t));
    dev_cfg->clock_speed_hz = 10*1000*1000;
    dev_cfg->mode = 0;
    dev_cfg->spics_io_num = GPIO_CS;
    dev_cfg->cs_ena_pretrans = 0;
    dev_cfg->cs_ena_posttrans = 0;
    dev_cfg->command_bits = 8;
    dev_cfg->address_bits = 8;
    dev_cfg->dummy_bits = 8;
    dev_cfg->queue_size = 16;
    dev_cfg->flags = SPI_DEVICE_HALFDUPLEX;
    dev_cfg->duty_cycle_pos = 0;
    dev_cfg->input_delay_ns = 0;
    dev_cfg->pre_cb = NULL;
    dev_cfg->post_cb = NULL;
}

// Initialize the SPI bus and attach the slave device; returns the device
// handle through |out_spi|.
static void init_master_hd(spi_device_handle_t* out_spi)
{
    //init bus
    spi_bus_config_t bus_cfg = {};
    get_spi_bus_default_config(&bus_cfg);
    ESP_ERROR_CHECK(spi_bus_initialize(MASTER_HOST, &bus_cfg, DMA_CHAN));

    //add device
    spi_device_interface_config_t dev_cfg = {};
    get_spi_device_default_config(&dev_cfg);
    ESP_ERROR_CHECK(spi_bus_add_device(MASTER_HOST, &dev_cfg, out_spi));
}

//-------------------------------Function used for Master-Slave Synchronization---------------------------//
//Wait for Slave to init the shared registers for its configurations, see the Helper Macros above
static esp_err_t wait_for_slave_ready(spi_device_handle_t spi)
{
    esp_err_t ret;
    uint32_t slave_ready_flag;

    while (1) {
        //Master sends CMD2 to get slave configuration
        //The first byte is a flag assigned by slave as a start signal, here it's 0xee
        ret = essl_spi_rdbuf(spi, (uint8_t *)&slave_ready_flag, SLAVE_READY_FLAG_REG, 4, 0);
        if (ret != ESP_OK) {
            return ret;
        }

        if (slave_ready_flag != SLAVE_READY_FLAG) {
            printf("Waiting for Slave to be ready...\n");
            vTaskDelay(1000 / portTICK_PERIOD_MS);
        } else if (slave_ready_flag == SLAVE_READY_FLAG) {
            return ESP_OK;
        }
    }
}

//Get the MAX length of Slave's TX/RX buffer
static esp_err_t get_slave_max_buf_size(spi_device_handle_t spi, uint32_t *out_send_size, uint32_t *out_recv_size)
{
    esp_err_t ret;
    ret = essl_spi_rdbuf(spi, (uint8_t *)out_send_size, SLAVE_MAX_TX_BUF_LEN_REG, 4, 0);
    if (ret != ESP_OK) {
        return ret;
    }
    ret = essl_spi_rdbuf(spi, (uint8_t *)out_recv_size, SLAVE_MAX_RX_BUF_LEN_REG, 4, 0);
    if (ret != ESP_OK) {
        return ret;
    }
    return ret;
}

/**
 * To get the size of the ready Slave TX buffer
 * This size can be read from the pre-negotiated shared register (here `SLAVE_TX_READY_BUF_SIZE_REG`)
 */
static uint32_t get_slave_tx_buf_size(spi_device_handle_t spi)
{
    uint32_t updated_size;
    uint32_t temp;

    ESP_ERROR_CHECK(essl_spi_rdbuf_polling(spi, (uint8_t *)&temp, SLAVE_TX_READY_BUF_SIZE_REG, 4, 0));

    /**
     * Read until the last 2 reading result are same. Reason:
     * SPI transaction is carried on per 1 Byte. So when Master is reading the shared register, if the
     * value is changed by Slave at this time, Master may get wrong data.
     */
    while (1) {
        ESP_ERROR_CHECK(essl_spi_rdbuf_polling(spi, (uint8_t *)&updated_size, SLAVE_TX_READY_BUF_SIZE_REG, 4, 0));
        if (updated_size == temp) {
            return updated_size;
        }
        temp = updated_size;
    }
}

/**
 * To get the number of the ready Slave RX buffers
 * This number can be read from the pre-negotiated shared register (here `SLAVE_RX_READY_BUF_NUM_REG`)
 */
static uint32_t get_slave_rx_buf_num(spi_device_handle_t spi)
{
    uint32_t updated_num;
    uint32_t temp;

    ESP_ERROR_CHECK(essl_spi_rdbuf_polling(spi, (uint8_t *)&temp, SLAVE_RX_READY_BUF_NUM_REG, 4, 0));

    /**
     * Read until the last 2 reading result are same. Reason:
     * SPI transaction is carried on per 1 Byte. So when Master is reading the shared register, if the
     * value is changed by Slave at this time, Master may get wrong data.
     */
    while (1) {
        ESP_ERROR_CHECK(essl_spi_rdbuf_polling(spi, (uint8_t *)&updated_num, SLAVE_RX_READY_BUF_NUM_REG, 4, 0));
        if (updated_num == temp) {
            return updated_num;
        }
        temp = updated_num;
    }
}

// Main loop: negotiate buffer sizes with the Slave, then alternately read
// whatever TX data the Slave has queued and send random-sized buffers to it.
void app_main(void)
{
    spi_device_handle_t spi;
    init_master_hd(&spi);

    ESP_ERROR_CHECK(wait_for_slave_ready(spi));

    /**
     * Here we let the Slave to claim its transaction size. You can modify it in your own way,
     * e.g. let the Senders to claim its MAX length, or let the Master/Slave determine the length themselves, without considering
     * throughputs in the opposite side.
     */
    uint32_t slave_max_tx_buf_size;
    uint32_t slave_max_rx_buf_size;
    ESP_ERROR_CHECK(get_slave_max_buf_size(spi, &slave_max_tx_buf_size, &slave_max_rx_buf_size));
    uint32_t rx_buf_size = slave_max_tx_buf_size;
    printf("\n\n---------SLAVE INFO---------\n\n");
    printf("Slave MAX Send Buffer Size: %d\n", slave_max_tx_buf_size);
    printf("Slave MAX Receive Buffer Size: %d\n", slave_max_rx_buf_size);

    uint8_t *recv_buf = heap_caps_calloc(1, rx_buf_size, MALLOC_CAP_DMA);
    if (!recv_buf) {
        ESP_LOGE(TAG, "No enough memory!");
        abort();
    }
    uint8_t *send_buf = heap_caps_calloc(1, slave_max_rx_buf_size, MALLOC_CAP_DMA);
    if (!send_buf) {
        ESP_LOGE(TAG, "No enough memory!");
        abort();
    }

    uint32_t size_has_read = 0;    //Counter of the size that Master has received.
    uint32_t size_to_read = 0;
    uint32_t num_has_sent = 0;     //Counter of the buffer number that Master has sent.
    uint32_t tx_trans_id = 0;
    srand(30);

    while (1) {
        //RECV
        ESP_LOGI(TAG, "RECEIVING......");
        /**
         * This (`size_has_read`) is the counter mentioned in examples/peripherals/spi_slave_hd/segment_mode/seg_slave/app_main.c.
         * See its Note for Function used for Master-Slave Synchronization.
         *
         * Condition when this counter overflows:
         * If the Slave increases its counter with the value smaller than 2^32, then the calculation is still safe. For example:
         * 1. Initially, Slave's counter is (2^32 - 1 - 10), Master's counter is (2^32 - 1 - 20). So the difference would be 10B initially.
         * 2. Slave loads 20 bytes to the DMA, and increase its counter. So the value would be ((2^32 - 1 - 10) + 20) = 9;
         * 3. The difference (`size_can_be_read`) would be (9 - (2^32 - 1 - 20)) = 30;
         *
         * If this is 0, it means Slave didn't load new TX buffer to the bus yet.
         */
        uint32_t size_can_be_read = get_slave_tx_buf_size(spi) - size_has_read;
        if (size_can_be_read > rx_buf_size) {
            ESP_LOGW(TAG, "Slave is going to send buffer(%d Bytes) larger than pre-negotiated MAX size", size_can_be_read);
            /**
             * NOTE:
             * In this condition, Master should still increase its counter (``size_has_read``) by the size that Slave has loaded,
             * because Master RX buffer is not large enough, and it should read as large as it can, then send the CMD8. The extra
             * bits will be missed by Master.
             */
            size_to_read = rx_buf_size;
        } else {
            size_to_read = size_can_be_read;
        }
        if (size_to_read) {
            ESP_ERROR_CHECK(essl_spi_rddma(spi, recv_buf, size_to_read, -1, 0));
            size_has_read += size_can_be_read;    //See NOTE above

            //Process the data. Here we just print it out.
            printf("%s\n", recv_buf);
            memset(recv_buf, 0x0, rx_buf_size);
        }

        //SEND
        ESP_LOGI(TAG, "SENDING......");
        /**
         * Similar logic, see the comment for `size_can_be_read` above.
         * If this is 0, it means Slave didn't load RX buffer to the bus yet.
         */
        uint32_t num_to_send = get_slave_rx_buf_num(spi) - num_has_sent;

        //Prepare your TX transaction in your own way. Here is an example.
        //You can set any size to send (shorter, longer or equal to the Slave Max RX buf size), Slave can get the actual length by ``trans_len`` member of ``spi_slave_hd_data_t``
        uint32_t actual_tx_size = (rand() % (slave_max_rx_buf_size - TX_SIZE_MIN + 1)) + TX_SIZE_MIN;
        snprintf((char *)send_buf, slave_max_rx_buf_size, "this is master's transaction %d", tx_trans_id);

        for (int i = 0; i < num_to_send; i++) {
            ESP_ERROR_CHECK(essl_spi_wrdma(spi, send_buf, actual_tx_size, -1, 0));
            num_has_sent++;
            tx_trans_id++;
        }
    }

    //NOTE(review): unreachable — the while (1) loop above never exits.
    free(recv_buf);
    free(send_buf);
    spi_bus_remove_device(spi);
    spi_bus_free(MASTER_HOST);
}
2,151
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROMEOS_COMPONENTS_TETHER_FAKE_BLE_SCANNER_H_
#define CHROMEOS_COMPONENTS_TETHER_FAKE_BLE_SCANNER_H_

#include <vector>

#include "base/macros.h"
#include "chromeos/components/tether/ble_scanner.h"
#include "components/cryptauth/remote_device_ref.h"

namespace device {
class BluetoothDevice;
}

namespace chromeos {

namespace tether {

// Test double for BleScanner.
class FakeBleScanner : public BleScanner {
 public:
  // If |automatically_update_discovery_session| is true,
  // IsDiscoverySessionActive() will simply return whether at least once device
  // is registered; otherwise, that value must be determined manually via
  // set_is_discovery_session_active().
  //
  // NOTE(review): single-argument constructor — consider marking it
  // |explicit| to prevent implicit conversion from bool.
  FakeBleScanner(bool automatically_update_discovery_session);
  ~FakeBleScanner() override;

  // Device IDs for which a scan filter is currently registered.
  const std::vector<std::string>& registered_device_ids() {
    return registered_device_ids_;
  }

  // When set, subsequent (un)register calls presumably report failure —
  // behavior implemented in the .cc file; confirm there.
  void set_should_fail_to_register(bool should_fail_to_register) {
    should_fail_to_register_ = should_fail_to_register;
  }

  // Manually sets the value returned by IsDiscoverySessionActive() when
  // |automatically_update_discovery_session_| is false.
  void set_is_discovery_session_active(bool is_discovery_session_active) {
    is_discovery_session_active_ = is_discovery_session_active;
  }

  // Test hooks to fire the corresponding BleScanner notifications.
  void NotifyReceivedAdvertisementFromDevice(
      cryptauth::RemoteDeviceRef remote_device,
      device::BluetoothDevice* bluetooth_device,
      bool is_background_advertisement);
  void NotifyDiscoverySessionStateChanged(bool discovery_session_active);

  // BleScanner:
  bool RegisterScanFilterForDevice(const std::string& device_id) override;
  bool UnregisterScanFilterForDevice(const std::string& device_id) override;
  bool ShouldDiscoverySessionBeActive() override;
  bool IsDiscoverySessionActive() override;

 private:
  const bool automatically_update_discovery_session_;
  bool should_fail_to_register_ = false;
  bool is_discovery_session_active_ = false;

  std::vector<std::string> registered_device_ids_;

  DISALLOW_COPY_AND_ASSIGN(FakeBleScanner);
};

}  // namespace tether

}  // namespace chromeos

#endif  // CHROMEOS_COMPONENTS_TETHER_FAKE_BLE_SCANNER_H_
736
45,293
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: compiler/ir/serialization.common/src/KotlinIr.proto package org.jetbrains.kotlin.backend.common.serialization.proto; /** * Protobuf type {@code org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias} */ public final class IrTypeAlias extends org.jetbrains.kotlin.protobuf.GeneratedMessageLite implements // @@protoc_insertion_point(message_implements:org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias) IrTypeAliasOrBuilder { // Use IrTypeAlias.newBuilder() to construct. private IrTypeAlias(org.jetbrains.kotlin.protobuf.GeneratedMessageLite.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IrTypeAlias(boolean noInit) { this.unknownFields = org.jetbrains.kotlin.protobuf.ByteString.EMPTY;} private static final IrTypeAlias defaultInstance; public static IrTypeAlias getDefaultInstance() { return defaultInstance; } public IrTypeAlias getDefaultInstanceForType() { return defaultInstance; } private final org.jetbrains.kotlin.protobuf.ByteString unknownFields; private IrTypeAlias( org.jetbrains.kotlin.protobuf.CodedInputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; org.jetbrains.kotlin.protobuf.ByteString.Output unknownFieldsOutput = org.jetbrains.kotlin.protobuf.ByteString.newOutput(); org.jetbrains.kotlin.protobuf.CodedOutputStream unknownFieldsCodedOutput = org.jetbrains.kotlin.protobuf.CodedOutputStream.newInstance( unknownFieldsOutput, 1); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFieldsCodedOutput, extensionRegistry, tag)) { done = true; } break; } case 10: { org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.Builder subBuilder = null; if 
(((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = base_.toBuilder(); } base_ = input.readMessage(org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(base_); base_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; nameType_ = input.readInt64(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { typeParameter_ = new java.util.ArrayList<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter>(); mutable_bitField0_ |= 0x00000004; } typeParameter_.add(input.readMessage(org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter.PARSER, extensionRegistry)); break; } } } } catch (org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { typeParameter_ = java.util.Collections.unmodifiableList(typeParameter_); } try { unknownFieldsCodedOutput.flush(); } catch (java.io.IOException e) { // Should not happen } finally { unknownFields = unknownFieldsOutput.toByteString(); } makeExtensionsImmutable(); } } public static org.jetbrains.kotlin.protobuf.Parser<IrTypeAlias> PARSER = new org.jetbrains.kotlin.protobuf.AbstractParser<IrTypeAlias>() { public IrTypeAlias parsePartialFrom( org.jetbrains.kotlin.protobuf.CodedInputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { return new IrTypeAlias(input, extensionRegistry); } }; @java.lang.Override public org.jetbrains.kotlin.protobuf.Parser<IrTypeAlias> getParserForType() { return PARSER; } private int bitField0_; public static final int BASE_FIELD_NUMBER = 1; 
private org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base_; /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public boolean hasBase() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase getBase() { return base_; } public static final int NAME_TYPE_FIELD_NUMBER = 2; private long nameType_; /** * <code>required int64 name_type = 2;</code> */ public boolean hasNameType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required int64 name_type = 2;</code> */ public long getNameType() { return nameType_; } public static final int TYPE_PARAMETER_FIELD_NUMBER = 3; private java.util.List<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter> typeParameter_; /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public java.util.List<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter> getTypeParameterList() { return typeParameter_; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public java.util.List<? 
extends org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameterOrBuilder> getTypeParameterOrBuilderList() { return typeParameter_; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public int getTypeParameterCount() { return typeParameter_.size(); } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter getTypeParameter(int index) { return typeParameter_.get(index); } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameterOrBuilder getTypeParameterOrBuilder( int index) { return typeParameter_.get(index); } private void initFields() { base_ = org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.getDefaultInstance(); nameType_ = 0L; typeParameter_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasBase()) { memoizedIsInitialized = 0; return false; } if (!hasNameType()) { memoizedIsInitialized = 0; return false; } if (!getBase().isInitialized()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getTypeParameterCount(); i++) { if (!getTypeParameter(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.jetbrains.kotlin.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, base_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt64(2, nameType_); } for (int i = 0; i < 
typeParameter_.size(); i++) { output.writeMessage(3, typeParameter_.get(i)); } output.writeRawBytes(unknownFields); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.jetbrains.kotlin.protobuf.CodedOutputStream .computeMessageSize(1, base_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.jetbrains.kotlin.protobuf.CodedOutputStream .computeInt64Size(2, nameType_); } for (int i = 0; i < typeParameter_.size(); i++) { size += org.jetbrains.kotlin.protobuf.CodedOutputStream .computeMessageSize(3, typeParameter_.get(i)); } size += unknownFields.size(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( org.jetbrains.kotlin.protobuf.ByteString data) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( org.jetbrains.kotlin.protobuf.ByteString data, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom(byte[] data) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( byte[] data, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( java.io.InputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseDelimitedFrom( java.io.InputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( org.jetbrains.kotlin.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parseFrom( org.jetbrains.kotlin.protobuf.CodedInputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } /** * Protobuf type {@code org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias} */ public static final class Builder extends 
org.jetbrains.kotlin.protobuf.GeneratedMessageLite.Builder< org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias, Builder> implements // @@protoc_insertion_point(builder_implements:org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias) org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAliasOrBuilder { // Construct using org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); base_ = org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.getDefaultInstance(); bitField0_ = (bitField0_ & ~0x00000001); nameType_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); typeParameter_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias getDefaultInstanceForType() { return org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias.getDefaultInstance(); } public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias build() { org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias buildPartial() { org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias result = new org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.base_ = base_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 
0x00000002; } result.nameType_ = nameType_; if (((bitField0_ & 0x00000004) == 0x00000004)) { typeParameter_ = java.util.Collections.unmodifiableList(typeParameter_); bitField0_ = (bitField0_ & ~0x00000004); } result.typeParameter_ = typeParameter_; result.bitField0_ = to_bitField0_; return result; } public Builder mergeFrom(org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias other) { if (other == org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias.getDefaultInstance()) return this; if (other.hasBase()) { mergeBase(other.getBase()); } if (other.hasNameType()) { setNameType(other.getNameType()); } if (!other.typeParameter_.isEmpty()) { if (typeParameter_.isEmpty()) { typeParameter_ = other.typeParameter_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureTypeParameterIsMutable(); typeParameter_.addAll(other.typeParameter_); } } setUnknownFields( getUnknownFields().concat(other.unknownFields)); return this; } public final boolean isInitialized() { if (!hasBase()) { return false; } if (!hasNameType()) { return false; } if (!getBase().isInitialized()) { return false; } for (int i = 0; i < getTypeParameterCount(); i++) { if (!getTypeParameter(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.jetbrains.kotlin.protobuf.CodedInputStream input, org.jetbrains.kotlin.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.jetbrains.kotlin.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base_ = 
org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.getDefaultInstance(); /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public boolean hasBase() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase getBase() { return base_; } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public Builder setBase(org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase value) { if (value == null) { throw new NullPointerException(); } base_ = value; bitField0_ |= 0x00000001; return this; } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public Builder setBase( org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.Builder builderForValue) { base_ = builderForValue.build(); bitField0_ |= 0x00000001; return this; } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public Builder mergeBase(org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase value) { if (((bitField0_ & 0x00000001) == 0x00000001) && base_ != org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.getDefaultInstance()) { base_ = org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.newBuilder(base_).mergeFrom(value).buildPartial(); } else { base_ = value; } bitField0_ |= 0x00000001; return this; } /** * <code>required .org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase base = 1;</code> */ public Builder clearBase() { base_ = org.jetbrains.kotlin.backend.common.serialization.proto.IrDeclarationBase.getDefaultInstance(); 
bitField0_ = (bitField0_ & ~0x00000001); return this; } private long nameType_ ; /** * <code>required int64 name_type = 2;</code> */ public boolean hasNameType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required int64 name_type = 2;</code> */ public long getNameType() { return nameType_; } /** * <code>required int64 name_type = 2;</code> */ public Builder setNameType(long value) { bitField0_ |= 0x00000002; nameType_ = value; return this; } /** * <code>required int64 name_type = 2;</code> */ public Builder clearNameType() { bitField0_ = (bitField0_ & ~0x00000002); nameType_ = 0L; return this; } private java.util.List<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter> typeParameter_ = java.util.Collections.emptyList(); private void ensureTypeParameterIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { typeParameter_ = new java.util.ArrayList<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter>(typeParameter_); bitField0_ |= 0x00000004; } } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public java.util.List<org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter> getTypeParameterList() { return java.util.Collections.unmodifiableList(typeParameter_); } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public int getTypeParameterCount() { return typeParameter_.size(); } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter getTypeParameter(int index) { return typeParameter_.get(index); } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder setTypeParameter( int index, 
org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter value) { if (value == null) { throw new NullPointerException(); } ensureTypeParameterIsMutable(); typeParameter_.set(index, value); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder setTypeParameter( int index, org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter.Builder builderForValue) { ensureTypeParameterIsMutable(); typeParameter_.set(index, builderForValue.build()); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder addTypeParameter(org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter value) { if (value == null) { throw new NullPointerException(); } ensureTypeParameterIsMutable(); typeParameter_.add(value); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder addTypeParameter( int index, org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter value) { if (value == null) { throw new NullPointerException(); } ensureTypeParameterIsMutable(); typeParameter_.add(index, value); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder addTypeParameter( org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter.Builder builderForValue) { ensureTypeParameterIsMutable(); typeParameter_.add(builderForValue.build()); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder addTypeParameter( int index, org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter.Builder builderForValue) { ensureTypeParameterIsMutable(); 
typeParameter_.add(index, builderForValue.build()); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder addAllTypeParameter( java.lang.Iterable<? extends org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter> values) { ensureTypeParameterIsMutable(); org.jetbrains.kotlin.protobuf.AbstractMessageLite.Builder.addAll( values, typeParameter_); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder clearTypeParameter() { typeParameter_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>repeated .org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeParameter type_parameter = 3;</code> */ public Builder removeTypeParameter(int index) { ensureTypeParameterIsMutable(); typeParameter_.remove(index); return this; } // @@protoc_insertion_point(builder_scope:org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias) } static { defaultInstance = new IrTypeAlias(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:org.jetbrains.kotlin.backend.common.serialization.proto.IrTypeAlias) }
10,406
4,267
<reponame>kangwenhang/WindTerm /* * Copyright 2020, WindTerm. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "Splitter.h" #include <QEvent> #include <QVariant> const char* const SPLITTER_RATIO = "ratio"; Splitter::Splitter(QWidget *parent) : QSplitter(parent) , m_mainIndex(0) {} Splitter::~Splitter() {} void Splitter::showIndex(int index) { if (isIndexVisible(index)) return; emit showIndexRequested(index); if (QSplitterHandle *handle = this->handle(index)) { QList<int> sizes = this->sizes(); int total = 0; for (int size : sizes) { total += size; } float ratio = handle->property(SPLITTER_RATIO).toFloat(); if (ratio == 0.0) { ratio = 0.18f; } int offset = 0; int size = total * ratio; if (index > 0) { for (int i = 0; i < index; i++) { offset += sizes[i]; } offset -= size; } else { offset = size; } if (index <= m_mainIndex) { index += 1; } moveSplitter(offset, index); } } void Splitter::hideIndex(int index) { if (isIndexVisible(index) == false) return; emit hideIndexRequested(index); if (QSplitterHandle *handle = this->handle(index)) { QList<int> sizes = this->sizes(); int size = sizes[index]; float total = 0; for (int size : sizes) { total += size; } handle->setProperty(SPLITTER_RATIO, size / total); int offset = 0; for (int i = 0; i <= index; i++) { if (i == index && index <= m_mainIndex) break; offset += sizes[i]; } if (index <= m_mainIndex) { index += 1; } moveSplitter(offset, index); } } bool Splitter::isIndexVisible(int index) { if (index >= 0 && index < count()) { 
return sizes()[index] > 0; } Q_ASSERT(false); return false; } void Splitter::setIndexVisible(int index, bool visible) { if (visible) { showIndex(index); } else { hideIndex(index); } } void Splitter::setMainIndex(int mainIndex) { m_mainIndex = mainIndex; } bool Splitter::eventFilter(QObject *obj, QEvent *event) { if (event->type() == QEvent::MouseButtonDblClick) { if (QSplitterHandle *handle = dynamic_cast<QSplitterHandle *>(obj)) { for (int i = 0; i < count(); i++) { if (handle == this->handle(i)) { if (i <= m_mainIndex && i > 0) { i--; } bool visible = isIndexVisible(i); setIndexVisible(i, !visible); break; } } } } return QSplitter::eventFilter(obj, event); } QSplitterHandle *Splitter::createHandle() { QSplitterHandle *handle = QSplitter::createHandle(); handle->installEventFilter(this); return handle; }
1,181
552
#include "stdafx.h"
#include "CelestialBodySystem.h"

#include <EtFramework/Systems/TransformSystem.h>


namespace et {
namespace demo {


//=======================
// Celestial Body System
//=======================


//----------------------------
// CelestialBodySystem::c-tor
//
// Declare that the transform system's matrix computation depends on us, so
// our rotations are applied before matrices are recalculated each frame.
//
CelestialBodySystem::CelestialBodySystem()
{
	DeclareDependents<fw::TransformSystem::Compute>();
}

//------------------------------
// CelestialBodySystem::Process
//
// Rotate each celestial body around the world up axis at its own speed.
// Pressing 'R' flips the rotating flag on every body in the range.
//
void CelestialBodySystem::Process(fw::ComponentRange<CelestialBodySystemView>& range)
{
	// inputs shared by all bodies this frame
	bool const togglePressed =
		(core::InputManager::GetInstance()->GetKeyState(E_KbdKey::R) == E_KeyState::Pressed);
	float const deltaTime =
		core::ContextManager::GetInstance()->GetActiveContext()->time->DeltaTime();

	for (CelestialBodySystemView& view : range)
	{
		if (togglePressed)
		{
			view.body->isRotating = !view.body->isRotating;
		}

		if (view.body->isRotating)
		{
			view.transf->Rotate(quat(vec3::UP, deltaTime * view.body->rotationSpeed));
		}
	}
}


} // namespace demo
} // namespace et
354
735
<gh_stars>100-1000 // NOTE: This file was generated by the ServiceGenerator. // ---------------------------------------------------------------------------- // API: // Dialogflow API (dialogflow/v3) // Description: // Builds conversational interfaces (for example, chatbots, and voice-powered // apps and devices). // Documentation: // https://cloud.google.com/dialogflow/ #if SWIFT_PACKAGE || GTLR_USE_MODULAR_IMPORT @import GoogleAPIClientForRESTCore; #elif GTLR_BUILT_AS_FRAMEWORK #import "GTLR/GTLRQuery.h" #else #import "GTLRQuery.h" #endif #if GTLR_RUNTIME_VERSION != 3000 #error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source. #endif @class GTLRDialogflow_GoogleCloudDialogflowCxV3Agent; @class GTLRDialogflow_GoogleCloudDialogflowCxV3BatchDeleteTestCasesRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3BatchRunTestCasesRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3CompareVersionsRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3DeployFlowRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Environment; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ExportAgentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ExportFlowRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ExportTestCasesRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Flow; @class GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ImportFlowRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ImportTestCasesRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Intent; @class GTLRDialogflow_GoogleCloudDialogflowCxV3LoadVersionRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentRequest; @class 
GTLRDialogflow_GoogleCloudDialogflowCxV3Page; @class GTLRDialogflow_GoogleCloudDialogflowCxV3RestoreAgentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3RunContinuousTestRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3RunTestCaseRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings; @class GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType; @class GTLRDialogflow_GoogleCloudDialogflowCxV3StartExperimentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3StopExperimentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase; @class GTLRDialogflow_GoogleCloudDialogflowCxV3TrainFlowRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateAgentRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateFlowRequest; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Version; @class GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook; // Generated comments include content from the discovery document; avoid them // causing warnings since clang's checks are some what arbitrary. #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdocumentation" NS_ASSUME_NONNULL_BEGIN // ---------------------------------------------------------------------------- // Constants - For some of the query classes' properties below. // ---------------------------------------------------------------------------- // intentView /** * All fields are populated. * * Value: "INTENT_VIEW_FULL" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowIntentViewIntentViewFull; /** * Training phrases field is not populated in the response. * * Value: "INTENT_VIEW_PARTIAL" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowIntentViewIntentViewPartial; /** * Not specified. Treated as INTENT_VIEW_FULL. 
* * Value: "INTENT_VIEW_UNSPECIFIED" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowIntentViewIntentViewUnspecified; // ---------------------------------------------------------------------------- // type /** * Should never be used. * * Value: "COVERAGE_TYPE_UNSPECIFIED" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowTypeCoverageTypeUnspecified; /** * Intent coverage. * * Value: "INTENT" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowTypeIntent; /** * Page transition coverage. * * Value: "PAGE_TRANSITION" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowTypePageTransition; /** * Transition route group coverage. * * Value: "TRANSITION_ROUTE_GROUP" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowTypeTransitionRouteGroup; // ---------------------------------------------------------------------------- // view /** * Include basic metadata about the test case, but not the conversation turns. * This is the default value. * * Value: "BASIC" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowViewBasic; /** * Include everything. * * Value: "FULL" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowViewFull; /** * The default / unset value. The API will default to the BASIC view. * * Value: "TEST_CASE_VIEW_UNSPECIFIED" */ FOUNDATION_EXTERN NSString * const kGTLRDialogflowViewTestCaseViewUnspecified; // ---------------------------------------------------------------------------- // Query Classes // /** * Parent class for other Dialogflow query classes. */ @interface GTLRDialogflowQuery : GTLRQuery /** Selector specifying which fields to include in a partial response. */ @property(nonatomic, copy, nullable) NSString *fields; @end /** * Retrieves the specified Changelog. * * Method: dialogflow.projects.locations.agents.changelogs.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsChangelogsGet : GTLRDialogflowQuery /** * Required. 
The name of the changelog to get. Format: * `projects//locations//agents//changelogs/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Changelog. * * Retrieves the specified Changelog. * * @param name Required. The name of the changelog to get. Format: * `projects//locations//agents//changelogs/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsChangelogsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of Changelogs. * * Method: dialogflow.projects.locations.agents.changelogs.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsChangelogsList : GTLRDialogflowQuery /** * The filter string. Supports filter by user_email, resource, type and * create_time. Some examples: 1. By user email: user_email = * "<EMAIL>" 2. By resource name: resource = * "projects/123/locations/global/agents/456/flows/789" 3. By resource display * name: display_name = "my agent" 4. By action: action = "Create" 5. By type: * type = "flows" 6. By create time. Currently predicates on `create_time` and * `create_time_epoch_seconds` are supported: create_time_epoch_seconds > * 1551790877 AND create_time <= 2017-01-15T01:30:15.01Z 7. Combination of * above filters: resource = * "projects/123/locations/global/agents/456/flows/789" AND user_email = * "<EMAIL>" AND create_time <= 2017-01-15T01:30:15.01Z */ @property(nonatomic, copy, nullable) NSString *filter; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent containing the changelogs. Format: * `projects//locations//agents/`. 
*/ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListChangelogsResponse. * * Returns the list of Changelogs. * * @param parent Required. The agent containing the changelogs. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsChangelogsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Creates an agent in the specified location. Note: You should always train * flows prior to sending them queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsCreate : GTLRDialogflowQuery /** * Required. The location to create a agent for. Format: * `projects//locations/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Agent. * * Creates an agent in the specified location. Note: You should always train * flows prior to sending them queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Agent to * include in the query. * @param parent Required. The location to create a agent for. Format: * `projects//locations/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Agent *)object parent:(NSString *)parent; @end /** * Deletes the specified agent. 
* * Method: dialogflow.projects.locations.agents.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsDelete : GTLRDialogflowQuery /** * Required. The name of the agent to delete. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified agent. * * @param name Required. The name of the agent to delete. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Creates an entity type in the specified agent. Note: You should always train * a flow prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.entityTypes.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesCreate : GTLRDialogflowQuery /** * The language of the following fields in `entity_type`: * * `EntityType.entities.value` * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` If not specified, the agent's default * language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The agent to create a entity type for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType. * * Creates an entity type in the specified agent. Note: You should always train * a flow prior to sending it queries. 
See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType to * include in the query. * @param parent Required. The agent to create a entity type for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType *)object parent:(NSString *)parent; @end /** * Deletes the specified entity type. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.entityTypes.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesDelete : GTLRDialogflowQuery /** * This field has no effect for entity type not being used. For entity types * that are used by intents or pages: * If `force` is set to false, an error * will be returned with message indicating the referencing resources. * If * `force` is set to true, Dialogflow will remove the entity type, as well as * any references to the entity type (i.e. Page parameter of the entity type * will be changed to '\@sys.any' and intent parameter of the entity type will * be removed). */ @property(nonatomic, assign) BOOL force; /** * Required. The name of the entity type to delete. Format: * `projects//locations//agents//entityTypes/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified entity type. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param name Required. The name of the entity type to delete. 
Format: * `projects//locations//agents//entityTypes/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified entity type. * * Method: dialogflow.projects.locations.agents.entityTypes.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesGet : GTLRDialogflowQuery /** * The language to retrieve the entity type for. The following fields are * language dependent: * `EntityType.entities.value` * * `EntityType.entities.synonyms` * `EntityType.excluded_phrases.value` If not * specified, the agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The name of the entity type. Format: * `projects//locations//agents//entityTypes/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType. * * Retrieves the specified entity type. * * @param name Required. The name of the entity type. Format: * `projects//locations//agents//entityTypes/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all entity types in the specified agent. * * Method: dialogflow.projects.locations.agents.entityTypes.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesList : GTLRDialogflowQuery /** * The language to list entity types for. 
The following fields are language * dependent: * `EntityType.entities.value` * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` If not specified, the agent's default * language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent to list all entity types for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListEntityTypesResponse. * * Returns the list of all entity types in the specified agent. * * @param parent Required. The agent to list all entity types for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified entity type. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). 
* * Method: dialogflow.projects.locations.agents.entityTypes.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesPatch : GTLRDialogflowQuery /** * The language of the following fields in `entity_type`: * * `EntityType.entities.value` * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` If not specified, the agent's default * language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * The unique identifier of the entity type. Required for * EntityTypes.UpdateEntityType. Format: * `projects//locations//agents//entityTypes/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType. * * Updates the specified entity type. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType to * include in the query. * @param name The unique identifier of the entity type. Required for * EntityTypes.UpdateEntityType. Format: * `projects//locations//agents//entityTypes/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEntityTypesPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3EntityType *)object name:(NSString *)name; @end /** * Fetches a list of continuous test results for a given environment. 
* * Method: dialogflow.projects.locations.agents.environments.continuousTestResults.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsContinuousTestResultsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The environment to list results for. Format: * `projects//locations//agents// environments/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListContinuousTestResultsResponse. * * Fetches a list of continuous test results for a given environment. * * @param parent Required. The environment to list results for. Format: * `projects//locations//agents// environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsContinuousTestResultsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Creates an Environment in the specified Agent. This method is a * [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). 
* The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: Environment * * Method: dialogflow.projects.locations.agents.environments.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsCreate : GTLRDialogflowQuery /** * Required. The Agent to create an Environment for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Creates an Environment in the specified Agent. This method is a * [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: Environment * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Environment to * include in the query. * @param parent Required. The Agent to create an Environment for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Environment *)object parent:(NSString *)parent; @end /** * Deletes the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDelete : GTLRDialogflowQuery /** * Required. The name of the Environment to delete. Format: * `projects//locations//agents//environments/`. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified Environment. * * @param name Required. The name of the Environment to delete. Format: * `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Deploys a flow to the specified Environment. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: DeployFlowMetadata - `response`: DeployFlowResponse * * Method: dialogflow.projects.locations.agents.environments.deployFlow * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeployFlow : GTLRDialogflowQuery /** * Required. The environment to deploy the flow to. Format: * `projects//locations//agents// environments/`. */ @property(nonatomic, copy, nullable) NSString *environment; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Deploys a flow to the specified Environment. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: DeployFlowMetadata - `response`: DeployFlowResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3DeployFlowRequest to include in * the query. * @param environment Required. The environment to deploy the flow to. Format: * `projects//locations//agents// environments/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeployFlow */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3DeployFlowRequest *)object environment:(NSString *)environment; @end /** * Retrieves the specified Deployment. * * Method: dialogflow.projects.locations.agents.environments.deployments.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeploymentsGet : GTLRDialogflowQuery /** * Required. The name of the Deployment. Format: * `projects//locations//agents//environments//deployments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Deployment. * * Retrieves the specified Deployment. * * @param name Required. The name of the Deployment. Format: * `projects//locations//agents//environments//deployments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeploymentsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all deployments in the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.deployments.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeploymentsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 20 and at * most 100. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The Environment to list all environments for. Format: * `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListDeploymentsResponse. 
* * Returns the list of all deployments in the specified Environment. * * @param parent Required. The Environment to list all environments for. * Format: `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsDeploymentsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Creates an Experiment in the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.experiments.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsCreate : GTLRDialogflowQuery /** * Required. The Agent to create an Environment for. Format: * `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment. * * Creates an Experiment in the specified Environment. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment to * include in the query. * @param parent Required. The Agent to create an Environment for. Format: * `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment *)object parent:(NSString *)parent; @end /** * Deletes the specified Experiment. * * Method: dialogflow.projects.locations.agents.environments.experiments.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsDelete : GTLRDialogflowQuery /** * Required. The name of the Environment to delete. 
Format: * `projects//locations//agents//environments//experiments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified Experiment. * * @param name Required. The name of the Environment to delete. Format: * `projects//locations//agents//environments//experiments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified Experiment. * * Method: dialogflow.projects.locations.agents.environments.experiments.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsGet : GTLRDialogflowQuery /** * Required. The name of the Environment. Format: * `projects//locations//agents//environments//experiments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment. * * Retrieves the specified Experiment. * * @param name Required. The name of the Environment. Format: * `projects//locations//agents//environments//experiments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all experiments in the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.experiments.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 20 and at * most 100. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. 
*/ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The Environment to list all environments for. Format: * `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListExperimentsResponse. * * Returns the list of all experiments in the specified Environment. * * @param parent Required. The Environment to list all environments for. * Format: `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified Experiment. * * Method: dialogflow.projects.locations.agents.environments.experiments.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsPatch : GTLRDialogflowQuery /** * The name of the experiment. Format: * projects//locations//agents//environments//experiments/.. */ @property(nonatomic, copy, nullable) NSString *name; /** * Required. The mask to control which fields get updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment. * * Updates the specified Experiment. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment to * include in the query. * @param name The name of the experiment. Format: * projects//locations//agents//environments//experiments/.. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment *)object name:(NSString *)name; @end /** * Starts the specified Experiment. This rpc only changes the state of * experiment from PENDING to RUNNING. * * Method: dialogflow.projects.locations.agents.environments.experiments.start * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsStart : GTLRDialogflowQuery /** * Required. Resource name of the experiment to start. Format: * `projects//locations//agents//environments//experiments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment. * * Starts the specified Experiment. This rpc only changes the state of * experiment from PENDING to RUNNING. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3StartExperimentRequest to include * in the query. * @param name Required. Resource name of the experiment to start. Format: * `projects//locations//agents//environments//experiments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsStart */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3StartExperimentRequest *)object name:(NSString *)name; @end /** * Stops the specified Experiment. This rpc only changes the state of * experiment from RUNNING to DONE. * * Method: dialogflow.projects.locations.agents.environments.experiments.stop * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsStop : GTLRDialogflowQuery /** * Required. Resource name of the experiment to stop. Format: * `projects//locations//agents//environments//experiments/`. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Experiment. * * Stops the specified Experiment. This rpc only changes the state of * experiment from RUNNING to DONE. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3StopExperimentRequest to include * in the query. * @param name Required. Resource name of the experiment to stop. Format: * `projects//locations//agents//environments//experiments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsExperimentsStop */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3StopExperimentRequest *)object name:(NSString *)name; @end /** * Retrieves the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsGet : GTLRDialogflowQuery /** * Required. The name of the Environment. Format: * `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Environment. * * Retrieves the specified Environment. * * @param name Required. The name of the Environment. Format: * `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all environments in the specified Agent. * * Method: dialogflow.projects.locations.agents.environments.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 20 and at * most 100. 
*/ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The Agent to list all environments for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListEnvironmentsResponse. * * Returns the list of all environments in the specified Agent. * * @param parent Required. The Agent to list all environments for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Looks up the history of the specified Environment. * * Method: dialogflow.projects.locations.agents.environments.lookupEnvironmentHistory * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsLookupEnvironmentHistory : GTLRDialogflowQuery /** * Required. Resource name of the environment to look up the history for. * Format: `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3LookupEnvironmentHistoryResponse. * * Looks up the history of the specified Environment. * * @param name Required. Resource name of the environment to look up the * history for. 
Format: `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsLookupEnvironmentHistory * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithName:(NSString *)name; @end /** * Updates the specified Environment. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: Environment * * Method: dialogflow.projects.locations.agents.environments.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsPatch : GTLRDialogflowQuery /** * The name of the environment. Format: * `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Required. The mask to control which fields get updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Updates the specified Environment. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: Environment * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Environment to * include in the query. * @param name The name of the environment. Format: * `projects//locations//agents//environments/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Environment *)object name:(NSString *)name; @end /** * Kicks off a continuous test under the specified Environment. This method is * a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: RunContinuousTestMetadata - `response`: * RunContinuousTestResponse * * Method: dialogflow.projects.locations.agents.environments.runContinuousTest * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsRunContinuousTest : GTLRDialogflowQuery /** Required. Format: `projects//locations//agents//environments/`. */ @property(nonatomic, copy, nullable) NSString *environment; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Kicks off a continuous test under the specified Environment. This method is * a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: RunContinuousTestMetadata - `response`: * RunContinuousTestResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3RunContinuousTestRequest to * include in the query. * @param environment Required. Format: * `projects//locations//agents//environments/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsRunContinuousTest */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3RunContinuousTestRequest *)object environment:(NSString *)environment; @end /** * Processes a natural language query and returns structured, actionable data * as a result. 
This method is not idempotent, because it may cause session * entity types to be updated, which in turn might affect results of future * queries. Note: Always use agent versions for production traffic. See * [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * Method: dialogflow.projects.locations.agents.environments.sessions.detectIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsDetectIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). Note: * Always use agent versions for production traffic. See [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). */ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentResponse. * * Processes a natural language query and returns structured, actionable data * as a result. This method is not idempotent, because it may cause session * entity types to be updated, which in turn might affect results of future * queries. Note: Always use agent versions for production traffic. See * [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentRequest to include in * the query. 
* @param session Required. The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). Note: * Always use agent versions for production traffic. See [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsDetectIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentRequest *)object session:(NSString *)session; @end /** * Creates a session entity type. * * Method: dialogflow.projects.locations.agents.environments.sessions.entityTypes.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesCreate : GTLRDialogflowQuery /** * Required. The session to create a session entity type for. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Creates a session entity type. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType to include in * the query. * @param parent Required. The session to create a session entity type for. 
 *    Format: `projects//locations//agents//sessions/` or
 *    `projects//locations//agents//environments//sessions/`. If `Environment
 *    ID` is not specified, we assume default 'draft' environment.
 *
 *  @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesCreate
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType *)object
                         parent:(NSString *)parent;

@end

/**
 *  Deletes the specified session entity type.
 *
 *  Method: dialogflow.projects.locations.agents.environments.sessions.entityTypes.delete
 *
 *  Authorization scope(s):
 *    @c kGTLRAuthScopeDialogflow
 *    @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesDelete : GTLRDialogflowQuery

/**
 *  Required. The name of the session entity type to delete. Format:
 *  `projects//locations//agents//sessions//entityTypes/` or
 *  `projects//locations//agents//environments//sessions//entityTypes/`. If
 *  `Environment ID` is not specified, we assume default 'draft' environment.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 *  Fetches a @c GTLRDialogflow_GoogleProtobufEmpty.
 *
 *  Deletes the specified session entity type.
 *
 *  @param name Required. The name of the session entity type to delete. Format:
 *    `projects//locations//agents//sessions//entityTypes/` or
 *    `projects//locations//agents//environments//sessions//entityTypes/`. If
 *    `Environment ID` is not specified, we assume default 'draft' environment.
 *
 *  @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesDelete
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 *  Retrieves the specified session entity type.
* * Method: dialogflow.projects.locations.agents.environments.sessions.entityTypes.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesGet : GTLRDialogflowQuery /** * Required. The name of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Retrieves the specified session entity type. * * @param name Required. The name of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all session entity types in the specified session. * * Method: dialogflow.projects.locations.agents.environments.sessions.entityTypes.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The session to list all session entity types from. 
Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListSessionEntityTypesResponse. * * Returns the list of all session entity types in the specified session. * * @param parent Required. The session to list all session entity types from. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified session entity type. * * Method: dialogflow.projects.locations.agents.environments.sessions.entityTypes.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesPatch : GTLRDialogflowQuery /** * Required. The unique identifier of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Updates the specified session entity type. 
* * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType to include in * the query. * @param name Required. The unique identifier of the session entity type. * Format: `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsEntityTypesPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType *)object name:(NSString *)name; @end /** * Fulfills a matched intent returned by MatchIntent. Must be called after * MatchIntent, with input from MatchIntentResponse. Otherwise, the behavior is * undefined. * * Method: dialogflow.projects.locations.agents.environments.sessions.fulfillIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsFulfillIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). */ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentResponse. * * Fulfills a matched intent returned by MatchIntent. Must be called after * MatchIntent, with input from MatchIntentResponse. Otherwise, the behavior is * undefined. 
* * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentRequest to include in * the query. * @param session Required. The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsFulfillIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentRequest *)object session:(NSString *)session; @end /** * Returns preliminary intent match results, doesn't change the session status. * * Method: dialogflow.projects.locations.agents.environments.sessions.matchIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsMatchIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). 
*/ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentResponse. * * Returns preliminary intent match results, doesn't change the session status. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentRequest to include in * the query. * @param session Required. The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsEnvironmentsSessionsMatchIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentRequest *)object session:(NSString *)session; @end /** * Exports the specified agent to a binary file. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: ExportAgentResponse * * Method: dialogflow.projects.locations.agents.export * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsExport : GTLRDialogflowQuery /** * Required. The name of the agent to export. Format: * `projects//locations//agents/`. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Exports the specified agent to a binary file. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: ExportAgentResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ExportAgentRequest to include in * the query. * @param name Required. The name of the agent to export. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsExport */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ExportAgentRequest *)object name:(NSString *)name; @end /** * Creates a flow in the specified agent. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.flows.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsCreate : GTLRDialogflowQuery /** * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` If not * specified, the agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. 
*/ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The agent to create a flow for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Flow. * * Creates a flow in the specified agent. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Flow to include * in the query. * @param parent Required. The agent to create a flow for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Flow *)object parent:(NSString *)parent; @end /** * Deletes a specified flow. * * Method: dialogflow.projects.locations.agents.flows.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsDelete : GTLRDialogflowQuery /** * This field has no effect for flows with no incoming transitions. For flows * with incoming transitions: * If `force` is set to false, an error will be * returned with message indicating the incoming transitions. * If `force` is * set to true, Dialogflow will remove the flow, as well as any transitions to * the flow (i.e. Target flow in event handlers or Target flow in transition * routes that point to this flow will be cleared). */ @property(nonatomic, assign) BOOL force; /** * Required. The name of the flow to delete. Format: * `projects//locations//agents//flows/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes a specified flow. * * @param name Required. The name of the flow to delete. Format: * `projects//locations//agents//flows/`. 
 *
 *  @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsDelete
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 *  Exports the specified flow to a binary file. This method is a [long-running
 *  operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 *  The returned `Operation` type has the following method-specific fields: -
 *  `metadata`: An empty [Struct
 *  message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 *  - `response`: ExportFlowResponse Note that resources (e.g. intents,
 *  entities, webhooks) that the flow references will also be exported.
 *
 *  Method: dialogflow.projects.locations.agents.flows.export
 *
 *  Authorization scope(s):
 *    @c kGTLRAuthScopeDialogflow
 *    @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsExport : GTLRDialogflowQuery

/**
 *  Required. The name of the flow to export. Format:
 *  `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 *  Fetches a @c GTLRDialogflow_GoogleLongrunningOperation.
 *
 *  Exports the specified flow to a binary file. This method is a [long-running
 *  operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 *  The returned `Operation` type has the following method-specific fields: -
 *  `metadata`: An empty [Struct
 *  message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 *  - `response`: ExportFlowResponse Note that resources (e.g. intents,
 *  entities, webhooks) that the flow references will also be exported.
 *
 *  @param object The @c
 *    GTLRDialogflow_GoogleCloudDialogflowCxV3ExportFlowRequest to include in
 *    the query.
 *  @param name Required. The name of the flow to export. Format:
 *    `projects//locations//agents//flows/`.
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsExport */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ExportFlowRequest *)object name:(NSString *)name; @end /** * Retrieves the specified flow. * * Method: dialogflow.projects.locations.agents.flows.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsGet : GTLRDialogflowQuery /** * The language to retrieve the flow for. The following fields are language * dependent: * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` If not * specified, the agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The name of the flow to get. Format: * `projects//locations//agents//flows/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Flow. * * Retrieves the specified flow. * * @param name Required. The name of the flow to get. Format: * `projects//locations//agents//flows/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Gets the latest flow validation result. Flow validation is performed when * ValidateFlow is called. 
 *
 *  Method: dialogflow.projects.locations.agents.flows.getValidationResult
 *
 *  Authorization scope(s):
 *    @c kGTLRAuthScopeDialogflow
 *    @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsGetValidationResult : GTLRDialogflowQuery

/** If not specified, the agent's default language is used. */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 *  Required. The flow name. Format:
 *  `projects//locations//agents//flows//validationResult`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 *  Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3FlowValidationResult.
 *
 *  Gets the latest flow validation result. Flow validation is performed when
 *  ValidateFlow is called.
 *
 *  @param name Required. The flow name. Format:
 *    `projects//locations//agents//flows//validationResult`.
 *
 *  @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsGetValidationResult
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 *  Imports the specified flow to the specified agent from a binary file. This
 *  method is a [long-running
 *  operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 *  The returned `Operation` type has the following method-specific fields: -
 *  `metadata`: An empty [Struct
 *  message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 *  - `response`: ImportFlowResponse Note: You should always train a flow prior
 *  to sending it queries. See the [training
 *  documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 *  Method: dialogflow.projects.locations.agents.flows.import
 *
 *  Authorization scope(s):
 *    @c kGTLRAuthScopeDialogflow
 *    @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsImport : GTLRDialogflowQuery

/**
 *  Required. The agent to import the flow into. Format:
 *  `projects//locations//agents/`.
*/ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Imports the specified flow to the specified agent from a binary file. This * method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: ImportFlowResponse Note: You should always train a flow prior * to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ImportFlowRequest to include in * the query. * @param parent Required. The agent to import the flow into. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsImport */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ImportFlowRequest *)object parent:(NSString *)parent; @end /** * Returns the list of all flows in the specified agent. * * Method: dialogflow.projects.locations.agents.flows.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsList : GTLRDialogflowQuery /** * The language to list flows for. The following fields are language dependent: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` If not * specified, the agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. 
Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent containing the flows. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListFlowsResponse. * * Returns the list of all flows in the specified agent. * * @param parent Required. The agent containing the flows. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Creates a page in the specified flow. Note: You should always train a flow * prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). 
 *
 * Method: dialogflow.projects.locations.agents.flows.pages.create
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesCreate : GTLRDialogflowQuery

/**
 * The language of the following fields in `page`:
 * * `Page.entry_fulfillment.messages`
 * * `Page.entry_fulfillment.conditional_cases`
 * * `Page.event_handlers.trigger_fulfillment.messages`
 * * `Page.event_handlers.trigger_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`
 * * `Page.transition_routes.trigger_fulfillment.messages`
 * * `Page.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * Required. The flow to create a page for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Page.
 *
 * Creates a page in the specified flow. Note: You should always train a flow
 * prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Page to include
 *   in the query.
 * @param parent Required. The flow to create a page for. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesCreate
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Page *)object
                         parent:(NSString *)parent;

@end

/**
 * Deletes the specified page. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.pages.delete
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesDelete : GTLRDialogflowQuery

/**
 * This field has no effect for pages with no incoming transitions. For pages
 * with incoming transitions:
 * * If `force` is set to false, an error will be returned with message
 *   indicating the incoming transitions.
 * * If `force` is set to true, Dialogflow will remove the page, as well as any
 *   transitions to the page (i.e. Target page in event handlers or Target page
 *   in transition routes that point to this page will be cleared).
 */
@property(nonatomic, assign) BOOL force;

/**
 * Required. The name of the page to delete. Format:
 * `projects//locations//agents//flows//pages/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty.
 *
 * Deletes the specified page. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param name Required. The name of the page to delete. Format:
 *   `projects//locations//agents//flows//pages/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesDelete
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Retrieves the specified page.
 *
 * Method: dialogflow.projects.locations.agents.flows.pages.get
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesGet : GTLRDialogflowQuery

/**
 * The language to retrieve the page for. The following fields are language
 * dependent:
 * * `Page.entry_fulfillment.messages`
 * * `Page.entry_fulfillment.conditional_cases`
 * * `Page.event_handlers.trigger_fulfillment.messages`
 * * `Page.event_handlers.trigger_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`
 * * `Page.transition_routes.trigger_fulfillment.messages`
 * * `Page.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * Required. The name of the page. Format:
 * `projects//locations//agents//flows//pages/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Page.
 *
 * Retrieves the specified page.
 *
 * @param name Required. The name of the page. Format:
 *   `projects//locations//agents//flows//pages/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesGet
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Returns the list of all pages in the specified flow.
 *
 * Method: dialogflow.projects.locations.agents.flows.pages.list
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesList : GTLRDialogflowQuery

/**
 * The language to list pages for. The following fields are language dependent:
 * * `Page.entry_fulfillment.messages`
 * * `Page.entry_fulfillment.conditional_cases`
 * * `Page.event_handlers.trigger_fulfillment.messages`
 * * `Page.event_handlers.trigger_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`
 * * `Page.transition_routes.trigger_fulfillment.messages`
 * * `Page.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 */
@property(nonatomic, assign) NSInteger pageSize;

/** The next_page_token value returned from a previous list request. */
@property(nonatomic, copy, nullable) NSString *pageToken;

/**
 * Required. The flow to list all pages for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListPagesResponse.
 *
 * Returns the list of all pages in the specified flow.
 *
 * @param parent Required. The flow to list all pages for. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesList
 *
 * @note Automatic pagination will be done when @c shouldFetchNextPages is
 *       enabled. See @c shouldFetchNextPages on @c GTLRService for more
 *       information.
 */
+ (instancetype)queryWithParent:(NSString *)parent;

@end

/**
 * Updates the specified page. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.pages.patch
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesPatch : GTLRDialogflowQuery

/**
 * The language of the following fields in `page`:
 * * `Page.entry_fulfillment.messages`
 * * `Page.entry_fulfillment.conditional_cases`
 * * `Page.event_handlers.trigger_fulfillment.messages`
 * * `Page.event_handlers.trigger_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`
 * * `Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`
 * * `Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`
 * * `Page.transition_routes.trigger_fulfillment.messages`
 * * `Page.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * The unique identifier of the page. Required for the Pages.UpdatePage method.
 * Pages.CreatePage populates the name automatically. Format:
 * `projects//locations//agents//flows//pages/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * The mask to control which fields get updated. If the mask is not present,
 * all fields will be updated.
 *
 * String format is a comma-separated list of fields.
 */
@property(nonatomic, copy, nullable) NSString *updateMask;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Page.
 *
 * Updates the specified page. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Page to include
 *   in the query.
 * @param name The unique identifier of the page. Required for the
 *   Pages.UpdatePage method. Pages.CreatePage populates the name
 *   automatically. Format: `projects//locations//agents//flows//pages/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPagesPatch
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Page *)object
                           name:(NSString *)name;

@end

/**
 * Updates the specified flow. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.patch
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPatch : GTLRDialogflowQuery

/**
 * The language of the following fields in `flow`:
 * * `Flow.event_handlers.trigger_fulfillment.messages`
 * * `Flow.event_handlers.trigger_fulfillment.conditional_cases`
 * * `Flow.transition_routes.trigger_fulfillment.messages`
 * * `Flow.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported.
Note: languages must be enabled in the agent before they can be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * The unique identifier of the flow. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * The mask to control which fields get updated. If the mask is not present,
 * all fields will be updated.
 *
 * String format is a comma-separated list of fields.
 */
@property(nonatomic, copy, nullable) NSString *updateMask;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Flow.
 *
 * Updates the specified flow. Note: You should always train a flow prior to
 * sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Flow to include
 *   in the query.
 * @param name The unique identifier of the flow. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsPatch
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Flow *)object
                           name:(NSString *)name;

@end

/**
 * Trains the specified flow. Note that only the flow in 'draft' environment is
 * trained. This method is a [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: An empty [Struct
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 * - `response`: An [Empty
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty)
 * Note: You should always train a flow prior to sending it queries. See the
 * [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.train
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTrain : GTLRDialogflowQuery

/**
 * Required. The flow to train. Format: `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation.
 *
 * Trains the specified flow. Note that only the flow in 'draft' environment is
 * trained. This method is a [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: An empty [Struct
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 * - `response`: An [Empty
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty)
 * Note: You should always train a flow prior to sending it queries. See the
 * [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3TrainFlowRequest to include in the
 *   query.
 * @param name Required. The flow to train. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTrain
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3TrainFlowRequest *)object
                           name:(NSString *)name;

@end

/**
 * Creates a TransitionRouteGroup in the specified flow. Note: You should
 * always train a flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.transitionRouteGroups.create
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsCreate : GTLRDialogflowQuery

/**
 * The language of the following fields in `TransitionRouteGroup`:
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.messages`
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * Required. The flow to create a TransitionRouteGroup for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup.
 *
 * Creates a TransitionRouteGroup in the specified flow. Note: You should
 * always train a flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup to include in
 *   the query.
 * @param parent Required. The flow to create a TransitionRouteGroup for.
 *   Format: `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsCreate
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup *)object
                         parent:(NSString *)parent;

@end

/**
 * Deletes the specified TransitionRouteGroup. Note: You should always train a
 * flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.transitionRouteGroups.delete
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsDelete : GTLRDialogflowQuery

/**
 * This field has no effect for transition route group that no page is using.
 * If the transition route group is referenced by any page:
 * * If `force` is set to false, an error will be returned with message
 *   indicating pages that reference the transition route group.
 * * If `force` is set to true, Dialogflow will remove the transition route
 *   group, as well as any reference to it.
 */
@property(nonatomic, assign) BOOL force;

/**
 * Required. The name of the TransitionRouteGroup to delete. Format:
 * `projects//locations//agents//flows//transitionRouteGroups/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty.
 *
 * Deletes the specified TransitionRouteGroup. Note: You should always train a
 * flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param name Required. The name of the TransitionRouteGroup to delete.
 *   Format: `projects//locations//agents//flows//transitionRouteGroups/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsDelete
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Retrieves the specified TransitionRouteGroup.
 *
 * Method: dialogflow.projects.locations.agents.flows.transitionRouteGroups.get
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsGet : GTLRDialogflowQuery

/**
 * The language to retrieve the transition route group for.
The following fields are language dependent:
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.messages`
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * Required. The name of the TransitionRouteGroup. Format:
 * `projects//locations//agents//flows//transitionRouteGroups/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup.
 *
 * Retrieves the specified TransitionRouteGroup.
 *
 * @param name Required. The name of the TransitionRouteGroup. Format:
 *   `projects//locations//agents//flows//transitionRouteGroups/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsGet
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Returns the list of all transition route groups in the specified flow.
 *
 * Method: dialogflow.projects.locations.agents.flows.transitionRouteGroups.list
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsList : GTLRDialogflowQuery

/**
 * The language to list transition route groups for. The following fields are
 * language dependent:
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.messages`
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used. [Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 */
@property(nonatomic, assign) NSInteger pageSize;

/** The next_page_token value returned from a previous list request. */
@property(nonatomic, copy, nullable) NSString *pageToken;

/**
 * Required. The flow to list all transition route groups for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c
 * GTLRDialogflow_GoogleCloudDialogflowCxV3ListTransitionRouteGroupsResponse.
 *
 * Returns the list of all transition route groups in the specified flow.
 *
 * @param parent Required. The flow to list all transition route groups for.
 *   Format: `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsList
 *
 * @note Automatic pagination will be done when @c shouldFetchNextPages is
 *       enabled. See @c shouldFetchNextPages on @c GTLRService for more
 *       information.
 */
+ (instancetype)queryWithParent:(NSString *)parent;

@end

/**
 * Updates the specified TransitionRouteGroup. Note: You should always train a
 * flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * Method: dialogflow.projects.locations.agents.flows.transitionRouteGroups.patch
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch : GTLRDialogflowQuery

/**
 * The language of the following fields in `TransitionRouteGroup`:
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.messages`
 * * `TransitionRouteGroup.transition_routes.trigger_fulfillment.conditional_cases`
 * If not specified, the agent's default language is used.
[Many
 * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
 * are supported. Note: languages must be enabled in the agent before they can
 * be used.
 */
@property(nonatomic, copy, nullable) NSString *languageCode;

/**
 * The unique identifier of the transition route group.
 * TransitionRouteGroups.CreateTransitionRouteGroup populates the name
 * automatically. Format:
 * `projects//locations//agents//flows//transitionRouteGroups/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * The mask to control which fields get updated.
 *
 * String format is a comma-separated list of fields.
 */
@property(nonatomic, copy, nullable) NSString *updateMask;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup.
 *
 * Updates the specified TransitionRouteGroup. Note: You should always train a
 * flow prior to sending it queries. See the [training
 * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup to include in
 *   the query.
 * @param name The unique identifier of the transition route group.
 *   TransitionRouteGroups.CreateTransitionRouteGroup populates the name
 *   automatically. Format:
 *   `projects//locations//agents//flows//transitionRouteGroups/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3TransitionRouteGroup *)object
                           name:(NSString *)name;

@end

/**
 * Validates the specified flow and creates or updates validation results.
 * Please call this API after the training is completed to get the complete
 * validation results.
 *
 * Method: dialogflow.projects.locations.agents.flows.validate
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsValidate : GTLRDialogflowQuery

/**
 * Required. The flow to validate. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3FlowValidationResult.
 *
 * Validates the specified flow and creates or updates validation results.
 * Please call this API after the training is completed to get the complete
 * validation results.
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateFlowRequest to include in
 *   the query.
 * @param name Required. The flow to validate. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsValidate
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateFlowRequest *)object
                           name:(NSString *)name;

@end

/**
 * Compares the specified base version with target version.
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.compareVersions
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsCompareVersions : GTLRDialogflowQuery

/**
 * Required. Name of the base flow version to compare with the target version.
 * Use version ID `0` to indicate the draft version of the specified flow.
 * Format: `projects//locations//agents/ /flows//versions/`.
 */
@property(nonatomic, copy, nullable) NSString *baseVersion;

/**
 * Fetches a @c
 * GTLRDialogflow_GoogleCloudDialogflowCxV3CompareVersionsResponse.
 *
 * Compares the specified base version with target version.
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3CompareVersionsRequest to include
 *   in the query.
 * @param baseVersion Required. Name of the base flow version to compare with
 *   the target version. Use version ID `0` to indicate the draft version of
 *   the specified flow. Format: `projects//locations//agents/
 *   /flows//versions/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsCompareVersions
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3CompareVersionsRequest *)object
                    baseVersion:(NSString *)baseVersion;

@end

/**
 * Creates a Version in the specified Flow. This method is a [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: CreateVersionOperationMetadata
 * - `response`: Version
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.create
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsCreate : GTLRDialogflowQuery

/**
 * Required. The Flow to create a Version for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation.
 *
 * Creates a Version in the specified Flow. This method is a [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: CreateVersionOperationMetadata
 * - `response`: Version
 *
 * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Version to
 *   include in the query.
 * @param parent Required. The Flow to create a Version for. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsCreate
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Version *)object
                         parent:(NSString *)parent;

@end

/**
 * Deletes the specified Version.
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.delete
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsDelete : GTLRDialogflowQuery

/**
 * Required. The name of the Version to delete. Format:
 * `projects//locations//agents//flows//versions/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty.
 *
 * Deletes the specified Version.
 *
 * @param name Required. The name of the Version to delete. Format:
 *   `projects//locations//agents//flows//versions/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsDelete
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Retrieves the specified Version.
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.get
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsGet : GTLRDialogflowQuery

/**
 * Required. The name of the Version. Format:
 * `projects//locations//agents//flows//versions/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Version.
 *
 * Retrieves the specified Version.
 *
 * @param name Required. The name of the Version. Format:
 *   `projects//locations//agents//flows//versions/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsGet
 */
+ (instancetype)queryWithName:(NSString *)name;

@end

/**
 * Returns the list of all versions in the specified Flow.
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.list
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsList : GTLRDialogflowQuery

/**
 * The maximum number of items to return in a single page.
By default 20 and at most 100.
 */
@property(nonatomic, assign) NSInteger pageSize;

/** The next_page_token value returned from a previous list request. */
@property(nonatomic, copy, nullable) NSString *pageToken;

/**
 * Required. The Flow to list all versions for. Format:
 * `projects//locations//agents//flows/`.
 */
@property(nonatomic, copy, nullable) NSString *parent;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListVersionsResponse.
 *
 * Returns the list of all versions in the specified Flow.
 *
 * @param parent Required. The Flow to list all versions for. Format:
 *   `projects//locations//agents//flows/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsList
 *
 * @note Automatic pagination will be done when @c shouldFetchNextPages is
 *       enabled. See @c shouldFetchNextPages on @c GTLRService for more
 *       information.
 */
+ (instancetype)queryWithParent:(NSString *)parent;

@end

/**
 * Loads resources in the specified version to the draft flow. This method is a
 * [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: An empty [Struct
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 * - `response`: An [Empty
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty)
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.load
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsLoad : GTLRDialogflowQuery

/**
 * Required. The Version to be loaded to draft flow. Format:
 * `projects//locations//agents//flows//versions/`.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation.
 *
 * Loads resources in the specified version to the draft flow. This method is a
 * [long-running
 * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation).
 * The returned `Operation` type has the following method-specific fields:
 * - `metadata`: An empty [Struct
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct)
 * - `response`: An [Empty
 *   message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty)
 *
 * @param object The @c
 *   GTLRDialogflow_GoogleCloudDialogflowCxV3LoadVersionRequest to include in
 *   the query.
 * @param name Required. The Version to be loaded to draft flow. Format:
 *   `projects//locations//agents//flows//versions/`.
 *
 * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsLoad
 */
+ (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3LoadVersionRequest *)object
                           name:(NSString *)name;

@end

/**
 * Updates the specified Version.
 *
 * Method: dialogflow.projects.locations.agents.flows.versions.patch
 *
 * Authorization scope(s):
 *   @c kGTLRAuthScopeDialogflow
 *   @c kGTLRAuthScopeDialogflowCloudPlatform
 */
@interface GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsPatch : GTLRDialogflowQuery

/**
 * Format: projects//locations//agents//flows//versions/. Version ID is a
 * self-increasing number generated by Dialogflow upon version creation.
 */
@property(nonatomic, copy, nullable) NSString *name;

/**
 * Required. The mask to control which fields get updated. Currently only
 * `description` and `display_name` can be updated.
 *
 * String format is a comma-separated list of fields.
 */
@property(nonatomic, copy, nullable) NSString *updateMask;

/**
 * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Version.
 *
 * Updates the specified Version.
 *
 * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Version to
 *   include in the query.
 * @param name Format: projects//locations//agents//flows//versions/.
Version * ID is a self-increasing number generated by Dialogflow upon version * creation. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsFlowsVersionsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Version *)object name:(NSString *)name; @end /** * Retrieves the specified agent. * * Method: dialogflow.projects.locations.agents.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsGet : GTLRDialogflowQuery /** * Required. The name of the agent. Format: `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Agent. * * Retrieves the specified agent. * * @param name Required. The name of the agent. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Gets the latest agent validation result. Agent validation is performed when * ValidateAgent is called. * * Method: dialogflow.projects.locations.agents.getValidationResult * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsGetValidationResult : GTLRDialogflowQuery /** If not specified, the agent's default language is used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The agent name. Format: * `projects//locations//agents//validationResult`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3AgentValidationResult. * * Gets the latest agent validation result. Agent validation is performed when * ValidateAgent is called. * * @param name Required. The agent name. Format: * `projects//locations//agents//validationResult`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsGetValidationResult */ + (instancetype)queryWithName:(NSString *)name; @end /** * Creates an intent in the specified agent. Note: You should always train a * flow prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.intents.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsCreate : GTLRDialogflowQuery /** * The language of the following fields in `intent`: * * `Intent.training_phrases.parts.text` If not specified, the agent's default * language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The agent to create an intent for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Intent. * * Creates an intent in the specified agent. Note: You should always train a * flow prior to sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Intent to * include in the query. * @param parent Required. The agent to create an intent for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Intent *)object parent:(NSString *)parent; @end /** * Deletes the specified intent. Note: You should always train a flow prior to * sending it queries. 
See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.intents.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsDelete : GTLRDialogflowQuery /** * Required. The name of the intent to delete. Format: * `projects//locations//agents//intents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified intent. Note: You should always train a flow prior to * sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param name Required. The name of the intent to delete. Format: * `projects//locations//agents//intents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified intent. * * Method: dialogflow.projects.locations.agents.intents.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsGet : GTLRDialogflowQuery /** * The language to retrieve the intent for. The following fields are language * dependent: * `Intent.training_phrases.parts.text` If not specified, the * agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * Required. The name of the intent. Format: * `projects//locations//agents//intents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Intent. * * Retrieves the specified intent. * * @param name Required. 
The name of the intent. Format: * `projects//locations//agents//intents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all intents in the specified agent. * * Method: dialogflow.projects.locations.agents.intents.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsList : GTLRDialogflowQuery /** * The resource view to apply to the returned intent. * * Likely values: * @arg @c kGTLRDialogflowIntentViewIntentViewUnspecified Not specified. * Treated as INTENT_VIEW_FULL. (Value: "INTENT_VIEW_UNSPECIFIED") * @arg @c kGTLRDialogflowIntentViewIntentViewPartial Training phrases field * is not populated in the response. (Value: "INTENT_VIEW_PARTIAL") * @arg @c kGTLRDialogflowIntentViewIntentViewFull All fields are populated. * (Value: "INTENT_VIEW_FULL") */ @property(nonatomic, copy, nullable) NSString *intentView; /** * The language to list intents for. The following fields are language * dependent: * `Intent.training_phrases.parts.text` If not specified, the * agent's default language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent to list all intents for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListIntentsResponse. 
* * Returns the list of all intents in the specified agent. * * @param parent Required. The agent to list all intents for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified intent. Note: You should always train a flow prior to * sending it queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.intents.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsPatch : GTLRDialogflowQuery /** * The language of the following fields in `intent`: * * `Intent.training_phrases.parts.text` If not specified, the agent's default * language is used. [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. Note: languages must be enabled in the agent before they can * be used. */ @property(nonatomic, copy, nullable) NSString *languageCode; /** * The unique identifier of the intent. Required for the Intents.UpdateIntent * method. Intents.CreateIntent populates the name automatically. Format: * `projects//locations//agents//intents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. If the mask is not present, * all fields will be updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Intent. * * Updates the specified intent. Note: You should always train a flow prior to * sending it queries. 
See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Intent to * include in the query. * @param name The unique identifier of the intent. Required for the * Intents.UpdateIntent method. Intents.CreateIntent populates the name * automatically. Format: `projects//locations//agents//intents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsIntentsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Intent *)object name:(NSString *)name; @end /** * Returns the list of all agents in the specified location. * * Method: dialogflow.projects.locations.agents.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The location to list all agents for. Format: * `projects//locations/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListAgentsResponse. * * Returns the list of all agents in the specified location. * * @param parent Required. The location to list all agents for. Format: * `projects//locations/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified agent. Note: You should always train flows prior to * sending them queries. 
See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsPatch : GTLRDialogflowQuery /** * The unique identifier of the agent. Required for the Agents.UpdateAgent * method. Agents.CreateAgent populates the name automatically. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. If the mask is not present, * all fields will be updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Agent. * * Updates the specified agent. Note: You should always train flows prior to * sending them queries. See the [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Agent to * include in the query. * @param name The unique identifier of the agent. Required for the * Agents.UpdateAgent method. Agents.CreateAgent populates the name * automatically. Format: `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Agent *)object name:(NSString *)name; @end /** * Restores the specified agent from a binary file. Replaces the current agent * with a new one. Note that all existing resources in agent (e.g. intents, * entity types, flows) will be removed. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). 
* The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train flows prior to sending them queries. See the * [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * Method: dialogflow.projects.locations.agents.restore * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsRestore : GTLRDialogflowQuery /** * Required. The name of the agent to restore into. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Restores the specified agent from a binary file. Replaces the current agent * with a new one. Note that all existing resources in agent (e.g. intents, * entity types, flows) will be removed. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: An empty [Struct * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) * - `response`: An [Empty * message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) * Note: You should always train flows prior to sending them queries. See the * [training * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training). * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3RestoreAgentRequest to include in * the query. * @param name Required. The name of the agent to restore into. Format: * `projects//locations//agents/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsRestore */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3RestoreAgentRequest *)object name:(NSString *)name; @end /** * Processes a natural language query and returns structured, actionable data * as a result. This method is not idempotent, because it may cause session * entity types to be updated, which in turn might affect results of future * queries. Note: Always use agent versions for production traffic. See * [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * Method: dialogflow.projects.locations.agents.sessions.detectIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsDetectIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). Note: * Always use agent versions for production traffic. See [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). */ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentResponse. * * Processes a natural language query and returns structured, actionable data * as a result. This method is not idempotent, because it may cause session * entity types to be updated, which in turn might affect results of future * queries. 
Note: Always use agent versions for production traffic. See * [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentRequest to include in * the query. * @param session Required. The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). Note: * Always use agent versions for production traffic. See [Versions and * environments](https://cloud.google.com/dialogflow/cx/docs/concept/version). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsDetectIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3DetectIntentRequest *)object session:(NSString *)session; @end /** * Creates a session entity type. * * Method: dialogflow.projects.locations.agents.sessions.entityTypes.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesCreate : GTLRDialogflowQuery /** * Required. The session to create a session entity type for. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Creates a session entity type. 
* * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType to include in * the query. * @param parent Required. The session to create a session entity type for. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType *)object parent:(NSString *)parent; @end /** * Deletes the specified session entity type. * * Method: dialogflow.projects.locations.agents.sessions.entityTypes.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesDelete : GTLRDialogflowQuery /** * Required. The name of the session entity type to delete. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified session entity type. * * @param name Required. The name of the session entity type to delete. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified session entity type. 
* * Method: dialogflow.projects.locations.agents.sessions.entityTypes.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesGet : GTLRDialogflowQuery /** * Required. The name of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Retrieves the specified session entity type. * * @param name Required. The name of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all session entity types in the specified session. * * Method: dialogflow.projects.locations.agents.sessions.entityTypes.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The session to list all session entity types from. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. 
If `Environment ID` * is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListSessionEntityTypesResponse. * * Returns the list of all session entity types in the specified session. * * @param parent Required. The session to list all session entity types from. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified session entity type. * * Method: dialogflow.projects.locations.agents.sessions.entityTypes.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesPatch : GTLRDialogflowQuery /** * Required. The unique identifier of the session entity type. Format: * `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType. * * Updates the specified session entity type. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType to include in * the query. * @param name Required. 
The unique identifier of the session entity type. * Format: `projects//locations//agents//sessions//entityTypes/` or * `projects//locations//agents//environments//sessions//entityTypes/`. If * `Environment ID` is not specified, we assume default 'draft' environment. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsEntityTypesPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SessionEntityType *)object name:(NSString *)name; @end /** * Fulfills a matched intent returned by MatchIntent. Must be called after * MatchIntent, with input from MatchIntentResponse. Otherwise, the behavior is * undefined. * * Method: dialogflow.projects.locations.agents.sessions.fulfillIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsFulfillIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). */ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentResponse. * * Fulfills a matched intent returned by MatchIntent. Must be called after * MatchIntent, with input from MatchIntentResponse. Otherwise, the behavior is * undefined. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentRequest to include in * the query. * @param session Required. 
The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsFulfillIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3FulfillIntentRequest *)object session:(NSString *)session; @end /** * Returns preliminary intent match results, doesn't change the session status. * * Method: dialogflow.projects.locations.agents.sessions.matchIntent * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsMatchIntent : GTLRDialogflowQuery /** * Required. The name of the session this query is sent to. Format: * `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment ID` * is not specified, we assume default 'draft' environment. It's up to the API * caller to choose an appropriate `Session ID`. It can be a random number or * some type of session identifiers (preferably hashed). The length of the * `Session ID` must not exceed 36 characters. For more information, see the * [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). */ @property(nonatomic, copy, nullable) NSString *session; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentResponse. * * Returns preliminary intent match results, doesn't change the session status. 
* * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentRequest to include in * the query. * @param session Required. The name of the session this query is sent to. * Format: `projects//locations//agents//sessions/` or * `projects//locations//agents//environments//sessions/`. If `Environment * ID` is not specified, we assume default 'draft' environment. It's up to * the API caller to choose an appropriate `Session ID`. It can be a random * number or some type of session identifiers (preferably hashed). The length * of the `Session ID` must not exceed 36 characters. For more information, * see the [sessions * guide](https://cloud.google.com/dialogflow/cx/docs/concept/session). * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsSessionsMatchIntent */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3MatchIntentRequest *)object session:(NSString *)session; @end /** * Batch deletes test cases. * * Method: dialogflow.projects.locations.agents.testCases.batchDelete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesBatchDelete : GTLRDialogflowQuery /** * Required. The agent to delete test cases from. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Batch deletes test cases. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3BatchDeleteTestCasesRequest to * include in the query. * @param parent Required. The agent to delete test cases from. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesBatchDelete */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3BatchDeleteTestCasesRequest *)object parent:(NSString *)parent; @end /** * Kicks off a batch run of test cases. 
This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: BatchRunTestCasesMetadata - `response`: * BatchRunTestCasesResponse * * Method: dialogflow.projects.locations.agents.testCases.batchRun * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesBatchRun : GTLRDialogflowQuery /** Required. Agent name. Format: `projects//locations//agents/ `. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Kicks off a batch run of test cases. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: BatchRunTestCasesMetadata - `response`: * BatchRunTestCasesResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3BatchRunTestCasesRequest to * include in the query. * @param parent Required. Agent name. Format: `projects//locations//agents/ `. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesBatchRun */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3BatchRunTestCasesRequest *)object parent:(NSString *)parent; @end /** * Calculates the test coverage for an agent. * * Method: dialogflow.projects.locations.agents.testCases.calculateCoverage * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesCalculateCoverage : GTLRDialogflowQuery /** * Required. The agent to calculate coverage for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *agent; /** * Required. The type of coverage requested. 
* * Likely values: * @arg @c kGTLRDialogflowTypeCoverageTypeUnspecified Should never be used. * (Value: "COVERAGE_TYPE_UNSPECIFIED") * @arg @c kGTLRDialogflowTypeIntent Intent coverage. (Value: "INTENT") * @arg @c kGTLRDialogflowTypePageTransition Page transition coverage. * (Value: "PAGE_TRANSITION") * @arg @c kGTLRDialogflowTypeTransitionRouteGroup Transition route group * coverage. (Value: "TRANSITION_ROUTE_GROUP") */ @property(nonatomic, copy, nullable) NSString *type; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3CalculateCoverageResponse. * * Calculates the test coverage for an agent. * * @param agent Required. The agent to calculate coverage for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesCalculateCoverage */ + (instancetype)queryWithAgent:(NSString *)agent; @end /** * Creates a test case for the given agent. * * Method: dialogflow.projects.locations.agents.testCases.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesCreate : GTLRDialogflowQuery /** * Required. The agent to create the test case for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase. * * Creates a test case for the given agent. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase to * include in the query. * @param parent Required. The agent to create the test case for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase *)object parent:(NSString *)parent; @end /** * Exports the test cases under the agent to a Cloud Storage bucket or a local * file. Filter can be applied to export a subset of test cases. 
This method is * a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: ExportTestCasesMetadata - `response`: ExportTestCasesResponse * * Method: dialogflow.projects.locations.agents.testCases.export * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesExport : GTLRDialogflowQuery /** * Required. The agent where to export test cases from. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Exports the test cases under the agent to a Cloud Storage bucket or a local * file. Filter can be applied to export a subset of test cases. This method is * a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: ExportTestCasesMetadata - `response`: ExportTestCasesResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ExportTestCasesRequest to include * in the query. * @param parent Required. The agent where to export test cases from. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesExport */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ExportTestCasesRequest *)object parent:(NSString *)parent; @end /** * Gets a test case. * * Method: dialogflow.projects.locations.agents.testCases.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesGet : GTLRDialogflowQuery /** * Required. The name of the testcase. Format: * `projects//locations//agents//testCases/`. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase. * * Gets a test case. * * @param name Required. The name of the testcase. Format: * `projects//locations//agents//testCases/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Imports the test cases from a Cloud Storage bucket or a local file. It * always creates new test cases and won't overwrite any existing ones. The * provided ID in the imported test case is neglected. This method is a * [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: ImportTestCasesMetadata - `response`: ImportTestCasesResponse * * Method: dialogflow.projects.locations.agents.testCases.import * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesImport : GTLRDialogflowQuery /** * Required. The agent to import test cases to. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Imports the test cases from a Cloud Storage bucket or a local file. It * always creates new test cases and won't overwrite any existing ones. The * provided ID in the imported test case is neglected. This method is a * [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: ImportTestCasesMetadata - `response`: ImportTestCasesResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ImportTestCasesRequest to include * in the query. * @param parent Required. The agent to import test cases to. 
Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesImport */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ImportTestCasesRequest *)object parent:(NSString *)parent; @end /** * Fetches a list of test cases for a given agent. * * Method: dialogflow.projects.locations.agents.testCases.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 20. Note * that when TestCaseView = FULL, the maximum page size allowed is 20. When * TestCaseView = BASIC, the maximum page size allowed is 500. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent to list all pages for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Specifies whether response should include all fields or just the metadata. * * Likely values: * @arg @c kGTLRDialogflowViewTestCaseViewUnspecified The default / unset * value. The API will default to the BASIC view. (Value: * "TEST_CASE_VIEW_UNSPECIFIED") * @arg @c kGTLRDialogflowViewBasic Include basic metadata about the test * case, but not the conversation turns. This is the default value. * (Value: "BASIC") * @arg @c kGTLRDialogflowViewFull Include everything. (Value: "FULL") */ @property(nonatomic, copy, nullable) NSString *view; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListTestCasesResponse. * * Fetches a list of test cases for a given agent. * * @param parent Required. The agent to list all pages for. Format: * `projects//locations//agents/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified test case. * * Method: dialogflow.projects.locations.agents.testCases.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesPatch : GTLRDialogflowQuery /** * The unique identifier of the test case. TestCases.CreateTestCase will * populate the name automatically. Otherwise use format: * `projects//locations//agents/ /testCases/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Required. The mask to specify which fields should be updated. The * `creationTime` and `lastTestResult` cannot be updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase. * * Updates the specified test case. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase to * include in the query. * @param name The unique identifier of the test case. TestCases.CreateTestCase * will populate the name automatically. Otherwise use format: * `projects//locations//agents/ /testCases/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3TestCase *)object name:(NSString *)name; @end /** * Gets a test case result. * * Method: dialogflow.projects.locations.agents.testCases.results.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesResultsGet : GTLRDialogflowQuery /** * Required. The name of the testcase. 
Format: * `projects//locations//agents//testCases//results/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3TestCaseResult. * * Gets a test case result. * * @param name Required. The name of the testcase. Format: * `projects//locations//agents//testCases//results/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesResultsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Fetches a list of results for a given test case. * * Method: dialogflow.projects.locations.agents.testCases.results.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesResultsList : GTLRDialogflowQuery /** * The filter expression used to filter test case results. See [API * Filtering](https://aip.dev/160). The expression is case insensitive. Only * 'AND' is supported for logical operators. The supported syntax is listed * below in detail: [AND ] ... [AND latest] The supported fields and operators * are: field operator `environment` `=`, `IN` (Use value `draft` for draft * environment) `test_time` `>`, `<` `latest` only returns the latest test * result in all results for each test case. Examples: * "environment=draft AND * latest" matches the latest test result for each test case in the draft * environment. * "environment IN (e1,e2)" matches any test case results with * an environment resource name of either "e1" or "e2". * "test_time > * 1602540713" matches any test case results with test time later than a unix * timestamp in seconds 1602540713. */ @property(nonatomic, copy, nullable) NSString *filter; /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. 
*/ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The test case to list results for. Format: * `projects//locations//agents// testCases/`. Specify a `-` as a wildcard for * TestCase ID to list results across multiple test cases. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListTestCaseResultsResponse. * * Fetches a list of results for a given test case. * * @param parent Required. The test case to list results for. Format: * `projects//locations//agents// testCases/`. Specify a `-` as a wildcard * for TestCase ID to list results across multiple test cases. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesResultsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Kicks off a test case run. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). * The returned `Operation` type has the following method-specific fields: - * `metadata`: RunTestCaseMetadata - `response`: RunTestCaseResponse * * Method: dialogflow.projects.locations.agents.testCases.run * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesRun : GTLRDialogflowQuery /** * Required. Format of test case name to run: `projects//locations/ * /agents//testCases/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Kicks off a test case run. This method is a [long-running * operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). 
* The returned `Operation` type has the following method-specific fields: - * `metadata`: RunTestCaseMetadata - `response`: RunTestCaseResponse * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3RunTestCaseRequest to include in * the query. * @param name Required. Format of test case name to run: `projects//locations/ * /agents//testCases/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsTestCasesRun */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3RunTestCaseRequest *)object name:(NSString *)name; @end /** * Validates the specified agent and creates or updates validation results. The * agent in draft version is validated. Please call this API after the training * is completed to get the complete validation results. * * Method: dialogflow.projects.locations.agents.validate * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsValidate : GTLRDialogflowQuery /** * Required. The agent to validate. Format: `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3AgentValidationResult. * * Validates the specified agent and creates or updates validation results. The * agent in draft version is validated. Please call this API after the training * is completed to get the complete validation results. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateAgentRequest to include in * the query. * @param name Required. The agent to validate. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsValidate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3ValidateAgentRequest *)object name:(NSString *)name; @end /** * Creates a webhook in the specified agent. 
 * * Method: dialogflow.projects.locations.agents.webhooks.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksCreate : GTLRDialogflowQuery /** * Required. The agent to create a webhook for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook. * * Creates a webhook in the specified agent. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook to * include in the query. * @param parent Required. The agent to create a webhook for. Format: * `projects//locations//agents/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook *)object parent:(NSString *)parent; @end /** * Deletes the specified webhook. * * Method: dialogflow.projects.locations.agents.webhooks.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksDelete : GTLRDialogflowQuery /** * This field has no effect for webhook not being used. For webhooks that are * used by pages/flows/transition route groups: * If `force` is set to false, * an error will be returned with message indicating the referenced resources. * * If `force` is set to true, Dialogflow will remove the webhook, as well as * any references to the webhook (i.e. Webhook and tag in fulfillments that * point to this webhook will be removed). */ @property(nonatomic, assign) BOOL force; /** * Required. The name of the webhook to delete. Format: * `projects//locations//agents//webhooks/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified webhook. * * @param name Required. 
The name of the webhook to delete. Format: * `projects//locations//agents//webhooks/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified webhook. * * Method: dialogflow.projects.locations.agents.webhooks.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksGet : GTLRDialogflowQuery /** * Required. The name of the webhook. Format: * `projects//locations//agents//webhooks/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook. * * Retrieves the specified webhook. * * @param name Required. The name of the webhook. Format: * `projects//locations//agents//webhooks/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all webhooks in the specified agent. * * Method: dialogflow.projects.locations.agents.webhooks.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 100 and * at most 1000. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The agent to list all webhooks for. Format: * `projects//locations//agents/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3ListWebhooksResponse. * * Returns the list of all webhooks in the specified agent. * * @param parent Required. The agent to list all webhooks for. Format: * `projects//locations//agents/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified webhook. * * Method: dialogflow.projects.locations.agents.webhooks.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksPatch : GTLRDialogflowQuery /** * The unique identifier of the webhook. Required for the * Webhooks.UpdateWebhook method. Webhooks.CreateWebhook populates the name * automatically. Format: `projects//locations//agents//webhooks/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * The mask to control which fields get updated. If the mask is not present, * all fields will be updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook. * * Updates the specified webhook. * * @param object The @c GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook to * include in the query. * @param name The unique identifier of the webhook. Required for the * Webhooks.UpdateWebhook method. Webhooks.CreateWebhook populates the name * automatically. Format: `projects//locations//agents//webhooks/`. * * @return GTLRDialogflowQuery_ProjectsLocationsAgentsWebhooksPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3Webhook *)object name:(NSString *)name; @end /** * Gets information about a location. * * Method: dialogflow.projects.locations.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsGet : GTLRDialogflowQuery /** Resource name for the location. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudLocationLocation. * * Gets information about a location. * * @param name Resource name for the location. * * @return GTLRDialogflowQuery_ProjectsLocationsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Lists information about the supported locations for this service. * * Method: dialogflow.projects.locations.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsList : GTLRDialogflowQuery /** * A filter to narrow down results to a preferred subset. The filtering * language accepts strings like "displayName=tokyo", and is documented in more * detail in [AIP-160](https://google.aip.dev/160). */ @property(nonatomic, copy, nullable) NSString *filter; /** The resource that owns the locations collection, if applicable. */ @property(nonatomic, copy, nullable) NSString *name; /** * The maximum number of results to return. If not set, the service selects a * default. */ @property(nonatomic, assign) NSInteger pageSize; /** * A page token received from the `next_page_token` field in the response. Send * that page token to receive the subsequent page. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Fetches a @c GTLRDialogflow_GoogleCloudLocationListLocationsResponse. * * Lists information about the supported locations for this service. * * @param name The resource that owns the locations collection, if applicable. * * @return GTLRDialogflowQuery_ProjectsLocationsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithName:(NSString *)name; @end /** * Starts asynchronous cancellation on a long-running operation. The server * makes a best effort to cancel the operation, but success is not guaranteed. 
* If the server doesn't support this method, it returns * `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or * other methods to check whether the cancellation succeeded or whether the * operation completed despite cancellation. On successful cancellation, the * operation is not deleted; instead, it becomes an operation with an * Operation.error value with a google.rpc.Status.code of 1, corresponding to * `Code.CANCELLED`. * * Method: dialogflow.projects.locations.operations.cancel * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsOperationsCancel : GTLRDialogflowQuery /** The name of the operation resource to be cancelled. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Starts asynchronous cancellation on a long-running operation. The server * makes a best effort to cancel the operation, but success is not guaranteed. * If the server doesn't support this method, it returns * `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or * other methods to check whether the cancellation succeeded or whether the * operation completed despite cancellation. On successful cancellation, the * operation is not deleted; instead, it becomes an operation with an * Operation.error value with a google.rpc.Status.code of 1, corresponding to * `Code.CANCELLED`. * * @param name The name of the operation resource to be cancelled. * * @return GTLRDialogflowQuery_ProjectsLocationsOperationsCancel */ + (instancetype)queryWithName:(NSString *)name; @end /** * Gets the latest state of a long-running operation. Clients can use this * method to poll the operation result at intervals as recommended by the API * service. 
* * Method: dialogflow.projects.locations.operations.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsOperationsGet : GTLRDialogflowQuery /** The name of the operation resource. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Gets the latest state of a long-running operation. Clients can use this * method to poll the operation result at intervals as recommended by the API * service. * * @param name The name of the operation resource. * * @return GTLRDialogflowQuery_ProjectsLocationsOperationsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Lists operations that match the specified filter in the request. If the * server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the * `name` binding allows API services to override the binding to use different * resource name schemes, such as `users/ * /operations`. To override the * binding, API services can add a binding such as `"/v1/{name=users/ * *}/operations"` to their service configuration. For backwards compatibility, * the default name includes the operations collection id, however overriding * users must ensure the name binding is the parent resource, without the * operations collection id. * * Method: dialogflow.projects.locations.operations.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsOperationsList : GTLRDialogflowQuery /** The standard list filter. */ @property(nonatomic, copy, nullable) NSString *filter; /** The name of the operation's parent resource. */ @property(nonatomic, copy, nullable) NSString *name; /** The standard list page size. */ @property(nonatomic, assign) NSInteger pageSize; /** The standard list page token. 
*/ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningListOperationsResponse. * * Lists operations that match the specified filter in the request. If the * server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the * `name` binding allows API services to override the binding to use different * resource name schemes, such as `users/ * /operations`. To override the * binding, API services can add a binding such as `"/v1/{name=users/ * *}/operations"` to their service configuration. For backwards compatibility, * the default name includes the operations collection id, however overriding * users must ensure the name binding is the parent resource, without the * operations collection id. * * @param name The name of the operation's parent resource. * * @return GTLRDialogflowQuery_ProjectsLocationsOperationsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithName:(NSString *)name; @end /** * Create security settings in the specified location. * * Method: dialogflow.projects.locations.securitySettings.create * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsCreate : GTLRDialogflowQuery /** * Required. The location to create an SecuritySettings for. Format: * `projects//locations/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings. * * Create security settings in the specified location. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings to include in the * query. * @param parent Required. The location to create an SecuritySettings for. * Format: `projects//locations/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsCreate */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings *)object parent:(NSString *)parent; @end /** * Deletes the specified SecuritySettings. * * Method: dialogflow.projects.locations.securitySettings.delete * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsDelete : GTLRDialogflowQuery /** * Required. The name of the SecuritySettings to delete. Format: * `projects//locations//securitySettings/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Deletes the specified SecuritySettings. * * @param name Required. The name of the SecuritySettings to delete. Format: * `projects//locations//securitySettings/`. * * @return GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsDelete */ + (instancetype)queryWithName:(NSString *)name; @end /** * Retrieves the specified SecuritySettings. The returned settings may be stale * by up to 1 minute. * * Method: dialogflow.projects.locations.securitySettings.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsGet : GTLRDialogflowQuery /** * Required. Resource name of the settings. Format: * `projects//locations//securitySettings/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings. * * Retrieves the specified SecuritySettings. The returned settings may be stale * by up to 1 minute. * * @param name Required. Resource name of the settings. Format: * `projects//locations//securitySettings/`. 
* * @return GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Returns the list of all security settings in the specified location. * * Method: dialogflow.projects.locations.securitySettings.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsList : GTLRDialogflowQuery /** * The maximum number of items to return in a single page. By default 20 and at * most 100. */ @property(nonatomic, assign) NSInteger pageSize; /** The next_page_token value returned from a previous list request. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Required. The location to list all security settings for. Format: * `projects//locations/`. */ @property(nonatomic, copy, nullable) NSString *parent; /** * Fetches a @c * GTLRDialogflow_GoogleCloudDialogflowCxV3ListSecuritySettingsResponse. * * Returns the list of all security settings in the specified location. * * @param parent Required. The location to list all security settings for. * Format: `projects//locations/`. * * @return GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithParent:(NSString *)parent; @end /** * Updates the specified SecuritySettings. * * Method: dialogflow.projects.locations.securitySettings.patch * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsPatch : GTLRDialogflowQuery /** * Resource name of the settings. Required for the * SecuritySettingsService.UpdateSecuritySettings method. * SecuritySettingsService.CreateSecuritySettings populates the name * automatically. 
Format: `projects//locations//securitySettings/`. */ @property(nonatomic, copy, nullable) NSString *name; /** * Required. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * * String format is a comma-separated list of fields. */ @property(nonatomic, copy, nullable) NSString *updateMask; /** * Fetches a @c GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings. * * Updates the specified SecuritySettings. * * @param object The @c * GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings to include in the * query. * @param name Resource name of the settings. Required for the * SecuritySettingsService.UpdateSecuritySettings method. * SecuritySettingsService.CreateSecuritySettings populates the name * automatically. Format: `projects//locations//securitySettings/`. * * @return GTLRDialogflowQuery_ProjectsLocationsSecuritySettingsPatch */ + (instancetype)queryWithObject:(GTLRDialogflow_GoogleCloudDialogflowCxV3SecuritySettings *)object name:(NSString *)name; @end /** * Starts asynchronous cancellation on a long-running operation. The server * makes a best effort to cancel the operation, but success is not guaranteed. * If the server doesn't support this method, it returns * `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or * other methods to check whether the cancellation succeeded or whether the * operation completed despite cancellation. On successful cancellation, the * operation is not deleted; instead, it becomes an operation with an * Operation.error value with a google.rpc.Status.code of 1, corresponding to * `Code.CANCELLED`. * * Method: dialogflow.projects.operations.cancel * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsOperationsCancel : GTLRDialogflowQuery /** The name of the operation resource to be cancelled. 
*/ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleProtobufEmpty. * * Starts asynchronous cancellation on a long-running operation. The server * makes a best effort to cancel the operation, but success is not guaranteed. * If the server doesn't support this method, it returns * `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or * other methods to check whether the cancellation succeeded or whether the * operation completed despite cancellation. On successful cancellation, the * operation is not deleted; instead, it becomes an operation with an * Operation.error value with a google.rpc.Status.code of 1, corresponding to * `Code.CANCELLED`. * * @param name The name of the operation resource to be cancelled. * * @return GTLRDialogflowQuery_ProjectsOperationsCancel */ + (instancetype)queryWithName:(NSString *)name; @end /** * Gets the latest state of a long-running operation. Clients can use this * method to poll the operation result at intervals as recommended by the API * service. * * Method: dialogflow.projects.operations.get * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsOperationsGet : GTLRDialogflowQuery /** The name of the operation resource. */ @property(nonatomic, copy, nullable) NSString *name; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningOperation. * * Gets the latest state of a long-running operation. Clients can use this * method to poll the operation result at intervals as recommended by the API * service. * * @param name The name of the operation resource. * * @return GTLRDialogflowQuery_ProjectsOperationsGet */ + (instancetype)queryWithName:(NSString *)name; @end /** * Lists operations that match the specified filter in the request. If the * server doesn't support this method, it returns `UNIMPLEMENTED`. 
NOTE: the * `name` binding allows API services to override the binding to use different * resource name schemes, such as `users/ * /operations`. To override the * binding, API services can add a binding such as `"/v1/{name=users/ * *}/operations"` to their service configuration. For backwards compatibility, * the default name includes the operations collection id, however overriding * users must ensure the name binding is the parent resource, without the * operations collection id. * * Method: dialogflow.projects.operations.list * * Authorization scope(s): * @c kGTLRAuthScopeDialogflow * @c kGTLRAuthScopeDialogflowCloudPlatform */ @interface GTLRDialogflowQuery_ProjectsOperationsList : GTLRDialogflowQuery /** The standard list filter. */ @property(nonatomic, copy, nullable) NSString *filter; /** The name of the operation's parent resource. */ @property(nonatomic, copy, nullable) NSString *name; /** The standard list page size. */ @property(nonatomic, assign) NSInteger pageSize; /** The standard list page token. */ @property(nonatomic, copy, nullable) NSString *pageToken; /** * Fetches a @c GTLRDialogflow_GoogleLongrunningListOperationsResponse. * * Lists operations that match the specified filter in the request. If the * server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the * `name` binding allows API services to override the binding to use different * resource name schemes, such as `users/ * /operations`. To override the * binding, API services can add a binding such as `"/v1/{name=users/ * *}/operations"` to their service configuration. For backwards compatibility, * the default name includes the operations collection id, however overriding * users must ensure the name binding is the parent resource, without the * operations collection id. * * @param name The name of the operation's parent resource. * * @return GTLRDialogflowQuery_ProjectsOperationsList * * @note Automatic pagination will be done when @c shouldFetchNextPages is * enabled. 
See @c shouldFetchNextPages on @c GTLRService for more * information. */ + (instancetype)queryWithName:(NSString *)name; @end NS_ASSUME_NONNULL_END #pragma clang diagnostic pop
57,359
12,063
import torch import torchvision import torchvision.datasets as dset import torchvision.transforms
25
1,475
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.persistence.query.mock;

import java.util.Comparator;

import org.apache.geode.internal.cache.CachedDeserializable;

/**
 * Compare two cached deserializable objects by unwrapping the underlying object.
 *
 * If either object is not a cached deserializable, just use the object directly.
 */
class CachedDeserializableComparator implements Comparator<Object> {

  // Delegate comparator supplied by the caller. Stored as Comparator<Object> (instead of the
  // previous raw type) so compare() below type-checks; the cast is unchecked but no less safe
  // than the original raw invocation — the caller is responsible for passing a comparator that
  // can handle the deserialized values.
  private final Comparator<Object> comparator;

  /**
   * @param comparator the comparator applied to the unwrapped (deserialized) values
   */
  @SuppressWarnings("unchecked")
  public CachedDeserializableComparator(Comparator<?> comparator) {
    this.comparator = (Comparator<Object>) comparator;
  }

  /**
   * Unwraps either argument if it is a {@link CachedDeserializable}, then delegates to the
   * wrapped comparator.
   */
  @Override
  public int compare(Object o1, Object o2) {
    if (o1 instanceof CachedDeserializable) {
      o1 = ((CachedDeserializable) o1).getDeserializedForReading();
    }

    if (o2 instanceof CachedDeserializable) {
      o2 = ((CachedDeserializable) o2).getDeserializedForReading();
    }

    return comparator.compare(o1, o2);
  }
}
475
1,082
<filename>game-tool-scripts/src/main/java/com/jjy/game/tool/tcp/user/LoginResHandler.java
package com.jjy.game.tool.tcp.user;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.jjy.game.message.Mid.MID;
import com.jjy.game.message.hall.HallChatMessage.ChatRequest;
import com.jjy.game.message.hall.HallChatMessage.ChatType;
import com.jjy.game.message.hall.HallLoginMessage.LoginResponse;
import com.jjy.game.message.hall.HallLoginMessage.LoginSubGameRequest;
import com.jjy.game.message.system.SystemMessage.UdpConnectRequest;
import com.jjy.game.tool.client.Player;
import com.jjy.game.tool.client.PressureClientTool;
import com.jjy.game.tool.client.PressureServiceThread;
import com.jzy.game.engine.handler.HandlerEntity;
import com.jzy.game.engine.handler.TcpHandler;
import com.jzy.game.engine.server.ServerType;

/**
 * Handles the hall login response (登录返回) for the pressure-test client: logs the
 * login round-trip time, then immediately follows up with a sub-game login
 * request and a UDP connect request on the same session.
 *
 * @author JiangZhiYong
 * @QQ 359135103 2017-07-10 18:26:19
 */
@HandlerEntity(mid = MID.LoginRes_VALUE, msg = LoginResponse.class)
public class LoginResHandler extends TcpHandler {
	private static final Logger LOGGER = LoggerFactory.getLogger(LoginResHandler.class);

	// NOTE(review): presumably overrides TcpHandler.run() — confirm and add @Override.
	public void run() {
		LoginResponse res = getMsg();
		long uid = res.getRid();
		// SEND_TIME was stamped on the session when the login request went out;
		// Long.MAX_VALUE is the fallback if it is missing (yields a negative elapsed time).
		long sendTime = (Long) session.getAttribute(PressureServiceThread.SEND_TIME, Long.MAX_VALUE);
		LOGGER.info("用户[{}] 登录成功,耗时:{}ms", uid, (System.currentTimeMillis() - sendTime));
		session.setAttribute("roleId", uid);
		Player player = (Player) session.getAttribute(Player.PLAYER);
		player.showLog(String.format("用户%s登录,耗时:%d", player.getUserName(), (System.currentTimeMillis() - sendTime)));

		// Log in to the sub-game (fixed to the GAME_BYDR server type).
		LoginSubGameRequest.Builder builder3 = LoginSubGameRequest.newBuilder();
		builder3.setType(0);
		builder3.setRid(uid);
		builder3.setGameType(ServerType.GAME_BYDR.getType());
		session.write(builder3.build());

		// Establish the UDP connection, reusing the session id returned by the login
		// response; note the request is written over the TCP session.
		UdpConnectRequest.Builder udpBuilder = UdpConnectRequest.newBuilder();
		udpBuilder.setSessionId(res.getSessionId());
		udpBuilder.setRid(res.getRid());
		player.getTcpSession().write(udpBuilder.build());

		// // Chat test (kept for manual debugging)
		// ChatRequest.Builder chatBuilder = ChatRequest.newBuilder();
		// chatBuilder.setChatType(ChatType.WORLD);
		// chatBuilder.setMsg("hello from " + player.getUserName());
		// player.sendUdpMsg(chatBuilder.build());
	}
}
939
455
/* Copyright (c) 2003-2004, <NAME>
 * Copyright (c) 2004-2006, <NAME>, <NAME>.
 * Copyright (c) 2007-2019, The Tor Project, Inc. */
/* See LICENSE for licensing information */

/**
 * \file userdb.h
 *
 * \brief Header for userdb.c
 **/

#ifndef TOR_USERDB_H
#define TOR_USERDB_H

#include "orconfig.h"

/* The user-database lookups below wrap the POSIX passwd APIs, so they are
 * only declared on non-Windows platforms. */
#ifndef _WIN32
#include <sys/types.h>

/* Forward declaration; the full definition comes from <pwd.h> in userdb.c. */
struct passwd;

/* Look up a passwd entry by user name. */
const struct passwd *tor_getpwnam(const char *username);
/* Look up a passwd entry by numeric user id. */
const struct passwd *tor_getpwuid(uid_t uid);
/* Return the home directory for <b>username</b>; caller frees the string. */
char *get_user_homedir(const char *username);
#endif

#endif
198
1,695
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.pinot.query;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.trino.plugin.pinot.PinotColumnHandle;

import java.util.Objects;

import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkState;
import static io.trino.plugin.pinot.query.DynamicTablePqlExtractor.quoteIdentifier;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;

/**
 * An immutable representation of a Pinot aggregation call: a function name applied to a single
 * column/expression argument, plus a flag controlling null handling for empty groups.
 */
public class AggregateExpression
{
    private final String function;
    private final String argument;
    private final boolean returnNullOnEmptyGroup;

    /**
     * Returns a copy of {@code aggregationExpression} whose argument is replaced by the
     * (unquoted) expression of {@code columnHandle}.
     */
    public static AggregateExpression replaceIdentifier(AggregateExpression aggregationExpression, PinotColumnHandle columnHandle)
    {
        return new AggregateExpression(aggregationExpression.getFunction(), stripDoubleQuotes(columnHandle.getExpression()), aggregationExpression.isReturnNullOnEmptyGroup());
    }

    /**
     * Strips the enclosing double quotes from a quoted identifier and unescapes embedded
     * quotes ({@code ""} becomes {@code "}).
     *
     * @throws IllegalStateException if {@code expression} is not enclosed in double quotes
     */
    private static String stripDoubleQuotes(String expression)
    {
        checkState(expression.startsWith("\"") && expression.endsWith("\"") && expression.length() >= 3, "expression is not enclosed in double quotes");
        // Literal (non-regex) replacement: replaceAll would compile the pattern as a regex,
        // which is unnecessary here since both operands are plain strings.
        return expression.substring(1, expression.length() - 1).replace("\"\"", "\"");
    }

    @JsonCreator
    public AggregateExpression(@JsonProperty String function, @JsonProperty String argument, @JsonProperty boolean returnNullOnEmptyGroup)
    {
        this.function = requireNonNull(function, "function is null");
        this.argument = requireNonNull(argument, "argument is null");
        this.returnNullOnEmptyGroup = returnNullOnEmptyGroup;
    }

    @JsonProperty
    public String getFunction()
    {
        return function;
    }

    @JsonProperty
    public String getArgument()
    {
        return argument;
    }

    @JsonProperty
    public boolean isReturnNullOnEmptyGroup()
    {
        return returnNullOnEmptyGroup;
    }

    /** Field name as it appears in Pinot results, e.g. {@code count(city)} (argument unquoted). */
    public String toFieldName()
    {
        return format("%s(%s)", function, argument);
    }

    /** PQL expression form with the argument quoted as an identifier. */
    public String toExpression()
    {
        return format("%s(%s)", function, quoteIdentifier(argument));
    }

    @Override
    public boolean equals(Object other)
    {
        if (this == other) {
            return true;
        }
        if (!(other instanceof AggregateExpression)) {
            return false;
        }
        AggregateExpression that = (AggregateExpression) other;
        return that.function.equals(function) && that.argument.equals(argument) && that.returnNullOnEmptyGroup == returnNullOnEmptyGroup;
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(function, argument, returnNullOnEmptyGroup);
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("function", function)
                .add("argument", argument)
                .add("returnNullOnEmptyGroup", returnNullOnEmptyGroup)
                .toString();
    }
}
1,306
508
from unittest import TestCase

import pandas as pd
from pytz import UTC

from trading_calendars.exchange_calendar_xist import XISTExchangeCalendar

from .test_trading_calendar import NoDSTExchangeCalendarTestBase


class XISTCalendarTestCase(NoDSTExchangeCalendarTestBase, TestCase):
    """Calendar tests for XIST (Borsa Istanbul)."""

    answer_key_filename = 'xist'
    calendar_class = XISTExchangeCalendar

    # The XIST is open from 10:00 am to 6:00 pm
    MAX_SESSION_HOURS = 8.0

    def test_regular_holidays(self):
        # Each recurring holiday, sampled in a year where it fell on a weekday,
        # should be absent from the session calendar.
        all_sessions = self.calendar.all_sessions

        expected_holidays = [
            pd.Timestamp('2019-01-01', tz=UTC),  # New Year's Day
            pd.Timestamp('2019-04-23', tz=UTC),  # Natl Sov and Children's Day
            pd.Timestamp('2019-05-01', tz=UTC),  # Labour Day
            pd.Timestamp('2017-05-19', tz=UTC),  # CAYS Day
            pd.Timestamp('2019-06-04', tz=UTC),  # Eid al Fitr Day 1
            pd.Timestamp('2019-06-05', tz=UTC),  # Eid al Fitr Day 2
            pd.Timestamp('2019-06-06', tz=UTC),  # Eid al Fitr Day 3
            pd.Timestamp('2019-07-15', tz=UTC),  # Dem and Natl Unity Day
            pd.Timestamp('2016-09-12', tz=UTC),  # Eid al Adha Day 1
            pd.Timestamp('2016-09-13', tz=UTC),  # Eid al Adha Day 2
            pd.Timestamp('2016-09-14', tz=UTC),  # Eid al Adha Day 3
            pd.Timestamp('2016-09-15', tz=UTC),  # Eid al Adha Day 4
            pd.Timestamp('2019-08-30', tz=UTC),  # Victory Day
            pd.Timestamp('2019-10-29', tz=UTC),  # Republic Day
        ]

        for holiday_label in expected_holidays:
            self.assertNotIn(holiday_label, all_sessions)

    def test_holidays_fall_on_weekend(self):
        all_sessions = self.calendar.all_sessions

        # All holidays falling on a weekend should not be made up, so verify
        # that the surrounding Fridays/Mondays are trading days.
        expected_sessions = [
            # New Year's Day on Sunday, Jan 1st.
            pd.Timestamp('2011-12-30', tz=UTC),
            pd.Timestamp('2012-01-02', tz=UTC),
            # Natl Sovereignty and Children's Day on Sunday, Apr 23rd.
            pd.Timestamp('2017-04-21', tz=UTC),
            pd.Timestamp('2017-04-24', tz=UTC),
            # Labour Day on Sunday, May 1st.
            pd.Timestamp('2016-04-29', tz=UTC),
            pd.Timestamp('2016-05-02', tz=UTC),
            # Com. of Attaturk Youth and Sport's Day on Saturday, May 19th.
            pd.Timestamp('2018-05-18', tz=UTC),
            pd.Timestamp('2018-05-21', tz=UTC),
            # Eid al Fitr (Day 3) on Sunday, Jun 17th (Friday is a holiday).
            pd.Timestamp('2018-06-18', tz=UTC),
            # Democracy and National Unity Day on Sunday, Jul 15th.
            # (Fixed: was '2018-08-13', a typo — the Friday before Jul 15,
            # 2018 is Jul 13, matching the Friday/Monday pattern above.)
            pd.Timestamp('2018-07-13', tz=UTC),
            pd.Timestamp('2018-07-16', tz=UTC),
            # Eid al Adha (Day 1) on Sunday, Aug 11th (Monday is a holiday).
            pd.Timestamp('2019-08-09', tz=UTC),
            # Victory Day on Saturday, Aug 30th.
            pd.Timestamp('2014-08-29', tz=UTC),
            pd.Timestamp('2014-09-01', tz=UTC),
            # Republic Day on Saturday, Oct 29th.
            pd.Timestamp('2016-10-28', tz=UTC),
            pd.Timestamp('2016-10-31', tz=UTC),
        ]

        for session_label in expected_sessions:
            self.assertIn(session_label, all_sessions)

    def test_early_closes(self):
        # The session label and close time for expected early closes.
        expected_early_closes = [
            # Day before Republic Day.
            (
                pd.Timestamp('2019-10-28', tz=UTC),
                pd.Timestamp('2019-10-28 12:30', tz='Europe/Istanbul'),
            ),
            # Day before Eid al Fitr.
            (
                pd.Timestamp('2019-06-03', tz=UTC),
                pd.Timestamp('2019-06-03 12:30', tz='Europe/Istanbul'),
            ),
            # Day before Eid al Adha.
            (
                pd.Timestamp('2018-08-20', tz=UTC),
                pd.Timestamp('2018-08-20 12:30', tz='Europe/Istanbul'),
            ),
        ]

        for session, expected_close in expected_early_closes:
            self.assertEqual(
                self.calendar.session_close(session),
                expected_close,
            )

    def test_adhoc_holidays(self):
        # One-off (non-recurring) closures should be absent from the calendar.
        all_sessions = self.calendar.all_sessions

        expected_holidays = [
            # Miscellaneous closures
            pd.Timestamp('2002-01-04', tz=UTC),  # Market Holiday
            pd.Timestamp('2003-11-21', tz=UTC),  # Terror attacks
            pd.Timestamp('2004-01-23', tz=UTC),  # Bad weather
            pd.Timestamp('2004-12-30', tz=UTC),  # Closure for redenomination
            pd.Timestamp('2004-12-31', tz=UTC),  # Closure for redenomination
            # Eid al Adha and Eid al Fitr extra closures
            pd.Timestamp('2003-02-14', tz=UTC),  # Eid al Adha extra holiday
            pd.Timestamp('2003-11-24', tz=UTC),  # Eid al Fitr extra holiday
            pd.Timestamp('2003-11-28', tz=UTC),  # Eid al Fitr extra holiday
            pd.Timestamp('2006-01-13', tz=UTC),  # Eid al Adha extra holiday
        ]

        for holiday_label in expected_holidays:
            self.assertNotIn(holiday_label, all_sessions)
2,591
2,151
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.media.router.cast;

import android.os.Handler;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.media.MediaRouter.RouteInfo;

import org.chromium.base.Log;
import org.chromium.chrome.browser.media.router.DiscoveryCallback;
import org.chromium.chrome.browser.media.router.DiscoveryDelegate;
import org.chromium.chrome.browser.media.router.MediaController;
import org.chromium.chrome.browser.media.router.MediaRoute;
import org.chromium.chrome.browser.media.router.MediaRouteManager;
import org.chromium.chrome.browser.media.router.MediaRouteProvider;
import org.chromium.chrome.browser.media.router.MediaSink;
import org.chromium.chrome.browser.media.router.MediaSource;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

/**
 * A {@link BaseMediaRouteProvider} common implementation for MediaRouteProviders.
 *
 * Owns sink discovery (one {@link DiscoveryCallback} per Cast application id, shared by all
 * source URNs that map to that application) and the common parts of session launch; subclasses
 * supply source parsing and route management.
 */
public abstract class BaseMediaRouteProvider
        implements MediaRouteProvider, DiscoveryDelegate,
                   ChromeCastSessionManager.CastSessionManagerListener {
    private static final String TAG = "MediaRouter";

    protected static final List<MediaSink> NO_SINKS = Collections.emptyList();

    protected final MediaRouter mAndroidMediaRouter;
    protected final MediaRouteManager mManager;
    // Keyed by Cast application id; each callback may serve several source URNs.
    protected final Map<String, DiscoveryCallback> mDiscoveryCallbacks =
            new HashMap<String, DiscoveryCallback>();
    protected final Map<String, MediaRoute> mRoutes = new HashMap<String, MediaRoute>();
    protected Handler mHandler = new Handler();

    // There can be only one Cast session at the same time on Android.
    protected CastSession mSession;

    protected BaseMediaRouteProvider(MediaRouter androidMediaRouter, MediaRouteManager manager) {
        mAndroidMediaRouter = androidMediaRouter;
        mManager = manager;
    }

    /**
     * @return A MediaSource object constructed from |sourceId|, or null if the derived class does
     * not support the source.
     */
    @Nullable
    protected abstract MediaSource getSourceFromId(@Nonnull String sourceId);

    /**
     * @return A CastSessionLaunchRequest encapsulating a session launch request.
     */
    @Nullable
    protected abstract ChromeCastSessionManager.CastSessionLaunchRequest createSessionLaunchRequest(
            MediaSource source, MediaSink sink, String presentationId, String origin, int tabId,
            boolean isIncognito, int nativeRequestId);

    /**
     * Forward the sinks back to the native counterpart.
     */
    // Migrated to CafMediaRouteProvider. See https://crbug.com/711860.
    protected void onSinksReceivedInternal(String sourceId, @Nonnull List<MediaSink> sinks) {
        Log.d(TAG, "Reporting %d sinks for source: %s", sinks.size(), sourceId);
        mManager.onSinksReceived(sourceId, this, sinks);
    }

    /**
     * {@link DiscoveryDelegate} implementation.
     */
    // Migrated to CafMediaRouteProvider. See https://crbug.com/711860.
    @Override
    public void onSinksReceived(String sourceId, @Nonnull List<MediaSink> sinks) {
        Log.d(TAG, "Received %d sinks for sourceId: %s", sinks.size(), sourceId);
        // Posted to mHandler so the native side is notified asynchronously.
        mHandler.post(() -> { onSinksReceivedInternal(sourceId, sinks); });
    }

    /**
     * {@link MediaRouteProvider} implementation.
     */
    // Migrated to CafMediaRouteProvider. See https://crbug.com/711860.
    @Override
    public boolean supportsSource(@Nonnull String sourceId) {
        return getSourceFromId(sourceId) != null;
    }

    // Migrated to CafMediaRouteProvider. See https://crbug.com/711860.
    @Override
    public void startObservingMediaSinks(@Nonnull String sourceId) {
        Log.d(TAG, "startObservingMediaSinks: " + sourceId);

        if (mAndroidMediaRouter == null) {
            // If the MediaRouter API is not available, report no devices so the page doesn't even
            // try to cast.
            onSinksReceived(sourceId, NO_SINKS);
            return;
        }

        MediaSource source = getSourceFromId(sourceId);
        if (source == null) {
            // If the source is invalid or not supported by this provider, report no devices
            // available.
            onSinksReceived(sourceId, NO_SINKS);
            return;
        }

        // No-op, if already monitoring the application for this source.
        String applicationId = source.getApplicationId();
        DiscoveryCallback callback = mDiscoveryCallbacks.get(applicationId);
        if (callback != null) {
            callback.addSourceUrn(sourceId);
            return;
        }

        MediaRouteSelector routeSelector = source.buildRouteSelector();
        if (routeSelector == null) {
            // If the application invalid, report no devices available.
            onSinksReceived(sourceId, NO_SINKS);
            return;
        }

        // Seed the callback with sinks for routes that already match, so discovery does not
        // start from an empty list.
        List<MediaSink> knownSinks = new ArrayList<MediaSink>();
        for (RouteInfo route : mAndroidMediaRouter.getRoutes()) {
            if (route.matchesSelector(routeSelector)) {
                knownSinks.add(MediaSink.fromRoute(route));
            }
        }

        callback = new DiscoveryCallback(sourceId, knownSinks, this, routeSelector);
        mAndroidMediaRouter.addCallback(
                routeSelector, callback, MediaRouter.CALLBACK_FLAG_REQUEST_DISCOVERY);
        mDiscoveryCallbacks.put(applicationId, callback);
    }

    // Migrated to CafMediaRouteProvider. See https://crbug.com/711860.
    @Override
    public void stopObservingMediaSinks(@Nonnull String sourceId) {
        Log.d(TAG, "stopObservingMediaSinks: " + sourceId);
        if (mAndroidMediaRouter == null) return;

        MediaSource source = getSourceFromId(sourceId);
        if (source == null) return;

        // The callback is removed from the MediaRouter only once no source URN references it.
        String applicationId = source.getApplicationId();
        DiscoveryCallback callback = mDiscoveryCallbacks.get(applicationId);
        if (callback == null) return;

        callback.removeSourceUrn(sourceId);

        if (callback.isEmpty()) {
            mAndroidMediaRouter.removeCallback(callback);
            mDiscoveryCallbacks.remove(applicationId);
        }
    }

    // Validates the sink and source, then delegates the actual launch to
    // ChromeCastSessionManager via the subclass-built launch request.
    @Override
    public void createRoute(String sourceId, String sinkId, String presentationId, String origin,
            int tabId, boolean isIncognito, int nativeRequestId) {
        if (mAndroidMediaRouter == null) {
            mManager.onRouteRequestError("Not supported", nativeRequestId);
            return;
        }

        MediaSink sink = MediaSink.fromSinkId(sinkId, mAndroidMediaRouter);
        if (sink == null) {
            mManager.onRouteRequestError("No sink", nativeRequestId);
            return;
        }

        MediaSource source = getSourceFromId(sourceId);
        if (source == null) {
            mManager.onRouteRequestError("Unsupported source URL", nativeRequestId);
            return;
        }

        ChromeCastSessionManager.CastSessionLaunchRequest request = createSessionLaunchRequest(
                source, sink, presentationId, origin, tabId, isIncognito, nativeRequestId);

        ChromeCastSessionManager.get().requestSessionLaunch(request);
    }

    @Override
    public abstract void joinRoute(
            String sourceId, String presentationId, String origin, int tabId, int nativeRequestId);

    @Override
    public abstract void closeRoute(String routeId);

    @Override
    public abstract void detachRoute(String routeId);

    @Override
    public abstract void sendStringMessage(String routeId, String message, int nativeCallbackId);

    // ChromeCastSessionObserver implementation.
    @Override
    public abstract void onSessionStarting(
            ChromeCastSessionManager.CastSessionLaunchRequest originalRequest);

    @Override
    public abstract void onSessionEnded();

    // On launch failure, every known route is closed with an error and the route map cleared.
    @Override
    public void onSessionStartFailed() {
        for (String routeId : mRoutes.keySet()) {
            mManager.onRouteClosedWithError(routeId, "Launch error");
        }
        mRoutes.clear();
    }; // NOTE(review): stray ';' after method body — harmless but could be removed.

    @Override
    public void onSessionStarted(CastSession session) {
        mSession = session;
    }

    @Override
    public void onSessionStopAction() {
        if (mSession == null) return;

        for (String routeId : mRoutes.keySet()) closeRoute(routeId);
    }

    // Base implementation exposes no media controller; subclasses may override.
    @Override
    @Nullable
    public MediaController getMediaController(String routeId) {
        return null;
    }
}
3,183
1,248
<reponame>konistehrad/rpi-vk-driver<filename>driver/compute.c<gh_stars>1000+
#include "common.h"
#include "declarations.h"

//TODO
//compute shaders need kernel support

/* All compute entry points below are stubs: per the TODO above, compute
 * shaders require kernel-side support that is not available yet, so each
 * function reports UNSUPPORTED via the project's macro. */

/* Stub: compute pipeline creation is not implemented. */
VKAPI_ATTR VkResult VKAPI_CALL RPIFUNC(vkCreateComputePipelines)(
		VkDevice                                    device,
		VkPipelineCache                             pipelineCache,
		uint32_t                                    createInfoCount,
		const VkComputePipelineCreateInfo*          pCreateInfos,
		const VkAllocationCallbacks*                pAllocator,
		VkPipeline*                                 pPipelines)
{
	UNSUPPORTED(vkCreateComputePipelines);
	return UNSUPPORTED_RETURN;
}

/* Stub: indirect dispatch is not implemented. */
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDispatchIndirect)(
	VkCommandBuffer                             commandBuffer,
	VkBuffer                                    buffer,
	VkDeviceSize                                offset)
{
	UNSUPPORTED(vkCmdDispatchIndirect);
}

/* Stub: direct dispatch is not implemented. */
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDispatch)(
	VkCommandBuffer                             commandBuffer,
	uint32_t                                    groupCountX,
	uint32_t                                    groupCountY,
	uint32_t                                    groupCountZ)
{
	UNSUPPORTED(vkCmdDispatch);
}

/* Stub: dispatch with a base workgroup offset is not implemented. */
VKAPI_ATTR void VKAPI_CALL RPIFUNC(vkCmdDispatchBase)(
	VkCommandBuffer                             commandBuffer,
	uint32_t                                    baseGroupX,
	uint32_t                                    baseGroupY,
	uint32_t                                    baseGroupZ,
	uint32_t                                    groupCountX,
	uint32_t                                    groupCountY,
	uint32_t                                    groupCountZ)
{
	UNSUPPORTED(vkCmdDispatchBase);
}
1,076
350
<reponame>zqn1996-alan/talkback<filename>talkback/src/main/java/com/google/android/accessibility/talkback/quickmenu/QuickMenuOverlay.java
/*
 * Copyright (C) 2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.android.accessibility.talkback.quickmenu;

import android.content.Context;
import android.graphics.PixelFormat;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityManager;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.google.android.accessibility.talkback.R;
import com.google.android.accessibility.utils.FeatureSupport;
import com.google.android.accessibility.utils.widget.SimpleOverlay;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * An non-focusable overlay window to show what quick menu item or action is changed. The focus
 * shouldn't be changed while the overlay is showing or hiding.
 */
public class QuickMenuOverlay extends SimpleOverlay {
  // The same timeout as Snackbar by default (SnackbarManager.LONG_DURATION_MS).
  private static final int SHOWING_TIME_MS = 2750;

  private final Context context;
  private final Handler handler = new Handler();
  // Layout resource inflated lazily in createOverlay() on first show().
  private final int layoutResId;

  // Views inflated from layoutResId; all null until the first show().
  private LinearLayout overlay;
  private TextView settingText;
  private ImageView leftIcon;
  private ImageView rightIcon;

  // Posted (delayed) on handler to auto-dismiss the overlay.
  private final Runnable hideOverlay = this::hide;
  // Text shown by show(); when null or empty, show() is a no-op.
  private @Nullable CharSequence message;
  private boolean supported = true;

  public QuickMenuOverlay(Context context, int layoutResId) {
    super(context);
    this.context = context;
    this.layoutResId = layoutResId;
  }

  /**
   * Shows the current message in the overlay, optionally with the left/right icons, and schedules
   * automatic dismissal. No-op if the overlay is unsupported or no message is set. If already
   * showing, the view is updated in place and the dismissal timer restarted.
   */
  public void show(boolean showIcon) {
    if (!supported || TextUtils.isEmpty(message)) {
      return;
    }

    if (overlay == null) {
      createOverlay();
    }

    settingText.setText(message);
    if (showIcon) {
      leftIcon.setVisibility(View.VISIBLE);
      rightIcon.setVisibility(View.VISIBLE);
    } else {
      leftIcon.setVisibility(View.GONE);
      rightIcon.setVisibility(View.GONE);
    }

    if (isShowing()) {
      // Updates the view if the overlay is showing to avoid adding the view to WindowManager again.
      updateViewLayout();
    } else {
      super.show();
    }

    handler.removeCallbacks(hideOverlay);
    // Users can choose their preferred timeout in accessibility settings on Android Q and above
    // devices.
    int timeout = SHOWING_TIME_MS;
    if (FeatureSupport.supportRecommendedTimeout()) {
      @Nullable
      AccessibilityManager accessibilityManager =
          (AccessibilityManager) context.getSystemService(Context.ACCESSIBILITY_SERVICE);
      if (accessibilityManager != null) {
        timeout =
            accessibilityManager.getRecommendedTimeoutMillis(
                SHOWING_TIME_MS, AccessibilityManager.FLAG_CONTENT_TEXT);
      }
    }
    handler.postDelayed(hideOverlay, timeout);
  }

  /** Hides the overlay immediately and cancels any pending auto-dismissal. */
  @Override
  public void hide() {
    if (overlay == null) {
      return;
    }

    handler.removeCallbacks(hideOverlay);
    super.hide();
  }

  public boolean isShowing() {
    return (overlay != null) && isVisible();
  }

  public void setMessage(@Nullable CharSequence message) {
    this.message = message;
  }

  /**
   * Supports this overlay or not. If {@code supported} is false, to hide this overlay while it's
   * showing.
   */
  public void setSupported(boolean supported) {
    if (!supported) {
      hide();
    }
    this.supported = supported;
  }

  // Inflates the overlay layout and configures a non-focusable, non-touchable
  // TYPE_ACCESSIBILITY_OVERLAY window centered on screen, so showing it never moves focus.
  private void createOverlay() {
    WindowManager.LayoutParams parameters = new WindowManager.LayoutParams();
    parameters.type = WindowManager.LayoutParams.TYPE_ACCESSIBILITY_OVERLAY;
    parameters.flags =
        WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE
            | WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE;
    parameters.format = PixelFormat.TRANSLUCENT;
    parameters.width = LayoutParams.MATCH_PARENT;
    parameters.height = LayoutParams.WRAP_CONTENT;
    parameters.gravity = Gravity.CENTER;
    setParams(parameters);

    LayoutInflater layoutInflater =
        (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    overlay = (LinearLayout) layoutInflater.inflate(layoutResId, null);
    settingText = overlay.findViewById(R.id.quick_menu_text);
    leftIcon = overlay.findViewById(R.id.quick_menu_left_icon);
    rightIcon = overlay.findViewById(R.id.quick_menu_right_icon);
    setContentView(overlay);
  }
}
1,687
1,431
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hwmf.draw; import java.awt.Color; import java.awt.Composite; import java.awt.Font; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.AffineTransform; import org.apache.poi.util.Internal; /** * An util class for saving the state of a {@link java.awt.Graphics2D} object */ @Internal public class HwmfGraphicsState { private Color background; private Shape clip; private Color color; private Composite composite; private Font font; private Paint paint; private Stroke stroke; private AffineTransform trans; /** * Saves the state of the graphics2D object */ public void backup(Graphics2D graphics2D) { background = graphics2D.getBackground(); clip = graphics2D.getClip(); color = graphics2D.getColor(); composite = graphics2D.getComposite(); font = graphics2D.getFont(); paint = graphics2D.getPaint(); stroke = graphics2D.getStroke(); trans = graphics2D.getTransform(); } /** * Retrieves the state into the graphics2D object */ public void restore(Graphics2D graphics2D) { graphics2D.setBackground(background); 
graphics2D.setClip(clip); graphics2D.setColor(color); graphics2D.setComposite(composite); graphics2D.setFont(font); graphics2D.setPaint(paint); graphics2D.setStroke(stroke); graphics2D.setTransform(trans); } }
813
3,212
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.nar; import java.io.IOException; import java.io.InputStream; import java.util.Collection; /** * Represents an external source where the NAR files might be acquired from. Used by the NAR auto loader functionality * in order to poll an external source for new NAR files to load. */ public interface NarProvider { /** * Initializes the NAR Provider based on the given set of properties. */ void initialize(NarProviderInitializationContext context); /** * Performs a listing of all NAR's that are available. * * @Return The result is a list of locations, where the format depends on the actual implementation. */ Collection<String> listNars() throws IOException; /** * Fetches the NAR at the given location. The location should be one of the values returned by <code>listNars()</code>. */ InputStream fetchNarContents(String location) throws IOException; }
471
597
{ "appname": "Película", "searchbartxt": "Busca una película, un programa de televisión, una persona", "home": "Hogar", "discover": "Descubrir", "coming": "Viniendo", "account": "Cuenta", "inTheaters": "En los cines", "onTV": "En TV", "overView": "Visión de conjunto", "topBilledCast": "Reparto más facturado", "userScore": "Puntuación de usuario", "playTrailer": "Reproducir tráiler", "main": "Principal", "videos": "Videos", "images": "Imagenes", "reviews": "Reseñas", "tags": "Etiquetas", "recommendations": "Recomendaciones", "biography": "Biografía", "knownFor": "Conocido por", "acting": "Interino", "movies": "Películas", "tvShows": "Programas de tv", "personalInfo": "Información personal", "gender": "Género", "birthday": "Cumpleaños", "knownCredits": "Créditos conocidos", "placeOfBirth": "Lugar de nacimiento", "officialSite": "sitio oficial", "alsoKnownAs": "También conocido como", "sortBy": "Ordenar por", "filter": "Filtrar", "watchlist": "Lista de seguimiento", "lists": "Liza", "favorites": "Favoritos", "ratingsReviews": "Calificaciones y reseñas", "popular": "Popular", "more": "Más", "featuredCrew": "Tripulación destacada", "creator": "Creador", "currentSeason": "Temporada actual", "viewAllSeasons": "Ver todas las temporadas", "seasonDetail": "Detalle de temporada", "seasonCast": "Reparto de la temporada", "episodes": "Episodios", "guestStars": "Estrellas invitadas", "guestStarsEmpty": "No se ha agregado ninguna estrella invitada", "crew": "Tripulación", "crewEmpty": "No se ha agregado tripulación", "episodeImages": "Imágenes del episodio", "episodeImagesEmpty": "No se ha agregado ninguna imagen de episodio", "facts": "Hechos", "network": "Red", "status": "Estado", "type": "Tipo", "originalLanguage": "Idioma original", "runtime": "Tiempo de ejecución", "genders": "Géneros", "releaseInformation": "Información de lanzamiento", "budget": "Presupuesto", "revenue": "Ingresos", "company": "Empresa", "play": "Jugar", "welcomeBack": "Dar una buena acogida", "settings": 
"Settings", "adultContent": "Contenido para adultos", "notifications": "Notificaciones", "darkMode": "Modo oscuro", "language": "Idioma", "feedback": "Realimentación", "version": "Versión", "stills": "Fotogramas", "keyWords": "Palabras clave", "trailers": "Remolques", "trending": "Tendencias", "newShare": "Compartir nuevo", "shared": "Compartido", "seasons": "Estaciones", "welcome": "Bienvenidos", "letStartWithFewSteps": "vamos a empezar con unos pocos pasos", "continueA": "Seguir", "next": "Próximo", "back": "Espalda", "start": "Comienzo", "whatKindOfMovieDoYouLike": "Que tipo de peliculas te gustan", "whatKindOfTvShowDoYouLike": "¿Qué tipo de programa de televisión te gusta" }
1,327
1,738
<gh_stars>1000+ /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #include <AzCore/std/parallel/thread.h> #include <sched.h> #include <errno.h> namespace AZStd { namespace Platform { void NameCurrentThread(const char*) { // Threads are named in PostCreateThread on Linux } void PreCreateSetThreadAffinity(int cpuId, pthread_attr_t& attr) { // On Linux, thread affinity is set on attr before creating the thread if (cpuId >= 0) { cpu_set_t cpuset; CPU_ZERO(&cpuset); CPU_SET(cpuId, &cpuset); int result = pthread_attr_setaffinity_np(&attr, sizeof(cpuset), &cpuset); (void)result; AZ_Warning("System", result == 0, "pthread_setaffinity_np failed %s\n", strerror(errno)); } } void SetThreadPriority(int priority, pthread_attr_t& attr) { if (priority == -1) { pthread_attr_setinheritsched(&attr, PTHREAD_INHERIT_SCHED); } else { struct sched_param schedParam; memset(&schedParam, 0, sizeof(schedParam)); pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED); schedParam.sched_priority = priority; pthread_attr_setschedparam(&attr, &schedParam); } } void PostCreateThread(pthread_t tId, const char* name, int) { pthread_setname_np(tId, name); } } }
968
607
<filename>lib/include/can/CanPeripheral.h /* * This file is a part of the open source stm32plus library. * Copyright (c) 2011,2012,2013,2014 <NAME> <www.andybrown.me.uk> * Please see website for licensing terms. */ #pragma once namespace stm32plus { /** * Can peripheral base template. The specific instances of the CAN peripheral * inherit from this */ template<class TPinPackage, PeripheralName TPeripheralName> class CanPeripheral : public Can { public: struct Parameters { uint16_t can_Prescaler; /*!< Specifies the length of a time quantum. It ranges from 1 to 1024. */ uint8_t can_Mode; /*!< Specifies the CAN operating mode. This parameter can be a value of @ref CAN_operating_mode */ uint8_t can_SJW; /*!< Specifies the maximum number of time quanta the CAN hardware is allowed to lengthen or shorten a bit to perform resynchronization. This parameter can be a value of @ref CAN_synchronisation_jump_width */ uint8_t can_BS1; /*!< Specifies the number of time quanta in Bit Segment 1. This parameter can be a value of @ref CAN_time_quantum_in_bit_segment_1 */ uint8_t can_BS2; /*!< Specifies the number of time quanta in Bit Segment 2. This parameter can be a value of @ref CAN_time_quantum_in_bit_segment_2 */ FunctionalState can_TTCM; /*!< Enable or disable the time triggered communication mode. This parameter can be set either to ENABLE or DISABLE. */ FunctionalState can_ABOM; /*!< Enable or disable the automatic bus-off management. This parameter can be set either to ENABLE or DISABLE. */ FunctionalState can_AWUM; /*!< Enable or disable the automatic wake-up mode. This parameter can be set either to ENABLE or DISABLE. */ FunctionalState can_NART; /*!< Enable or disable the no-automatic retransmission mode. This parameter can be set either to ENABLE or DISABLE. */ FunctionalState can_RFLM; /*!< Enable or disable the Receive FIFO Locked mode. This parameter can be set either to ENABLE or DISABLE. */ FunctionalState can_TXFP; /*!< Enable or disable the transmit FIFO priority. 
This parameter can be set either to ENABLE or DISABLE. */ Parameters(uint32_t baudrate=100000,uint32_t samplepoint=500) { uint32_t T1; uint32_t T2; uint32_t Tsum; RCC_ClocksTypeDef Clocks; RCC_GetClocksFreq(&Clocks); if( Clocks.PCLK1_Frequency%baudrate != 0 ){ //Configuration failed return; } Tsum = Clocks.PCLK1_Frequency/baudrate; while( Tsum > 20 ){ if( Tsum % 11 == 0 ) Tsum/=11; else if( Tsum % 7 == 0 ) Tsum/=7; else if( Tsum % 5 == 0 ) Tsum/=5; else if( Tsum % 3 == 0 ) Tsum/=3; else if( Tsum % 2 == 0 ) Tsum/=2; else{ if( Tsum == 23 ) break; //Configuration failed return; } } Tsum -= 1; T1 = (uint32_t)(Tsum*samplepoint+499)/1000; T2 = Tsum - T1; if( T1 == 0 ){ T1 += 1; T2 -= 1; } else if( T2 == 0 ){ T1 -= 1; T2 += 1; } if( T1 > 16){ T1 = 16; T2 = Tsum-16; } else if( T2 > 8){ T2 = 8; T1 = Tsum-8; } Tsum=T1+T2+1; can_Prescaler=Clocks.PCLK1_Frequency/(baudrate*Tsum); can_BS1=T1-1; can_BS2=T2-1; can_Mode = CAN_Mode_Normal; can_SJW = CAN_SJW_1tq; can_TTCM = DISABLE; can_ABOM = DISABLE; can_AWUM = ENABLE; can_NART = DISABLE; can_RFLM = DISABLE; can_TXFP = ENABLE; } }; CanPeripheral(const Parameters& params); ~CanPeripheral(); protected: void initialisePeripheral(); }; /** * Constructor * @param params */ template<class TPinPackage, PeripheralName TPeripheralName> inline CanPeripheral<TPinPackage,TPeripheralName>::CanPeripheral(const Parameters& params) : Can((CAN_TypeDef * )PeripheralTraits<TPeripheralName>::PERIPHERAL_BASE) { // clocks on first before any features get initialised ClockControl<TPeripheralName>::On(); CanPinInitialiser<TPinPackage,TPeripheralName>::initialise(); // initialise the peripheral _init.CAN_ABOM=params.can_ABOM; _init.CAN_AWUM=params.can_AWUM; _init.CAN_BS1=params.can_BS1; _init.CAN_BS2=params.can_BS2; _init.CAN_Mode=params.can_Mode; _init.CAN_NART=params.can_NART; _init.CAN_Prescaler=params.can_Prescaler; _init.CAN_RFLM=params.can_RFLM; _init.CAN_SJW=params.can_SJW; _init.CAN_TTCM=params.can_TTCM; _init.CAN_TXFP=params.can_TXFP; } /** * 
Destructor, disable clocks */ template<class TPinPackage,PeripheralName TPeripheralName> inline CanPeripheral<TPinPackage,TPeripheralName>::~CanPeripheral(){ // clocks off ClockControl<TPeripheralName>::Off(); } /* * Initialise the peripheral */ template<class TPinPackage, PeripheralName TPeripheralName> inline void CanPeripheral<TPinPackage,TPeripheralName>::initialisePeripheral() { // initialise CAN_Init((CAN_TypeDef *)PeripheralTraits<TPeripheralName>::PERIPHERAL_BASE,&_init); } }
3,492
578
<reponame>raptoravis/two #include <two/pool.h> #include <two/infra.h> #include <two/type.h> #ifdef TWO_MODULES module two.pool; #else //#include <refl/Class.h> #endif namespace two { vector<unique<Pool>> g_pools = vector<unique<Pool>>(c_max_types); ObjectPool::ObjectPool() : m_pools(c_max_types) {} void ObjectPool::clear() { for(auto& pool : m_pools) if(pool) pool->clear(); } } #ifdef TWO_MODULES module two.pool; #else #endif namespace two {} #ifndef USE_STL #ifdef TWO_MODULES module two.uio; #else #include <stl/vector.hpp> #endif namespace stl { using namespace two; template class TWO_POOL_EXPORT vector<unique<Pool>>; } #endif #ifdef TWO_MODULES module two.pool; #else #endif namespace two { // Exported types template <> TWO_POOL_EXPORT Type& type<two::Pool>() { static Type ty("Pool", sizeof(two::Pool)); return ty; } template <> TWO_POOL_EXPORT Type& type<two::HandlePool>() { static Type ty("HandlePool", sizeof(two::HandlePool)); return ty; } }
427
2,577
<gh_stars>1000+ /* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.jobexecutor; import static org.assertj.core.api.Assertions.assertThat; import java.util.List; import org.camunda.bpm.engine.HistoryService; import org.camunda.bpm.engine.impl.interceptor.Command; import org.camunda.bpm.engine.impl.interceptor.CommandContext; import org.camunda.bpm.engine.impl.persistence.entity.AcquirableJobEntity; import org.camunda.bpm.engine.impl.persistence.entity.JobEntity; import org.camunda.bpm.engine.runtime.Job; import org.junit.After; import org.junit.Before; import org.junit.Test; public class HistoryCleanupJobPriorityRangeTest extends AbstractJobExecutorAcquireJobsTest { protected HistoryService historyService; protected long defaultHistoryCleanupJobPriority; @Before public void setup() { historyService = rule.getHistoryService(); defaultHistoryCleanupJobPriority = configuration.getHistoryCleanupJobPriority(); } @After public void tearDown() { configuration.setHistoryCleanupJobPriority(defaultHistoryCleanupJobPriority); resetDatabase(); } private void resetDatabase() { configuration.getCommandExecutorTxRequired().execute(new Command<Void>() { public Void execute(CommandContext commandContext) { List<Job> jobs = 
historyService.findHistoryCleanupJobs(); for (Job job : jobs) { commandContext.getJobManager().deleteJob((JobEntity) job); commandContext.getHistoricJobLogManager().deleteHistoricJobLogByJobId(job.getId()); } commandContext.getMeterLogManager().deleteAll(); return null; } }); } @Test public void shouldSetConfiguredPriorityOnHistoryCleanupJob() { // given configuration.setHistoryCleanupJobPriority(10L); // when historyService.cleanUpHistoryAsync(true); List<Job> historyCleanupJobs = historyService.findHistoryCleanupJobs(); // then assertThat(historyCleanupJobs).hasSize(1); assertThat(historyCleanupJobs.get(0).getPriority()).isEqualTo(10L); } @Test public void shouldAcquireHistoryCleanupJobInPriorityRange() { // given configuration.setJobExecutorPriorityRangeMin(5L); configuration.setJobExecutorPriorityRangeMax(15L); configuration.setHistoryCleanupJobPriority(10L); // when historyService.cleanUpHistoryAsync(true); // then List<AcquirableJobEntity> acquirableJobs = findAcquirableJobs(); assertThat(acquirableJobs).hasSize(1); assertThat(findJobById(acquirableJobs.get(0).getId()).getPriority()).isEqualTo(10L); } @Test public void shouldNotAcquireHistoryCleanupJobOutsidePriorityRange() { // given configuration.setJobExecutorPriorityRangeMin(5L); configuration.setJobExecutorPriorityRangeMax(15L); configuration.setHistoryCleanupJobPriority(20L); // when historyService.cleanUpHistoryAsync(true); // then List<AcquirableJobEntity> acquirableJobs = findAcquirableJobs(); assertThat(acquirableJobs).hasSize(0); List<Job> historyCleanupJobs = historyService.findHistoryCleanupJobs(); assertThat(historyCleanupJobs).hasSize(1); assertThat(historyCleanupJobs.get(0).getPriority()).isEqualTo(20L); } }
1,287
2,695
<gh_stars>1000+ // Copyright (C) 2009 <NAME> (<EMAIL>) // License: Boost Software License See LICENSE.txt for the full license. #include <dlib/matrix.h> #include <sstream> #include <string> #include <cstdlib> #include <ctime> #include <vector> #include "../stl_checked.h" #include "../array.h" #include "../rand.h" #include <dlib/string.h> #include "tester.h" namespace { using namespace test; using namespace dlib; using namespace std; logger dlog("test.matrix_chol"); dlib::rand rnd; // ---------------------------------------------------------------------------------------- template <typename mat_type> const matrix<typename mat_type::type> symm(const mat_type& m) { return m*trans(m); } // ---------------------------------------------------------------------------------------- template <typename type> const matrix<type> randmat(long r, long c) { matrix<type> m(r,c); for (long row = 0; row < m.nr(); ++row) { for (long col = 0; col < m.nc(); ++col) { m(row,col) = static_cast<type>(rnd.get_random_double()); } } return m; } template <typename type, long NR, long NC> const matrix<type,NR,NC> randmat() { matrix<type,NR,NC> m; for (long row = 0; row < m.nr(); ++row) { for (long col = 0; col < m.nc(); ++col) { m(row,col) = static_cast<type>(rnd.get_random_double()); } } return m; } // ---------------------------------------------------------------------------------------- template <typename matrix_type> void test_cholesky ( const matrix_type& m) { typedef typename matrix_type::type type; const type eps = 10*max(abs(m))*sqrt(std::numeric_limits<type>::epsilon()); dlog << LDEBUG << "test_cholesky(): " << m.nr() << " x " << m.nc() << " eps: " << eps; print_spinner(); cholesky_decomposition<matrix_type> test(m); // none of the matrices we should be passing in to test_cholesky() should be non-spd. 
DLIB_TEST(test.is_spd() == true); type temp; DLIB_TEST_MSG( (temp= max(abs(test.get_l()*trans(test.get_l()) - m))) < eps,temp); { matrix<type> mat = chol(m); DLIB_TEST_MSG( (temp= max(abs(mat*trans(mat) - m))) < eps,temp); } matrix<type> m2; matrix<type,0,1> col; m2 = identity_matrix<type>(m.nr()); DLIB_TEST_MSG(equal(m*test.solve(m2), m2,eps),max(abs(m*test.solve(m2)- m2))); m2 = randmat<type>(m.nr(),5); DLIB_TEST_MSG(equal(m*test.solve(m2), m2,eps),max(abs(m*test.solve(m2)- m2))); m2 = randmat<type>(m.nr(),1); DLIB_TEST_MSG(equal(m*test.solve(m2), m2,eps),max(abs(m*test.solve(m2)- m2))); col = randmat<type>(m.nr(),1); DLIB_TEST_MSG(equal(m*test.solve(col), col,eps),max(abs(m*test.solve(m2)- m2))); // now make us a non-spd matrix if (m.nr() > 2) { matrix<type> sm(lowerm(m)); sm(1,1) = 0; cholesky_decomposition<matrix_type> test2(sm); DLIB_TEST_MSG(test2.is_spd() == false, test2.get_l()); cholesky_decomposition<matrix_type> test3(sm*trans(sm)); DLIB_TEST_MSG(test3.is_spd() == false, test3.get_l()); sm = sm*trans(sm); sm(1,1) = 5; sm(1,0) -= 1; cholesky_decomposition<matrix_type> test4(sm); DLIB_TEST_MSG(test4.is_spd() == false, test4.get_l()); } } // ---------------------------------------------------------------------------------------- void matrix_test_double() { test_cholesky(uniform_matrix<double>(1,1,1) + 10*symm(randmat<double>(1,1))); test_cholesky(uniform_matrix<double>(2,2,1) + 10*symm(randmat<double>(2,2))); test_cholesky(uniform_matrix<double>(3,3,1) + 10*symm(randmat<double>(3,3))); test_cholesky(uniform_matrix<double>(4,4,1) + 10*symm(randmat<double>(4,4))); test_cholesky(uniform_matrix<double>(15,15,1) + 10*symm(randmat<double>(15,15))); test_cholesky(uniform_matrix<double>(101,101,1) + 10*symm(randmat<double>(101,101))); typedef matrix<double,0,0,default_memory_manager, column_major_layout> mat; test_cholesky(mat(uniform_matrix<double>(101,101,1) + 10*symm(randmat<double>(101,101)))); } // 
---------------------------------------------------------------------------------------- void matrix_test_float() { test_cholesky(uniform_matrix<float>(1,1,1) + 2*symm(randmat<float>(1,1))); test_cholesky(uniform_matrix<float>(2,2,1) + 2*symm(randmat<float>(2,2))); test_cholesky(uniform_matrix<float>(3,3,1) + 2*symm(randmat<float>(3,3))); typedef matrix<float,0,0,default_memory_manager, column_major_layout> mat; test_cholesky(mat(uniform_matrix<float>(3,3,1) + 2*symm(randmat<float>(3,3)))); } // ---------------------------------------------------------------------------------------- class matrix_tester : public tester { public: matrix_tester ( ) : tester ("test_matrix_chol", "Runs tests on the matrix cholesky component.") { rnd.set_seed(cast_to_string(time(0))); } void perform_test ( ) { dlog << LINFO << "seed string: " << rnd.get_seed(); dlog << LINFO << "begin testing with double"; matrix_test_double(); dlog << LINFO << "begin testing with float"; matrix_test_float(); } } a; }
2,686
8,969
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. #ifndef RUNTIME_VM_HEAP_BECOME_H_ #define RUNTIME_VM_HEAP_BECOME_H_ #include "platform/atomic.h" #include "platform/growable_array.h" #include "vm/allocation.h" #include "vm/raw_object.h" namespace dart { class Array; // Objects that are a source in a become are tranformed into forwarding // corpses pointing to the corresponding target. Forwarding corpses have the // same heap sizes as the source object to ensure the heap remains walkable. // If the heap sizes is small enough to be encoded in the size field of the // header, a forwarding corpse consists only of a header and the target pointer. // If the heap size is too big to be encoded in the header's size field, the // word after the target pointer contains the size. This is the same // representation as a FreeListElement. class ForwardingCorpse { public: ObjectPtr target() const { return target_; } void set_target(ObjectPtr target) { target_ = target; } intptr_t HeapSize() { return HeapSize(tags_); } intptr_t HeapSize(uword tags) { intptr_t size = UntaggedObject::SizeTag::decode(tags); if (size != 0) return size; return *SizeAddress(); } static ForwardingCorpse* AsForwarder(uword addr, intptr_t size); static void Init(); // Used to allocate class for forwarding corpses in Object::InitOnce. class FakeInstance { public: FakeInstance() {} static cpp_vtable vtable() { return 0; } static intptr_t InstanceSize() { return 0; } static intptr_t NextFieldOffset() { return -kWordSize; } static const ClassId kClassId = kForwardingCorpse; static bool IsInstance() { return true; } private: DISALLOW_ALLOCATION(); DISALLOW_COPY_AND_ASSIGN(FakeInstance); }; private: // This layout mirrors the layout of RawObject. 
RelaxedAtomic<uword> tags_; RelaxedAtomic<ObjectPtr> target_; // Returns the address of the embedded size. intptr_t* SizeAddress() const { uword addr = reinterpret_cast<uword>(&target_) + kWordSize; return reinterpret_cast<intptr_t*>(addr); } // ForwardingCorpses cannot be allocated. Instead references to them are // created using the AsForwarder factory method. DISALLOW_ALLOCATION(); DISALLOW_IMPLICIT_CONSTRUCTORS(ForwardingCorpse); }; // Forward/exchange object identity within pairs of objects. // // Forward: Redirects all pointers to each 'before' object to the corresponding // 'after' object. Every 'before' object is guaranteed to be unreachable after // the operation. The identity hash of the 'before' object is retained. // // This is useful for atomically applying behavior and schema changes, which can // be done by allocating fresh objects with the new schema and forwarding the // identity of the old objects to the new objects. // // Exchange: Redirect all pointers to each 'before' object to the corresponding // 'after' object and vice versa. Both objects remain reachable after the // operation. // // This is useful for implementing certain types of proxies. For example, an // infrequently accessed object may be written to disk and swapped with a // so-called "husk", and swapped back when it is later accessed. // // This operation is named 'become' after its original in Smalltalk: // x become: y "exchange identity for one pair" // x becomeForward: y "forward identity for one pair" // #(x ...) elementsExchangeIdentityWith: #(y ...) // #(x ...) elementsForwardIdentityTo: #(y ...) class Become { public: Become(); ~Become(); void Add(const Object& before, const Object& after); void Forward(); void Exchange() { UNIMPLEMENTED(); } void VisitObjectPointers(ObjectPointerVisitor* visitor); // Convert and instance object into a dummy object, // making the instance independent of its class. // (used for morphic instances during reload). 
static void MakeDummyObject(const Instance& instance); // Update any references pointing to forwarding objects to point the // forwarding objects' targets. static void FollowForwardingPointers(Thread* thread); private: MallocGrowableArray<ObjectPtr> pointers_; DISALLOW_COPY_AND_ASSIGN(Become); }; } // namespace dart #endif // RUNTIME_VM_HEAP_BECOME_H_
1,295
5,169
{ "name": "TestBinary", "version": "0.0.1", "license": { "type": "BSD", "file": "Carthage/LICENSE" }, "summary": "Test", "homepage": "https://github.com/Ogerets/test-binary-x", "authors": { "Ogerets": "https://github.com/Ogerets" }, "source": { "http": "https://github.com/Ogerets/test-binary-x/releases/download/v0.0.1/TestBinary.xcframework.zip" }, "platforms": { "ios": "9.0", "osx": "10.9", "tvos": "9.0", "watchos": "2.0" }, "subspecs": [ { "name": "Common", "vendored_frameworks": "Carthage/VSCCommon.xcframework" }, { "name": "Foundation", "vendored_frameworks": "Carthage/VSCFoundation.xcframework" }, { "name": "Pythia", "vendored_frameworks": "Carthage/VSCPythia.xcframework" }, { "name": "Ratchet", "vendored_frameworks": "Carthage/VSCRatchet.xcframework" } ] }
457
957
#include "DynamicLinkLibrary.h" #include <assert.h> DynamicLinkLibrary::DynamicLinkLibrary() : reference_(1) { dll_ = nullptr; } DynamicLinkLibrary ::~DynamicLinkLibrary() { Reset(); } void DynamicLinkLibrary::Reset() { if (dll_ != nullptr) { #if _WIN32 ::FreeLibrary(dll_); #else dlclose(dll_); #endif dll_ = nullptr; } } bool DynamicLinkLibrary::Load(const char* path) { Reset(); #if _WIN32 dll_ = ::LoadLibraryA(path); #else dll_ = dlopen(path, RTLD_LAZY); #endif if (dll_ == nullptr) return false; return true; } int DynamicLinkLibrary::AddRef() { std::atomic_fetch_add_explicit(&reference_, 1, std::memory_order_consume); return reference_; } int DynamicLinkLibrary::GetRef() { return reference_; } int DynamicLinkLibrary::Release() { assert(reference_ > 0); bool destroy = std::atomic_fetch_sub_explicit(&reference_, 1, std::memory_order_consume) == 1; if (destroy) { delete this; return 0; } return reference_; }
378
386
<reponame>bradleyhenke/cortex ////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2007-2012, Image Engine Design Inc. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // * Neither the name of Image Engine Design nor the names of any // other contributors to this software may be used to endorse or // promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
//
//////////////////////////////////////////////////////////////////////////

#ifndef IECOREGL_PRIMITIVE_H
#define IECOREGL_PRIMITIVE_H

#include "IECoreGL/Export.h"
#include "IECoreGL/GL.h"
#include "IECoreGL/Renderable.h"
#include "IECoreGL/Shader.h"
#include "IECoreGL/TypedStateComponent.h"

#include "IECoreScene/PrimitiveVariable.h"

#include "IECore/Export.h"
#include "IECore/VectorTypedData.h"

IECORE_PUSH_DEFAULT_VISIBILITY
#include "OpenEXR/ImathBox.h"
IECORE_POP_DEFAULT_VISIBILITY

namespace IECoreGL
{

IE_CORE_FORWARDDECLARE( State );

/// The Primitive class represents geometric objects that can
/// be rendered in OpenGL. Primitives may be rendered in a variety
/// of styles defined by State objects, or just rendered as raw geometry
/// in the current OpenGL state.
class IECOREGL_API Primitive : public Renderable
{

	public :

		IE_CORE_DECLARERUNTIMETYPEDEXTENSION( IECoreGL::Primitive, PrimitiveTypeId, Renderable );

		Primitive();
		~Primitive() override;

		/// Adds a primitive variable to this primitive. Derived classes should implement any filtering
		/// or conversions that are necessary and then call addVertexAttribute() or addUniformAttribute().
		/// The default implementation calls addUniformAttribute() for uniform primitive variables and
		/// addVertexAttribute() for all others.
		virtual void addPrimitiveVariable( const std::string &name, const IECoreScene::PrimitiveVariable &primVar ) = 0;
		/// Returns the bounding box for the primitive.
		Imath::Box3f bound() const override = 0;
		/// High level rendering function which renders in the styles represented by
		/// currentState, allowing representations such as wireframe over shaded etc to
		/// be achieved with a single call. The currentState must be complete and
		/// already have been bound. Finer grained control over rendering can be achieved
		/// by using the shaderSetup() and renderInstances() methods - in fact those methods
		/// are used to implement this one.
		void render( State *currentState ) const override;

		//! @name Lower level rendering methods
		/// These methods are used to implement the higher level render() method - they
		/// may also be called directly to implement custom rendering.
		//////////////////////////////////////////////////////////////////////////////
		/// This method returns a Shader::Setup binding the primitive to a shader for
		/// rendering in a particular state. It may be used in conjunction with renderInstances()
		/// to provide finer grained control over rendering. All vertex attributes are
		/// mapped to shader parameters prefixed with "vertex" so for instance "P" will be
		/// mapped to "vertexP". Uniform attributes are mapped directly to shader uniforms with
		/// no prefix. This naming convention corresponds to the inputs defined by the default
		/// source defined in the Shader class, and should be adopted when writing custom shaders.
		///
		/// Most classes will not need to override this method - reasons for overriding would be
		/// to substitute in custom geometry or vertex shaders and/or to bind in attributes
		/// not already specified with addUniformAttribute() or addVertexAttribute().
		///
		/// \todo We need to rethink this mechanism. The problem is that we've ended up using
		/// this method for two things - firstly to get a ShaderSetup where all the primitive
		/// variables are bound (good), and secondly we've abused it to actually change the
		/// shader in PointsPrimitive and CurvePrimitive. Asking for a setup for one shader and getting
		/// back a setup for another doesn't make a great deal of sense. There are several
		/// competing sources of source code for shaders :
		///
		/// - The user-provided source coming through Renderer::shader().
		/// - The vertex and geometry shaders that PointsPrimitive and CurvesPrimitive need
		/// to insert.
		/// - The constant fragment shader that Primitive needs to insert to do wireframe
		/// shading etc.
		/// - The ID fragment shader needed for the Selector.
		///
		/// We should redesign our API so that we first resolve these requirements to generate
		/// a shader, and then use shaderSetup() just to apply primitive variables to it.
		virtual const Shader::Setup *shaderSetup( const Shader *shader, State *state ) const;
		/// Adds the primitive variables held by this Primitive to the specified Shader::Setup.
		/// Vertex attributes will be prefixed as specified, and for each vertex attribute
		/// a boolean uniform parameter called "${prefix}${attributeName}Active" will also be
		/// added so the shader can determine whether or not the values for that input are useful.
		void addPrimitiveVariablesToShaderSetup( Shader::Setup *shaderSetup, const std::string &vertexPrefix = "vertex", GLuint vertexDivisor = 0 ) const;
		/// Renders the primitive using the specified state and with a particular style.
		/// The style is specified using the TypeId of the StateComponent representing that style
		/// (e.g. PrimitiveWireframeTypeId is passed for wireframe rendering).
		///
		/// The default implementation calls renderInstances() but derived classes may override it
		/// to modify their drawing based on the state. A Shader::Setup
		/// created for this primitive must be bound before calling this method.
		virtual void render( const State *currentState, IECore::TypeId style ) const;
		/// Renders a number of instances of the primitive by issuing a single call to
		/// glDrawElementsInstanced() or glDrawArraysInstanced(). A Shader::Setup created for this
		/// primitive must be bound before calling this method.
		virtual void renderInstances( size_t numInstances = 1 ) const = 0;
		///@}

		//! @name StateComponents
		/// The following StateComponent classes have an effect only on
		/// Primitive objects. Each typedef below pairs a value type with the
		/// TypeId that identifies that style/state in a State object.
		//////////////////////////////////////////////////////////////////////////////
		//@{
		typedef TypedStateComponent<bool, PrimitiveBoundTypeId> DrawBound;
		IE_CORE_DECLAREPTR( DrawBound );
		typedef TypedStateComponent<bool, PrimitiveWireframeTypeId> DrawWireframe;
		IE_CORE_DECLAREPTR( DrawWireframe );
		typedef TypedStateComponent<float, PrimitiveWireframeWidthTypeId> WireframeWidth;
		IE_CORE_DECLAREPTR( WireframeWidth );
		typedef TypedStateComponent<bool, PrimitiveSolidTypeId> DrawSolid;
		IE_CORE_DECLAREPTR( DrawSolid );
		typedef TypedStateComponent<bool, PrimitiveOutlineTypeId> DrawOutline;
		IE_CORE_DECLAREPTR( DrawOutline );
		typedef TypedStateComponent<float, PrimitiveOutlineWidthTypeId> OutlineWidth;
		IE_CORE_DECLAREPTR( OutlineWidth );
		typedef TypedStateComponent<bool, PrimitivePointsTypeId> DrawPoints;
		IE_CORE_DECLAREPTR( DrawPoints );
		typedef TypedStateComponent<float, PrimitivePointWidthTypeId> PointWidth;
		IE_CORE_DECLAREPTR( PointWidth );
		typedef TypedStateComponent<bool, PrimitiveSelectableTypeId> Selectable;
		IE_CORE_DECLAREPTR( Selectable );
		/// Used to trigger sorting of the components of a primitive when the TransparentShadingStateComponent has a value of true.
		typedef TypedStateComponent<bool, PrimitiveTransparencySortStateComponentTypeId> TransparencySort;
		IE_CORE_DECLAREPTR( TransparencySort );
		//@}

	protected :

		/// Called by derived classes to register a uniform attribute. There are no type or length checks on this call.
		void addUniformAttribute( const std::string &name, IECore::ConstDataPtr data );
		/// Called by derived classes to register a vertex attribute. There are no type or length checks on this call.
		void addVertexAttribute( const std::string &name, IECore::ConstDataPtr data );

		/// Convenience function for use in render() implementations. Returns
		/// true if TransparentShadingStateComponent is true and
		/// PrimitiveTransparencySortStateComponent is true.
		bool depthSortRequested( const State *state ) const;

	private :

		typedef std::vector<Shader::SetupPtr> ShaderSetupVector;
		// Cache of Shader::Setups created by shaderSetup(); mutable because
		// they are populated lazily from const rendering methods.
		mutable ShaderSetupVector m_shaderSetups;

		// Lazily created setup used for bound drawing; see boundSetup().
		mutable Shader::SetupPtr m_boundSetup;
		const Shader::Setup *boundSetup() const;

		typedef std::map<std::string, IECore::ConstDataPtr> AttributeMap;
		// Attributes registered via addVertexAttribute() / addUniformAttribute().
		AttributeMap m_vertexAttributes;
		AttributeMap m_uniformAttributes;

};

IE_CORE_DECLAREPTR( Primitive );

} // namespace IECoreGL

#endif // IECOREGL_PRIMITIVE_H
2,823
14,668
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.components.module_installer.engine;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import android.app.Activity;

import com.google.android.play.core.splitinstall.SplitInstallException;
import com.google.android.play.core.splitinstall.SplitInstallManager;
import com.google.android.play.core.splitinstall.SplitInstallRequest;
import com.google.android.play.core.splitinstall.SplitInstallSessionState;
import com.google.android.play.core.splitinstall.SplitInstallStateUpdatedListener;
import com.google.android.play.core.splitinstall.model.SplitInstallErrorCode;
import com.google.android.play.core.splitinstall.model.SplitInstallSessionStatus;
import com.google.android.play.core.tasks.OnFailureListener;
import com.google.android.play.core.tasks.Task;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.components.module_installer.logger.Logger;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Test suite for the SplitCompatEngine class.
 */
@RunWith(BaseRobolectricTestRunner.class)
public class SplitCompatEngineTest {
    // Mocked collaborators injected into the engine via the mocked facade.
    @Mock
    private Logger mLogger;
    @Mock
    private SplitInstallManager mManager;
    @Mock
    private SplitInstallRequest mInstallRequest;
    @Mock
    private Task<Integer> mTask;

    // Unit under test and the facade it talks to Play Core through.
    private SplitCompatEngine mInstaller;
    private SplitCompatEngineFacade mInstallerFacade;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);

        mInstallerFacade = mock(SplitCompatEngineFacade.class);

        // Mock SplitCompatEngineFacade.
        doReturn(mLogger).when(mInstallerFacade).getLogger();
        doReturn(mManager).when(mInstallerFacade).getSplitManager();
        doReturn(mInstallRequest).when(mInstallerFacade).createSplitInstallRequest(any());

        // Mock SplitInstallManager.
        doReturn(mTask).when(mManager).startInstall(any());

        mInstaller = new SplitCompatEngine(mInstallerFacade);
        // Session queue is static state shared across engine instances; reset
        // it so tests remain independent of execution order.
        mInstaller.resetSessionQueue();
    }

    @Test
    public void whenInitActivity_verifyActivityInstalled() {
        // Arrange.
        Activity activityMock = mock(Activity.class);

        // Act.
        mInstaller.initActivity(activityMock);

        // Assert.
        verify(mInstallerFacade, times(1)).installActivity(activityMock);
    }

    @Test
    public void whenIsInstalled_verifyModuleIsInstalled() {
        // Arrange.
        String installedModule = "m1";
        String uninstalledModule = "m2";
        Set<String> installedModules = new HashSet<String>() {
            { add(installedModule); }
        };

        doReturn(installedModules).when(mManager).getInstalledModules();

        // Act & Assert.
        assertTrue(mInstaller.isInstalled(installedModule));
        assertFalse(mInstaller.isInstalled(uninstalledModule));
    }

    @Test
    public void whenInstallDeferred_verifyModuleInstalled() {
        // Arrange.
        String moduleName = "whenInstallDeferred_verifyModuleInstalled";
        List<String> moduleList = Collections.singletonList(moduleName);

        // Act.
        mInstaller.installDeferred(moduleName);

        // Assert.
        verify(mManager, times(1)).deferredInstall(moduleList);
        verify(mLogger, times(1)).logRequestDeferredStart(moduleName);
    }

    @Test
    public void whenInstalling_verifyInstallSequence() {
        // Arrange.
        String moduleName = "whenInstalling_verifyInstallSequence";
        InstallListener listener = mock(InstallListener.class);
        InOrder inOrder = inOrder(mInstallerFacade, mManager, mLogger, mTask);

        // Act.
        mInstaller.install(moduleName, listener);

        // Assert - the exact ordering of these interactions is the contract
        // being verified here.
        inOrder.verify(mManager).registerListener(any());
        inOrder.verify(mInstallerFacade).createSplitInstallRequest(moduleName);
        inOrder.verify(mManager).startInstall(mInstallRequest);
        inOrder.verify(mTask).addOnFailureListener(any());
        inOrder.verify(mLogger).logRequestStart(moduleName);
        inOrder.verifyNoMoreInteractions();
    }

    @Test
    public void whenInstallingSameModuleConcurrently_verifySingleInstall() {
        // Arrange.
        String moduleName = "whenInstallingSameModuleConcurrently_verifySingleInstall";
        InstallListener listener = mock(InstallListener.class);
        SplitCompatEngine instance1 = new SplitCompatEngine(mInstallerFacade);
        SplitCompatEngine instance2 = new SplitCompatEngine(mInstallerFacade);

        // Act - duplicate requests across engine instances must be de-duped
        // by the shared session queue.
        instance1.install(moduleName, listener);
        instance1.install(moduleName, listener);
        instance2.install(moduleName, listener);
        instance2.install(moduleName, listener);

        // Assert.
        verify(mInstallerFacade, times(1)).createSplitInstallRequest(moduleName);
    }

    @Test
    public void whenInstallingWithException_verifyErrorHandled() {
        // Arrange.
        String moduleName = "whenInstallingWithException_verifyErrorHandled";
        String exceptionMessage = moduleName + "_ex_msg";
        Integer errorCode = -1;
        InstallListener listener = mock(InstallListener.class);
        ArgumentCaptor<OnFailureListener> arg = ArgumentCaptor.forClass(OnFailureListener.class);

        doReturn(errorCode).when(mLogger).getUnknownRequestErrorCode();

        // Act - capture the failure listener the engine registered and fire
        // it with a plain (non-Play-Core) exception.
        mInstaller.install(moduleName, listener);
        verify(mTask).addOnFailureListener(arg.capture());
        arg.getValue().onFailure(new Exception(exceptionMessage));

        // Assert.
        verify(mLogger, times(1)).logRequestFailure(moduleName, errorCode);
        verify(listener, times(1)).onComplete(false);
    }

    @Test
    public void whenInstallingWithSplitException_verifyErrorHandled() {
        // Arrange.
        String moduleName = "whenInstallingWithSplitException_verifyErrorHandled";
        InstallListener listener = mock(InstallListener.class);
        ArgumentCaptor<OnFailureListener> arg = ArgumentCaptor.forClass(OnFailureListener.class);

        // Act - a SplitInstallException carries its own error code, which
        // should be logged verbatim.
        mInstaller.install(moduleName, listener);
        verify(mTask).addOnFailureListener(arg.capture());
        arg.getValue().onFailure(new SplitInstallException(-1));

        // Assert.
        verify(mLogger, times(1)).logRequestFailure(moduleName, -1);
        verify(listener, times(1)).onComplete(false);
    }

    @Test
    public void whenInstallingWithException_verifyCanTryAgainAfterFailure() {
        // Arrange.
        String moduleName = "whenInstallingWithException_verifyCanTryAgainAfterFailure";
        ArgumentCaptor<OnFailureListener> arg = ArgumentCaptor.forClass(OnFailureListener.class);

        // Act - a failed request must be removed from the session queue so a
        // retry is not treated as a duplicate.
        mInstaller.install(moduleName, mock(InstallListener.class));
        verify(mTask).addOnFailureListener(arg.capture());
        arg.getValue().onFailure(new Exception(""));
        mInstaller.install(moduleName, mock(InstallListener.class)); // 2nd call.

        // Assert.
        verify(mLogger, times(2)).logRequestStart(moduleName);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void whenInstallingWithMoreThanOneModule_verifyException() {
        // Arrange.
        String moduleName = "whenInstallingWithMoreThanOneModule_verifyException";
        InstallListener listener = mock(InstallListener.class);

        // Mock SplitInstallSessionState.
        SplitInstallSessionState state = mock(SplitInstallSessionState.class);
        doReturn(Arrays.asList("m1", "m2")).when(state).moduleNames();

        ArgumentCaptor<SplitInstallStateUpdatedListener> arg =
                ArgumentCaptor.forClass(SplitInstallStateUpdatedListener.class);

        // Act & Assert - a state update covering multiple modules is not
        // supported and must throw.
        mInstaller.install(moduleName, listener);
        verify(mManager).registerListener(arg.capture());
        arg.getValue().onStateUpdate(state);
    }

    @Test
    public void whenInstalled_verifyListenerAndLogger() {
        // Arrange.
        String moduleName = "whenInstalled_verifyListenerAndLogger";
        Integer status = SplitInstallSessionStatus.INSTALLED;
        InstallListener listener = mock(InstallListener.class);

        // Mock SplitInstallSessionState.
        SplitInstallSessionState state = mock(SplitInstallSessionState.class);
        doReturn(status).when(state).status();
        doReturn(Arrays.asList(moduleName)).when(state).moduleNames();

        InOrder inOrder = inOrder(listener, mManager, mLogger, mInstallerFacade);
        ArgumentCaptor<SplitInstallStateUpdatedListener> arg =
                ArgumentCaptor.forClass(SplitInstallStateUpdatedListener.class);

        // Act - simulate the INSTALLED state callback from Play Core.
        mInstaller.install(moduleName, listener);
        verify(mManager).registerListener(arg.capture());
        arg.getValue().onStateUpdate(state);

        // Assert.
        inOrder.verify(mInstallerFacade, times(1)).updateCrashKeys();
        inOrder.verify(listener, times(1)).onComplete(true);
        inOrder.verify(mManager, times(1)).unregisterListener(any());
        inOrder.verify(mLogger, times(1)).logStatus(moduleName, status);
        inOrder.verifyNoMoreInteractions();
    }

    @Test
    public void whenFailureToInstall_verifyListenerAndLogger() {
        // Arrange.
        String moduleName = "whenFailureToInstall_verifyListenerAndLogger";
        Integer status = SplitInstallSessionStatus.FAILED;
        Integer errorCode = SplitInstallErrorCode.NO_ERROR;
        InstallListener listener = mock(InstallListener.class);

        // Mock SplitInstallSessionState.
        SplitInstallSessionState state = mock(SplitInstallSessionState.class);
        doReturn(status).when(state).status();
        doReturn(errorCode).when(state).errorCode();
        doReturn(Arrays.asList(moduleName)).when(state).moduleNames();

        InOrder inOrder = inOrder(listener, mLogger, mManager);
        ArgumentCaptor<SplitInstallStateUpdatedListener> arg =
                ArgumentCaptor.forClass(SplitInstallStateUpdatedListener.class);

        // Act - simulate the FAILED state callback from Play Core.
        mInstaller.install(moduleName, listener);
        verify(mManager).registerListener(arg.capture());
        arg.getValue().onStateUpdate(state);

        // Assert.
        inOrder.verify(listener, times(1)).onComplete(false);
        inOrder.verify(mManager, times(1)).unregisterListener(any());
        inOrder.verify(mLogger, times(1)).logStatusFailure(moduleName, errorCode);
        inOrder.verify(mLogger, times(1)).logStatus(moduleName, status);
        inOrder.verifyNoMoreInteractions();
    }

    @Test
    public void whenNotInstalledOrFailed_verifyStatusLogged() {
        // Arrange.
        String moduleName = "whenNotInstalledOrFailed_verifyStatusLogged";
        Integer status = SplitInstallSessionStatus.UNKNOWN;
        InstallListener listener = mock(InstallListener.class);

        // Mock SplitInstallSessionState.
        SplitInstallSessionState state = mock(SplitInstallSessionState.class);
        doReturn(status).when(state).status();
        doReturn(Arrays.asList(moduleName)).when(state).moduleNames();

        InOrder inOrder = inOrder(listener, mLogger);
        ArgumentCaptor<SplitInstallStateUpdatedListener> arg =
                ArgumentCaptor.forClass(SplitInstallStateUpdatedListener.class);

        // Act - an intermediate (neither INSTALLED nor FAILED) status should
        // only be logged; no listener callbacks fire.
        mInstaller.install(moduleName, mock(InstallListener.class));
        verify(mManager).registerListener(arg.capture());
        arg.getValue().onStateUpdate(state);

        // Assert.
        inOrder.verify(mLogger, times(1)).logStatus(moduleName, status);
        inOrder.verifyNoMoreInteractions();
    }
}
4,560
407
<gh_stars>100-1000 /* * Copyright (c) 2016-2019, NVIDIA CORPORATION. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of NVIDIA CORPORATION nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include <algorithm> #include <map> #include <sstream> #include <tgmath.h> #include "priv/CanonicalAST.h" #include "priv/Check.h" #include "priv/Layer.h" #include "priv/Network.h" #include "priv/Tensor.h" #include "priv/Wisdom.h" #include "priv/WisdomContainer.h" using std::map; using std::vector; using std::string; using std::endl; using std::stringstream; namespace nvdla { INetwork::INetwork() { } INetwork::~INetwork() { } INetwork *createNetwork() { priv::NetworkFactory::NetworkPrivPair n = priv::NetworkFactory::newNetwork(); return n.i(); } NvDlaError destroyNetwork(INetwork *network) { NvDlaError e = NvDlaSuccess; PROPAGATE_ERROR_FAIL(priv::NetworkFactory::deleteNetwork(network)); fail: return e; } // TBD: why is this not Network Dims2 INetwork::NetworkDefaultConvolutionFormula::compute(Dims2 input, Dims2 kernel, Dims2 stride, Dims2 tlPadding, Dims2 brPadding, const char*) const { return Dims2((input.h + tlPadding.h + brPadding.h - kernel.h) / stride.h + 1, (input.w + tlPadding.w + brPadding.w - kernel.w) / stride.w + 1); } Dims2 INetwork::NetworkDefaultConvolutionFormula::compute(Dims2 input, Dims2 kernel, Dims2 stride, Dims2 tlPadding, Dims2 brPadding, Dims2 dilation, const char*) const { NvS32 dilatedH = (kernel.h - 1)*dilation.h + 1; NvS32 dilatedW = (kernel.w - 1)*dilation.w + 1; return Dims2((input.h + tlPadding.h + brPadding.h - dilatedH) / stride.h + 1, (input.w + tlPadding.w + brPadding.w - dilatedW) / stride.w + 1); } Dims2 INetwork::NetworkDefaultDeconvolutionFormula::compute(Dims2 input, Dims2 kernel, Dims2 stride, Dims2 tlPadding, Dims2 brPadding, const char*) const { // exact inverse of the computation for convolution forward return Dims2((input.h - 1) * stride.h + kernel.h - (tlPadding.h + brPadding.h), (input.w - 1) * stride.w + kernel.w - (tlPadding.w + brPadding.w)); } Dims2 INetwork::NetworkDefaultDeconvolutionFormula::compute(Dims2 input, Dims2 kernel, Dims2 stride, Dims2 tlPadding, Dims2 brPadding, Dims2 dilation, const char*) const { 
NvS32 dilatedH = (kernel.h - 1)*dilation.h + 1; NvS32 dilatedW = (kernel.w - 1)*dilation.w + 1; // exact inverse of the computation for convolution forward return Dims2((input.h - 1) * stride.h + dilatedH - (tlPadding.h + brPadding.h), (input.w - 1) * stride.w + dilatedW - (tlPadding.w + brPadding.w)); } Dims2 INetwork::NetworkDefaultPoolingFormula::compute(Dims2 input, Dims2 kernel, Dims2 stride, Dims2 tlPadding, Dims2 brPadding, const char*) const { int pooledH, pooledW; pooledH = static_cast<int> (ceil(static_cast<float>(input.h + tlPadding.h + brPadding.h - kernel.h) / stride.h)) + 1; pooledW = static_cast<int> (ceil(static_cast<float>(input.w + tlPadding.w + brPadding.w - kernel.w) / stride.w)) + 1; if (tlPadding.h || tlPadding.w) { // DS: caffe comment for this (which doesn't work if padding is very large) is: // "If we have padding, ensure that the last pooling starts strictly inside the image (instead of at the padding); otherwise clip the last." if ((pooledH - 1) * stride.h >= input.h + tlPadding.h) --pooledH; if ((pooledW - 1) * stride.w >= input.w + tlPadding.w) --pooledW; assert((pooledH - 1) * stride.h < input.h + tlPadding.h); assert((pooledW - 1) * stride.w < input.w + tlPadding.w); } return Dims2(pooledH, pooledW); } namespace priv { static INetwork::NetworkDefaultConvolutionFormula sDefaultConvDims; static INetwork::NetworkDefaultDeconvolutionFormula sDefaultDeconvDims; static INetwork::NetworkDefaultPoolingFormula sDefaultPoolingDims; NetworkFactory::NetworkPrivPair NetworkFactory::newNetwork() { INetwork *network; Network *network_priv; network = network_priv = new priv::Network(); if (network) { s_priv.insert(network, network_priv); s_self.insert(network, network); } return NetworkPrivPair(network, network_priv); } NvDlaError NetworkFactory::deleteNetwork(INetwork *network) { if (network != NULL) { Network *network_priv = priv(network); if (network_priv != NULL) { delete network_priv; } s_priv.remove(network); s_self.remove(network); } return 
NvDlaSuccess; } Network *NetworkFactory::priv(INetwork *network) { BiMap<INetwork *, Network *>::left_iterator f = s_priv.find_left(network); if ( f == s_priv.end_left() ) { return NULL; } return f->second; } INetwork *NetworkFactory::i(Network *network) { BiMap<INetwork *, Network *>::right_iterator f = s_priv.find_right(network); if ( f == s_priv.end_right() ) { return NULL; } return f->second; } INetwork *NetworkFactory::self(void *s) { BiMap<void *, INetwork *>::left_iterator f = s_self.find_left(s); if ( f == s_self.end_left() ) { return NULL; } return f->second; } INetwork *NetworkFactory::deserializeFrom(WisdomContainerEntry *entry) { // gLogError << __func__ << endl; bool ok = true; NVDLA_UNUSED(ok); INetwork *network = NULL; if ( entry->type() != IWisdomContainerEntry::ENTRY_TYPE_OBJECT ) { gLogError << __func__ << " container should be of object type" << endl; goto done; } // only one type of network right now (INetwork/Network)... //WisdomContainerEntry factory_type_entry; // NetworkTypeEnum factory_type; // NvU32 v; // ok = entry->getEntry("factory_type", IWisdomContainerEntry::ENTRY_TYPE_UINT32, &factory_type_entry); // ok = ok && factory_type_entry.readUInt32(v); // if ( !ok ) { // goto done; // } //factory_type = LayerTypeEnum::underlying_type(v); //ok = factory_type.valid(); //if ( !ok ) { // goto done; // } // switch ( factory_type.e() ) // { // case NetworkFactoryType::canonical_ast: network = deserializeNetwork(entry); //default: // but, shouldn't be possible since l_type.valid() is true... // ok = false; // goto done; // } done: return network; } BiMap<INetwork *, Network*> NetworkFactory::s_priv; BiMap<void *, INetwork*> NetworkFactory::s_self; // there's only one type of "Tensor" for now. so only one of these... so it looks // silly. see the same paths in "LayerFactory::deserialize*" for why it makes sense // to organize this way preemptively. 
INetwork *NetworkFactory::deserializeNetwork(WisdomContainerEntry *entry) { // gLogError << __func__ << endl; NetworkFactory::NetworkPrivPair n = NetworkFactory::newNetwork(); if ( !n ) { gLogError << __func__ << " error allocating new network" << endl; return NULL; } n.priv()->deserializeFrom(entry); return n.i(); } Network::Network() : mConvDims(&sDefaultConvDims), mDeconvDims(&sDefaultDeconvDims), mPoolDims(&sDefaultPoolingDims) { } NvU16 Network::getFactoryType() const { return 0; // only one type of network so far, not complicated by factory splits } IConvolutionLayer* Network::addConvolution(ITensor* inputTensor, int numOutputChannels, int paddingValue, Dims2 kernelSize, Dims2 tlPadding, Dims2 brPadding, Dims2 stride, Dims2 dilation, Weights kernelWeights, Weights biasWeights, BiasMode biasMode, int numGroups) { API_CHECK_NULL_RET_NULL(inputTensor); API_CHECK_RETVAL(numOutputChannels >= 1 && numOutputChannels < MAX_OUTPUT_MAPS, 0 ); API_CHECK_RETVAL(kernelSize.h > 0, 0); API_CHECK_RETVAL(kernelSize.w > 0, 0); API_CHECK_RETVAL((kernelSize.h * kernelSize.w) < MAX_KERNEL_DIMS_PRODUCT, 0); API_CHECK_WEIGHTS_RETVAL(kernelWeights, 0); API_CHECK_WEIGHTS0_RETVAL(biasWeights, 0); API_CHECK_ENUM_RANGE_RETVAL(BiasMode, biasMode, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); ConvolutionLayerDiamond d = LayerFactory::newConvolutionLayer(this, name, inputTensor, output, numOutputChannels, paddingValue, kernelSize, tlPadding, brPadding, stride, dilation, kernelWeights, biasWeights, biasMode, numGroups); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IFullyConnectedLayer* Network::addFullyConnected(ITensor* inputTensor, int outputSize, Weights kernelWeights, Weights biasWeights, BiasMode biasMode) { API_CHECK_NULL_RET_NULL(inputTensor); API_CHECK_RETVAL(outputSize >= 1 && outputSize < 
MAX_OUTPUT_MAPS, 0); API_CHECK_WEIGHTS_RETVAL(kernelWeights, 0); API_CHECK_WEIGHTS0_RETVAL(biasWeights, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); FullyConnectedLayerDiamond d = LayerFactory::newFullyConnectedLayer(this, name, inputTensor, output, outputSize, kernelWeights, biasWeights, biasMode); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IActivationLayer* Network::addActivation(ITensor* inputTensor, ActivationType type) { API_CHECK_NULL_RET_NULL(inputTensor); API_CHECK_ENUM_RANGE_RETVAL(ActivationType, type, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); ActivationLayerDiamond d = LayerFactory::newActivationLayer(this, name, inputTensor, output, type); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IPoolingLayer* Network::addPooling(ITensor* input, PoolingType type, Dims2 windowSize, Dims2 stride, Dims2 tlPadding, Dims2 brPadding) { API_CHECK_NULL_RET_NULL(input); API_CHECK_RETVAL(type.v() <= EnumMax<PoolingType>(), 0); API_CHECK_RETVAL(windowSize.h > 0, 0); API_CHECK_RETVAL(windowSize.w > 0, 0); API_CHECK_RETVAL((windowSize.h*windowSize.w) < MAX_KERNEL_DIMS_PRODUCT, 0); API_CHECK_RETVAL((stride.h + stride.w) < MAX_STRIDE_SUM, 0); API_CHECK_RETVAL(tlPadding.w < windowSize.w, 0); API_CHECK_RETVAL(tlPadding.h < windowSize.h, 0); API_CHECK_RETVAL(brPadding.w < windowSize.w, 0); API_CHECK_RETVAL(brPadding.h < windowSize.h, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); PoolingLayerDiamond d = LayerFactory::newPoolingLayer(this, name, input, output, type, windowSize, stride, 
tlPadding, brPadding); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } ILRNLayer* Network::addLRN(ITensor* input, int lrnWindow, float alpha, float beta, float k) { API_CHECK_NULL_RET_NULL(input); API_CHECK_RETVAL(lrnWindow >= ILRNLayer::Parameters::minWindowSize() && lrnWindow <= ILRNLayer::Parameters::maxWindowSize(), 0); API_CHECK_RETVAL(fabsf(alpha) <= ILRNLayer::Parameters::maxAbsAlpha(), 0); API_CHECK_RETVAL(beta >= ILRNLayer::Parameters::minBeta() && beta <= ILRNLayer::Parameters::maxBeta(), 0); API_CHECK_RETVAL(k >= ILRNLayer::Parameters::minK() && k <= ILRNLayer::Parameters::maxK(), 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); LRNLayerDiamond d = LayerFactory::newLRNLayer(this, name, input, output, lrnWindow, alpha, beta, k); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IScaleLayer* Network::addScale(ITensor* inputTensor, ScaleMode mode, Weights shift, Weights scale, Weights power) { API_CHECK_NULL_RET_NULL(inputTensor); API_CHECK_ENUM_RANGE_RETVAL(ScaleMode, mode, 0); API_CHECK_RETVAL(scale.type == shift.type && shift.type == power.type, 0); API_CHECK_WEIGHTS0_RETVAL(shift, 0); API_CHECK_WEIGHTS0_RETVAL(scale, 0); API_CHECK_WEIGHTS0_RETVAL(power, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); ScaleLayerDiamond d = LayerFactory::newScaleLayer(this, name, inputTensor, output, mode, shift, scale, power); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IBatchNormLayer* Network::addBatchNorm(ITensor* inputTensor, BatchNormMode mode, Weights mean, Weights variance, float epsilon) { 
API_CHECK_NULL_RET_NULL(inputTensor); API_CHECK_ENUM_RANGE_RETVAL(BatchNormMode, mode, 0); API_CHECK_RETVAL(mean.type == variance.type, 0); API_CHECK_WEIGHTS0_RETVAL(mean, 0); API_CHECK_WEIGHTS0_RETVAL(variance, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); BatchNormLayerDiamond d = LayerFactory::newBatchNormLayer(this, name, inputTensor, output, mode, mean, variance, epsilon); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } ISoftMaxLayer* Network::addSoftMax(ITensor* input) { API_CHECK_NULL_RET_NULL(input); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); SoftMaxLayerDiamond d = LayerFactory::newSoftMaxLayer(this, name, input, output); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IConcatenationLayer *Network::addConcatenation(ITensor * const * inputs, int numInputs ) { API_CHECK_RETVAL(numInputs > 0 && numInputs < MAX_CONCAT_INPUTS, 0); API_CHECK_NULL_RET_NULL(inputs); for (int j = 0; j < numInputs; j++) { API_CHECK_NULL_RET_NULL(inputs[j]); } string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); ConcatenationLayerDiamond d = LayerFactory::newConcatenationLayer(this, name, inputs, numInputs, output); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } ISliceLayer *Network::addSlice(ITensor* input, int numOutputs) { API_CHECK_RETVAL(numOutputs > 0 && numOutputs < MAX_SLICE_OUTPUTS, 0); API_CHECK_NULL_RET_NULL(input); string name = newLayerName(); ITensor* outputs[numOutputs]; for (int ii=0; ii<numOutputs; ii++) { outputs[ii] = addTensor(newTensorName()); } SliceLayerDiamond d = 
LayerFactory::newSliceLayer(this, name, input, outputs, numOutputs); for (int ii=0; ii<numOutputs; ii++) { outputs[ii]->setDimensions(d.derived().priv()->getOutputDimensions()); } mLayers.push_back(d.base().i()); return d.derived().i(); } IDeconvolutionLayer* Network::addDeconvolution ( ITensor* input, int numOutputs, int paddingValue, Dims2 kernelSize, Dims2 tlPadding, Dims2 brPadding, Dims2 stride, Dims2 dilation, Weights kernelWeights, Weights biasWeights, BiasMode biasMode, int numGroups ) { API_CHECK_NULL_RET_NULL(input); API_CHECK_RETVAL(numOutputs > 0 && numOutputs < MAX_OUTPUT_MAPS, 0); API_CHECK_RETVAL(kernelSize.h > 0, 0); API_CHECK_RETVAL(kernelSize.w > 0, 0); API_CHECK_RETVAL((kernelSize.h * kernelSize.w) < MAX_KERNEL_DIMS_PRODUCT, 0); API_CHECK_WEIGHTS_RETVAL(kernelWeights, 0); API_CHECK_WEIGHTS0_RETVAL(biasWeights, 0); API_CHECK_ENUM_RANGE_RETVAL(BiasMode, biasMode, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); DeconvolutionLayerDiamond d = LayerFactory::newDeconvolutionLayer(this, name, input, output, numOutputs, paddingValue, kernelSize, tlPadding, brPadding, stride, dilation, kernelWeights, biasWeights, biasMode, numGroups); output->setDimensions( d.derived().priv()->getOutputDimensions() ); mLayers.push_back(d.base().i()); return d.derived().i(); } IElementWiseLayer* Network::addElementWise(ITensor* input1, ITensor* input2, ElementWiseOperation op) { API_CHECK_NULL_RET_NULL(input1); API_CHECK_NULL_RET_NULL(input2); API_CHECK_ENUM_RANGE_RETVAL(ElementWiseOperation, op, 0); string name = newLayerName(); ITensor* output = addTensor(newTensorName()); Tensor* output_priv = TensorFactory::priv(output); NVDLA_UNUSED(output_priv); ITensor* inputs[2] = { input1, input2 }; ElementWiseLayerDiamond d = LayerFactory::newElementWiseLayer(this, name, inputs, output, op); output->setDimensions( d.derived().priv()->getOutputDimensions() ); 
mLayers.push_back(d.base().i()); return d.derived().i(); } ITensor* Network::addInput(const char* name, Dims4 dims) { API_CHECK_NULL_RET_NULL(name); API_CHECK_DIMS4_TENSOR_RETVAL(dims, 0); ITensor* tensor = addTensor(string(name)); tensor->setDimensions(dims); mInputs.push_back(tensor); return tensor; } bool Network::markInput(ITensor *tensor) { API_CHECK_NULL_RETVAL(tensor, false); API_CHECK_DIMS3_TENSOR_RETVAL(tensor->getDimensions(), false); //TBD: check that this isn't already marked. mInputs.push_back(tensor); return true; } void Network::markOutput(ITensor* tensor) { API_CHECK_NULL(tensor); if (std::find(mOutputs.begin(), mOutputs.end(), tensor) == mOutputs.end()) { mOutputs.push_back(tensor); } } int Network::getNumLayers() const { return static_cast<int>(mLayers.size()); } ILayer* Network::getLayer(int i) const { if (i < 0 || i >= int(mLayers.size())) { return 0; } return mLayers[i]; } void Network::destroy() { delete this; } Network::~Network() { } const ILayer* Network::findLayer(const string& name) const { vector< ILayer * >::const_iterator it; for (it = mLayers.begin(); it != mLayers.end(); it++ ) { if ( (*it)->getName() == name) return *it; } return 0; } string Network::newTensorName() const { stringstream s; s << "tensor-anon-" << mTensors.size(); return s.str(); } string Network::newLayerName() const { stringstream s; s << "layer-anon-" << mLayers.size(); return s.str(); } void Network::setPoolingOutputDimensionsFormula(INetwork::OutputDimensionsFormula* callback) { mPoolDims = callback ? callback : &sDefaultPoolingDims; } void Network::setConvolutionOutputDimensionsFormula(INetwork::OutputDimensionsFormula* callback) { mConvDims = callback ? callback : &sDefaultConvDims; } void Network::setDeconvolutionOutputDimensionsFormula(INetwork::OutputDimensionsFormula* callback) { mDeconvDims = callback ? 
callback : &sDefaultDeconvDims; } int Network::getNumInputs() const { return (int)mInputs.size(); } int Network::getNumOutputs() const { return (int)mOutputs.size(); } ITensor* Network::getOutput(int index) const { if (index < 0 || index >= int(mOutputs.size())) { return 0; } return mOutputs[index]; } ITensor* Network::getInput(int index) const { if (index < 0 || index >= int(mInputs.size())) { return 0; } return mInputs[index]; } INetwork::OutputDimensionsFormula& Network::getPoolingOutputDimensionsFormula() const { return *mPoolDims; } INetwork::OutputDimensionsFormula& Network::getConvolutionOutputDimensionsFormula() const { return *mConvDims; } INetwork::OutputDimensionsFormula& Network::getDeconvolutionOutputDimensionsFormula() const { return *mDeconvDims; } const vector<ITensor*>& Network::getInputs() const { return mInputs; } const vector< ILayer * >& Network::getLayers() const { return mLayers; } const vector<ITensor *>& Network::getOutputs() const { return mOutputs; } ITensor* Network::addTensor(const string &s) { TensorFactory::TensorPrivPair t = TensorFactory::newTensor(); if ( !t ) { return NULL; } t.priv()->setNetwork(this); t.priv()->setName(s.c_str()); mTensors.push_back(t.i()); return t.i(); } bool Network::assignSymbols(Wisdom *wisdom) { bool ok = true; for (size_t l = 0; l < mLayers.size(); l++ ) { Layer *layer = LayerFactory::priv(mLayers[l]); if ( !layer ) { gLogError << "missing layer " << l << " in network?" << endl; continue; } string sym; ok = wisdom->findLayerSymbol(layer, sym); if ( ! ok ) { ok = wisdom->assignLayerSymbol(layer, sym); if ( !ok ) { gLogError << "unable to assign symbol name to layer " << layer->getName() << " ?" << endl; goto done; } } // tell the layer to assign symbols for whatever it references... 
ok = layer->assignSymbols(wisdom); if ( !ok ) { gLogError << "unable to assign symbols for layer " << layer->getName() << endl; goto done; } } done: return ok; } bool Network::serializeTo(WisdomContainerEntry *e) const { vector<Layer *> layers; Wisdom *wisdom; WisdomContainerEntry inputs_entry, outputs_entry, layers_entry; map<Tensor *, bool> gather_tensors; bool ok = e && e->container_priv() && e->container_priv()->wisdom(); if ( !ok ) { gLogError << "can't serialize a network without a working wisdom context." << endl; goto done; } wisdom = e->container_priv()->wisdom_priv(); // gLogError << "serializing network with " << getNumLayers() << " layers, " << getNumInputs() << " inputs, and " << getNumOutputs() << " outputs." << endl; ok = ok && e->writeUInt32("factory_type", getFactoryType()); ok = ok && e->writeUInt32("num_inputs", getNumInputs()); ok = ok && e->writeUInt32("num_outputs", getNumOutputs()); ok = ok && e->writeUInt32("num_layers", getNumLayers()); ok = ok && e->insertEntryIfNotPresent("inputs", IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &inputs_entry); ok = ok && e->insertEntryIfNotPresent("outputs",IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &outputs_entry); ok = ok && e->insertEntryIfNotPresent("layers", IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &layers_entry); for (size_t l = 0; l < mLayers.size(); l++ ) { ILayer *ilayer = mLayers[l]; Layer *layer = LayerFactory::priv(ilayer); if ( !(ilayer && layer) ) { gLogError << "missing layer " << l << " in network?" << endl; ok = false; goto done; } layers.push_back(layer); // be sure there's a symbol associated with the layer so it can be referred // to by the network later during deserialization. string sym; ok = wisdom->findLayerSymbol(layer, sym); // layer symbols should already have been assigned if needed. if ( ! ok ) { gLogError << "unassigned layer " << layer->getName() << " ?" 
<< endl; goto done; } ok = ok && layers_entry.writeString(toString(l), sym); // gLogError << "writing symbol=[" << sym << "] to layers_entry=[" << ss.str() << "]" << endl; if ( !ok ) { gLogError << "failed to write symbol to layers_entry index" << endl; goto done; } } // // gather up all tensors referred to by the layers and set them // for (size_t l = 0; l < layers.size(); l++ ) { int num_inputs = layers[l]->getNumInputs(); int num_outputs = layers[l]->getNumOutputs(); for(int i = 0; i < num_inputs; i++) { gather_tensors[TensorFactory::priv(layers[l]->getInput(i))] = true; } for(int o = 0; o < num_outputs; o++) { gather_tensors[TensorFactory::priv(layers[l]->getOutput(o))] = true; } } for ( map<Tensor *, bool>::iterator ti = gather_tensors.begin(); ok && (ti != gather_tensors.end()); ti++ ) { Tensor *t = ti->first; ok = ok && wisdom->setTensor(t); } if ( !ok ) { gLogError << __func__ << " failed to serialize one or more tensors" << endl; goto done; } // // now set the layers // for (size_t l = 0; l < layers.size(); l++ ) { ok = ok && wisdom->setLayer(layers[l]); } if ( !ok ) { gLogError << __func__ << " failed to serialize one or more layers" << endl; goto done; } // // record the input and output tensors // for (size_t i = 0; i < mInputs.size(); i++ ) { string sym; ok = ok && wisdom->findITensorSymbol(getInput(i), sym); ok = ok && inputs_entry.writeString(toString(i), sym); } if ( !ok ) { gLogError << __func__ << " failed to serialize one or more inputs" << endl; goto done; } for (size_t o = 0; o < mOutputs.size(); o++ ) { string sym; ok = ok && wisdom->findITensorSymbol(getOutput(o), sym); ok = ok && outputs_entry.writeString(toString(o), sym); } if ( !ok ) { gLogError << __func__ << " failed to serialize one or more outputs" << endl; goto done; } done: return ok; } // // read from the wisdom container entry and deserialize // all the objects/data necessary to bring the network // into live memory. 
// bool Network::deserializeFrom(WisdomContainerEntry *e) { canonical_ast::Graph *graph; NVDLA_UNUSED(graph); // gLogError << __func__ << " 1 " << endl; Wisdom *wisdom; WisdomContainerEntry inputs_entry, outputs_entry, layers_entry; NvU32 num_inputs, num_outputs, num_layers; map<string, ITensor *> tensor_symbols; vector<ILayer *> layers; bool ok = true; wisdom = e->container_priv()->wisdom_priv(); ok = NULL != wisdom; if ( !ok ) { gLogError << __func__ << "missing Wisdom" << endl; goto done; } ok = ok && e->getEntry(string("num_inputs"), IWisdomContainerEntry::ENTRY_TYPE_UINT32, &inputs_entry); ok = ok && e->getEntry(string("num_outputs"), IWisdomContainerEntry::ENTRY_TYPE_UINT32, &outputs_entry); ok = ok && e->getEntry(string("num_layers"), IWisdomContainerEntry::ENTRY_TYPE_UINT32, &layers_entry); ok = ok && inputs_entry. readUInt32(num_inputs); ok = ok && outputs_entry.readUInt32(num_outputs); ok = ok && layers_entry. readUInt32(num_layers); if ( !ok ) { gLogError << __func__ << " failed to get all num_* entries" << endl; goto done; } //XXX upper bounds check? if ( num_inputs == 0 || num_outputs == 0 || num_layers == 0 ) { //ok = false; gLogError << __func__ << " invalid network deserialization data?" << endl; gLogError << __func__ << " inputs=" << num_inputs << " outputs=" << num_outputs << " layers=" << num_layers << endl; // goto done; } // note re-use of the *_entry locals from above... ok = ok && e->getEntry(string("inputs"), IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &inputs_entry); ok = ok && e->getEntry(string("outputs"), IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &outputs_entry); ok = ok && e->getEntry(string("layers"), IWisdomContainerEntry::ENTRY_TYPE_OBJECT, &layers_entry); if ( !ok ) { gLogError << __func__ << " failed to get inputs, outputs and layers entries" << endl; goto done; } // // Gather up the layers referenced by the network. For each, ascertain // whether or not it has been deserialized (check with the Wisdom). // If not, do so. 
// // The set of layers used by the network is stored as an array of layer symbols. // layers.clear(); for ( size_t l = 0; ok && (l < num_layers); l++ ) { WisdomContainerEntry layer_index_entry; string layer_symbol; ILayer *layer; ok = ok && layers_entry.getEntry(toString(l), IWisdomContainerEntry::ENTRY_TYPE_STRING, &layer_index_entry); if ( !ok ) { gLogError << "couldn't get layers entry for " << toString(l) << endl; goto done; } ok = ok && layer_index_entry.readString(layer_symbol); if ( !ok ) { gLogError << "couldn't read layer index symbol string? " << toString(l) << endl; goto done; break; } layer = wisdom->getLayerFromSymbol(layer_symbol); ok = (NULL != layer); if ( ok ) { layers.push_back(layer); } else { gLogError << "couldn't get layer from symbol=[" << layer_symbol << "]" << endl; goto done; } mLayers.push_back(layer); } if ( !ok ) { gLogError << __func__ << " failed to find or instantiate (some) network layers" << endl; goto done; } // go through the input and output tensors and mark them as such. // they should have all been deserialized by way of layer references. // so if they aren't found something is really wrong. for ( size_t i = 0; ok && (i < num_inputs); i++ ) { WisdomContainerEntry input_index_entry; string input_symbol; ITensor *tensor; ok = ok && inputs_entry.getEntry(toString(i), IWisdomContainerEntry::ENTRY_TYPE_STRING, &input_index_entry); if ( !ok ) { gLogError << "couldn't get input entry for " << toString(i) << endl; goto done; } ok = ok && input_index_entry.readString(input_symbol); if ( !ok ) { gLogError << "couldn't read input index symbol string? " << toString(i) << endl; goto done; } tensor = wisdom->findTensorSymbol(input_symbol); if ( !tensor ) { ok = false; gLogError << " couldn't find input tensor sym=[" << input_symbol << "]" << endl; goto done; } ok = markInput(tensor); if ( !ok ) { gLogError << " problem marking tensor sym=[" << input_symbol << "] as a network input." 
<< endl; goto done; } } for ( size_t o = 0; ok && (o < num_outputs); o++ ) { WisdomContainerEntry output_index_entry; string output_symbol; ITensor *tensor; ok = ok && outputs_entry.getEntry(toString(o), IWisdomContainerEntry::ENTRY_TYPE_STRING, &output_index_entry); if ( !ok ) { gLogError << "couldn't get output entry for " << toString(o) << endl; goto done; } ok = ok && output_index_entry.readString(output_symbol); if ( !ok ) { gLogError << "couldn't read output index symbol string? " << toString(o) << endl; goto done; } tensor = wisdom->findTensorSymbol(output_symbol); if ( !tensor ) { ok = false; gLogError << " couldn't find output tensor sym=[" << output_symbol << "]" << endl; goto done; } markOutput(tensor); } done: return ok; } } // nvdla::priv } // nvdla::
15,130
348
<gh_stars>100-1000 {"nom":"Santa-Maria-Siché","circ":"2ème circonscription","dpt":"Corse-du-Sud","inscrits":371,"abs":145,"votants":226,"blancs":13,"nuls":4,"exp":209,"res":[{"nuance":"REG","nom":"<NAME>","voix":132},{"nuance":"LR","nom":"<NAME>","voix":77}]}
106
1,244
#include <wchar.h> #include "stdio_impl.h" #define SH (8*sizeof(int)-1) #define NORMALIZE(x) ((x)>>SH | -((-(x))>>SH)) int fwide(FILE *f, int mode) { FLOCK(f); if (!f->mode) mode = f->mode = NORMALIZE(mode); FUNLOCK(f); return mode; }
114
852
import FWCore.ParameterSet.Config as cms cms.checkImportPermission(allowedPatterns = ['allowed'])
29
1,968
<reponame>agramonte/corona ////////////////////////////////////////////////////////////////////////////// // // This file is part of the Corona game engine. // For overview and more information on licensing please refer to README.md // Home page: https://github.com/coronalabs/corona // Contact: <EMAIL> // ////////////////////////////////////////////////////////////////////////////// #pragma once Rtt_DISABLE_WIN_XML_COMMENT_COMPILER_WARNINGS_BEGIN # include "Rtt_PlatformInAppStore.h" Rtt_DISABLE_WIN_XML_COMMENT_COMPILER_WARNINGS_END namespace Rtt { /// Stores information about a single product transaction. class WinRTStoreTransaction : public PlatformStoreTransaction { public: WinRTStoreTransaction(); virtual ~WinRTStoreTransaction(); PlatformStoreTransaction::State GetState() const; PlatformStoreTransaction::ErrorType GetErrorType() const; const char* GetErrorString() const; const char* GetProductIdentifier() const; const char* GetReceipt() const; const char* GetSignature() const; const char* GetIdentifier() const; const char* GetDate() const; const char* GetOriginalReceipt() const; const char* GetOriginalIdentifier() const; const char* GetOriginalDate() const; }; } // namespace Rtt
356
306
<filename>WeChatPlugin/Sources/Common/Category/NSMenuItem+Action.h // // NSMenuItem+Action.h // WeChatPlugin // // Created by TK on 2018/4/25. // Copyright © 2018年 tk. All rights reserved. // #import <Cocoa/Cocoa.h> @interface NSMenuItem (Action) + (NSMenuItem *)menuItemWithTitle:(NSString *)title action:(SEL)selector target:(id)target keyEquivalent:(NSString *)key state:(NSControlStateValue)state; @end
147
399
<filename>core/src/test/java/edu/wpi/grip/core/sources/SimpleMockFrameGrabber.java<gh_stars>100-1000 package edu.wpi.grip.core.sources; import org.bytedeco.javacv.Frame; import org.bytedeco.javacv.FrameGrabber; @SuppressWarnings({"PMD.AvoidThrowingRawExceptionTypes", "PMD.SignatureDeclareThrowsException"}) class SimpleMockFrameGrabber extends FrameGrabber { @Override public void start() throws Exception { /* no-op */ } @Override public void stop() throws Exception { /* no-op */ } @Override public void trigger() throws Exception { /* no-op */ } @Override public Frame grab() throws Exception { return null; } @Override public void release() throws Exception { /* no-op */ } }
277
1,436
// Copyright 2016 Yahoo Inc. // Licensed under the terms of the Apache 2.0 license. // Please see LICENSE file in the project root for terms. #ifndef CAFFE_DISTRI_SOCKET_SYNC_HPP_ #define CAFFE_DISTRI_SOCKET_SYNC_HPP_ #include <boost/shared_ptr.hpp> #include <vector> #include "caffe/parallel.hpp" #include "caffe/solver.hpp" #include "util/socket.hpp" namespace caffe { /** * Synchronous data parallelism between machines over RDMA. It builds on top * of the existing single node multi-GPU code in Caffe, by adding an extra * step to synchronize nodes' root GPUs. * * During creation, the weight and gradient buffers are sharded by the number * of nodes in the cluster. Each node is assigned a shard for which it will * behave as a parameter server. All nodes contain and compute on the full * buffers, but are only parameter servers for a subset. * * An SGD iteration goes as follow, first each node sends its shard of weights * to all others. This could be implemented using a broadcast collective, but * since all nodes send to all others concurrently, bandwidth is uniform, and * point to point communication should already be optimal. * * Each node's root GPU now has the weights ready, and propagates them to other * GPUs using Caffe's single node code. Gradients are then computed using a * forward/backward pass on each GPU, and reduced to root GPUs, again using * the single node code. * * The last step is symmetric to the first, gradients are sharded, and each * node sends their shards to their respective parameter server peer. Transfers * are again concurrent, and bandwidth uniform between nodes. Each node then * averages gradients for which it is parameter server, and applies the solver. * The solver code has not been optimized to run only on the relevant shard, * the remaining weights are simply ignored and will be overridden during the * first phase of the next iteration. 
*/ template<typename Dtype> class SocketSync : public P2PSync<Dtype> { public: SocketSync(shared_ptr<Solver<Dtype> > solver, const vector<shared_ptr<SocketChannel> >& peers, int rank); virtual ~SocketSync(); void sync(bool data=true); protected: void chunk(int peer, size_t* offs, size_t* size); void CreateMasterBuffers(int peer); void CreateWorkerBuffers(int peer); virtual void on_start(); virtual void on_gradients_ready(); vector<shared_ptr<SocketChannel> > peers_; // Rank of the current node, MPI like int rank_; // Each node is parameter server for a shard, defined as an offset and size size_t own_offs_; size_t own_size_; // RDMA mappings on weights and gradients buffers, allow send and receive vector<shared_ptr<SocketBuffer> > data_send_; vector<shared_ptr<SocketBuffer> > data_recv_; vector<shared_ptr<SocketBuffer> > diff_send_; vector<shared_ptr<SocketBuffer> > diff_recv_; vector<shared_ptr<SocketBuffer> > ctrl_send_; vector<shared_ptr<SocketBuffer> > ctrl_recv_; // Weights and gradients buffers and size using Params<Dtype>::size_; using Params<Dtype>::data_; using Params<Dtype>::diff_; DISABLE_COPY_AND_ASSIGN(SocketSync); }; } // namespace caffe #endif
939
2,406
<gh_stars>1000+ # Copyright (C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import os import re import tensorflow as tf import numpy as np os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' def load_graph(model_file, output_nodes_for_freeze=None): is_meta = os.path.splitext(model_file)[-1] == ".meta" tf.compat.v1.reset_default_graph() graph = tf.Graph() graph_def = tf.compat.v1.GraphDef() if not is_meta else tf.compat.v1.MetaGraphDef() with open(model_file, "rb") as f: graph_def.ParseFromString(f.read()) nodes_to_clear_device = graph_def.node if isinstance(graph_def, tf.compat.v1.GraphDef) else graph_def.graph_def.node for node in nodes_to_clear_device: node.device = "" if is_meta: with tf.compat.v1.Session() as sess: restorer = tf.compat.v1.train.import_meta_graph(graph_def) restorer.restore(sess, re.sub('\.meta$', '', model_file)) graph_def = tf.compat.v1.graph_util.convert_variables_to_constants(sess, graph_def.graph_def, output_nodes_for_freeze) with graph.as_default(): tf.import_graph_def(graph_def, name='') return graph def collect_tf_references(model_path, feed_dict, out_layer, output_nodes_for_freeze=None): _feed_dict = dict() graph = load_graph(model_path, output_nodes_for_freeze) output_tensors_list = list() outputs_list = list() for input in feed_dict: input_node = [node for node in graph.as_graph_def().node if node.name == input][0] if input_node.op == "Placeholder": tensor = graph.get_tensor_by_name(input + ":0") _feed_dict[tensor] = feed_dict[input] else: for parrent_input in input_node.input: in_node = [node for node in graph.as_graph_def().node if node.name == parrent_input][0] if in_node.op in ['Const', 'Assign', 'NoOp', 'Assert']: continue else: tensor = graph.get_tensor_by_name(parrent_input + ":0") _feed_dict[tensor] = feed_dict[input] for output in out_layer: tensor = graph.get_tensor_by_name(output + ":0") output_tensors_list.append(tensor) outputs_list.append(output) with graph.as_default(): with tf.compat.v1.Session(graph=graph) 
as sess: outputs = sess.run(output_tensors_list, feed_dict=_feed_dict) out_dict = dict(zip(outputs_list, outputs)) return out_dict def children(op, graph): op = graph.get_operation_by_name(op) return set(op for out in op.outputs for op in out.consumers()) def summarize_graph(model_path, output_nodes_for_freeze=None, reshape_net=None): placeholders = dict() variables = list() outputs = list() graph = load_graph(model_path, output_nodes_for_freeze) unlikely_output_types = ['Const', 'Assign', 'NoOp', 'Placeholder', 'Assert', 'switch_t', 'switch_f'] for node in graph.as_graph_def().node: if node.op == 'Placeholder': node_dict = dict() node_dict['type'] = tf.DType(node.attr['dtype'].type).name node_dict['shape'] = str(node.attr['shape'].shape.dim).replace('\n', '').replace(' ', '').replace( 'size:', '').replace('[', '').replace(']', '') node_dict['shape'] = tuple(map(lambda x: int(x), node_dict['shape'].split(','))) placeholders[node.name] = node_dict if node.op == "Variable" or node.op == "VariableV2": variables.append(node.name) if len(children(node.name, graph)) == 0: if node.op not in unlikely_output_types and node.name.split('/')[-1] not in unlikely_output_types: outputs.append(node.name) result = dict() result['inputs'] = placeholders result['outputs'] = outputs if reshape_net: out_layer = list(result['inputs'].keys()) + result['outputs'] feed_dict = {} for inputl in reshape_net: feed_dict.update({inputl: np.ones(shape=reshape_net[inputl])}) scoring_res = collect_tf_references(model_path=model_path, feed_dict=feed_dict, out_layer=out_layer) for layer in scoring_res: if layer in result['inputs']: result['inputs'][layer]['shape'] = scoring_res[layer].shape return result
1,925
1,144
"""This file is for dev server only. DO NOT USE FOR PROD """ from gevent import monkey monkey.patch_all() from lib.patch import patch_all patch_all() import multiprocessing import os import shlex import sys import subprocess import gevent.pywsgi import gevent.socket from app.server import flask_app def main(): host = "0.0.0.0" port = 5000 if len(sys.argv) > 1: port = int(sys.argv[-1]) debug = "--debug" in sys.argv run_webpack = "--webpack" in sys.argv webpack_process = None if debug: from flask_compress import Compress Compress(flask_app) # We are on the parent process if os.environ.get("WERKZEUG_RUN_MAIN") != "true": if run_webpack: webpack_process = multiprocessing.Process(target=webpack) webpack_process.start() else: print("Webpack is disabled. html/js/css will not be built.") print("To make web files: python runweb.py --debug --webpack port") else: print("You are not running in debug mode, so files are not autoreloaded.") print("To run in debug mode: python runweb.py --debug port") try: socketio_server(host=host, port=port, debug=debug) finally: terminate_process_if_live(webpack_process) def socketio_server(host="0.0.0.0", port=5000, debug=False): from app.flask_app import socketio gevent.socket.setdefaulttimeout(30000) print("Running Querybook(w/ socketio) in port: {}".format(port)) socketio.run(flask_app, host=host, port=port, debug=debug) def webpack(): webpack_subprocess = subprocess.Popen( shlex.split("./node_modules/.bin/webpack --progress --color --watch"), stdout=subprocess.PIPE, ) while True: output = webpack_subprocess.stdout.readline() if output == "" and webpack_subprocess.poll() is not None: break if output: sys.stdout.write(output.decode("utf-8")) def terminate_process_if_live(process): if process is not None and process.is_alive(): process.terminate() if __name__ == "__main__": main()
908
6,663
# mode: run # tag: pure3, pure2 import cython @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") @cython.locals(x=bytearray) def basic_bytearray_iter(x): """ >>> basic_bytearray_iter(bytearray(b"hello")) h e l l o """ for a in x: print(chr(a)) @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") @cython.locals(x=bytearray) def reversed_bytearray_iter(x): """ >>> reversed_bytearray_iter(bytearray(b"hello")) o l l e h """ for a in reversed(x): print(chr(a)) @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") @cython.locals(x=bytearray) def modifying_bytearray_iter1(x): """ >>> modifying_bytearray_iter1(bytearray(b"abcdef")) a b c 3 """ count = 0 for a in x: print(chr(a)) del x[-1] count += 1 print(count) @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") @cython.locals(x=bytearray) def modifying_bytearray_iter2(x): """ >>> modifying_bytearray_iter2(bytearray(b"abcdef")) a c e 3 """ count = 0 for a in x: print(chr(a)) del x[0] count += 1 print(count) @cython.test_assert_path_exists("//ForFromStatNode") @cython.test_fail_if_path_exists("//ForInStatNode") @cython.locals(x=bytearray) def modifying_reversed_bytearray_iter(x): """ NOTE - I'm not 100% sure how well-defined this behaviour is in Python. However, for the moment Python and Cython seem to do the same thing. Testing that it doesn't crash is probably more important than the exact output! >>> modifying_reversed_bytearray_iter(bytearray(b"abcdef")) f f f f f f """ for a in reversed(x): print(chr(a)) del x[0]
967
1,006
/**************************************************************************** * arch/arm/src/kinetis/hardware/kinetis_cmt.h * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ #ifndef __ARCH_ARM_SRC_KINETIS_HARDWARE_KINETIS_CMT_H #define __ARCH_ARM_SRC_KINETIS_HARDWARE_KINETIS_CMT_H /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include "chip.h" /**************************************************************************** * Pre-processor Definitions ****************************************************************************/ /* Register Offsets *********************************************************/ #define KINETIS_CMT_CGH1_OFFSET 0x0000 /* CMT Carrier Generator High Data Register 1 */ #define KINETIS_CMT_CGL1_OFFSET 0x0001 /* CMT Carrier Generator Low Data Register 1 */ #define KINETIS_CMT_CGH2_OFFSET 0x0002 /* CMT Carrier Generator High Data Register 2 */ #define KINETIS_CMT_CGL2_OFFSET 0x0003 /* CMT Carrier Generator Low Data Register 2 */ #define KINETIS_CMT_OC_OFFSET 0x0004 /* CMT Output Control Register */ #define 
KINETIS_CMT_MSC_OFFSET 0x0005 /* CMT Modulator Status and Control Register */ #define KINETIS_CMT_CMD1_OFFSET 0x0006 /* CMT Modulator Data Register Mark High */ #define KINETIS_CMT_CMD2_OFFSET 0x0007 /* CMT Modulator Data Register Mark Low */ #define KINETIS_CMT_CMD3_OFFSET 0x0008 /* CMT Modulator Data Register Space High */ #define KINETIS_CMT_CMD4_OFFSET 0x0009 /* CMT Modulator Data Register Space Low */ #define KINETIS_CMT_PPS_OFFSET 0x000a /* CMT Primary Prescaler Register */ #define KINETIS_CMT_DMA_OFFSET 0x000b /* CMT Direct Memory Access */ /* Register Addresses *******************************************************/ #define KINETIS_CMT_CGH1 (KINETIS_CMT_BASE+KINETIS_CMT_CGH1_OFFSET) #define KINETIS_CMT_CGL1 (KINETIS_CMT_BASE+KINETIS_CMT_CGL1_OFFSET) #define KINETIS_CMT_CGH2 (KINETIS_CMT_BASE+KINETIS_CMT_CGH2_OFFSET) #define KINETIS_CMT_CGL2 (KINETIS_CMT_BASE+KINETIS_CMT_CGL2_OFFSET) #define KINETIS_CMT_OC (KINETIS_CMT_BASE+KINETIS_CMT_OC_OFFSET) #define KINETIS_CMT_MSC (KINETIS_CMT_BASE+KINETIS_CMT_MSC_OFFSET) #define KINETIS_CMT_CMD1 (KINETIS_CMT_BASE+KINETIS_CMT_CMD1_OFFSET) #define KINETIS_CMT_CMD2 (KINETIS_CMT_BASE+KINETIS_CMT_CMD2_OFFSET) #define KINETIS_CMT_CMD3 (KINETIS_CMT_BASE+KINETIS_CMT_CMD3_OFFSET) #define KINETIS_CMT_CMD4 (KINETIS_CMT_BASE+KINETIS_CMT_CMD4_OFFSET) #define KINETIS_CMT_PPS (KINETIS_CMT_BASE+KINETIS_CMT_PPS_OFFSET) #define KINETIS_CMT_DMA (KINETIS_CMT_BASE+KINETIS_CMT_DMA_OFFSET) /* Register Bit Definitions *************************************************/ /* CMT Carrier Generator High/Low Data Register 1 * (8-bit Primary Carrier High Time Data Value) */ /* CMT Carrier Generator High/Low Data Register 2 * (8-bit Secondary Carrier High Time Data Value) */ /* CMT Output Control Register (8-bit) */ /* Bits 0-4: Reserved */ #define CMT_OC_IROPEN (1 << 5) /* Bit 5: IRO Pin Enable */ #define CMT_OC_CMTPOL (1 << 6) /* Bit 6: CMT Output Polarity */ #define CMT_OC_IROL (1 << 7) /* Bit 7: IRO Latch Control */ /* CMT Modulator Status and 
Control Register (8-bit) */ #define CMT_MSC_MCGEN (1 << 0) /* Bit 0: Modulator and Carrier Generator Enable */ #define CMT_MSC_EOCIE (1 << 1) /* Bit 1: End of Cycle Interrupt Enable */ #define CMT_MSC_FSK (1 << 2) /* Bit 2: FSK Mode Select */ #define CMT_MSC_BASE (1 << 3) /* Bit 3: Baseband Enable */ #define CMT_MSC_EXSPC (1 << 4) /* Bit 4: Extended Space Enable */ #define CMT_MSC_CMTDIV_SHIFT (5) /* Bits 5-6: CMT Clock Divide Prescaler */ #define CMT_MSC_CMTDIV_MASK (3 << CMT_MSC_CMTDIV_SHIFT) # define CMT_MSC_CMTDIV_DIV1 (0 << CMT_MSC_CMTDIV_SHIFT) /* IF / 1 */ # define CMT_MSC_CMTDIV_DIV2 (1 << CMT_MSC_CMTDIV_SHIFT) /* IF / 2 */ # define CMT_MSC_CMTDIV_DIV4 (2 << CMT_MSC_CMTDIV_SHIFT) /* IF / 4 */ # define CMT_MSC_CMTDIV_DIV8 (3 << CMT_MSC_CMTDIV_SHIFT) /* IF / 8 */ #define CMT_MSC_EOCF (1 << 7) /* Bit 7: End Of Cycle Status Flag */ /* CMT Modulator Data Register Mark High/Low (8-bit command data) */ /* CMT Modulator Data Register Space High/Low (8-bit command data) */ /* CMT Primary Prescaler Register (8-bit) */ #define CMT_PPS_SHIFT (0) /* Bits 0-3: Primary Prescaler Divider */ #define CMT_PPS_MASK (15 << CMT_PPS_SHIFT) # define CMT_PPS_DIV(n) (((n)-1) << CMT_PPS_SHIFT) /* Bus clock / n, n=1..16 */ /* Bits 4-7: Reserved */ /* CMT Direct Memory Access (8-bit) */ #define CMT_DMA_ENABLE (1 << 0) /* Bit 0: DMA Enable */ /* Bits 1-7: Reserved */ /**************************************************************************** * Public Types ****************************************************************************/ /**************************************************************************** * Public Data ****************************************************************************/ /**************************************************************************** * Public Functions Prototypes ****************************************************************************/ #endif /* __ARCH_ARM_SRC_KINETIS_HARDWARE_KINETIS_CMT_H */
2,481
3,639
package top.tangyh.lamp.file.mapper; import top.tangyh.basic.base.mapper.SuperMapper; import top.tangyh.lamp.file.entity.Appendix; import org.springframework.stereotype.Repository; /** * <p> * Mapper 接口 * 业务附件 * </p> * * @author tangyh * @date 2021-06-30 * @create [2021-06-30] [tangyh] [初始创建] */ @Repository public interface AppendixMapper extends SuperMapper<Appendix> { }
176
14,668
// Copyright (c) 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_LOGGING_H_ #define BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_LOGGING_H_ #include "base/allocator/partition_allocator/allocation_guard.h" #include "base/logging.h" namespace base { namespace internal { // Logging requires allocations. This logger allows reentrant allocations to // happen within the allocator context. struct LoggerWithAllowedAllocations : ScopedAllowAllocations, logging::LogMessage { using logging::LogMessage::LogMessage; }; #define PA_PCSCAN_VLOG_STREAM(verbose_level) \ ::base::internal::LoggerWithAllowedAllocations(__FILE__, __LINE__, \ -(verbose_level)) \ .stream() // Logging macro that is meant to be used inside *Scan. Generally, reentrancy // may be an issue if the macro is called from malloc()/free(). Currently, it's // only called at the end of *Scan and when scheduling a new *Scan task. // Allocating from these paths should not be an issue, since we make sure that // no infinite recursion can occur (e.g. we can't schedule two *Scan tasks and // the inner free() call must be non-reentrant). However, these sorts of things // are tricky to enforce and easy to mess up with. Since verbose *Scan logging // is essential for debugging, we choose to provide support for it inside *Scan. #define PA_PCSCAN_VLOG(verbose_level) \ LAZY_STREAM(PA_PCSCAN_VLOG_STREAM(verbose_level), VLOG_IS_ON(verbose_level)) } // namespace internal } // namespace base #endif // BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_LOGGING_H_
639
1,599
<reponame>prity-k/molecule<gh_stars>1000+ """Console and terminal utilities.""" import os import sys from typing import Any, Dict from enrich.console import Console from rich.style import Style from rich.theme import Theme theme = Theme( { "info": "dim cyan", "warning": "magenta", "danger": "bold red", "scenario": "green", "action": "green", "section_title": "bold cyan", "logging.level.notset": Style(dim=True), "logging.level.debug": Style(color="white", dim=True), "logging.level.info": Style(color="blue"), "logging.level.warning": Style(color="red"), "logging.level.error": Style(color="red", bold=True), "logging.level.critical": Style(color="red", bold=True), "logging.level.success": Style(color="green", bold=True), } ) # Based on Ansible implementation def to_bool(a: Any) -> bool: """Return a bool for the arg.""" if a is None or isinstance(a, bool): return bool(a) if isinstance(a, str): a = a.lower() if a in ("yes", "on", "1", "true", 1): return True return False def should_do_markup() -> bool: """Decide about use of ANSI colors.""" py_colors = None # https://xkcd.com/927/ for v in ["PY_COLORS", "CLICOLOR", "FORCE_COLOR", "ANSIBLE_FORCE_COLOR"]: value = os.environ.get(v, None) if value is not None: py_colors = to_bool(value) break # If deliverately disabled colors if os.environ.get("NO_COLOR", None): return False # User configuration requested colors if py_colors is not None: return to_bool(py_colors) term = os.environ.get("TERM", "") if "xterm" in term: return True if term == "dumb": return False # Use tty detection logic as last resort because there are numerous # factors that can make isatty return a misleading value, including: # - stdin.isatty() is the only one returning true, even on a real terminal # - stderr returting false if user user uses a error stream coloring solution return sys.stdout.isatty() console_options: Dict[str, Any] = {"emoji": False, "theme": theme, "soft_wrap": True} console = Console( force_terminal=should_do_markup(), theme=theme, 
record=True, redirect=True ) console_options_stderr = console_options.copy() console_options_stderr["stderr"] = True console_stderr: Console = Console(**console_options_stderr) # Define ANSIBLE_FORCE_COLOR if markup is enabled and another value is not # already given. This assures that Ansible subprocesses are still colored, # even if they do not run with a real TTY. if should_do_markup(): os.environ["ANSIBLE_FORCE_COLOR"] = os.environ.get("ANSIBLE_FORCE_COLOR", "1")
1,083
372
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.bigquery.model; /** * Response message for the ListRowAccessPolicies method. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the BigQuery API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class ListRowAccessPoliciesResponse extends com.google.api.client.json.GenericJson { /** * A token to request the next page of results. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String nextPageToken; /** * Row access policies on the requested table. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<RowAccessPolicy> rowAccessPolicies; /** * A token to request the next page of results. * @return value or {@code null} for none */ public java.lang.String getNextPageToken() { return nextPageToken; } /** * A token to request the next page of results. 
* @param nextPageToken nextPageToken or {@code null} for none */ public ListRowAccessPoliciesResponse setNextPageToken(java.lang.String nextPageToken) { this.nextPageToken = nextPageToken; return this; } /** * Row access policies on the requested table. * @return value or {@code null} for none */ public java.util.List<RowAccessPolicy> getRowAccessPolicies() { return rowAccessPolicies; } /** * Row access policies on the requested table. * @param rowAccessPolicies rowAccessPolicies or {@code null} for none */ public ListRowAccessPoliciesResponse setRowAccessPolicies(java.util.List<RowAccessPolicy> rowAccessPolicies) { this.rowAccessPolicies = rowAccessPolicies; return this; } @Override public ListRowAccessPoliciesResponse set(String fieldName, Object value) { return (ListRowAccessPoliciesResponse) super.set(fieldName, value); } @Override public ListRowAccessPoliciesResponse clone() { return (ListRowAccessPoliciesResponse) super.clone(); } }
915
347
package org.ovirt.engine.core.aaa.filters; import java.io.IOException; import java.util.Arrays; import java.util.Map; import javax.naming.InitialContext; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.ovirt.engine.core.aaa.SsoOAuthServiceUtils; import org.ovirt.engine.core.aaa.SsoUtils; import org.ovirt.engine.core.common.constants.SessionConstants; import org.ovirt.engine.core.common.queries.GetEngineSessionIdForSsoTokenQueryParameters; import org.ovirt.engine.core.common.queries.QueryReturnValue; import org.ovirt.engine.core.common.queries.QueryType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SsoRestApiAuthFilter implements Filter { private final Logger log = LoggerFactory.getLogger(getClass()); private static final String scope = "ovirt-app-api"; private static final String BASIC = "Basic"; private static final String BEARER = "Bearer"; @Override public void init(FilterConfig filterConfig) { // empty } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { log.debug("Entered SsoRestApiAuthFilter"); HttpServletRequest req = (HttpServletRequest) request; if (!FiltersHelper.isAuthenticated(req) || !FiltersHelper.isSessionValid((HttpServletRequest) request)) { log.debug("SsoRestApiAuthFilter authenticating with sso"); authenticateWithSso(req); } chain.doFilter(request, response); } protected void authenticateWithSso(HttpServletRequest req) { String headerValue = req.getHeader(FiltersHelper.Constants.HEADER_AUTHORIZATION); if (headerValue != null && (headerValue.startsWith(BASIC) || headerValue.startsWith(BEARER))) { try { String token; boolean userSessionExists = false; if 
(headerValue.startsWith(BASIC)) { log.debug("SsoRestApiAuthFilter authenticating using BASIC header"); Map<String, Object> response = SsoOAuthServiceUtils.authenticate(req, scope); FiltersHelper.isStatusOk(response); token = (String) response.get("access_token"); log.debug("SsoRestApiAuthFilter successfully authenticated using BASIC header"); } else if (headerValue.startsWith(BEARER)) { log.debug("SsoRestApiAuthFilter authenticating using BEARER header"); token = headerValue.substring("Bearer".length()).trim(); InitialContext ctx = new InitialContext(); try { QueryReturnValue queryRetVal = FiltersHelper.getBackend(ctx).runPublicQuery( QueryType.GetEngineSessionIdForSsoToken, new GetEngineSessionIdForSsoTokenQueryParameters(token)); if (queryRetVal.getSucceeded() && StringUtils.isNotEmpty(queryRetVal.getReturnValue())) { log.debug("SsoRestApiAuthFilter successfully authenticated using BEARER header"); req.setAttribute( SessionConstants.HTTP_SESSION_ENGINE_SESSION_ID_KEY, queryRetVal.getReturnValue()); req.setAttribute( FiltersHelper.Constants.REQUEST_LOGIN_FILTER_AUTHENTICATION_DONE, true); userSessionExists = true; } } finally { ctx.close(); } } else { throw new RuntimeException(String.format("Unsupported authentication header: %s", headerValue)); } if (!userSessionExists) { Map<String, Object> payload = FiltersHelper.getPayloadForToken(token); String scope = (String) payload.get("scope"); if (StringUtils.isEmpty(scope) || !Arrays.asList(scope.trim().split("\\s *")).contains("ovirt-app-api")) { throw new RuntimeException("The required scope ovirt-app-api is not granted."); } SsoUtils.createUserSession(req, payload, false); } } catch (Exception e) { req.setAttribute( SessionConstants.SSO_AUTHENTICATION_ERR_MSG, e.getMessage()); log.error("Cannot authenticate using authentication Headers: {}", e.getMessage()); log.debug("Cannot authenticate using authentication Headers", e); } } } @Override public void destroy() { // empty } }
2,400
410
<gh_stars>100-1000 import os BASE_DIR = os.path.dirname(os.path.abspath(__file__)) DEBUG = True ALLOWED_HOSTS = [] DATABASES = { 'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'djangox509.db'} } SECRET_KEY = '<KEY>' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', 'django_x509', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'openwisp2.urls' TIME_ZONE = 'Europe/Rome' LANGUAGE_CODE = 'en-gb' USE_TZ = True USE_I18N = False USE_L10N = False STATIC_URL = '/static/' CORS_ORIGIN_ALLOW_ALL = True TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'loaders': [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', 'openwisp_utils.loaders.DependencyLoader', ], 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, } ] if os.environ.get('SAMPLE_APP', False): INSTALLED_APPS.remove('django_x509') EXTENDED_APPS = ['django_x509'] INSTALLED_APPS.append('openwisp2.sample_x509') DJANGO_X509_CA_MODEL = 'sample_x509.Ca' DJANGO_X509_CERT_MODEL = 'sample_x509.Cert' # local settings must be imported before test runner otherwise they'll be ignored try: from local_settings import * # noqa except ImportError: pass
982
352
<reponame>ponderng/recon-pipeline<filename>pipeline/models/nmap_model.py import textwrap from sqlalchemy.orm import relationship from sqlalchemy import Column, Integer, ForeignKey, String, Boolean from .base_model import Base from .port_model import Port from .ip_address_model import IPAddress from .nse_model import nse_result_association_table class NmapResult(Base): """ Database model that describes the TARGET.nmap scan results. Represents nmap data. Relationships: ``target``: many to one -> :class:`pipeline.models.target_model.Target` ``ip_address``: one to one -> :class:`pipeline.models.ip_address_model.IPAddress` ``port``: one to one -> :class:`pipeline.models.port_model.Port` ``nse_results``: one to many -> :class:`pipeline.models.nse_model.NSEResult` """ def __str__(self): return self.pretty() def pretty(self, commandline=False, nse_results=None): pad = " " ip_address = self.ip_address.ipv4_address or self.ip_address.ipv6_address msg = f"{ip_address} - {self.service}\n" msg += f"{'=' * (len(ip_address) + len(self.service) + 3)}\n\n" msg += f"{self.port.protocol} port: {self.port.port_number} - {'open' if self.open else 'closed'} - {self.reason}\n" msg += f"product: {self.product} :: {self.product_version}\n" msg += "nse script(s) output:\n" if nse_results is None: # add all nse scripts for nse_result in self.nse_results: msg += f"{pad}{nse_result.script_id}\n" msg += textwrap.indent(nse_result.script_output, pad * 2) msg += "\n" else: # filter used, only return those specified for nse_result in nse_results: if nse_result in self.nse_results: msg += f"{pad}{nse_result.script_id}\n" msg += textwrap.indent(nse_result.script_output, pad * 2) msg += "\n" if commandline: msg += "command used:\n" msg += f"{pad}{self.commandline}\n" return msg __tablename__ = "nmap_result" id = Column(Integer, primary_key=True) open = Column(Boolean) reason = Column(String) service = Column(String) product = Column(String) commandline = Column(String) product_version = 
Column(String) port = relationship(Port) port_id = Column(Integer, ForeignKey("port.id")) ip_address = relationship(IPAddress) ip_address_id = Column(Integer, ForeignKey("ip_address.id")) target_id = Column(Integer, ForeignKey("target.id")) target = relationship("Target", back_populates="nmap_results") nse_results = relationship("NSEResult", secondary=nse_result_association_table, back_populates="nmap_results")
1,222
634
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.file.impl; import consulo.disposer.Disposable; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.FileViewProvider; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiFile; import com.intellij.psi.impl.PsiTreeChangeEventImpl; import consulo.annotation.access.RequiredReadAction; import consulo.annotation.access.RequiredWriteAction; import org.jetbrains.annotations.TestOnly; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.List; public interface FileManager extends Disposable { @Nullable @RequiredReadAction PsiFile findFile(@Nonnull VirtualFile vFile); @Nullable @RequiredReadAction PsiDirectory findDirectory(@Nonnull VirtualFile vFile); @RequiredWriteAction default void reloadFromDisk(@Nonnull PsiFile file) { reloadFromDisk(file, false); } default void reloadFromDisk(@Nonnull PsiFile file, boolean ignoreDocument) { } @Nullable @RequiredReadAction PsiFile getCachedPsiFile(@Nonnull VirtualFile vFile); @TestOnly void cleanupForNextTest(); @RequiredReadAction FileViewProvider findViewProvider(@Nonnull VirtualFile file); @RequiredReadAction FileViewProvider findCachedViewProvider(@Nonnull VirtualFile file); @RequiredReadAction void setViewProvider(@Nonnull VirtualFile virtualFile, FileViewProvider fileViewProvider); @Nonnull List<PsiFile> getAllCachedFiles(); @Nonnull FileViewProvider 
createFileViewProvider(@Nonnull VirtualFile file, boolean physical); default void processFileTypesChanged() { } default void markInitialized() { } default PsiDirectory getCachedDirectory(@Nonnull VirtualFile vFile) { return null; } default void removeInvalidFilesAndDirs(boolean useFind) { } default PsiFile getCachedPsiFileInner(@Nonnull VirtualFile file) { return null; } default void forceReload(@Nonnull VirtualFile vFile) { } default void removeFilesAndDirsRecursively(@Nonnull VirtualFile vFile) { } default boolean isInitialized() { return true; } default void dispatchPendingEvents() { } default void firePropertyChangedForUnloadedPsi(@Nonnull PsiTreeChangeEventImpl event, @Nonnull VirtualFile vFile) { } }
856
1,738
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. #ifndef CRYINCLUDE_CRYACTION_NETWORK_SERIALIZEBITS_H #define CRYINCLUDE_CRYACTION_NETWORK_SERIALIZEBITS_H #pragma once class CBitArray { public: /* This is a naive implementation, but actually packs bits without padding, unlike CryNetwork */ CBitArray(TSerialize* m_ser); void ResetForWrite(); void ResetForRead(); bool IsReading() { return m_isReading; } bool IsWriting() { return m_isReading == false; } int NumberOfBitsPushed(); void PushBit(int bit); int PopBit(); void ReadBits(unsigned char* out, int numBits); void WriteBits(const unsigned char* in, int numBits); inline uint32 bitsneeded(uint32 v); inline void SerializeInt(int* v, int min, int max); inline void SerializeUInt(unsigned int* v, unsigned int min, unsigned int max); void SerializeFloat(float* data, float min, float max, int totalNumBits, int reduceRange = 0); inline void Serialize(bool& v); inline void Serialize(uint32& v, uint32 min = 0, uint32 max = 0xffffffff); inline void Serialize(int32& v, int min, int max); inline void Serialize(int16& v, int min, int max); inline void Serialize(uint16& v, uint16 min = 0, uint16 max = 0xffff); inline void Serialize(unsigned char& v, int min, int max); inline void Serialize(float& f, float min, float max, int totalNumBits, int reduceRange = 0); inline void Serialize(Vec3& v, float min, float max, int numBitsPerElem, int reduceRange = 0); inline void Serialize(Quat& v); 
void WriteToSerializer(); private: template <class INT> void SerializeInt_T(INT* v, INT min, INT max); public: enum { maxBytes = 1 << 13 }; int m_bytePos; int m_bitPos; int m_numberBytes; bool m_isReading; int m_multiplier; TSerialize* m_ser; unsigned char m_readByte; unsigned char m_data[maxBytes]; }; /* ========================================================================================================= Implementation ========================================================================================================= */ /* This is a naive implementation */ NO_INLINE_WEAK CBitArray::CBitArray(TSerialize* ser) { m_ser = ser; if (ser->IsReading()) { ResetForRead(); } else { ResetForWrite(); } } NO_INLINE_WEAK void CBitArray::ResetForWrite() { m_bytePos = -1; m_bitPos = 7; m_multiplier = 1; m_numberBytes = 0; m_isReading = false; } NO_INLINE_WEAK void CBitArray::ResetForRead() { m_bitPos = 7; m_bytePos = -1; m_isReading = true; } NO_INLINE_WEAK void CBitArray::PushBit(int bit) { #if !defined(_RELEASE) if (m_bytePos >= maxBytes) { CryFatalError("CBitArray ran out of room, maxBytes: %d, will need to be increased, or break up serialisation into separate CBitArray", maxBytes); } #endif m_bitPos++; if (m_bitPos == 8) { m_multiplier = 1; m_bitPos = 0; m_bytePos++; assert(m_bytePos < maxBytes); PREFAST_ASSUME(m_bytePos < maxBytes); m_data[m_bytePos] = 0; m_numberBytes++; } assert((unsigned int)m_bytePos < (unsigned int)maxBytes); PREFAST_ASSUME((unsigned int)m_bytePos < (unsigned int)maxBytes); m_data[m_bytePos] |= m_multiplier * (bit & 1); // Use multiplier because variable bit shift on consoles is really slow m_multiplier = m_multiplier << 1; } NO_INLINE_WEAK int CBitArray::NumberOfBitsPushed() { return m_bytePos * 8 + m_bitPos + 1; } NO_INLINE_WEAK int CBitArray::PopBit() /* from the front */ { m_bitPos++; if (m_bitPos == 8) { m_bitPos = 0; m_bytePos++; CRY_ASSERT(m_ser->IsReading()); m_ser->Value("bitarray", m_readByte); // read a byte } unsigned char ret = 
m_readByte & 1; m_readByte = m_readByte >> 1; return ret; } NO_INLINE_WEAK void CBitArray::ReadBits(unsigned char* out, int numBits) { unsigned char byte; while (numBits >= 8) { byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; byte |= PopBit() << 3; byte |= PopBit() << 4; byte |= PopBit() << 5; byte |= PopBit() << 6; byte |= PopBit() << 7; *out = byte; out++; numBits = numBits - 8; } switch (numBits) { case 0: break; case 1: *out = PopBit(); break; case 2: byte = PopBit(); byte |= PopBit() << 1; *out = byte; break; case 3: byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; *out = byte; break; case 4: byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; byte |= PopBit() << 3; *out = byte; break; case 5: byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; byte |= PopBit() << 3; byte |= PopBit() << 4; *out = byte; break; case 6: byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; byte |= PopBit() << 3; byte |= PopBit() << 4; byte |= PopBit() << 5; *out = byte; break; case 7: byte = PopBit(); byte |= PopBit() << 1; byte |= PopBit() << 2; byte |= PopBit() << 3; byte |= PopBit() << 4; byte |= PopBit() << 5; byte |= PopBit() << 6; *out = byte; break; } } NO_INLINE_WEAK void CBitArray::WriteBits(const unsigned char* in, int numBits) { unsigned char v; while (numBits >= 8) { v = *in; PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); PushBit((v >> 3) & 1); PushBit((v >> 4) & 1); PushBit((v >> 5) & 1); PushBit((v >> 6) & 1); PushBit((v >> 7) & 1); numBits = numBits - 8; in++; } v = *in; switch (numBits) { case 0: break; case 1: PushBit((v >> 0) & 1); break; case 2: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); break; case 3: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); break; case 4: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); PushBit((v >> 3) & 1); break; case 5: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); PushBit((v >> 3) & 
1); PushBit((v >> 4) & 1); break; case 6: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); PushBit((v >> 3) & 1); PushBit((v >> 4) & 1); PushBit((v >> 5) & 1); break; case 7: PushBit((v >> 0) & 1); PushBit((v >> 1) & 1); PushBit((v >> 2) & 1); PushBit((v >> 3) & 1); PushBit((v >> 4) & 1); PushBit((v >> 5) & 1); PushBit((v >> 6) & 1); break; } } inline uint32 CBitArray::bitsneeded(uint32 v) { // See bit twiddling hacks static const int MultiplyDeBruijnBitPosition[32] = { 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 }; v |= v >> 1; // first round down to one less than a power of 2 v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; return 1 + MultiplyDeBruijnBitPosition[(uint32)(v * 0x07C4ACDDU) >> 27]; } template<class INT> NO_INLINE_WEAK void CBitArray::SerializeInt_T(INT* v, INT min, INT max) { INT range = max - min; INT nbits = bitsneeded(range); unsigned char c; if (IsReading()) { INT multiplier = 1; *v = 0; while (nbits > 8) { ReadBits(&c, 8); *v |= multiplier * c; // Note: there is no need for endian swapping with this method multiplier = multiplier * 256; nbits = nbits - 8; } ReadBits(&c, nbits); *v |= multiplier * c; *v = *v + min; } else { INT tmp = *v - min; if (tmp < 0) { tmp = 0; } if (tmp > range) { tmp = range; } while (nbits > 8) { c = tmp & 0xff; WriteBits(&c, 8); // Note: there is no need for endian swapping with this method tmp = tmp >> 8; nbits = nbits - 8; } c = tmp & 0xff; WriteBits(&c, nbits); } } inline void CBitArray::Serialize(bool& v) { int tmp = v ? 
1 : 0; SerializeInt_T<int>(&tmp, 0, 1); v = (tmp != 0); } inline void CBitArray::SerializeInt(int* v, int min, int max) { SerializeInt_T<int>(v, min, max); } inline void CBitArray::SerializeUInt(unsigned int* v, unsigned int min, unsigned int max) { SerializeInt_T<unsigned int>(v, min, max); } inline void CBitArray::Serialize(uint32& v, uint32 min, uint32 max) { unsigned int tmp = v; SerializeUInt(&tmp, min, max); v = (uint32)tmp; } inline void CBitArray::Serialize(int32& v, int min, int max) { int tmp = v; SerializeInt(&tmp, min, max); v = (int32)tmp; } inline void CBitArray::Serialize(int16& v, int min, int max) { int tmp = v; SerializeInt(&tmp, min, max); v = (int16)tmp; } inline void CBitArray::Serialize(uint16& v, uint16 min, uint16 max) { unsigned int tmp = v; SerializeUInt(&tmp, min, max); v = (uint16)tmp; } inline void CBitArray::Serialize(unsigned char& v, int min, int max) { int tmp = v; SerializeInt(&tmp, min, max); v = (unsigned char)tmp; } inline void CBitArray::Serialize(float& v, float min, float max, int totalNumBits, int reduceRange) { SerializeFloat(&v, min, max, totalNumBits, reduceRange); } inline void CBitArray::Serialize(Vec3& v, float min, float max, int numBitsPerElem, int reduceRange) { SerializeFloat(&v.x, min, max, numBitsPerElem, reduceRange); SerializeFloat(&v.y, min, max, numBitsPerElem, reduceRange); SerializeFloat(&v.z, min, max, numBitsPerElem, reduceRange); } inline void CBitArray::Serialize(Quat& q) { // Should this compression migratate to Cry_Quat.h ? 
float quat[4]; if (IsWriting()) { uint32 out = 0; uint32 i; float scale = 1.0f; quat[0] = q.w; quat[1] = q.v.x; quat[2] = q.v.y; quat[3] = q.v.z; uint32 largest = 0; for (i = 1; i < 4; i++) { if (fabsf(quat[i]) > fabsf(quat[largest])) { largest = i; } } // Scale the quat so that reconstruction always deals with positive value scale = (float)fsel(quat[largest], 1.f, -1.f); out |= largest; // first 2 bits denote which is the largest uint32 entry = 0; uint32 multiply = 4; for (i = 0; i < 4; i++) { if (i != largest) { // Encode each remaining value in 10 bits, using range 0-1022. NB, range is chosen so zero is reproduced correctly int val = (int)((((scale * quat[i]) + 0.7071f) * (1022.f / 1.4142f)) + 0.5f); if (val < 0) { val = 0; } if (val > 1022) { val = 1022; } out |= val * multiply; multiply *= 1024; entry++; } } Serialize(out); } else // Reading { uint32 in; Serialize(in); static int idx[4][3] = { { 1, 2, 3 }, { 0, 2, 3 }, { 0, 1, 3 }, { 0, 1, 2 } }; int mv = in & 3; int* indices = idx[mv]; uint32 c0 = (in >> 2) & 1023; uint32 c1 = (in >> 12) & 1023; uint32 c2 = (in >> 22) & 1023; float outDatai0 = (c0 * (1.4142f / 1022.f)) - 0.7071f; float outDatai1 = (c1 * (1.4142f / 1022.f)) - 0.7071f; float outDatai2 = (c2 * (1.4142f / 1022.f)) - 0.7071f; float sumOfSqs = 1.f - outDatai0 * outDatai0; quat[ indices[0] ] = outDatai0; sumOfSqs -= outDatai1 * outDatai1; quat[ indices[1] ] = outDatai1; sumOfSqs -= outDatai2 * outDatai2; quat[ indices[2] ] = outDatai2; sumOfSqs = (float)fsel(sumOfSqs, sumOfSqs, 0.0f); quat[mv] = sqrtf(sumOfSqs); q.w = quat[0]; q.v.x = quat[1]; q.v.y = quat[2]; q.v.z = quat[3]; } } inline void CBitArray::SerializeFloat(float* data, float min, float max, int totalNumBits, int reduceRange) { int range = (1 << totalNumBits) - 1 - reduceRange; if (IsReading()) { int n; SerializeInt(&n, 0, range); *data = ((float)n) * (max - min) / (float)(range) + min; } else { float f = clamp_tpl(*data, min, max); int n = (int)(((float)range) / (max - min) * (f - min)); 
SerializeInt(&n, 0, range); } } NO_INLINE_WEAK void CBitArray::WriteToSerializer() { CRY_ASSERT(IsReading() == 0); for (int i = 0; i < m_numberBytes; i++) { m_ser->Value("bitarray", m_data[i]); } } #endif // CRYINCLUDE_CRYACTION_NETWORK_SERIALIZEBITS_H
6,757
439
<filename>exercises/practice/two-bucket/.meta/config.json { "blurb": "Given two buckets of different size, demonstrate how to measure an exact number of liters.", "authors": [ "jssander" ], "contributors": [ "FridaTveit", "jmrunkle", "lemoncurry", "mirkoperillo", "msomji", "muzimuzhi", "sjwarner-bp", "SleeplessByte", "sshine" ], "files": { "solution": [ "src/main/java/TwoBucket.java" ], "test": [ "src/test/java/TwoBucketTest.java" ], "example": [ ".meta/src/reference/java/TwoBucket.java" ] }, "source": "Water Pouring Problem", "source_url": "http://demonstrations.wolfram.com/WaterPouringProblem/" }
313
6,304
#! /usr/bin/env python # Copyright 2019 Google LLC. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import shutil import subprocess import sys def copy_git_directory(src, dst, out=None): ''' Makes a copy of `src` directory in `dst` directory. If files already exist and are identical, do not touch them. If extra files or directories exist, remove them. Assume that `src` is a git directory so that `git ls-files` can be used to enumerate files. This has the added benefit of ignoring files not tracked by git. Also, if out is not None, write summary of actions to out. If `dst` is a top-level git directory, the `.git` directory will be removed. ''' if not os.path.isdir(src): raise Exception('Directory "%s" does not exist.' % src) if not os.path.isdir(dst): os.makedirs(dst) ls_files = subprocess.check_output([ 'git', 'ls-files', '-z', '.'], cwd=src).decode('utf-8') src_files = set(p for p in ls_files.split('\0') if p) abs_src = os.path.abspath(src) cwd = os.getcwd() try: os.chdir(dst) def output(out, sym, dst, path): if out: out.write('%s %s%s%s\n' % (sym, dst, os.sep, path)) for dirpath, dirnames, filenames in os.walk('.', topdown=False): for filename in filenames: path = os.path.normpath(os.path.join(dirpath, filename)) if path not in src_files: output(out, '-', dst, path) os.remove(path) for filename in dirnames: path = os.path.normpath(os.path.join(dirpath, filename)) if not os.listdir(path): # Remove empty subfolders. output(out, '-', dst, path + os.sep) os.rmdir(path) for path in src_files: src_path = os.path.join(abs_src, path) if os.path.exists(path): with open(path) as f1: with open(src_path) as f2: if f1.read() == f2.read(): continue output(out, '+', dst, path) shutil.copy2(src_path, path) finally: os.chdir(cwd) if __name__ == '__main__': if len(sys.argv) != 3: sys.stderr.write('\nUsage:\n %s SRC_DIR DST_DIR\n\n' % sys.argv[0]) sys.exit(1) copy_git_directory(sys.argv[1], sys.argv[2], sys.stdout)
957
634
/*
 * Copyright 2013-2018 consulo.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package consulo.ui.layout;

import consulo.ui.internal.UIInternal;
import consulo.ui.annotation.RequiredUIAccess;

import javax.annotation.Nonnull;
import java.util.function.Supplier;

/**
 * A layout that holds several named children and shows one of them at a time,
 * switching between them via {@link #swipeLeftTo(String)} and
 * {@link #swipeRightTo(String)}.
 *
 * @author VISTALL
 * @since 2018-07-01
 */
public interface SwipeLayout extends Layout {
  /**
   * Creates a new, empty {@code SwipeLayout} via the platform implementation.
   */
  @Nonnull
  static SwipeLayout create() {
    return UIInternal.get()._Layouts_swipe();
  }

  /**
   * Registers an already-built child layout under the given id.
   * Convenience overload that wraps the layout in a constant supplier.
   *
   * @return this layout, for chaining
   */
  @Nonnull
  default SwipeLayout register(@Nonnull String id, @Nonnull Layout layout) {
    return register(id, () -> layout);
  }

  /**
   * Registers a lazily-created child layout under the given id; the supplier
   * is invoked on the UI thread (see {@link RequiredUIAccess}).
   *
   * @return this layout, for chaining
   */
  @Nonnull
  SwipeLayout register(@Nonnull String id, @Nonnull @RequiredUIAccess Supplier<Layout> layoutSupplier);

  /**
   * @param id of child
   * @return child layout which will be showed
   */
  @Nonnull
  Layout swipeLeftTo(@Nonnull String id);

  /**
   * @param id of child
   * @return child layout which will be showed
   */
  @Nonnull
  Layout swipeRightTo(@Nonnull String id);
}
448
4,417
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This example demonstrates how to list cluster nodes using dynamic client.
"""

from kubernetes import config, dynamic
from kubernetes.client import api_client


def main():
    # Creating a dynamic client from the local kubeconfig.
    client = dynamic.DynamicClient(
        api_client.ApiClient(configuration=config.load_kube_config())
    )

    # fetching the node api
    api = client.resources.get(api_version="v1", kind="Node")

    # Listing cluster nodes
    print("%s\t\t%s\t\t%s" % ("NAME", "STATUS", "VERSION"))
    for item in api.get().items:
        node = api.get(name=item.metadata.name)
        # Nodes report several conditions (MemoryPressure, DiskPressure,
        # Ready, ...) in no guaranteed order, so look up the "Ready"
        # condition by type instead of relying on a fixed index.
        ready = next(
            (c for c in node.status.conditions if c["type"] == "Ready"), None
        )
        status = (
            "Ready"
            if ready is not None and ready["status"] == "True"
            else "NotReady"
        )
        print(
            "%s\t%s\t\t%s\n"
            % (
                node.metadata.name,
                status,
                node.status.nodeInfo.kubeProxyVersion,
            )
        )


if __name__ == "__main__":
    main()
531
339
#!/usr/bin/env python
"""Retrieve full genome databases, preparing them for BLAST analysis.

Usage:
    retrieve_org_dbs.py <YAML config file>

Requires:
- NCBI's blast+ -- for preparing the organism databases
  ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/
- Biopython libraries
"""
import os
import sys
import csv
import glob
import ftplib
import subprocess
import contextlib
import urllib2
import socket
import time

import yaml
from Bio import Entrez

def main(config_file):
    # Per-organism resolution order: explicit problem list -> a single
    # pre-downloaded FASTA under <db_dir>/custom -> Ensembl FTP -> NCBI Entrez.
    with open(config_file) as in_handle:
        config = yaml.load(in_handle)
    Entrez.email = config.get('email', '<EMAIL>')
    socket.setdefaulttimeout(config['url_timeout'])
    local_get = LocalRetrieval(config)
    ncbi_get = NcbiEntrezRetrieval(config)
    ensembl_get = EnsemblFtpRetrieval(config)
    organisms = read_org_list(config['org_file'])
    db_dir = config['db_dir']
    ensembl_db_dir = os.path.join(db_dir, "ensembl")
    for check_dir in [db_dir, ensembl_db_dir]:
        if not os.path.exists(check_dir):
            os.makedirs(check_dir)
    org_files = []
    for org in organisms:
        check_glob = os.path.join(config["db_dir"], "custom", "%s*" % org)
        print "Preparing organism:", org
        # Ignore BLAST index artifacts so only the FASTA source is matched.
        check_custom = [x for x in glob.glob(check_glob)
                        if not x.endswith((".phr", ".pin", ".psq"))]
        if org in config.get('problem_orgs', []):
            db_file = ''
        elif len(check_custom) == 1:
            db_file = local_get.retrieve_db(org, check_custom[0], db_dir)
        else:
            print("Did not find single pre-downloaded FASTA file in '%s'\n"
                  "Instead Found %s\n"
                  "Attempting to download from Ensembl or NCBI"
                  % (check_glob, check_custom))
            db_file = ensembl_get.retrieve_db(org, ensembl_db_dir)
            if db_file:
                print "Ensembl"
                db_file = os.path.join(os.path.basename(ensembl_db_dir), db_file)
            else:
                print "NCBI"
                db_file = ncbi_get.retrieve_db(org, db_dir)
        org_files.append((org, db_file))
    # Write an organism -> database-path manifest for downstream BLAST runs.
    with open(os.path.join(db_dir, "organism_dbs.txt"), "w") as out_handle:
        for org, fname in org_files:
            out_handle.write("%s\t%s\n" % (org, fname))

def read_org_list(in_file):
    # Organism names are taken from the last column of the CSV.
    with open(in_file, 'rU') as in_handle:
        reader = csv.reader(in_handle)
        orgs = [r[-1] for r in reader]
    return orgs

class _BaseRetrieval:
    def _make_blast_db(self, db_dir, final_file, db_name, organism):
        # Build a protein BLAST database in-place; skip if the .pin index
        # already exists from a previous run.
        with _chdir(db_dir):
            if not os.path.exists("%s.pin" % db_name):
                cmd = self._config.get("blastdb_cmd", "makeblastdb")
                cl = [cmd, "-in", os.path.basename(final_file),
                      "-dbtype", "prot",
                      "-out", db_name,
                      "-title", organism]
                subprocess.check_call(cl)

class LocalRetrieval(_BaseRetrieval):
    """Prepare a database file from a local FASTA ref.
    """
    def __init__(self, config):
        self._config = config

    def retrieve_db(self, org, fname, db_dir):
        self._make_blast_db(os.path.dirname(fname), os.path.basename(fname),
                            os.path.splitext(os.path.basename(fname))[0], org)
        # Return the db path relative to db_dir, without the file extension.
        return os.path.splitext(fname.replace("%s/" % db_dir, ""))[0]

class NcbiEntrezRetrieval(_BaseRetrieval):
    """Pull down fasta protein genome sequences using NCBI Entrez.
    """
    def __init__(self, config):
        self._max_tries = 5
        self._config = config

    def retrieve_db(self, organism, db_dir):
        genome_ids = self._query_for_ids(organism)
        out_file = os.path.join(db_dir, "%s-entrez.fa" %
                                organism.replace(" ", "_"))
        db_name = os.path.splitext(os.path.basename(out_file))[0]
        if not os.path.exists(out_file):
            # Retry with a short pause on URL timeouts, up to _max_tries.
            num_tries = 1
            while 1:
                try:
                    self._download_and_error_out(out_file, genome_ids)
                    break
                except urllib2.URLError:
                    print "Timeout error"
                    time.sleep(5)
                    if num_tries > self._max_tries:
                        raise
                    else:
                        num_tries += 1
        self._make_blast_db(db_dir, os.path.basename(out_file), db_name,
                            organism)
        return db_name

    def _download_and_error_out(self, out_file, genome_ids):
        """Do the full genome downloading, raising timeout errors to be handled.
        """
        with open(out_file, "w") as out_handle:
            for genome_id in genome_ids:
                print "Downloading", genome_id
                self._download_to_file(genome_id, out_handle)

    def _download_to_file(self, genome_id, out_handle):
        entrez_url = "http://www.ncbi.nlm.nih.gov/sites/entrez?Db=genome&" \
                     "Cmd=File&dopt=Protein+FASTA&list_uids=%s" % genome_id
        download_handle = urllib2.urlopen(entrez_url)
        # read off garbage at the beginning of the file related to the genome
        while 1:
            line = download_handle.readline()
            if line.startswith(">"):
                out_handle.write(line)
                break
            if not line:
                break
            print line
        for line in download_handle:
            out_handle.write(line)
        download_handle.close()
        # be sure output has trailing newlines. Who knows what could be there.
        out_handle.write("\n")

    def _query_for_ids(self, organism):
        handle = Entrez.esearch(db="genome", term="%s[Organism]" % organism)
        record = Entrez.read(handle)
        return record['IdList']

class EnsemblFtpRetrieval(_BaseRetrieval):
    """Handle obtaining a reference genome from Ensembl
    """
    def __init__(self, config):
        self._main_ftp = "ftp://ftp.ensembl.org/pub/current_fasta/"
        self._genome_ftp = "ftp://ftp.ensemblgenomes.org/pub/%s/current/fasta/"
        self._genome_dbs = ["bacteria", "protists", "metazoa", "fungi",
                            "plants"]
        self._initialized = False
        self._config = config

    def _initialize(self):
        # Lazily build the organism -> FTP-site map on first use; listing all
        # Ensembl divisions is slow, so it is done at most once per instance.
        if not self._initialized:
            urls = [self._genome_ftp % d for d in self._genome_dbs] + \
                   [self._main_ftp]
            self._org_to_urls = dict()
            for url in urls:
                orgs = self._files_at_url(url)
                for org in orgs:
                    self._org_to_urls[org] = url
            self._initialized = True

    def _files_at_url(self, url):
        """Add organisms available at the provided FTP url.
        """
        parts = url.replace("ftp://", "").split("/")
        ftp = ftplib.FTP(parts[0])
        ftp.login()
        orgs = ftp.nlst("/".join(parts[1:]))
        return [o.split("/")[-1] for o in orgs]

    def retrieve_db(self, organism, db_dir):
        self._initialize()
        ftp_url = self._get_ftp_url(organism)
        if ftp_url is None:
            return ""
        file_name = ftp_url.split("/")[-1]
        final_file = os.path.join(db_dir, file_name.replace(".gz", ""))
        db_name = os.path.splitext(os.path.basename(final_file))[0]
        if not os.path.exists(final_file):
            with _chdir(db_dir):
                cl = ["wget", ftp_url]
                subprocess.check_call(cl)
                cl = ["gunzip", file_name]
                subprocess.check_call(cl)
        self._make_blast_db(db_dir, final_file, db_name, organism)
        return db_name

    def _get_ftp_url(self, organism):
        """Retrieve the protein database link for a given organism.
        """
        ftp_url = None
        org_parts = organism.split()
        # Try the full name first, then the abbreviated "g_species" form
        # (first letter of genus + species), both lowercased.
        for check_org in [organism.replace(" ", "_").lower(),
                          "_".join([org_parts[0][0], org_parts[1]]).lower()]:
            try:
                ftp_url = self._org_to_urls[check_org]
                break
            except KeyError:
                pass
        if ftp_url:
            ftp_url = ftp_url + check_org + "/pep/"
            files = self._files_at_url(ftp_url)
            for f in files:
                if f.endswith("pep.all.fa.gz"):
                    ftp_url = ftp_url + f
                    break
        return ftp_url

@contextlib.contextmanager
def _chdir(new_dir):
    # Temporarily change the working directory, restoring it on exit
    # even if the body raises.
    orig_dir = os.getcwd()
    try:
        os.chdir(new_dir)
        yield
    finally:
        os.chdir(orig_dir)

if __name__ == "__main__":
    if len(sys.argv) == 2:
        main(*sys.argv[1:])
    else:
        print "Incorrect arguments"
        print __doc__
        sys.exit()
4,393
335
<reponame>Safal08/Hacktoberfest-1 { "word": "Irone", "definitions": [ "Originally: a fragrant oil obtained from orris root (Iris germanica and other species). Later: each of three isomeric cyclic ketones (\"\u03b1-irone\", \"\u03b2-irone\", and \"\u03b3-irone\") found in this oil and in other plants such as violets, and used in perfumery." ], "parts-of-speech": "Noun" }
156
2,477
/*
 * Copyright (c) 2015 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.github.hidroh.materialistic.widget;

import android.content.Intent;
import android.os.Handler;
import androidx.recyclerview.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;

import java.util.ArrayList;
import java.util.List;

import io.github.hidroh.materialistic.AppUtils;
import io.github.hidroh.materialistic.ItemActivity;
import io.github.hidroh.materialistic.R;
import io.github.hidroh.materialistic.data.Item;
import io.github.hidroh.materialistic.data.ItemManager;

/**
 * Adapter that renders a read-only preview of a comment thread: the seed item
 * plus its chain of ancestors, which are prepended lazily as each item is
 * bound (see {@link #bind}).
 */
public class ThreadPreviewRecyclerViewAdapter extends ItemRecyclerViewAdapter<SubmissionViewHolder> {
    // Items currently displayed; ancestors are inserted at index 0 as they
    // are discovered, so the list reads root-first.
    private final List<Item> mItems = new ArrayList<>();
    // IDs whose parent has already been queued for insertion, to avoid
    // prepending the same ancestor twice on re-bind.
    private final List<String> mExpanded = new ArrayList<>();
    private int mLevelIndicatorWidth;
    // Author of the seed item; other authors are highlighted differently.
    private final String mUsername;

    public ThreadPreviewRecyclerViewAdapter(ItemManager itemManager, Item item) {
        super(itemManager);
        mItems.add(item);
        mUsername = item.getBy();
    }

    @Override
    public void onAttachedToRecyclerView(RecyclerView recyclerView) {
        super.onAttachedToRecyclerView(recyclerView);
        attach(recyclerView.getContext(), recyclerView);
        mLevelIndicatorWidth = AppUtils.getDimensionInDp(mContext, R.dimen.level_indicator_width);
    }

    @Override
    public void onDetachedFromRecyclerView(RecyclerView recyclerView) {
        super.onDetachedFromRecyclerView(recyclerView);
        detach(recyclerView.getContext(), recyclerView);
    }

    @Override
    public SubmissionViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        SubmissionViewHolder holder = new SubmissionViewHolder(mLayoutInflater
                .inflate(R.layout.item_submission, parent, false));
        // viewType doubles as the thread depth (see getItemViewType), so each
        // level is indented by one indicator width.
        final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams)
                holder.itemView.getLayoutParams();
        params.leftMargin = mLevelIndicatorWidth * viewType;
        holder.itemView.setLayoutParams(params);
        holder.mCommentButton.setVisibility(View.GONE);
        return holder;
    }

    @Override
    public int getItemCount() {
        return mItems.size();
    }

    @Override
    public int getItemViewType(int position) {
        // The adapter position is reused as the view type so that
        // onCreateViewHolder can indent each row by its depth.
        return position;
    }

    @Override
    protected void bind(SubmissionViewHolder holder, final Item item) {
        super.bind(holder, item);
        holder.mPostedTextView.setText(item.getDisplayedTime(mContext));
        holder.mPostedTextView.append(item.getDisplayedAuthor(mContext,
                !TextUtils.equals(item.getBy(), mUsername), 0));
        holder.mMoreButton.setVisibility(View.GONE);
        if (TextUtils.equals(item.getType(), Item.COMMENT_TYPE)) {
            holder.mTitleTextView.setText(null);
            holder.itemView.setOnClickListener(null);
            holder.mCommentButton.setVisibility(View.GONE);
        } else {
            holder.mTitleTextView.setText(item.getDisplayedTitle());
            holder.mCommentButton.setVisibility(View.VISIBLE);
            holder.mCommentButton.setOnClickListener(v -> openItem(item));
        }
        holder.mTitleTextView.setVisibility(holder.mTitleTextView.length() > 0 ?
                View.VISIBLE : View.GONE);
        holder.mContentTextView.setVisibility(holder.mContentTextView.length() > 0 ?
                View.VISIBLE : View.GONE);
        // Lazily prepend this item's parent the first time it is bound;
        // posting defers the list mutation so it does not run inside the
        // current bind pass. Binding the prepended parent repeats this,
        // walking up the ancestor chain one level at a time.
        if (!mExpanded.contains(item.getId()) && item.getParentItem() != null) {
            mExpanded.add(item.getId());
            new Handler().post(() -> {
                mItems.add(0, item.getParentItem()); // recursive
                notifyItemInserted(0);
                notifyItemRangeChanged(1, mItems.size());
            });
        }
    }

    @Override
    protected Item getItem(int position) {
        return mItems.get(position);
    }

    private void openItem(Item item) {
        mContext.startActivity(new Intent(mContext, ItemActivity.class)
                .putExtra(ItemActivity.EXTRA_ITEM, item));
    }
}
1,819
674
<filename>Storage/Disk/Encodings/AppleGCR/Sector.hpp // // Sector.hpp // Clock Signal // // Created by <NAME> on 04/05/2018. // Copyright 2018 <NAME>. All rights reserved. // #ifndef Sector_h #define Sector_h #include <cstdint> #include <vector> namespace Storage { namespace Encodings { namespace AppleGCR { struct Sector { /*! Describes the location of a sector, implementing < to allow for use as a set key. */ struct Address { union { /// For Apple II-type sectors, provides the volume number. uint_fast8_t volume; /// For Macintosh-type sectors, provides the format from the sector header. uint_fast8_t format = 0; }; uint_fast8_t track = 0; uint_fast8_t sector = 0; /// Filled in for Macintosh sectors only; always @c false for Apple II sectors. bool is_side_two = false; bool operator < (const Address &rhs) const { return ( ((is_side_two ? 1 : 0) << 24) | (volume << 16) | (track << 8) | sector ) < ( ((rhs.is_side_two ? 1 : 0) << 24) | (rhs.volume << 16) | (rhs.track << 8) | rhs.sector ); } }; Address address; std::vector<uint8_t> data; bool has_data_checksum_error = false; bool has_header_checksum_error = false; enum class Encoding { FiveAndThree, SixAndTwo, Macintosh }; Encoding encoding = Encoding::SixAndTwo; Sector() {} Sector(Sector &&rhs) : address(rhs.address), data(std::move(rhs.data)), has_data_checksum_error(rhs.has_data_checksum_error), has_header_checksum_error(rhs.has_header_checksum_error), encoding(rhs.encoding) {} Sector(const Sector &rhs) : address(rhs.address), data(rhs.data), has_data_checksum_error(rhs.has_data_checksum_error), has_header_checksum_error(rhs.has_header_checksum_error), encoding(rhs.encoding) {} }; } } } #endif /* Sector_h */
728
4,640
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest

pytest.importorskip("ethosu.vela")

from math import isclose

from tvm.contrib.ethosu.cascader import StripeConfig, Propagator


def test_propagator():
    # Construct a Propagator and check the transform matrix and offset
    # round-trip unchanged through the FFI (floats compared with isclose).
    transform = [
        [1, 0, 0, 0],
        [0, 1 / 2, 0, 0],
        [0, 0, -1, 0],
        [0, 0, 0, 1],
    ]
    offset = [-1, 1, 2]
    propagator = Propagator(
        transform=transform,
        offset=offset,
    )
    assert list(propagator.offset) == offset
    for i, row in enumerate(transform):
        for j, value in enumerate(row):
            assert isclose(propagator.transform[i][j], value)


# Each case pairs a propagator with an output-space stripe config and the
# input-space stripe config it is expected to propagate to.
@pytest.mark.parametrize(
    ["propagator", "input_stripe_config", "output_stripe_config"],
    [
        (
            Propagator(
                transform=[
                    [1, 0, 0, 0, 0],
                    [0, 1, 0, 0, 0],
                    [0, 0, 0, 1 / 16, 0],
                    [0, 0, 1, 0, 0],
                    [0, 0, 0, 0, 16],
                    [0, 0, 0, 0, 1],
                ],
                offset=[0, 0, 0, 0, 0],
            ),
            StripeConfig(
                shape=[1, 12, 14, 36],
                extent=[1, 24, 18, 72],
                strides=[1, 12, 14, 36],
                order=[1, 2, 3, 4],
                stripes=[1, 2, 2, 2],
                offset=[0, 0, 0, 0],
            ),
            StripeConfig(
                shape=[1, 12, 3, 14, 16],
                extent=[1, 24, 5, 18, 16],
                strides=[1, 12, 2.25, 14, 0],
                order=[1, 2, 4, 3, 0],
                stripes=[1, 2, 2, 2, 1],
                offset=[0, 0, 0, 0, 0],
            ),
        ),
        (
            Propagator(
                transform=[
                    [0.5, 0, 0],
                    [0, 0.5, 0],
                    [0, 0, 1],
                ],
                offset=[0, 0],
            ),
            StripeConfig(
                shape=[3, 5],
                extent=[27, 50],
                strides=[3, 5],
                order=[1, 2],
                stripes=[9, 10],
                offset=[0, 0],
            ),
            StripeConfig(
                shape=[2, 3],
                extent=[14, 25],
                strides=[1.5, 2.5],
                order=[1, 2],
                stripes=[9, 10],
                offset=[0, 0],
            ),
        ),
        (
            Propagator(
                transform=[
                    [2, 0, 0, 4],
                    [0, 1, 0, 2],
                    [0, 0, 0, 8],
                    [0, 0, 0, 1],
                ],
                offset=[-2, -1, 0],
            ),
            StripeConfig(
                shape=[4, 6, 32],
                extent=[48, 60, 64],
                strides=[4, 6, 32],
                order=[1, 2, 3],
                stripes=[12, 10, 2],
                offset=[0, 0, 0],
            ),
            StripeConfig(
                shape=[12, 8, 8],
                extent=[100, 62, 8],
                strides=[8, 6, 0],
                order=[1, 2, 0],
                stripes=[12, 10, 1],
                offset=[-2, -1, 0],
            ),
        ),
    ],
)
def test_propagate(propagator, input_stripe_config, output_stripe_config):
    # propagate() maps an output-space stripe config back through the
    # propagator's affine transform; compare against the expected result.
    result_stripe_config = propagator.propagate(input_stripe_config)

    assert result_stripe_config == output_stripe_config


if __name__ == "__main__":
    pytest.main([__file__])
2,374
1,338
/*
	Haiku S3 Trio64 driver adapted from the X.org S3 driver.

	Copyright 2001 <NAME> <<EMAIL>>
	Copyright 2008 Haiku, Inc.  All rights reserved.
	Distributed under the terms of the MIT license.

	Authors:
	<NAME> 2008
*/

#ifndef __TRIO64_H__
#define __TRIO64_H__


// Note that the cursor normally needs only 1024 bytes;  however, if 1024 bytes
// are used, some of the Trio64 chips draw a short white horizontal line below
// and to the right of the cursor.  Setting the number of bytes to 2048 solves
// the problem.

#define CURSOR_BYTES	2048		// see comment above


// Command Registers.
// I/O port addresses of the Trio64 graphics-engine registers; note that some
// pairs below (SUBSYS_STAT/SUBSYS_CNTL, GP_STAT/CMD) share one address and
// presumably differ by read vs. write access — confirm against the S3 docs.

#define ADVFUNC_CNTL	0x4ae8
#define SUBSYS_STAT		0x42e8
#define SUBSYS_CNTL		0x42e8
#define CUR_Y			0x82e8
#define CUR_X			0x86e8
#define DESTY_AXSTP		0x8ae8
#define DESTX_DIASTP	0x8ee8
#define CUR_WIDTH		0x96e8
#define CMD				0x9ae8
#define GP_STAT			0x9ae8
#define FRGD_COLOR		0xa6e8
#define WRT_MASK		0xaae8
#define FRGD_MIX		0xbae8
#define MULTIFUNC_CNTL	0xbee8

// Command register bits.

#define CMD_RECT		0x4000
#define CMD_BITBLT		0xc000
#define INC_Y			0x0080
#define INC_X			0x0020
#define DRAW			0x0010
#define WRTDATA			0x0001

// Foreground mix register.

#define FSS_FRGDCOL		0x0020
#define FSS_BITBLT		0x0060

#define GP_BUSY			0x0200

// Scissor (clipping rectangle) selectors for MULTIFUNC_CNTL.
#define SCISSORS_T		0x1000
#define SCISSORS_L		0x2000
#define SCISSORS_B		0x3000
#define SCISSORS_R		0x4000


#endif // __TRIO64_H__
613
14,668
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef EXTENSIONS_BROWSER_EXTENSION_HOST_QUEUE_H_
#define EXTENSIONS_BROWSER_EXTENSION_HOST_QUEUE_H_

#include <list>

#include "base/memory/weak_ptr.h"
#include "base/time/time.h"

namespace extensions {

class DeferredStartRenderHost;

// A queue of ExtensionHosts waiting for initialization. This initializes
// DeferredStartRenderHosts in the order they're Add()ed, with simple rate
// limiting logic that re-posts each task to the UI thread, to avoid clogging
// it for a long period of time.
class ExtensionHostQueue {
 public:
  ExtensionHostQueue();
  ~ExtensionHostQueue();

  // Not copyable: there is a single process-wide queue (see GetInstance()).
  ExtensionHostQueue(const ExtensionHostQueue& queue) = delete;
  ExtensionHostQueue& operator=(const ExtensionHostQueue& queue) = delete;

  // Returns the single global instance of the ExtensionHostQueue.
  static ExtensionHostQueue& GetInstance();

  // Adds a host to the queue for RenderView creation.
  void Add(DeferredStartRenderHost* host);

  // Removes a host from the queue (for example, it may be deleted before
  // having a chance to start)
  void Remove(DeferredStartRenderHost* host);

  // Adds a delay before starting the next ExtensionHost. This can be used for
  // testing purposes to help flush out flakes.
  void SetCustomDelayForTesting(base::TimeDelta delay) { delay_ = delay; }

 private:
  // Queues up a delayed task to process the next DeferredStartRenderHost in
  // the queue.
  void PostTask();

  // Creates the RenderView for the next host in the queue.
  void ProcessOneHost();

  // True if this queue is currently in the process of starting an
  // DeferredStartRenderHost.
  bool pending_create_;

  // The delay before starting the next host. By default, this is 0, meaning
  // we just wait until the event loop yields.
  base::TimeDelta delay_;

  // The list of DeferredStartRenderHosts waiting to be started.
  // NOTE: raw pointers; entries must be Remove()d before their host is
  // destroyed.
  std::list<DeferredStartRenderHost*> queue_;

  // Vends weak pointers for the tasks posted by PostTask(), so in-flight
  // tasks are invalidated if the queue is destroyed (implementation not
  // visible in this header).
  base::WeakPtrFactory<ExtensionHostQueue> ptr_factory_{this};
};

}  // namespace extensions

#endif  // EXTENSIONS_BROWSER_EXTENSION_HOST_QUEUE_H_
624
822
"""
Test our ability to sign and verify digital signatures
"""

import os
from io import BytesIO

from cryptos.bitcoin import BITCOIN
from cryptos.keys import gen_key_pair
from cryptos.ecdsa import Signature, sign, verify
from cryptos.transaction import Tx


def test_ecdsa():
    """Sign/verify round trip: only the holder of sk1 can produce a
    signature that verifies against pk1."""

    # let's create two identities
    sk1, pk1 = gen_key_pair()
    sk2, pk2 = gen_key_pair() # pylint: disable=unused-variable

    message = ('user pk1 would like to pay user pk2 1 BTC kkthx').encode('ascii')

    # an evil user2 attempts to submit the transaction to the network with some totally random signature
    sig = Signature(int.from_bytes(os.urandom(32), 'big'), int.from_bytes(os.urandom(32), 'big'))
    # a few seconds later a hero miner inspects the candidate transaction
    is_legit = verify(pk1, message, sig)
    assert not is_legit
    # unlike user2, hero miner is honest and discards the transaction, all is well

    # evil user2 does not give up and tries to sign with his key pair
    sig = sign(sk2, message)
    is_legit = verify(pk1, message, sig)
    assert not is_legit
    # denied, again!

    # lucky for user2, user1 feels sorry for them and the hardships they have been through recently
    sig = sign(sk1, message)
    is_legit = verify(pk1, message, sig)
    assert is_legit
    # hero miner validates the transaction and adds it to their block
    # user2 happy, buys a Tesla, and promises to turn things around

    # the end.
def test_sig_der():
    """DER encode/decode round trip for ECDSA signatures, using known
    vectors from the Programming Bitcoin book."""

    # a transaction used as an example in programming bitcoin
    raw = bytes.fromhex('0100000001813f79011acb80925dfe69b3def355fe914bd1d96a3f5f71bf8303c6a989c7d1000000006b483045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed01210349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278afeffffff02a135ef01000000001976a914bc3b654dca7e56b04dca18f2566cdaf02e8d9ada88ac99c39800000000001976a9141c4bc762dd5423e332166702cb75f40df79fea1288ac19430600')
    tx = Tx.decode(BytesIO(raw))
    der = tx.tx_ins[0].script_sig.cmds[0][:-1] # this is the DER signature of the first input on this tx. :-1 crops out the sighash-type byte
    sig = Signature.decode(der) # making sure no asserts get tripped up inside this call

    # from programming bitcoin chapter 4
    der = bytes.fromhex('3045022037206a0610995c58074999cb9767b87af4c4978db68c06e8e6e81d282047a7c60221008ca63759c1157ebeaec0d03cecca119fc9a75bf8e6d0fa65c841c8e2738cdaec')
    sig = Signature.decode(der)
    # r and s are the two integer halves of the signature
    assert sig.r == 0x37206a0610995c58074999cb9767b87af4c4978db68c06e8e6e81d282047a7c6
    assert sig.s == 0x8ca63759c1157ebeaec0d03cecca119fc9a75bf8e6d0fa65c841c8e2738cdaec

    # test that we can also recover back the same der encoding
    der2 = sig.encode()
    assert der == der2
1,118
5,169
{ "name": "StickySugar", "version": "1.1.0", "summary": "Syntactic sugar untop of AutoLayout to stick views together", "description": "Syntactic sugar untop of AutoLayout to stick views together.\nSticky sugar adds convenience methods on top of auto layout to build and apply constraints to your view in a nice and chainable way.", "homepage": "https://github.com/Kleemann/StickySugar", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<NAME>": "" }, "source": { "git": "https://github.com/Kleemann/StickySugar.git", "tag": "1.1.0" }, "social_media_url": "https://twitter.com/madskleemann", "platforms": { "ios": "9.0" }, "source_files": "StickySugar/Classes/**/*", "swift_version": "4.2" }
293
39,773
<gh_stars>1000+ { "integration": [ { "name": "app_spec.coffee", "relative": "cypress/integration/app_spec.coffee" } ], "component": [ { "name": "src/components/Foo.spec.js", "relative": "src/components/Foo.spec.js", "specType": "component" }, { "name": "src/Bar.spec.js", "relative": "src/components/Bar.spec.js", "specType": "component" }, { "name": "Navigation.spec.js", "relative": "src/Navigation.spec.js", "specType": "component" } ] }
269
2,151
<filename>third_party/blink/tools/blinkpy/style/optparser_unittest.py # Copyright (C) 2010 <NAME> (<EMAIL>) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Unit tests for optparser.py."""

import unittest

from blinkpy.common.system.log_testing import LoggingTestCase
from blinkpy.style.optparser import ArgumentParser
from blinkpy.style.optparser import ArgumentPrinter
from blinkpy.style.optparser import CommandOptionValues as ProcessorOptions
from blinkpy.style.optparser import DefaultCommandOptionValues


class ArgumentPrinterTest(unittest.TestCase):
    """Tests the ArgumentPrinter class."""

    _printer = ArgumentPrinter()

    def _create_options(self,
                        output_format='emacs',
                        min_confidence=3,
                        filter_rules=None,
                        git_commit=None):
        # Helper that builds a ProcessorOptions with overridable defaults.
        return ProcessorOptions(filter_rules=filter_rules,
                                git_commit=git_commit,
                                min_confidence=min_confidence,
                                output_format=output_format)

    def test_to_flag_string(self):
        """to_flag_string() should render only user-specified flags."""
        options = self._create_options('vs7', 5, ['+foo', '-bar'], 'git')
        self.assertEqual('--filter=+foo,-bar --git-commit=git '
                         '--min-confidence=5 --output=vs7',
                         self._printer.to_flag_string(options))

        # This is to check that --filter and --git-commit do not
        # show up when not user-specified.
        options = self._create_options()
        self.assertEqual('--min-confidence=3 --output=emacs',
                         self._printer.to_flag_string(options))


class ArgumentParserTest(LoggingTestCase):
    """Test the ArgumentParser class."""

    class _MockStdErr(object):
        def write(self, _):
            # We do not want the usage string or style categories
            # to print during unit tests, so print nothing.
            return

    def _parse(self, args):
        """Call a test parser.parse()."""
        parser = self._create_parser()
        return parser.parse(args)

    def _create_defaults(self):
        """Return a DefaultCommandOptionValues instance for testing."""
        return DefaultCommandOptionValues(min_confidence=3,
                                          output_format='vs7')

    def _create_parser(self):
        """Return an ArgumentParser instance for testing."""
        default_options = self._create_defaults()

        all_categories = ['build', 'whitespace']

        mock_stderr = self._MockStdErr()

        return ArgumentParser(all_categories=all_categories,
                              base_filter_rules=[],
                              default_options=default_options,
                              mock_stderr=mock_stderr,
                              usage='test usage')

    def test_parse_documentation(self):
        """Requests for help/filter docs should exit after printing."""
        parse = self._parse

        # FIXME: Test both the printing of the usage string and the
        #        filter categories help.

        # Request the usage string.
        with self.assertRaises(SystemExit):
            parse(['--help'])
        # Request default filter rules and available style categories.
        with self.assertRaises(SystemExit):
            parse(['--filter='])

    def test_parse_bad_values(self):
        """Invalid option values should log an error and exit."""
        parse = self._parse

        # Pass an unsupported argument.
        with self.assertRaises(SystemExit):
            parse(['--bad'])
        self.assertLog(['ERROR: no such option: --bad\n'])

        with self.assertRaises(SystemExit):
            parse(['--min-confidence=bad'])
        self.assertLog(['ERROR: option --min-confidence: '
                        "invalid integer value: 'bad'\n"])
        with self.assertRaises(SystemExit):
            parse(['--min-confidence=0'])
        self.assertLog(['ERROR: option --min-confidence: invalid integer: 0: '
                        'value must be between 1 and 5\n'])
        with self.assertRaises(SystemExit):
            parse(['--min-confidence=6'])
        self.assertLog(['ERROR: option --min-confidence: invalid integer: 6: '
                        'value must be between 1 and 5\n'])
        parse(['--min-confidence=1'])  # works
        parse(['--min-confidence=5'])  # works

        with self.assertRaises(SystemExit):
            parse(['--output=bad'])
        self.assertLog(['ERROR: option --output-format: invalid choice: '
                        "'bad' (choose from 'emacs', 'vs7')\n"])
        parse(['--output=vs7'])  # works

        # Pass a filter rule not beginning with + or -.
        with self.assertRaises(SystemExit):
            parse(['--filter=build'])
        self.assertLog(['ERROR: Invalid filter rule "build": '
                        'every rule must start with + or -.\n'])
        parse(['--filter=+build'])  # works

    def test_parse_default_arguments(self):
        """Parsing no arguments should yield the documented defaults."""
        parse = self._parse

        (files, options) = parse([])

        self.assertEqual(files, [])

        self.assertEqual(options.filter_rules, [])
        self.assertIsNone(options.git_commit)
        self.assertFalse(options.diff_files)
        self.assertFalse(options.is_verbose)
        self.assertEqual(options.min_confidence, 3)
        self.assertEqual(options.output_format, 'vs7')

    def test_parse_explicit_arguments(self):
        """Each option should override its default when passed explicitly."""
        parse = self._parse

        # Pass non-default explicit values.
        _, options = parse(['--min-confidence=4'])
        self.assertEqual(options.min_confidence, 4)
        _, options = parse(['--output=emacs'])
        self.assertEqual(options.output_format, 'emacs')
        _, options = parse(['-g', 'commit'])
        self.assertEqual(options.git_commit, 'commit')
        _, options = parse(['--git-commit=commit'])
        self.assertEqual(options.git_commit, 'commit')
        _, options = parse(['--git-diff=commit'])
        self.assertEqual(options.git_commit, 'commit')
        _, options = parse(['--verbose'])
        self.assertTrue(options.is_verbose)
        _, options = parse(['--diff-files', 'file.txt'])
        self.assertTrue(options.diff_files)

        # Pass user_rules.
        _, options = parse(['--filter=+build,-whitespace'])
        self.assertEqual(options.filter_rules,
                         ['+build', '-whitespace'])

        # Pass spurious white space in user rules.
        _, options = parse(['--filter=+build, -whitespace'])
        self.assertEqual(options.filter_rules,
                         ['+build', '-whitespace'])

    def test_parse_files(self):
        """Positional arguments should be returned as the file list."""
        parse = self._parse

        files, _ = parse(['foo.cpp'])
        self.assertEqual(files, ['foo.cpp'])

        # Pass multiple files.
        files, _ = parse(['--output=emacs', 'foo.cpp', 'bar.cpp'])
        self.assertEqual(files, ['foo.cpp', 'bar.cpp'])


class CommandOptionValuesTest(unittest.TestCase):
    """Tests CommandOptionValues class."""

    def test_init(self):
        """Test __init__ constructor."""
        # Check default parameters.
        options = ProcessorOptions()
        self.assertEqual(options.filter_rules, [])
        self.assertIsNone(options.git_commit)
        self.assertFalse(options.is_verbose)
        self.assertEqual(options.min_confidence, 1)
        self.assertEqual(options.output_format, 'emacs')

        # Check argument validation.
        self.assertRaises(ValueError, ProcessorOptions, output_format='bad')
        ProcessorOptions(output_format='emacs')  # No ValueError: works
        ProcessorOptions(output_format='vs7')  # works
        self.assertRaises(ValueError, ProcessorOptions, min_confidence=0)
        self.assertRaises(ValueError, ProcessorOptions, min_confidence=6)
        ProcessorOptions(min_confidence=1)  # works
        ProcessorOptions(min_confidence=5)  # works

        # Check attributes.
        options = ProcessorOptions(filter_rules=['+'],
                                   git_commit='commit',
                                   is_verbose=True,
                                   min_confidence=3,
                                   output_format='vs7')
        self.assertEqual(options.filter_rules, ['+'])
        self.assertEqual(options.git_commit, 'commit')
        self.assertTrue(options.is_verbose)
        self.assertEqual(options.min_confidence, 3)
        self.assertEqual(options.output_format, 'vs7')

    def test_eq(self):
        """Test __eq__ equality function."""
        self.assertTrue(ProcessorOptions().__eq__(ProcessorOptions()))

        # Also verify that a difference in any argument causes equality to fail.

        # Explicitly create a ProcessorOptions instance with all default
        # values.  We do this to be sure we are assuming the right default
        # values in our self.assertFalse() calls below.
        options = ProcessorOptions(filter_rules=[],
                                   git_commit=None,
                                   is_verbose=False,
                                   min_confidence=1,
                                   output_format='emacs')
        # Verify that we created options correctly.
        self.assertTrue(options.__eq__(ProcessorOptions()))

        self.assertFalse(options.__eq__(ProcessorOptions(filter_rules=['+'])))
        self.assertFalse(options.__eq__(ProcessorOptions(git_commit='commit')))
        self.assertFalse(options.__eq__(ProcessorOptions(is_verbose=True)))
        self.assertFalse(options.__eq__(ProcessorOptions(min_confidence=2)))
        self.assertFalse(options.__eq__(ProcessorOptions(output_format='vs7')))

    def test_ne(self):
        """Test __ne__ inequality function."""
        # By default, __ne__ always returns true on different objects.
        # Thus, just check the distinguishing case to verify that the
        # code defines __ne__.
        self.assertFalse(ProcessorOptions().__ne__(ProcessorOptions()))
4,723
5,169
{ "name": "EMapgoSDK", "version": "1.0.0", "summary": "EMapgoSDK pod Use.", "description": "易图通地图iOS SDK基于iOS 9.0。通过调用地图SDK接口,您可以访问易图通地图服务和数据。", "homepage": "https://github.com/xiaoleiiOS/EMapgoSDK", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<EMAIL>": "<EMAIL>" }, "source": { "git": "https://github.com/xiaoleiiOS/EMapgoSDK.git", "tag": "1.0.0" }, "platforms": { "ios": "9.0" }, "requires_arc": true, "vendored_frameworks": "EMapgo/*.framework" }
310
634
/****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one   *
 * or more contributor license agreements.  See the NOTICE file *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The ASF licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/

package org.apache.james.mailbox.elasticsearch.v7.json;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Deque;
import java.util.LinkedList;
import java.util.Optional;

import org.apache.james.mailbox.extractor.TextExtractor;
import org.apache.james.mailbox.model.ContentType.MediaType;
import org.apache.james.mailbox.model.ContentType.SubType;
import org.apache.james.mailbox.store.mail.model.Message;
import org.apache.james.mime4j.MimeException;
import org.apache.james.mime4j.codec.DecodeMonitor;
import org.apache.james.mime4j.field.LenientFieldParser;
import org.apache.james.mime4j.message.DefaultBodyDescriptorBuilder;
import org.apache.james.mime4j.message.MaximalBodyDescriptor;
import org.apache.james.mime4j.stream.EntityState;
import org.apache.james.mime4j.stream.MimeConfig;
import org.apache.james.mime4j.stream.MimeTokenStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;

/**
 * Walks a {@link Message}'s raw content with mime4j's pull parser
 * ({@link MimeTokenStream}) and assembles a tree of {@code MimePart}
 * objects mirroring the message's MIME structure.
 *
 * <p>Nesting is tracked with an explicit stack: each
 * {@code T_START_MULTIPART}/{@code T_START_MESSAGE} event pushes the
 * current container builder, and the matching {@code T_END_*} event pops
 * it back, attaching the finished child part to its parent.
 *
 * <p>NOTE(review): instances hold parsing state ({@code builderStack},
 * {@code result}), so a parser object is single-use and presumably not
 * thread-safe — confirm with callers before reusing an instance.
 */
public class MimePartParser {
    private static final Logger LOGGER = LoggerFactory.getLogger(MimePartParser.class);
    private static final LenientFieldParser FIELD_PARSER = new LenientFieldParser();

    private final Message message;
    private final TextExtractor textExtractor;
    private final MimeTokenStream stream;
    // Builders of the enclosing multipart/message containers; the head is the
    // parent of the part currently being built.
    private final Deque<MimePartContainerBuilder> builderStack;
    // Root of the assembled tree; set exactly once when the outermost part closes.
    private MimePart result;
    private MimePartContainerBuilder currentlyBuildMimePart;

    public MimePartParser(Message message, TextExtractor textExtractor) {
        this.message = message;
        this.textExtractor = textExtractor;
        this.builderStack = new LinkedList<>();
        this.currentlyBuildMimePart = new RootMimePartContainerBuilder();
        // PERMISSIVE config + SILENT decode monitor: tolerate malformed input
        // rather than failing the whole message.
        this.stream = new MimeTokenStream(
            MimeConfig.PERMISSIVE,
            new DefaultBodyDescriptorBuilder(null, FIELD_PARSER, DecodeMonitor.SILENT));
    }

    /**
     * Parses the message's full content and returns the root {@code MimePart}
     * of the resulting tree.
     *
     * @throws IOException   on failure reading the message content
     * @throws MimeException on unrecoverable MIME parsing errors
     */
    public MimePart parse() throws IOException, MimeException {
        stream.parse(message.getFullContent());
        for (EntityState state = stream.getState();
                state != EntityState.T_END_OF_STREAM;
                state = stream.next()) {
            processMimePart(stream, state);
        }
        return result;
    }

    // Dispatches one parser event to the matching tree-building action.
    private void processMimePart(MimeTokenStream stream, EntityState state) {
        switch (state) {
            case T_START_MULTIPART:
            case T_START_MESSAGE:
                // Entering a container: remember it so children can be attached.
                stackCurrent();
                break;
            case T_START_HEADER:
                // A header always opens a fresh part.
                currentlyBuildMimePart = MimePart.builder();
                break;
            case T_FIELD:
                currentlyBuildMimePart.addToHeaders(stream.getField());
                break;
            case T_BODY:
                // Leaf part: capture body metadata and content, then close it.
                manageBodyExtraction(stream);
                closeMimePart();
                break;
            case T_END_MULTIPART:
            case T_END_MESSAGE:
                // Leaving a container: restore its builder and close it.
                unstackToCurrent();
                closeMimePart();
                break;
            default:
                break;
        }
    }

    private void stackCurrent() {
        builderStack.push(currentlyBuildMimePart);
        currentlyBuildMimePart = null;
    }

    private void unstackToCurrent() {
        currentlyBuildMimePart = builderStack.pop();
    }

    // Builds the finished part and attaches it to its parent, or records it as
    // the root when no parent remains on the stack.
    private void closeMimePart() {
        MimePart bodyMimePart = currentlyBuildMimePart.using(textExtractor).build();
        if (!builderStack.isEmpty()) {
            builderStack.peek().addChild(bodyMimePart);
        } else {
            // Only one root may ever be produced per parse.
            Preconditions.checkState(result == null);
            result = bodyMimePart;
        }
    }

    private void manageBodyExtraction(MimeTokenStream stream) {
        extractMimePartBodyDescription(stream);
        currentlyBuildMimePart.addBodyContent(stream.getDecodedInputStream());
    }

    // Copies media type, subtype, content disposition, filename and charset
    // from the parser's body descriptor onto the current part builder.
    private void extractMimePartBodyDescription(MimeTokenStream stream) {
        MaximalBodyDescriptor descriptor = (MaximalBodyDescriptor) stream.getBodyDescriptor();

        Optional.ofNullable(descriptor.getMediaType())
            .map(MediaType::of)
            .ifPresent(currentlyBuildMimePart::addMediaType);
        Optional.ofNullable(descriptor.getSubType())
            .map(SubType::of)
            .ifPresent(currentlyBuildMimePart::addSubType);
        currentlyBuildMimePart.addContentDisposition(descriptor.getContentDispositionType())
            .addFileName(descriptor.getContentDispositionFilename());
        extractCharset(descriptor);
    }

    private void extractCharset(MaximalBodyDescriptor descriptor) {
        try {
            Optional.ofNullable(descriptor.getCharset())
                .map(Charset::forName)
                .ifPresent(currentlyBuildMimePart::charset);
        } catch (Exception e) {
            // Best-effort: an unknown/illegal charset name must not abort
            // indexing of the whole message.
            LOGGER.info("Failed parsing charset", e);
        }
    }
}
2,470