file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
Q647.py | """
Q647
Palindromic Substrings
Medium
Given a string, your task is to count how many palindromic substrings in this string.
The substrings with different start indexes or end indexes are counted as different
substrings even they consist of same characters.
"""
class Solution:
def countSubstrings(self, s: str) -> int:
memo = [[0] * len(s) for _ in range(len(s))]
for i in range(len(s)):
memo[i][i] = 1
for i in range(1, len(s)):
if s[i-1] == s[i]:
memo[i-1][i] = 1
for l in range(3, len(s)+1): | for i in range(len(s)-l+1):
if memo[i+1][i+l-2] == 1:
if s[i] == s[i+l-1]:
memo[i][i+l-1] = 1
return sum(sum(item) for item in memo)
s = "abcdcba"
sol = Solution()
print(sol.countSubstrings(s)) | |
backoff.go | // Package backoff implements a convenient mechanism to retry an operation. This is useful when
// talking to a remote system (database, third-party integration) that can fail for any reason
// (e.g. network), and where a retry would usually solve the issue.
package backoff
import (
"context"
"fmt"
"math/rand"
"time"
)
// Backoff implements exponential backoff with randomized wait times. It is not safe to share a
// Backoff structure between multiple goroutines.
type Backoff struct {
MaxRetries int
ctx context.Context
cancel func()
numRetries int
nextDelay time.Duration
}
// New creates a Backoff object that terminates either when the context terminates (built-in
// timeout), or when the maximum number of retries is reached. Passing no maximum number of retries
// means infinite number, in which case the context deadline is used, or a deadline of 2 minutes is
// chosen by default.
func New(ctx context.Context, retries int) *Backoff {
var cancel func()
// if no termination is provided, better provide a reasonable default value
if _, ok := ctx.Deadline(); !ok && retries == 0 |
return &Backoff{MaxRetries: retries, ctx: ctx, cancel: cancel}
}
// Ongoing returns true if caller should keep going
func (b *Backoff) Ongoing() bool {
return b.ctx.Err() == nil && (b.MaxRetries == 0 || b.numRetries < b.MaxRetries)
}
// Err returns the reason for terminating the backoff, or nil if it didn't terminate
func (b *Backoff) Err() error {
if b.ctx.Err() != nil {
return b.ctx.Err()
}
if b.MaxRetries != 0 && b.numRetries >= b.MaxRetries {
return fmt.Errorf("terminated after %d retries", b.numRetries)
}
return nil
}
// NumRetries returns the number of retries so far
func (b *Backoff) NumRetries() int { return b.numRetries }
// Wait sleeps for the backoff time then increases the retry count and backoff time
// Returns immediately if Context is terminated
func (b *Backoff) Wait() {
if b.Ongoing() {
select {
case <-b.ctx.Done():
if b.cancel != nil {
b.cancel()
}
case <-time.After(b.NextDelay()):
}
}
}
// WaitFor can be used to wait for a specific duration, for example if the duration is provided by
// the remote API. Calling this method does increase the backoff, just like a regular Wait call.
func (b *Backoff) WaitFor(d time.Duration) {
// as a special case, we handle no duration so users don’t need to worry about it.
if d == 0 {
b.Wait()
return
}
if b.Ongoing() {
b.numRetries++
select {
case <-b.ctx.Done():
if b.cancel != nil {
b.cancel()
}
case <-time.After(d + time.Duration(rand.Intn(maxmilli))*time.Millisecond):
}
}
}
// 1000 millisecond seems a reasonable max jitter
// https://cloud.google.com/iot/docs/how-tos/exponential-backoff
const maxmilli = 1000
func (b *Backoff) NextDelay() time.Duration {
b.numRetries++
b.nextDelay = (1<<b.numRetries)*time.Second + time.Duration(rand.Intn(maxmilli))*time.Millisecond
return b.nextDelay
}
| {
ctx, cancel = context.WithTimeout(ctx, 64*time.Second)
} |
Store.js | 'use strict';
import _ from 'lodash';
import Application from './Application.js';
var instancesCount = 0;
/**
* Store base class for the flux architecture
*
* @class
*/
export default class Store {
/**
* Class constructor
*
* @param {Application} app
* @param {Object} handlers
*/
constructor(app, handlers = {}) {
if (app) {
this.app = app;
}
this.handlers = _.assign(this.getActionHandlers(), handlers);
this._instanceId = ++instancesCount;
this.initialize();
}
/**
* Hook to do some init work that gets called just after the parents constructor has been called
*/
initialize() {}
/**
* Hook that will save the state of the store between server and client
*
* @return {Object}
*/
saveState() {
throw new Error('The server state of this store is not been saved to be reused on the client');
}
/**
* Hook that will restore the state of the store between server and client
*
* @param {Object} state
*/
restoreState(state) {
throw new Error('The server state of this store is not been restored on the client');
}
/**
* Hook that will register the action handlers of the child stores on instantiation
*
* @return {Object}
*/
getActionHandlers() {
return {};
}
/**
* Return the name of the store for dependency container use
*
* @return {string}
*/
get name() {
throw new Error('You have to implement a getter with the signature "get name()" returning the store identifier.');
}
/**
* Sets the action handlers by passing a hash object.
* The key being the action name and
* the value being a function on name of a function of this class.
*
* @param {Object} handlers
*/
set handlers(handlers) {
let app = this.app;
if (app && this._handlers) {
removeActionHandlers(this);
}
this._handlers = handlers;
if (app) registerActionHandlers(this);
}
/**
* Returns the handlers hash object.
*
* @return {Object}
*/
get handlers() {
return this._handlers;
}
/**
* Sets the app that will take care of action dispatching in the store.
*
* @param {Object} app
*/
set app(app) {
let handlers = this.handlers;
if (!(app instanceof Application)) {
throw new Error('Parameter must be an instance of Application.')
}
if (handlers && this._app) {
removeActionHandlers(this);
}
this._app = app;
if (handlers) registerActionHandlers(this);
}
/**
* Gets the app instance.
*
* @return {Object}
*/
get app() {
return this._app;
}
/**
* Gets the change action name for this store instance
*
* @return {string}
*/
get changeActionName() {
return `CHANGE_ACTION.${this.name}.${this._instanceId}`;
}
/**
* Register a listener that will be notified of every change that happens in the store
*
* @param {Function} listener
*/
registerListener(listener) {
this.app.on(this.changeActionName, listener);
}
/**
* Removes the change listener waiting for changes of this store
*
* @param {Function} listener
*/
removeListener(listener) {
this.app.off(this.changeActionName, listener);
}
/**
* Notifies all listeners that the store has changed its state
*/
emitChanges() {
this.app.emit(this.changeActionName, this);
}
/**
* Returns a indicator telling if this has listeners
* | hasListeners() {
return this.app.hasListeners(this.changeActionName);
}
}
/**
* Registers the action handlers contained in the store
*
* @param {Store} store
*/
function registerActionHandlers(store) {
_.forOwn(store.handlers, (handler, actionName) => {
store.app.on(actionName, _.isFunction(handler) ? handler : _.bind(store[handler], store));
});
}
/**
* Removes the action handlers contained in the store
*
* @param {Store} store
*/
function removeActionHandlers(store) {
_.forOwn(store.handlers, (handler, actionName) => {
store.app.off(actionName, _.isFunction(handler) ? handler : _.bind(store[handler], store));
});
} | * @return {boolean}
*/ |
run.go | package shell
import (
"bufio"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strconv"
"strings"
"time"
)
// RunCommand - The run command structure
// Note: Run is a command that recurses, so values here can be changed by
// nested RUNs. Use local variables for data to be preserved.
type RunCommand struct {
running int
interrupted bool
header *bool
list *bool
ifCondition *string
stepOption *bool
iterationOption *int
execOption *bool
// Note: nesting makes count only valid from end of execute and calling CommandCount() immediately
count int
}
var DefaultScriptExtension = ".rshell"
func NewRunCommand() *RunCommand {
cmd := RunCommand{running: 0}
return &cmd
}
func (cmd *RunCommand) AddOptions(set CmdSet) {
set.SetParameters("scripts...")
cmd.ifCondition = set.StringLong("cond", 0, "", "run script if specified variable is not empty or matches optional value (k[=v])")
cmd.list = set.BoolLong("list", 0, "List the contexts of script file")
cmd.header = set.BoolLong("header", 0, "Display header of script file (Leading REM commands)")
cmd.stepOption = set.BoolLong("step", 0, "Single step through script")
cmd.iterationOption = set.IntLong("iterations", 'i', 1, "run the script iteration number of times")
cmd.execOption = set.BoolLong("exec", 0, "Execute quoted parameters as script commands")
AddCommonCmdOptions(set, CmdDebug, CmdVerbose, CmdSilent)
}
// ValidateScriptExists -- Validate the script exists by either basename or basename plus suffix
// return the file name modified with extension if necesssary.
// If the error is not a file existence problem the file is returned.
func ValidateScriptExists(file string) (string, error) {
if len(file) == 0 {
return "", errors.New("The file does not exist")
}
_, err := os.Stat(file)
if os.IsNotExist(err) {
if !strings.HasSuffix(strings.ToLower(file), DefaultScriptExtension) {
if _, err2 := os.Stat(file + DefaultScriptExtension); err2 == nil {
file = file + DefaultScriptExtension
err = err2
if IsCmdDebugEnabled() {
fmt.Fprintf(ConsoleWriter(), "Appending extension to file name: %s\n", file)
}
}
}
if os.IsNotExist(err) { // Still not exists
if IsCmdDebugEnabled() {
fmt.Fprintf(ConsoleWriter(), "Unable to open file: %s\n", file)
}
return "", errors.New("The file does not exist")
}
}
if err != nil {
return file, errors.New("Error accessing file")
}
return file, nil
}
func (cmd *RunCommand) executeFile(file string, runSilent bool) (count int, elapsed time.Duration, result error) {
count = 0
elapsed = 0
var path = ""
{
scriptFile, err := ValidateScriptExists(file)
if err != nil {
if runSilent {
// We do not care about file existance issues
return 0, 0, nil
}
return 0, 0, err
}
if abspath, err := filepath.Abs(scriptFile); err == nil {
path = filepath.Dir(abspath)
}
file = scriptFile
}
if IsCmdDebugEnabled() || IsCmdVerboseEnabled() {
fmt.Fprintf(ConsoleWriter(), "Processing file: %s\n", file)
}
h, err := os.Open(file)
if err != nil {
return 0, 0, errors.New("Failed to read script file: " + err.Error())
}
curdir, err := os.Getwd()
if err != nil {
curdir = ""
}
cmd.running = cmd.running + 1
defer func(f *os.File, c *RunCommand) {
f.Close()
c.running = c.running - 1
if c.running < 0 {
c.running = 0
}
if len(curdir) > 0 {
if IsCmdDebugEnabled() {
fmt.Fprintln(ConsoleWriter(), "RUN resetting working directory: ", curdir)
}
os.Chdir(curdir)
}
}(h, cmd)
if len(curdir) > 0 && len(path) > 0 {
if IsCmdDebugEnabled() {
fmt.Fprintln(ConsoleWriter(), "RUN setting working directory: ", path)
}
os.Chdir(path)
}
if *cmd.header || *cmd.list {
listfile(h, *cmd.header)
return 0, 0, nil
}
startTime := time.Now()
commands, success := CommandProcessor("", h, *cmd.stepOption, true)
elapsed = time.Since(startTime)
if !success {
return commands, elapsed, errors.New("Command processor failed")
}
return commands, elapsed, nil
}
func (cmd *RunCommand) executeStream(r io.Reader, runSilent bool) (count int, elapsed time.Duration, result error) {
count = 0
elapsed = 0 | cmd.running = cmd.running + 1
defer func(c *RunCommand) {
c.running = c.running - 1
if c.running < 0 {
c.running = 0
}
}(cmd)
if *cmd.header || *cmd.list {
listfile(r, *cmd.header)
return 0, 0, nil
}
startTime := time.Now()
commands, success := CommandProcessor("", r, *cmd.stepOption, true)
elapsed = time.Since(startTime)
if !success {
return commands, elapsed, errors.New("Command processor failed")
}
return commands, elapsed, nil
}
func (cmd *RunCommand) Execute(args []string) error {
// Cache silent config as it changes with script running
cmd.count = 0
cmd.interrupted = false
iterations := *cmd.iterationOption
if len(args) == 0 {
return errors.New("Need to specify at least one file to run")
}
if cmd.running > 3 {
return errors.New("Too many nested scripts script")
}
if verifyCondition(*cmd.ifCondition) == false {
fmt.Fprintf(OutputWriter(), "Run command aborting; missing required condition: %s.\n", *cmd.ifCondition)
return nil
}
runSilent := IsCmdSilentEnabled() || *cmd.list
i := iterations
var result error
var commands int
var duration time.Duration
var execLocal = *cmd.execOption
resultMsg := "Success"
for ; i > 0; i-- {
if execLocal {
str := strings.Join(args, "\n")
r := strings.NewReader(str)
count, elapsed, err := cmd.executeStream(r, runSilent)
commands = commands + count
duration = duration + elapsed
if err != nil {
result = err
resultMsg = err.Error()
break
}
} else {
for _, fileName := range args {
count, elapsed, err := cmd.executeFile(fileName, runSilent)
commands = commands + count
duration = duration + elapsed
if err != nil {
if len(args) > 1 {
fmt.Fprintf(ErrorWriter(), "Aborting due to errors in script: %s\n", fileName)
}
// TBD: How fatal should "not exists" be considered in list of files
return err
}
}
}
}
cmd.count = commands
if result == nil && LastError != 0 {
resultMsg = "Last command returned an error"
}
if !runSilent || IsCmdDebugEnabled() {
if iterations > 1 {
fmt.Fprintf(OutputWriter(), "Ran %d commands in %s over %d iterations. Exited with %s\n",
commands, getDurationString(duration), iterations, resultMsg)
} else {
fmt.Fprintf(OutputWriter(), "Ran %d commands in %s. Exited with %s\n",
commands, getDurationString(duration), resultMsg)
}
}
return nil
}
func (cmd *RunCommand) DoNotCount() bool {
return true
}
func (cmd *RunCommand) DoNotClearError() bool {
return true
}
func (cmd *RunCommand) CommandCount() int {
return cmd.count
}
func (cmd *RunCommand) Abort() {
cmd.interrupted = true
}
func getDurationString(duration time.Duration) string {
if duration > 2*time.Second {
return strconv.FormatFloat(float64(duration)/float64(time.Second), 'f', 3, 64) + "s"
} else {
return strconv.FormatFloat(float64(duration)/float64(time.Millisecond), 'f', 1, 64) + "ms"
}
}
func verifyCondition(variable string) bool {
variable = strings.TrimSpace(variable)
if len(variable) <= 0 {
return true
}
parts := strings.Split(variable, "=")
variable = parts[0]
value := GetGlobal(variable)
if str, ok := value.(string); ok {
str = strings.TrimSpace(str)
if len(parts) > 1 {
if str == strings.TrimSpace(parts[1]) {
return true
}
return false
} else {
if len(str) > 0 {
return true
}
}
} else if value != nil {
return true
} else if len(parts) > 1 && len(strings.TrimSpace(parts[1])) == 0 {
return true
}
return false
}
func listfile(reader io.Reader, onlyHeader bool) {
scanner := bufio.NewScanner(reader)
quit := false
for !quit && scanner.Scan() {
input := scanner.Text()
// Get first token for special handling
args := strings.SplitN(strings.TrimSpace(input), " ", 2)
command := ""
if len(args) > 0 {
command = strings.ToUpper(args[0])
}
if strings.HasPrefix(command, "REM") || onlyHeader == false {
fmt.Fprintln(OutputWriter(), input)
} else {
quit = true
}
}
if err := scanner.Err(); err != nil {
fmt.Fprintf(ErrorWriter(), "Scanner error %s\n", err.Error())
}
} | |
pathclient.spec.ts | import { record } from "@azure/test-utils-recorder";
import * as assert from "assert";
import * as dotenv from "dotenv";
import {
DataLakeFileClient,
DataLakeFileSystemClient,
PathAccessControlItem,
DataLakeServiceClient
} from "../../src";
import { PathPermissions } from "../../src/models";
import { getDataLakeServiceClient, recorderEnvSetup } from "../utils";
dotenv.config({ path: "../.env" });
describe("DataLakePathClient Node.js only", () => {
let fileSystemName: string;
let fileSystemClient: DataLakeFileSystemClient;
let fileName: string;
let fileClient: DataLakeFileClient;
const content = "Hello World";
let serviceClient: DataLakeServiceClient;
let recorder: any;
beforeEach(async function() {
recorder = record(this, recorderEnvSetup);
serviceClient = getDataLakeServiceClient();
fileSystemName = recorder.getUniqueName("filesystem");
fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
await fileSystemClient.create();
fileName = recorder.getUniqueName("file");
fileClient = fileSystemClient.getFileClient(fileName);
await fileClient.create();
await fileClient.append(content, 0, content.length);
await fileClient.flush(content.length);
});
afterEach(async function() {
await fileSystemClient.delete();
recorder.stop();
});
it("setAccessControl", async () => {
const acl: PathAccessControlItem[] = [
{
accessControlType: "user",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: true,
execute: true
}
},
{
accessControlType: "group",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: false,
execute: true
}
},
{
accessControlType: "other",
entityId: "",
defaultScope: false,
permissions: {
read: false,
write: true,
execute: false
}
}
];
await fileClient.setAccessControl(acl);
const permissions = await fileClient.getAccessControl();
assert.deepStrictEqual(permissions.owner, "$superuser");
assert.deepStrictEqual(permissions.group, "$superuser");
assert.deepStrictEqual(permissions.permissions, {
extendedAcls: false,
stickyBit: false,
owner: {
read: true,
write: true,
execute: true
},
group: {
read: true,
write: false,
execute: true
},
other: {
read: false,
write: true,
execute: false
}
});
assert.deepStrictEqual(permissions.acl, acl);
});
it("setAccessControl with all parameters", async () => {
const acl: PathAccessControlItem[] = [
{
accessControlType: "user",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: true,
execute: true
}
},
{
accessControlType: "group",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: false,
execute: true
}
},
{
accessControlType: "other",
entityId: "",
defaultScope: false,
permissions: {
read: false,
write: true,
execute: false
}
}
];
await fileClient.setAccessControl(acl, {
owner: "$superuser",
group: "$superuser"
});
const permissions = await fileClient.getAccessControl();
assert.deepStrictEqual(permissions.owner, "$superuser");
assert.deepStrictEqual(permissions.group, "$superuser");
assert.deepStrictEqual(permissions.permissions, {
extendedAcls: false,
stickyBit: false,
owner: {
read: true,
write: true,
execute: true
},
group: {
read: true,
write: false,
execute: true
},
other: {
read: false,
write: true,
execute: false
} | });
assert.deepStrictEqual(permissions.acl, acl);
});
it("setPermissions", async () => {
const acl: PathAccessControlItem[] = [
{
accessControlType: "user",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: true,
execute: false
}
},
{
accessControlType: "group",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: false,
execute: true
}
},
{
accessControlType: "other",
entityId: "",
defaultScope: false,
permissions: {
read: false,
write: true,
execute: true
}
}
];
const permissions: PathPermissions = {
extendedAcls: false,
stickyBit: true,
owner: {
read: true,
write: true,
execute: false
},
group: {
read: true,
write: false,
execute: true
},
other: {
read: false,
write: true,
execute: false
}
};
await fileClient.setPermissions(permissions);
const response = await fileClient.getAccessControl();
assert.deepStrictEqual(response.owner, "$superuser");
assert.deepStrictEqual(response.group, "$superuser");
assert.deepStrictEqual(response.permissions, {
...permissions,
other: { ...permissions.other, execute: true }
});
assert.deepStrictEqual(response.acl, acl);
});
it("setPermissions with all parameters", async () => {
const acl: PathAccessControlItem[] = [
{
accessControlType: "user",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: true,
execute: false
}
},
{
accessControlType: "group",
entityId: "",
defaultScope: false,
permissions: {
read: true,
write: false,
execute: true
}
},
{
accessControlType: "other",
entityId: "",
defaultScope: false,
permissions: {
read: false,
write: true,
execute: true
}
}
];
const permissions: PathPermissions = {
extendedAcls: false,
stickyBit: true,
owner: {
read: true,
write: true,
execute: false
},
group: {
read: true,
write: false,
execute: true
},
other: {
read: false,
write: true,
execute: false
}
};
await fileClient.setPermissions(permissions, { owner: "$superuser", group: "$superuser" });
const response = await fileClient.getAccessControl();
assert.deepStrictEqual(response.owner, "$superuser");
assert.deepStrictEqual(response.group, "$superuser");
assert.deepStrictEqual(response.permissions, {
...permissions,
other: { ...permissions.other, execute: true }
});
assert.deepStrictEqual(response.acl, acl);
});
it("move", async () => {
const destFileName = recorder.getUniqueName("destfile");
const destFileClient = fileSystemClient.getFileClient(destFileName);
await fileClient.move(destFileName);
await destFileClient.getProperties();
});
it("move cross file system", async () => {
const destFileSystemName = recorder.getUniqueName("destfilesystem");
const destFileSystemClient = serviceClient.getFileSystemClient(destFileSystemName);
await destFileSystemClient.create();
const destFileName = recorder.getUniqueName("destfile");
const destFileClient = destFileSystemClient.getFileClient(destFileName);
await fileClient.move(destFileSystemName, destFileName);
await destFileClient.getProperties();
await destFileSystemClient.delete();
});
}); | |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AngularFireAuthModule } from 'angularfire2/auth';
import { AngularFireModule } from 'angularfire2';
import { AppComponent } from './app.component';
import { AppRoutingModule } from './/app-routing.module';
import { environment } from '../environments/environment';
import { AngularFirestoreModule } from 'angularfire2/firestore';
import { LoginComponent } from './component/login/login.component';
import { FormsModule} from '@angular/forms';
import { AddAccountListComponent } from './component/login/add-account-list/add-account-list.component';
import { AccountComponent } from './component/account/account.component';
import { PartyComponent } from './component/party/party.component';
import { SearchComponent } from './component/search/search.component';
import { CheckComponent } from './component/party/check/check.component';
import { LobbyComponent } from './component/lobby/lobby.component';
@NgModule({
declarations: [
AppComponent,
LoginComponent,
AddAccountListComponent,
AccountComponent,
PartyComponent,
SearchComponent,
CheckComponent,
LobbyComponent
],
imports: [
BrowserModule,
AppRoutingModule,
AngularFireAuthModule,
AngularFireModule.initializeApp(environment.firebase),
AngularFirestoreModule,
FormsModule
],
providers: [],
bootstrap: [AppComponent]
})
export class | { }
| AppModule |
layers.py | ''' Layers
This file contains various layers for the BigGAN models.
'''
import numpy as np
import paddorch as torch
import paddorch.nn as nn
from paddorch.nn import init
import paddorch.optim as optim
import paddorch.nn.functional as F
from paddorch.nn import Parameter as P
# Projection of x onto y
def proj(x, y):
return torch.mm(y, x.t()) * y / torch.mm(y, y.t())
# Orthogonalize x wrt list of vectors ys
def gram_schmidt(x, ys):
for y in ys:
x = x - proj(x, y)
return x
# Apply num_itrs steps of the power method to estimate top N singular values.
def power_iteration(W, u_, update=True, eps=1e-12):
# Lists holding singular vectors and values
Wt=torch.Tensor(W).t()
us, vs, svs = [], [], []
for i, u in enumerate(u_):
# Run one step of the power iteration
with torch.no_grad():
if W.shape[1] == 27:
a = 1
v = torch.matmul(u, W)
# if (W.shape[0]==u.shape[1]) :
# v = torch.matmul(u, W)
# else:
# v = torch.matmul(u, Wt)
# Run Gram-Schmidt to subtract components of all other singular vectors
v = F.normalize(gram_schmidt(v, vs), eps=eps)
# Add to the list
vs += [v]
# Update the other singular vector
u = torch.matmul(v, Wt)
# if (W.shape[0]!=v.shape[1]):
# u = torch.matmul(v, Wt )
# else:
# u = torch.matmul(v, W)
# Run Gram-Schmidt to subtract components of all other singular vectors
u = F.normalize(gram_schmidt(u, us), eps=eps)
# Add to the list
us += [u]
if update:
torch.copy(u,u_[i])
# u_[i][:] = u
# Compute this singular value and add it to the list
svs += [torch.squeeze(torch.matmul(torch.matmul(v, Wt), u.t()))]
# if (W.shape[0]!=v.shape[1]):
# svs += [torch.squeeze(torch.matmul(torch.matmul(v, Wt ), u.t() ))]
# else:
# svs += [torch.squeeze(torch.matmul(torch.matmul(v, W), u.t()))]
#svs += [torch.sum(F.linear(u, W.transpose(0, 1)) * v)]
return svs, us, vs
# Convenience passthrough function
class identity(nn.Module):
def | (self, input):
return input
# Spectral normalization base class
class SN(object):
def __init__(self, num_svs, num_itrs, num_outputs, transpose=False, eps=1e-12):
# Number of power iterations per step
self.num_itrs = num_itrs
# Number of singular values
self.num_svs = num_svs
# Transposed?
self.transpose = transpose
# Epsilon value for avoiding divide-by-0
self.eps = eps
self.register_buffer=dict()
# Register a singular vector for each sv
self.name="%d_%d_%d"%(num_svs, num_itrs, num_outputs)
for i in range(self.num_svs):
self.__setattr__('u%d' % i,torch.nn.Parameter(torch.randn(1, num_outputs)))
self.__setattr__('sv%d' % i, torch.nn.Parameter(torch.ones(1)))
# self.register_buffer['u%d' % i]=
# self.register_buffer['sv%d' % i]= torch.ones(1)
# Singular vectors (u side)
@property
def u(self):
DD=[self.state_dict()['u%d' % i] for i in range(self.num_svs)]
return DD
# return [self.register_buffer['u%d' % i] for i in range(self.num_svs)]
# Singular values;
# note that these buffers are just for logging and are not used in training.
@property
def sv(self):
return [self.state_dict()['sv%d' % i] for i in range(self.num_svs)]
# return [self.register_buffer['sv%d' % i] for i in range(self.num_svs)]
# Compute the spectrally-normalized weight
def W_(self):
self.training=True
if isinstance(self,SNLinear):
W_mat = torch.Tensor(self.weight).t() ##linear layer weight is different from pytorch weight, need to transpose
else:
W_mat = torch.Tensor(self.weight).view(self.weight.shape[0], -1)
if self.transpose:
W_mat = W_mat.t()
# Apply num_itrs power iterations
for _ in range(self.num_itrs):
svs, us, vs = power_iteration(W_mat, self.u, update=self.training, eps=self.eps)
# Update the svs
if self.training:
with torch.no_grad(): # Make sure to do this in a no_grad() context or you'll get memory leaks!
for i, sv in enumerate(svs):
torch.copy(sv,self.sv[i])
# self.sv[i][:] = sv
return self.weight / svs[0]
# 2D Conv layer with spectral norm
class SNConv2d(nn.Conv2d, SN):
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, bias=True,
num_svs=1, num_itrs=1, eps=1e-12):
nn.Conv2d.__init__(self, in_channels, out_channels, kernel_size, stride,
padding, dilation, groups, bias)
SN.__init__(self, num_svs, num_itrs, out_channels, eps=eps)
self.stride=stride
self.dilation=dilation
self.groups=groups
self.padding=padding
def forward(self, x):
return F.conv2d(x, self.W_(), self.bias, self.stride,
self.padding, self.dilation, self.groups)
# Linear layer with spectral norm
class SNLinear(nn.Linear, SN):
def __init__(self, in_features, out_features, bias=True,
num_svs=1, num_itrs=1, eps=1e-12):
nn.Linear.__init__(self, in_features, out_features, bias)
SN.__init__(self, num_svs, num_itrs, out_features, eps=eps)
def forward(self, x):
return F.linear(x, self.W_(), self.bias)
# Embedding layer with spectral norm
# We use num_embeddings as the dim instead of embedding_dim here
# for convenience sake
class SNEmbedding(nn.Embedding, SN):
def __init__(self, num_embeddings, embedding_dim, padding_idx=None,
max_norm=None, norm_type=2, scale_grad_by_freq=False,
sparse=False, _weight=None,
num_svs=1, num_itrs=1, eps=1e-12):
nn.Embedding.__init__(self, num_embeddings, embedding_dim, padding_idx,
max_norm, norm_type, scale_grad_by_freq,
sparse, _weight)
SN.__init__(self, num_svs, num_itrs, num_embeddings, eps=eps)
def forward(self, x):
return F.embedding(x ,self.W_())
# A non-local block as used in SA-GAN
# Note that the implementation as described in the paper is largely incorrect;
# refer to the released code for the actual implementation.
class Attention(nn.Module):
def __init__(self, ch, which_conv=SNConv2d, name='attention'):
super(Attention, self).__init__()
# Channel multiplier
self.ch = ch
self.which_conv = which_conv
self.theta = self.which_conv(self.ch, self.ch // 8, kernel_size=1, padding=0, bias=False)
self.phi = self.which_conv(self.ch, self.ch // 8, kernel_size=1, padding=0, bias=False)
self.g = self.which_conv(self.ch, self.ch // 2, kernel_size=1, padding=0, bias=False)
self.o = self.which_conv(self.ch // 2, self.ch, kernel_size=1, padding=0, bias=False)
# Learnable gain parameter
self.gamma = P(torch.tensor(0.), requires_grad=True)
def forward(self, x, y=None):
# Apply convs
theta = self.theta(x)
phi = F.max_pool2d(self.phi(x), [2,2])
g = F.max_pool2d(self.g(x), [2,2])
# Perform reshapes
theta = theta.view(-1, self. ch // 8, x.shape[2] * x.shape[3])
phi = phi.view(-1, self. ch // 8, x.shape[2] * x.shape[3] // 4)
g = g.view(-1, self. ch // 2, x.shape[2] * x.shape[3] // 4)
# Matmul and softmax to get attention maps
beta = F.softmax(torch.bmm(theta.transpose(1, 2), phi), -1)
# Attention map times g path
o = self.o(torch.bmm(g, beta.transpose(1,2)).view(-1, self.ch // 2, x.shape[2], x.shape[3]))
return self.gamma * o + x
# Fused batchnorm op
def fused_bn(x, mean, var, gain=None, bias=None, eps=1e-5):
# Apply scale and shift--if gain and bias are provided, fuse them here
# Prepare scale
scale = torch.rsqrt(var + eps)
# If a gain is provided, use it
if gain is not None:
scale = scale * gain
# Prepare shift
shift = mean * scale
# If bias is provided, use it
if bias is not None:
shift = shift - bias
return x * scale - shift
#return ((x - mean) / ((var + eps) ** 0.5)) * gain + bias # The unfused way.
# Manual BN
# Calculate means and variances using mean-of-squares minus mean-squared
def manual_bn(x, gain=None, bias=None, return_mean_var=False, eps=1e-5):
# Cast x to float32 if necessary
float_x = x.float()
# Calculate expected value of x (m) and expected value of x**2 (m2)
# Mean of x
m = torch.mean(float_x, [0, 2, 3], keepdim=True)
# Mean of x squared
m2 = torch.mean(float_x ** 2, [0, 2, 3], keepdim=True)
# Calculate variance as mean of squared minus mean squared.
var = (m2 - m **2)
# Cast back to float 16 if necessary
var = var.type(x.type())
m = m.type(x.type())
# Return mean and variance for updating stored mean/var if requested
if return_mean_var:
return fused_bn(x, m, var, gain, bias, eps), m.squeeze(), var.squeeze()
else:
return fused_bn(x, m, var, gain, bias, eps)
# My batchnorm, supports standing stats
class myBN(nn.Module):
def __init__(self, num_channels, eps=1e-5, momentum=0.1):
super(myBN, self).__init__()
# momentum for updating running stats
self.momentum = momentum
# epsilon to avoid dividing by 0
self.eps = eps
# Momentum
self.momentum = momentum
# Register buffers
self.stored_mean= torch.nn.Parameter( torch.zeros(num_channels))
self.stored_var= torch.nn.Parameter( torch.ones(num_channels))
self.accumulation_counter= torch.nn.Parameter( torch.zeros(1))
# Accumulate running means and vars
self.accumulate_standing = False
# reset standing stats
def reset_stats(self):
self.stored_mean[:] = 0
self.stored_var[:] = 0
self.accumulation_counter[:] = 0
def forward(self, x, gain, bias):
if self.training:
out, mean, var = manual_bn(x, gain, bias, return_mean_var=True, eps=self.eps)
# If accumulating standing stats, increment them
if self.accumulate_standing:
self.stored_mean[:] = self.stored_mean + mean.data
self.stored_var[:] = self.stored_var + var.data
self.accumulation_counter += 1.0
# If not accumulating standing stats, take running averages
else:
self.stored_mean[:] = self.stored_mean * (1 - self.momentum) + mean * self.momentum
self.stored_var[:] = self.stored_var * (1 - self.momentum) + var * self.momentum
return out
# If not in training mode, use the stored statistics
else:
mean = self.stored_mean.view(1, -1, 1, 1)
var = self.stored_var.view(1, -1, 1, 1)
# If using standing stats, divide them by the accumulation counter
if self.accumulate_standing:
mean = mean / self.accumulation_counter
var = var / self.accumulation_counter
return fused_bn(x, mean, var, gain, bias, self.eps)
# Simple function to handle groupnorm norm stylization
def groupnorm(x, norm_style):
# If number of channels specified in norm_style:
if 'ch' in norm_style:
ch = int(norm_style.split('_')[-1])
groups = max(int(x.shape[1]) // ch, 1)
# If number of groups specified in norm style
elif 'grp' in norm_style:
groups = int(norm_style.split('_')[-1])
# If neither, default to groups = 16
else:
groups = 16
return F.group_norm(x, groups)
# Class-conditional bn
# output size is the number of channels, input size is for the linear layers
# Andy's Note: this class feels messy but I'm not really sure how to clean it up
# Suggestions welcome! (By which I mean, refactor this and make a pull request
# if you want to make this more readable/usable).
class ccbn(nn.Module):
def __init__(self, output_size, input_size, which_linear, eps=1e-5, momentum=0.1,
cross_replica=False, mybn=False, norm_style='bn',):
super(ccbn, self).__init__()
self.output_size, self.input_size = output_size, input_size
# Prepare gain and bias layers
self.gain = which_linear(input_size, output_size)
self.bias = which_linear(input_size, output_size)
# epsilon to avoid dividing by 0
self.eps = eps
# Momentum
self.momentum = momentum
# Use cross-replica batchnorm?
self.cross_replica = cross_replica
# Use my batchnorm?
self.mybn = mybn
# Norm style?
self.norm_style = norm_style
if self.cross_replica:
self.bn = SyncBN2d(output_size, eps=self.eps, momentum=self.momentum, affine=False)
elif self.mybn:
self.bn = myBN(output_size, self.eps, self.momentum)
elif self.norm_style in ['bn', 'in']:
self.stored_mean=torch.nn.Parameter(torch.zeros(output_size))
self.stored_var=torch.nn.Parameter(torch.ones(output_size))
def forward(self, x, y):
# Calculate class-conditional gains and biases
gain = torch.Tensor(1 + self.gain(y)).view(y.size(0), -1, 1, 1)
bias = torch.Tensor(self.bias(y)).view(y.size(0), -1, 1, 1)
# If using my batchnorm
if self.mybn or self.cross_replica:
return self.bn(x, gain=gain, bias=bias)
# else:
else:
if self.norm_style == 'bn':
out = F.batch_norm(x, self.stored_mean, self.stored_var, None, None,
self.training, 0.1, self.eps)
elif self.norm_style == 'in':
out = F.instance_norm(x, self.stored_mean, self.stored_var, None, None,
self.training, 0.1, self.eps)
elif self.norm_style == 'gn':
out = groupnorm(x, self.normstyle)
elif self.norm_style == 'nonorm':
out = x
return out * gain + bias
def extra_repr(self):
s = 'out: {output_size}, in: {input_size},'
s +=' cross_replica={cross_replica}'
return s.format(**self.__dict__)
# Normal, non-class-conditional BN
class bn(nn.Module):
def __init__(self, output_size, eps=1e-5, momentum=0.1,
cross_replica=False, mybn=False):
super(bn, self).__init__()
self.output_size= output_size
# Prepare gain and bias layers
self.gain = torch.nn.Parameter(output_size,1.0)
self.bias = torch.nn.Parameter(output_size,0.0)
# epsilon to avoid dividing by 0
self.eps = eps
# Momentum
self.momentum = momentum
# Use cross-replica batchnorm?
self.cross_replica = cross_replica
# Use my batchnorm?
self.mybn = mybn
if self.cross_replica:
self.bn = SyncBN2d(output_size, eps=self.eps, momentum=self.momentum, affine=False)
elif mybn:
self.bn = myBN(output_size, self.eps, self.momentum)
# Register buffers if neither of the above
else:
self.stored_mean = torch.nn.Parameter(torch.zeros(output_size) )
self.stored_var = torch.nn.Parameter(torch.ones(output_size))
def forward(self, x, y=None):
if self.cross_replica or self.mybn:
gain = self.gain.view(1,-1,1,1)
bias = self.bias.view(1,-1,1,1)
return self.bn(x, gain=gain, bias=bias)
else:
return F.batch_norm(x, self.stored_mean, self.stored_var, self.gain,
self.bias, self.training, self.momentum, self.eps)
# Generator blocks
# Note that this class assumes the kernel size and padding (and any other
# settings) have been selected in the main generator module and passed in
# through the which_conv arg. Similar rules apply with which_bn (the input
# size [which is actually the number of channels of the conditional info] must
# be preselected)
class GBlock(nn.Module):
def __init__(self, in_channels, out_channels,
which_conv=nn.Conv2d, which_bn=bn, activation=None,
upsample=None):
super(GBlock, self).__init__()
self.in_channels, self.out_channels = in_channels, out_channels
self.which_conv, self.which_bn = which_conv, which_bn
self.activation = activation
self.upsample = upsample
# Conv layers
self.conv1 = self.which_conv(self.in_channels, self.out_channels)
self.conv2 = self.which_conv(self.out_channels, self.out_channels)
self.learnable_sc = in_channels != out_channels or upsample
if self.learnable_sc:
self.conv_sc = self.which_conv(in_channels, out_channels,
kernel_size=1, padding=0)
# Batchnorm layers
self.bn1 = self.which_bn(in_channels)
self.bn2 = self.which_bn(out_channels)
# upsample layers
self.upsample = upsample
def forward(self, x, y):
h = self.activation(self.bn1(x, y))
if self.upsample:
h = self.upsample(h)
x = self.upsample(x)
h = self.conv1(h)
h = self.activation(self.bn2(h, y))
h = self.conv2(h)
if self.learnable_sc:
x = self.conv_sc(x)
return h + x
# Residual block for the discriminator
class DBlock(nn.Module):
def __init__(self, in_channels, out_channels, which_conv=SNConv2d, wide=True,
preactivation=False, activation=None, downsample=None,):
super(DBlock, self).__init__()
self.in_channels, self.out_channels = in_channels, out_channels
# If using wide D (as in SA-GAN and BigGAN), change the channel pattern
self.hidden_channels = self.out_channels if wide else self.in_channels
self.which_conv = which_conv
self.preactivation = preactivation
self.activation = activation
self.downsample = downsample
# Conv layers
self.conv1 = self.which_conv(self.in_channels, self.hidden_channels)
self.conv2 = self.which_conv(self.hidden_channels, self.out_channels)
self.learnable_sc = True if (in_channels != out_channels) or downsample else False
if self.learnable_sc:
self.conv_sc = self.which_conv(in_channels, out_channels,
kernel_size=1, padding=0)
def shortcut(self, x):
if self.preactivation:
if self.learnable_sc:
x = self.conv_sc(x)
if self.downsample:
x = self.downsample(x)
else:
if self.downsample:
x = self.downsample(x)
if self.learnable_sc:
x = self.conv_sc(x)
return x
def forward(self, x):
if self.preactivation:
# h = self.activation(x) # NOT TODAY SATAN
# Andy's note: This line *must* be an out-of-place ReLU or it
# will negatively affect the shortcut connection.
h = F.relu(x)
else:
h = x
h = self.conv1(h)
h = self.conv2(self.activation(h))
if self.downsample:
h = self.downsample(h)
return h + self.shortcut(x)
# dogball | forward |
edit-category.component.ts | import { IHelpCenter } from '@gauzy/contracts';
import { Component, OnDestroy, Input } from '@angular/core';
import { NbDialogRef } from '@nebular/theme';
import { Subject } from 'rxjs';
import { TranslateService } from '@ngx-translate/core';
import { TranslationBaseComponent } from '../../language-base/translation-base.component';
import { FormGroup, FormBuilder, Validators } from '@angular/forms';
import { HelpCenterService } from '../../../@core/services/help-center.service';
@Component({
selector: 'ga-edit-category',
templateUrl: 'edit-category.component.html',
styleUrls: ['edit-category.component.scss']
})
export class |
extends TranslationBaseComponent
implements OnDestroy {
private _ngDestroy$ = new Subject<void>();
constructor(
protected dialogRef: NbDialogRef<EditCategoryComponent>,
readonly translateService: TranslateService,
private helpCenterService: HelpCenterService,
private readonly fb: FormBuilder
) {
super(translateService);
}
@Input() category?: IHelpCenter;
@Input() base: IHelpCenter;
@Input() editType: string;
@Input() organizationId: string;
public selectedLang: string;
public isToggled = false;
public selectedIcon: string;
public icons = [
'book-open-outline',
'archive-outline',
'alert-circle-outline',
'attach-outline'
];
public languages = ['en', 'ru', 'he', 'bg'];
public parentId: string;
public color: string;
public form: FormGroup;
ngOnInit() {
if (this.editType === 'edit') {
this.isToggled =
this.category.privacy === 'eye-outline' ? true : false;
this.selectedLang = this.category.language;
this.selectedIcon = this.category.icon;
}
this.form = this.fb.group({
name: ['', Validators.required],
color: [''],
desc: ['', Validators.required]
});
this.loadFormData();
}
toggleStatus(event: boolean) {
this.isToggled = event;
}
loadFormData() {
if (this.editType === 'edit')
this.form.patchValue({
name: this.category.name,
desc: this.category.description,
color: this.category.color
});
if (this.editType === 'add') {
this.form.patchValue({
name: '',
desc: '',
color: '#000000'
});
this.parentId = this.base.id;
}
}
async submit() {
if (this.editType === 'edit')
this.category = await this.helpCenterService.update(
this.category.id,
{
name: `${this.form.value.name}`,
description: `${this.form.value.desc}`,
language: `${this.selectedLang}`,
color: `${this.color}`,
icon: `${this.selectedIcon}`,
privacy:
this.isToggled === true
? 'eye-outline'
: 'eye-off-outline'
}
);
if (this.editType === 'add')
this.category = await this.helpCenterService.create({
name: `${this.form.value.name}`,
privacy:
this.isToggled === true ? 'eye-outline' : 'eye-off-outline',
icon: `${this.selectedIcon}`,
flag: 'category',
index: 0,
organizationId: this.organizationId,
description: `${this.form.value.desc}`,
language: `${this.selectedLang}`,
color: `${this.color}`,
parentId: `${this.parentId}`
});
this.dialogRef.close(this.category);
}
closeDialog() {
this.dialogRef.close();
}
ngOnDestroy() {
this._ngDestroy$.next();
this._ngDestroy$.complete();
}
}
| EditCategoryComponent |
ListPointsService.ts | import { classToPlain } from 'class-transformer'
import { getCustomRepository } from 'typeorm'
import { PointsRepositories } from '../repositories'
export class ListPointsService { |
return classToPlain(point)
}
} | async execute() {
const pointsRepositories = getCustomRepository(PointsRepositories)
let point = await pointsRepositories.find() |
test2.go | package main
var test bool
type num int // simple type alias
type point struct { // point is a struct
x, y float64
}
func | () {
a := 2
f := fib
// Function calls are evaluated left-to-right.
println(f(a), f(a+1), f(a+2), f(a+3), f(a+4))
print (2+2)
}
func fib (a int) {
return a
}
| main |
ffi_arm.rs | /* automatically generated by rust-bindgen 0.59.1 */
#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct __BindgenBitfieldUnit<Storage> {
storage: Storage,
}
impl<Storage> __BindgenBitfieldUnit<Storage> {
#[inline]
pub const fn new(storage: Storage) -> Self {
Self { storage }
}
}
impl<Storage> __BindgenBitfieldUnit<Storage>
where
Storage: AsRef<[u8]> + AsMut<[u8]>,
{
#[inline]
pub fn get_bit(&self, index: usize) -> bool {
debug_assert!(index / 8 < self.storage.as_ref().len());
let byte_index = index / 8;
let byte = self.storage.as_ref()[byte_index];
let bit_index = if cfg!(target_endian = "big") {
7 - (index % 8)
} else {
index % 8
};
let mask = 1 << bit_index;
byte & mask == mask
}
#[inline]
pub fn set_bit(&mut self, index: usize, val: bool) {
debug_assert!(index / 8 < self.storage.as_ref().len());
let byte_index = index / 8;
let byte = &mut self.storage.as_mut()[byte_index];
let bit_index = if cfg!(target_endian = "big") {
7 - (index % 8)
} else {
index % 8
};
let mask = 1 << bit_index;
if val {
*byte |= mask;
} else {
*byte &= !mask;
}
}
#[inline]
pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
debug_assert!(bit_width <= 64);
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
let mut val = 0;
for i in 0..(bit_width as usize) {
if self.get_bit(i + bit_offset) {
let index = if cfg!(target_endian = "big") {
bit_width as usize - 1 - i
} else {
i
};
val |= 1 << index;
}
}
val
}
#[inline]
pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
debug_assert!(bit_width <= 64);
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
for i in 0..(bit_width as usize) {
let mask = 1 << i;
let val_bit_is_set = val & mask == mask;
let index = if cfg!(target_endian = "big") {
bit_width as usize - 1 - i
} else {
i
};
self.set_bit(index + bit_offset, val_bit_is_set);
}
}
}
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::std::marker::PhantomData<T>, [T; 0]);
impl<T> __IncompleteArrayField<T> {
#[inline]
pub const fn new() -> Self {
__IncompleteArrayField(::std::marker::PhantomData, [])
}
#[inline]
pub fn as_ptr(&self) -> *const T {
self as *const _ as *const T
}
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut T {
self as *mut _ as *mut T
}
#[inline]
pub unsafe fn as_slice(&self, len: usize) -> &[T] {
::std::slice::from_raw_parts(self.as_ptr(), len)
}
#[inline]
pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
::std::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
}
}
impl<T> ::std::fmt::Debug for __IncompleteArrayField<T> {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_str("__IncompleteArrayField")
}
}
pub const __BIONIC__: u32 = 1;
pub const __WORDSIZE: u32 = 32;
pub const __bos_level: u32 = 0;
pub const __ANDROID_NDK__: u32 = 1;
pub const __NDK_MAJOR__: u32 = 23;
pub const __NDK_MINOR__: u32 = 1;
pub const __NDK_BETA__: u32 = 0;
pub const __NDK_BUILD__: u32 = 7779620;
pub const __NDK_CANARY__: u32 = 0;
pub const __ANDROID_API_FUTURE__: u32 = 10000;
pub const __ANDROID_API__: u32 = 10000;
pub const __ANDROID_API_G__: u32 = 9;
pub const __ANDROID_API_I__: u32 = 14;
pub const __ANDROID_API_J__: u32 = 16;
pub const __ANDROID_API_J_MR1__: u32 = 17;
pub const __ANDROID_API_J_MR2__: u32 = 18;
pub const __ANDROID_API_K__: u32 = 19;
pub const __ANDROID_API_L__: u32 = 21;
pub const __ANDROID_API_L_MR1__: u32 = 22;
pub const __ANDROID_API_M__: u32 = 23;
pub const __ANDROID_API_N__: u32 = 24;
pub const __ANDROID_API_N_MR1__: u32 = 25;
pub const __ANDROID_API_O__: u32 = 26;
pub const __ANDROID_API_O_MR1__: u32 = 27;
pub const __ANDROID_API_P__: u32 = 28;
pub const __ANDROID_API_Q__: u32 = 29;
pub const __ANDROID_API_R__: u32 = 30;
pub const __ANDROID_API_S__: u32 = 31;
pub const __ANDROID_API_T__: u32 = 33;
pub const WCHAR_MIN: u8 = 0u8;
pub const INT8_MIN: i32 = -128;
pub const INT8_MAX: u32 = 127;
pub const INT_LEAST8_MIN: i32 = -128;
pub const INT_LEAST8_MAX: u32 = 127;
pub const INT_FAST8_MIN: i32 = -128;
pub const INT_FAST8_MAX: u32 = 127;
pub const UINT8_MAX: u32 = 255;
pub const UINT_LEAST8_MAX: u32 = 255;
pub const UINT_FAST8_MAX: u32 = 255;
pub const INT16_MIN: i32 = -32768;
pub const INT16_MAX: u32 = 32767;
pub const INT_LEAST16_MIN: i32 = -32768;
pub const INT_LEAST16_MAX: u32 = 32767;
pub const UINT16_MAX: u32 = 65535;
pub const UINT_LEAST16_MAX: u32 = 65535;
pub const INT32_MIN: i32 = -2147483648;
pub const INT32_MAX: u32 = 2147483647;
pub const INT_LEAST32_MIN: i32 = -2147483648;
pub const INT_LEAST32_MAX: u32 = 2147483647;
pub const INT_FAST32_MIN: i32 = -2147483648;
pub const INT_FAST32_MAX: u32 = 2147483647;
pub const UINT32_MAX: u32 = 4294967295;
pub const UINT_LEAST32_MAX: u32 = 4294967295;
pub const UINT_FAST32_MAX: u32 = 4294967295;
pub const SIG_ATOMIC_MAX: u32 = 2147483647;
pub const SIG_ATOMIC_MIN: i32 = -2147483648;
pub const WINT_MAX: u32 = 4294967295;
pub const WINT_MIN: u32 = 0;
pub const INTPTR_MIN: i32 = -2147483648;
pub const INTPTR_MAX: u32 = 2147483647;
pub const UINTPTR_MAX: u32 = 4294967295;
pub const PTRDIFF_MIN: i32 = -2147483648;
pub const PTRDIFF_MAX: u32 = 2147483647;
pub const SIZE_MAX: u32 = 4294967295;
pub const __BITS_PER_LONG: u32 = 32;
pub const __FD_SETSIZE: u32 = 1024;
pub const __GNUC_VA_LIST: u32 = 1;
pub const true_: u32 = 1;
pub const false_: u32 = 0;
pub const __bool_true_false_are_defined: u32 = 1;
pub const __PRI_64_prefix: &'static [u8; 3usize] = b"ll\0";
pub const PRId8: &'static [u8; 2usize] = b"d\0";
pub const PRId16: &'static [u8; 2usize] = b"d\0";
pub const PRId32: &'static [u8; 2usize] = b"d\0";
pub const PRId64: &'static [u8; 4usize] = b"lld\0";
pub const PRIdLEAST8: &'static [u8; 2usize] = b"d\0";
pub const PRIdLEAST16: &'static [u8; 2usize] = b"d\0";
pub const PRIdLEAST32: &'static [u8; 2usize] = b"d\0";
pub const PRIdLEAST64: &'static [u8; 4usize] = b"lld\0";
pub const PRIdFAST8: &'static [u8; 2usize] = b"d\0";
pub const PRIdFAST64: &'static [u8; 4usize] = b"lld\0";
pub const PRIdMAX: &'static [u8; 3usize] = b"jd\0";
pub const PRIi8: &'static [u8; 2usize] = b"i\0";
pub const PRIi16: &'static [u8; 2usize] = b"i\0";
pub const PRIi32: &'static [u8; 2usize] = b"i\0";
pub const PRIi64: &'static [u8; 4usize] = b"lli\0";
pub const PRIiLEAST8: &'static [u8; 2usize] = b"i\0";
pub const PRIiLEAST16: &'static [u8; 2usize] = b"i\0";
pub const PRIiLEAST32: &'static [u8; 2usize] = b"i\0";
pub const PRIiLEAST64: &'static [u8; 4usize] = b"lli\0";
pub const PRIiFAST8: &'static [u8; 2usize] = b"i\0";
pub const PRIiFAST64: &'static [u8; 4usize] = b"lli\0";
pub const PRIiMAX: &'static [u8; 3usize] = b"ji\0";
pub const PRIo8: &'static [u8; 2usize] = b"o\0";
pub const PRIo16: &'static [u8; 2usize] = b"o\0";
pub const PRIo32: &'static [u8; 2usize] = b"o\0";
pub const PRIo64: &'static [u8; 4usize] = b"llo\0";
pub const PRIoLEAST8: &'static [u8; 2usize] = b"o\0";
pub const PRIoLEAST16: &'static [u8; 2usize] = b"o\0";
pub const PRIoLEAST32: &'static [u8; 2usize] = b"o\0";
pub const PRIoLEAST64: &'static [u8; 4usize] = b"llo\0";
pub const PRIoFAST8: &'static [u8; 2usize] = b"o\0";
pub const PRIoFAST64: &'static [u8; 4usize] = b"llo\0";
pub const PRIoMAX: &'static [u8; 3usize] = b"jo\0";
pub const PRIu8: &'static [u8; 2usize] = b"u\0";
pub const PRIu16: &'static [u8; 2usize] = b"u\0";
pub const PRIu32: &'static [u8; 2usize] = b"u\0";
pub const PRIu64: &'static [u8; 4usize] = b"llu\0";
pub const PRIuLEAST8: &'static [u8; 2usize] = b"u\0";
pub const PRIuLEAST16: &'static [u8; 2usize] = b"u\0";
pub const PRIuLEAST32: &'static [u8; 2usize] = b"u\0";
pub const PRIuLEAST64: &'static [u8; 4usize] = b"llu\0";
pub const PRIuFAST8: &'static [u8; 2usize] = b"u\0";
pub const PRIuFAST64: &'static [u8; 4usize] = b"llu\0";
pub const PRIuMAX: &'static [u8; 3usize] = b"ju\0";
pub const PRIx8: &'static [u8; 2usize] = b"x\0";
pub const PRIx16: &'static [u8; 2usize] = b"x\0";
pub const PRIx32: &'static [u8; 2usize] = b"x\0";
pub const PRIx64: &'static [u8; 4usize] = b"llx\0";
pub const PRIxLEAST8: &'static [u8; 2usize] = b"x\0";
pub const PRIxLEAST16: &'static [u8; 2usize] = b"x\0";
pub const PRIxLEAST32: &'static [u8; 2usize] = b"x\0";
pub const PRIxLEAST64: &'static [u8; 4usize] = b"llx\0";
pub const PRIxFAST8: &'static [u8; 2usize] = b"x\0";
pub const PRIxFAST64: &'static [u8; 4usize] = b"llx\0";
pub const PRIxMAX: &'static [u8; 3usize] = b"jx\0";
pub const PRIX8: &'static [u8; 2usize] = b"X\0";
pub const PRIX16: &'static [u8; 2usize] = b"X\0";
pub const PRIX32: &'static [u8; 2usize] = b"X\0";
pub const PRIX64: &'static [u8; 4usize] = b"llX\0";
pub const PRIXLEAST8: &'static [u8; 2usize] = b"X\0";
pub const PRIXLEAST16: &'static [u8; 2usize] = b"X\0";
pub const PRIXLEAST32: &'static [u8; 2usize] = b"X\0";
pub const PRIXLEAST64: &'static [u8; 4usize] = b"llX\0";
pub const PRIXFAST8: &'static [u8; 2usize] = b"X\0";
pub const PRIXFAST64: &'static [u8; 4usize] = b"llX\0";
pub const PRIXMAX: &'static [u8; 3usize] = b"jX\0";
pub const SCNd8: &'static [u8; 4usize] = b"hhd\0";
pub const SCNd16: &'static [u8; 3usize] = b"hd\0";
pub const SCNd32: &'static [u8; 2usize] = b"d\0";
pub const SCNd64: &'static [u8; 4usize] = b"lld\0";
pub const SCNdLEAST8: &'static [u8; 4usize] = b"hhd\0";
pub const SCNdLEAST16: &'static [u8; 3usize] = b"hd\0";
pub const SCNdLEAST32: &'static [u8; 2usize] = b"d\0";
pub const SCNdLEAST64: &'static [u8; 4usize] = b"lld\0";
pub const SCNdFAST8: &'static [u8; 4usize] = b"hhd\0";
pub const SCNdFAST64: &'static [u8; 4usize] = b"lld\0";
pub const SCNdMAX: &'static [u8; 3usize] = b"jd\0";
pub const SCNi8: &'static [u8; 4usize] = b"hhi\0";
pub const SCNi16: &'static [u8; 3usize] = b"hi\0";
pub const SCNi32: &'static [u8; 2usize] = b"i\0";
pub const SCNi64: &'static [u8; 4usize] = b"lli\0";
pub const SCNiLEAST8: &'static [u8; 4usize] = b"hhi\0";
pub const SCNiLEAST16: &'static [u8; 3usize] = b"hi\0";
pub const SCNiLEAST32: &'static [u8; 2usize] = b"i\0";
pub const SCNiLEAST64: &'static [u8; 4usize] = b"lli\0";
pub const SCNiFAST8: &'static [u8; 4usize] = b"hhi\0";
pub const SCNiFAST64: &'static [u8; 4usize] = b"lli\0";
pub const SCNiMAX: &'static [u8; 3usize] = b"ji\0";
pub const SCNo8: &'static [u8; 4usize] = b"hho\0";
pub const SCNo16: &'static [u8; 3usize] = b"ho\0";
pub const SCNo32: &'static [u8; 2usize] = b"o\0";
pub const SCNo64: &'static [u8; 4usize] = b"llo\0";
pub const SCNoLEAST8: &'static [u8; 4usize] = b"hho\0";
pub const SCNoLEAST16: &'static [u8; 3usize] = b"ho\0";
pub const SCNoLEAST32: &'static [u8; 2usize] = b"o\0";
pub const SCNoLEAST64: &'static [u8; 4usize] = b"llo\0";
pub const SCNoFAST8: &'static [u8; 4usize] = b"hho\0";
pub const SCNoFAST64: &'static [u8; 4usize] = b"llo\0";
pub const SCNoMAX: &'static [u8; 3usize] = b"jo\0";
pub const SCNu8: &'static [u8; 4usize] = b"hhu\0";
pub const SCNu16: &'static [u8; 3usize] = b"hu\0";
pub const SCNu32: &'static [u8; 2usize] = b"u\0";
pub const SCNu64: &'static [u8; 4usize] = b"llu\0";
pub const SCNuLEAST8: &'static [u8; 4usize] = b"hhu\0";
pub const SCNuLEAST16: &'static [u8; 3usize] = b"hu\0";
pub const SCNuLEAST32: &'static [u8; 2usize] = b"u\0";
pub const SCNuLEAST64: &'static [u8; 4usize] = b"llu\0";
pub const SCNuFAST8: &'static [u8; 4usize] = b"hhu\0";
pub const SCNuFAST64: &'static [u8; 4usize] = b"llu\0";
pub const SCNuMAX: &'static [u8; 3usize] = b"ju\0";
pub const SCNx8: &'static [u8; 4usize] = b"hhx\0";
pub const SCNx16: &'static [u8; 3usize] = b"hx\0";
pub const SCNx32: &'static [u8; 2usize] = b"x\0";
pub const SCNx64: &'static [u8; 4usize] = b"llx\0";
pub const SCNxLEAST8: &'static [u8; 4usize] = b"hhx\0";
pub const SCNxLEAST16: &'static [u8; 3usize] = b"hx\0";
pub const SCNxLEAST32: &'static [u8; 2usize] = b"x\0";
pub const SCNxLEAST64: &'static [u8; 4usize] = b"llx\0";
pub const SCNxFAST8: &'static [u8; 4usize] = b"hhx\0";
pub const SCNxFAST64: &'static [u8; 4usize] = b"llx\0";
pub const SCNxMAX: &'static [u8; 3usize] = b"jx\0";
pub const AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT: u32 = 8;
pub const O_DIRECTORY: u32 = 16384;
pub const O_NOFOLLOW: u32 = 32768;
pub const O_DIRECT: u32 = 65536;
pub const O_LARGEFILE: u32 = 131072;
pub const O_ACCMODE: u32 = 3;
pub const O_RDONLY: u32 = 0;
pub const O_WRONLY: u32 = 1;
pub const O_RDWR: u32 = 2;
pub const O_CREAT: u32 = 64;
pub const O_EXCL: u32 = 128;
pub const O_NOCTTY: u32 = 256;
pub const O_TRUNC: u32 = 512;
pub const O_APPEND: u32 = 1024;
pub const O_NONBLOCK: u32 = 2048;
pub const O_DSYNC: u32 = 4096;
pub const FASYNC: u32 = 8192;
pub const O_NOATIME: u32 = 262144;
pub const O_CLOEXEC: u32 = 524288;
pub const __O_SYNC: u32 = 1048576;
pub const O_SYNC: u32 = 1052672;
pub const O_PATH: u32 = 2097152;
pub const __O_TMPFILE: u32 = 4194304;
pub const O_TMPFILE: u32 = 4210688;
pub const O_TMPFILE_MASK: u32 = 4210752;
pub const O_NDELAY: u32 = 2048;
pub const F_DUPFD: u32 = 0;
pub const F_GETFD: u32 = 1;
pub const F_SETFD: u32 = 2;
pub const F_GETFL: u32 = 3;
pub const F_SETFL: u32 = 4;
pub const F_GETLK: u32 = 5;
pub const F_SETLK: u32 = 6;
pub const F_SETLKW: u32 = 7;
pub const F_SETOWN: u32 = 8;
pub const F_GETOWN: u32 = 9;
pub const F_SETSIG: u32 = 10;
pub const F_GETSIG: u32 = 11;
pub const F_GETLK64: u32 = 12;
pub const F_SETLK64: u32 = 13;
pub const F_SETLKW64: u32 = 14;
pub const F_SETOWN_EX: u32 = 15;
pub const F_GETOWN_EX: u32 = 16;
pub const F_GETOWNER_UIDS: u32 = 17;
pub const F_OFD_GETLK: u32 = 36;
pub const F_OFD_SETLK: u32 = 37;
pub const F_OFD_SETLKW: u32 = 38;
pub const F_OWNER_TID: u32 = 0;
pub const F_OWNER_PID: u32 = 1;
pub const F_OWNER_PGRP: u32 = 2;
pub const FD_CLOEXEC: u32 = 1;
pub const F_RDLCK: u32 = 0;
pub const F_WRLCK: u32 = 1;
pub const F_UNLCK: u32 = 2;
pub const F_EXLCK: u32 = 4;
pub const F_SHLCK: u32 = 8;
pub const LOCK_SH: u32 = 1;
pub const LOCK_EX: u32 = 2;
pub const LOCK_NB: u32 = 4;
pub const LOCK_UN: u32 = 8;
pub const LOCK_MAND: u32 = 32;
pub const LOCK_READ: u32 = 64;
pub const LOCK_WRITE: u32 = 128;
pub const LOCK_RW: u32 = 192;
pub const F_LINUX_SPECIFIC_BASE: u32 = 1024;
pub const FIOSETOWN: u32 = 35073;
pub const SIOCSPGRP: u32 = 35074;
pub const FIOGETOWN: u32 = 35075;
pub const SIOCGPGRP: u32 = 35076;
pub const SIOCATMARK: u32 = 35077;
pub const SIOCGSTAMP_OLD: u32 = 35078;
pub const SIOCGSTAMPNS_OLD: u32 = 35079;
pub const SOL_SOCKET: u32 = 1;
pub const SO_DEBUG: u32 = 1;
pub const SO_REUSEADDR: u32 = 2;
pub const SO_TYPE: u32 = 3;
pub const SO_ERROR: u32 = 4;
pub const SO_DONTROUTE: u32 = 5;
pub const SO_BROADCAST: u32 = 6;
pub const SO_SNDBUF: u32 = 7;
pub const SO_RCVBUF: u32 = 8;
pub const SO_SNDBUFFORCE: u32 = 32;
pub const SO_RCVBUFFORCE: u32 = 33;
pub const SO_KEEPALIVE: u32 = 9;
pub const SO_OOBINLINE: u32 = 10;
pub const SO_NO_CHECK: u32 = 11;
pub const SO_PRIORITY: u32 = 12;
pub const SO_LINGER: u32 = 13;
pub const SO_BSDCOMPAT: u32 = 14;
pub const SO_REUSEPORT: u32 = 15;
pub const SO_PASSCRED: u32 = 16;
pub const SO_PEERCRED: u32 = 17;
pub const SO_RCVLOWAT: u32 = 18;
pub const SO_SNDLOWAT: u32 = 19;
pub const SO_RCVTIMEO_OLD: u32 = 20;
pub const SO_SNDTIMEO_OLD: u32 = 21;
pub const SO_SECURITY_AUTHENTICATION: u32 = 22;
pub const SO_SECURITY_ENCRYPTION_TRANSPORT: u32 = 23;
pub const SO_SECURITY_ENCRYPTION_NETWORK: u32 = 24;
pub const SO_BINDTODEVICE: u32 = 25;
pub const SO_ATTACH_FILTER: u32 = 26;
pub const SO_DETACH_FILTER: u32 = 27;
pub const SO_GET_FILTER: u32 = 26;
pub const SO_PEERNAME: u32 = 28;
pub const SO_ACCEPTCONN: u32 = 30;
pub const SO_PEERSEC: u32 = 31;
pub const SO_PASSSEC: u32 = 34;
pub const SO_MARK: u32 = 36;
pub const SO_PROTOCOL: u32 = 38;
pub const SO_DOMAIN: u32 = 39;
pub const SO_RXQ_OVFL: u32 = 40;
pub const SO_WIFI_STATUS: u32 = 41;
pub const SCM_WIFI_STATUS: u32 = 41;
pub const SO_PEEK_OFF: u32 = 42;
pub const SO_NOFCS: u32 = 43;
pub const SO_LOCK_FILTER: u32 = 44;
pub const SO_SELECT_ERR_QUEUE: u32 = 45;
pub const SO_BUSY_POLL: u32 = 46;
pub const SO_MAX_PACING_RATE: u32 = 47;
pub const SO_BPF_EXTENSIONS: u32 = 48;
pub const SO_INCOMING_CPU: u32 = 49;
pub const SO_ATTACH_BPF: u32 = 50;
pub const SO_DETACH_BPF: u32 = 27;
pub const SO_ATTACH_REUSEPORT_CBPF: u32 = 51;
pub const SO_ATTACH_REUSEPORT_EBPF: u32 = 52;
pub const SO_CNX_ADVICE: u32 = 53;
pub const SCM_TIMESTAMPING_OPT_STATS: u32 = 54;
pub const SO_MEMINFO: u32 = 55;
pub const SO_INCOMING_NAPI_ID: u32 = 56;
pub const SO_COOKIE: u32 = 57;
pub const SCM_TIMESTAMPING_PKTINFO: u32 = 58;
pub const SO_PEERGROUPS: u32 = 59;
pub const SO_ZEROCOPY: u32 = 60;
pub const SO_TXTIME: u32 = 61;
pub const SCM_TXTIME: u32 = 61;
pub const SO_BINDTOIFINDEX: u32 = 62;
pub const SO_TIMESTAMP_OLD: u32 = 29;
pub const SO_TIMESTAMPNS_OLD: u32 = 35;
pub const SO_TIMESTAMPING_OLD: u32 = 37;
pub const SO_TIMESTAMP_NEW: u32 = 63;
pub const SO_TIMESTAMPNS_NEW: u32 = 64;
pub const SO_TIMESTAMPING_NEW: u32 = 65;
pub const SO_RCVTIMEO_NEW: u32 = 66;
pub const SO_SNDTIMEO_NEW: u32 = 67;
pub const SO_DETACH_REUSEPORT_BPF: u32 = 68;
pub const SO_PREFER_BUSY_POLL: u32 = 69;
pub const SO_BUSY_POLL_BUDGET: u32 = 70;
pub const SO_NETNS_COOKIE: u32 = 71;
pub const SOCK_IOC_TYPE: u32 = 137;
pub const SIOCADDRT: u32 = 35083;
pub const SIOCDELRT: u32 = 35084;
pub const SIOCRTMSG: u32 = 35085;
pub const SIOCGIFNAME: u32 = 35088;
pub const SIOCSIFLINK: u32 = 35089;
pub const SIOCGIFCONF: u32 = 35090;
pub const SIOCGIFFLAGS: u32 = 35091;
pub const SIOCSIFFLAGS: u32 = 35092;
pub const SIOCGIFADDR: u32 = 35093;
pub const SIOCSIFADDR: u32 = 35094;
pub const SIOCGIFDSTADDR: u32 = 35095;
pub const SIOCSIFDSTADDR: u32 = 35096;
pub const SIOCGIFBRDADDR: u32 = 35097;
pub const SIOCSIFBRDADDR: u32 = 35098;
pub const SIOCGIFNETMASK: u32 = 35099;
pub const SIOCSIFNETMASK: u32 = 35100;
pub const SIOCGIFMETRIC: u32 = 35101;
pub const SIOCSIFMETRIC: u32 = 35102;
pub const SIOCGIFMEM: u32 = 35103;
pub const SIOCSIFMEM: u32 = 35104;
pub const SIOCGIFMTU: u32 = 35105;
pub const SIOCSIFMTU: u32 = 35106;
pub const SIOCSIFNAME: u32 = 35107;
pub const SIOCSIFHWADDR: u32 = 35108;
pub const SIOCGIFENCAP: u32 = 35109;
pub const SIOCSIFENCAP: u32 = 35110;
pub const SIOCGIFHWADDR: u32 = 35111;
pub const SIOCGIFSLAVE: u32 = 35113;
pub const SIOCSIFSLAVE: u32 = 35120;
pub const SIOCADDMULTI: u32 = 35121;
pub const SIOCDELMULTI: u32 = 35122;
pub const SIOCGIFINDEX: u32 = 35123;
pub const SIOGIFINDEX: u32 = 35123;
pub const SIOCSIFPFLAGS: u32 = 35124;
pub const SIOCGIFPFLAGS: u32 = 35125;
pub const SIOCDIFADDR: u32 = 35126;
pub const SIOCSIFHWBROADCAST: u32 = 35127;
pub const SIOCGIFCOUNT: u32 = 35128;
pub const SIOCGIFBR: u32 = 35136;
pub const SIOCSIFBR: u32 = 35137;
pub const SIOCGIFTXQLEN: u32 = 35138;
pub const SIOCSIFTXQLEN: u32 = 35139;
pub const SIOCETHTOOL: u32 = 35142;
pub const SIOCGMIIPHY: u32 = 35143;
pub const SIOCGMIIREG: u32 = 35144;
pub const SIOCSMIIREG: u32 = 35145;
pub const SIOCWANDEV: u32 = 35146;
pub const SIOCOUTQNSD: u32 = 35147;
pub const SIOCGSKNS: u32 = 35148;
pub const SIOCDARP: u32 = 35155;
pub const SIOCGARP: u32 = 35156;
pub const SIOCSARP: u32 = 35157;
pub const SIOCDRARP: u32 = 35168;
pub const SIOCGRARP: u32 = 35169;
pub const SIOCSRARP: u32 = 35170;
pub const SIOCGIFMAP: u32 = 35184;
pub const SIOCSIFMAP: u32 = 35185;
pub const SIOCADDDLCI: u32 = 35200;
pub const SIOCDELDLCI: u32 = 35201;
pub const SIOCGIFVLAN: u32 = 35202;
pub const SIOCSIFVLAN: u32 = 35203;
pub const SIOCBONDENSLAVE: u32 = 35216;
// Bonding / bridge / hardware-timestamping socket ioctls (SIOC*).
pub const SIOCBONDRELEASE: u32 = 35217;
pub const SIOCBONDSETHWADDR: u32 = 35218;
pub const SIOCBONDSLAVEINFOQUERY: u32 = 35219;
pub const SIOCBONDINFOQUERY: u32 = 35220;
pub const SIOCBONDCHANGEACTIVE: u32 = 35221;
pub const SIOCBRADDBR: u32 = 35232;
pub const SIOCBRDELBR: u32 = 35233;
pub const SIOCBRADDIF: u32 = 35234;
pub const SIOCBRDELIF: u32 = 35235;
pub const SIOCSHWTSTAMP: u32 = 35248;
pub const SIOCGHWTSTAMP: u32 = 35249;
pub const SIOCDEVPRIVATE: u32 = 35312;
pub const SIOCPROTOPRIVATE: u32 = 35296;
// readv/writev iovec limits.
pub const UIO_FASTIOV: u32 = 8;
pub const UIO_MAXIOV: u32 = 1024;
// Socket types (second argument to socket(2)) and the CLOEXEC/NONBLOCK
// flags that may be OR'ed into them.
pub const SOCK_STREAM: u32 = 1;
pub const SOCK_DGRAM: u32 = 2;
pub const SOCK_RAW: u32 = 3;
pub const SOCK_RDM: u32 = 4;
pub const SOCK_SEQPACKET: u32 = 5;
pub const SOCK_DCCP: u32 = 6;
pub const SOCK_PACKET: u32 = 10;
pub const SOCK_CLOEXEC: u32 = 524288;
pub const SOCK_NONBLOCK: u32 = 2048;
// Ancillary-data (cmsg) types for SOL_SOCKET-level control messages.
pub const SCM_RIGHTS: u32 = 1;
pub const SCM_CREDENTIALS: u32 = 2;
pub const SCM_SECURITY: u32 = 3;
// Address families (AF_*). Note the deliberate aliases:
// AF_LOCAL == AF_UNIX and AF_ROUTE == AF_NETLINK.
pub const AF_UNSPEC: u32 = 0;
pub const AF_UNIX: u32 = 1;
pub const AF_LOCAL: u32 = 1;
pub const AF_INET: u32 = 2;
pub const AF_AX25: u32 = 3;
pub const AF_IPX: u32 = 4;
pub const AF_APPLETALK: u32 = 5;
pub const AF_NETROM: u32 = 6;
pub const AF_BRIDGE: u32 = 7;
pub const AF_ATMPVC: u32 = 8;
pub const AF_X25: u32 = 9;
pub const AF_INET6: u32 = 10;
pub const AF_ROSE: u32 = 11;
pub const AF_DECnet: u32 = 12;
pub const AF_NETBEUI: u32 = 13;
pub const AF_SECURITY: u32 = 14;
pub const AF_KEY: u32 = 15;
pub const AF_NETLINK: u32 = 16;
pub const AF_ROUTE: u32 = 16;
pub const AF_PACKET: u32 = 17;
pub const AF_ASH: u32 = 18;
pub const AF_ECONET: u32 = 19;
pub const AF_ATMSVC: u32 = 20;
pub const AF_RDS: u32 = 21;
pub const AF_SNA: u32 = 22;
pub const AF_IRDA: u32 = 23;
pub const AF_PPPOX: u32 = 24;
pub const AF_WANPIPE: u32 = 25;
pub const AF_LLC: u32 = 26;
pub const AF_CAN: u32 = 29;
pub const AF_TIPC: u32 = 30;
pub const AF_BLUETOOTH: u32 = 31;
pub const AF_IUCV: u32 = 32;
pub const AF_RXRPC: u32 = 33;
pub const AF_ISDN: u32 = 34;
pub const AF_PHONET: u32 = 35;
pub const AF_IEEE802154: u32 = 36;
pub const AF_CAIF: u32 = 37;
pub const AF_ALG: u32 = 38;
pub const AF_NFC: u32 = 39;
pub const AF_VSOCK: u32 = 40;
pub const AF_KCM: u32 = 41;
pub const AF_QIPCRTR: u32 = 42;
pub const AF_MAX: u32 = 43;
// Protocol families (PF_*): historically distinct from AF_*, but on
// Linux each PF_x is defined identical to the matching AF_x.
pub const PF_UNSPEC: u32 = 0;
pub const PF_UNIX: u32 = 1;
pub const PF_LOCAL: u32 = 1;
pub const PF_INET: u32 = 2;
pub const PF_AX25: u32 = 3;
pub const PF_IPX: u32 = 4;
pub const PF_APPLETALK: u32 = 5;
pub const PF_NETROM: u32 = 6;
pub const PF_BRIDGE: u32 = 7;
pub const PF_ATMPVC: u32 = 8;
pub const PF_X25: u32 = 9;
pub const PF_INET6: u32 = 10;
pub const PF_ROSE: u32 = 11;
pub const PF_DECnet: u32 = 12;
pub const PF_NETBEUI: u32 = 13;
pub const PF_SECURITY: u32 = 14;
pub const PF_KEY: u32 = 15;
pub const PF_NETLINK: u32 = 16;
pub const PF_ROUTE: u32 = 16;
pub const PF_PACKET: u32 = 17;
pub const PF_ASH: u32 = 18;
pub const PF_ECONET: u32 = 19;
pub const PF_ATMSVC: u32 = 20;
pub const PF_RDS: u32 = 21;
pub const PF_SNA: u32 = 22;
pub const PF_IRDA: u32 = 23;
pub const PF_PPPOX: u32 = 24;
pub const PF_WANPIPE: u32 = 25;
pub const PF_LLC: u32 = 26;
pub const PF_CAN: u32 = 29;
pub const PF_TIPC: u32 = 30;
pub const PF_BLUETOOTH: u32 = 31;
pub const PF_IUCV: u32 = 32;
pub const PF_RXRPC: u32 = 33;
pub const PF_ISDN: u32 = 34;
pub const PF_PHONET: u32 = 35;
pub const PF_IEEE802154: u32 = 36;
pub const PF_CAIF: u32 = 37;
pub const PF_ALG: u32 = 38;
pub const PF_NFC: u32 = 39;
pub const PF_VSOCK: u32 = 40;
pub const PF_KCM: u32 = 41;
pub const PF_QIPCRTR: u32 = 42;
pub const PF_MAX: u32 = 43;
// Default listen(2) backlog ceiling.
pub const SOMAXCONN: u32 = 128;
// send/recv flag bits (MSG_*). Several are kernel-internal aliases:
// MSG_TRYHARD == MSG_DONTROUTE, MSG_EOF == MSG_FIN.
pub const MSG_OOB: u32 = 1;
pub const MSG_PEEK: u32 = 2;
pub const MSG_DONTROUTE: u32 = 4;
pub const MSG_TRYHARD: u32 = 4;
pub const MSG_CTRUNC: u32 = 8;
pub const MSG_PROBE: u32 = 16;
pub const MSG_TRUNC: u32 = 32;
pub const MSG_DONTWAIT: u32 = 64;
pub const MSG_EOR: u32 = 128;
pub const MSG_WAITALL: u32 = 256;
pub const MSG_FIN: u32 = 512;
pub const MSG_SYN: u32 = 1024;
pub const MSG_CONFIRM: u32 = 2048;
pub const MSG_RST: u32 = 4096;
pub const MSG_ERRQUEUE: u32 = 8192;
pub const MSG_NOSIGNAL: u32 = 16384;
pub const MSG_MORE: u32 = 32768;
pub const MSG_WAITFORONE: u32 = 65536;
pub const MSG_BATCH: u32 = 262144;
pub const MSG_FASTOPEN: u32 = 536870912;
pub const MSG_CMSG_CLOEXEC: u32 = 1073741824;
pub const MSG_EOF: u32 = 512;
pub const MSG_CMSG_COMPAT: u32 = 0;
// Socket option levels (SOL_*) for get/setsockopt(2).
pub const SOL_IP: u32 = 0;
pub const SOL_TCP: u32 = 6;
pub const SOL_UDP: u32 = 17;
pub const SOL_IPV6: u32 = 41;
pub const SOL_ICMPV6: u32 = 58;
pub const SOL_SCTP: u32 = 132;
pub const SOL_RAW: u32 = 255;
pub const SOL_IPX: u32 = 256;
pub const SOL_AX25: u32 = 257;
pub const SOL_ATALK: u32 = 258;
pub const SOL_NETROM: u32 = 259;
pub const SOL_ROSE: u32 = 260;
pub const SOL_DECNET: u32 = 261;
pub const SOL_X25: u32 = 262;
pub const SOL_PACKET: u32 = 263;
pub const SOL_ATM: u32 = 264;
pub const SOL_AAL: u32 = 265;
pub const SOL_IRDA: u32 = 266;
pub const SOL_NETBEUI: u32 = 267;
pub const SOL_LLC: u32 = 268;
pub const SOL_DCCP: u32 = 269;
pub const SOL_NETLINK: u32 = 270;
pub const SOL_TIPC: u32 = 271;
pub const SOL_RXRPC: u32 = 272;
pub const SOL_PPPOL2TP: u32 = 273;
pub const SOL_BLUETOOTH: u32 = 274;
pub const SOL_PNPIPE: u32 = 275;
pub const SOL_RDS: u32 = 276;
pub const SOL_IUCV: u32 = 277;
pub const SOL_CAIF: u32 = 278;
pub const SOL_ALG: u32 = 279;
pub const SOL_NFC: u32 = 280;
pub const SOL_KCM: u32 = 281;
pub const SOL_TLS: u32 = 282;
pub const IPX_TYPE: u32 = 1;
// netdb configuration file paths. These are the Android locations
// (/system/etc/...), not the usual /etc/... ones.
pub const _PATH_HEQUIV: &'static [u8; 24usize] = b"/system/etc/hosts.equiv\0";
pub const _PATH_HOSTS: &'static [u8; 18usize] = b"/system/etc/hosts\0";
pub const _PATH_NETWORKS: &'static [u8; 21usize] = b"/system/etc/networks\0";
pub const _PATH_PROTOCOLS: &'static [u8; 22usize] = b"/system/etc/protocols\0";
pub const _PATH_SERVICES: &'static [u8; 21usize] = b"/system/etc/services\0";
// h_errno values for the legacy gethostby*() resolver interface.
pub const NETDB_INTERNAL: i32 = -1;
pub const NETDB_SUCCESS: u32 = 0;
pub const HOST_NOT_FOUND: u32 = 1;
pub const TRY_AGAIN: u32 = 2;
pub const NO_RECOVERY: u32 = 3;
pub const NO_DATA: u32 = 4;
pub const NO_ADDRESS: u32 = 4;
// getaddrinfo(3) error codes (EAI_*).
pub const EAI_ADDRFAMILY: u32 = 1;
pub const EAI_AGAIN: u32 = 2;
pub const EAI_BADFLAGS: u32 = 3;
pub const EAI_FAIL: u32 = 4;
pub const EAI_FAMILY: u32 = 5;
pub const EAI_MEMORY: u32 = 6;
pub const EAI_NODATA: u32 = 7;
pub const EAI_NONAME: u32 = 8;
pub const EAI_SERVICE: u32 = 9;
pub const EAI_SOCKTYPE: u32 = 10;
pub const EAI_SYSTEM: u32 = 11;
pub const EAI_BADHINTS: u32 = 12;
pub const EAI_PROTOCOL: u32 = 13;
pub const EAI_OVERFLOW: u32 = 14;
pub const EAI_MAX: u32 = 15;
// getaddrinfo(3) hint flags (AI_*). AI_DEFAULT = AI_V4MAPPED_CFG | AI_ADDRCONFIG.
pub const AI_PASSIVE: u32 = 1;
pub const AI_CANONNAME: u32 = 2;
pub const AI_NUMERICHOST: u32 = 4;
pub const AI_NUMERICSERV: u32 = 8;
pub const AI_ALL: u32 = 256;
pub const AI_V4MAPPED_CFG: u32 = 512;
pub const AI_ADDRCONFIG: u32 = 1024;
pub const AI_V4MAPPED: u32 = 2048;
pub const AI_DEFAULT: u32 = 1536;
// getnameinfo(3) buffer sizes and flags (NI_*).
pub const NI_MAXHOST: u32 = 1025;
pub const NI_MAXSERV: u32 = 32;
pub const NI_NOFQDN: u32 = 1;
pub const NI_NUMERICHOST: u32 = 2;
pub const NI_NAMEREQD: u32 = 4;
pub const NI_NUMERICSERV: u32 = 8;
pub const NI_DGRAM: u32 = 16;
pub const SCOPE_DELIMITER: u8 = 37u8;
pub const IPPORT_RESERVED: u32 = 1024;
// waitpid(2)/waitid(2) option flags and id types.
pub const WNOHANG: u32 = 1;
pub const WUNTRACED: u32 = 2;
pub const WSTOPPED: u32 = 2;
pub const WEXITED: u32 = 4;
pub const WCONTINUED: u32 = 8;
pub const WNOWAIT: u32 = 16777216;
pub const __WNOTHREAD: u32 = 536870912;
pub const __WALL: u32 = 1073741824;
pub const __WCLONE: u32 = 2147483648;
pub const P_ALL: u32 = 0;
pub const P_PID: u32 = 1;
pub const P_PGID: u32 = 2;
pub const P_PIDFD: u32 = 3;
// lseek(2) whence values and stdio buffering modes / limits.
pub const SEEK_SET: u32 = 0;
pub const SEEK_CUR: u32 = 1;
pub const SEEK_END: u32 = 2;
pub const _IOFBF: u32 = 0;
pub const _IOLBF: u32 = 1;
pub const _IONBF: u32 = 2;
pub const BUFSIZ: u32 = 1024;
pub const EOF: i32 = -1;
pub const FOPEN_MAX: u32 = 20;
pub const FILENAME_MAX: u32 = 4096;
pub const L_tmpnam: u32 = 4096;
pub const TMP_MAX: u32 = 308915776;
pub const P_tmpdir: &'static [u8; 6usize] = b"/tmp/\0";
pub const L_ctermid: u32 = 1024;
// Bionic mallopt(3) extension parameters (M_*; negative values are
// Android-specific tunables).
pub const STRUCT_MALLINFO_DECLARED: u32 = 1;
pub const M_DECAY_TIME: i32 = -100;
pub const M_PURGE: i32 = -101;
pub const M_MEMTAG_TUNING: i32 = -102;
pub const M_MEMTAG_TUNING_BUFFER_OVERFLOW: u32 = 0;
pub const M_MEMTAG_TUNING_UAF: u32 = 1;
pub const M_THREAD_DISABLE_MEM_INIT: i32 = -103;
pub const M_CACHE_COUNT_MAX: i32 = -200;
pub const M_CACHE_SIZE_MAX: i32 = -201;
pub const M_TSDS_COUNT_MAX: i32 = -202;
pub const M_BIONIC_ZERO_INIT: i32 = -203;
pub const M_BIONIC_SET_HEAP_TAGGING_LEVEL: i32 = -204;
pub const EXIT_FAILURE: u32 = 1;
pub const EXIT_SUCCESS: u32 = 0;
pub const RAND_MAX: u32 = 2147483647;
pub const __NNAPI_FL5_MIN_ANDROID_API__: u32 = 31;
// <limits.h> values. LONG_BIT/WORD_BIT == 32 and the 4-byte LONG_MAX
// confirm this binding targets a 32-bit ABI.
pub const NR_OPEN: u32 = 1024;
pub const NGROUPS_MAX: u32 = 65536;
pub const ARG_MAX: u32 = 131072;
pub const LINK_MAX: u32 = 127;
pub const MAX_CANON: u32 = 255;
pub const MAX_INPUT: u32 = 255;
pub const NAME_MAX: u32 = 255;
pub const PATH_MAX: u32 = 4096;
pub const PIPE_BUF: u32 = 4096;
pub const XATTR_NAME_MAX: u32 = 255;
pub const XATTR_SIZE_MAX: u32 = 65536;
pub const XATTR_LIST_MAX: u32 = 65536;
pub const RTSIG_MAX: u32 = 32;
pub const PASS_MAX: u32 = 128;
pub const NL_ARGMAX: u32 = 9;
pub const NL_LANGMAX: u32 = 14;
pub const NL_MSGMAX: u32 = 32767;
pub const NL_NMAX: u32 = 1;
pub const NL_SETMAX: u32 = 255;
pub const NL_TEXTMAX: u32 = 255;
pub const CHAR_BIT: u32 = 8;
pub const LONG_BIT: u32 = 32;
pub const WORD_BIT: u32 = 32;
pub const SCHAR_MAX: u32 = 127;
pub const SCHAR_MIN: i32 = -128;
pub const UCHAR_MAX: u32 = 255;
// CHAR_MIN == 0 / CHAR_MAX == 255: plain `char` is unsigned on this ABI.
pub const CHAR_MIN: u32 = 0;
pub const CHAR_MAX: u32 = 255;
pub const USHRT_MAX: u32 = 65535;
pub const SHRT_MAX: u32 = 32767;
pub const SHRT_MIN: i32 = -32768;
pub const UINT_MAX: u32 = 4294967295;
pub const INT_MAX: u32 = 2147483647;
pub const INT_MIN: i32 = -2147483648;
pub const ULONG_MAX: u32 = 4294967295;
pub const LONG_MAX: u32 = 2147483647;
pub const LONG_MIN: i32 = -2147483648;
// NOTE(review): bindgen renders the C expression ~0ULL as `-1i32` here;
// the value/type is a generator truncation artifact. Use
// u64::MAX / LLONG_MAX below instead of ULLONG_MAX/ULONG_LONG_MAX.
pub const ULLONG_MAX: i32 = -1;
pub const LLONG_MAX: u64 = 9223372036854775807;
pub const LLONG_MIN: i64 = -9223372036854775808;
pub const LONG_LONG_MIN: i64 = -9223372036854775808;
pub const LONG_LONG_MAX: u64 = 9223372036854775807;
pub const ULONG_LONG_MAX: i32 = -1;
pub const UID_MAX: u32 = 4294967295;
pub const GID_MAX: u32 = 4294967295;
pub const SIZE_T_MAX: u32 = 4294967295;
pub const SSIZE_MAX: u32 = 2147483647;
pub const MB_LEN_MAX: u32 = 4;
pub const NZERO: u32 = 20;
pub const IOV_MAX: u32 = 1024;
pub const SEM_VALUE_MAX: u32 = 1073741823;
// POSIX.1-2008 / XPG7 feature-test macros. A value of 200809 means the
// option is supported; -1 (via __BIONIC_POSIX_FEATURE_MISSING) means it
// is not provided by bionic.
pub const _POSIX_VERSION: u32 = 200809;
pub const _POSIX2_VERSION: u32 = 200809;
pub const _XOPEN_VERSION: u32 = 700;
pub const __BIONIC_POSIX_FEATURE_MISSING: i32 = -1;
pub const _POSIX_ASYNCHRONOUS_IO: i32 = -1;
pub const _POSIX_CHOWN_RESTRICTED: u32 = 1;
pub const _POSIX_CPUTIME: u32 = 200809;
pub const _POSIX_FSYNC: u32 = 200809;
pub const _POSIX_IPV6: u32 = 200809;
pub const _POSIX_MAPPED_FILES: u32 = 200809;
pub const _POSIX_MEMLOCK_RANGE: u32 = 200809;
pub const _POSIX_MEMORY_PROTECTION: u32 = 200809;
pub const _POSIX_MESSAGE_PASSING: i32 = -1;
pub const _POSIX_MONOTONIC_CLOCK: u32 = 200809;
pub const _POSIX_NO_TRUNC: u32 = 1;
pub const _POSIX_PRIORITIZED_IO: i32 = -1;
pub const _POSIX_PRIORITY_SCHEDULING: u32 = 200809;
pub const _POSIX_RAW_SOCKETS: u32 = 200809;
pub const _POSIX_READER_WRITER_LOCKS: u32 = 200809;
pub const _POSIX_REGEXP: u32 = 1;
pub const _POSIX_SAVED_IDS: u32 = 1;
pub const _POSIX_SEMAPHORES: u32 = 200809;
pub const _POSIX_SHARED_MEMORY_OBJECTS: i32 = -1;
pub const _POSIX_SHELL: u32 = 1;
pub const _POSIX_SPORADIC_SERVER: i32 = -1;
pub const _POSIX_SYNCHRONIZED_IO: u32 = 200809;
pub const _POSIX_THREAD_ATTR_STACKADDR: u32 = 200809;
pub const _POSIX_THREAD_ATTR_STACKSIZE: u32 = 200809;
pub const _POSIX_THREAD_CPUTIME: u32 = 200809;
pub const _POSIX_THREAD_PRIO_INHERIT: i32 = -1;
pub const _POSIX_THREAD_PRIO_PROTECT: i32 = -1;
pub const _POSIX_THREAD_PRIORITY_SCHEDULING: u32 = 200809;
pub const _POSIX_THREAD_PROCESS_SHARED: u32 = 200809;
pub const _POSIX_THREAD_ROBUST_PRIO_INHERIT: i32 = -1;
pub const _POSIX_THREAD_ROBUST_PRIO_PROTECT: i32 = -1;
pub const _POSIX_THREAD_SAFE_FUNCTIONS: u32 = 200809;
pub const _POSIX_THREAD_SPORADIC_SERVER: i32 = -1;
pub const _POSIX_THREADS: u32 = 200809;
pub const _POSIX_TIMERS: u32 = 200809;
pub const _POSIX_TRACE: i32 = -1;
pub const _POSIX_TRACE_EVENT_FILTER: i32 = -1;
pub const _POSIX_TRACE_INHERIT: i32 = -1;
pub const _POSIX_TRACE_LOG: i32 = -1;
pub const _POSIX_TYPED_MEMORY_OBJECTS: i32 = -1;
pub const _POSIX_VDISABLE: u8 = 0u8;
pub const _POSIX2_C_BIND: u32 = 200809;
pub const _POSIX2_C_DEV: i32 = -1;
pub const _POSIX2_CHAR_TERM: u32 = 200809;
pub const _POSIX2_FORT_DEV: i32 = -1;
pub const _POSIX2_FORT_RUN: i32 = -1;
pub const _POSIX2_LOCALEDEF: i32 = -1;
pub const _POSIX2_SW_DEV: i32 = -1;
pub const _POSIX2_UPE: i32 = -1;
// ILP32_OFF32 == 1: the supported programming environment is 32-bit
// with 32-bit off_t.
pub const _POSIX_V7_ILP32_OFF32: u32 = 1;
pub const _POSIX_V7_ILP32_OFFBIG: i32 = -1;
pub const _POSIX_V7_LP64_OFF64: i32 = -1;
pub const _POSIX_V7_LPBIG_OFFBIG: i32 = -1;
pub const _XOPEN_CRYPT: i32 = -1;
pub const _XOPEN_ENH_I18N: u32 = 1;
pub const _XOPEN_LEGACY: i32 = -1;
pub const _XOPEN_REALTIME: u32 = 1;
pub const _XOPEN_REALTIME_THREADS: u32 = 1;
pub const _XOPEN_SHM: u32 = 1;
pub const _XOPEN_STREAMS: i32 = -1;
pub const _XOPEN_UNIX: u32 = 1;
// POSIX minimum-acceptable limits (_POSIX_*): lower bounds the standard
// guarantees, not the runtime values (query those with sysconf/pathconf).
pub const _POSIX_AIO_LISTIO_MAX: u32 = 2;
pub const _POSIX_AIO_MAX: u32 = 1;
pub const _POSIX_ARG_MAX: u32 = 4096;
pub const _POSIX_CHILD_MAX: u32 = 25;
pub const _POSIX_CLOCKRES_MIN: u32 = 20000000;
pub const _POSIX_DELAYTIMER_MAX: u32 = 32;
pub const _POSIX_HOST_NAME_MAX: u32 = 255;
pub const _POSIX_LINK_MAX: u32 = 8;
pub const _POSIX_LOGIN_NAME_MAX: u32 = 9;
pub const _POSIX_MAX_CANON: u32 = 255;
pub const _POSIX_MAX_INPUT: u32 = 255;
pub const _POSIX_MQ_OPEN_MAX: u32 = 8;
pub const _POSIX_MQ_PRIO_MAX: u32 = 32;
pub const _POSIX_NAME_MAX: u32 = 14;
pub const _POSIX_NGROUPS_MAX: u32 = 8;
pub const _POSIX_OPEN_MAX: u32 = 20;
pub const _POSIX_PATH_MAX: u32 = 256;
pub const _POSIX_PIPE_BUF: u32 = 512;
pub const _POSIX_RE_DUP_MAX: u32 = 255;
pub const _POSIX_RTSIG_MAX: u32 = 8;
pub const _POSIX_SEM_NSEMS_MAX: u32 = 256;
pub const _POSIX_SEM_VALUE_MAX: u32 = 32767;
pub const _POSIX_SIGQUEUE_MAX: u32 = 32;
pub const _POSIX_SSIZE_MAX: u32 = 32767;
pub const _POSIX_STREAM_MAX: u32 = 8;
pub const _POSIX_SS_REPL_MAX: u32 = 4;
pub const _POSIX_SYMLINK_MAX: u32 = 255;
pub const _POSIX_SYMLOOP_MAX: u32 = 8;
pub const _POSIX_THREAD_DESTRUCTOR_ITERATIONS: u32 = 4;
pub const _POSIX_THREAD_KEYS_MAX: u32 = 128;
pub const _POSIX_THREAD_THREADS_MAX: u32 = 64;
pub const _POSIX_TIMER_MAX: u32 = 32;
pub const _POSIX_TRACE_EVENT_NAME_MAX: u32 = 30;
pub const _POSIX_TRACE_NAME_MAX: u32 = 8;
pub const _POSIX_TRACE_SYS_MAX: u32 = 8;
pub const _POSIX_TRACE_USER_EVENT_MAX: u32 = 32;
pub const _POSIX_TTY_NAME_MAX: u32 = 9;
pub const _POSIX_TZNAME_MAX: u32 = 6;
pub const _POSIX2_BC_BASE_MAX: u32 = 99;
pub const _POSIX2_BC_DIM_MAX: u32 = 2048;
pub const _POSIX2_BC_SCALE_MAX: u32 = 99;
pub const _POSIX2_BC_STRING_MAX: u32 = 1000;
pub const _POSIX2_CHARCLASS_NAME_MAX: u32 = 14;
pub const _POSIX2_COLL_WEIGHTS_MAX: u32 = 2;
pub const _POSIX2_EXPR_NEST_MAX: u32 = 32;
pub const _POSIX2_LINE_MAX: u32 = 2048;
pub const _POSIX2_RE_DUP_MAX: u32 = 255;
pub const _XOPEN_IOV_MAX: u32 = 16;
pub const _XOPEN_NAME_MAX: u32 = 255;
pub const _XOPEN_PATH_MAX: u32 = 1024;
// Runtime limits bionic defines statically.
pub const HOST_NAME_MAX: u32 = 255;
pub const LOGIN_NAME_MAX: u32 = 256;
pub const TTY_NAME_MAX: u32 = 32;
pub const PTHREAD_DESTRUCTOR_ITERATIONS: u32 = 4;
pub const PTHREAD_KEYS_MAX: u32 = 128;
// <math.h> floating-point classification results and error handling.
pub const FP_INFINITE: u32 = 1;
pub const FP_NAN: u32 = 2;
pub const FP_NORMAL: u32 = 4;
pub const FP_SUBNORMAL: u32 = 8;
pub const FP_ZERO: u32 = 16;
pub const FP_ILOGB0: i32 = -2147483647;
pub const FP_ILOGBNAN: u32 = 2147483647;
pub const MATH_ERRNO: u32 = 1;
pub const MATH_ERREXCEPT: u32 = 2;
pub const math_errhandling: u32 = 2;
// Common mathematical constants (M_*), as f64.
pub const M_E: f64 = 2.718281828459045;
pub const M_LOG2E: f64 = 1.4426950408889634;
pub const M_LOG10E: f64 = 0.4342944819032518;
pub const M_LN2: f64 = 0.6931471805599453;
pub const M_LN10: f64 = 2.302585092994046;
pub const M_PI: f64 = 3.141592653589793;
pub const M_PI_2: f64 = 1.5707963267948966;
pub const M_PI_4: f64 = 0.7853981633974483;
pub const M_1_PI: f64 = 0.3183098861837907;
pub const M_2_PI: f64 = 0.6366197723675814;
pub const M_2_SQRTPI: f64 = 1.1283791670955126;
pub const M_SQRT2: f64 = 1.4142135623730951;
pub const M_SQRT1_2: f64 = 0.7071067811865476;
// Android NDK sensor constants (<android/sensor.h>).
pub const ASENSOR_FIFO_COUNT_INVALID: i32 = -1;
pub const ASENSOR_DELAY_INVALID: i32 = -2147483648;
pub const ASENSOR_INVALID: i32 = -1;
pub const ASENSOR_STANDARD_GRAVITY: f64 = 9.80665;
pub const ASENSOR_MAGNETIC_FIELD_EARTH_MAX: f64 = 60.0;
pub const ASENSOR_MAGNETIC_FIELD_EARTH_MIN: f64 = 30.0;
// Linux ioctl number encoding (_IOC_*): an ioctl request packs
// direction | size | type | nr using these bit widths and shifts.
pub const _IOC_NRBITS: u32 = 8;
pub const _IOC_TYPEBITS: u32 = 8;
pub const _IOC_SIZEBITS: u32 = 14;
pub const _IOC_DIRBITS: u32 = 2;
pub const _IOC_NRMASK: u32 = 255;
pub const _IOC_TYPEMASK: u32 = 255;
pub const _IOC_SIZEMASK: u32 = 16383;
pub const _IOC_DIRMASK: u32 = 3;
pub const _IOC_NRSHIFT: u32 = 0;
pub const _IOC_TYPESHIFT: u32 = 8;
pub const _IOC_SIZESHIFT: u32 = 16;
pub const _IOC_DIRSHIFT: u32 = 30;
pub const _IOC_NONE: u32 = 0;
pub const _IOC_WRITE: u32 = 1;
pub const _IOC_READ: u32 = 2;
pub const IOC_IN: u32 = 1073741824;
pub const IOC_OUT: u32 = 2147483648;
pub const IOC_INOUT: u32 = 3221225472;
pub const IOCSIZE_MASK: u32 = 1073676288;
pub const IOCSIZE_SHIFT: u32 = 16;
pub const SYNC_IOC_MAGIC: u8 = 62u8;
// setitimer(2) timer kinds.
pub const ITIMER_REAL: u32 = 0;
pub const ITIMER_VIRTUAL: u32 = 1;
pub const ITIMER_PROF: u32 = 2;
// clockid_t values for clock_gettime(2) and friends.
pub const CLOCK_REALTIME: u32 = 0;
pub const CLOCK_MONOTONIC: u32 = 1;
pub const CLOCK_PROCESS_CPUTIME_ID: u32 = 2;
pub const CLOCK_THREAD_CPUTIME_ID: u32 = 3;
pub const CLOCK_MONOTONIC_RAW: u32 = 4;
pub const CLOCK_REALTIME_COARSE: u32 = 5;
pub const CLOCK_MONOTONIC_COARSE: u32 = 6;
pub const CLOCK_BOOTTIME: u32 = 7;
pub const CLOCK_REALTIME_ALARM: u32 = 8;
pub const CLOCK_BOOTTIME_ALARM: u32 = 9;
pub const CLOCK_SGI_CYCLE: u32 = 10;
pub const CLOCK_TAI: u32 = 11;
pub const MAX_CLOCKS: u32 = 16;
pub const CLOCKS_MASK: u32 = 1;
pub const CLOCKS_MONO: u32 = 1;
pub const TIMER_ABSTIME: u32 = 1;
// Signal numbers (classic Linux numbering; SIGIOT/SIGPOLL/SIGUNUSED are
// aliases) and sigaction flags.
pub const _KERNEL_NSIG: u32 = 32;
pub const SIGHUP: u32 = 1;
pub const SIGINT: u32 = 2;
pub const SIGQUIT: u32 = 3;
pub const SIGILL: u32 = 4;
pub const SIGTRAP: u32 = 5;
pub const SIGABRT: u32 = 6;
pub const SIGIOT: u32 = 6;
pub const SIGBUS: u32 = 7;
pub const SIGFPE: u32 = 8;
pub const SIGKILL: u32 = 9;
pub const SIGUSR1: u32 = 10;
pub const SIGSEGV: u32 = 11;
pub const SIGUSR2: u32 = 12;
pub const SIGPIPE: u32 = 13;
pub const SIGALRM: u32 = 14;
pub const SIGTERM: u32 = 15;
pub const SIGSTKFLT: u32 = 16;
pub const SIGCHLD: u32 = 17;
pub const SIGCONT: u32 = 18;
pub const SIGSTOP: u32 = 19;
pub const SIGTSTP: u32 = 20;
pub const SIGTTIN: u32 = 21;
pub const SIGTTOU: u32 = 22;
pub const SIGURG: u32 = 23;
pub const SIGXCPU: u32 = 24;
pub const SIGXFSZ: u32 = 25;
pub const SIGVTALRM: u32 = 26;
pub const SIGPROF: u32 = 27;
pub const SIGWINCH: u32 = 28;
pub const SIGIO: u32 = 29;
pub const SIGPOLL: u32 = 29;
pub const SIGPWR: u32 = 30;
pub const SIGSYS: u32 = 31;
pub const SIGUNUSED: u32 = 31;
pub const __SIGRTMIN: u32 = 32;
// SIGSWI / SA_THIRTYTWO / SA_RESTORER are Arm-specific.
pub const SIGSWI: u32 = 32;
pub const SA_THIRTYTWO: u32 = 33554432;
pub const SA_RESTORER: u32 = 67108864;
pub const MINSIGSTKSZ: u32 = 2048;
pub const SIGSTKSZ: u32 = 8192;
pub const SA_NOCLDSTOP: u32 = 1;
pub const SA_NOCLDWAIT: u32 = 2;
pub const SA_SIGINFO: u32 = 4;
pub const SA_UNSUPPORTED: u32 = 1024;
pub const SA_EXPOSE_TAGBITS: u32 = 2048;
pub const SA_ONSTACK: u32 = 134217728;
pub const SA_RESTART: u32 = 268435456;
pub const SA_NODEFER: u32 = 1073741824;
pub const SA_RESETHAND: u32 = 2147483648;
pub const SA_NOMASK: u32 = 1073741824;
pub const SA_ONESHOT: u32 = 2147483648;
// sigprocmask(2) "how" values.
pub const SIG_BLOCK: u32 = 0;
pub const SIG_UNBLOCK: u32 = 1;
pub const SIG_SETMASK: u32 = 2;
// siginfo_t si_code values: generic (SI_*) followed by the per-signal
// code sets (ILL_*, FPE_*, SEGV_*, BUS_*, TRAP_*, CLD_*, POLL_*, SYS_*).
pub const SI_MAX_SIZE: u32 = 128;
pub const SI_USER: u32 = 0;
pub const SI_KERNEL: u32 = 128;
pub const SI_QUEUE: i32 = -1;
pub const SI_TIMER: i32 = -2;
pub const SI_MESGQ: i32 = -3;
pub const SI_ASYNCIO: i32 = -4;
pub const SI_SIGIO: i32 = -5;
pub const SI_TKILL: i32 = -6;
pub const SI_DETHREAD: i32 = -7;
pub const SI_ASYNCNL: i32 = -60;
pub const ILL_ILLOPC: u32 = 1;
pub const ILL_ILLOPN: u32 = 2;
pub const ILL_ILLADR: u32 = 3;
pub const ILL_ILLTRP: u32 = 4;
pub const ILL_PRVOPC: u32 = 5;
pub const ILL_PRVREG: u32 = 6;
pub const ILL_COPROC: u32 = 7;
pub const ILL_BADSTK: u32 = 8;
pub const ILL_BADIADDR: u32 = 9;
pub const __ILL_BREAK: u32 = 10;
pub const __ILL_BNDMOD: u32 = 11;
pub const NSIGILL: u32 = 11;
pub const FPE_INTDIV: u32 = 1;
pub const FPE_INTOVF: u32 = 2;
pub const FPE_FLTDIV: u32 = 3;
pub const FPE_FLTOVF: u32 = 4;
pub const FPE_FLTUND: u32 = 5;
pub const FPE_FLTRES: u32 = 6;
pub const FPE_FLTINV: u32 = 7;
pub const FPE_FLTSUB: u32 = 8;
pub const __FPE_DECOVF: u32 = 9;
pub const __FPE_DECDIV: u32 = 10;
pub const __FPE_DECERR: u32 = 11;
pub const __FPE_INVASC: u32 = 12;
pub const __FPE_INVDEC: u32 = 13;
pub const FPE_FLTUNK: u32 = 14;
pub const FPE_CONDTRAP: u32 = 15;
pub const NSIGFPE: u32 = 15;
pub const SEGV_MAPERR: u32 = 1;
pub const SEGV_ACCERR: u32 = 2;
pub const SEGV_BNDERR: u32 = 3;
pub const SEGV_PKUERR: u32 = 4;
pub const SEGV_ACCADI: u32 = 5;
pub const SEGV_ADIDERR: u32 = 6;
pub const SEGV_ADIPERR: u32 = 7;
pub const SEGV_MTEAERR: u32 = 8;
pub const SEGV_MTESERR: u32 = 9;
pub const NSIGSEGV: u32 = 9;
pub const BUS_ADRALN: u32 = 1;
pub const BUS_ADRERR: u32 = 2;
pub const BUS_OBJERR: u32 = 3;
pub const BUS_MCEERR_AR: u32 = 4;
pub const BUS_MCEERR_AO: u32 = 5;
pub const NSIGBUS: u32 = 5;
pub const TRAP_BRKPT: u32 = 1;
pub const TRAP_TRACE: u32 = 2;
pub const TRAP_BRANCH: u32 = 3;
pub const TRAP_HWBKPT: u32 = 4;
pub const TRAP_UNK: u32 = 5;
pub const TRAP_PERF: u32 = 6;
pub const NSIGTRAP: u32 = 6;
pub const CLD_EXITED: u32 = 1;
pub const CLD_KILLED: u32 = 2;
pub const CLD_DUMPED: u32 = 3;
pub const CLD_TRAPPED: u32 = 4;
pub const CLD_STOPPED: u32 = 5;
pub const CLD_CONTINUED: u32 = 6;
pub const NSIGCHLD: u32 = 6;
pub const POLL_IN: u32 = 1;
pub const POLL_OUT: u32 = 2;
pub const POLL_MSG: u32 = 3;
pub const POLL_ERR: u32 = 4;
pub const POLL_PRI: u32 = 5;
pub const POLL_HUP: u32 = 6;
pub const NSIGPOLL: u32 = 6;
pub const SYS_SECCOMP: u32 = 1;
pub const SYS_USER_DISPATCH: u32 = 2;
pub const NSIGSYS: u32 = 2;
pub const EMT_TAGOVF: u32 = 1;
pub const NSIGEMT: u32 = 1;
// sigevent notification kinds and signal-stack (sigaltstack) flags.
pub const SIGEV_SIGNAL: u32 = 0;
pub const SIGEV_NONE: u32 = 1;
pub const SIGEV_THREAD: u32 = 2;
pub const SIGEV_THREAD_ID: u32 = 4;
pub const SIGEV_MAX_SIZE: u32 = 64;
pub const SS_ONSTACK: u32 = 1;
pub const SS_DISABLE: u32 = 2;
pub const SS_AUTODISARM: u32 = 2147483648;
pub const SS_FLAG_BITS: u32 = 2147483648;
pub const _KERNEL__NSIG: u32 = 64;
pub const _NSIG: u32 = 65;
pub const NSIG: u32 = 65;
// Page geometry, mcontext register count, select() fd limit, clock tick
// rate, and Android AAudio / keystore-property constants.
pub const PAGE_SIZE: u32 = 4096;
pub const PAGE_MASK: i32 = -4096;
pub const NGREG: u32 = 18;
pub const FD_SETSIZE: u32 = 1024;
pub const CLOCKS_PER_SEC: u32 = 1000000;
pub const TIME_UTC: u32 = 1;
pub const AAUDIO_UNSPECIFIED: u32 = 0;
pub const AAUDIO_SYSTEM_USAGE_OFFSET: u32 = 1000;
pub const PROPERTY_VENDOR: &'static [u8; 7usize] = b"vendor\0";
pub const PROPERTY_VERSION: &'static [u8; 8usize] = b"version\0";
pub const PROPERTY_DESCRIPTION: &'static [u8; 12usize] = b"description\0";
pub const PROPERTY_ALGORITHMS: &'static [u8; 11usize] = b"algorithms\0";
pub const PROPERTY_DEVICE_UNIQUE_ID: &'static [u8; 15usize] = b"deviceUniqueId\0";
extern "C" {
    /// Binding to bionic's `android_get_application_target_sdk_version`.
    /// NOTE(review): per the NDK headers this returns the calling app's
    /// targetSdkVersion — confirm against `<android/api-level.h>`.
    pub fn android_get_application_target_sdk_version() -> ::std::os::raw::c_int;
}
extern "C" {
    /// Binding to bionic's `android_get_device_api_level`.
    /// NOTE(review): per the NDK headers this returns the device's API
    /// level at runtime — confirm against `<android/api-level.h>`.
    pub fn android_get_device_api_level() -> ::std::os::raw::c_int;
}
// Both 32-bit on this target (size_t == unsigned int is consistent with
// the 4-byte-pointer struct layouts asserted below).
pub type size_t = ::std::os::raw::c_uint;
pub type wchar_t = ::std::os::raw::c_uint;
/// C `max_align_t`: a type whose alignment is the largest fundamental
/// alignment (its layout test asserts size 16, align 8 on this target).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct max_align_t {
    pub __clang_max_align_nonce1: ::std::os::raw::c_longlong,
    pub __clang_max_align_nonce2: f64,
}
#[test]
fn bindgen_test_layout_max_align_t() {
    // Layout check for `max_align_t`: size, alignment, field offsets.
    //
    // The original bindgen output computed offsets by taking a reference
    // through a dereferenced null pointer (`&(*null::<T>()).field`),
    // which is undefined behavior in Rust. Use a dangling-but-aligned
    // pointer from `MaybeUninit` plus `addr_of!` (no reference is ever
    // created), as modern bindgen does.
    const UNINIT: ::std::mem::MaybeUninit<max_align_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<max_align_t>(),
        16usize,
        concat!("Size of: ", stringify!(max_align_t))
    );
    assert_eq!(
        ::std::mem::align_of::<max_align_t>(),
        8usize,
        concat!("Alignment of ", stringify!(max_align_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` computes the field address without
        // dereferencing; `ptr` points to (uninitialized) storage of the
        // right type, so the projection stays in bounds.
        unsafe { ::std::ptr::addr_of!((*ptr).__clang_max_align_nonce1) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(max_align_t),
            "::",
            stringify!(__clang_max_align_nonce1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).__clang_max_align_nonce2) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(max_align_t),
            "::",
            stringify!(__clang_max_align_nonce2)
        )
    );
}
// Fixed-width integer aliases. Note `__int64_t` maps to `c_longlong`
// and `__intptr_t` to `c_int` — again consistent with a 32-bit target.
pub type __int8_t = ::std::os::raw::c_schar;
pub type __uint8_t = ::std::os::raw::c_uchar;
pub type __int16_t = ::std::os::raw::c_short;
pub type __uint16_t = ::std::os::raw::c_ushort;
pub type __int32_t = ::std::os::raw::c_int;
pub type __uint32_t = ::std::os::raw::c_uint;
pub type __int64_t = ::std::os::raw::c_longlong;
pub type __uint64_t = ::std::os::raw::c_ulonglong;
pub type __intptr_t = ::std::os::raw::c_int;
pub type __uintptr_t = ::std::os::raw::c_uint;
// C99 least/fast-width aliases.
pub type int_least8_t = i8;
pub type uint_least8_t = u8;
pub type int_least16_t = i16;
pub type uint_least16_t = u16;
pub type int_least32_t = i32;
pub type uint_least32_t = u32;
pub type int_least64_t = i64;
pub type uint_least64_t = u64;
pub type int_fast8_t = i8;
pub type uint_fast8_t = u8;
pub type int_fast64_t = i64;
pub type uint_fast64_t = u64;
pub type int_fast16_t = i32;
pub type uint_fast16_t = u32;
pub type int_fast32_t = i32;
pub type uint_fast32_t = u32;
pub type uintmax_t = u64;
pub type intmax_t = i64;
// Linux kernel UAPI integer aliases (__s*/__u*).
pub type __s8 = ::std::os::raw::c_schar;
pub type __u8 = ::std::os::raw::c_uchar;
pub type __s16 = ::std::os::raw::c_short;
pub type __u16 = ::std::os::raw::c_ushort;
pub type __s32 = ::std::os::raw::c_int;
pub type __u32 = ::std::os::raw::c_uint;
pub type __s64 = ::std::os::raw::c_longlong;
pub type __u64 = ::std::os::raw::c_ulonglong;
/// Kernel `fd_set` bitmap: 32 machine words covering 1024 file
/// descriptors (see `FD_SETSIZE` above).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_fd_set {
    pub fds_bits: [::std::os::raw::c_ulong; 32usize],
}
#[test]
fn bindgen_test_layout___kernel_fd_set() {
    // Layout check for `__kernel_fd_set`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<__kernel_fd_set> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<__kernel_fd_set>(),
        128usize,
        concat!("Size of: ", stringify!(__kernel_fd_set))
    );
    assert_eq!(
        ::std::mem::align_of::<__kernel_fd_set>(),
        4usize,
        concat!("Alignment of ", stringify!(__kernel_fd_set))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).fds_bits) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(__kernel_fd_set),
            "::",
            stringify!(fds_bits)
        )
    );
}
/// Kernel signal-handler pointer: `void (*)(int)`, nullable via `Option`.
pub type __kernel_sighandler_t =
    ::std::option::Option<unsafe extern "C" fn(arg1: ::std::os::raw::c_int)>;
// Kernel primitive type aliases. The 16-bit uid/gid/mode variants are
// the legacy ABI types; the 32-bit ones follow further down.
pub type __kernel_key_t = ::std::os::raw::c_int;
pub type __kernel_mqd_t = ::std::os::raw::c_int;
pub type __kernel_mode_t = ::std::os::raw::c_ushort;
pub type __kernel_ipc_pid_t = ::std::os::raw::c_ushort;
pub type __kernel_uid_t = ::std::os::raw::c_ushort;
pub type __kernel_gid_t = ::std::os::raw::c_ushort;
pub type __kernel_old_dev_t = ::std::os::raw::c_ushort;
pub type __kernel_long_t = ::std::os::raw::c_long;
pub type __kernel_ulong_t = ::std::os::raw::c_ulong;
pub type __kernel_ino_t = __kernel_ulong_t;
pub type __kernel_pid_t = ::std::os::raw::c_int;
pub type __kernel_suseconds_t = __kernel_long_t;
pub type __kernel_daddr_t = ::std::os::raw::c_int;
pub type __kernel_uid32_t = ::std::os::raw::c_uint;
pub type __kernel_gid32_t = ::std::os::raw::c_uint;
pub type __kernel_old_uid_t = __kernel_uid_t;
pub type __kernel_old_gid_t = __kernel_gid_t;
pub type __kernel_size_t = ::std::os::raw::c_uint;
pub type __kernel_ssize_t = ::std::os::raw::c_int;
pub type __kernel_ptrdiff_t = ::std::os::raw::c_int;
/// Kernel filesystem id (`fsid_t`): an opaque pair of ints.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_fsid_t {
    pub val: [::std::os::raw::c_int; 2usize],
}
#[test]
fn bindgen_test_layout___kernel_fsid_t() {
    // Layout check for `__kernel_fsid_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<__kernel_fsid_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<__kernel_fsid_t>(),
        8usize,
        concat!("Size of: ", stringify!(__kernel_fsid_t))
    );
    assert_eq!(
        ::std::mem::align_of::<__kernel_fsid_t>(),
        4usize,
        concat!("Alignment of ", stringify!(__kernel_fsid_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).val) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(__kernel_fsid_t),
            "::",
            stringify!(val)
        )
    );
}
// Kernel file-offset and time aliases. The `*64`/`time64` variants are
// the 64-bit-capable forms used on 32-bit ABIs.
pub type __kernel_off_t = __kernel_long_t;
pub type __kernel_loff_t = ::std::os::raw::c_longlong;
pub type __kernel_old_time_t = __kernel_long_t;
pub type __kernel_time_t = __kernel_long_t;
pub type __kernel_time64_t = ::std::os::raw::c_longlong;
pub type __kernel_clock_t = __kernel_long_t;
pub type __kernel_timer_t = ::std::os::raw::c_int;
pub type __kernel_clockid_t = ::std::os::raw::c_int;
pub type __kernel_caddr_t = *mut ::std::os::raw::c_char;
pub type __kernel_uid16_t = ::std::os::raw::c_ushort;
pub type __kernel_gid16_t = ::std::os::raw::c_ushort;
// Endianness-annotated kernel integer aliases (__le*/__be*) plus
// checksum and poll types. The le/be distinction is documentation only;
// all map to the same plain unsigned types here.
pub type __le16 = __u16;
pub type __be16 = __u16;
pub type __le32 = __u32;
pub type __be32 = __u32;
pub type __le64 = __u64;
pub type __be64 = __u64;
pub type __sum16 = __u16;
pub type __wsum = __u32;
pub type __poll_t = ::std::os::raw::c_uint;
/// Bionic `pthread_attr_t`: thread creation attributes. Unlike glibc's
/// opaque byte blob, bionic exposes the fields directly.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_attr_t {
    pub flags: u32,
    pub stack_base: *mut ::std::os::raw::c_void,
    pub stack_size: size_t,
    pub guard_size: size_t,
    pub sched_policy: i32,
    pub sched_priority: i32,
}
#[test]
fn bindgen_test_layout_pthread_attr_t() {
    // Layout check for `pthread_attr_t` (24 bytes / 4-aligned on this
    // 32-bit target). Replaces the original null-pointer-dereference
    // offset computation (UB in Rust) with the `MaybeUninit` +
    // `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_attr_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_attr_t>(),
        24usize,
        concat!("Size of: ", stringify!(pthread_attr_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_attr_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_attr_t))
    );
    assert_eq!(
        // SAFETY (applies to each projection below): `addr_of!` computes
        // the field address without creating a reference; `ptr` is valid
        // for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).stack_base) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(stack_base)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).stack_size) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(stack_size)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).guard_size) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(guard_size)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sched_policy) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(sched_policy)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sched_priority) as usize - ptr as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_attr_t),
            "::",
            stringify!(sched_priority)
        )
    );
}
/// Bionic `pthread_barrier_t`: opaque 32-byte storage.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_barrier_t {
    pub __private: [i32; 8usize],
}
#[test]
fn bindgen_test_layout_pthread_barrier_t() {
    // Layout check for `pthread_barrier_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_barrier_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_barrier_t>(),
        32usize,
        concat!("Size of: ", stringify!(pthread_barrier_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_barrier_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_barrier_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).__private) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_barrier_t),
            "::",
            stringify!(__private)
        )
    );
}
pub type pthread_barrierattr_t = ::std::os::raw::c_int;
/// Bionic `pthread_cond_t`: opaque 4-byte storage.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_cond_t {
    pub __private: [i32; 1usize],
}
#[test]
fn bindgen_test_layout_pthread_cond_t() {
    // Layout check for `pthread_cond_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_cond_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_cond_t>(),
        4usize,
        concat!("Size of: ", stringify!(pthread_cond_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_cond_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_cond_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).__private) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_cond_t),
            "::",
            stringify!(__private)
        )
    );
}
pub type pthread_condattr_t = ::std::os::raw::c_long;
pub type pthread_key_t = ::std::os::raw::c_int;
/// Bionic `pthread_mutex_t`: opaque 4-byte storage.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_mutex_t {
    pub __private: [i32; 1usize],
}
#[test]
fn bindgen_test_layout_pthread_mutex_t() {
    // Layout check for `pthread_mutex_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_mutex_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_mutex_t>(),
        4usize,
        concat!("Size of: ", stringify!(pthread_mutex_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_mutex_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_mutex_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).__private) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_mutex_t),
            "::",
            stringify!(__private)
        )
    );
}
pub type pthread_mutexattr_t = ::std::os::raw::c_long;
pub type pthread_once_t = ::std::os::raw::c_int;
/// Bionic `pthread_rwlock_t`: opaque 40-byte storage.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_rwlock_t {
    pub __private: [i32; 10usize],
}
#[test]
fn bindgen_test_layout_pthread_rwlock_t() {
    // Layout check for `pthread_rwlock_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_rwlock_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_rwlock_t>(),
        40usize,
        concat!("Size of: ", stringify!(pthread_rwlock_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_rwlock_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_rwlock_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).__private) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_rwlock_t),
            "::",
            stringify!(__private)
        )
    );
}
pub type pthread_rwlockattr_t = ::std::os::raw::c_long;
/// Bionic `pthread_spinlock_t`: opaque 8-byte storage.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pthread_spinlock_t {
    pub __private: [i32; 2usize],
}
#[test]
fn bindgen_test_layout_pthread_spinlock_t() {
    // Layout check for `pthread_spinlock_t`. Replaces the original
    // null-pointer-dereference offset computation (UB in Rust) with the
    // `MaybeUninit` + `addr_of!` pattern used by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<pthread_spinlock_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<pthread_spinlock_t>(),
        8usize,
        concat!("Size of: ", stringify!(pthread_spinlock_t))
    );
    assert_eq!(
        ::std::mem::align_of::<pthread_spinlock_t>(),
        4usize,
        concat!("Alignment of ", stringify!(pthread_spinlock_t))
    );
    assert_eq!(
        // SAFETY: `addr_of!` projects a field address without creating a
        // reference; `ptr` is valid for the projection.
        unsafe { ::std::ptr::addr_of!((*ptr).__private) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(pthread_spinlock_t),
            "::",
            stringify!(__private)
        )
    );
}
pub type pthread_t = ::std::os::raw::c_long;
// POSIX identity / filesystem scalar types, mapped onto the kernel's
// `__kernel_*` typedefs (declared elsewhere in this file).
pub type __gid_t = __kernel_gid32_t;
pub type gid_t = __gid_t;
pub type __uid_t = __kernel_uid32_t;
pub type uid_t = __uid_t;
pub type __pid_t = __kernel_pid_t;
pub type pid_t = __pid_t;
pub type __id_t = u32;
pub type id_t = __id_t;
pub type blkcnt_t = ::std::os::raw::c_ulong;
pub type blksize_t = ::std::os::raw::c_ulong;
pub type caddr_t = __kernel_caddr_t;
pub type clock_t = __kernel_clock_t;
pub type __clockid_t = __kernel_clockid_t;
pub type clockid_t = __clockid_t;
pub type daddr_t = __kernel_daddr_t;
pub type fsblkcnt_t = ::std::os::raw::c_ulong;
pub type fsfilcnt_t = ::std::os::raw::c_ulong;
pub type __mode_t = __kernel_mode_t;
pub type mode_t = __mode_t;
pub type __key_t = __kernel_key_t;
pub type key_t = __key_t;
pub type __ino_t = __kernel_ino_t;
pub type ino_t = __ino_t;
pub type ino64_t = u64;
pub type __nlink_t = u32;
pub type nlink_t = __nlink_t;
pub type __timer_t = *mut ::std::os::raw::c_void;
pub type timer_t = __timer_t;
pub type __suseconds_t = __kernel_suseconds_t;
pub type suseconds_t = __suseconds_t;
pub type __useconds_t = u32;
pub type useconds_t = __useconds_t;
pub type dev_t = u32;
pub type __time_t = __kernel_time_t;
pub type time_t = __time_t;
// File offset types; `off64_t` is the explicit 64-bit variant used by the
// `*_64` entry points below.
pub type off_t = __kernel_off_t;
pub type loff_t = __kernel_loff_t;
pub type off64_t = loff_t;
pub type __socklen_t = i32;
pub type socklen_t = __socklen_t;
pub type ssize_t = __kernel_ssize_t;
// Legacy BSD-style unsigned shorthands.
pub type uint_t = ::std::os::raw::c_uint;
pub type uint = ::std::os::raw::c_uint;
pub type u_char = ::std::os::raw::c_uchar;
pub type u_short = ::std::os::raw::c_ushort;
pub type u_int = ::std::os::raw::c_uint;
pub type u_long = ::std::os::raw::c_ulong;
pub type u_int32_t = u32;
pub type u_int16_t = u16;
pub type u_int8_t = u8;
pub type u_int64_t = u64;
// Opaque handle types for the NDK asset API; never constructed from Rust,
// only received as raw pointers from the C side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AAssetManager {
    _unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AAssetDir {
    _unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AAsset {
    _unused: [u8; 0],
}
// Asset open modes passed to `AAssetManager_open`.
pub const AASSET_MODE_UNKNOWN: ::std::os::raw::c_uint = 0;
pub const AASSET_MODE_RANDOM: ::std::os::raw::c_uint = 1;
pub const AASSET_MODE_STREAMING: ::std::os::raw::c_uint = 2;
pub const AASSET_MODE_BUFFER: ::std::os::raw::c_uint = 3;
pub type _bindgen_ty_1 = ::std::os::raw::c_uint;
// --- NDK asset manager API (presumably from android/asset_manager.h —
// confirm against the NDK headers this binding was generated from). ---
extern "C" {
    pub fn AAssetManager_openDir(
        mgr: *mut AAssetManager,
        dirName: *const ::std::os::raw::c_char,
    ) -> *mut AAssetDir;
}
extern "C" {
    pub fn AAssetManager_open(
        mgr: *mut AAssetManager,
        filename: *const ::std::os::raw::c_char,
        mode: ::std::os::raw::c_int,
    ) -> *mut AAsset;
}
extern "C" {
    pub fn AAssetDir_getNextFileName(assetDir: *mut AAssetDir) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn AAssetDir_rewind(assetDir: *mut AAssetDir);
}
extern "C" {
    pub fn AAssetDir_close(assetDir: *mut AAssetDir);
}
extern "C" {
    pub fn AAsset_read(
        asset: *mut AAsset,
        buf: *mut ::std::os::raw::c_void,
        count: size_t,
    ) -> ::std::os::raw::c_int;
}
// 32-bit (`off_t`) and explicit 64-bit (`off64_t`) variants are declared in
// parallel for the seek/length/fd entry points.
extern "C" {
    pub fn AAsset_seek(asset: *mut AAsset, offset: off_t, whence: ::std::os::raw::c_int) -> off_t;
}
extern "C" {
    pub fn AAsset_seek64(
        asset: *mut AAsset,
        offset: off64_t,
        whence: ::std::os::raw::c_int,
    ) -> off64_t;
}
extern "C" {
    pub fn AAsset_close(asset: *mut AAsset);
}
extern "C" {
    pub fn AAsset_getBuffer(asset: *mut AAsset) -> *const ::std::os::raw::c_void;
}
extern "C" {
    pub fn AAsset_getLength(asset: *mut AAsset) -> off_t;
}
extern "C" {
    pub fn AAsset_getLength64(asset: *mut AAsset) -> off64_t;
}
extern "C" {
    pub fn AAsset_getRemainingLength(asset: *mut AAsset) -> off_t;
}
extern "C" {
    pub fn AAsset_getRemainingLength64(asset: *mut AAsset) -> off64_t;
}
extern "C" {
    pub fn AAsset_openFileDescriptor(
        asset: *mut AAsset,
        outStart: *mut off_t,
        outLength: *mut off_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AAsset_openFileDescriptor64(
        asset: *mut AAsset,
        outStart: *mut off64_t,
        outLength: *mut off64_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AAsset_isAllocated(asset: *mut AAsset) -> ::std::os::raw::c_int;
}
// C variadic argument list aliases.
pub type va_list = __builtin_va_list;
pub type __gnuc_va_list = __builtin_va_list;
// JNI thread-attach arguments (see the JNI invocation API).
#[repr(C)]
pub struct JavaVMAttachArgs {
    pub version: jint,
    pub name: *const ::std::os::raw::c_char,
    pub group: jobject,
}
#[test]
fn bindgen_test_layout_JavaVMAttachArgs() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    // NOTE(review): 4-byte pointer fields imply a 32-bit target ABI — confirm
    // against the target this binding was generated for.
    const UNINIT: ::std::mem::MaybeUninit<JavaVMAttachArgs> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<JavaVMAttachArgs>(),
        12usize,
        concat!("Size of: ", stringify!(JavaVMAttachArgs))
    );
    assert_eq!(
        ::std::mem::align_of::<JavaVMAttachArgs>(),
        4usize,
        concat!("Alignment of ", stringify!(JavaVMAttachArgs))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).version) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMAttachArgs),
            "::",
            stringify!(version)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).name) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMAttachArgs),
            "::",
            stringify!(name)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).group) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMAttachArgs),
            "::",
            stringify!(group)
        )
    );
}
// A single JVM startup option (string plus optional extra data), as used in
// `JavaVMInitArgs` below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct JavaVMOption {
    pub optionString: *const ::std::os::raw::c_char,
    pub extraInfo: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout_JavaVMOption() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    const UNINIT: ::std::mem::MaybeUninit<JavaVMOption> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<JavaVMOption>(),
        8usize,
        concat!("Size of: ", stringify!(JavaVMOption))
    );
    assert_eq!(
        ::std::mem::align_of::<JavaVMOption>(),
        4usize,
        concat!("Alignment of ", stringify!(JavaVMOption))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).optionString) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMOption),
            "::",
            stringify!(optionString)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).extraInfo) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMOption),
            "::",
            stringify!(extraInfo)
        )
    );
}
// JVM creation arguments: an array of `nOptions` `JavaVMOption` entries plus
// the requested JNI version.
#[repr(C)]
pub struct JavaVMInitArgs {
    pub version: jint,
    pub nOptions: jint,
    pub options: *mut JavaVMOption,
    pub ignoreUnrecognized: jboolean,
}
#[test]
fn bindgen_test_layout_JavaVMInitArgs() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    const UNINIT: ::std::mem::MaybeUninit<JavaVMInitArgs> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<JavaVMInitArgs>(),
        16usize,
        concat!("Size of: ", stringify!(JavaVMInitArgs))
    );
    assert_eq!(
        ::std::mem::align_of::<JavaVMInitArgs>(),
        4usize,
        concat!("Alignment of ", stringify!(JavaVMInitArgs))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).version) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMInitArgs),
            "::",
            stringify!(version)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nOptions) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMInitArgs),
            "::",
            stringify!(nOptions)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).options) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMInitArgs),
            "::",
            stringify!(options)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).ignoreUnrecognized) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(JavaVMInitArgs),
            "::",
            stringify!(ignoreUnrecognized)
        )
    );
}
extern "C" {
    pub fn AAssetManager_fromJava(env: *mut JNIEnv, assetManager: jobject) -> *mut AAssetManager;
}
// Result codes for the AndroidBitmap_* functions (negative values are errors).
pub const ANDROID_BITMAP_RESULT_SUCCESS: ::std::os::raw::c_int = 0;
pub const ANDROID_BITMAP_RESULT_BAD_PARAMETER: ::std::os::raw::c_int = -1;
pub const ANDROID_BITMAP_RESULT_JNI_EXCEPTION: ::std::os::raw::c_int = -2;
pub const ANDROID_BITMAP_RESULT_ALLOCATION_FAILED: ::std::os::raw::c_int = -3;
pub type _bindgen_ty_2 = ::std::os::raw::c_int;
// Pixel formats reported in `AndroidBitmapInfo::format`.
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_NONE: AndroidBitmapFormat = 0;
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_RGBA_8888: AndroidBitmapFormat = 1;
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_RGB_565: AndroidBitmapFormat = 4;
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_RGBA_4444: AndroidBitmapFormat = 7;
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_A_8: AndroidBitmapFormat = 8;
pub const AndroidBitmapFormat_ANDROID_BITMAP_FORMAT_RGBA_F16: AndroidBitmapFormat = 9;
pub type AndroidBitmapFormat = ::std::os::raw::c_uint;
// Alpha-handling flag values; `_MASK`/`_SHIFT` extract the alpha bits from
// `AndroidBitmapInfo::flags`.
pub const ANDROID_BITMAP_FLAGS_ALPHA_PREMUL: ::std::os::raw::c_uint = 0;
pub const ANDROID_BITMAP_FLAGS_ALPHA_OPAQUE: ::std::os::raw::c_uint = 1;
pub const ANDROID_BITMAP_FLAGS_ALPHA_UNPREMUL: ::std::os::raw::c_uint = 2;
pub const ANDROID_BITMAP_FLAGS_ALPHA_MASK: ::std::os::raw::c_uint = 3;
pub const ANDROID_BITMAP_FLAGS_ALPHA_SHIFT: ::std::os::raw::c_uint = 0;
pub type _bindgen_ty_3 = ::std::os::raw::c_uint;
// High bit flag (i32::MIN) marking hardware-backed bitmaps.
pub const ANDROID_BITMAP_FLAGS_IS_HARDWARE: ::std::os::raw::c_int = -2147483648;
pub type _bindgen_ty_4 = ::std::os::raw::c_int;
// Bitmap geometry/format description filled in by `AndroidBitmap_getInfo`.
// `stride` is the row length in bytes.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AndroidBitmapInfo {
    pub width: u32,
    pub height: u32,
    pub stride: u32,
    pub format: i32,
    pub flags: u32,
}
#[test]
fn bindgen_test_layout_AndroidBitmapInfo() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    const UNINIT: ::std::mem::MaybeUninit<AndroidBitmapInfo> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AndroidBitmapInfo>(),
        20usize,
        concat!("Size of: ", stringify!(AndroidBitmapInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<AndroidBitmapInfo>(),
        4usize,
        concat!("Alignment of ", stringify!(AndroidBitmapInfo))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).width) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AndroidBitmapInfo),
            "::",
            stringify!(width)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).height) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AndroidBitmapInfo),
            "::",
            stringify!(height)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).stride) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(AndroidBitmapInfo),
            "::",
            stringify!(stride)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).format) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(AndroidBitmapInfo),
            "::",
            stringify!(format)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(AndroidBitmapInfo),
            "::",
            stringify!(flags)
        )
    );
}
// --- Bitmap pixel access / compression API. ---
extern "C" {
    pub fn AndroidBitmap_getInfo(
        env: *mut JNIEnv,
        jbitmap: jobject,
        info: *mut AndroidBitmapInfo,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AndroidBitmap_getDataSpace(env: *mut JNIEnv, jbitmap: jobject) -> i32;
}
extern "C" {
    pub fn AndroidBitmap_lockPixels(
        env: *mut JNIEnv,
        jbitmap: jobject,
        addrPtr: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AndroidBitmap_unlockPixels(env: *mut JNIEnv, jbitmap: jobject) -> ::std::os::raw::c_int;
}
// Output formats accepted by `AndroidBitmap_compress` (note: no value 2).
pub const AndroidBitmapCompressFormat_ANDROID_BITMAP_COMPRESS_FORMAT_JPEG:
    AndroidBitmapCompressFormat = 0;
pub const AndroidBitmapCompressFormat_ANDROID_BITMAP_COMPRESS_FORMAT_PNG:
    AndroidBitmapCompressFormat = 1;
pub const AndroidBitmapCompressFormat_ANDROID_BITMAP_COMPRESS_FORMAT_WEBP_LOSSY:
    AndroidBitmapCompressFormat = 3;
pub const AndroidBitmapCompressFormat_ANDROID_BITMAP_COMPRESS_FORMAT_WEBP_LOSSLESS:
    AndroidBitmapCompressFormat = 4;
pub type AndroidBitmapCompressFormat = ::std::os::raw::c_uint;
// Callback receiving successive chunks of compressed output; returns a bool
// (C side interprets false as "abort").
pub type AndroidBitmap_CompressWriteFunc = ::std::option::Option<
    unsafe extern "C" fn(
        userContext: *mut ::std::os::raw::c_void,
        data: *const ::std::os::raw::c_void,
        size: size_t,
    ) -> bool,
>;
extern "C" {
    pub fn AndroidBitmap_compress(
        info: *const AndroidBitmapInfo,
        dataspace: i32,
        pixels: *const ::std::os::raw::c_void,
        format: i32,
        quality: i32,
        userContext: *mut ::std::os::raw::c_void,
        fn_: AndroidBitmap_CompressWriteFunc,
    ) -> ::std::os::raw::c_int;
}
// Opaque hardware buffer handle.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHardwareBuffer {
    _unused: [u8; 0],
}
extern "C" {
    pub fn AndroidBitmap_getHardwareBuffer(
        env: *mut JNIEnv,
        bitmap: jobject,
        outBuffer: *mut *mut AHardwareBuffer,
    ) -> ::std::os::raw::c_int;
}
// --- Choreographer (vsync callback) API. ---
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AChoreographer {
    _unused: [u8; 0],
}
// Legacy callback taking a C `long` timestamp; the `64` variant below uses an
// explicit i64 and supersedes it on 32-bit targets.
pub type AChoreographer_frameCallback = ::std::option::Option<
    unsafe extern "C" fn(frameTimeNanos: ::std::os::raw::c_long, data: *mut ::std::os::raw::c_void),
>;
pub type AChoreographer_frameCallback64 = ::std::option::Option<
    unsafe extern "C" fn(frameTimeNanos: i64, data: *mut ::std::os::raw::c_void),
>;
pub type AChoreographer_refreshRateCallback = ::std::option::Option<
    unsafe extern "C" fn(vsyncPeriodNanos: i64, data: *mut ::std::os::raw::c_void),
>;
extern "C" {
    pub fn AChoreographer_getInstance() -> *mut AChoreographer;
}
extern "C" {
    pub fn AChoreographer_postFrameCallback(
        choreographer: *mut AChoreographer,
        callback: AChoreographer_frameCallback,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AChoreographer_postFrameCallbackDelayed(
        choreographer: *mut AChoreographer,
        callback: AChoreographer_frameCallback,
        data: *mut ::std::os::raw::c_void,
        delayMillis: ::std::os::raw::c_long,
    );
}
extern "C" {
    pub fn AChoreographer_postFrameCallback64(
        choreographer: *mut AChoreographer,
        callback: AChoreographer_frameCallback64,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AChoreographer_postFrameCallbackDelayed64(
        choreographer: *mut AChoreographer,
        callback: AChoreographer_frameCallback64,
        data: *mut ::std::os::raw::c_void,
        delayMillis: u32,
    );
}
extern "C" {
    pub fn AChoreographer_registerRefreshRateCallback(
        choreographer: *mut AChoreographer,
        arg1: AChoreographer_refreshRateCallback,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AChoreographer_unregisterRefreshRateCallback(
        choreographer: *mut AChoreographer,
        arg1: AChoreographer_refreshRateCallback,
        data: *mut ::std::os::raw::c_void,
    );
}
// Opaque device-configuration handle used by the AConfiguration_* API below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AConfiguration {
    _unused: [u8; 0],
}
// --- AConfiguration enumerated values. Each ACONFIGURATION_<FIELD>_<VALUE>
// group is the value domain of the matching getter/setter below; the final
// ACONFIGURATION_<FIELD> group is a bitmask of which fields differ, as
// returned by `AConfiguration_diff`. ---
pub const ACONFIGURATION_ORIENTATION_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_ORIENTATION_PORT: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_ORIENTATION_LAND: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_ORIENTATION_SQUARE: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_TOUCHSCREEN_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_TOUCHSCREEN_NOTOUCH: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_TOUCHSCREEN_STYLUS: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_TOUCHSCREEN_FINGER: ::std::os::raw::c_uint = 3;
// Screen densities in dpi.
pub const ACONFIGURATION_DENSITY_DEFAULT: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_DENSITY_LOW: ::std::os::raw::c_uint = 120;
pub const ACONFIGURATION_DENSITY_MEDIUM: ::std::os::raw::c_uint = 160;
pub const ACONFIGURATION_DENSITY_TV: ::std::os::raw::c_uint = 213;
pub const ACONFIGURATION_DENSITY_HIGH: ::std::os::raw::c_uint = 240;
pub const ACONFIGURATION_DENSITY_XHIGH: ::std::os::raw::c_uint = 320;
pub const ACONFIGURATION_DENSITY_XXHIGH: ::std::os::raw::c_uint = 480;
pub const ACONFIGURATION_DENSITY_XXXHIGH: ::std::os::raw::c_uint = 640;
pub const ACONFIGURATION_DENSITY_ANY: ::std::os::raw::c_uint = 65534;
pub const ACONFIGURATION_DENSITY_NONE: ::std::os::raw::c_uint = 65535;
pub const ACONFIGURATION_KEYBOARD_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_KEYBOARD_NOKEYS: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_KEYBOARD_QWERTY: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_KEYBOARD_12KEY: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_NAVIGATION_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_NAVIGATION_NONAV: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_NAVIGATION_DPAD: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_NAVIGATION_TRACKBALL: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_NAVIGATION_WHEEL: ::std::os::raw::c_uint = 4;
pub const ACONFIGURATION_KEYSHIDDEN_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_KEYSHIDDEN_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_KEYSHIDDEN_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_KEYSHIDDEN_SOFT: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_NAVHIDDEN_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_NAVHIDDEN_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_NAVHIDDEN_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_SCREENSIZE_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_SCREENSIZE_SMALL: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_SCREENSIZE_NORMAL: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_SCREENSIZE_LARGE: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_SCREENSIZE_XLARGE: ::std::os::raw::c_uint = 4;
pub const ACONFIGURATION_SCREENLONG_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_SCREENLONG_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_SCREENLONG_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_SCREENROUND_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_SCREENROUND_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_SCREENROUND_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_WIDE_COLOR_GAMUT_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_WIDE_COLOR_GAMUT_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_WIDE_COLOR_GAMUT_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_HDR_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_HDR_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_HDR_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_UI_MODE_TYPE_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_UI_MODE_TYPE_NORMAL: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_UI_MODE_TYPE_DESK: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_UI_MODE_TYPE_CAR: ::std::os::raw::c_uint = 3;
pub const ACONFIGURATION_UI_MODE_TYPE_TELEVISION: ::std::os::raw::c_uint = 4;
pub const ACONFIGURATION_UI_MODE_TYPE_APPLIANCE: ::std::os::raw::c_uint = 5;
pub const ACONFIGURATION_UI_MODE_TYPE_WATCH: ::std::os::raw::c_uint = 6;
pub const ACONFIGURATION_UI_MODE_TYPE_VR_HEADSET: ::std::os::raw::c_uint = 7;
pub const ACONFIGURATION_UI_MODE_NIGHT_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_UI_MODE_NIGHT_NO: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_UI_MODE_NIGHT_YES: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_SCREEN_WIDTH_DP_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_SCREEN_HEIGHT_DP_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_SMALLEST_SCREEN_WIDTH_DP_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_LAYOUTDIR_ANY: ::std::os::raw::c_uint = 0;
pub const ACONFIGURATION_LAYOUTDIR_LTR: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_LAYOUTDIR_RTL: ::std::os::raw::c_uint = 2;
// Per-field bits (powers of two) combinable into a diff mask.
pub const ACONFIGURATION_MCC: ::std::os::raw::c_uint = 1;
pub const ACONFIGURATION_MNC: ::std::os::raw::c_uint = 2;
pub const ACONFIGURATION_LOCALE: ::std::os::raw::c_uint = 4;
pub const ACONFIGURATION_TOUCHSCREEN: ::std::os::raw::c_uint = 8;
pub const ACONFIGURATION_KEYBOARD: ::std::os::raw::c_uint = 16;
pub const ACONFIGURATION_KEYBOARD_HIDDEN: ::std::os::raw::c_uint = 32;
pub const ACONFIGURATION_NAVIGATION: ::std::os::raw::c_uint = 64;
pub const ACONFIGURATION_ORIENTATION: ::std::os::raw::c_uint = 128;
pub const ACONFIGURATION_DENSITY: ::std::os::raw::c_uint = 256;
pub const ACONFIGURATION_SCREEN_SIZE: ::std::os::raw::c_uint = 512;
pub const ACONFIGURATION_VERSION: ::std::os::raw::c_uint = 1024;
pub const ACONFIGURATION_SCREEN_LAYOUT: ::std::os::raw::c_uint = 2048;
pub const ACONFIGURATION_UI_MODE: ::std::os::raw::c_uint = 4096;
pub const ACONFIGURATION_SMALLEST_SCREEN_SIZE: ::std::os::raw::c_uint = 8192;
pub const ACONFIGURATION_LAYOUTDIR: ::std::os::raw::c_uint = 16384;
pub const ACONFIGURATION_SCREEN_ROUND: ::std::os::raw::c_uint = 32768;
pub const ACONFIGURATION_COLOR_MODE: ::std::os::raw::c_uint = 65536;
pub const ACONFIGURATION_MNC_ZERO: ::std::os::raw::c_uint = 65535;
pub type _bindgen_ty_5 = ::std::os::raw::c_uint;
// --- AConfiguration lifecycle and per-field accessors. Getter/setter pairs
// mirror the ACONFIGURATION_* value groups declared above. ---
extern "C" {
    pub fn AConfiguration_new() -> *mut AConfiguration;
}
extern "C" {
    pub fn AConfiguration_delete(config: *mut AConfiguration);
}
extern "C" {
    pub fn AConfiguration_fromAssetManager(out: *mut AConfiguration, am: *mut AAssetManager);
}
extern "C" {
    pub fn AConfiguration_copy(dest: *mut AConfiguration, src: *mut AConfiguration);
}
extern "C" {
    pub fn AConfiguration_getMcc(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setMcc(config: *mut AConfiguration, mcc: i32);
}
extern "C" {
    pub fn AConfiguration_getMnc(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setMnc(config: *mut AConfiguration, mnc: i32);
}
extern "C" {
    pub fn AConfiguration_getLanguage(
        config: *mut AConfiguration,
        outLanguage: *mut ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AConfiguration_setLanguage(
        config: *mut AConfiguration,
        language: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AConfiguration_getCountry(
        config: *mut AConfiguration,
        outCountry: *mut ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AConfiguration_setCountry(
        config: *mut AConfiguration,
        country: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AConfiguration_getOrientation(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setOrientation(config: *mut AConfiguration, orientation: i32);
}
extern "C" {
    pub fn AConfiguration_getTouchscreen(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setTouchscreen(config: *mut AConfiguration, touchscreen: i32);
}
extern "C" {
    pub fn AConfiguration_getDensity(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setDensity(config: *mut AConfiguration, density: i32);
}
extern "C" {
    pub fn AConfiguration_getKeyboard(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setKeyboard(config: *mut AConfiguration, keyboard: i32);
}
extern "C" {
    pub fn AConfiguration_getNavigation(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setNavigation(config: *mut AConfiguration, navigation: i32);
}
extern "C" {
    pub fn AConfiguration_getKeysHidden(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setKeysHidden(config: *mut AConfiguration, keysHidden: i32);
}
extern "C" {
    pub fn AConfiguration_getNavHidden(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setNavHidden(config: *mut AConfiguration, navHidden: i32);
}
extern "C" {
    pub fn AConfiguration_getSdkVersion(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setSdkVersion(config: *mut AConfiguration, sdkVersion: i32);
}
extern "C" {
    pub fn AConfiguration_getScreenSize(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setScreenSize(config: *mut AConfiguration, screenSize: i32);
}
extern "C" {
    pub fn AConfiguration_getScreenLong(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setScreenLong(config: *mut AConfiguration, screenLong: i32);
}
extern "C" {
    pub fn AConfiguration_getScreenRound(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setScreenRound(config: *mut AConfiguration, screenRound: i32);
}
extern "C" {
    pub fn AConfiguration_getUiModeType(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setUiModeType(config: *mut AConfiguration, uiModeType: i32);
}
extern "C" {
    pub fn AConfiguration_getUiModeNight(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setUiModeNight(config: *mut AConfiguration, uiModeNight: i32);
}
extern "C" {
    pub fn AConfiguration_getScreenWidthDp(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setScreenWidthDp(config: *mut AConfiguration, value: i32);
}
extern "C" {
    pub fn AConfiguration_getScreenHeightDp(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setScreenHeightDp(config: *mut AConfiguration, value: i32);
}
extern "C" {
    pub fn AConfiguration_getSmallestScreenWidthDp(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setSmallestScreenWidthDp(config: *mut AConfiguration, value: i32);
}
extern "C" {
    pub fn AConfiguration_getLayoutDirection(config: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_setLayoutDirection(config: *mut AConfiguration, value: i32);
}
// Comparison helpers over whole configurations.
extern "C" {
    pub fn AConfiguration_diff(config1: *mut AConfiguration, config2: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_match(base: *mut AConfiguration, requested: *mut AConfiguration) -> i32;
}
extern "C" {
    pub fn AConfiguration_isBetterThan(
        base: *mut AConfiguration,
        test: *mut AConfiguration,
        requested: *mut AConfiguration,
    ) -> i32;
}
// Quotient/remainder result of `imaxdiv` (C inttypes.h).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct imaxdiv_t {
    pub quot: intmax_t,
    pub rem: intmax_t,
}
#[test]
fn bindgen_test_layout_imaxdiv_t() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    const UNINIT: ::std::mem::MaybeUninit<imaxdiv_t> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<imaxdiv_t>(),
        16usize,
        concat!("Size of: ", stringify!(imaxdiv_t))
    );
    assert_eq!(
        ::std::mem::align_of::<imaxdiv_t>(),
        8usize,
        concat!("Alignment of ", stringify!(imaxdiv_t))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).quot) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(imaxdiv_t),
            "::",
            stringify!(quot)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).rem) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(imaxdiv_t),
            "::",
            stringify!(rem)
        )
    );
}
// --- C inttypes.h maximum-width integer helpers. ---
extern "C" {
    pub fn imaxabs(__i: intmax_t) -> intmax_t;
}
extern "C" {
    pub fn imaxdiv(__numerator: intmax_t, __denominator: intmax_t) -> imaxdiv_t;
}
extern "C" {
    pub fn strtoimax(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> intmax_t;
}
extern "C" {
    pub fn strtoumax(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> uintmax_t;
}
extern "C" {
    pub fn wcstoimax(
        __s: *const wchar_t,
        __end_ptr: *mut *mut wchar_t,
        __base: ::std::os::raw::c_int,
    ) -> intmax_t;
}
extern "C" {
    pub fn wcstoumax(
        __s: *const wchar_t,
        __end_ptr: *mut *mut wchar_t,
        __base: ::std::os::raw::c_int,
    ) -> uintmax_t;
}
// Color data-space identifiers (packed bit-field encodings).
pub const ADataSpace_ADATASPACE_UNKNOWN: ADataSpace = 0;
pub const ADataSpace_ADATASPACE_SCRGB_LINEAR: ADataSpace = 406913024;
pub const ADataSpace_ADATASPACE_SRGB: ADataSpace = 142671872;
pub const ADataSpace_ADATASPACE_SCRGB: ADataSpace = 411107328;
pub const ADataSpace_ADATASPACE_DISPLAY_P3: ADataSpace = 143261696;
pub const ADataSpace_ADATASPACE_BT2020_PQ: ADataSpace = 163971072;
pub const ADataSpace_ADATASPACE_ADOBE_RGB: ADataSpace = 151715840;
pub const ADataSpace_ADATASPACE_BT2020: ADataSpace = 147193856;
pub const ADataSpace_ADATASPACE_BT709: ADataSpace = 281083904;
pub const ADataSpace_ADATASPACE_DCI_P3: ADataSpace = 155844608;
pub const ADataSpace_ADATASPACE_SRGB_LINEAR: ADataSpace = 138477568;
pub type ADataSpace = ::std::os::raw::c_uint;
// Flag bits for `android_dlextinfo::flags`; each bit enables the matching
// field below. `VALID_FLAG_BITS` is the OR of all defined bits.
pub const ANDROID_DLEXT_RESERVED_ADDRESS: ::std::os::raw::c_uint = 1;
pub const ANDROID_DLEXT_RESERVED_ADDRESS_HINT: ::std::os::raw::c_uint = 2;
pub const ANDROID_DLEXT_WRITE_RELRO: ::std::os::raw::c_uint = 4;
pub const ANDROID_DLEXT_USE_RELRO: ::std::os::raw::c_uint = 8;
pub const ANDROID_DLEXT_USE_LIBRARY_FD: ::std::os::raw::c_uint = 16;
pub const ANDROID_DLEXT_USE_LIBRARY_FD_OFFSET: ::std::os::raw::c_uint = 32;
pub const ANDROID_DLEXT_FORCE_LOAD: ::std::os::raw::c_uint = 64;
pub const ANDROID_DLEXT_USE_NAMESPACE: ::std::os::raw::c_uint = 512;
pub const ANDROID_DLEXT_RESERVED_ADDRESS_RECURSIVE: ::std::os::raw::c_uint = 1024;
pub const ANDROID_DLEXT_VALID_FLAG_BITS: ::std::os::raw::c_uint = 1663;
pub type _bindgen_ty_6 = ::std::os::raw::c_uint;
// Opaque linker-namespace handle.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct android_namespace_t {
    _unused: [u8; 0],
}
// Extended dlopen parameters consumed by `android_dlopen_ext`; only the
// fields whose ANDROID_DLEXT_* flag bit is set in `flags` are read.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct android_dlextinfo {
    pub flags: u64,
    pub reserved_addr: *mut ::std::os::raw::c_void,
    pub reserved_size: size_t,
    pub relro_fd: ::std::os::raw::c_int,
    pub library_fd: ::std::os::raw::c_int,
    pub library_fd_offset: off64_t,
    pub library_namespace: *mut android_namespace_t,
}
#[test]
fn bindgen_test_layout_android_dlextinfo() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    // NOTE(review): `reserved_addr` at offset 8 with `reserved_size` at 12
    // implies 4-byte pointers, i.e. a 32-bit Android target — confirm.
    const UNINIT: ::std::mem::MaybeUninit<android_dlextinfo> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<android_dlextinfo>(),
        40usize,
        concat!("Size of: ", stringify!(android_dlextinfo))
    );
    assert_eq!(
        ::std::mem::align_of::<android_dlextinfo>(),
        8usize,
        concat!("Alignment of ", stringify!(android_dlextinfo))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved_addr) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(reserved_addr)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved_size) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(reserved_size)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).relro_fd) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(relro_fd)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).library_fd) as usize - ptr as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(library_fd)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).library_fd_offset) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(library_fd_offset)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).library_namespace) as usize - ptr as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(android_dlextinfo),
            "::",
            stringify!(library_namespace)
        )
    );
}
extern "C" {
    pub fn android_dlopen_ext(
        __filename: *const ::std::os::raw::c_char,
        __flags: ::std::os::raw::c_int,
        __info: *const android_dlextinfo,
    ) -> *mut ::std::os::raw::c_void;
}
// --- fdsan (file-descriptor sanitizer) ownership tags. The owner-type byte
// identifies which kind of object owns a descriptor. ---
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_GENERIC_00: android_fdsan_owner_type =
    0;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_GENERIC_FF: android_fdsan_owner_type =
    255;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_FILE: android_fdsan_owner_type = 1;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_DIR: android_fdsan_owner_type = 2;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_UNIQUE_FD: android_fdsan_owner_type = 3;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_SQLITE: android_fdsan_owner_type = 4;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_FILEINPUTSTREAM:
    android_fdsan_owner_type = 5;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_FILEOUTPUTSTREAM:
    android_fdsan_owner_type = 6;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_RANDOMACCESSFILE:
    android_fdsan_owner_type = 7;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_PARCELFILEDESCRIPTOR:
    android_fdsan_owner_type = 8;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_ART_FDFILE: android_fdsan_owner_type =
    9;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_DATAGRAMSOCKETIMPL:
    android_fdsan_owner_type = 10;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_SOCKETIMPL: android_fdsan_owner_type =
    11;
pub const android_fdsan_owner_type_ANDROID_FDSAN_OWNER_TYPE_ZIPARCHIVE: android_fdsan_owner_type =
    12;
pub type android_fdsan_owner_type = ::std::os::raw::c_uint;
extern "C" {
    pub fn android_fdsan_create_owner_tag(type_: android_fdsan_owner_type, tag: u64) -> u64;
}
extern "C" {
    pub fn android_fdsan_exchange_owner_tag(
        fd: ::std::os::raw::c_int,
        expected_tag: u64,
        new_tag: u64,
    );
}
extern "C" {
    pub fn android_fdsan_close_with_tag(
        fd: ::std::os::raw::c_int,
        tag: u64,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_fdsan_get_owner_tag(fd: ::std::os::raw::c_int) -> u64;
}
extern "C" {
    pub fn android_fdsan_get_tag_type(tag: u64) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn android_fdsan_get_tag_value(tag: u64) -> u64;
}
// Severity of fdsan violations, in increasing strictness.
pub const android_fdsan_error_level_ANDROID_FDSAN_ERROR_LEVEL_DISABLED: android_fdsan_error_level =
    0;
pub const android_fdsan_error_level_ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE: android_fdsan_error_level =
    1;
pub const android_fdsan_error_level_ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS:
    android_fdsan_error_level = 2;
pub const android_fdsan_error_level_ANDROID_FDSAN_ERROR_LEVEL_FATAL: android_fdsan_error_level = 3;
pub type android_fdsan_error_level = ::std::os::raw::c_uint;
extern "C" {
    pub fn android_fdsan_get_error_level() -> android_fdsan_error_level;
}
extern "C" {
    pub fn android_fdsan_set_error_level(
        new_level: android_fdsan_error_level,
    ) -> android_fdsan_error_level;
}
extern "C" {
    pub fn android_fdsan_set_error_level_from_property(
        default_level: android_fdsan_error_level,
    ) -> android_fdsan_error_level;
}
// --- Bridging between native fds and java.io.FileDescriptor objects. ---
extern "C" {
    pub fn AFileDescriptor_create(env: *mut JNIEnv) -> jobject;
}
extern "C" {
    pub fn AFileDescriptor_getFd(
        env: *mut JNIEnv,
        fileDescriptor: jobject,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AFileDescriptor_setFd(
        env: *mut JNIEnv,
        fileDescriptor: jobject,
        fd: ::std::os::raw::c_int,
    );
}
// Axis-aligned rectangle; edges in the order left, top, right, bottom.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ARect {
    pub left: i32,
    pub top: i32,
    pub right: i32,
    pub bottom: i32,
}
#[test]
fn bindgen_test_layout_ARect() {
    // ABI layout check. Offsets are taken via `MaybeUninit` + `addr_of!`
    // instead of dereferencing a null pointer (the old pattern is UB).
    const UNINIT: ::std::mem::MaybeUninit<ARect> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ARect>(),
        16usize,
        concat!("Size of: ", stringify!(ARect))
    );
    assert_eq!(
        ::std::mem::align_of::<ARect>(),
        4usize,
        concat!("Alignment of ", stringify!(ARect))
    );
    assert_eq!(
        // SAFETY: `ptr` is valid for the whole struct; `addr_of!` computes each
        // field address without reading the uninitialized bytes.
        unsafe { ::std::ptr::addr_of!((*ptr).left) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ARect),
            "::",
            stringify!(left)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).top) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ARect),
            "::",
            stringify!(top)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).right) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ARect),
            "::",
            stringify!(right)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bottom) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ARect),
            "::",
            stringify!(bottom)
        )
    );
}
// --- AHardwareBuffer pixel formats -----------------------------------------
// C enum `AHardwareBuffer_Format`; numeric values mirror the NDK header and
// must not be altered.
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM: AHardwareBuffer_Format = 1;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM: AHardwareBuffer_Format = 2;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM: AHardwareBuffer_Format = 3;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM: AHardwareBuffer_Format = 4;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT: AHardwareBuffer_Format =
    22;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM: AHardwareBuffer_Format =
    43;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_BLOB: AHardwareBuffer_Format = 33;
// Depth/stencil formats.
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_D16_UNORM: AHardwareBuffer_Format = 48;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_D24_UNORM: AHardwareBuffer_Format = 49;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT: AHardwareBuffer_Format =
    50;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_D32_FLOAT: AHardwareBuffer_Format = 51;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT: AHardwareBuffer_Format =
    52;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_S8_UINT: AHardwareBuffer_Format = 53;
pub const AHardwareBuffer_Format_AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420: AHardwareBuffer_Format = 35;
pub type AHardwareBuffer_Format = ::std::os::raw::c_uint;
// --- AHardwareBuffer usage flags -------------------------------------------
// C enum `AHardwareBuffer_UsageFlags`: bit flags combined into the `usage`
// field of `AHardwareBuffer_Desc`. CPU read/write access levels occupy the
// low nibbles (note READ_NEVER and WRITE_NEVER are both 0, i.e. defaults).
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_READ_NEVER:
    AHardwareBuffer_UsageFlags = 0;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_READ_RARELY:
    AHardwareBuffer_UsageFlags = 2;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN:
    AHardwareBuffer_UsageFlags = 3;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_READ_MASK:
    AHardwareBuffer_UsageFlags = 15;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER:
    AHardwareBuffer_UsageFlags = 0;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY:
    AHardwareBuffer_UsageFlags = 32;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN:
    AHardwareBuffer_UsageFlags = 48;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_CPU_WRITE_MASK:
    AHardwareBuffer_UsageFlags = 240;
// GPU / system usage bits. Note GPU_FRAMEBUFFER and GPU_COLOR_OUTPUT are
// intentionally the same bit (512) in the NDK header.
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE:
    AHardwareBuffer_UsageFlags = 256;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER:
    AHardwareBuffer_UsageFlags = 512;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT:
    AHardwareBuffer_UsageFlags = 512;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY:
    AHardwareBuffer_UsageFlags = 2048;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT:
    AHardwareBuffer_UsageFlags = 16384;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VIDEO_ENCODE:
    AHardwareBuffer_UsageFlags = 65536;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_SENSOR_DIRECT_DATA:
    AHardwareBuffer_UsageFlags = 8388608;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER:
    AHardwareBuffer_UsageFlags = 16777216;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP:
    AHardwareBuffer_UsageFlags = 33554432;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE:
    AHardwareBuffer_UsageFlags = 67108864;
// Vendor-defined usage bits (VENDOR_0..VENDOR_3 in the low dword,
// VENDOR_4..VENDOR_19 occupying high-order bits of the 64-bit flag word).
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_0: AHardwareBuffer_UsageFlags =
    268435456;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_1: AHardwareBuffer_UsageFlags =
    536870912;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_2: AHardwareBuffer_UsageFlags =
    1073741824;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_3: AHardwareBuffer_UsageFlags =
    2147483648;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_4: AHardwareBuffer_UsageFlags =
    281474976710656;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_5: AHardwareBuffer_UsageFlags =
    562949953421312;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_6: AHardwareBuffer_UsageFlags =
    1125899906842624;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_7: AHardwareBuffer_UsageFlags =
    2251799813685248;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_8: AHardwareBuffer_UsageFlags =
    4503599627370496;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_9: AHardwareBuffer_UsageFlags =
    9007199254740992;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_10: AHardwareBuffer_UsageFlags =
    18014398509481984;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_11: AHardwareBuffer_UsageFlags =
    36028797018963968;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_12: AHardwareBuffer_UsageFlags =
    72057594037927936;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_13: AHardwareBuffer_UsageFlags =
    144115188075855872;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_14: AHardwareBuffer_UsageFlags =
    288230376151711744;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_15: AHardwareBuffer_UsageFlags =
    576460752303423488;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_16: AHardwareBuffer_UsageFlags =
    1152921504606846976;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_17: AHardwareBuffer_UsageFlags =
    2305843009213693952;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_18: AHardwareBuffer_UsageFlags =
    4611686018427387904;
pub const AHardwareBuffer_UsageFlags_AHARDWAREBUFFER_USAGE_VENDOR_19: AHardwareBuffer_UsageFlags =
    9223372036854775808;
pub type AHardwareBuffer_UsageFlags = ::std::os::raw::c_ulonglong;
/// FFI mirror of the NDK `AHardwareBuffer_Desc` struct describing a buffer's
/// dimensions, format, and usage. `#[repr(C)]` pins the layout to the C
/// definition (verified by `bindgen_test_layout_AHardwareBuffer_Desc`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHardwareBuffer_Desc {
    pub width: u32,
    pub height: u32,
    pub layers: u32,
    // Expected to hold an `AHardwareBuffer_Format` value.
    pub format: u32,
    // Bitwise OR of `AHardwareBuffer_UsageFlags` values.
    pub usage: u64,
    pub stride: u32,
    // rfu0/rfu1: presumably "reserved for future use" — not documented here.
    pub rfu0: u32,
    pub rfu1: u64,
}
/// Layout check for `AHardwareBuffer_Desc`: size, alignment, and every field
/// offset must match the C struct these bindings were generated from.
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original `&(*ptr::null::<T>()).field`
/// pattern, which materializes a reference through a null pointer and is
/// undefined behavior (modern bindgen emits this same `MaybeUninit` form).
#[test]
fn bindgen_test_layout_AHardwareBuffer_Desc() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AHardwareBuffer_Desc> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHardwareBuffer_Desc>(),
        40usize,
        concat!("Size of: ", stringify!(AHardwareBuffer_Desc))
    );
    assert_eq!(
        ::std::mem::align_of::<AHardwareBuffer_Desc>(),
        8usize,
        concat!("Alignment of ", stringify!(AHardwareBuffer_Desc))
    );
    // SAFETY (all blocks below): `addr_of!` computes a field address without
    // creating a reference or reading the pointee, so the uninitialized
    // memory is never accessed.
    let offsets: [(usize, usize, &str); 8] = [
        (unsafe { ::std::ptr::addr_of!((*ptr).width) as usize - ptr as usize }, 0, "width"),
        (unsafe { ::std::ptr::addr_of!((*ptr).height) as usize - ptr as usize }, 4, "height"),
        (unsafe { ::std::ptr::addr_of!((*ptr).layers) as usize - ptr as usize }, 8, "layers"),
        (unsafe { ::std::ptr::addr_of!((*ptr).format) as usize - ptr as usize }, 12, "format"),
        (unsafe { ::std::ptr::addr_of!((*ptr).usage) as usize - ptr as usize }, 16, "usage"),
        (unsafe { ::std::ptr::addr_of!((*ptr).stride) as usize - ptr as usize }, 24, "stride"),
        (unsafe { ::std::ptr::addr_of!((*ptr).rfu0) as usize - ptr as usize }, 28, "rfu0"),
        (unsafe { ::std::ptr::addr_of!((*ptr).rfu1) as usize - ptr as usize }, 32, "rfu1"),
    ];
    for (actual, expected, field) in offsets {
        assert_eq!(
            actual, expected,
            "Offset of field: AHardwareBuffer_Desc::{}", field
        );
    }
}
/// FFI mirror of the NDK `AHardwareBuffer_Plane` struct: one plane of a
/// (possibly multi-planar) locked buffer. `#[repr(C)]` pins the layout to
/// the C definition.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHardwareBuffer_Plane {
    // Base address of this plane's pixel data.
    pub data: *mut ::std::os::raw::c_void,
    pub pixelStride: u32,
    pub rowStride: u32,
}
/// Layout check for `AHardwareBuffer_Plane`: size, alignment, and field
/// offsets must match the C struct these bindings were generated from.
/// (The 12-byte size / offset-4 `pixelStride` imply a 4-byte pointer, i.e.
/// the target these bindings were generated for is 32-bit.)
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original null-pointer-dereference pattern,
/// which is undefined behavior.
#[test]
fn bindgen_test_layout_AHardwareBuffer_Plane() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AHardwareBuffer_Plane> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHardwareBuffer_Plane>(),
        12usize,
        concat!("Size of: ", stringify!(AHardwareBuffer_Plane))
    );
    assert_eq!(
        ::std::mem::align_of::<AHardwareBuffer_Plane>(),
        4usize,
        concat!("Alignment of ", stringify!(AHardwareBuffer_Plane))
    );
    // SAFETY (all blocks below): `addr_of!` computes a field address without
    // creating a reference or reading the uninitialized pointee.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data) as usize - ptr as usize },
        0usize,
        concat!("Offset of field: ", stringify!(AHardwareBuffer_Plane), "::", stringify!(data))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pixelStride) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AHardwareBuffer_Plane),
            "::",
            stringify!(pixelStride)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).rowStride) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(AHardwareBuffer_Plane),
            "::",
            stringify!(rowStride)
        )
    );
}
/// FFI mirror of the NDK `AHardwareBuffer_Planes` struct: the set of planes
/// returned by `AHardwareBuffer_lockPlanes`. `#[repr(C)]` pins the layout to
/// the C definition.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHardwareBuffer_Planes {
    // Number of valid entries in `planes` (the array always has 4 slots).
    pub planeCount: u32,
    pub planes: [AHardwareBuffer_Plane; 4usize],
}
/// Layout check for `AHardwareBuffer_Planes`: size, alignment, and field
/// offsets must match the C struct these bindings were generated from.
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original null-pointer-dereference pattern,
/// which is undefined behavior.
#[test]
fn bindgen_test_layout_AHardwareBuffer_Planes() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AHardwareBuffer_Planes> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHardwareBuffer_Planes>(),
        52usize,
        concat!("Size of: ", stringify!(AHardwareBuffer_Planes))
    );
    assert_eq!(
        ::std::mem::align_of::<AHardwareBuffer_Planes>(),
        4usize,
        concat!("Alignment of ", stringify!(AHardwareBuffer_Planes))
    );
    // SAFETY (both blocks below): `addr_of!` computes a field address without
    // creating a reference or reading the uninitialized pointee.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).planeCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AHardwareBuffer_Planes),
            "::",
            stringify!(planeCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).planes) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AHardwareBuffer_Planes),
            "::",
            stringify!(planes)
        )
    );
}
// --- AHardwareBuffer API ---------------------------------------------------
// Bindgen-generated FFI declarations; signatures mirror the NDK header.
// The integer returns are NDK status codes (0 on success per convention —
// confirm against the NDK documentation for each call).
extern "C" {
    /// Allocates a buffer matching `desc`, storing the handle in `outBuffer`.
    pub fn AHardwareBuffer_allocate(
        desc: *const AHardwareBuffer_Desc,
        outBuffer: *mut *mut AHardwareBuffer,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Takes a reference on `buffer` (pair with `AHardwareBuffer_release`).
    pub fn AHardwareBuffer_acquire(buffer: *mut AHardwareBuffer);
}
extern "C" {
    /// Drops a reference on `buffer`.
    pub fn AHardwareBuffer_release(buffer: *mut AHardwareBuffer);
}
extern "C" {
    /// Fills `outDesc` with the buffer's description.
    pub fn AHardwareBuffer_describe(
        buffer: *const AHardwareBuffer,
        outDesc: *mut AHardwareBuffer_Desc,
    );
}
extern "C" {
    /// Locks the buffer for CPU access, returning the mapped address via
    /// `outVirtualAddress`. `fence` is a sync fence fd (see NDK docs for
    /// the -1 "no fence" convention).
    pub fn AHardwareBuffer_lock(
        buffer: *mut AHardwareBuffer,
        usage: u64,
        fence: i32,
        rect: *const ARect,
        outVirtualAddress: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Unlocks a previously locked buffer; a completion fence fd may be
    /// written through `fence`.
    pub fn AHardwareBuffer_unlock(
        buffer: *mut AHardwareBuffer,
        fence: *mut i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Sends the buffer's handle over a connected unix-domain socket.
    pub fn AHardwareBuffer_sendHandleToUnixSocket(
        buffer: *const AHardwareBuffer,
        socketFd: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Receives a buffer handle previously sent with
    /// `AHardwareBuffer_sendHandleToUnixSocket`.
    pub fn AHardwareBuffer_recvHandleFromUnixSocket(
        socketFd: ::std::os::raw::c_int,
        outBuffer: *mut *mut AHardwareBuffer,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Like `AHardwareBuffer_lock` but returns per-plane mappings for
    /// multi-planar formats.
    pub fn AHardwareBuffer_lockPlanes(
        buffer: *mut AHardwareBuffer,
        usage: u64,
        fence: i32,
        rect: *const ARect,
        outPlanes: *mut AHardwareBuffer_Planes,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Reports whether a buffer with the given description can be allocated.
    pub fn AHardwareBuffer_isSupported(desc: *const AHardwareBuffer_Desc) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Lock variant that also reports bytes-per-pixel and bytes-per-stride.
    pub fn AHardwareBuffer_lockAndGetInfo(
        buffer: *mut AHardwareBuffer,
        usage: u64,
        fence: i32,
        rect: *const ARect,
        outVirtualAddress: *mut *mut ::std::os::raw::c_void,
        outBytesPerPixel: *mut i32,
        outBytesPerStride: *mut i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Writes the buffer's system-wide unique id into `outId`.
    pub fn AHardwareBuffer_getId(
        buffer: *const AHardwareBuffer,
        outId: *mut u64,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Converts a Java `HardwareBuffer` object into a native handle via JNI.
    pub fn AHardwareBuffer_fromHardwareBuffer(
        env: *mut JNIEnv,
        hardwareBufferObj: jobject,
    ) -> *mut AHardwareBuffer;
}
extern "C" {
    /// Wraps a native buffer into a Java `HardwareBuffer` object via JNI.
    pub fn AHardwareBuffer_toHardwareBuffer(
        env: *mut JNIEnv,
        hardwareBuffer: *mut AHardwareBuffer,
    ) -> jobject;
}
// --- HDR metadata type tags ------------------------------------------------
// C enum `AHdrMetadataType`: selects which HDR metadata payload follows.
pub const AHdrMetadataType_HDR10_SMPTE2086: AHdrMetadataType = 1;
pub const AHdrMetadataType_HDR10_CTA861_3: AHdrMetadataType = 2;
pub const AHdrMetadataType_HDR10PLUS_SEI: AHdrMetadataType = 3;
pub type AHdrMetadataType = u32;
/// FFI mirror of the NDK `AColor_xy` struct: a CIE-style (x, y) chromaticity
/// coordinate pair used by the HDR metadata structs below. `#[repr(C)]`
/// pins the layout to the C definition.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AColor_xy {
    pub x: f32,
    pub y: f32,
}
/// Layout check for `AColor_xy`: size, alignment, and field offsets must
/// match the C struct these bindings were generated from.
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original null-pointer-dereference pattern,
/// which is undefined behavior.
#[test]
fn bindgen_test_layout_AColor_xy() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AColor_xy> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AColor_xy>(),
        8usize,
        concat!("Size of: ", stringify!(AColor_xy))
    );
    assert_eq!(
        ::std::mem::align_of::<AColor_xy>(),
        4usize,
        concat!("Alignment of ", stringify!(AColor_xy))
    );
    // SAFETY (both blocks below): `addr_of!` computes a field address without
    // creating a reference or reading the uninitialized pointee.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).x) as usize - ptr as usize },
        0usize,
        concat!("Offset of field: ", stringify!(AColor_xy), "::", stringify!(x))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).y) as usize - ptr as usize },
        4usize,
        concat!("Offset of field: ", stringify!(AColor_xy), "::", stringify!(y))
    );
}
/// FFI mirror of the NDK `AHdrMetadata_smpte2086` struct: SMPTE ST 2086
/// mastering-display metadata (display primaries, white point, luminance
/// range). `#[repr(C)]` pins the layout to the C definition.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHdrMetadata_smpte2086 {
    pub displayPrimaryRed: AColor_xy,
    pub displayPrimaryGreen: AColor_xy,
    pub displayPrimaryBlue: AColor_xy,
    pub whitePoint: AColor_xy,
    // Luminance bounds; units are defined by the standard, not visible here.
    pub maxLuminance: f32,
    pub minLuminance: f32,
}
/// Layout check for `AHdrMetadata_smpte2086`: size, alignment, and every
/// field offset must match the C struct these bindings were generated from.
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original null-pointer-dereference pattern,
/// which is undefined behavior (modern bindgen emits this same form).
#[test]
fn bindgen_test_layout_AHdrMetadata_smpte2086() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AHdrMetadata_smpte2086> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHdrMetadata_smpte2086>(),
        40usize,
        concat!("Size of: ", stringify!(AHdrMetadata_smpte2086))
    );
    assert_eq!(
        ::std::mem::align_of::<AHdrMetadata_smpte2086>(),
        4usize,
        concat!("Alignment of ", stringify!(AHdrMetadata_smpte2086))
    );
    // SAFETY (all entries below): `addr_of!` computes a field address without
    // creating a reference or reading the uninitialized pointee.
    let offsets: [(usize, usize, &str); 6] = [
        (
            unsafe { ::std::ptr::addr_of!((*ptr).displayPrimaryRed) as usize - ptr as usize },
            0,
            "displayPrimaryRed",
        ),
        (
            unsafe { ::std::ptr::addr_of!((*ptr).displayPrimaryGreen) as usize - ptr as usize },
            8,
            "displayPrimaryGreen",
        ),
        (
            unsafe { ::std::ptr::addr_of!((*ptr).displayPrimaryBlue) as usize - ptr as usize },
            16,
            "displayPrimaryBlue",
        ),
        (
            unsafe { ::std::ptr::addr_of!((*ptr).whitePoint) as usize - ptr as usize },
            24,
            "whitePoint",
        ),
        (
            unsafe { ::std::ptr::addr_of!((*ptr).maxLuminance) as usize - ptr as usize },
            32,
            "maxLuminance",
        ),
        (
            unsafe { ::std::ptr::addr_of!((*ptr).minLuminance) as usize - ptr as usize },
            36,
            "minLuminance",
        ),
    ];
    for (actual, expected, field) in offsets {
        assert_eq!(
            actual, expected,
            "Offset of field: AHdrMetadata_smpte2086::{}", field
        );
    }
}
/// FFI mirror of the NDK `AHdrMetadata_cta861_3` struct: CTA-861.3 content
/// light level metadata. `#[repr(C)]` pins the layout to the C definition.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHdrMetadata_cta861_3 {
    pub maxContentLightLevel: f32,
    pub maxFrameAverageLightLevel: f32,
}
/// Layout check for `AHdrMetadata_cta861_3`: size, alignment, and field
/// offsets must match the C struct these bindings were generated from.
///
/// Rewritten to compute field offsets from a `MaybeUninit` pointer with
/// `ptr::addr_of!` instead of the original null-pointer-dereference pattern,
/// which is undefined behavior.
#[test]
fn bindgen_test_layout_AHdrMetadata_cta861_3() {
    // Uninitialized storage: only its address is used, never its contents.
    const UNINIT: ::std::mem::MaybeUninit<AHdrMetadata_cta861_3> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHdrMetadata_cta861_3>(),
        8usize,
        concat!("Size of: ", stringify!(AHdrMetadata_cta861_3))
    );
    assert_eq!(
        ::std::mem::align_of::<AHdrMetadata_cta861_3>(),
        4usize,
        concat!("Alignment of ", stringify!(AHdrMetadata_cta861_3))
    );
    // SAFETY (both blocks below): `addr_of!` computes a field address without
    // creating a reference or reading the uninitialized pointee.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).maxContentLightLevel) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AHdrMetadata_cta861_3),
            "::",
            stringify!(maxContentLightLevel)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).maxFrameAverageLightLevel) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AHdrMetadata_cta861_3),
            "::",
            stringify!(maxFrameAverageLightLevel)
        )
    );
}
// --- AImageDecoder result codes --------------------------------------------
// Anonymous C enum (bindgen names it `_bindgen_ty_7`): 0 is success, all
// error codes are negative.
pub const ANDROID_IMAGE_DECODER_SUCCESS: ::std::os::raw::c_int = 0;
pub const ANDROID_IMAGE_DECODER_INCOMPLETE: ::std::os::raw::c_int = -1;
pub const ANDROID_IMAGE_DECODER_ERROR: ::std::os::raw::c_int = -2;
pub const ANDROID_IMAGE_DECODER_INVALID_CONVERSION: ::std::os::raw::c_int = -3;
pub const ANDROID_IMAGE_DECODER_INVALID_SCALE: ::std::os::raw::c_int = -4;
pub const ANDROID_IMAGE_DECODER_BAD_PARAMETER: ::std::os::raw::c_int = -5;
pub const ANDROID_IMAGE_DECODER_INVALID_INPUT: ::std::os::raw::c_int = -6;
pub const ANDROID_IMAGE_DECODER_SEEK_ERROR: ::std::os::raw::c_int = -7;
pub const ANDROID_IMAGE_DECODER_INTERNAL_ERROR: ::std::os::raw::c_int = -8;
pub const ANDROID_IMAGE_DECODER_UNSUPPORTED_FORMAT: ::std::os::raw::c_int = -9;
pub const ANDROID_IMAGE_DECODER_FINISHED: ::std::os::raw::c_int = -10;
pub const ANDROID_IMAGE_DECODER_INVALID_STATE: ::std::os::raw::c_int = -11;
pub type _bindgen_ty_7 = ::std::os::raw::c_int;
extern "C" {
    /// Returns a readable C-string name for one of the result codes above.
    pub fn AImageDecoder_resultToString(
        arg1: ::std::os::raw::c_int,
    ) -> *const ::std::os::raw::c_char;
}
/// Opaque handle to an NDK image decoder; only ever used behind a pointer
/// (the zero-sized array makes the type unconstructible from Rust).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageDecoder {
    _unused: [u8; 0],
}
// --- AImageDecoder creation and configuration ------------------------------
// All integer returns are the ANDROID_IMAGE_DECODER_* result codes above.
extern "C" {
    /// Creates a decoder reading from an `AAsset`.
    pub fn AImageDecoder_createFromAAsset(
        asset: *mut AAsset,
        outDecoder: *mut *mut AImageDecoder,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Creates a decoder reading from a file descriptor.
    pub fn AImageDecoder_createFromFd(
        fd: ::std::os::raw::c_int,
        outDecoder: *mut *mut AImageDecoder,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Creates a decoder reading from an in-memory buffer of `length` bytes.
    pub fn AImageDecoder_createFromBuffer(
        buffer: *const ::std::os::raw::c_void,
        length: size_t,
        outDecoder: *mut *mut AImageDecoder,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Destroys a decoder created by one of the constructors above.
    pub fn AImageDecoder_delete(decoder: *mut AImageDecoder);
}
extern "C" {
    /// Selects the output pixel format (an `ANDROID_BITMAP_FORMAT_*` value —
    /// confirm against the NDK bitmap header).
    pub fn AImageDecoder_setAndroidBitmapFormat(
        decoder: *mut AImageDecoder,
        format: i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Requests unpremultiplied alpha in the decoded output.
    pub fn AImageDecoder_setUnpremultipliedRequired(
        decoder: *mut AImageDecoder,
        unpremultipliedRequired: bool,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Selects the output dataspace (an `ADataSpace` value).
    pub fn AImageDecoder_setDataSpace(
        decoder: *mut AImageDecoder,
        dataspace: i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Scales the decoded output to `width` x `height`.
    pub fn AImageDecoder_setTargetSize(
        decoder: *mut AImageDecoder,
        width: i32,
        height: i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Computes the output dimensions for an integer `sampleSize` downscale,
    /// writing them through `width`/`height`.
    pub fn AImageDecoder_computeSampledSize(
        decoder: *const AImageDecoder,
        sampleSize: ::std::os::raw::c_int,
        width: *mut i32,
        height: *mut i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Restricts decoding to the given crop rectangle.
    pub fn AImageDecoder_setCrop(decoder: *mut AImageDecoder, crop: ARect)
        -> ::std::os::raw::c_int;
}
/// Opaque handle to the decoded image's header metadata; only ever used
/// behind a pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageDecoderHeaderInfo {
    _unused: [u8; 0],
}
extern "C" {
    /// Returns the header info for the decoder's current image.
    pub fn AImageDecoder_getHeaderInfo(
        decoder: *const AImageDecoder,
    ) -> *const AImageDecoderHeaderInfo;
}
extern "C" {
    pub fn AImageDecoderHeaderInfo_getWidth(arg1: *const AImageDecoderHeaderInfo) -> i32;
}
extern "C" {
    pub fn AImageDecoderHeaderInfo_getHeight(arg1: *const AImageDecoderHeaderInfo) -> i32;
}
extern "C" {
    /// Returns the image's MIME type as a C string.
    pub fn AImageDecoderHeaderInfo_getMimeType(
        arg1: *const AImageDecoderHeaderInfo,
    ) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn AImageDecoderHeaderInfo_getAndroidBitmapFormat(
        arg1: *const AImageDecoderHeaderInfo,
    ) -> i32;
}
extern "C" {
    pub fn AImageDecoderHeaderInfo_getAlphaFlags(
        arg1: *const AImageDecoderHeaderInfo,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AImageDecoderHeaderInfo_getDataSpace(arg1: *const AImageDecoderHeaderInfo) -> i32;
}
extern "C" {
    /// Returns the minimum stride (bytes per row) required by
    /// `AImageDecoder_decodeImage` for the current settings.
    pub fn AImageDecoder_getMinimumStride(decoder: *mut AImageDecoder) -> size_t;
}
extern "C" {
    /// Decodes into `pixels` (caller-allocated, `size` bytes, `stride` bytes
    /// per row); returns an ANDROID_IMAGE_DECODER_* result code.
    pub fn AImageDecoder_decodeImage(
        decoder: *mut AImageDecoder,
        pixels: *mut ::std::os::raw::c_void,
        stride: size_t,
        size: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// True if the image is animated (multiple frames).
    pub fn AImageDecoder_isAnimated(decoder: *mut AImageDecoder) -> bool;
}
// Sentinel repeat count for an animation that loops forever (i32::MAX).
pub const ANDROID_IMAGE_DECODER_INFINITE: ::std::os::raw::c_uint = 2147483647;
pub type _bindgen_ty_8 = ::std::os::raw::c_uint;
extern "C" {
    /// Number of times the animation repeats after the first play-through
    /// (may be `ANDROID_IMAGE_DECODER_INFINITE`).
    pub fn AImageDecoder_getRepeatCount(decoder: *mut AImageDecoder) -> i32;
}
extern "C" {
    /// Advances the decoder to the next animation frame.
    pub fn AImageDecoder_advanceFrame(decoder: *mut AImageDecoder) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Resets the decoder back to the first animation frame.
    pub fn AImageDecoder_rewind(decoder: *mut AImageDecoder) -> ::std::os::raw::c_int;
}
/// Opaque handle to per-frame metadata of an animated image; only ever used
/// behind a pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageDecoderFrameInfo {
    _unused: [u8; 0],
}
extern "C" {
    /// Allocates an empty frame-info object (free with
    /// `AImageDecoderFrameInfo_delete`).
    pub fn AImageDecoderFrameInfo_create() -> *mut AImageDecoderFrameInfo;
}
extern "C" {
    pub fn AImageDecoderFrameInfo_delete(info: *mut AImageDecoderFrameInfo);
}
extern "C" {
    /// Fills `info` with metadata for the decoder's current frame.
    pub fn AImageDecoder_getFrameInfo(
        decoder: *mut AImageDecoder,
        info: *mut AImageDecoderFrameInfo,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Frame display duration (time units per the NDK docs — confirm there).
    pub fn AImageDecoderFrameInfo_getDuration(info: *const AImageDecoderFrameInfo) -> i64;
}
extern "C" {
    /// Rectangle within the full image that this frame updates.
    pub fn AImageDecoderFrameInfo_getFrameRect(info: *const AImageDecoderFrameInfo) -> ARect;
}
extern "C" {
    pub fn AImageDecoderFrameInfo_hasAlphaWithinBounds(info: *const AImageDecoderFrameInfo)
        -> bool;
}
// Anonymous C enum (`_bindgen_ty_9`): how the previous frame region is
// treated before drawing the next frame.
pub const ANDROID_IMAGE_DECODER_DISPOSE_OP_NONE: ::std::os::raw::c_uint = 1;
pub const ANDROID_IMAGE_DECODER_DISPOSE_OP_BACKGROUND: ::std::os::raw::c_uint = 2;
pub const ANDROID_IMAGE_DECODER_DISPOSE_OP_PREVIOUS: ::std::os::raw::c_uint = 3;
pub type _bindgen_ty_9 = ::std::os::raw::c_uint;
extern "C" {
    pub fn AImageDecoderFrameInfo_getDisposeOp(info: *const AImageDecoderFrameInfo) -> i32;
}
// Anonymous C enum (`_bindgen_ty_10`): how the frame is blended over the
// existing canvas contents.
pub const ANDROID_IMAGE_DECODER_BLEND_OP_SRC: ::std::os::raw::c_uint = 1;
pub const ANDROID_IMAGE_DECODER_BLEND_OP_SRC_OVER: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_10 = ::std::os::raw::c_uint;
extern "C" {
    pub fn AImageDecoderFrameInfo_getBlendOp(info: *const AImageDecoderFrameInfo) -> i32;
}
extern "C" {
    /// When true, the decoder itself handles DISPOSE_OP_PREVIOUS frames.
    pub fn AImageDecoder_setInternallyHandleDisposePrevious(
        decoder: *mut AImageDecoder,
        handleInternally: bool,
    );
}
pub const AKEYCODE_UNKNOWN: ::std::os::raw::c_uint = 0;
pub const AKEYCODE_SOFT_LEFT: ::std::os::raw::c_uint = 1;
pub const AKEYCODE_SOFT_RIGHT: ::std::os::raw::c_uint = 2;
pub const AKEYCODE_HOME: ::std::os::raw::c_uint = 3;
pub const AKEYCODE_BACK: ::std::os::raw::c_uint = 4;
pub const AKEYCODE_CALL: ::std::os::raw::c_uint = 5;
pub const AKEYCODE_ENDCALL: ::std::os::raw::c_uint = 6;
pub const AKEYCODE_0: ::std::os::raw::c_uint = 7;
pub const AKEYCODE_1: ::std::os::raw::c_uint = 8;
pub const AKEYCODE_2: ::std::os::raw::c_uint = 9;
pub const AKEYCODE_3: ::std::os::raw::c_uint = 10;
pub const AKEYCODE_4: ::std::os::raw::c_uint = 11;
pub const AKEYCODE_5: ::std::os::raw::c_uint = 12;
pub const AKEYCODE_6: ::std::os::raw::c_uint = 13;
pub const AKEYCODE_7: ::std::os::raw::c_uint = 14;
pub const AKEYCODE_8: ::std::os::raw::c_uint = 15;
pub const AKEYCODE_9: ::std::os::raw::c_uint = 16;
pub const AKEYCODE_STAR: ::std::os::raw::c_uint = 17;
pub const AKEYCODE_POUND: ::std::os::raw::c_uint = 18;
pub const AKEYCODE_DPAD_UP: ::std::os::raw::c_uint = 19;
pub const AKEYCODE_DPAD_DOWN: ::std::os::raw::c_uint = 20;
pub const AKEYCODE_DPAD_LEFT: ::std::os::raw::c_uint = 21;
pub const AKEYCODE_DPAD_RIGHT: ::std::os::raw::c_uint = 22;
pub const AKEYCODE_DPAD_CENTER: ::std::os::raw::c_uint = 23;
pub const AKEYCODE_VOLUME_UP: ::std::os::raw::c_uint = 24;
pub const AKEYCODE_VOLUME_DOWN: ::std::os::raw::c_uint = 25;
pub const AKEYCODE_POWER: ::std::os::raw::c_uint = 26;
pub const AKEYCODE_CAMERA: ::std::os::raw::c_uint = 27;
pub const AKEYCODE_CLEAR: ::std::os::raw::c_uint = 28;
pub const AKEYCODE_A: ::std::os::raw::c_uint = 29;
pub const AKEYCODE_B: ::std::os::raw::c_uint = 30;
pub const AKEYCODE_C: ::std::os::raw::c_uint = 31;
pub const AKEYCODE_D: ::std::os::raw::c_uint = 32;
pub const AKEYCODE_E: ::std::os::raw::c_uint = 33;
pub const AKEYCODE_F: ::std::os::raw::c_uint = 34;
pub const AKEYCODE_G: ::std::os::raw::c_uint = 35;
pub const AKEYCODE_H: ::std::os::raw::c_uint = 36;
pub const AKEYCODE_I: ::std::os::raw::c_uint = 37;
pub const AKEYCODE_J: ::std::os::raw::c_uint = 38;
pub const AKEYCODE_K: ::std::os::raw::c_uint = 39;
pub const AKEYCODE_L: ::std::os::raw::c_uint = 40;
pub const AKEYCODE_M: ::std::os::raw::c_uint = 41;
pub const AKEYCODE_N: ::std::os::raw::c_uint = 42;
pub const AKEYCODE_O: ::std::os::raw::c_uint = 43;
pub const AKEYCODE_P: ::std::os::raw::c_uint = 44;
pub const AKEYCODE_Q: ::std::os::raw::c_uint = 45;
pub const AKEYCODE_R: ::std::os::raw::c_uint = 46;
pub const AKEYCODE_S: ::std::os::raw::c_uint = 47;
pub const AKEYCODE_T: ::std::os::raw::c_uint = 48;
pub const AKEYCODE_U: ::std::os::raw::c_uint = 49;
pub const AKEYCODE_V: ::std::os::raw::c_uint = 50;
pub const AKEYCODE_W: ::std::os::raw::c_uint = 51;
pub const AKEYCODE_X: ::std::os::raw::c_uint = 52;
pub const AKEYCODE_Y: ::std::os::raw::c_uint = 53;
pub const AKEYCODE_Z: ::std::os::raw::c_uint = 54;
pub const AKEYCODE_COMMA: ::std::os::raw::c_uint = 55;
pub const AKEYCODE_PERIOD: ::std::os::raw::c_uint = 56;
pub const AKEYCODE_ALT_LEFT: ::std::os::raw::c_uint = 57;
pub const AKEYCODE_ALT_RIGHT: ::std::os::raw::c_uint = 58;
pub const AKEYCODE_SHIFT_LEFT: ::std::os::raw::c_uint = 59;
pub const AKEYCODE_SHIFT_RIGHT: ::std::os::raw::c_uint = 60;
pub const AKEYCODE_TAB: ::std::os::raw::c_uint = 61;
pub const AKEYCODE_SPACE: ::std::os::raw::c_uint = 62;
pub const AKEYCODE_SYM: ::std::os::raw::c_uint = 63;
pub const AKEYCODE_EXPLORER: ::std::os::raw::c_uint = 64;
pub const AKEYCODE_ENVELOPE: ::std::os::raw::c_uint = 65;
pub const AKEYCODE_ENTER: ::std::os::raw::c_uint = 66;
pub const AKEYCODE_DEL: ::std::os::raw::c_uint = 67;
pub const AKEYCODE_GRAVE: ::std::os::raw::c_uint = 68;
pub const AKEYCODE_MINUS: ::std::os::raw::c_uint = 69;
pub const AKEYCODE_EQUALS: ::std::os::raw::c_uint = 70;
pub const AKEYCODE_LEFT_BRACKET: ::std::os::raw::c_uint = 71;
pub const AKEYCODE_RIGHT_BRACKET: ::std::os::raw::c_uint = 72;
pub const AKEYCODE_BACKSLASH: ::std::os::raw::c_uint = 73;
pub const AKEYCODE_SEMICOLON: ::std::os::raw::c_uint = 74;
pub const AKEYCODE_APOSTROPHE: ::std::os::raw::c_uint = 75;
pub const AKEYCODE_SLASH: ::std::os::raw::c_uint = 76;
pub const AKEYCODE_AT: ::std::os::raw::c_uint = 77;
pub const AKEYCODE_NUM: ::std::os::raw::c_uint = 78;
pub const AKEYCODE_HEADSETHOOK: ::std::os::raw::c_uint = 79;
pub const AKEYCODE_FOCUS: ::std::os::raw::c_uint = 80;
pub const AKEYCODE_PLUS: ::std::os::raw::c_uint = 81;
pub const AKEYCODE_MENU: ::std::os::raw::c_uint = 82;
pub const AKEYCODE_NOTIFICATION: ::std::os::raw::c_uint = 83;
pub const AKEYCODE_SEARCH: ::std::os::raw::c_uint = 84;
pub const AKEYCODE_MEDIA_PLAY_PAUSE: ::std::os::raw::c_uint = 85;
pub const AKEYCODE_MEDIA_STOP: ::std::os::raw::c_uint = 86;
pub const AKEYCODE_MEDIA_NEXT: ::std::os::raw::c_uint = 87;
pub const AKEYCODE_MEDIA_PREVIOUS: ::std::os::raw::c_uint = 88;
pub const AKEYCODE_MEDIA_REWIND: ::std::os::raw::c_uint = 89;
pub const AKEYCODE_MEDIA_FAST_FORWARD: ::std::os::raw::c_uint = 90;
pub const AKEYCODE_MUTE: ::std::os::raw::c_uint = 91;
pub const AKEYCODE_PAGE_UP: ::std::os::raw::c_uint = 92;
pub const AKEYCODE_PAGE_DOWN: ::std::os::raw::c_uint = 93;
pub const AKEYCODE_PICTSYMBOLS: ::std::os::raw::c_uint = 94;
pub const AKEYCODE_SWITCH_CHARSET: ::std::os::raw::c_uint = 95;
pub const AKEYCODE_BUTTON_A: ::std::os::raw::c_uint = 96;
pub const AKEYCODE_BUTTON_B: ::std::os::raw::c_uint = 97;
pub const AKEYCODE_BUTTON_C: ::std::os::raw::c_uint = 98;
pub const AKEYCODE_BUTTON_X: ::std::os::raw::c_uint = 99;
pub const AKEYCODE_BUTTON_Y: ::std::os::raw::c_uint = 100;
pub const AKEYCODE_BUTTON_Z: ::std::os::raw::c_uint = 101;
pub const AKEYCODE_BUTTON_L1: ::std::os::raw::c_uint = 102;
pub const AKEYCODE_BUTTON_R1: ::std::os::raw::c_uint = 103;
pub const AKEYCODE_BUTTON_L2: ::std::os::raw::c_uint = 104;
pub const AKEYCODE_BUTTON_R2: ::std::os::raw::c_uint = 105;
pub const AKEYCODE_BUTTON_THUMBL: ::std::os::raw::c_uint = 106;
pub const AKEYCODE_BUTTON_THUMBR: ::std::os::raw::c_uint = 107;
pub const AKEYCODE_BUTTON_START: ::std::os::raw::c_uint = 108;
pub const AKEYCODE_BUTTON_SELECT: ::std::os::raw::c_uint = 109;
pub const AKEYCODE_BUTTON_MODE: ::std::os::raw::c_uint = 110;
pub const AKEYCODE_ESCAPE: ::std::os::raw::c_uint = 111;
pub const AKEYCODE_FORWARD_DEL: ::std::os::raw::c_uint = 112;
pub const AKEYCODE_CTRL_LEFT: ::std::os::raw::c_uint = 113;
pub const AKEYCODE_CTRL_RIGHT: ::std::os::raw::c_uint = 114;
pub const AKEYCODE_CAPS_LOCK: ::std::os::raw::c_uint = 115;
pub const AKEYCODE_SCROLL_LOCK: ::std::os::raw::c_uint = 116;
pub const AKEYCODE_META_LEFT: ::std::os::raw::c_uint = 117;
pub const AKEYCODE_META_RIGHT: ::std::os::raw::c_uint = 118;
pub const AKEYCODE_FUNCTION: ::std::os::raw::c_uint = 119;
pub const AKEYCODE_SYSRQ: ::std::os::raw::c_uint = 120;
pub const AKEYCODE_BREAK: ::std::os::raw::c_uint = 121;
pub const AKEYCODE_MOVE_HOME: ::std::os::raw::c_uint = 122;
pub const AKEYCODE_MOVE_END: ::std::os::raw::c_uint = 123;
pub const AKEYCODE_INSERT: ::std::os::raw::c_uint = 124;
pub const AKEYCODE_FORWARD: ::std::os::raw::c_uint = 125;
pub const AKEYCODE_MEDIA_PLAY: ::std::os::raw::c_uint = 126;
pub const AKEYCODE_MEDIA_PAUSE: ::std::os::raw::c_uint = 127;
pub const AKEYCODE_MEDIA_CLOSE: ::std::os::raw::c_uint = 128;
pub const AKEYCODE_MEDIA_EJECT: ::std::os::raw::c_uint = 129;
pub const AKEYCODE_MEDIA_RECORD: ::std::os::raw::c_uint = 130;
pub const AKEYCODE_F1: ::std::os::raw::c_uint = 131;
pub const AKEYCODE_F2: ::std::os::raw::c_uint = 132;
pub const AKEYCODE_F3: ::std::os::raw::c_uint = 133;
pub const AKEYCODE_F4: ::std::os::raw::c_uint = 134;
// Android key code constants (`AKEYCODE_*`, values 135..=288), generated by
// bindgen from the NDK input headers. They all share the anonymous C enum
// type aliased as `_bindgen_ty_11` at the end of this group.
pub const AKEYCODE_F5: ::std::os::raw::c_uint = 135;
pub const AKEYCODE_F6: ::std::os::raw::c_uint = 136;
pub const AKEYCODE_F7: ::std::os::raw::c_uint = 137;
pub const AKEYCODE_F8: ::std::os::raw::c_uint = 138;
pub const AKEYCODE_F9: ::std::os::raw::c_uint = 139;
pub const AKEYCODE_F10: ::std::os::raw::c_uint = 140;
pub const AKEYCODE_F11: ::std::os::raw::c_uint = 141;
pub const AKEYCODE_F12: ::std::os::raw::c_uint = 142;
pub const AKEYCODE_NUM_LOCK: ::std::os::raw::c_uint = 143;
pub const AKEYCODE_NUMPAD_0: ::std::os::raw::c_uint = 144;
pub const AKEYCODE_NUMPAD_1: ::std::os::raw::c_uint = 145;
pub const AKEYCODE_NUMPAD_2: ::std::os::raw::c_uint = 146;
pub const AKEYCODE_NUMPAD_3: ::std::os::raw::c_uint = 147;
pub const AKEYCODE_NUMPAD_4: ::std::os::raw::c_uint = 148;
pub const AKEYCODE_NUMPAD_5: ::std::os::raw::c_uint = 149;
pub const AKEYCODE_NUMPAD_6: ::std::os::raw::c_uint = 150;
pub const AKEYCODE_NUMPAD_7: ::std::os::raw::c_uint = 151;
pub const AKEYCODE_NUMPAD_8: ::std::os::raw::c_uint = 152;
pub const AKEYCODE_NUMPAD_9: ::std::os::raw::c_uint = 153;
pub const AKEYCODE_NUMPAD_DIVIDE: ::std::os::raw::c_uint = 154;
pub const AKEYCODE_NUMPAD_MULTIPLY: ::std::os::raw::c_uint = 155;
pub const AKEYCODE_NUMPAD_SUBTRACT: ::std::os::raw::c_uint = 156;
pub const AKEYCODE_NUMPAD_ADD: ::std::os::raw::c_uint = 157;
pub const AKEYCODE_NUMPAD_DOT: ::std::os::raw::c_uint = 158;
pub const AKEYCODE_NUMPAD_COMMA: ::std::os::raw::c_uint = 159;
pub const AKEYCODE_NUMPAD_ENTER: ::std::os::raw::c_uint = 160;
pub const AKEYCODE_NUMPAD_EQUALS: ::std::os::raw::c_uint = 161;
pub const AKEYCODE_NUMPAD_LEFT_PAREN: ::std::os::raw::c_uint = 162;
pub const AKEYCODE_NUMPAD_RIGHT_PAREN: ::std::os::raw::c_uint = 163;
pub const AKEYCODE_VOLUME_MUTE: ::std::os::raw::c_uint = 164;
pub const AKEYCODE_INFO: ::std::os::raw::c_uint = 165;
pub const AKEYCODE_CHANNEL_UP: ::std::os::raw::c_uint = 166;
pub const AKEYCODE_CHANNEL_DOWN: ::std::os::raw::c_uint = 167;
pub const AKEYCODE_ZOOM_IN: ::std::os::raw::c_uint = 168;
pub const AKEYCODE_ZOOM_OUT: ::std::os::raw::c_uint = 169;
pub const AKEYCODE_TV: ::std::os::raw::c_uint = 170;
pub const AKEYCODE_WINDOW: ::std::os::raw::c_uint = 171;
pub const AKEYCODE_GUIDE: ::std::os::raw::c_uint = 172;
pub const AKEYCODE_DVR: ::std::os::raw::c_uint = 173;
pub const AKEYCODE_BOOKMARK: ::std::os::raw::c_uint = 174;
pub const AKEYCODE_CAPTIONS: ::std::os::raw::c_uint = 175;
pub const AKEYCODE_SETTINGS: ::std::os::raw::c_uint = 176;
pub const AKEYCODE_TV_POWER: ::std::os::raw::c_uint = 177;
pub const AKEYCODE_TV_INPUT: ::std::os::raw::c_uint = 178;
pub const AKEYCODE_STB_POWER: ::std::os::raw::c_uint = 179;
pub const AKEYCODE_STB_INPUT: ::std::os::raw::c_uint = 180;
pub const AKEYCODE_AVR_POWER: ::std::os::raw::c_uint = 181;
pub const AKEYCODE_AVR_INPUT: ::std::os::raw::c_uint = 182;
pub const AKEYCODE_PROG_RED: ::std::os::raw::c_uint = 183;
pub const AKEYCODE_PROG_GREEN: ::std::os::raw::c_uint = 184;
pub const AKEYCODE_PROG_YELLOW: ::std::os::raw::c_uint = 185;
pub const AKEYCODE_PROG_BLUE: ::std::os::raw::c_uint = 186;
pub const AKEYCODE_APP_SWITCH: ::std::os::raw::c_uint = 187;
// Generic game-pad style buttons 1..=16.
pub const AKEYCODE_BUTTON_1: ::std::os::raw::c_uint = 188;
pub const AKEYCODE_BUTTON_2: ::std::os::raw::c_uint = 189;
pub const AKEYCODE_BUTTON_3: ::std::os::raw::c_uint = 190;
pub const AKEYCODE_BUTTON_4: ::std::os::raw::c_uint = 191;
pub const AKEYCODE_BUTTON_5: ::std::os::raw::c_uint = 192;
pub const AKEYCODE_BUTTON_6: ::std::os::raw::c_uint = 193;
pub const AKEYCODE_BUTTON_7: ::std::os::raw::c_uint = 194;
pub const AKEYCODE_BUTTON_8: ::std::os::raw::c_uint = 195;
pub const AKEYCODE_BUTTON_9: ::std::os::raw::c_uint = 196;
pub const AKEYCODE_BUTTON_10: ::std::os::raw::c_uint = 197;
pub const AKEYCODE_BUTTON_11: ::std::os::raw::c_uint = 198;
pub const AKEYCODE_BUTTON_12: ::std::os::raw::c_uint = 199;
pub const AKEYCODE_BUTTON_13: ::std::os::raw::c_uint = 200;
pub const AKEYCODE_BUTTON_14: ::std::os::raw::c_uint = 201;
pub const AKEYCODE_BUTTON_15: ::std::os::raw::c_uint = 202;
pub const AKEYCODE_BUTTON_16: ::std::os::raw::c_uint = 203;
pub const AKEYCODE_LANGUAGE_SWITCH: ::std::os::raw::c_uint = 204;
pub const AKEYCODE_MANNER_MODE: ::std::os::raw::c_uint = 205;
pub const AKEYCODE_3D_MODE: ::std::os::raw::c_uint = 206;
pub const AKEYCODE_CONTACTS: ::std::os::raw::c_uint = 207;
pub const AKEYCODE_CALENDAR: ::std::os::raw::c_uint = 208;
pub const AKEYCODE_MUSIC: ::std::os::raw::c_uint = 209;
pub const AKEYCODE_CALCULATOR: ::std::os::raw::c_uint = 210;
// Japanese keyboard layout keys.
pub const AKEYCODE_ZENKAKU_HANKAKU: ::std::os::raw::c_uint = 211;
pub const AKEYCODE_EISU: ::std::os::raw::c_uint = 212;
pub const AKEYCODE_MUHENKAN: ::std::os::raw::c_uint = 213;
pub const AKEYCODE_HENKAN: ::std::os::raw::c_uint = 214;
pub const AKEYCODE_KATAKANA_HIRAGANA: ::std::os::raw::c_uint = 215;
pub const AKEYCODE_YEN: ::std::os::raw::c_uint = 216;
pub const AKEYCODE_RO: ::std::os::raw::c_uint = 217;
pub const AKEYCODE_KANA: ::std::os::raw::c_uint = 218;
pub const AKEYCODE_ASSIST: ::std::os::raw::c_uint = 219;
pub const AKEYCODE_BRIGHTNESS_DOWN: ::std::os::raw::c_uint = 220;
pub const AKEYCODE_BRIGHTNESS_UP: ::std::os::raw::c_uint = 221;
pub const AKEYCODE_MEDIA_AUDIO_TRACK: ::std::os::raw::c_uint = 222;
pub const AKEYCODE_SLEEP: ::std::os::raw::c_uint = 223;
pub const AKEYCODE_WAKEUP: ::std::os::raw::c_uint = 224;
pub const AKEYCODE_PAIRING: ::std::os::raw::c_uint = 225;
pub const AKEYCODE_MEDIA_TOP_MENU: ::std::os::raw::c_uint = 226;
pub const AKEYCODE_11: ::std::os::raw::c_uint = 227;
pub const AKEYCODE_12: ::std::os::raw::c_uint = 228;
pub const AKEYCODE_LAST_CHANNEL: ::std::os::raw::c_uint = 229;
// TV / set-top-box oriented keys.
pub const AKEYCODE_TV_DATA_SERVICE: ::std::os::raw::c_uint = 230;
pub const AKEYCODE_VOICE_ASSIST: ::std::os::raw::c_uint = 231;
pub const AKEYCODE_TV_RADIO_SERVICE: ::std::os::raw::c_uint = 232;
pub const AKEYCODE_TV_TELETEXT: ::std::os::raw::c_uint = 233;
pub const AKEYCODE_TV_NUMBER_ENTRY: ::std::os::raw::c_uint = 234;
pub const AKEYCODE_TV_TERRESTRIAL_ANALOG: ::std::os::raw::c_uint = 235;
pub const AKEYCODE_TV_TERRESTRIAL_DIGITAL: ::std::os::raw::c_uint = 236;
pub const AKEYCODE_TV_SATELLITE: ::std::os::raw::c_uint = 237;
pub const AKEYCODE_TV_SATELLITE_BS: ::std::os::raw::c_uint = 238;
pub const AKEYCODE_TV_SATELLITE_CS: ::std::os::raw::c_uint = 239;
pub const AKEYCODE_TV_SATELLITE_SERVICE: ::std::os::raw::c_uint = 240;
pub const AKEYCODE_TV_NETWORK: ::std::os::raw::c_uint = 241;
pub const AKEYCODE_TV_ANTENNA_CABLE: ::std::os::raw::c_uint = 242;
pub const AKEYCODE_TV_INPUT_HDMI_1: ::std::os::raw::c_uint = 243;
pub const AKEYCODE_TV_INPUT_HDMI_2: ::std::os::raw::c_uint = 244;
pub const AKEYCODE_TV_INPUT_HDMI_3: ::std::os::raw::c_uint = 245;
pub const AKEYCODE_TV_INPUT_HDMI_4: ::std::os::raw::c_uint = 246;
pub const AKEYCODE_TV_INPUT_COMPOSITE_1: ::std::os::raw::c_uint = 247;
pub const AKEYCODE_TV_INPUT_COMPOSITE_2: ::std::os::raw::c_uint = 248;
pub const AKEYCODE_TV_INPUT_COMPONENT_1: ::std::os::raw::c_uint = 249;
pub const AKEYCODE_TV_INPUT_COMPONENT_2: ::std::os::raw::c_uint = 250;
pub const AKEYCODE_TV_INPUT_VGA_1: ::std::os::raw::c_uint = 251;
pub const AKEYCODE_TV_AUDIO_DESCRIPTION: ::std::os::raw::c_uint = 252;
pub const AKEYCODE_TV_AUDIO_DESCRIPTION_MIX_UP: ::std::os::raw::c_uint = 253;
pub const AKEYCODE_TV_AUDIO_DESCRIPTION_MIX_DOWN: ::std::os::raw::c_uint = 254;
pub const AKEYCODE_TV_ZOOM_MODE: ::std::os::raw::c_uint = 255;
pub const AKEYCODE_TV_CONTENTS_MENU: ::std::os::raw::c_uint = 256;
pub const AKEYCODE_TV_MEDIA_CONTEXT_MENU: ::std::os::raw::c_uint = 257;
pub const AKEYCODE_TV_TIMER_PROGRAMMING: ::std::os::raw::c_uint = 258;
pub const AKEYCODE_HELP: ::std::os::raw::c_uint = 259;
pub const AKEYCODE_NAVIGATE_PREVIOUS: ::std::os::raw::c_uint = 260;
pub const AKEYCODE_NAVIGATE_NEXT: ::std::os::raw::c_uint = 261;
pub const AKEYCODE_NAVIGATE_IN: ::std::os::raw::c_uint = 262;
pub const AKEYCODE_NAVIGATE_OUT: ::std::os::raw::c_uint = 263;
// Wearable "stem" (side button) keys.
pub const AKEYCODE_STEM_PRIMARY: ::std::os::raw::c_uint = 264;
pub const AKEYCODE_STEM_1: ::std::os::raw::c_uint = 265;
pub const AKEYCODE_STEM_2: ::std::os::raw::c_uint = 266;
pub const AKEYCODE_STEM_3: ::std::os::raw::c_uint = 267;
pub const AKEYCODE_DPAD_UP_LEFT: ::std::os::raw::c_uint = 268;
pub const AKEYCODE_DPAD_DOWN_LEFT: ::std::os::raw::c_uint = 269;
pub const AKEYCODE_DPAD_UP_RIGHT: ::std::os::raw::c_uint = 270;
pub const AKEYCODE_DPAD_DOWN_RIGHT: ::std::os::raw::c_uint = 271;
pub const AKEYCODE_MEDIA_SKIP_FORWARD: ::std::os::raw::c_uint = 272;
pub const AKEYCODE_MEDIA_SKIP_BACKWARD: ::std::os::raw::c_uint = 273;
pub const AKEYCODE_MEDIA_STEP_FORWARD: ::std::os::raw::c_uint = 274;
pub const AKEYCODE_MEDIA_STEP_BACKWARD: ::std::os::raw::c_uint = 275;
pub const AKEYCODE_SOFT_SLEEP: ::std::os::raw::c_uint = 276;
pub const AKEYCODE_CUT: ::std::os::raw::c_uint = 277;
pub const AKEYCODE_COPY: ::std::os::raw::c_uint = 278;
pub const AKEYCODE_PASTE: ::std::os::raw::c_uint = 279;
pub const AKEYCODE_SYSTEM_NAVIGATION_UP: ::std::os::raw::c_uint = 280;
pub const AKEYCODE_SYSTEM_NAVIGATION_DOWN: ::std::os::raw::c_uint = 281;
pub const AKEYCODE_SYSTEM_NAVIGATION_LEFT: ::std::os::raw::c_uint = 282;
pub const AKEYCODE_SYSTEM_NAVIGATION_RIGHT: ::std::os::raw::c_uint = 283;
pub const AKEYCODE_ALL_APPS: ::std::os::raw::c_uint = 284;
pub const AKEYCODE_REFRESH: ::std::os::raw::c_uint = 285;
pub const AKEYCODE_THUMBS_UP: ::std::os::raw::c_uint = 286;
pub const AKEYCODE_THUMBS_DOWN: ::std::os::raw::c_uint = 287;
pub const AKEYCODE_PROFILE_SWITCH: ::std::os::raw::c_uint = 288;
// Underlying integer type of the anonymous C enum carrying the AKEYCODE_* values.
pub type _bindgen_ty_11 = ::std::os::raw::c_uint;
// Opaque FFI handle for an NDK looper. The zero-sized `_unused` field is the
// standard bindgen placeholder for an opaque C type; values are only ever
// used behind raw pointers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ALooper {
    _unused: [u8; 0],
}
extern "C" {
    // Looper already associated with the calling thread, if any.
    pub fn ALooper_forThread() -> *mut ALooper;
}
// Option flag accepted by `ALooper_prepare` (anonymous enum `_bindgen_ty_12`).
pub const ALOOPER_PREPARE_ALLOW_NON_CALLBACKS: ::std::os::raw::c_uint = 1;
pub type _bindgen_ty_12 = ::std::os::raw::c_uint;
extern "C" {
    pub fn ALooper_prepare(opts: ::std::os::raw::c_int) -> *mut ALooper;
}
// Negative status codes returned by `ALooper_pollOnce` / `ALooper_pollAll`
// (anonymous enum `_bindgen_ty_13`); non-negative returns are fd idents.
pub const ALOOPER_POLL_WAKE: ::std::os::raw::c_int = -1;
pub const ALOOPER_POLL_CALLBACK: ::std::os::raw::c_int = -2;
pub const ALOOPER_POLL_TIMEOUT: ::std::os::raw::c_int = -3;
pub const ALOOPER_POLL_ERROR: ::std::os::raw::c_int = -4;
pub type _bindgen_ty_13 = ::std::os::raw::c_int;
extern "C" {
    pub fn ALooper_acquire(looper: *mut ALooper);
}
extern "C" {
    pub fn ALooper_release(looper: *mut ALooper);
}
// Event bit flags reported for monitored file descriptors
// (anonymous enum `_bindgen_ty_14`).
pub const ALOOPER_EVENT_INPUT: ::std::os::raw::c_uint = 1;
pub const ALOOPER_EVENT_OUTPUT: ::std::os::raw::c_uint = 2;
pub const ALOOPER_EVENT_ERROR: ::std::os::raw::c_uint = 4;
pub const ALOOPER_EVENT_HANGUP: ::std::os::raw::c_uint = 8;
pub const ALOOPER_EVENT_INVALID: ::std::os::raw::c_uint = 16;
pub type _bindgen_ty_14 = ::std::os::raw::c_uint;
// C callback invoked for fd events; receives the fd, the event bits, and the
// user `data` pointer registered with `ALooper_addFd`.
pub type ALooper_callbackFunc = ::std::option::Option<
    unsafe extern "C" fn(
        fd: ::std::os::raw::c_int,
        events: ::std::os::raw::c_int,
        data: *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int,
>;
extern "C" {
    pub fn ALooper_pollOnce(
        timeoutMillis: ::std::os::raw::c_int,
        outFd: *mut ::std::os::raw::c_int,
        outEvents: *mut ::std::os::raw::c_int,
        outData: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ALooper_pollAll(
        timeoutMillis: ::std::os::raw::c_int,
        outFd: *mut ::std::os::raw::c_int,
        outEvents: *mut ::std::os::raw::c_int,
        outData: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ALooper_wake(looper: *mut ALooper);
}
extern "C" {
    pub fn ALooper_addFd(
        looper: *mut ALooper,
        fd: ::std::os::raw::c_int,
        ident: ::std::os::raw::c_int,
        events: ::std::os::raw::c_int,
        callback: ALooper_callbackFunc,
        data: *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ALooper_removeFd(
        looper: *mut ALooper,
        fd: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
// Key state query results (anonymous enum `_bindgen_ty_15`).
pub const AKEY_STATE_UNKNOWN: ::std::os::raw::c_int = -1;
pub const AKEY_STATE_UP: ::std::os::raw::c_int = 0;
pub const AKEY_STATE_DOWN: ::std::os::raw::c_int = 1;
pub const AKEY_STATE_VIRTUAL: ::std::os::raw::c_int = 2;
pub type _bindgen_ty_15 = ::std::os::raw::c_int;
// Meta-key state bit flags (anonymous enum `_bindgen_ty_16`); note each
// modifier has a generic bit plus separate left/right bits.
pub const AMETA_NONE: ::std::os::raw::c_uint = 0;
pub const AMETA_ALT_ON: ::std::os::raw::c_uint = 2;
pub const AMETA_ALT_LEFT_ON: ::std::os::raw::c_uint = 16;
pub const AMETA_ALT_RIGHT_ON: ::std::os::raw::c_uint = 32;
pub const AMETA_SHIFT_ON: ::std::os::raw::c_uint = 1;
pub const AMETA_SHIFT_LEFT_ON: ::std::os::raw::c_uint = 64;
pub const AMETA_SHIFT_RIGHT_ON: ::std::os::raw::c_uint = 128;
pub const AMETA_SYM_ON: ::std::os::raw::c_uint = 4;
pub const AMETA_FUNCTION_ON: ::std::os::raw::c_uint = 8;
pub const AMETA_CTRL_ON: ::std::os::raw::c_uint = 4096;
pub const AMETA_CTRL_LEFT_ON: ::std::os::raw::c_uint = 8192;
pub const AMETA_CTRL_RIGHT_ON: ::std::os::raw::c_uint = 16384;
pub const AMETA_META_ON: ::std::os::raw::c_uint = 65536;
pub const AMETA_META_LEFT_ON: ::std::os::raw::c_uint = 131072;
pub const AMETA_META_RIGHT_ON: ::std::os::raw::c_uint = 262144;
pub const AMETA_CAPS_LOCK_ON: ::std::os::raw::c_uint = 1048576;
pub const AMETA_NUM_LOCK_ON: ::std::os::raw::c_uint = 2097152;
pub const AMETA_SCROLL_LOCK_ON: ::std::os::raw::c_uint = 4194304;
pub type _bindgen_ty_16 = ::std::os::raw::c_uint;
// Opaque FFI handle for an NDK input event (key or motion); zero-sized
// bindgen placeholder, only used behind raw pointers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AInputEvent {
    _unused: [u8; 0],
}
// Input event type discriminants returned by `AInputEvent_getType`
// (anonymous enum `_bindgen_ty_17`).
pub const AINPUT_EVENT_TYPE_KEY: ::std::os::raw::c_uint = 1;
pub const AINPUT_EVENT_TYPE_MOTION: ::std::os::raw::c_uint = 2;
pub const AINPUT_EVENT_TYPE_FOCUS: ::std::os::raw::c_uint = 3;
pub const AINPUT_EVENT_TYPE_CAPTURE: ::std::os::raw::c_uint = 4;
pub const AINPUT_EVENT_TYPE_DRAG: ::std::os::raw::c_uint = 5;
pub type _bindgen_ty_17 = ::std::os::raw::c_uint;
// Key event actions (anonymous enum `_bindgen_ty_18`).
pub const AKEY_EVENT_ACTION_DOWN: ::std::os::raw::c_uint = 0;
pub const AKEY_EVENT_ACTION_UP: ::std::os::raw::c_uint = 1;
pub const AKEY_EVENT_ACTION_MULTIPLE: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_18 = ::std::os::raw::c_uint;
// Key event bit flags (anonymous enum `_bindgen_ty_19`).
pub const AKEY_EVENT_FLAG_WOKE_HERE: ::std::os::raw::c_uint = 1;
pub const AKEY_EVENT_FLAG_SOFT_KEYBOARD: ::std::os::raw::c_uint = 2;
pub const AKEY_EVENT_FLAG_KEEP_TOUCH_MODE: ::std::os::raw::c_uint = 4;
pub const AKEY_EVENT_FLAG_FROM_SYSTEM: ::std::os::raw::c_uint = 8;
pub const AKEY_EVENT_FLAG_EDITOR_ACTION: ::std::os::raw::c_uint = 16;
pub const AKEY_EVENT_FLAG_CANCELED: ::std::os::raw::c_uint = 32;
pub const AKEY_EVENT_FLAG_VIRTUAL_HARD_KEY: ::std::os::raw::c_uint = 64;
pub const AKEY_EVENT_FLAG_LONG_PRESS: ::std::os::raw::c_uint = 128;
pub const AKEY_EVENT_FLAG_CANCELED_LONG_PRESS: ::std::os::raw::c_uint = 256;
pub const AKEY_EVENT_FLAG_TRACKING: ::std::os::raw::c_uint = 512;
pub const AKEY_EVENT_FLAG_FALLBACK: ::std::os::raw::c_uint = 1024;
pub type _bindgen_ty_19 = ::std::os::raw::c_uint;
// Motion event actions (anonymous enum `_bindgen_ty_20`). The low byte is the
// action; the next byte carries the pointer index for POINTER_DOWN/UP.
pub const AMOTION_EVENT_ACTION_MASK: ::std::os::raw::c_uint = 255;
pub const AMOTION_EVENT_ACTION_POINTER_INDEX_MASK: ::std::os::raw::c_uint = 65280;
pub const AMOTION_EVENT_ACTION_DOWN: ::std::os::raw::c_uint = 0;
pub const AMOTION_EVENT_ACTION_UP: ::std::os::raw::c_uint = 1;
pub const AMOTION_EVENT_ACTION_MOVE: ::std::os::raw::c_uint = 2;
pub const AMOTION_EVENT_ACTION_CANCEL: ::std::os::raw::c_uint = 3;
pub const AMOTION_EVENT_ACTION_OUTSIDE: ::std::os::raw::c_uint = 4;
pub const AMOTION_EVENT_ACTION_POINTER_DOWN: ::std::os::raw::c_uint = 5;
pub const AMOTION_EVENT_ACTION_POINTER_UP: ::std::os::raw::c_uint = 6;
pub const AMOTION_EVENT_ACTION_HOVER_MOVE: ::std::os::raw::c_uint = 7;
pub const AMOTION_EVENT_ACTION_SCROLL: ::std::os::raw::c_uint = 8;
pub const AMOTION_EVENT_ACTION_HOVER_ENTER: ::std::os::raw::c_uint = 9;
pub const AMOTION_EVENT_ACTION_HOVER_EXIT: ::std::os::raw::c_uint = 10;
pub const AMOTION_EVENT_ACTION_BUTTON_PRESS: ::std::os::raw::c_uint = 11;
pub const AMOTION_EVENT_ACTION_BUTTON_RELEASE: ::std::os::raw::c_uint = 12;
pub type _bindgen_ty_20 = ::std::os::raw::c_uint;
// Motion event flags (anonymous enum `_bindgen_ty_21`).
pub const AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED: ::std::os::raw::c_uint = 1;
pub type _bindgen_ty_21 = ::std::os::raw::c_uint;
// Screen-edge flags (anonymous enum `_bindgen_ty_22`).
pub const AMOTION_EVENT_EDGE_FLAG_NONE: ::std::os::raw::c_uint = 0;
pub const AMOTION_EVENT_EDGE_FLAG_TOP: ::std::os::raw::c_uint = 1;
pub const AMOTION_EVENT_EDGE_FLAG_BOTTOM: ::std::os::raw::c_uint = 2;
pub const AMOTION_EVENT_EDGE_FLAG_LEFT: ::std::os::raw::c_uint = 4;
pub const AMOTION_EVENT_EDGE_FLAG_RIGHT: ::std::os::raw::c_uint = 8;
pub type _bindgen_ty_22 = ::std::os::raw::c_uint;
// Motion axis identifiers (anonymous enum `_bindgen_ty_23`) for
// `AMotionEvent_getAxisValue` / `AMotionEvent_getHistoricalAxisValue`.
pub const AMOTION_EVENT_AXIS_X: ::std::os::raw::c_uint = 0;
pub const AMOTION_EVENT_AXIS_Y: ::std::os::raw::c_uint = 1;
pub const AMOTION_EVENT_AXIS_PRESSURE: ::std::os::raw::c_uint = 2;
pub const AMOTION_EVENT_AXIS_SIZE: ::std::os::raw::c_uint = 3;
pub const AMOTION_EVENT_AXIS_TOUCH_MAJOR: ::std::os::raw::c_uint = 4;
pub const AMOTION_EVENT_AXIS_TOUCH_MINOR: ::std::os::raw::c_uint = 5;
pub const AMOTION_EVENT_AXIS_TOOL_MAJOR: ::std::os::raw::c_uint = 6;
pub const AMOTION_EVENT_AXIS_TOOL_MINOR: ::std::os::raw::c_uint = 7;
pub const AMOTION_EVENT_AXIS_ORIENTATION: ::std::os::raw::c_uint = 8;
pub const AMOTION_EVENT_AXIS_VSCROLL: ::std::os::raw::c_uint = 9;
pub const AMOTION_EVENT_AXIS_HSCROLL: ::std::os::raw::c_uint = 10;
pub const AMOTION_EVENT_AXIS_Z: ::std::os::raw::c_uint = 11;
pub const AMOTION_EVENT_AXIS_RX: ::std::os::raw::c_uint = 12;
pub const AMOTION_EVENT_AXIS_RY: ::std::os::raw::c_uint = 13;
pub const AMOTION_EVENT_AXIS_RZ: ::std::os::raw::c_uint = 14;
pub const AMOTION_EVENT_AXIS_HAT_X: ::std::os::raw::c_uint = 15;
pub const AMOTION_EVENT_AXIS_HAT_Y: ::std::os::raw::c_uint = 16;
pub const AMOTION_EVENT_AXIS_LTRIGGER: ::std::os::raw::c_uint = 17;
pub const AMOTION_EVENT_AXIS_RTRIGGER: ::std::os::raw::c_uint = 18;
pub const AMOTION_EVENT_AXIS_THROTTLE: ::std::os::raw::c_uint = 19;
pub const AMOTION_EVENT_AXIS_RUDDER: ::std::os::raw::c_uint = 20;
pub const AMOTION_EVENT_AXIS_WHEEL: ::std::os::raw::c_uint = 21;
pub const AMOTION_EVENT_AXIS_GAS: ::std::os::raw::c_uint = 22;
pub const AMOTION_EVENT_AXIS_BRAKE: ::std::os::raw::c_uint = 23;
pub const AMOTION_EVENT_AXIS_DISTANCE: ::std::os::raw::c_uint = 24;
pub const AMOTION_EVENT_AXIS_TILT: ::std::os::raw::c_uint = 25;
pub const AMOTION_EVENT_AXIS_SCROLL: ::std::os::raw::c_uint = 26;
pub const AMOTION_EVENT_AXIS_RELATIVE_X: ::std::os::raw::c_uint = 27;
pub const AMOTION_EVENT_AXIS_RELATIVE_Y: ::std::os::raw::c_uint = 28;
pub const AMOTION_EVENT_AXIS_GENERIC_1: ::std::os::raw::c_uint = 32;
pub const AMOTION_EVENT_AXIS_GENERIC_2: ::std::os::raw::c_uint = 33;
pub const AMOTION_EVENT_AXIS_GENERIC_3: ::std::os::raw::c_uint = 34;
pub const AMOTION_EVENT_AXIS_GENERIC_4: ::std::os::raw::c_uint = 35;
pub const AMOTION_EVENT_AXIS_GENERIC_5: ::std::os::raw::c_uint = 36;
pub const AMOTION_EVENT_AXIS_GENERIC_6: ::std::os::raw::c_uint = 37;
pub const AMOTION_EVENT_AXIS_GENERIC_7: ::std::os::raw::c_uint = 38;
pub const AMOTION_EVENT_AXIS_GENERIC_8: ::std::os::raw::c_uint = 39;
pub const AMOTION_EVENT_AXIS_GENERIC_9: ::std::os::raw::c_uint = 40;
pub const AMOTION_EVENT_AXIS_GENERIC_10: ::std::os::raw::c_uint = 41;
pub const AMOTION_EVENT_AXIS_GENERIC_11: ::std::os::raw::c_uint = 42;
pub const AMOTION_EVENT_AXIS_GENERIC_12: ::std::os::raw::c_uint = 43;
pub const AMOTION_EVENT_AXIS_GENERIC_13: ::std::os::raw::c_uint = 44;
pub const AMOTION_EVENT_AXIS_GENERIC_14: ::std::os::raw::c_uint = 45;
pub const AMOTION_EVENT_AXIS_GENERIC_15: ::std::os::raw::c_uint = 46;
pub const AMOTION_EVENT_AXIS_GENERIC_16: ::std::os::raw::c_uint = 47;
pub type _bindgen_ty_23 = ::std::os::raw::c_uint;
// Pointer button bit flags (anonymous enum `_bindgen_ty_24`).
pub const AMOTION_EVENT_BUTTON_PRIMARY: ::std::os::raw::c_uint = 1;
pub const AMOTION_EVENT_BUTTON_SECONDARY: ::std::os::raw::c_uint = 2;
pub const AMOTION_EVENT_BUTTON_TERTIARY: ::std::os::raw::c_uint = 4;
pub const AMOTION_EVENT_BUTTON_BACK: ::std::os::raw::c_uint = 8;
pub const AMOTION_EVENT_BUTTON_FORWARD: ::std::os::raw::c_uint = 16;
pub const AMOTION_EVENT_BUTTON_STYLUS_PRIMARY: ::std::os::raw::c_uint = 32;
pub const AMOTION_EVENT_BUTTON_STYLUS_SECONDARY: ::std::os::raw::c_uint = 64;
pub type _bindgen_ty_24 = ::std::os::raw::c_uint;
// Tool types returned by `AMotionEvent_getToolType` (anonymous enum `_bindgen_ty_25`).
pub const AMOTION_EVENT_TOOL_TYPE_UNKNOWN: ::std::os::raw::c_uint = 0;
pub const AMOTION_EVENT_TOOL_TYPE_FINGER: ::std::os::raw::c_uint = 1;
pub const AMOTION_EVENT_TOOL_TYPE_STYLUS: ::std::os::raw::c_uint = 2;
pub const AMOTION_EVENT_TOOL_TYPE_MOUSE: ::std::os::raw::c_uint = 3;
pub const AMOTION_EVENT_TOOL_TYPE_ERASER: ::std::os::raw::c_uint = 4;
pub const AMOTION_EVENT_TOOL_TYPE_PALM: ::std::os::raw::c_uint = 5;
pub type _bindgen_ty_25 = ::std::os::raw::c_uint;
// Input source class bits (anonymous enum `_bindgen_ty_26`); the low byte of
// a source value is its class per AINPUT_SOURCE_CLASS_MASK.
pub const AINPUT_SOURCE_CLASS_MASK: ::std::os::raw::c_uint = 255;
pub const AINPUT_SOURCE_CLASS_NONE: ::std::os::raw::c_uint = 0;
pub const AINPUT_SOURCE_CLASS_BUTTON: ::std::os::raw::c_uint = 1;
pub const AINPUT_SOURCE_CLASS_POINTER: ::std::os::raw::c_uint = 2;
pub const AINPUT_SOURCE_CLASS_NAVIGATION: ::std::os::raw::c_uint = 4;
pub const AINPUT_SOURCE_CLASS_POSITION: ::std::os::raw::c_uint = 8;
pub const AINPUT_SOURCE_CLASS_JOYSTICK: ::std::os::raw::c_uint = 16;
pub type _bindgen_ty_26 = ::std::os::raw::c_uint;
// Concrete input sources (anonymous enum `_bindgen_ty_27`); each combines a
// device-specific high part with a class bit from the group above.
pub const AINPUT_SOURCE_UNKNOWN: ::std::os::raw::c_uint = 0;
pub const AINPUT_SOURCE_KEYBOARD: ::std::os::raw::c_uint = 257;
pub const AINPUT_SOURCE_DPAD: ::std::os::raw::c_uint = 513;
pub const AINPUT_SOURCE_GAMEPAD: ::std::os::raw::c_uint = 1025;
pub const AINPUT_SOURCE_TOUCHSCREEN: ::std::os::raw::c_uint = 4098;
pub const AINPUT_SOURCE_MOUSE: ::std::os::raw::c_uint = 8194;
pub const AINPUT_SOURCE_STYLUS: ::std::os::raw::c_uint = 16386;
pub const AINPUT_SOURCE_BLUETOOTH_STYLUS: ::std::os::raw::c_uint = 49154;
pub const AINPUT_SOURCE_TRACKBALL: ::std::os::raw::c_uint = 65540;
pub const AINPUT_SOURCE_MOUSE_RELATIVE: ::std::os::raw::c_uint = 131076;
pub const AINPUT_SOURCE_TOUCHPAD: ::std::os::raw::c_uint = 1048584;
pub const AINPUT_SOURCE_TOUCH_NAVIGATION: ::std::os::raw::c_uint = 2097152;
pub const AINPUT_SOURCE_JOYSTICK: ::std::os::raw::c_uint = 16777232;
pub const AINPUT_SOURCE_HDMI: ::std::os::raw::c_uint = 33554433;
pub const AINPUT_SOURCE_SENSOR: ::std::os::raw::c_uint = 67108864;
pub const AINPUT_SOURCE_ROTARY_ENCODER: ::std::os::raw::c_uint = 4194304;
pub const AINPUT_SOURCE_ANY: ::std::os::raw::c_uint = 4294967040;
pub type _bindgen_ty_27 = ::std::os::raw::c_uint;
// Keyboard classifications (anonymous enum `_bindgen_ty_28`).
pub const AINPUT_KEYBOARD_TYPE_NONE: ::std::os::raw::c_uint = 0;
pub const AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC: ::std::os::raw::c_uint = 1;
pub const AINPUT_KEYBOARD_TYPE_ALPHABETIC: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_28 = ::std::os::raw::c_uint;
// Motion range identifiers (anonymous enum `_bindgen_ty_29`).
pub const AINPUT_MOTION_RANGE_X: ::std::os::raw::c_uint = 0;
pub const AINPUT_MOTION_RANGE_Y: ::std::os::raw::c_uint = 1;
pub const AINPUT_MOTION_RANGE_PRESSURE: ::std::os::raw::c_uint = 2;
pub const AINPUT_MOTION_RANGE_SIZE: ::std::os::raw::c_uint = 3;
pub const AINPUT_MOTION_RANGE_TOUCH_MAJOR: ::std::os::raw::c_uint = 4;
pub const AINPUT_MOTION_RANGE_TOUCH_MINOR: ::std::os::raw::c_uint = 5;
pub const AINPUT_MOTION_RANGE_TOOL_MAJOR: ::std::os::raw::c_uint = 6;
pub const AINPUT_MOTION_RANGE_TOOL_MINOR: ::std::os::raw::c_uint = 7;
pub const AINPUT_MOTION_RANGE_ORIENTATION: ::std::os::raw::c_uint = 8;
pub type _bindgen_ty_29 = ::std::os::raw::c_uint;
// ---- Accessors common to all input events ----
extern "C" {
    // One of the AINPUT_EVENT_TYPE_* discriminants above.
    pub fn AInputEvent_getType(event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AInputEvent_getDeviceId(event: *const AInputEvent) -> i32;
}
extern "C" {
    // One of the AINPUT_SOURCE_* values above.
    pub fn AInputEvent_getSource(event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AInputEvent_release(event: *const AInputEvent);
}
// ---- Key event accessors (event must be AINPUT_EVENT_TYPE_KEY) ----
extern "C" {
    pub fn AKeyEvent_getAction(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getFlags(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getKeyCode(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getScanCode(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getMetaState(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getRepeatCount(key_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AKeyEvent_getDownTime(key_event: *const AInputEvent) -> i64;
}
extern "C" {
    pub fn AKeyEvent_getEventTime(key_event: *const AInputEvent) -> i64;
}
extern "C" {
    // Converts a Java android.view.KeyEvent object into a native event.
    pub fn AKeyEvent_fromJava(env: *mut JNIEnv, keyEvent: jobject) -> *const AInputEvent;
}
// ---- Motion event accessors (event must be AINPUT_EVENT_TYPE_MOTION) ----
extern "C" {
    pub fn AMotionEvent_getAction(motion_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getFlags(motion_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getMetaState(motion_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getButtonState(motion_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getEdgeFlags(motion_event: *const AInputEvent) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getDownTime(motion_event: *const AInputEvent) -> i64;
}
extern "C" {
    pub fn AMotionEvent_getEventTime(motion_event: *const AInputEvent) -> i64;
}
extern "C" {
    pub fn AMotionEvent_getXOffset(motion_event: *const AInputEvent) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getYOffset(motion_event: *const AInputEvent) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getXPrecision(motion_event: *const AInputEvent) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getYPrecision(motion_event: *const AInputEvent) -> f32;
}
// Per-pointer accessors; `pointer_index` must be < AMotionEvent_getPointerCount.
extern "C" {
    pub fn AMotionEvent_getPointerCount(motion_event: *const AInputEvent) -> size_t;
}
extern "C" {
    pub fn AMotionEvent_getPointerId(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> i32;
}
extern "C" {
    pub fn AMotionEvent_getToolType(motion_event: *const AInputEvent, pointer_index: size_t)
        -> i32;
}
extern "C" {
    pub fn AMotionEvent_getRawX(motion_event: *const AInputEvent, pointer_index: size_t) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getRawY(motion_event: *const AInputEvent, pointer_index: size_t) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getX(motion_event: *const AInputEvent, pointer_index: size_t) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getY(motion_event: *const AInputEvent, pointer_index: size_t) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getPressure(motion_event: *const AInputEvent, pointer_index: size_t)
        -> f32;
}
extern "C" {
    pub fn AMotionEvent_getSize(motion_event: *const AInputEvent, pointer_index: size_t) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getTouchMajor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getTouchMinor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getToolMajor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getToolMinor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getOrientation(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
    ) -> f32;
}
extern "C" {
    // Generic axis lookup using the AMOTION_EVENT_AXIS_* identifiers.
    pub fn AMotionEvent_getAxisValue(
        motion_event: *const AInputEvent,
        axis: i32,
        pointer_index: size_t,
    ) -> f32;
}
// Historical (batched) samples; `history_index` must be
// < AMotionEvent_getHistorySize.
extern "C" {
    pub fn AMotionEvent_getHistorySize(motion_event: *const AInputEvent) -> size_t;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalEventTime(
        motion_event: *const AInputEvent,
        history_index: size_t,
    ) -> i64;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalRawX(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalRawY(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalX(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalY(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalPressure(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalSize(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalTouchMajor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalTouchMinor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalToolMajor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalToolMinor(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalOrientation(
        motion_event: *const AInputEvent,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    pub fn AMotionEvent_getHistoricalAxisValue(
        motion_event: *const AInputEvent,
        axis: i32,
        pointer_index: size_t,
        history_index: size_t,
    ) -> f32;
}
extern "C" {
    // Converts a Java android.view.MotionEvent object into a native event.
    pub fn AMotionEvent_fromJava(env: *mut JNIEnv, motionEvent: jobject) -> *const AInputEvent;
}
// Opaque FFI handle for an NDK input queue; zero-sized bindgen placeholder,
// only used behind raw pointers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AInputQueue {
    _unused: [u8; 0],
}
extern "C" {
    // Routes the queue's events through `looper`, delivering them via
    // `callback`/`data` or the fd ident mechanism.
    pub fn AInputQueue_attachLooper(
        queue: *mut AInputQueue,
        looper: *mut ALooper,
        ident: ::std::os::raw::c_int,
        callback: ALooper_callbackFunc,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AInputQueue_detachLooper(queue: *mut AInputQueue);
}
extern "C" {
    pub fn AInputQueue_hasEvents(queue: *mut AInputQueue) -> i32;
}
extern "C" {
    pub fn AInputQueue_getEvent(queue: *mut AInputQueue, outEvent: *mut *mut AInputEvent) -> i32;
}
extern "C" {
    pub fn AInputQueue_preDispatchEvent(queue: *mut AInputQueue, event: *mut AInputEvent) -> i32;
}
extern "C" {
    // Every event obtained via AInputQueue_getEvent must be finished here.
    pub fn AInputQueue_finishEvent(
        queue: *mut AInputQueue,
        event: *mut AInputEvent,
        handled: ::std::os::raw::c_int,
    );
}
// Android log priority levels (C enum `android_LogPriority`), ordered from
// least to most severe; SILENT suppresses output.
pub const android_LogPriority_ANDROID_LOG_UNKNOWN: android_LogPriority = 0;
pub const android_LogPriority_ANDROID_LOG_DEFAULT: android_LogPriority = 1;
pub const android_LogPriority_ANDROID_LOG_VERBOSE: android_LogPriority = 2;
pub const android_LogPriority_ANDROID_LOG_DEBUG: android_LogPriority = 3;
pub const android_LogPriority_ANDROID_LOG_INFO: android_LogPriority = 4;
pub const android_LogPriority_ANDROID_LOG_WARN: android_LogPriority = 5;
pub const android_LogPriority_ANDROID_LOG_ERROR: android_LogPriority = 6;
pub const android_LogPriority_ANDROID_LOG_FATAL: android_LogPriority = 7;
pub const android_LogPriority_ANDROID_LOG_SILENT: android_LogPriority = 8;
pub type android_LogPriority = ::std::os::raw::c_uint;
extern "C" {
    // Writes a preformatted message to the log at `prio` under `tag`.
    pub fn __android_log_write(
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        text: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    // printf-style variadic logging; `fmt` is a C format string.
    pub fn __android_log_print(
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        fmt: *const ::std::os::raw::c_char,
        ...
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn __android_log_vprint(
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        fmt: *const ::std::os::raw::c_char,
        ap: va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn __android_log_assert(
        cond: *const ::std::os::raw::c_char,
        tag: *const ::std::os::raw::c_char,
        fmt: *const ::std::os::raw::c_char,
        ...
    );
}
// Log buffer identifiers (C enum `log_id`); note MIN and MAIN share value 0,
// and DEFAULT is a sentinel (INT32_MAX), not a real buffer.
pub const log_id_LOG_ID_MIN: log_id = 0;
pub const log_id_LOG_ID_MAIN: log_id = 0;
pub const log_id_LOG_ID_RADIO: log_id = 1;
pub const log_id_LOG_ID_EVENTS: log_id = 2;
pub const log_id_LOG_ID_SYSTEM: log_id = 3;
pub const log_id_LOG_ID_CRASH: log_id = 4;
pub const log_id_LOG_ID_STATS: log_id = 5;
pub const log_id_LOG_ID_SECURITY: log_id = 6;
pub const log_id_LOG_ID_KERNEL: log_id = 7;
pub const log_id_LOG_ID_MAX: log_id = 8;
pub const log_id_LOG_ID_DEFAULT: log_id = 2147483647;
pub type log_id = ::std::os::raw::c_uint;
// C-style typedef alias kept for source compatibility with `log_id_t`.
pub use self::log_id as log_id_t;
extern "C" {
    // Like __android_log_write, but targets a specific buffer (`bufID`).
    pub fn __android_log_buf_write(
        bufID: ::std::os::raw::c_int,
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        text: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    // Like __android_log_print, but targets a specific buffer (`bufID`).
    pub fn __android_log_buf_print(
        bufID: ::std::os::raw::c_int,
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        fmt: *const ::std::os::raw::c_char,
        ...
    ) -> ::std::os::raw::c_int;
}
// C-layout struct passed to the pluggable logger functions below
// (`__android_logger_function`). `struct_size` carries the size of this
// struct for ABI versioning; the layout asserted in the generated test
// below corresponds to 4-byte pointers (size 28, align 4).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __android_log_message {
    pub struct_size: size_t,
    pub buffer_id: i32,
    pub priority: i32,
    pub tag: *const ::std::os::raw::c_char,
    pub file: *const ::std::os::raw::c_char,
    pub line: u32,
    pub message: *const ::std::os::raw::c_char,
}
// Layout regression test for `__android_log_message`, generated by bindgen.
// Asserted values correspond to a target with 4-byte pointers (size 28,
// align 4, pointer fields at offsets 12/16/24).
//
// Fix: the original computed field offsets by dereferencing
// `::std::ptr::null::<T>()` and taking a reference to a field of the null
// place, which is undefined behavior. Modern bindgen instead takes raw field
// addresses out of an uninitialized `MaybeUninit` with `ptr::addr_of!`,
// which never materializes a reference. All asserted values are unchanged.
#[test]
fn bindgen_test_layout___android_log_message() {
    const UNINIT: ::std::mem::MaybeUninit<__android_log_message> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<__android_log_message>(),
        28usize,
        concat!("Size of: ", stringify!(__android_log_message))
    );
    assert_eq!(
        ::std::mem::align_of::<__android_log_message>(),
        4usize,
        concat!("Alignment of ", stringify!(__android_log_message))
    );
    assert_eq!(
        // SAFETY: addr_of! computes the field address without creating a
        // reference, so the uninitialized/never-dereferenced place is fine.
        unsafe { ::std::ptr::addr_of!((*ptr).struct_size) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(struct_size)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).buffer_id) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(buffer_id)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).priority) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(priority)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tag) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(tag)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).file) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(file)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).line) as usize - ptr as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(line)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).message) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(__android_log_message),
            "::",
            stringify!(message)
        )
    );
}
// Pluggable logger callback type: receives each `__android_log_message`.
pub type __android_logger_function =
    ::std::option::Option<unsafe extern "C" fn(log_message: *const __android_log_message)>;
// Pluggable abort handler type: receives the abort message string.
pub type __android_aborter_function =
    ::std::option::Option<unsafe extern "C" fn(abort_message: *const ::std::os::raw::c_char)>;
extern "C" {
    pub fn __android_log_write_log_message(log_message: *mut __android_log_message);
}
extern "C" {
    // Installs a custom logger; the two functions below are the stock
    // implementations (logd-backed and stderr-backed).
    pub fn __android_log_set_logger(logger: __android_logger_function);
}
extern "C" {
    pub fn __android_log_logd_logger(log_message: *const __android_log_message);
}
extern "C" {
    pub fn __android_log_stderr_logger(log_message: *const __android_log_message);
}
extern "C" {
    pub fn __android_log_set_aborter(aborter: __android_aborter_function);
}
extern "C" {
    pub fn __android_log_call_aborter(abort_message: *const ::std::os::raw::c_char);
}
extern "C" {
    pub fn __android_log_default_aborter(abort_message: *const ::std::os::raw::c_char);
}
extern "C" {
    pub fn __android_log_is_loggable(
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        default_prio: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    // Variant of __android_log_is_loggable taking an explicit tag length.
    pub fn __android_log_is_loggable_len(
        prio: ::std::os::raw::c_int,
        tag: *const ::std::os::raw::c_char,
        len: size_t,
        default_prio: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn __android_log_set_minimum_priority(priority: i32) -> i32;
}
extern "C" {
    pub fn __android_log_get_minimum_priority() -> i32;
}
extern "C" {
    pub fn __android_log_set_default_tag(tag: *const ::std::os::raw::c_char);
}
// C `struct flock` used with fcntl-style file locking; this variant uses
// `off_t` offsets (the layout test below asserts size 16 / align 4,
// i.e. 4-byte `off_t`). See `flock64` for the 64-bit-offset variant.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct flock {
    pub l_type: ::std::os::raw::c_short,
    pub l_whence: ::std::os::raw::c_short,
    pub l_start: off_t,
    pub l_len: off_t,
    pub l_pid: pid_t,
}
// Layout regression test for `flock`, generated by bindgen (asserts 4-byte
// `off_t`: size 16, align 4).
//
// Fix: the original computed field offsets by dereferencing
// `::std::ptr::null::<flock>()` and referencing fields of the null place,
// which is undefined behavior. Replaced with the modern bindgen pattern
// (`MaybeUninit` + `ptr::addr_of!`), which computes the addresses without
// ever creating a reference. All asserted values are unchanged.
#[test]
fn bindgen_test_layout_flock() {
    const UNINIT: ::std::mem::MaybeUninit<flock> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<flock>(),
        16usize,
        concat!("Size of: ", stringify!(flock))
    );
    assert_eq!(
        ::std::mem::align_of::<flock>(),
        4usize,
        concat!("Alignment of ", stringify!(flock))
    );
    assert_eq!(
        // SAFETY: addr_of! takes the field address without materializing a
        // reference to the uninitialized place.
        unsafe { ::std::ptr::addr_of!((*ptr).l_type) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(flock),
            "::",
            stringify!(l_type)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).l_whence) as usize - ptr as usize },
        2usize,
        concat!(
            "Offset of field: ",
            stringify!(flock),
            "::",
            stringify!(l_whence)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).l_start) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(flock),
            "::",
            stringify!(l_start)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).l_len) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(flock),
            "::",
            stringify!(l_len)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).l_pid) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(flock),
            "::",
            stringify!(l_pid)
        )
    );
}
// Large-file variant of `flock`: identical semantics but with 64-bit
// `off64_t` start/length, giving an 8-byte-aligned, 32-byte layout.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct flock64 {
    pub l_type: ::std::os::raw::c_short,
    pub l_whence: ::std::os::raw::c_short,
    pub l_start: off64_t,
    pub l_len: off64_t,
    pub l_pid: pid_t,
}
// bindgen-generated layout check for `flock64`.
#[test]
fn bindgen_test_layout_flock64() {
    assert_eq!(
        ::std::mem::size_of::<flock64>(),
        32usize,
        concat!("Size of: ", stringify!(flock64))
    );
    assert_eq!(
        ::std::mem::align_of::<flock64>(),
        8usize,
        concat!("Alignment of ", stringify!(flock64))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<flock64>())).l_type as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(flock64),
            "::",
            stringify!(l_type)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<flock64>())).l_whence as *const _ as usize },
        2usize,
        concat!(
            "Offset of field: ",
            stringify!(flock64),
            "::",
            stringify!(l_whence)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<flock64>())).l_start as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(flock64),
            "::",
            stringify!(l_start)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<flock64>())).l_len as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(flock64),
            "::",
            stringify!(l_len)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<flock64>())).l_pid as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(flock64),
            "::",
            stringify!(l_pid)
        )
    );
}
// Argument for fcntl() F_GETOWN_EX/F_SETOWN_EX: identifies which task/pgrp
// receives SIGIO/SIGURG for a descriptor.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct f_owner_ex {
    pub type_: ::std::os::raw::c_int,
    pub pid: __kernel_pid_t,
}
// bindgen-generated layout check for `f_owner_ex`.
#[test]
fn bindgen_test_layout_f_owner_ex() {
    assert_eq!(
        ::std::mem::size_of::<f_owner_ex>(),
        8usize,
        concat!("Size of: ", stringify!(f_owner_ex))
    );
    assert_eq!(
        ::std::mem::align_of::<f_owner_ex>(),
        4usize,
        concat!("Alignment of ", stringify!(f_owner_ex))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<f_owner_ex>())).type_ as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(f_owner_ex),
            "::",
            stringify!(type_)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<f_owner_ex>())).pid as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(f_owner_ex),
            "::",
            stringify!(pid)
        )
    );
}
// Scatter/gather buffer descriptor for readv/writev/sendmsg/recvmsg.
// Note: 4-byte pointer/size fields — this binding targets a 32-bit ABI.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct iovec {
    pub iov_base: *mut ::std::os::raw::c_void,
    pub iov_len: __kernel_size_t,
}
// bindgen-generated layout check for `iovec`.
#[test]
fn bindgen_test_layout_iovec() {
    assert_eq!(
        ::std::mem::size_of::<iovec>(),
        8usize,
        concat!("Size of: ", stringify!(iovec))
    );
    assert_eq!(
        ::std::mem::align_of::<iovec>(),
        4usize,
        concat!("Alignment of ", stringify!(iovec))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<iovec>())).iov_base as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(iovec),
            "::",
            stringify!(iov_base)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<iovec>())).iov_len as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(iovec),
            "::",
            stringify!(iov_len)
        )
    );
}
// --- Socket address types ---
// Address-family discriminant (AF_INET, AF_UNIX, ...).
pub type sa_family_t = ::std::os::raw::c_ushort;
// shutdown() `how` values: disable receive, send, or both.
pub const SHUT_RD: ::std::os::raw::c_uint = 0;
pub const SHUT_WR: ::std::os::raw::c_uint = 1;
pub const SHUT_RDWR: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_30 = ::std::os::raw::c_uint;
// Generic socket address header; concrete families (sockaddr_in etc.)
// overlay this 16-byte layout.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sockaddr {
    pub sa_family: sa_family_t,
    pub sa_data: [::std::os::raw::c_char; 14usize],
}
// bindgen-generated layout check for `sockaddr`.
#[test]
fn bindgen_test_layout_sockaddr() {
    assert_eq!(
        ::std::mem::size_of::<sockaddr>(),
        16usize,
        concat!("Size of: ", stringify!(sockaddr))
    );
    assert_eq!(
        ::std::mem::align_of::<sockaddr>(),
        2usize,
        concat!("Alignment of ", stringify!(sockaddr))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<sockaddr>())).sa_family as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(sockaddr),
            "::",
            stringify!(sa_family)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<sockaddr>())).sa_data as *const _ as usize },
        2usize,
        concat!(
            "Offset of field: ",
            stringify!(sockaddr),
            "::",
            stringify!(sa_data)
        )
    );
}
// `sockaddr_storage`: large enough (128 bytes) to hold any concrete socket
// address type. The anonymous union/struct nesting mirrors bionic's header,
// where a pointer member forces the required alignment.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct sockaddr_storage {
    pub __bindgen_anon_1: sockaddr_storage__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union sockaddr_storage__bindgen_ty_1 {
    pub __bindgen_anon_1: sockaddr_storage__bindgen_ty_1__bindgen_ty_1,
    // Alignment-forcing member; not meaningful data.
    pub __align: *mut ::std::os::raw::c_void,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sockaddr_storage__bindgen_ty_1__bindgen_ty_1 {
    pub ss_family: sa_family_t,
    pub __data: [::std::os::raw::c_char; 126usize],
}
// bindgen-generated layout checks for the storage union and its members.
#[test]
fn bindgen_test_layout_sockaddr_storage__bindgen_ty_1__bindgen_ty_1() {
    assert_eq!(
        ::std::mem::size_of::<sockaddr_storage__bindgen_ty_1__bindgen_ty_1>(),
        128usize,
        concat!(
            "Size of: ",
            stringify!(sockaddr_storage__bindgen_ty_1__bindgen_ty_1)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<sockaddr_storage__bindgen_ty_1__bindgen_ty_1>(),
        2usize,
        concat!(
            "Alignment of ",
            stringify!(sockaddr_storage__bindgen_ty_1__bindgen_ty_1)
        )
    );
    assert_eq!(
        unsafe {
            &(*(::std::ptr::null::<sockaddr_storage__bindgen_ty_1__bindgen_ty_1>())).ss_family
                as *const _ as usize
        },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(sockaddr_storage__bindgen_ty_1__bindgen_ty_1),
            "::",
            stringify!(ss_family)
        )
    );
    assert_eq!(
        unsafe {
            &(*(::std::ptr::null::<sockaddr_storage__bindgen_ty_1__bindgen_ty_1>())).__data
                as *const _ as usize
        },
        2usize,
        concat!(
            "Offset of field: ",
            stringify!(sockaddr_storage__bindgen_ty_1__bindgen_ty_1),
            "::",
            stringify!(__data)
        )
    );
}
#[test]
fn bindgen_test_layout_sockaddr_storage__bindgen_ty_1() {
    assert_eq!(
        ::std::mem::size_of::<sockaddr_storage__bindgen_ty_1>(),
        128usize,
        concat!("Size of: ", stringify!(sockaddr_storage__bindgen_ty_1))
    );
    assert_eq!(
        ::std::mem::align_of::<sockaddr_storage__bindgen_ty_1>(),
        4usize,
        concat!("Alignment of ", stringify!(sockaddr_storage__bindgen_ty_1))
    );
    assert_eq!(
        unsafe {
            &(*(::std::ptr::null::<sockaddr_storage__bindgen_ty_1>())).__align as *const _ as usize
        },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(sockaddr_storage__bindgen_ty_1),
            "::",
            stringify!(__align)
        )
    );
}
#[test]
fn bindgen_test_layout_sockaddr_storage() {
    assert_eq!(
        ::std::mem::size_of::<sockaddr_storage>(),
        128usize,
        concat!("Size of: ", stringify!(sockaddr_storage))
    );
    assert_eq!(
        ::std::mem::align_of::<sockaddr_storage>(),
        4usize,
        concat!("Alignment of ", stringify!(sockaddr_storage))
    );
}
// SO_LINGER option value: whether close() blocks until queued data is sent,
// and for how many seconds.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct linger {
    pub l_onoff: ::std::os::raw::c_int,
    pub l_linger: ::std::os::raw::c_int,
}
// bindgen-generated layout check for `linger`.
#[test]
fn bindgen_test_layout_linger() {
    assert_eq!(
        ::std::mem::size_of::<linger>(),
        8usize,
        concat!("Size of: ", stringify!(linger))
    );
    assert_eq!(
        ::std::mem::align_of::<linger>(),
        4usize,
        concat!("Alignment of ", stringify!(linger))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<linger>())).l_onoff as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(linger),
            "::",
            stringify!(l_onoff)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<linger>())).l_linger as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(linger),
            "::",
            stringify!(l_linger)
        )
    );
}
// Message header for sendmsg()/recvmsg(): optional peer address, an iovec
// array for the payload, and an ancillary-data (cmsg) buffer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct msghdr {
    pub msg_name: *mut ::std::os::raw::c_void,
    pub msg_namelen: socklen_t,
    pub msg_iov: *mut iovec,
    pub msg_iovlen: size_t,
    pub msg_control: *mut ::std::os::raw::c_void,
    pub msg_controllen: size_t,
    pub msg_flags: ::std::os::raw::c_int,
}
// bindgen-generated layout check for `msghdr` (28 bytes on this 32-bit ABI).
#[test]
fn bindgen_test_layout_msghdr() {
    assert_eq!(
        ::std::mem::size_of::<msghdr>(),
        28usize,
        concat!("Size of: ", stringify!(msghdr))
    );
    assert_eq!(
        ::std::mem::align_of::<msghdr>(),
        4usize,
        concat!("Alignment of ", stringify!(msghdr))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_name as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_name)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_namelen as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_namelen)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_iov as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_iov)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_iovlen as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_iovlen)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_control as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_control)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_controllen as *const _ as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_controllen)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<msghdr>())).msg_flags as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(msghdr),
            "::",
            stringify!(msg_flags)
        )
    );
}
// Element type for sendmmsg()/recvmmsg(): a `msghdr` plus the number of
// bytes transferred for that message.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct mmsghdr {
    pub msg_hdr: msghdr,
    pub msg_len: ::std::os::raw::c_uint,
}
// bindgen-generated layout check for `mmsghdr`.
#[test]
fn bindgen_test_layout_mmsghdr() {
    assert_eq!(
        ::std::mem::size_of::<mmsghdr>(),
        32usize,
        concat!("Size of: ", stringify!(mmsghdr))
    );
    assert_eq!(
        ::std::mem::align_of::<mmsghdr>(),
        4usize,
        concat!("Alignment of ", stringify!(mmsghdr))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mmsghdr>())).msg_hdr as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(mmsghdr),
            "::",
            stringify!(msg_hdr)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mmsghdr>())).msg_len as *const _ as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(mmsghdr),
            "::",
            stringify!(msg_len)
        )
    );
}
// Ancillary (control) message header stored in `msghdr::msg_control`;
// traversed with `__cmsg_nxthdr` (the CMSG_NXTHDR implementation).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct cmsghdr {
    pub cmsg_len: size_t,
    pub cmsg_level: ::std::os::raw::c_int,
    pub cmsg_type: ::std::os::raw::c_int,
}
// bindgen-generated layout check for `cmsghdr`.
#[test]
fn bindgen_test_layout_cmsghdr() {
    assert_eq!(
        ::std::mem::size_of::<cmsghdr>(),
        12usize,
        concat!("Size of: ", stringify!(cmsghdr))
    );
    assert_eq!(
        ::std::mem::align_of::<cmsghdr>(),
        4usize,
        concat!("Alignment of ", stringify!(cmsghdr))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<cmsghdr>())).cmsg_len as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(cmsghdr),
            "::",
            stringify!(cmsg_len)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<cmsghdr>())).cmsg_level as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(cmsghdr),
            "::",
            stringify!(cmsg_level)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<cmsghdr>())).cmsg_type as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(cmsghdr),
            "::",
            stringify!(cmsg_type)
        )
    );
}
extern "C" {
    pub fn __cmsg_nxthdr(__msg: *mut msghdr, __cmsg: *mut cmsghdr) -> *mut cmsghdr;
}
// Peer credentials delivered via SO_PEERCRED / SCM_CREDENTIALS.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ucred {
    pub pid: pid_t,
    pub uid: uid_t,
    pub gid: gid_t,
}
// bindgen-generated layout check for `ucred`.
#[test]
fn bindgen_test_layout_ucred() {
    assert_eq!(
        ::std::mem::size_of::<ucred>(),
        12usize,
        concat!("Size of: ", stringify!(ucred))
    );
    assert_eq!(
        ::std::mem::align_of::<ucred>(),
        4usize,
        concat!("Alignment of ", stringify!(ucred))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ucred>())).pid as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ucred),
            "::",
            stringify!(pid)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ucred>())).uid as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ucred),
            "::",
            stringify!(uid)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ucred>())).gid as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ucred),
            "::",
            stringify!(gid)
        )
    );
}
// --- BSD socket API (sys/socket.h) extern declarations ---
extern "C" {
    pub fn accept(
        __fd: ::std::os::raw::c_int,
        __addr: *mut sockaddr,
        __addr_length: *mut socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn accept4(
        __fd: ::std::os::raw::c_int,
        __addr: *mut sockaddr,
        __addr_length: *mut socklen_t,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn bind(
        __fd: ::std::os::raw::c_int,
        __addr: *const sockaddr,
        __addr_length: socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn connect(
        __fd: ::std::os::raw::c_int,
        __addr: *const sockaddr,
        __addr_length: socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn getpeername(
        __fd: ::std::os::raw::c_int,
        __addr: *mut sockaddr,
        __addr_length: *mut socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn getsockname(
        __fd: ::std::os::raw::c_int,
        __addr: *mut sockaddr,
        __addr_length: *mut socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn getsockopt(
        __fd: ::std::os::raw::c_int,
        __level: ::std::os::raw::c_int,
        __option: ::std::os::raw::c_int,
        __value: *mut ::std::os::raw::c_void,
        __value_length: *mut socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn listen(
        __fd: ::std::os::raw::c_int,
        __backlog: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn recvmmsg(
        __fd: ::std::os::raw::c_int,
        __msgs: *mut mmsghdr,
        __msg_count: ::std::os::raw::c_uint,
        __flags: ::std::os::raw::c_int,
        __timeout: *const timespec,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn recvmsg(
        __fd: ::std::os::raw::c_int,
        __msg: *mut msghdr,
        __flags: ::std::os::raw::c_int,
    ) -> ssize_t;
}
extern "C" {
    pub fn sendmmsg(
        __fd: ::std::os::raw::c_int,
        __msgs: *const mmsghdr,
        __msg_count: ::std::os::raw::c_uint,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn sendmsg(
        __fd: ::std::os::raw::c_int,
        __msg: *const msghdr,
        __flags: ::std::os::raw::c_int,
    ) -> ssize_t;
}
extern "C" {
    pub fn setsockopt(
        __fd: ::std::os::raw::c_int,
        __level: ::std::os::raw::c_int,
        __option: ::std::os::raw::c_int,
        __value: *const ::std::os::raw::c_void,
        __value_length: socklen_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn shutdown(
        __fd: ::std::os::raw::c_int,
        __how: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn socket(
        __af: ::std::os::raw::c_int,
        __type: ::std::os::raw::c_int,
        __protocol: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn socketpair(
        __af: ::std::os::raw::c_int,
        __type: ::std::os::raw::c_int,
        __protocol: ::std::os::raw::c_int,
        __fds: *mut ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn recv(
        __fd: ::std::os::raw::c_int,
        __buf: *mut ::std::os::raw::c_void,
        __n: size_t,
        __flags: ::std::os::raw::c_int,
    ) -> ssize_t;
}
extern "C" {
    pub fn send(
        __fd: ::std::os::raw::c_int,
        __buf: *const ::std::os::raw::c_void,
        __n: size_t,
        __flags: ::std::os::raw::c_int,
    ) -> ssize_t;
}
extern "C" {
    pub fn sendto(
        __fd: ::std::os::raw::c_int,
        __buf: *const ::std::os::raw::c_void,
        __n: size_t,
        __flags: ::std::os::raw::c_int,
        __dst_addr: *const sockaddr,
        __dst_addr_length: socklen_t,
    ) -> ssize_t;
}
extern "C" {
    pub fn recvfrom(
        __fd: ::std::os::raw::c_int,
        __buf: *mut ::std::os::raw::c_void,
        __n: size_t,
        __flags: ::std::os::raw::c_int,
        __src_addr: *mut sockaddr,
        __src_addr_length: *mut socklen_t,
    ) -> ssize_t;
}
// netdb.h host entry returned by gethostby*() — NULL-terminated alias and
// address lists.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct hostent {
    pub h_name: *mut ::std::os::raw::c_char,
    pub h_aliases: *mut *mut ::std::os::raw::c_char,
    pub h_addrtype: ::std::os::raw::c_int,
    pub h_length: ::std::os::raw::c_int,
    pub h_addr_list: *mut *mut ::std::os::raw::c_char,
}
// bindgen-generated layout check for `hostent`.
#[test]
fn bindgen_test_layout_hostent() {
    assert_eq!(
        ::std::mem::size_of::<hostent>(),
        20usize,
        concat!("Size of: ", stringify!(hostent))
    );
    assert_eq!(
        ::std::mem::align_of::<hostent>(),
        4usize,
        concat!("Alignment of ", stringify!(hostent))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<hostent>())).h_name as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(hostent),
            "::",
            stringify!(h_name)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<hostent>())).h_aliases as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(hostent),
            "::",
            stringify!(h_aliases)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<hostent>())).h_addrtype as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(hostent),
            "::",
            stringify!(h_addrtype)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<hostent>())).h_length as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(hostent),
            "::",
            stringify!(h_length)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<hostent>())).h_addr_list as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(hostent),
            "::",
            stringify!(h_addr_list)
        )
    );
}
// netdb.h network entry returned by getnetby*().
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct netent {
    pub n_name: *mut ::std::os::raw::c_char,
    pub n_aliases: *mut *mut ::std::os::raw::c_char,
    pub n_addrtype: ::std::os::raw::c_int,
    pub n_net: u32,
}
// bindgen-generated layout check for `netent`.
#[test]
fn bindgen_test_layout_netent() {
    assert_eq!(
        ::std::mem::size_of::<netent>(),
        16usize,
        concat!("Size of: ", stringify!(netent))
    );
    assert_eq!(
        ::std::mem::align_of::<netent>(),
        4usize,
        concat!("Alignment of ", stringify!(netent))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<netent>())).n_name as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(netent),
            "::",
            stringify!(n_name)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<netent>())).n_aliases as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(netent),
            "::",
            stringify!(n_aliases)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<netent>())).n_addrtype as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(netent),
            "::",
            stringify!(n_addrtype)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<netent>())).n_net as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(netent),
            "::",
            stringify!(n_net)
        )
    );
}
// netdb.h service entry returned by getservby*(); `s_port` is in network
// byte order per POSIX convention — TODO confirm against bionic docs.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct servent {
    pub s_name: *mut ::std::os::raw::c_char,
    pub s_aliases: *mut *mut ::std::os::raw::c_char,
    pub s_port: ::std::os::raw::c_int,
    pub s_proto: *mut ::std::os::raw::c_char,
}
// bindgen-generated layout check for `servent`.
#[test]
fn bindgen_test_layout_servent() {
    assert_eq!(
        ::std::mem::size_of::<servent>(),
        16usize,
        concat!("Size of: ", stringify!(servent))
    );
    assert_eq!(
        ::std::mem::align_of::<servent>(),
        4usize,
        concat!("Alignment of ", stringify!(servent))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<servent>())).s_name as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(servent),
            "::",
            stringify!(s_name)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<servent>())).s_aliases as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(servent),
            "::",
            stringify!(s_aliases)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<servent>())).s_port as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(servent),
            "::",
            stringify!(s_port)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<servent>())).s_proto as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(servent),
            "::",
            stringify!(s_proto)
        )
    );
}
// netdb.h protocol entry returned by getprotoby*().
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct protoent {
    pub p_name: *mut ::std::os::raw::c_char,
    pub p_aliases: *mut *mut ::std::os::raw::c_char,
    pub p_proto: ::std::os::raw::c_int,
}
// bindgen-generated layout check for `protoent`.
#[test]
fn bindgen_test_layout_protoent() {
    assert_eq!(
        ::std::mem::size_of::<protoent>(),
        12usize,
        concat!("Size of: ", stringify!(protoent))
    );
    assert_eq!(
        ::std::mem::align_of::<protoent>(),
        4usize,
        concat!("Alignment of ", stringify!(protoent))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<protoent>())).p_name as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(protoent),
            "::",
            stringify!(p_name)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<protoent>())).p_aliases as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(protoent),
            "::",
            stringify!(p_aliases)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<protoent>())).p_proto as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(protoent),
            "::",
            stringify!(p_proto)
        )
    );
}
// Linked-list node produced by getaddrinfo(); freed as a whole chain with
// freeaddrinfo().
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct addrinfo {
    pub ai_flags: ::std::os::raw::c_int,
    pub ai_family: ::std::os::raw::c_int,
    pub ai_socktype: ::std::os::raw::c_int,
    pub ai_protocol: ::std::os::raw::c_int,
    pub ai_addrlen: socklen_t,
    pub ai_canonname: *mut ::std::os::raw::c_char,
    pub ai_addr: *mut sockaddr,
    pub ai_next: *mut addrinfo,
}
// bindgen-generated layout check for `addrinfo`.
#[test]
fn bindgen_test_layout_addrinfo() {
    assert_eq!(
        ::std::mem::size_of::<addrinfo>(),
        32usize,
        concat!("Size of: ", stringify!(addrinfo))
    );
    assert_eq!(
        ::std::mem::align_of::<addrinfo>(),
        4usize,
        concat!("Alignment of ", stringify!(addrinfo))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_flags as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_flags)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_family as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_family)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_socktype as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_socktype)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_protocol as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_protocol)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_addrlen as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_addrlen)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_canonname as *const _ as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_canonname)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_addr as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_addr)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<addrinfo>())).ai_next as *const _ as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(addrinfo),
            "::",
            stringify!(ai_next)
        )
    );
}
// --- netdb.h extern declarations: name resolution and *ent enumerators ---
extern "C" {
    pub fn getaddrinfo(
        __node: *const ::std::os::raw::c_char,
        __service: *const ::std::os::raw::c_char,
        __hints: *const addrinfo,
        __result: *mut *mut addrinfo,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn freeaddrinfo(__ptr: *mut addrinfo);
}
extern "C" {
    pub fn getnameinfo(
        __sa: *const sockaddr,
        __sa_length: socklen_t,
        __host: *mut ::std::os::raw::c_char,
        __host_length: size_t,
        __service: *mut ::std::os::raw::c_char,
        __service_length: size_t,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn gai_strerror(__error: ::std::os::raw::c_int) -> *const ::std::os::raw::c_char;
}
// Accessor for the thread-local h_errno used by the legacy gethostby* API.
extern "C" {
    pub fn __get_h_errno() -> *mut ::std::os::raw::c_int;
}
extern "C" {
    pub fn herror(__s: *const ::std::os::raw::c_char);
}
extern "C" {
    pub fn hstrerror(__error: ::std::os::raw::c_int) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn gethostbyaddr(
        __addr: *const ::std::os::raw::c_void,
        __length: socklen_t,
        __type: ::std::os::raw::c_int,
    ) -> *mut hostent;
}
extern "C" {
    pub fn gethostbyaddr_r(
        __addr: *const ::std::os::raw::c_void,
        __length: socklen_t,
        __type: ::std::os::raw::c_int,
        __ret: *mut hostent,
        __buf: *mut ::std::os::raw::c_char,
        __buf_size: size_t,
        __result: *mut *mut hostent,
        __h_errno_ptr: *mut ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn gethostbyname(__name: *const ::std::os::raw::c_char) -> *mut hostent;
}
extern "C" {
    pub fn gethostbyname_r(
        __name: *const ::std::os::raw::c_char,
        __ret: *mut hostent,
        __buf: *mut ::std::os::raw::c_char,
        __buf_size: size_t,
        __result: *mut *mut hostent,
        __h_errno_ptr: *mut ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn gethostbyname2(
        __name: *const ::std::os::raw::c_char,
        __af: ::std::os::raw::c_int,
    ) -> *mut hostent;
}
extern "C" {
    pub fn gethostbyname2_r(
        __name: *const ::std::os::raw::c_char,
        __af: ::std::os::raw::c_int,
        __ret: *mut hostent,
        __buf: *mut ::std::os::raw::c_char,
        __buf_size: size_t,
        __result: *mut *mut hostent,
        __h_errno_ptr: *mut ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn endhostent();
}
extern "C" {
    pub fn gethostent() -> *mut hostent;
}
extern "C" {
    pub fn sethostent(__stay_open: ::std::os::raw::c_int);
}
extern "C" {
    pub fn endnetent();
}
extern "C" {
    pub fn getnetbyaddr(__net: u32, __type: ::std::os::raw::c_int) -> *mut netent;
}
extern "C" {
    pub fn getnetbyname(__name: *const ::std::os::raw::c_char) -> *mut netent;
}
extern "C" {
    pub fn getnetent() -> *mut netent;
}
extern "C" {
    pub fn setnetent(__stay_open: ::std::os::raw::c_int);
}
extern "C" {
    pub fn endprotoent();
}
extern "C" {
    pub fn getprotobyname(__name: *const ::std::os::raw::c_char) -> *mut protoent;
}
extern "C" {
    pub fn getprotobynumber(__proto: ::std::os::raw::c_int) -> *mut protoent;
}
extern "C" {
    pub fn getprotoent() -> *mut protoent;
}
extern "C" {
    pub fn setprotoent(__stay_open: ::std::os::raw::c_int);
}
extern "C" {
    pub fn endservent();
}
extern "C" {
    pub fn getservbyname(
        __name: *const ::std::os::raw::c_char,
        __proto: *const ::std::os::raw::c_char,
    ) -> *mut servent;
}
extern "C" {
    pub fn getservbyport(
        __port_in_network_order: ::std::os::raw::c_int,
        __proto: *const ::std::os::raw::c_char,
    ) -> *mut servent;
}
extern "C" {
    pub fn getservent() -> *mut servent;
}
extern "C" {
    pub fn setservent(__stay_open: ::std::os::raw::c_int);
}
// --- stdio.h types and standard streams ---
pub type fpos_t = off_t;
pub type fpos64_t = off64_t;
// Opaque FILE structure; bionic does not expose its members, so bindgen
// emits a zero-sized placeholder that is only ever used behind a pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sFILE {
    _unused: [u8; 0],
}
pub type FILE = __sFILE;
extern "C" {
    pub static mut stdin: *mut FILE;
}
extern "C" {
    pub static mut stdout: *mut FILE;
}
extern "C" {
    pub static mut stderr: *mut FILE;
}
extern "C" {
pub fn clearerr(__fp: *mut FILE);
}
extern "C" {
pub fn fclose(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn feof(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ferror(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fflush(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fgetc(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fgets(
__buf: *mut ::std::os::raw::c_char,
__size: ::std::os::raw::c_int,
__fp: *mut FILE,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn fprintf(
__fp: *mut FILE,
__fmt: *const ::std::os::raw::c_char,
...
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fputc(__ch: ::std::os::raw::c_int, __fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fputs(__s: *const ::std::os::raw::c_char, __fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fread(
__buf: *mut ::std::os::raw::c_void,
__size: ::std::os::raw::c_uint,
__count: ::std::os::raw::c_uint,
__fp: *mut FILE,
) -> ::std::os::raw::c_uint;
}
extern "C" {
pub fn fscanf(
__fp: *mut FILE,
__fmt: *const ::std::os::raw::c_char,
...
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn fwrite(
__buf: *const ::std::os::raw::c_void,
__size: ::std::os::raw::c_uint,
__count: ::std::os::raw::c_uint,
__fp: *mut FILE,
) -> ::std::os::raw::c_uint;
}
extern "C" {
pub fn getc(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn getchar() -> ::std::os::raw::c_int;
}
extern "C" {
pub fn getdelim(
__line_ptr: *mut *mut ::std::os::raw::c_char,
__line_length_ptr: *mut size_t,
__delimiter: ::std::os::raw::c_int,
__fp: *mut FILE,
) -> ssize_t;
}
extern "C" {
pub fn getline(
__line_ptr: *mut *mut ::std::os::raw::c_char,
__line_length_ptr: *mut size_t,
__fp: *mut FILE,
) -> ssize_t;
}
extern "C" {
pub fn perror(__msg: *const ::std::os::raw::c_char);
}
extern "C" {
pub fn printf(__fmt: *const ::std::os::raw::c_char, ...) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn putc(__ch: ::std::os::raw::c_int, __fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn putchar(__ch: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn puts(__s: *const ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn remove(__path: *const ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn rewind(__fp: *mut FILE);
}
extern "C" {
pub fn scanf(__fmt: *const ::std::os::raw::c_char, ...) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn setbuf(__fp: *mut FILE, __buf: *mut ::std::os::raw::c_char);
}
extern "C" {
pub fn setvbuf(
__fp: *mut FILE,
__buf: *mut ::std::os::raw::c_char,
__mode: ::std::os::raw::c_int,
__size: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sscanf(
__s: *const ::std::os::raw::c_char,
__fmt: *const ::std::os::raw::c_char,
...
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ungetc(__ch: ::std::os::raw::c_int, __fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn vfprintf(
__fp: *mut FILE,
__fmt: *const ::std::os::raw::c_char,
__args: __builtin_va_list,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn vprintf(
__fp: *const ::std::os::raw::c_char,
__args: __builtin_va_list,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn dprintf(
__fd: ::std::os::raw::c_int,
__fmt: *const ::std::os::raw::c_char,
...
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn vdprintf(
__fd: ::std::os::raw::c_int,
__fmt: *const ::std::os::raw::c_char,
__args: va_list,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sprintf(
__s: *mut ::std::os::raw::c_char,
__fmt: *const ::std::os::raw::c_char,
...
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn vsprintf(
__s: *mut ::std::os::raw::c_char,
__fmt: *const ::std::os::raw::c_char,
__args: __builtin_va_list,
) -> ::std::os::raw::c_int;
}
// Bindgen-generated FFI declarations for C stdio: temporary-file naming
// (tmpnam/tempnam), rename/renameat, stream positioning (fseek/ftell,
// fgetpos/fsetpos, and the explicit 64-bit off_t variants), and stream
// opening (fopen/freopen/tmpfile plus *64 variants). Signatures mirror
// the C prototypes one-to-one; no behavior lives on the Rust side.
extern "C" {
    pub fn tmpnam(__s: *mut ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn tempnam(
        __dir: *const ::std::os::raw::c_char,
        __prefix: *const ::std::os::raw::c_char,
    ) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn rename(
        __old_path: *const ::std::os::raw::c_char,
        __new_path: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn renameat(
        __old_dir_fd: ::std::os::raw::c_int,
        __old_path: *const ::std::os::raw::c_char,
        __new_dir_fd: ::std::os::raw::c_int,
        __new_path: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fseek(
        __fp: *mut FILE,
        __offset: ::std::os::raw::c_long,
        __whence: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ftell(__fp: *mut FILE) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn fgetpos(__fp: *mut FILE, __pos: *mut fpos_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fsetpos(__fp: *mut FILE, __pos: *const fpos_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fseeko(
        __fp: *mut FILE,
        __offset: off_t,
        __whence: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ftello(__fp: *mut FILE) -> off_t;
}
// 64-bit-offset variants for large-file support on targets where off_t is 32-bit.
extern "C" {
    pub fn fgetpos64(__fp: *mut FILE, __pos: *mut fpos64_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fsetpos64(__fp: *mut FILE, __pos: *const fpos64_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fseeko64(
        __fp: *mut FILE,
        __offset: off64_t,
        __whence: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ftello64(__fp: *mut FILE) -> off64_t;
}
extern "C" {
    pub fn fopen(
        __path: *const ::std::os::raw::c_char,
        __mode: *const ::std::os::raw::c_char,
    ) -> *mut FILE;
}
extern "C" {
    pub fn fopen64(
        __path: *const ::std::os::raw::c_char,
        __mode: *const ::std::os::raw::c_char,
    ) -> *mut FILE;
}
extern "C" {
    pub fn freopen(
        __path: *const ::std::os::raw::c_char,
        __mode: *const ::std::os::raw::c_char,
        __fp: *mut FILE,
    ) -> *mut FILE;
}
extern "C" {
    pub fn freopen64(
        __path: *const ::std::os::raw::c_char,
        __mode: *const ::std::os::raw::c_char,
        __fp: *mut FILE,
    ) -> *mut FILE;
}
extern "C" {
    pub fn tmpfile() -> *mut FILE;
}
extern "C" {
    pub fn tmpfile64() -> *mut FILE;
}
// FFI declarations for C formatted I/O (snprintf and the va_list-based
// v*scanf/vsnprintf/vsscanf family) and stream utilities: ctermid, fdopen,
// fileno, popen/pclose, explicit stdio locking (flockfile and friends), and
// the *_unlocked single-character I/O variants that skip internal locking.
// NOTE(review): size parameters appear as c_uint rather than size_t —
// consistent with a 32-bit target (see 4-byte offsets in the layout tests
// later in this file).
extern "C" {
    pub fn snprintf(
        __buf: *mut ::std::os::raw::c_char,
        __size: ::std::os::raw::c_uint,
        __fmt: *const ::std::os::raw::c_char,
        ...
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn vfscanf(
        __fp: *mut FILE,
        __fmt: *const ::std::os::raw::c_char,
        __args: __builtin_va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn vscanf(
        __fmt: *const ::std::os::raw::c_char,
        __args: __builtin_va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn vsnprintf(
        __buf: *mut ::std::os::raw::c_char,
        __size: ::std::os::raw::c_uint,
        __fmt: *const ::std::os::raw::c_char,
        __args: __builtin_va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn vsscanf(
        __s: *const ::std::os::raw::c_char,
        __fmt: *const ::std::os::raw::c_char,
        __args: __builtin_va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ctermid(__buf: *mut ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn fdopen(__fd: ::std::os::raw::c_int, __mode: *const ::std::os::raw::c_char) -> *mut FILE;
}
extern "C" {
    pub fn fileno(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn pclose(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn popen(
        __command: *const ::std::os::raw::c_char,
        __mode: *const ::std::os::raw::c_char,
    ) -> *mut FILE;
}
extern "C" {
    pub fn flockfile(__fp: *mut FILE);
}
extern "C" {
    pub fn ftrylockfile(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn funlockfile(__fp: *mut FILE);
}
extern "C" {
    pub fn getc_unlocked(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn getchar_unlocked() -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn putc_unlocked(__ch: ::std::os::raw::c_int, __fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn putchar_unlocked(__ch: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
// FFI declarations for BSD/GNU stdio extensions: memory-backed streams
// (fmemopen/open_memstream), allocating printf (asprintf/vasprintf),
// fgetln, fpurge, buffering control (setbuffer/setlinebuf), and unlocked
// stream-status queries (clearerr/feof/ferror/fileno _unlocked).
extern "C" {
    pub fn fmemopen(
        __buf: *mut ::std::os::raw::c_void,
        __size: size_t,
        __mode: *const ::std::os::raw::c_char,
    ) -> *mut FILE;
}
extern "C" {
    pub fn open_memstream(
        __ptr: *mut *mut ::std::os::raw::c_char,
        __size_ptr: *mut size_t,
    ) -> *mut FILE;
}
extern "C" {
    pub fn asprintf(
        __s_ptr: *mut *mut ::std::os::raw::c_char,
        __fmt: *const ::std::os::raw::c_char,
        ...
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fgetln(__fp: *mut FILE, __length_ptr: *mut size_t) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn fpurge(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn setbuffer(
        __fp: *mut FILE,
        __buf: *mut ::std::os::raw::c_char,
        __size: ::std::os::raw::c_int,
    );
}
extern "C" {
    pub fn setlinebuf(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn vasprintf(
        __s_ptr: *mut *mut ::std::os::raw::c_char,
        __fmt: *const ::std::os::raw::c_char,
        __args: va_list,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn clearerr_unlocked(__fp: *mut FILE);
}
extern "C" {
    pub fn feof_unlocked(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ferror_unlocked(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn fileno_unlocked(__fp: *mut FILE) -> ::std::os::raw::c_int;
}
// FFI declarations for the C allocator family: malloc/calloc/realloc/
// reallocarray/free, memalign, and malloc_usable_size. Byte counts are
// declared as c_uint here (bindgen's expansion of size_t on this target).
extern "C" {
    pub fn malloc(__byte_count: ::std::os::raw::c_uint) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn calloc(
        __item_count: ::std::os::raw::c_uint,
        __item_size: ::std::os::raw::c_uint,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn realloc(
        __ptr: *mut ::std::os::raw::c_void,
        __byte_count: ::std::os::raw::c_uint,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn reallocarray(
        __ptr: *mut ::std::os::raw::c_void,
        __item_count: size_t,
        __item_size: size_t,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn free(__ptr: *mut ::std::os::raw::c_void);
}
extern "C" {
    pub fn memalign(
        __alignment: ::std::os::raw::c_uint,
        __byte_count: ::std::os::raw::c_uint,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn malloc_usable_size(__ptr: *const ::std::os::raw::c_void) -> size_t;
}
// C-layout mirror of `struct mallinfo` (allocator statistics: arena size,
// block counts, free/used byte totals) followed by the bindgen-emitted
// layout test. The test pins size 40 and alignment 4 — ten size_t fields
// of 4 bytes each, i.e. a 32-bit target. Do not edit fields by hand; any
// change must come from regenerating the bindings.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct mallinfo {
    pub arena: size_t,
    pub ordblks: size_t,
    pub smblks: size_t,
    pub hblks: size_t,
    pub hblkhd: size_t,
    pub usmblks: size_t,
    pub fsmblks: size_t,
    pub uordblks: size_t,
    pub fordblks: size_t,
    pub keepcost: size_t,
}
// Auto-generated layout assertions: size, alignment, and every field offset
// must match the C definition exactly or FFI calls would corrupt memory.
#[test]
fn bindgen_test_layout_mallinfo() {
    assert_eq!(
        ::std::mem::size_of::<mallinfo>(),
        40usize,
        concat!("Size of: ", stringify!(mallinfo))
    );
    assert_eq!(
        ::std::mem::align_of::<mallinfo>(),
        4usize,
        concat!("Alignment of ", stringify!(mallinfo))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).arena as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(arena)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).ordblks as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(ordblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).smblks as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(smblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).hblks as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(hblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).hblkhd as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(hblkhd)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).usmblks as *const _ as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(usmblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).fsmblks as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(fsmblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).uordblks as *const _ as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(uordblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).fordblks as *const _ as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(fordblks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<mallinfo>())).keepcost as *const _ as usize },
        36usize,
        concat!(
            "Offset of field: ",
            stringify!(mallinfo),
            "::",
            stringify!(keepcost)
        )
    );
}
// mallinfo()/malloc_info accessors, heap-tagging level constants
// (M_HEAP_TAGGING_LEVEL_*: NONE/TBI/ASYNC/SYNC), mallopt, and the mutable
// malloc/realloc/free/memalign hook globals. Each hook is an
// Option<unsafe extern "C" fn ...>, Rust's representation of a nullable C
// function pointer. Writing these statics from multiple threads is
// unsynchronized — callers must provide their own ordering.
extern "C" {
    pub fn mallinfo() -> mallinfo;
}
extern "C" {
    pub fn malloc_info(
        __must_be_zero: ::std::os::raw::c_int,
        __fp: *mut FILE,
    ) -> ::std::os::raw::c_int;
}
pub const HeapTaggingLevel_M_HEAP_TAGGING_LEVEL_NONE: HeapTaggingLevel = 0;
pub const HeapTaggingLevel_M_HEAP_TAGGING_LEVEL_TBI: HeapTaggingLevel = 1;
pub const HeapTaggingLevel_M_HEAP_TAGGING_LEVEL_ASYNC: HeapTaggingLevel = 2;
pub const HeapTaggingLevel_M_HEAP_TAGGING_LEVEL_SYNC: HeapTaggingLevel = 3;
pub type HeapTaggingLevel = ::std::os::raw::c_uint;
extern "C" {
    pub fn mallopt(
        __option: ::std::os::raw::c_int,
        __value: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub static mut __malloc_hook: ::std::option::Option<
        unsafe extern "C" fn(
            __byte_count: size_t,
            __caller: *const ::std::os::raw::c_void,
        ) -> *mut ::std::os::raw::c_void,
    >;
}
extern "C" {
    pub static mut __realloc_hook: ::std::option::Option<
        unsafe extern "C" fn(
            __ptr: *mut ::std::os::raw::c_void,
            __byte_count: size_t,
            __caller: *const ::std::os::raw::c_void,
        ) -> *mut ::std::os::raw::c_void,
    >;
}
extern "C" {
    pub static mut __free_hook: ::std::option::Option<
        unsafe extern "C" fn(
            __ptr: *mut ::std::os::raw::c_void,
            __caller: *const ::std::os::raw::c_void,
        ),
    >;
}
extern "C" {
    pub static mut __memalign_hook: ::std::option::Option<
        unsafe extern "C" fn(
            __alignment: size_t,
            __byte_count: size_t,
            __caller: *const ::std::os::raw::c_void,
        ) -> *mut ::std::os::raw::c_void,
    >;
}
// Opaque locale handle (zero-sized placeholder; only ever used behind a
// pointer), process-exit functions (abort/exit/_Exit/atexit/quick_exit),
// environment access (getenv/putenv/setenv/unsetenv/clearenv), and the
// mktemp/mkstemp family of temporary-file template functions, including
// the O_LARGEFILE-style *64 variants.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __locale_t {
    _unused: [u8; 0],
}
pub type locale_t = *mut __locale_t;
extern "C" {
    pub fn abort();
}
extern "C" {
    pub fn exit(__status: ::std::os::raw::c_int);
}
extern "C" {
    pub fn _Exit(__status: ::std::os::raw::c_int);
}
extern "C" {
    pub fn atexit(__fn: ::std::option::Option<unsafe extern "C" fn()>) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn at_quick_exit(
        __fn: ::std::option::Option<unsafe extern "C" fn()>,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn quick_exit(__status: ::std::os::raw::c_int);
}
extern "C" {
    pub fn getenv(__name: *const ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn putenv(__assignment: *mut ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn setenv(
        __name: *const ::std::os::raw::c_char,
        __value: *const ::std::os::raw::c_char,
        __overwrite: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn unsetenv(__name: *const ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn clearenv() -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkdtemp(__template: *mut ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn mktemp(__template: *mut ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn mkostemp64(
        __template: *mut ::std::os::raw::c_char,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkostemp(
        __template: *mut ::std::os::raw::c_char,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkostemps64(
        __template: *mut ::std::os::raw::c_char,
        __suffix_length: ::std::os::raw::c_int,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkostemps(
        __template: *mut ::std::os::raw::c_char,
        __suffix_length: ::std::os::raw::c_int,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkstemp64(__template: *mut ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkstemp(__template: *mut ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkstemps64(
        __template: *mut ::std::os::raw::c_char,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mkstemps(
        __template: *mut ::std::os::raw::c_char,
        __flags: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
// Numeric parsing (strtol/strtoll/strtoul/strtoull, strtod, locale-aware
// strtoul_l, ato* convenience wrappers), aligned allocation
// (posix_memalign/aligned_alloc), realpath, and system().
extern "C" {
    pub fn strtol(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn strtoll(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn strtoul(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_ulong;
}
extern "C" {
    pub fn strtoull(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_ulonglong;
}
extern "C" {
    pub fn posix_memalign(
        __memptr: *mut *mut ::std::os::raw::c_void,
        __alignment: size_t,
        __size: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn aligned_alloc(
        __alignment: ::std::os::raw::c_uint,
        __size: ::std::os::raw::c_uint,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    pub fn strtod(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
    ) -> f64;
}
extern "C" {
    pub fn strtoul_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
        __l: locale_t,
    ) -> ::std::os::raw::c_ulong;
}
extern "C" {
    pub fn atoi(__s: *const ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn atol(__s: *const ::std::os::raw::c_char) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn atoll(__s: *const ::std::os::raw::c_char) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn realpath(
        __path: *const ::std::os::raw::c_char,
        __resolved: *mut ::std::os::raw::c_char,
    ) -> *mut ::std::os::raw::c_char;
}
extern "C" {
    pub fn system(__command: *const ::std::os::raw::c_char) -> ::std::os::raw::c_int;
}
}
extern "C" {
pub fn bsearch(
__key: *const ::std::os::raw::c_void,
__base: *const ::std::os::raw::c_void,
__nmemb: size_t,
__size: size_t,
__comparator: ::std::option::Option<
unsafe extern "C" fn(
__lhs: *const ::std::os::raw::c_void,
__rhs: *const ::std::os::raw::c_void,
) -> ::std::os::raw::c_int,
>,
) -> *mut ::std::os::raw::c_void;
}
extern "C" {
pub fn qsort(
__base: *mut ::std::os::raw::c_void,
__nmemb: size_t,
__size: size_t,
__comparator: ::std::option::Option<
unsafe extern "C" fn(
__lhs: *const ::std::os::raw::c_void,
__rhs: *const ::std::os::raw::c_void,
) -> ::std::os::raw::c_int,
>,
);
}
extern "C" {
pub fn arc4random() -> u32;
}
extern "C" {
pub fn arc4random_uniform(__upper_bound: u32) -> u32;
}
extern "C" {
pub fn arc4random_buf(__buf: *mut ::std::os::raw::c_void, __n: size_t);
}
extern "C" {
pub fn rand_r(__seed_ptr: *mut ::std::os::raw::c_uint) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn drand48() -> f64;
}
extern "C" {
pub fn erand48(__xsubi: *mut ::std::os::raw::c_ushort) -> f64;
}
extern "C" {
pub fn lcong48(__param: *mut ::std::os::raw::c_ushort);
}
extern "C" {
pub fn lrand48() -> ::std::os::raw::c_long;
}
extern "C" {
pub fn mrand48() -> ::std::os::raw::c_long;
}
extern "C" {
pub fn nrand48(__xsubi: *mut ::std::os::raw::c_ushort) -> ::std::os::raw::c_long;
}
extern "C" {
pub fn seed48(__seed16v: *mut ::std::os::raw::c_ushort) -> *mut ::std::os::raw::c_ushort;
}
extern "C" {
pub fn srand48(__seed: ::std::os::raw::c_long);
}
extern "C" {
pub fn initstate(
__seed: ::std::os::raw::c_uint,
__state: *mut ::std::os::raw::c_char,
__n: size_t,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn setstate(__state: *mut ::std::os::raw::c_char) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn getpt() -> ::std::os::raw::c_int;
}
extern "C" {
pub fn posix_openpt(__flags: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ptsname(__fd: ::std::os::raw::c_int) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn ptsname_r(
__fd: ::std::os::raw::c_int,
__buf: *mut ::std::os::raw::c_char,
__n: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn unlockpt(__fd: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn getsubopt(
__option: *mut *mut ::std::os::raw::c_char,
__tokens: *const *mut ::std::os::raw::c_char,
__value_ptr: *mut *mut ::std::os::raw::c_char,
) -> ::std::os::raw::c_int;
}
// C-layout quotient/remainder result structs for div/ldiv/lldiv, each
// followed by its bindgen layout test and the extern declaration of the
// corresponding function. int and long are both 4 bytes here (sizes 8/8),
// while long long is 8 bytes (size 16, align 8) — again a 32-bit target.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct div_t {
    pub quot: ::std::os::raw::c_int,
    pub rem: ::std::os::raw::c_int,
}
#[test]
fn bindgen_test_layout_div_t() {
    assert_eq!(
        ::std::mem::size_of::<div_t>(),
        8usize,
        concat!("Size of: ", stringify!(div_t))
    );
    assert_eq!(
        ::std::mem::align_of::<div_t>(),
        4usize,
        concat!("Alignment of ", stringify!(div_t))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<div_t>())).quot as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(div_t),
            "::",
            stringify!(quot)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<div_t>())).rem as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(div_t),
            "::",
            stringify!(rem)
        )
    );
}
extern "C" {
    pub fn div(__numerator: ::std::os::raw::c_int, __denominator: ::std::os::raw::c_int) -> div_t;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ldiv_t {
    pub quot: ::std::os::raw::c_long,
    pub rem: ::std::os::raw::c_long,
}
#[test]
fn bindgen_test_layout_ldiv_t() {
    assert_eq!(
        ::std::mem::size_of::<ldiv_t>(),
        8usize,
        concat!("Size of: ", stringify!(ldiv_t))
    );
    assert_eq!(
        ::std::mem::align_of::<ldiv_t>(),
        4usize,
        concat!("Alignment of ", stringify!(ldiv_t))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ldiv_t>())).quot as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ldiv_t),
            "::",
            stringify!(quot)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ldiv_t>())).rem as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ldiv_t),
            "::",
            stringify!(rem)
        )
    );
}
extern "C" {
    pub fn ldiv(
        __numerator: ::std::os::raw::c_long,
        __denominator: ::std::os::raw::c_long,
    ) -> ldiv_t;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct lldiv_t {
    pub quot: ::std::os::raw::c_longlong,
    pub rem: ::std::os::raw::c_longlong,
}
#[test]
fn bindgen_test_layout_lldiv_t() {
    assert_eq!(
        ::std::mem::size_of::<lldiv_t>(),
        16usize,
        concat!("Size of: ", stringify!(lldiv_t))
    );
    assert_eq!(
        ::std::mem::align_of::<lldiv_t>(),
        8usize,
        concat!("Alignment of ", stringify!(lldiv_t))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<lldiv_t>())).quot as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(lldiv_t),
            "::",
            stringify!(quot)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<lldiv_t>())).rem as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(lldiv_t),
            "::",
            stringify!(rem)
        )
    );
}
extern "C" {
    pub fn lldiv(
        __numerator: ::std::os::raw::c_longlong,
        __denominator: ::std::os::raw::c_longlong,
    ) -> lldiv_t;
}
// Remaining libc stdlib bindings: getloadavg, program-name accessors,
// multibyte/wide-character conversion (mblen, mbstowcs, mbtowc, wctomb,
// wcstombs, __ctype_get_mb_cur_max), absolute value (abs/labs/llabs),
// float parsing (strtof/atof), classic PRNG seeding (rand/srand,
// random/srandom), grantpt, and the locale-aware strto*_l parsers.
// NOTE(review): strtold_l/strtod_l both return f64 here — consistent with
// long double == double on this target; confirm against the target ABI
// before reuse elsewhere.
extern "C" {
    pub fn getloadavg(__averages: *mut f64, __n: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn getprogname() -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn setprogname(__name: *const ::std::os::raw::c_char);
}
extern "C" {
    pub fn mblen(__s: *const ::std::os::raw::c_char, __n: size_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn mbstowcs(
        __dst: *mut wchar_t,
        __src: *const ::std::os::raw::c_char,
        __n: size_t,
    ) -> size_t;
}
extern "C" {
    pub fn mbtowc(
        __wc_ptr: *mut wchar_t,
        __s: *const ::std::os::raw::c_char,
        __n: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn wctomb(__dst: *mut ::std::os::raw::c_char, __wc: wchar_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn wcstombs(
        __dst: *mut ::std::os::raw::c_char,
        __src: *const wchar_t,
        __n: size_t,
    ) -> size_t;
}
extern "C" {
    pub fn __ctype_get_mb_cur_max() -> size_t;
}
extern "C" {
    pub fn abs(__x: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn labs(__x: ::std::os::raw::c_long) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn llabs(__x: ::std::os::raw::c_longlong) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn strtof(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
    ) -> f32;
}
extern "C" {
    pub fn atof(__s: *const ::std::os::raw::c_char) -> f64;
}
extern "C" {
    pub fn rand() -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn srand(__seed: ::std::os::raw::c_uint);
}
extern "C" {
    pub fn random() -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn srandom(__seed: ::std::os::raw::c_uint);
}
extern "C" {
    pub fn grantpt(__fd: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn strtoll_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
        __l: locale_t,
    ) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn strtoull_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __base: ::std::os::raw::c_int,
        __l: locale_t,
    ) -> ::std::os::raw::c_ulonglong;
}
extern "C" {
    pub fn strtold_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __l: locale_t,
    ) -> f64;
}
extern "C" {
    pub fn strtod_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __l: locale_t,
    ) -> f64;
}
extern "C" {
    pub fn strtof_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        __l: locale_t,
    ) -> f32;
}
extern "C" {
    pub fn strtol_l(
        __s: *const ::std::os::raw::c_char,
        __end_ptr: *mut *mut ::std::os::raw::c_char,
        arg1: ::std::os::raw::c_int,
        __l: locale_t,
    ) -> ::std::os::raw::c_long;
}
// Android multinetwork bindings: a 64-bit opaque network handle, functions
// to bind a socket / the process / DNS resolution to a specific network,
// per-network getaddrinfo, the ResNsendFlags bit flags, and the async DNS
// query API (android_res_nquery/nsend/nresult/cancel) which communicates
// results through a file descriptor.
pub type net_handle_t = u64;
extern "C" {
    pub fn android_setsocknetwork(
        network: net_handle_t,
        fd: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_setprocnetwork(network: net_handle_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_getprocnetwork(network: *mut net_handle_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_setprocdns(network: net_handle_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_getprocdns(network: *mut net_handle_t) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_getaddrinfofornetwork(
        network: net_handle_t,
        node: *const ::std::os::raw::c_char,
        service: *const ::std::os::raw::c_char,
        hints: *const addrinfo,
        res: *mut *mut addrinfo,
    ) -> ::std::os::raw::c_int;
}
// Bit flags for android_res_nquery/android_res_nsend `flags` arguments.
pub const ResNsendFlags_ANDROID_RESOLV_NO_RETRY: ResNsendFlags = 1;
pub const ResNsendFlags_ANDROID_RESOLV_NO_CACHE_STORE: ResNsendFlags = 2;
pub const ResNsendFlags_ANDROID_RESOLV_NO_CACHE_LOOKUP: ResNsendFlags = 4;
pub type ResNsendFlags = u32;
extern "C" {
    pub fn android_res_nquery(
        network: net_handle_t,
        dname: *const ::std::os::raw::c_char,
        ns_class: ::std::os::raw::c_int,
        ns_type: ::std::os::raw::c_int,
        flags: u32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_res_nsend(
        network: net_handle_t,
        msg: *const u8,
        msglen: size_t,
        flags: u32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_res_nresult(
        fd: ::std::os::raw::c_int,
        rcode: *mut ::std::os::raw::c_int,
        answer: *mut u8,
        anslen: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn android_res_cancel(nsend_fd: ::std::os::raw::c_int);
}
// ANativeWindow constants and types: legacy pixel-format enum, buffer
// transform bit flags (ROTATE_180 = MIRROR_HORIZONTAL | MIRROR_VERTICAL,
// ROTATE_270 = ROTATE_90 | ROTATE_180, matching the values below), the
// opaque window handle, and the CPU-accessible locked-buffer descriptor.
pub const ANativeWindow_LegacyFormat_WINDOW_FORMAT_RGBA_8888: ANativeWindow_LegacyFormat = 1;
pub const ANativeWindow_LegacyFormat_WINDOW_FORMAT_RGBX_8888: ANativeWindow_LegacyFormat = 2;
pub const ANativeWindow_LegacyFormat_WINDOW_FORMAT_RGB_565: ANativeWindow_LegacyFormat = 4;
pub type ANativeWindow_LegacyFormat = ::std::os::raw::c_uint;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_IDENTITY: ANativeWindowTransform = 0;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_MIRROR_HORIZONTAL: ANativeWindowTransform =
    1;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_MIRROR_VERTICAL: ANativeWindowTransform =
    2;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_ROTATE_90: ANativeWindowTransform = 4;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_ROTATE_180: ANativeWindowTransform = 3;
pub const ANativeWindowTransform_ANATIVEWINDOW_TRANSFORM_ROTATE_270: ANativeWindowTransform = 7;
pub type ANativeWindowTransform = ::std::os::raw::c_uint;
// Opaque handle: only ever used behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANativeWindow {
    _unused: [u8; 0],
}
// Mirror of the C ANativeWindow_Buffer filled in by ANativeWindow_lock:
// dimensions, row stride, pixel format, and the raw pixel pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANativeWindow_Buffer {
    pub width: i32,
    pub height: i32,
    pub stride: i32,
    pub format: i32,
    pub bits: *mut ::std::os::raw::c_void,
    pub reserved: [u32; 6usize],
}
// Bindgen-emitted layout test for ANativeWindow_Buffer: size 44, align 4,
// with `bits` (a pointer) at offset 16 — i.e. 4-byte pointers, a 32-bit
// target. Regenerate rather than hand-edit if the C header changes.
#[test]
fn bindgen_test_layout_ANativeWindow_Buffer() {
    assert_eq!(
        ::std::mem::size_of::<ANativeWindow_Buffer>(),
        44usize,
        concat!("Size of: ", stringify!(ANativeWindow_Buffer))
    );
    assert_eq!(
        ::std::mem::align_of::<ANativeWindow_Buffer>(),
        4usize,
        concat!("Alignment of ", stringify!(ANativeWindow_Buffer))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).width as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(width)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).height as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(height)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).stride as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(stride)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).format as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(format)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).bits as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(bits)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeWindow_Buffer>())).reserved as *const _ as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeWindow_Buffer),
            "::",
            stringify!(reserved)
        )
    );
}
// FFI declarations for the ANativeWindow C API: reference counting
// (acquire/release), geometry and format queries/setup, software-rendering
// lock/unlock, transform and dataspace control, frame-rate hints
// (setFrameRate / setFrameRateWithChangeStrategy, where compatibility and
// strategy are passed as i8 matching the C int8_t parameters), and
// tryAllocateBuffers. The frame-rate enum constants between the functions
// provide the values for those i8 parameters.
extern "C" {
    pub fn ANativeWindow_acquire(window: *mut ANativeWindow);
}
extern "C" {
    pub fn ANativeWindow_release(window: *mut ANativeWindow);
}
extern "C" {
    pub fn ANativeWindow_getWidth(window: *mut ANativeWindow) -> i32;
}
extern "C" {
    pub fn ANativeWindow_getHeight(window: *mut ANativeWindow) -> i32;
}
extern "C" {
    pub fn ANativeWindow_getFormat(window: *mut ANativeWindow) -> i32;
}
extern "C" {
    pub fn ANativeWindow_setBuffersGeometry(
        window: *mut ANativeWindow,
        width: i32,
        height: i32,
        format: i32,
    ) -> i32;
}
extern "C" {
    pub fn ANativeWindow_lock(
        window: *mut ANativeWindow,
        outBuffer: *mut ANativeWindow_Buffer,
        inOutDirtyBounds: *mut ARect,
    ) -> i32;
}
extern "C" {
    pub fn ANativeWindow_unlockAndPost(window: *mut ANativeWindow) -> i32;
}
extern "C" {
    pub fn ANativeWindow_setBuffersTransform(window: *mut ANativeWindow, transform: i32) -> i32;
}
extern "C" {
    pub fn ANativeWindow_setBuffersDataSpace(window: *mut ANativeWindow, dataSpace: i32) -> i32;
}
extern "C" {
    pub fn ANativeWindow_getBuffersDataSpace(window: *mut ANativeWindow) -> i32;
}
pub const ANativeWindow_FrameRateCompatibility_ANATIVEWINDOW_FRAME_RATE_COMPATIBILITY_DEFAULT:
    ANativeWindow_FrameRateCompatibility = 0;
pub const ANativeWindow_FrameRateCompatibility_ANATIVEWINDOW_FRAME_RATE_COMPATIBILITY_FIXED_SOURCE : ANativeWindow_FrameRateCompatibility = 1 ;
pub type ANativeWindow_FrameRateCompatibility = ::std::os::raw::c_uint;
extern "C" {
    pub fn ANativeWindow_setFrameRate(
        window: *mut ANativeWindow,
        frameRate: f32,
        compatibility: i8,
    ) -> i32;
}
extern "C" {
    pub fn ANativeWindow_tryAllocateBuffers(window: *mut ANativeWindow);
}
pub const ANativeWindow_ChangeFrameRateStrategy_ANATIVEWINDOW_CHANGE_FRAME_RATE_ONLY_IF_SEAMLESS:
    ANativeWindow_ChangeFrameRateStrategy = 0;
pub const ANativeWindow_ChangeFrameRateStrategy_ANATIVEWINDOW_CHANGE_FRAME_RATE_ALWAYS:
    ANativeWindow_ChangeFrameRateStrategy = 1;
pub type ANativeWindow_ChangeFrameRateStrategy = ::std::os::raw::c_uint;
extern "C" {
    pub fn ANativeWindow_setFrameRateWithChangeStrategy(
        window: *mut ANativeWindow,
        frameRate: f32,
        compatibility: i8,
        changeFrameRateStrategy: i8,
    ) -> i32;
}
// C-layout mirror of ANativeActivity: the per-activity state handed to a
// native activity — callback table, JavaVM/JNIEnv pointers, the Java-side
// activity object (clazz), data paths, SDK version, a user `instance` slot,
// the asset manager, and the OBB path. No derives: contains raw JNI
// pointers and is never constructed from Rust in this file.
#[repr(C)]
pub struct ANativeActivity {
    pub callbacks: *mut ANativeActivityCallbacks,
    pub vm: *mut JavaVM,
    pub env: *mut JNIEnv,
    pub clazz: jobject,
    pub internalDataPath: *const ::std::os::raw::c_char,
    pub externalDataPath: *const ::std::os::raw::c_char,
    pub sdkVersion: i32,
    pub instance: *mut ::std::os::raw::c_void,
    pub assetManager: *mut AAssetManager,
    pub obbPath: *const ::std::os::raw::c_char,
}
// Bindgen-emitted layout test for ANativeActivity: size 40, align 4, all
// pointer fields at 4-byte strides — 32-bit pointers on this target.
#[test]
fn bindgen_test_layout_ANativeActivity() {
    assert_eq!(
        ::std::mem::size_of::<ANativeActivity>(),
        40usize,
        concat!("Size of: ", stringify!(ANativeActivity))
    );
    assert_eq!(
        ::std::mem::align_of::<ANativeActivity>(),
        4usize,
        concat!("Alignment of ", stringify!(ANativeActivity))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).callbacks as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(callbacks)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).vm as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(vm)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).env as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(env)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).clazz as *const _ as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(clazz)
        )
    );
    assert_eq!(
        unsafe {
            &(*(::std::ptr::null::<ANativeActivity>())).internalDataPath as *const _ as usize
        },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(internalDataPath)
        )
    );
    assert_eq!(
        unsafe {
            &(*(::std::ptr::null::<ANativeActivity>())).externalDataPath as *const _ as usize
        },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(externalDataPath)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).sdkVersion as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(sdkVersion)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).instance as *const _ as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(instance)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).assetManager as *const _ as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(assetManager)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<ANativeActivity>())).obbPath as *const _ as usize },
        36usize,
        concat!(
            "Offset of field: ",
            stringify!(ANativeActivity),
            "::",
            stringify!(obbPath)
        )
    );
}
// C-layout mirror of the ANativeActivityCallbacks vtable: one slot per
// activity lifecycle / window / input-queue event. Each slot is an
// Option<unsafe extern "C" fn ...> — Rust's representation of a nullable
// C function pointer, so unset callbacks are None.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANativeActivityCallbacks {
    pub onStart: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onResume: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onSaveInstanceState: ::std::option::Option<
        unsafe extern "C" fn(
            activity: *mut ANativeActivity,
            outSize: *mut size_t,
        ) -> *mut ::std::os::raw::c_void,
    >,
    pub onPause: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onStop: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onDestroy: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onWindowFocusChanged: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, hasFocus: ::std::os::raw::c_int),
    >,
    pub onNativeWindowCreated: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, window: *mut ANativeWindow),
    >,
    pub onNativeWindowResized: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, window: *mut ANativeWindow),
    >,
    pub onNativeWindowRedrawNeeded: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, window: *mut ANativeWindow),
    >,
    pub onNativeWindowDestroyed: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, window: *mut ANativeWindow),
    >,
    pub onInputQueueCreated: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, queue: *mut AInputQueue),
    >,
    pub onInputQueueDestroyed: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, queue: *mut AInputQueue),
    >,
    pub onContentRectChanged: ::std::option::Option<
        unsafe extern "C" fn(activity: *mut ANativeActivity, rect: *const ARect),
    >,
    pub onConfigurationChanged:
        ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
    pub onLowMemory: ::std::option::Option<unsafe extern "C" fn(activity: *mut ANativeActivity)>,
}
#[test]
fn bindgen_test_layout_ANativeActivityCallbacks() {
    // Verifies that the Rust layout of `ANativeActivityCallbacks` matches the
    // C layout these bindings were generated against (32-bit ABI: 16
    // pointer-sized callback slots * 4 bytes = 64, alignment 4).
    //
    // Fix: the previous version computed each field offset with
    // `&(*::std::ptr::null::<T>()).field as *const _ as usize`, i.e. by
    // dereferencing a null pointer. That is undefined behavior in Rust even
    // though only the address is taken (Miri rejects it). Newer bindgen
    // instead derives offsets from uninitialized-but-valid storage via
    // `addr_of!`, which never creates a reference and never reads the memory.
    const UNINIT: ::std::mem::MaybeUninit<ANativeActivityCallbacks> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    // offset(field) = address-of-field - base-address.
    macro_rules! field_offset {
        ($field:ident) => {
            // SAFETY: `ptr` points to properly aligned storage large enough
            // for the struct; `addr_of!` computes the field address without
            // reading the (uninitialized) memory behind it.
            unsafe { ::std::ptr::addr_of!((*ptr).$field) as usize - ptr as usize }
        };
    }
    assert_eq!(
        ::std::mem::size_of::<ANativeActivityCallbacks>(),
        64usize,
        concat!("Size of: ", stringify!(ANativeActivityCallbacks))
    );
    assert_eq!(
        ::std::mem::align_of::<ANativeActivityCallbacks>(),
        4usize,
        concat!("Alignment of ", stringify!(ANativeActivityCallbacks))
    );
    assert_eq!(field_offset!(onStart), 0usize, "Offset of field: onStart");
    assert_eq!(field_offset!(onResume), 4usize, "Offset of field: onResume");
    assert_eq!(
        field_offset!(onSaveInstanceState),
        8usize,
        "Offset of field: onSaveInstanceState"
    );
    assert_eq!(field_offset!(onPause), 12usize, "Offset of field: onPause");
    assert_eq!(field_offset!(onStop), 16usize, "Offset of field: onStop");
    assert_eq!(field_offset!(onDestroy), 20usize, "Offset of field: onDestroy");
    assert_eq!(
        field_offset!(onWindowFocusChanged),
        24usize,
        "Offset of field: onWindowFocusChanged"
    );
    assert_eq!(
        field_offset!(onNativeWindowCreated),
        28usize,
        "Offset of field: onNativeWindowCreated"
    );
    assert_eq!(
        field_offset!(onNativeWindowResized),
        32usize,
        "Offset of field: onNativeWindowResized"
    );
    assert_eq!(
        field_offset!(onNativeWindowRedrawNeeded),
        36usize,
        "Offset of field: onNativeWindowRedrawNeeded"
    );
    assert_eq!(
        field_offset!(onNativeWindowDestroyed),
        40usize,
        "Offset of field: onNativeWindowDestroyed"
    );
    assert_eq!(
        field_offset!(onInputQueueCreated),
        44usize,
        "Offset of field: onInputQueueCreated"
    );
    assert_eq!(
        field_offset!(onInputQueueDestroyed),
        48usize,
        "Offset of field: onInputQueueDestroyed"
    );
    assert_eq!(
        field_offset!(onContentRectChanged),
        52usize,
        "Offset of field: onContentRectChanged"
    );
    assert_eq!(
        field_offset!(onConfigurationChanged),
        56usize,
        "Offset of field: onConfigurationChanged"
    );
    assert_eq!(field_offset!(onLowMemory), 60usize, "Offset of field: onLowMemory");
}
pub type ANativeActivity_createFunc = ::std::option::Option<
unsafe extern "C" fn(
activity: *mut ANativeActivity,
savedState: *mut ::std::os::raw::c_void,
savedStateSize: size_t,
),
>;
extern "C" {
pub fn ANativeActivity_onCreate(
activity: *mut ANativeActivity,
savedState: *mut ::std::os::raw::c_void,
savedStateSize: size_t,
);
}
extern "C" {
pub fn ANativeActivity_finish(activity: *mut ANativeActivity);
}
extern "C" {
pub fn ANativeActivity_setWindowFormat(activity: *mut ANativeActivity, format: i32);
}
extern "C" {
pub fn ANativeActivity_setWindowFlags(
activity: *mut ANativeActivity,
addFlags: u32,
removeFlags: u32,
);
}
pub const ANATIVEACTIVITY_SHOW_SOFT_INPUT_IMPLICIT: ::std::os::raw::c_uint = 1;
pub const ANATIVEACTIVITY_SHOW_SOFT_INPUT_FORCED: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_31 = ::std::os::raw::c_uint;
extern "C" {
pub fn ANativeActivity_showSoftInput(activity: *mut ANativeActivity, flags: u32);
}
pub const ANATIVEACTIVITY_HIDE_SOFT_INPUT_IMPLICIT_ONLY: ::std::os::raw::c_uint = 1;
pub const ANATIVEACTIVITY_HIDE_SOFT_INPUT_NOT_ALWAYS: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_32 = ::std::os::raw::c_uint;
extern "C" {
pub fn ANativeActivity_hideSoftInput(activity: *mut ANativeActivity, flags: u32);
}
extern "C" {
pub fn ANativeWindow_fromSurface(env: *mut JNIEnv, surface: jobject) -> *mut ANativeWindow;
}
extern "C" {
pub fn ANativeWindow_toSurface(env: *mut JNIEnv, window: *mut ANativeWindow) -> jobject;
}
pub const OperandCode_ANEURALNETWORKS_FLOAT32: OperandCode = 0;
pub const OperandCode_ANEURALNETWORKS_INT32: OperandCode = 1;
pub const OperandCode_ANEURALNETWORKS_UINT32: OperandCode = 2;
pub const OperandCode_ANEURALNETWORKS_TENSOR_FLOAT32: OperandCode = 3;
pub const OperandCode_ANEURALNETWORKS_TENSOR_INT32: OperandCode = 4;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT8_ASYMM: OperandCode = 5;
pub const OperandCode_ANEURALNETWORKS_BOOL: OperandCode = 6;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT16_SYMM: OperandCode = 7;
pub const OperandCode_ANEURALNETWORKS_TENSOR_FLOAT16: OperandCode = 8;
pub const OperandCode_ANEURALNETWORKS_TENSOR_BOOL8: OperandCode = 9;
pub const OperandCode_ANEURALNETWORKS_FLOAT16: OperandCode = 10;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL: OperandCode = 11;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT16_ASYMM: OperandCode = 12;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT8_SYMM: OperandCode = 13;
pub const OperandCode_ANEURALNETWORKS_TENSOR_QUANT8_ASYMM_SIGNED: OperandCode = 14;
pub const OperandCode_ANEURALNETWORKS_MODEL: OperandCode = 15;
pub type OperandCode = ::std::os::raw::c_uint;
pub const OperationCode_ANEURALNETWORKS_ADD: OperationCode = 0;
pub const OperationCode_ANEURALNETWORKS_AVERAGE_POOL_2D: OperationCode = 1;
pub const OperationCode_ANEURALNETWORKS_CONCATENATION: OperationCode = 2;
pub const OperationCode_ANEURALNETWORKS_CONV_2D: OperationCode = 3;
pub const OperationCode_ANEURALNETWORKS_DEPTHWISE_CONV_2D: OperationCode = 4;
pub const OperationCode_ANEURALNETWORKS_DEPTH_TO_SPACE: OperationCode = 5;
pub const OperationCode_ANEURALNETWORKS_DEQUANTIZE: OperationCode = 6;
pub const OperationCode_ANEURALNETWORKS_EMBEDDING_LOOKUP: OperationCode = 7;
pub const OperationCode_ANEURALNETWORKS_FLOOR: OperationCode = 8;
pub const OperationCode_ANEURALNETWORKS_FULLY_CONNECTED: OperationCode = 9;
pub const OperationCode_ANEURALNETWORKS_HASHTABLE_LOOKUP: OperationCode = 10;
pub const OperationCode_ANEURALNETWORKS_L2_NORMALIZATION: OperationCode = 11;
pub const OperationCode_ANEURALNETWORKS_L2_POOL_2D: OperationCode = 12;
pub const OperationCode_ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION: OperationCode = 13;
pub const OperationCode_ANEURALNETWORKS_LOGISTIC: OperationCode = 14;
pub const OperationCode_ANEURALNETWORKS_LSH_PROJECTION: OperationCode = 15;
pub const OperationCode_ANEURALNETWORKS_LSTM: OperationCode = 16;
pub const OperationCode_ANEURALNETWORKS_MAX_POOL_2D: OperationCode = 17;
pub const OperationCode_ANEURALNETWORKS_MUL: OperationCode = 18;
pub const OperationCode_ANEURALNETWORKS_RELU: OperationCode = 19;
pub const OperationCode_ANEURALNETWORKS_RELU1: OperationCode = 20;
pub const OperationCode_ANEURALNETWORKS_RELU6: OperationCode = 21;
pub const OperationCode_ANEURALNETWORKS_RESHAPE: OperationCode = 22;
pub const OperationCode_ANEURALNETWORKS_RESIZE_BILINEAR: OperationCode = 23;
pub const OperationCode_ANEURALNETWORKS_RNN: OperationCode = 24;
pub const OperationCode_ANEURALNETWORKS_SOFTMAX: OperationCode = 25;
pub const OperationCode_ANEURALNETWORKS_SPACE_TO_DEPTH: OperationCode = 26;
pub const OperationCode_ANEURALNETWORKS_SVDF: OperationCode = 27;
pub const OperationCode_ANEURALNETWORKS_TANH: OperationCode = 28;
pub const OperationCode_ANEURALNETWORKS_BATCH_TO_SPACE_ND: OperationCode = 29;
pub const OperationCode_ANEURALNETWORKS_DIV: OperationCode = 30;
pub const OperationCode_ANEURALNETWORKS_MEAN: OperationCode = 31;
pub const OperationCode_ANEURALNETWORKS_PAD: OperationCode = 32;
pub const OperationCode_ANEURALNETWORKS_SPACE_TO_BATCH_ND: OperationCode = 33;
pub const OperationCode_ANEURALNETWORKS_SQUEEZE: OperationCode = 34;
pub const OperationCode_ANEURALNETWORKS_STRIDED_SLICE: OperationCode = 35;
pub const OperationCode_ANEURALNETWORKS_SUB: OperationCode = 36;
pub const OperationCode_ANEURALNETWORKS_TRANSPOSE: OperationCode = 37;
pub const OperationCode_ANEURALNETWORKS_ABS: OperationCode = 38;
pub const OperationCode_ANEURALNETWORKS_ARGMAX: OperationCode = 39;
pub const OperationCode_ANEURALNETWORKS_ARGMIN: OperationCode = 40;
pub const OperationCode_ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM: OperationCode = 41;
pub const OperationCode_ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM: OperationCode = 42;
pub const OperationCode_ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN: OperationCode = 43;
pub const OperationCode_ANEURALNETWORKS_BOX_WITH_NMS_LIMIT: OperationCode = 44;
pub const OperationCode_ANEURALNETWORKS_CAST: OperationCode = 45;
pub const OperationCode_ANEURALNETWORKS_CHANNEL_SHUFFLE: OperationCode = 46;
pub const OperationCode_ANEURALNETWORKS_DETECTION_POSTPROCESSING: OperationCode = 47;
pub const OperationCode_ANEURALNETWORKS_EQUAL: OperationCode = 48;
pub const OperationCode_ANEURALNETWORKS_EXP: OperationCode = 49;
pub const OperationCode_ANEURALNETWORKS_EXPAND_DIMS: OperationCode = 50;
pub const OperationCode_ANEURALNETWORKS_GATHER: OperationCode = 51;
pub const OperationCode_ANEURALNETWORKS_GENERATE_PROPOSALS: OperationCode = 52;
pub const OperationCode_ANEURALNETWORKS_GREATER: OperationCode = 53;
pub const OperationCode_ANEURALNETWORKS_GREATER_EQUAL: OperationCode = 54;
pub const OperationCode_ANEURALNETWORKS_GROUPED_CONV_2D: OperationCode = 55;
pub const OperationCode_ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT: OperationCode = 56;
pub const OperationCode_ANEURALNETWORKS_INSTANCE_NORMALIZATION: OperationCode = 57;
pub const OperationCode_ANEURALNETWORKS_LESS: OperationCode = 58;
pub const OperationCode_ANEURALNETWORKS_LESS_EQUAL: OperationCode = 59;
pub const OperationCode_ANEURALNETWORKS_LOG: OperationCode = 60;
pub const OperationCode_ANEURALNETWORKS_LOGICAL_AND: OperationCode = 61;
pub const OperationCode_ANEURALNETWORKS_LOGICAL_NOT: OperationCode = 62;
pub const OperationCode_ANEURALNETWORKS_LOGICAL_OR: OperationCode = 63;
pub const OperationCode_ANEURALNETWORKS_LOG_SOFTMAX: OperationCode = 64;
pub const OperationCode_ANEURALNETWORKS_MAXIMUM: OperationCode = 65;
pub const OperationCode_ANEURALNETWORKS_MINIMUM: OperationCode = 66;
pub const OperationCode_ANEURALNETWORKS_NEG: OperationCode = 67;
pub const OperationCode_ANEURALNETWORKS_NOT_EQUAL: OperationCode = 68;
pub const OperationCode_ANEURALNETWORKS_PAD_V2: OperationCode = 69;
pub const OperationCode_ANEURALNETWORKS_POW: OperationCode = 70;
pub const OperationCode_ANEURALNETWORKS_PRELU: OperationCode = 71;
pub const OperationCode_ANEURALNETWORKS_QUANTIZE: OperationCode = 72;
pub const OperationCode_ANEURALNETWORKS_QUANTIZED_16BIT_LSTM: OperationCode = 73;
pub const OperationCode_ANEURALNETWORKS_RANDOM_MULTINOMIAL: OperationCode = 74;
pub const OperationCode_ANEURALNETWORKS_REDUCE_ALL: OperationCode = 75;
pub const OperationCode_ANEURALNETWORKS_REDUCE_ANY: OperationCode = 76;
pub const OperationCode_ANEURALNETWORKS_REDUCE_MAX: OperationCode = 77;
pub const OperationCode_ANEURALNETWORKS_REDUCE_MIN: OperationCode = 78;
pub const OperationCode_ANEURALNETWORKS_REDUCE_PROD: OperationCode = 79;
pub const OperationCode_ANEURALNETWORKS_REDUCE_SUM: OperationCode = 80;
pub const OperationCode_ANEURALNETWORKS_ROI_ALIGN: OperationCode = 81;
pub const OperationCode_ANEURALNETWORKS_ROI_POOLING: OperationCode = 82;
pub const OperationCode_ANEURALNETWORKS_RSQRT: OperationCode = 83;
pub const OperationCode_ANEURALNETWORKS_SELECT: OperationCode = 84;
pub const OperationCode_ANEURALNETWORKS_SIN: OperationCode = 85;
pub const OperationCode_ANEURALNETWORKS_SLICE: OperationCode = 86;
pub const OperationCode_ANEURALNETWORKS_SPLIT: OperationCode = 87;
pub const OperationCode_ANEURALNETWORKS_SQRT: OperationCode = 88;
pub const OperationCode_ANEURALNETWORKS_TILE: OperationCode = 89;
pub const OperationCode_ANEURALNETWORKS_TOPK_V2: OperationCode = 90;
pub const OperationCode_ANEURALNETWORKS_TRANSPOSE_CONV_2D: OperationCode = 91;
pub const OperationCode_ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM: OperationCode = 92;
pub const OperationCode_ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN: OperationCode = 93;
pub const OperationCode_ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR: OperationCode = 94;
pub const OperationCode_ANEURALNETWORKS_QUANTIZED_LSTM: OperationCode = 95;
pub const OperationCode_ANEURALNETWORKS_IF: OperationCode = 96;
pub const OperationCode_ANEURALNETWORKS_WHILE: OperationCode = 97;
pub const OperationCode_ANEURALNETWORKS_ELU: OperationCode = 98;
pub const OperationCode_ANEURALNETWORKS_HARD_SWISH: OperationCode = 99;
pub const OperationCode_ANEURALNETWORKS_FILL: OperationCode = 100;
pub const OperationCode_ANEURALNETWORKS_RANK: OperationCode = 101;
pub type OperationCode = ::std::os::raw::c_uint;
pub const FuseCode_ANEURALNETWORKS_FUSED_NONE: FuseCode = 0;
pub const FuseCode_ANEURALNETWORKS_FUSED_RELU: FuseCode = 1;
pub const FuseCode_ANEURALNETWORKS_FUSED_RELU1: FuseCode = 2;
pub const FuseCode_ANEURALNETWORKS_FUSED_RELU6: FuseCode = 3;
pub type FuseCode = ::std::os::raw::c_uint;
pub const PaddingCode_ANEURALNETWORKS_PADDING_SAME: PaddingCode = 1;
pub const PaddingCode_ANEURALNETWORKS_PADDING_VALID: PaddingCode = 2;
pub type PaddingCode = ::std::os::raw::c_uint;
pub const PreferenceCode_ANEURALNETWORKS_PREFER_LOW_POWER: PreferenceCode = 0;
pub const PreferenceCode_ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER: PreferenceCode = 1;
pub const PreferenceCode_ANEURALNETWORKS_PREFER_SUSTAINED_SPEED: PreferenceCode = 2;
pub type PreferenceCode = ::std::os::raw::c_uint;
pub const DeviceTypeCode_ANEURALNETWORKS_DEVICE_UNKNOWN: DeviceTypeCode = 0;
pub const DeviceTypeCode_ANEURALNETWORKS_DEVICE_OTHER: DeviceTypeCode = 1;
pub const DeviceTypeCode_ANEURALNETWORKS_DEVICE_CPU: DeviceTypeCode = 2;
pub const DeviceTypeCode_ANEURALNETWORKS_DEVICE_GPU: DeviceTypeCode = 3;
pub const DeviceTypeCode_ANEURALNETWORKS_DEVICE_ACCELERATOR: DeviceTypeCode = 4;
pub type DeviceTypeCode = ::std::os::raw::c_uint;
pub const FeatureLevelCode_ANEURALNETWORKS_FEATURE_LEVEL_1: FeatureLevelCode = 27;
pub const FeatureLevelCode_ANEURALNETWORKS_FEATURE_LEVEL_2: FeatureLevelCode = 28;
pub const FeatureLevelCode_ANEURALNETWORKS_FEATURE_LEVEL_3: FeatureLevelCode = 29;
pub const FeatureLevelCode_ANEURALNETWORKS_FEATURE_LEVEL_4: FeatureLevelCode = 30;
pub const FeatureLevelCode_ANEURALNETWORKS_FEATURE_LEVEL_5: FeatureLevelCode = 31;
pub type FeatureLevelCode = ::std::os::raw::c_uint;
pub const ResultCode_ANEURALNETWORKS_NO_ERROR: ResultCode = 0;
pub const ResultCode_ANEURALNETWORKS_OUT_OF_MEMORY: ResultCode = 1;
pub const ResultCode_ANEURALNETWORKS_INCOMPLETE: ResultCode = 2;
pub const ResultCode_ANEURALNETWORKS_UNEXPECTED_NULL: ResultCode = 3;
pub const ResultCode_ANEURALNETWORKS_BAD_DATA: ResultCode = 4;
pub const ResultCode_ANEURALNETWORKS_OP_FAILED: ResultCode = 5;
pub const ResultCode_ANEURALNETWORKS_BAD_STATE: ResultCode = 6;
pub const ResultCode_ANEURALNETWORKS_UNMAPPABLE: ResultCode = 7;
pub const ResultCode_ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE: ResultCode = 8;
pub const ResultCode_ANEURALNETWORKS_UNAVAILABLE_DEVICE: ResultCode = 9;
pub const ResultCode_ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT: ResultCode = 10;
pub const ResultCode_ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT: ResultCode = 11;
pub const ResultCode_ANEURALNETWORKS_RESOURCE_EXHAUSTED_TRANSIENT: ResultCode = 12;
pub const ResultCode_ANEURALNETWORKS_RESOURCE_EXHAUSTED_PERSISTENT: ResultCode = 13;
pub const ResultCode_ANEURALNETWORKS_DEAD_OBJECT: ResultCode = 14;
pub type ResultCode = ::std::os::raw::c_uint;
pub const ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES: ::std::os::raw::c_uint = 128;
pub type _bindgen_ty_33 = ::std::os::raw::c_uint;
pub const ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN: ::std::os::raw::c_uint = 32;
pub type _bindgen_ty_34 = ::std::os::raw::c_uint;
pub const DurationCode_ANEURALNETWORKS_DURATION_ON_HARDWARE: DurationCode = 0;
pub const DurationCode_ANEURALNETWORKS_DURATION_IN_DRIVER: DurationCode = 1;
pub const DurationCode_ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE: DurationCode = 2;
pub const DurationCode_ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER: DurationCode = 3;
pub type DurationCode = ::std::os::raw::c_uint;
pub const PriorityCode_ANEURALNETWORKS_PRIORITY_LOW: PriorityCode = 90;
pub const PriorityCode_ANEURALNETWORKS_PRIORITY_MEDIUM: PriorityCode = 100;
pub const PriorityCode_ANEURALNETWORKS_PRIORITY_HIGH: PriorityCode = 110;
pub const PriorityCode_ANEURALNETWORKS_PRIORITY_DEFAULT: PriorityCode = 100;
pub type PriorityCode = ::std::os::raw::c_uint;
/// Opaque NNAPI shared-memory handle. The C struct's fields are private, so
/// bindgen emits a zero-sized marker type that is only ever used behind raw
/// pointers returned/accepted by the `ANeuralNetworksMemory_*` functions.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksMemory {
    _unused: [u8; 0],
}
/// Opaque NNAPI model handle; zero-sized marker used only behind raw
/// pointers (created/consumed by the `ANeuralNetworksModel_*` functions).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksModel {
    _unused: [u8; 0],
}
/// Opaque NNAPI compilation handle; zero-sized marker used only behind raw
/// pointers (see the `ANeuralNetworksCompilation_*` functions).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksCompilation {
    _unused: [u8; 0],
}
/// Opaque NNAPI execution handle; zero-sized marker used only behind raw
/// pointers (see the `ANeuralNetworksExecution_*` functions).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksExecution {
    _unused: [u8; 0],
}
/// Per-channel symmetric quantization parameters for an NNAPI operand
/// (mirrors `ANeuralNetworksSymmPerChannelQuantParams` in the NDK headers).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksSymmPerChannelQuantParams {
    // Index of the dimension along which the per-channel scales apply.
    pub channelDim: u32,
    // Number of entries pointed to by `scales`.
    pub scaleCount: u32,
    // Raw pointer to `scaleCount` scale factors; lifetime/ownership is the
    // caller's responsibility per the NDK contract — verify against NDK docs.
    pub scales: *const f32,
}
#[test]
fn bindgen_test_layout_ANeuralNetworksSymmPerChannelQuantParams() {
    // Verifies the Rust layout of `ANeuralNetworksSymmPerChannelQuantParams`
    // against the C layout (32-bit ABI: two u32 fields + one 4-byte pointer
    // = 12 bytes, alignment 4).
    //
    // Fix: offsets were previously computed by dereferencing a null pointer
    // (`&(*null()).field`), which is undefined behavior. Use the
    // `MaybeUninit` + `addr_of!` pattern that newer bindgen generates.
    const UNINIT: ::std::mem::MaybeUninit<ANeuralNetworksSymmPerChannelQuantParams> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    macro_rules! field_offset {
        ($field:ident) => {
            // SAFETY: `ptr` is aligned, non-null storage for the struct;
            // `addr_of!` takes the field address without reading it.
            unsafe { ::std::ptr::addr_of!((*ptr).$field) as usize - ptr as usize }
        };
    }
    assert_eq!(
        ::std::mem::size_of::<ANeuralNetworksSymmPerChannelQuantParams>(),
        12usize,
        concat!(
            "Size of: ",
            stringify!(ANeuralNetworksSymmPerChannelQuantParams)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ANeuralNetworksSymmPerChannelQuantParams>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ANeuralNetworksSymmPerChannelQuantParams)
        )
    );
    assert_eq!(field_offset!(channelDim), 0usize, "Offset of field: channelDim");
    assert_eq!(field_offset!(scaleCount), 4usize, "Offset of field: scaleCount");
    assert_eq!(field_offset!(scales), 8usize, "Offset of field: scales");
}
/// Opaque NNAPI burst-execution handle; zero-sized marker used only behind
/// raw pointers (see `ANeuralNetworksBurst_create`/`_free`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksBurst {
    _unused: [u8; 0],
}
/// Description of an NNAPI operand: element type, dimensions, and
/// quantization parameters (mirrors `ANeuralNetworksOperandType` in the NDK).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksOperandType {
    // One of the `OperandCode_*` values above, carried as `i32` at the ABI.
    pub type_: i32,
    // Number of entries pointed to by `dimensions`.
    pub dimensionCount: u32,
    // Raw pointer to `dimensionCount` dimension sizes; caller-managed
    // lifetime per the NDK contract — verify against NDK docs.
    pub dimensions: *const u32,
    // Quantization scale; presumably only meaningful for quantized operand
    // types — confirm against the NDK documentation.
    pub scale: f32,
    // Quantization zero point; same caveat as `scale`.
    pub zeroPoint: i32,
}
#[test]
fn bindgen_test_layout_ANeuralNetworksOperandType() {
    // Verifies the Rust layout of `ANeuralNetworksOperandType` against the C
    // layout (32-bit ABI: i32 + u32 + 4-byte pointer + f32 + i32 = 20 bytes,
    // alignment 4).
    //
    // Fix: offsets were previously computed by dereferencing a null pointer
    // (`&(*null()).field`), which is undefined behavior. Use the
    // `MaybeUninit` + `addr_of!` pattern that newer bindgen generates.
    const UNINIT: ::std::mem::MaybeUninit<ANeuralNetworksOperandType> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    macro_rules! field_offset {
        ($field:ident) => {
            // SAFETY: `ptr` is aligned, non-null storage for the struct;
            // `addr_of!` takes the field address without reading it.
            unsafe { ::std::ptr::addr_of!((*ptr).$field) as usize - ptr as usize }
        };
    }
    assert_eq!(
        ::std::mem::size_of::<ANeuralNetworksOperandType>(),
        20usize,
        concat!("Size of: ", stringify!(ANeuralNetworksOperandType))
    );
    assert_eq!(
        ::std::mem::align_of::<ANeuralNetworksOperandType>(),
        4usize,
        concat!("Alignment of ", stringify!(ANeuralNetworksOperandType))
    );
    assert_eq!(field_offset!(type_), 0usize, "Offset of field: type_");
    assert_eq!(
        field_offset!(dimensionCount),
        4usize,
        "Offset of field: dimensionCount"
    );
    assert_eq!(field_offset!(dimensions), 8usize, "Offset of field: dimensions");
    assert_eq!(field_offset!(scale), 12usize, "Offset of field: scale");
    assert_eq!(field_offset!(zeroPoint), 16usize, "Offset of field: zeroPoint");
}
pub type ANeuralNetworksOperationType = i32;
/// Opaque NNAPI completion-event handle; zero-sized marker used only behind
/// raw pointers (see `ANeuralNetworksEvent_wait`/`_free`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksEvent {
    _unused: [u8; 0],
}
/// Opaque NNAPI device handle; zero-sized marker used only behind raw
/// pointers (see `ANeuralNetworks_getDevice` and `ANeuralNetworksDevice_*`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksDevice {
    _unused: [u8; 0],
}
/// Opaque NNAPI memory-descriptor handle; zero-sized marker used only behind
/// raw pointers (see the `ANeuralNetworksMemoryDesc_*` functions).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ANeuralNetworksMemoryDesc {
    _unused: [u8; 0],
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_create(
desc: *mut *mut ANeuralNetworksMemoryDesc,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_free(desc: *mut ANeuralNetworksMemoryDesc);
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_addInputRole(
desc: *mut ANeuralNetworksMemoryDesc,
compilation: *const ANeuralNetworksCompilation,
index: u32,
frequency: f32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_addOutputRole(
desc: *mut ANeuralNetworksMemoryDesc,
compilation: *const ANeuralNetworksCompilation,
index: u32,
frequency: f32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_setDimensions(
desc: *mut ANeuralNetworksMemoryDesc,
rank: u32,
dimensions: *const u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemoryDesc_finish(
desc: *mut ANeuralNetworksMemoryDesc,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemory_createFromDesc(
desc: *const ANeuralNetworksMemoryDesc,
memory: *mut *mut ANeuralNetworksMemory,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemory_copy(
src: *const ANeuralNetworksMemory,
dst: *const ANeuralNetworksMemory,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworks_getDeviceCount(numDevices: *mut u32) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworks_getDevice(
devIndex: u32,
device: *mut *mut ANeuralNetworksDevice,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksDevice_getName(
device: *const ANeuralNetworksDevice,
name: *mut *const ::std::os::raw::c_char,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksDevice_getType(
device: *const ANeuralNetworksDevice,
type_: *mut i32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksDevice_getVersion(
device: *const ANeuralNetworksDevice,
version: *mut *const ::std::os::raw::c_char,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksDevice_getFeatureLevel(
device: *const ANeuralNetworksDevice,
featureLevel: *mut i64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksDevice_wait(
device: *const ANeuralNetworksDevice,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_getSupportedOperationsForDevices(
model: *const ANeuralNetworksModel,
devices: *const *const ANeuralNetworksDevice,
numDevices: u32,
supportedOps: *mut bool,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_createForDevices(
model: *mut ANeuralNetworksModel,
devices: *const *const ANeuralNetworksDevice,
numDevices: u32,
compilation: *mut *mut ANeuralNetworksCompilation,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_setCaching(
compilation: *mut ANeuralNetworksCompilation,
cacheDir: *const ::std::os::raw::c_char,
token: *const u8,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_compute(
execution: *mut ANeuralNetworksExecution,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_getOutputOperandRank(
execution: *mut ANeuralNetworksExecution,
index: i32,
rank: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_getOutputOperandDimensions(
execution: *mut ANeuralNetworksExecution,
index: i32,
dimensions: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksBurst_create(
compilation: *mut ANeuralNetworksCompilation,
burst: *mut *mut ANeuralNetworksBurst,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksBurst_free(burst: *mut ANeuralNetworksBurst);
}
extern "C" {
pub fn ANeuralNetworksExecution_burstCompute(
execution: *mut ANeuralNetworksExecution,
burst: *mut ANeuralNetworksBurst,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemory_createFromAHardwareBuffer(
ahwb: *const AHardwareBuffer,
memory: *mut *mut ANeuralNetworksMemory,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setMeasureTiming(
execution: *mut ANeuralNetworksExecution,
measure: bool,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_getDuration(
execution: *const ANeuralNetworksExecution,
durationCode: i32,
duration: *mut u64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemory_createFromFd(
size: size_t,
protect: ::std::os::raw::c_int,
fd: ::std::os::raw::c_int,
offset: size_t,
memory: *mut *mut ANeuralNetworksMemory,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksMemory_free(memory: *mut ANeuralNetworksMemory);
}
extern "C" {
pub fn ANeuralNetworksModel_create(
model: *mut *mut ANeuralNetworksModel,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_free(model: *mut ANeuralNetworksModel);
}
extern "C" {
pub fn ANeuralNetworksModel_finish(model: *mut ANeuralNetworksModel) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_addOperand(
model: *mut ANeuralNetworksModel,
type_: *const ANeuralNetworksOperandType,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_setOperandValue(
model: *mut ANeuralNetworksModel,
index: i32,
buffer: *const ::std::os::raw::c_void,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(
model: *mut ANeuralNetworksModel,
index: i32,
channelQuant: *const ANeuralNetworksSymmPerChannelQuantParams,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_setOperandValueFromMemory(
model: *mut ANeuralNetworksModel,
index: i32,
memory: *const ANeuralNetworksMemory,
offset: size_t,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_setOperandValueFromModel(
model: *mut ANeuralNetworksModel,
index: i32,
value: *const ANeuralNetworksModel,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_addOperation(
model: *mut ANeuralNetworksModel,
type_: ANeuralNetworksOperationType,
inputCount: u32,
inputs: *const u32,
outputCount: u32,
outputs: *const u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_identifyInputsAndOutputs(
model: *mut ANeuralNetworksModel,
inputCount: u32,
inputs: *const u32,
outputCount: u32,
outputs: *const u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksModel_relaxComputationFloat32toFloat16(
model: *mut ANeuralNetworksModel,
allow: bool,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_create(
model: *mut ANeuralNetworksModel,
compilation: *mut *mut ANeuralNetworksCompilation,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_free(compilation: *mut ANeuralNetworksCompilation);
}
extern "C" {
pub fn ANeuralNetworksCompilation_setPreference(
compilation: *mut ANeuralNetworksCompilation,
preference: i32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_finish(
compilation: *mut ANeuralNetworksCompilation,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_setPriority(
compilation: *mut ANeuralNetworksCompilation,
priority: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_setTimeout(
compilation: *mut ANeuralNetworksCompilation,
duration: u64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_create(
compilation: *mut ANeuralNetworksCompilation,
execution: *mut *mut ANeuralNetworksExecution,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_free(execution: *mut ANeuralNetworksExecution);
}
extern "C" {
pub fn ANeuralNetworksExecution_setInput(
execution: *mut ANeuralNetworksExecution,
index: i32,
type_: *const ANeuralNetworksOperandType,
buffer: *const ::std::os::raw::c_void,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setInputFromMemory(
execution: *mut ANeuralNetworksExecution,
index: i32,
type_: *const ANeuralNetworksOperandType,
memory: *const ANeuralNetworksMemory,
offset: size_t,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setOutput(
execution: *mut ANeuralNetworksExecution,
index: i32,
type_: *const ANeuralNetworksOperandType,
buffer: *mut ::std::os::raw::c_void,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setOutputFromMemory(
execution: *mut ANeuralNetworksExecution,
index: i32,
type_: *const ANeuralNetworksOperandType,
memory: *const ANeuralNetworksMemory,
offset: size_t,
length: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_startCompute(
execution: *mut ANeuralNetworksExecution,
event: *mut *mut ANeuralNetworksEvent,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setTimeout(
execution: *mut ANeuralNetworksExecution,
duration: u64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setLoopTimeout(
execution: *mut ANeuralNetworksExecution,
duration: u64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworks_getDefaultLoopTimeout() -> u64;
}
extern "C" {
pub fn ANeuralNetworks_getMaximumLoopTimeout() -> u64;
}
extern "C" {
pub fn ANeuralNetworksEvent_wait(event: *mut ANeuralNetworksEvent) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksEvent_free(event: *mut ANeuralNetworksEvent);
}
extern "C" {
pub fn ANeuralNetworksEvent_createFromSyncFenceFd(
sync_fence_fd: ::std::os::raw::c_int,
event: *mut *mut ANeuralNetworksEvent,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksEvent_getSyncFenceFd(
event: *const ANeuralNetworksEvent,
sync_fence_fd: *mut ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_startComputeWithDependencies(
execution: *mut ANeuralNetworksExecution,
dependencies: *const *const ANeuralNetworksEvent,
num_dependencies: u32,
duration: u64,
event: *mut *mut ANeuralNetworksEvent,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworks_getRuntimeFeatureLevel() -> i64;
}
extern "C" {
pub fn ANeuralNetworksExecution_enableInputAndOutputPadding(
execution: *mut ANeuralNetworksExecution,
enable: bool,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
compilation: *const ANeuralNetworksCompilation,
index: u32,
alignment: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(
compilation: *const ANeuralNetworksCompilation,
index: u32,
padding: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(
compilation: *const ANeuralNetworksCompilation,
index: u32,
alignment: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(
compilation: *const ANeuralNetworksCompilation,
index: u32,
padding: *mut u32,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn ANeuralNetworksExecution_setReusable(
execution: *mut ANeuralNetworksExecution,
reusable: bool,
) -> ::std::os::raw::c_int;
}
// ---------------------------------------------------------------------------
// OBB scanner bindings.
// ---------------------------------------------------------------------------
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AObbInfo {
    // Opaque FFI handle; contents are only reachable through the accessor
    // functions below. The zero-sized field prevents construction from Rust.
    _unused: [u8; 0],
}
pub const AOBBINFO_OVERLAY: ::std::os::raw::c_uint = 1;
// bindgen's alias for the anonymous C enum containing AOBBINFO_OVERLAY.
pub type _bindgen_ty_35 = ::std::os::raw::c_uint;
extern "C" {
    // Returns an owned *mut AObbInfo; release it with AObbInfo_delete.
    pub fn AObbScanner_getObbInfo(filename: *const ::std::os::raw::c_char) -> *mut AObbInfo;
}
extern "C" {
    pub fn AObbInfo_delete(obbInfo: *mut AObbInfo);
}
extern "C" {
    pub fn AObbInfo_getPackageName(obbInfo: *mut AObbInfo) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn AObbInfo_getVersion(obbInfo: *mut AObbInfo) -> i32;
}
extern "C" {
    pub fn AObbInfo_getFlags(obbInfo: *mut AObbInfo) -> i32;
}
// ---------------------------------------------------------------------------
// Permission-manager result and status codes, plus the check entry point.
// ---------------------------------------------------------------------------
pub const PERMISSION_MANAGER_PERMISSION_GRANTED: ::std::os::raw::c_int = 0;
pub const PERMISSION_MANAGER_PERMISSION_DENIED: ::std::os::raw::c_int = -1;
pub type _bindgen_ty_36 = ::std::os::raw::c_int;
pub const PERMISSION_MANAGER_STATUS_OK: ::std::os::raw::c_int = 0;
pub const PERMISSION_MANAGER_STATUS_ERROR_UNKNOWN: ::std::os::raw::c_int = -1;
pub const PERMISSION_MANAGER_STATUS_SERVICE_UNAVAILABLE: ::std::os::raw::c_int = -2;
pub type _bindgen_ty_37 = ::std::os::raw::c_int;
extern "C" {
    // Writes a PERMISSION_MANAGER_PERMISSION_* value through outResult and
    // returns a PERMISSION_MANAGER_STATUS_* code.
    // NOTE(review): that pairing is inferred from the constant names above —
    // confirm against the NDK permission manager docs.
    pub fn APermissionManager_checkPermission(
        permission: *const ::std::os::raw::c_char,
        pid: pid_t,
        uid: uid_t,
        outResult: *mut i32,
    ) -> i32;
}
// ---------------------------------------------------------------------------
// libm (C math.h) bindings. Each double-precision function is paired with its
// single-precision `*f` variant.
// ---------------------------------------------------------------------------
pub type __double_t = f64;
pub type double_t = __double_t;
pub type __float_t = f32;
pub type float_t = __float_t;
// Inverse and direct trigonometric functions.
extern "C" {
    pub fn acos(__x: f64) -> f64;
}
extern "C" {
    pub fn acosf(__x: f32) -> f32;
}
extern "C" {
    pub fn asin(__x: f64) -> f64;
}
extern "C" {
    pub fn asinf(__x: f32) -> f32;
}
extern "C" {
    pub fn atan(__x: f64) -> f64;
}
extern "C" {
    pub fn atanf(__x: f32) -> f32;
}
extern "C" {
    pub fn atan2(__y: f64, __x: f64) -> f64;
}
extern "C" {
    pub fn atan2f(__y: f32, __x: f32) -> f32;
}
extern "C" {
    pub fn cos(__x: f64) -> f64;
}
extern "C" {
    pub fn cosf(__x: f32) -> f32;
}
extern "C" {
    pub fn sin(__x: f64) -> f64;
}
extern "C" {
    pub fn sinf(__x: f32) -> f32;
}
extern "C" {
    pub fn tan(__x: f64) -> f64;
}
extern "C" {
    pub fn tanf(__x: f32) -> f32;
}
// Hyperbolic functions and their inverses.
extern "C" {
    pub fn acosh(__x: f64) -> f64;
}
extern "C" {
    pub fn acoshf(__x: f32) -> f32;
}
extern "C" {
    pub fn asinh(__x: f64) -> f64;
}
extern "C" {
    pub fn asinhf(__x: f32) -> f32;
}
extern "C" {
    pub fn atanh(__x: f64) -> f64;
}
extern "C" {
    pub fn atanhf(__x: f32) -> f32;
}
extern "C" {
    pub fn cosh(__x: f64) -> f64;
}
extern "C" {
    pub fn coshf(__x: f32) -> f32;
}
extern "C" {
    pub fn sinh(__x: f64) -> f64;
}
extern "C" {
    pub fn sinhf(__x: f32) -> f32;
}
extern "C" {
    pub fn tanh(__x: f64) -> f64;
}
extern "C" {
    pub fn tanhf(__x: f32) -> f32;
}
// Exponentials and logarithms.
extern "C" {
    pub fn exp(__x: f64) -> f64;
}
extern "C" {
    pub fn expf(__x: f32) -> f32;
}
extern "C" {
    pub fn exp2(__x: f64) -> f64;
}
extern "C" {
    pub fn exp2f(__x: f32) -> f32;
}
extern "C" {
    pub fn expm1(__x: f64) -> f64;
}
extern "C" {
    pub fn expm1f(__x: f32) -> f32;
}
extern "C" {
    pub fn frexp(__x: f64, __exponent: *mut ::std::os::raw::c_int) -> f64;
}
extern "C" {
    pub fn frexpf(__x: f32, __exponent: *mut ::std::os::raw::c_int) -> f32;
}
extern "C" {
    pub fn ilogb(__x: f64) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ilogbf(__x: f32) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ldexp(__x: f64, __exponent: ::std::os::raw::c_int) -> f64;
}
extern "C" {
    pub fn ldexpf(__x: f32, __exponent: ::std::os::raw::c_int) -> f32;
}
extern "C" {
    pub fn log(__x: f64) -> f64;
}
extern "C" {
    pub fn logf(__x: f32) -> f32;
}
extern "C" {
    pub fn log10(__x: f64) -> f64;
}
extern "C" {
    pub fn log10f(__x: f32) -> f32;
}
extern "C" {
    pub fn log1p(__x: f64) -> f64;
}
extern "C" {
    pub fn log1pf(__x: f32) -> f32;
}
extern "C" {
    pub fn log2(__x: f64) -> f64;
}
extern "C" {
    pub fn log2f(__x: f32) -> f32;
}
extern "C" {
    pub fn logb(__x: f64) -> f64;
}
extern "C" {
    pub fn logbf(__x: f32) -> f32;
}
extern "C" {
    pub fn modf(__x: f64, __integral_part: *mut f64) -> f64;
}
extern "C" {
    pub fn modff(__x: f32, __integral_part: *mut f32) -> f32;
}
// Scaling by powers of the radix.
// NOTE(review): the `*l` ("long double") variants below are bound as f64 —
// presumably a target where long double == double; confirm for the target ABI.
extern "C" {
    pub fn scalbn(__x: f64, __exponent: ::std::os::raw::c_int) -> f64;
}
extern "C" {
    pub fn scalbnf(__x: f32, __exponent: ::std::os::raw::c_int) -> f32;
}
extern "C" {
    pub fn scalbln(__x: f64, __exponent: ::std::os::raw::c_long) -> f64;
}
extern "C" {
    pub fn scalblnf(__x: f32, __exponent: ::std::os::raw::c_long) -> f32;
}
extern "C" {
    pub fn scalblnl(__x: f64, __exponent: ::std::os::raw::c_long) -> f64;
}
// Roots, magnitude and powers.
extern "C" {
    pub fn cbrt(__x: f64) -> f64;
}
extern "C" {
    pub fn cbrtf(__x: f32) -> f32;
}
extern "C" {
    pub fn fabs(__x: f64) -> f64;
}
extern "C" {
    pub fn fabsf(__x: f32) -> f32;
}
extern "C" {
    pub fn hypot(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn hypotf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn pow(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn powf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn sqrt(__x: f64) -> f64;
}
extern "C" {
    pub fn sqrtf(__x: f32) -> f32;
}
// Error and gamma functions.
extern "C" {
    pub fn erf(__x: f64) -> f64;
}
extern "C" {
    pub fn erff(__x: f32) -> f32;
}
extern "C" {
    pub fn erfc(__x: f64) -> f64;
}
extern "C" {
    pub fn erfcf(__x: f32) -> f32;
}
extern "C" {
    pub fn lgamma(__x: f64) -> f64;
}
extern "C" {
    pub fn lgammaf(__x: f32) -> f32;
}
extern "C" {
    pub fn tgamma(__x: f64) -> f64;
}
extern "C" {
    pub fn tgammaf(__x: f32) -> f32;
}
// Rounding.
extern "C" {
    pub fn ceil(__x: f64) -> f64;
}
extern "C" {
    pub fn ceilf(__x: f32) -> f32;
}
extern "C" {
    pub fn floor(__x: f64) -> f64;
}
extern "C" {
    pub fn floorf(__x: f32) -> f32;
}
extern "C" {
    pub fn nearbyint(__x: f64) -> f64;
}
extern "C" {
    pub fn nearbyintf(__x: f32) -> f32;
}
extern "C" {
    pub fn rint(__x: f64) -> f64;
}
extern "C" {
    pub fn rintf(__x: f32) -> f32;
}
extern "C" {
    pub fn lrint(__x: f64) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn lrintf(__x: f32) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn llrint(__x: f64) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn llrintf(__x: f32) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn round(__x: f64) -> f64;
}
extern "C" {
    pub fn roundf(__x: f32) -> f32;
}
extern "C" {
    pub fn roundl(__x: f64) -> f64;
}
extern "C" {
    pub fn lround(__x: f64) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn lroundf(__x: f32) -> ::std::os::raw::c_long;
}
extern "C" {
    pub fn llround(__x: f64) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn llroundf(__x: f32) -> ::std::os::raw::c_longlong;
}
extern "C" {
    pub fn trunc(__x: f64) -> f64;
}
extern "C" {
    pub fn truncf(__x: f32) -> f32;
}
// Remainders.
extern "C" {
    pub fn fmod(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn fmodf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn remainder(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn remainderf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn remquo(__x: f64, __y: f64, __quotient_bits: *mut ::std::os::raw::c_int) -> f64;
}
extern "C" {
    pub fn remquof(__x: f32, __y: f32, __quotient_bits: *mut ::std::os::raw::c_int) -> f32;
}
// Floating-point manipulation.
extern "C" {
    pub fn copysign(__value: f64, __sign: f64) -> f64;
}
extern "C" {
    pub fn copysignf(__value: f32, __sign: f32) -> f32;
}
extern "C" {
    pub fn nan(__kind: *const ::std::os::raw::c_char) -> f64;
}
extern "C" {
    pub fn nanf(__kind: *const ::std::os::raw::c_char) -> f32;
}
extern "C" {
    pub fn nextafter(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn nextafterf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn nexttoward(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn nexttowardf(__x: f32, __y: f64) -> f32;
}
// Positive difference, min/max and fused multiply-add.
extern "C" {
    pub fn fdim(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn fdimf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn fmax(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn fmaxf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn fmin(__x: f64, __y: f64) -> f64;
}
extern "C" {
    pub fn fminf(__x: f32, __y: f32) -> f32;
}
extern "C" {
    pub fn fma(__x: f64, __y: f64, __z: f64) -> f64;
}
extern "C" {
    pub fn fmaf(__x: f32, __y: f32, __z: f32) -> f32;
}
// Classification.
extern "C" {
    pub fn isinf(__x: f64) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn isnan(__x: f64) -> ::std::os::raw::c_int;
}
// POSIX `signgam` — a mutable process-global written by the lgamma family.
extern "C" {
    pub static mut signgam: ::std::os::raw::c_int;
}
// Bessel functions of the second kind.
extern "C" {
    pub fn y0(__x: f64) -> f64;
}
extern "C" {
    pub fn y1(__x: f64) -> f64;
}
extern "C" {
    pub fn yn(__n: ::std::os::raw::c_int, __x: f64) -> f64;
}
// ---------------------------------------------------------------------------
// Sensor API constants. Each `_bindgen_ty_*` alias is the type bindgen gives
// to the anonymous C enum that precedes it.
// ---------------------------------------------------------------------------
// Sensor types (values match the corresponding ASensorEvent `type_` field).
pub const ASENSOR_TYPE_INVALID: ::std::os::raw::c_int = -1;
pub const ASENSOR_TYPE_ACCELEROMETER: ::std::os::raw::c_int = 1;
pub const ASENSOR_TYPE_MAGNETIC_FIELD: ::std::os::raw::c_int = 2;
pub const ASENSOR_TYPE_GYROSCOPE: ::std::os::raw::c_int = 4;
pub const ASENSOR_TYPE_LIGHT: ::std::os::raw::c_int = 5;
pub const ASENSOR_TYPE_PRESSURE: ::std::os::raw::c_int = 6;
pub const ASENSOR_TYPE_PROXIMITY: ::std::os::raw::c_int = 8;
pub const ASENSOR_TYPE_GRAVITY: ::std::os::raw::c_int = 9;
pub const ASENSOR_TYPE_LINEAR_ACCELERATION: ::std::os::raw::c_int = 10;
pub const ASENSOR_TYPE_ROTATION_VECTOR: ::std::os::raw::c_int = 11;
pub const ASENSOR_TYPE_RELATIVE_HUMIDITY: ::std::os::raw::c_int = 12;
pub const ASENSOR_TYPE_AMBIENT_TEMPERATURE: ::std::os::raw::c_int = 13;
pub const ASENSOR_TYPE_MAGNETIC_FIELD_UNCALIBRATED: ::std::os::raw::c_int = 14;
pub const ASENSOR_TYPE_GAME_ROTATION_VECTOR: ::std::os::raw::c_int = 15;
pub const ASENSOR_TYPE_GYROSCOPE_UNCALIBRATED: ::std::os::raw::c_int = 16;
pub const ASENSOR_TYPE_SIGNIFICANT_MOTION: ::std::os::raw::c_int = 17;
pub const ASENSOR_TYPE_STEP_DETECTOR: ::std::os::raw::c_int = 18;
pub const ASENSOR_TYPE_STEP_COUNTER: ::std::os::raw::c_int = 19;
pub const ASENSOR_TYPE_GEOMAGNETIC_ROTATION_VECTOR: ::std::os::raw::c_int = 20;
pub const ASENSOR_TYPE_HEART_RATE: ::std::os::raw::c_int = 21;
pub const ASENSOR_TYPE_POSE_6DOF: ::std::os::raw::c_int = 28;
pub const ASENSOR_TYPE_STATIONARY_DETECT: ::std::os::raw::c_int = 29;
pub const ASENSOR_TYPE_MOTION_DETECT: ::std::os::raw::c_int = 30;
pub const ASENSOR_TYPE_HEART_BEAT: ::std::os::raw::c_int = 31;
pub const ASENSOR_TYPE_ADDITIONAL_INFO: ::std::os::raw::c_int = 33;
pub const ASENSOR_TYPE_LOW_LATENCY_OFFBODY_DETECT: ::std::os::raw::c_int = 34;
pub const ASENSOR_TYPE_ACCELEROMETER_UNCALIBRATED: ::std::os::raw::c_int = 35;
pub const ASENSOR_TYPE_HINGE_ANGLE: ::std::os::raw::c_int = 36;
pub type _bindgen_ty_38 = ::std::os::raw::c_int;
// Sensor accuracy/status values (see ASensorVector::status).
pub const ASENSOR_STATUS_NO_CONTACT: ::std::os::raw::c_int = -1;
pub const ASENSOR_STATUS_UNRELIABLE: ::std::os::raw::c_int = 0;
pub const ASENSOR_STATUS_ACCURACY_LOW: ::std::os::raw::c_int = 1;
pub const ASENSOR_STATUS_ACCURACY_MEDIUM: ::std::os::raw::c_int = 2;
pub const ASENSOR_STATUS_ACCURACY_HIGH: ::std::os::raw::c_int = 3;
pub type _bindgen_ty_39 = ::std::os::raw::c_int;
// Reporting modes.
pub const AREPORTING_MODE_INVALID: ::std::os::raw::c_int = -1;
pub const AREPORTING_MODE_CONTINUOUS: ::std::os::raw::c_int = 0;
pub const AREPORTING_MODE_ON_CHANGE: ::std::os::raw::c_int = 1;
pub const AREPORTING_MODE_ONE_SHOT: ::std::os::raw::c_int = 2;
pub const AREPORTING_MODE_SPECIAL_TRIGGER: ::std::os::raw::c_int = 3;
pub type _bindgen_ty_40 = ::std::os::raw::c_int;
// Direct-channel rate levels and channel types.
pub const ASENSOR_DIRECT_RATE_STOP: ::std::os::raw::c_uint = 0;
pub const ASENSOR_DIRECT_RATE_NORMAL: ::std::os::raw::c_uint = 1;
pub const ASENSOR_DIRECT_RATE_FAST: ::std::os::raw::c_uint = 2;
pub const ASENSOR_DIRECT_RATE_VERY_FAST: ::std::os::raw::c_uint = 3;
pub type _bindgen_ty_41 = ::std::os::raw::c_uint;
pub const ASENSOR_DIRECT_CHANNEL_TYPE_SHARED_MEMORY: ::std::os::raw::c_uint = 1;
pub const ASENSOR_DIRECT_CHANNEL_TYPE_HARDWARE_BUFFER: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_42 = ::std::os::raw::c_uint;
// ASENSOR_TYPE_ADDITIONAL_INFO payload kinds.
pub const ASENSOR_ADDITIONAL_INFO_BEGIN: ::std::os::raw::c_uint = 0;
pub const ASENSOR_ADDITIONAL_INFO_END: ::std::os::raw::c_uint = 1;
pub const ASENSOR_ADDITIONAL_INFO_UNTRACKED_DELAY: ::std::os::raw::c_uint = 65536;
pub const ASENSOR_ADDITIONAL_INFO_INTERNAL_TEMPERATURE: ::std::os::raw::c_uint = 65537;
pub const ASENSOR_ADDITIONAL_INFO_VEC3_CALIBRATION: ::std::os::raw::c_uint = 65538;
pub const ASENSOR_ADDITIONAL_INFO_SENSOR_PLACEMENT: ::std::os::raw::c_uint = 65539;
pub const ASENSOR_ADDITIONAL_INFO_SAMPLING: ::std::os::raw::c_uint = 65540;
pub type _bindgen_ty_43 = ::std::os::raw::c_uint;
// A 3-axis sensor sample: the union views the same 12 bytes as a raw
// [f32; 3], as x/y/z, or as azimuth/pitch/roll, followed by an accuracy
// `status` byte (an ASENSOR_STATUS_* value) and 3 bytes of padding.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ASensorVector {
    pub __bindgen_anon_1: ASensorVector__bindgen_ty_1,
    pub status: i8,
    pub reserved: [u8; 3usize],
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union ASensorVector__bindgen_ty_1 {
    pub v: [f32; 3usize],
    pub __bindgen_anon_1: ASensorVector__bindgen_ty_1__bindgen_ty_1,
    pub __bindgen_anon_2: ASensorVector__bindgen_ty_1__bindgen_ty_2,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASensorVector__bindgen_ty_1__bindgen_ty_1 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
}
// Layout tests below compute field offsets from an uninitialized value via
// `addr_of!` instead of projecting a field out of a *null* pointer: the
// original `&(*::std::ptr::null::<T>()).field` pattern is undefined
// behavior, and newer bindgen versions emit this MaybeUninit-based form.
#[test]
fn bindgen_test_layout_ASensorVector__bindgen_ty_1__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorVector__bindgen_ty_1__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorVector__bindgen_ty_1__bindgen_ty_1>(),
        12usize,
        "Size of: ASensorVector__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorVector__bindgen_ty_1__bindgen_ty_1>(),
        4usize,
        "Alignment of ASensorVector__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).x) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_1::x"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).y) as usize - ptr as usize },
        4usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_1::y"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).z) as usize - ptr as usize },
        8usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_1::z"
    );
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASensorVector__bindgen_ty_1__bindgen_ty_2 {
    pub azimuth: f32,
    pub pitch: f32,
    pub roll: f32,
}
#[test]
fn bindgen_test_layout_ASensorVector__bindgen_ty_1__bindgen_ty_2() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorVector__bindgen_ty_1__bindgen_ty_2> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorVector__bindgen_ty_1__bindgen_ty_2>(),
        12usize,
        "Size of: ASensorVector__bindgen_ty_1__bindgen_ty_2"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorVector__bindgen_ty_1__bindgen_ty_2>(),
        4usize,
        "Alignment of ASensorVector__bindgen_ty_1__bindgen_ty_2"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).azimuth) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_2::azimuth"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pitch) as usize - ptr as usize },
        4usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_2::pitch"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).roll) as usize - ptr as usize },
        8usize,
        "Offset of field: ASensorVector__bindgen_ty_1__bindgen_ty_2::roll"
    );
}
#[test]
fn bindgen_test_layout_ASensorVector__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorVector__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorVector__bindgen_ty_1>(),
        12usize,
        "Size of: ASensorVector__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorVector__bindgen_ty_1>(),
        4usize,
        "Alignment of ASensorVector__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).v) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorVector__bindgen_ty_1::v"
    );
}
#[test]
fn bindgen_test_layout_ASensorVector() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorVector> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorVector>(),
        16usize,
        "Size of: ASensorVector"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorVector>(),
        4usize,
        "Alignment of ASensorVector"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).status) as usize - ptr as usize },
        12usize,
        "Offset of field: ASensorVector::status"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize },
        13usize,
        "Offset of field: ASensorVector::reserved"
    );
}
// Meta-data event payload: a `what` code and the handle of the sensor it
// refers to.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMetaDataEvent {
    pub what: i32,
    pub sensor: i32,
}
// Offsets are computed via MaybeUninit + `addr_of!` instead of the original
// null-pointer field projection, which is undefined behavior (this matches
// the pattern newer bindgen versions emit).
#[test]
fn bindgen_test_layout_AMetaDataEvent() {
    const UNINIT: ::std::mem::MaybeUninit<AMetaDataEvent> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AMetaDataEvent>(),
        8usize,
        "Size of: AMetaDataEvent"
    );
    assert_eq!(
        ::std::mem::align_of::<AMetaDataEvent>(),
        4usize,
        "Alignment of AMetaDataEvent"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).what) as usize - ptr as usize },
        0usize,
        "Offset of field: AMetaDataEvent::what"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sensor) as usize - ptr as usize },
        4usize,
        "Offset of field: AMetaDataEvent::sensor"
    );
}
// Uncalibrated sensor event payload: an `uncalib` vector part plus a `bias`
// vector part (field names per the C header); each part is a 12-byte union.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct AUncalibratedEvent {
    pub __bindgen_anon_1: AUncalibratedEvent__bindgen_ty_1,
    pub __bindgen_anon_2: AUncalibratedEvent__bindgen_ty_2,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union AUncalibratedEvent__bindgen_ty_1 {
    pub uncalib: [f32; 3usize],
    pub __bindgen_anon_1: AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1 {
    pub x_uncalib: f32,
    pub y_uncalib: f32,
    pub z_uncalib: f32,
}
// Layout tests use MaybeUninit + `addr_of!` for offsets instead of the
// original null-pointer field projection (undefined behavior; newer bindgen
// emits this form).
#[test]
fn bindgen_test_layout_AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1>(),
        12usize,
        "Size of: AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1>(),
        4usize,
        "Alignment of AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).x_uncalib) as usize - ptr as usize },
        0usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1::x_uncalib"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).y_uncalib) as usize - ptr as usize },
        4usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1::y_uncalib"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).z_uncalib) as usize - ptr as usize },
        8usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_1__bindgen_ty_1::z_uncalib"
    );
}
#[test]
fn bindgen_test_layout_AUncalibratedEvent__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<AUncalibratedEvent__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AUncalibratedEvent__bindgen_ty_1>(),
        12usize,
        "Size of: AUncalibratedEvent__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<AUncalibratedEvent__bindgen_ty_1>(),
        4usize,
        "Alignment of AUncalibratedEvent__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).uncalib) as usize - ptr as usize },
        0usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_1::uncalib"
    );
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union AUncalibratedEvent__bindgen_ty_2 {
    pub bias: [f32; 3usize],
    pub __bindgen_anon_1: AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1 {
    pub x_bias: f32,
    pub y_bias: f32,
    pub z_bias: f32,
}
#[test]
fn bindgen_test_layout_AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1>(),
        12usize,
        "Size of: AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1>(),
        4usize,
        "Alignment of AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).x_bias) as usize - ptr as usize },
        0usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1::x_bias"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).y_bias) as usize - ptr as usize },
        4usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1::y_bias"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).z_bias) as usize - ptr as usize },
        8usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_2__bindgen_ty_1::z_bias"
    );
}
#[test]
fn bindgen_test_layout_AUncalibratedEvent__bindgen_ty_2() {
    const UNINIT: ::std::mem::MaybeUninit<AUncalibratedEvent__bindgen_ty_2> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AUncalibratedEvent__bindgen_ty_2>(),
        12usize,
        "Size of: AUncalibratedEvent__bindgen_ty_2"
    );
    assert_eq!(
        ::std::mem::align_of::<AUncalibratedEvent__bindgen_ty_2>(),
        4usize,
        "Alignment of AUncalibratedEvent__bindgen_ty_2"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bias) as usize - ptr as usize },
        0usize,
        "Offset of field: AUncalibratedEvent__bindgen_ty_2::bias"
    );
}
#[test]
fn bindgen_test_layout_AUncalibratedEvent() {
    assert_eq!(
        ::std::mem::size_of::<AUncalibratedEvent>(),
        24usize,
        "Size of: AUncalibratedEvent"
    );
    assert_eq!(
        ::std::mem::align_of::<AUncalibratedEvent>(),
        4usize,
        "Alignment of AUncalibratedEvent"
    );
}
// Heart-rate event payload: beats per minute plus an accuracy `status` byte
// (an ASENSOR_STATUS_* value).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AHeartRateEvent {
    pub bpm: f32,
    pub status: i8,
}
// Offsets via MaybeUninit + `addr_of!` instead of the original null-pointer
// field projection (undefined behavior; newer bindgen emits this form).
#[test]
fn bindgen_test_layout_AHeartRateEvent() {
    const UNINIT: ::std::mem::MaybeUninit<AHeartRateEvent> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AHeartRateEvent>(),
        8usize,
        "Size of: AHeartRateEvent"
    );
    assert_eq!(
        ::std::mem::align_of::<AHeartRateEvent>(),
        4usize,
        "Alignment of AHeartRateEvent"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bpm) as usize - ptr as usize },
        0usize,
        "Offset of field: AHeartRateEvent::bpm"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).status) as usize - ptr as usize },
        4usize,
        "Offset of field: AHeartRateEvent::status"
    );
}
// Dynamic-sensor meta event payload: a `connected` flag and the affected
// sensor `handle`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ADynamicSensorEvent {
    pub connected: i32,
    pub handle: i32,
}
// Offsets via MaybeUninit + `addr_of!` instead of the original null-pointer
// field projection (undefined behavior; newer bindgen emits this form).
#[test]
fn bindgen_test_layout_ADynamicSensorEvent() {
    const UNINIT: ::std::mem::MaybeUninit<ADynamicSensorEvent> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ADynamicSensorEvent>(),
        8usize,
        "Size of: ADynamicSensorEvent"
    );
    assert_eq!(
        ::std::mem::align_of::<ADynamicSensorEvent>(),
        4usize,
        "Alignment of ADynamicSensorEvent"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).connected) as usize - ptr as usize },
        0usize,
        "Offset of field: ADynamicSensorEvent::connected"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).handle) as usize - ptr as usize },
        4usize,
        "Offset of field: ADynamicSensorEvent::handle"
    );
}
// Additional-info event payload: an ASENSOR_ADDITIONAL_INFO_* `type_`, a
// `serial` number, and 56 bytes of data viewable as i32s or f32s.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct AAdditionalInfoEvent {
    pub type_: i32,
    pub serial: i32,
    pub __bindgen_anon_1: AAdditionalInfoEvent__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union AAdditionalInfoEvent__bindgen_ty_1 {
    pub data_int32: [i32; 14usize],
    pub data_float: [f32; 14usize],
}
// Offsets via MaybeUninit + `addr_of!` instead of the original null-pointer
// field projection (undefined behavior; newer bindgen emits this form).
#[test]
fn bindgen_test_layout_AAdditionalInfoEvent__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<AAdditionalInfoEvent__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AAdditionalInfoEvent__bindgen_ty_1>(),
        56usize,
        "Size of: AAdditionalInfoEvent__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<AAdditionalInfoEvent__bindgen_ty_1>(),
        4usize,
        "Alignment of AAdditionalInfoEvent__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data_int32) as usize - ptr as usize },
        0usize,
        "Offset of field: AAdditionalInfoEvent__bindgen_ty_1::data_int32"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data_float) as usize - ptr as usize },
        0usize,
        "Offset of field: AAdditionalInfoEvent__bindgen_ty_1::data_float"
    );
}
#[test]
fn bindgen_test_layout_AAdditionalInfoEvent() {
    const UNINIT: ::std::mem::MaybeUninit<AAdditionalInfoEvent> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AAdditionalInfoEvent>(),
        64usize,
        "Size of: AAdditionalInfoEvent"
    );
    assert_eq!(
        ::std::mem::align_of::<AAdditionalInfoEvent>(),
        4usize,
        "Alignment of AAdditionalInfoEvent"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).type_) as usize - ptr as usize },
        0usize,
        "Offset of field: AAdditionalInfoEvent::type_"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).serial) as usize - ptr as usize },
        4usize,
        "Offset of field: AAdditionalInfoEvent::serial"
    );
}
// A single sensor event record (repr(C), shared with the NDK).
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ASensorEvent {
    pub version: i32,
    pub sensor: i32,
    pub type_: i32,
    pub reserved0: i32,
    pub timestamp: i64,
    // 64-byte payload; which union view is valid depends on `type_`.
    // NOTE(review): that correspondence is defined by the NDK sensor API,
    // not visible in this file — consult sensor.h when decoding.
    pub __bindgen_anon_1: ASensorEvent__bindgen_ty_1,
    pub flags: u32,
    pub reserved1: [i32; 3usize],
}
// Top-level payload union: either the f32-based event family below, or the
// u64-based view (ASensorEvent__bindgen_ty_1__bindgen_ty_2).
#[repr(C)]
#[derive(Copy, Clone)]
pub union ASensorEvent__bindgen_ty_1 {
    pub __bindgen_anon_1: ASensorEvent__bindgen_ty_1__bindgen_ty_1,
    pub u64_: ASensorEvent__bindgen_ty_1__bindgen_ty_2,
}
// All f32-based payload interpretations overlap the same 64 bytes.
#[repr(C)]
#[derive(Copy, Clone)]
pub union ASensorEvent__bindgen_ty_1__bindgen_ty_1 {
    pub data: [f32; 16usize],
    pub vector: ASensorVector,
    pub acceleration: ASensorVector,
    pub gyro: ASensorVector,
    pub magnetic: ASensorVector,
    pub temperature: f32,
    pub distance: f32,
    pub light: f32,
    pub pressure: f32,
    pub relative_humidity: f32,
    pub uncalibrated_acceleration: AUncalibratedEvent,
    pub uncalibrated_gyro: AUncalibratedEvent,
    pub uncalibrated_magnetic: AUncalibratedEvent,
    pub meta_data: AMetaDataEvent,
    pub heart_rate: AHeartRateEvent,
    pub dynamic_sensor_meta: ADynamicSensorEvent,
    pub additional_info: AAdditionalInfoEvent,
}
// Offsets via MaybeUninit + `addr_of!` instead of the original null-pointer
// field projection (undefined behavior; newer bindgen emits this form).
// Every member of this union is expected at offset 0.
#[test]
fn bindgen_test_layout_ASensorEvent__bindgen_ty_1__bindgen_ty_1() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorEvent__bindgen_ty_1__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorEvent__bindgen_ty_1__bindgen_ty_1>(),
        64usize,
        "Size of: ASensorEvent__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorEvent__bindgen_ty_1__bindgen_ty_1>(),
        4usize,
        "Alignment of ASensorEvent__bindgen_ty_1__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::data"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).vector) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::vector"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).acceleration) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::acceleration"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).gyro) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::gyro"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).magnetic) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::magnetic"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).temperature) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::temperature"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).distance) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::distance"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).light) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::light"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pressure) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::pressure"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).relative_humidity) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::relative_humidity"
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).uncalibrated_acceleration) as usize - ptr as usize
        },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::uncalibrated_acceleration"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).uncalibrated_gyro) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::uncalibrated_gyro"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).uncalibrated_magnetic) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::uncalibrated_magnetic"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).meta_data) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::meta_data"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).heart_rate) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::heart_rate"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dynamic_sensor_meta) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::dynamic_sensor_meta"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).additional_info) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_1::additional_info"
    );
}
// u64-based view of the 64-byte payload (e.g. `step_counter` readings).
#[repr(C)]
#[derive(Copy, Clone)]
pub union ASensorEvent__bindgen_ty_1__bindgen_ty_2 {
    pub data: [u64; 8usize],
    pub step_counter: u64,
}
// Offsets via MaybeUninit + `addr_of!` instead of the original null-pointer
// field projection (undefined behavior; newer bindgen emits this form).
#[test]
fn bindgen_test_layout_ASensorEvent__bindgen_ty_1__bindgen_ty_2() {
    const UNINIT: ::std::mem::MaybeUninit<ASensorEvent__bindgen_ty_1__bindgen_ty_2> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ASensorEvent__bindgen_ty_1__bindgen_ty_2>(),
        64usize,
        "Size of: ASensorEvent__bindgen_ty_1__bindgen_ty_2"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorEvent__bindgen_ty_1__bindgen_ty_2>(),
        8usize,
        "Alignment of ASensorEvent__bindgen_ty_1__bindgen_ty_2"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_2::data"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).step_counter) as usize - ptr as usize },
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1__bindgen_ty_2::step_counter"
    );
}
#[test]
fn bindgen_test_layout_ASensorEvent__bindgen_ty_1() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original null-pointer-dereference pattern is undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ASensorEvent__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(
        ::std::mem::size_of::<ASensorEvent__bindgen_ty_1>(),
        64usize,
        "Size of: ASensorEvent__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<ASensorEvent__bindgen_ty_1>(),
        8usize,
        "Alignment of ASensorEvent__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).u64_) as usize } - base,
        0usize,
        "Offset of field: ASensorEvent__bindgen_ty_1::u64_"
    );
}
#[test]
fn bindgen_test_layout_ASensorEvent() {
    // Layout constants were generated for a 32-bit ARM target — NOTE(review):
    // confirm before reusing on other ABIs. Offsets are probed with
    // `addr_of!` on a MaybeUninit base pointer; the original
    // `&(*ptr::null()).field` pattern dereferences null, which is UB.
    const UNINIT: ::std::mem::MaybeUninit<ASensorEvent> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<ASensorEvent>(), 104usize, "Size of: ASensorEvent");
    assert_eq!(::std::mem::align_of::<ASensorEvent>(), 8usize, "Alignment of ASensorEvent");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).version) as usize } - base,
        0usize,
        "Offset of field: ASensorEvent::version"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sensor) as usize } - base,
        4usize,
        "Offset of field: ASensorEvent::sensor"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).type_) as usize } - base,
        8usize,
        "Offset of field: ASensorEvent::type_"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved0) as usize } - base,
        12usize,
        "Offset of field: ASensorEvent::reserved0"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).timestamp) as usize } - base,
        16usize,
        "Offset of field: ASensorEvent::timestamp"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize } - base,
        88usize,
        "Offset of field: ASensorEvent::flags"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize } - base,
        92usize,
        "Offset of field: ASensorEvent::reserved1"
    );
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASensorManager {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASensorEventQueue {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASensor {
_unused: [u8; 0],
}
pub type ASensorRef = *const ASensor;
pub type ASensorList = *const ASensorRef;
// FFI declarations for the NDK sensor-manager API (android/sensor.h).
// NOTE(review): doc summaries are inferred from the C API names/signatures;
// confirm behavior against the official NDK sensor reference.
extern "C" {
    pub fn ASensorManager_getInstance() -> *mut ASensorManager;
}
extern "C" {
    pub fn ASensorManager_getInstanceForPackage(
        packageName: *const ::std::os::raw::c_char,
    ) -> *mut ASensorManager;
}
extern "C" {
    /// Writes a pointer to the sensor array into `list`; the `c_int` result
    /// presumably carries the sensor count — verify against sensor.h.
    pub fn ASensorManager_getSensorList(
        manager: *mut ASensorManager,
        list: *mut ASensorList,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorManager_getDefaultSensor(
        manager: *mut ASensorManager,
        type_: ::std::os::raw::c_int,
    ) -> *const ASensor;
}
extern "C" {
    pub fn ASensorManager_getDefaultSensorEx(
        manager: *mut ASensorManager,
        type_: ::std::os::raw::c_int,
        wakeUp: bool,
    ) -> *const ASensor;
}
extern "C" {
    /// Creates an event queue attached to `looper`; `callback`/`data` follow
    /// the ALooper callback convention.
    pub fn ASensorManager_createEventQueue(
        manager: *mut ASensorManager,
        looper: *mut ALooper,
        ident: ::std::os::raw::c_int,
        callback: ALooper_callbackFunc,
        data: *mut ::std::os::raw::c_void,
    ) -> *mut ASensorEventQueue;
}
extern "C" {
    pub fn ASensorManager_destroyEventQueue(
        manager: *mut ASensorManager,
        queue: *mut ASensorEventQueue,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorManager_createSharedMemoryDirectChannel(
        manager: *mut ASensorManager,
        fd: ::std::os::raw::c_int,
        size: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorManager_createHardwareBufferDirectChannel(
        manager: *mut ASensorManager,
        buffer: *const AHardwareBuffer,
        size: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorManager_destroyDirectChannel(
        manager: *mut ASensorManager,
        channelId: ::std::os::raw::c_int,
    );
}
extern "C" {
    pub fn ASensorManager_configureDirectReport(
        manager: *mut ASensorManager,
        sensor: *const ASensor,
        channelId: ::std::os::raw::c_int,
        rate: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
// FFI declarations for sensor event queues (android/sensor.h).
// NOTE(review): summaries inferred from names; confirm against the NDK docs.
extern "C" {
    pub fn ASensorEventQueue_registerSensor(
        queue: *mut ASensorEventQueue,
        sensor: *const ASensor,
        samplingPeriodUs: i32,
        maxBatchReportLatencyUs: i64,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorEventQueue_enableSensor(
        queue: *mut ASensorEventQueue,
        sensor: *const ASensor,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorEventQueue_disableSensor(
        queue: *mut ASensorEventQueue,
        sensor: *const ASensor,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorEventQueue_setEventRate(
        queue: *mut ASensorEventQueue,
        sensor: *const ASensor,
        usec: i32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensorEventQueue_hasEvents(queue: *mut ASensorEventQueue) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Copies up to `count` events into `events`; signed return suggests an
    /// event count or negative error — verify against sensor.h.
    pub fn ASensorEventQueue_getEvents(
        queue: *mut ASensorEventQueue,
        events: *mut ASensorEvent,
        count: size_t,
    ) -> ssize_t;
}
extern "C" {
    pub fn ASensorEventQueue_requestAdditionalInfoEvents(
        queue: *mut ASensorEventQueue,
        enable: bool,
    ) -> ::std::os::raw::c_int;
}
// Read-only property accessors for an `ASensor`, plus the libandroid abort
// message hook. All take a borrowed sensor pointer and return plain values.
extern "C" {
    pub fn ASensor_getName(sensor: *const ASensor) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn ASensor_getVendor(sensor: *const ASensor) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn ASensor_getType(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_getResolution(sensor: *const ASensor) -> f32;
}
extern "C" {
    pub fn ASensor_getMinDelay(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_getFifoMaxEventCount(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_getFifoReservedEventCount(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_getStringType(sensor: *const ASensor) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn ASensor_getReportingMode(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_isWakeUpSensor(sensor: *const ASensor) -> bool;
}
extern "C" {
    pub fn ASensor_isDirectChannelTypeSupported(
        sensor: *const ASensor,
        channelType: ::std::os::raw::c_int,
    ) -> bool;
}
extern "C" {
    pub fn ASensor_getHighestDirectReportRateLevel(sensor: *const ASensor)
        -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASensor_getHandle(sensor: *const ASensor) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Sets the message reported alongside a subsequent `abort()` (see
    /// android/set_abort_message.h).
    pub fn android_set_abort_message(__msg: *const ::std::os::raw::c_char);
}
// FFI declarations for anonymous shared memory (android/sharedmem.h).
// The returned `c_int` values are file descriptors / status codes.
extern "C" {
    pub fn ASharedMemory_create(
        name: *const ::std::os::raw::c_char,
        size: size_t,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASharedMemory_getSize(fd: ::std::os::raw::c_int) -> size_t;
}
extern "C" {
    pub fn ASharedMemory_setProt(
        fd: ::std::os::raw::c_int,
        prot: ::std::os::raw::c_int,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Duplicates a Java `SharedMemory` object's descriptor into native code.
    pub fn ASharedMemory_dupFromJava(
        env: *mut JNIEnv,
        sharedMemory: jobject,
    ) -> ::std::os::raw::c_int;
}
/// Opaque handle to the NDK storage manager (android/storage_manager.h);
/// only used behind raw pointers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AStorageManager {
    _unused: [u8; 0],
}
// OBB mount-state codes reported to `AStorageManager_obbCallbackFunc`.
// Values below 20 are success states; 20 and above are error states.
pub const AOBB_STATE_MOUNTED: ::std::os::raw::c_uint = 1;
pub const AOBB_STATE_UNMOUNTED: ::std::os::raw::c_uint = 2;
pub const AOBB_STATE_ERROR_INTERNAL: ::std::os::raw::c_uint = 20;
pub const AOBB_STATE_ERROR_COULD_NOT_MOUNT: ::std::os::raw::c_uint = 21;
pub const AOBB_STATE_ERROR_COULD_NOT_UNMOUNT: ::std::os::raw::c_uint = 22;
pub const AOBB_STATE_ERROR_NOT_MOUNTED: ::std::os::raw::c_uint = 23;
pub const AOBB_STATE_ERROR_ALREADY_MOUNTED: ::std::os::raw::c_uint = 24;
pub const AOBB_STATE_ERROR_PERMISSION_DENIED: ::std::os::raw::c_uint = 25;
/// Anonymous C enum type carrying the `AOBB_STATE_*` values.
pub type _bindgen_ty_44 = ::std::os::raw::c_uint;
// FFI declarations for OBB mount/unmount via the storage manager.
extern "C" {
    pub fn AStorageManager_new() -> *mut AStorageManager;
}
extern "C" {
    pub fn AStorageManager_delete(mgr: *mut AStorageManager);
}
/// Completion callback for OBB mount/unmount; `state` is presumably one of
/// the `AOBB_STATE_*` codes above — verify against storage_manager.h.
pub type AStorageManager_obbCallbackFunc = ::std::option::Option<
    unsafe extern "C" fn(
        filename: *const ::std::os::raw::c_char,
        state: i32,
        data: *mut ::std::os::raw::c_void,
    ),
>;
extern "C" {
    pub fn AStorageManager_mountObb(
        mgr: *mut AStorageManager,
        filename: *const ::std::os::raw::c_char,
        key: *const ::std::os::raw::c_char,
        cb: AStorageManager_obbCallbackFunc,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AStorageManager_unmountObb(
        mgr: *mut AStorageManager,
        filename: *const ::std::os::raw::c_char,
        force: ::std::os::raw::c_int,
        cb: AStorageManager_obbCallbackFunc,
        data: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AStorageManager_isObbMounted(
        mgr: *mut AStorageManager,
        filename: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AStorageManager_getMountedObbPath(
        mgr: *mut AStorageManager,
        filename: *const ::std::os::raw::c_char,
    ) -> *const ::std::os::raw::c_char;
}
/// Opaque handle to a native SurfaceTexture (android/surface_texture.h);
/// only used behind raw pointers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ASurfaceTexture {
    _unused: [u8; 0],
}
extern "C" {
    pub fn ASurfaceTexture_release(st: *mut ASurfaceTexture);
}
extern "C" {
    pub fn ASurfaceTexture_acquireANativeWindow(st: *mut ASurfaceTexture) -> *mut ANativeWindow;
}
extern "C" {
    pub fn ASurfaceTexture_attachToGLContext(
        st: *mut ASurfaceTexture,
        texName: u32,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASurfaceTexture_detachFromGLContext(st: *mut ASurfaceTexture) -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn ASurfaceTexture_updateTexImage(st: *mut ASurfaceTexture) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Fills `mtx` with a texture transform matrix; the expected element
    /// count is not visible here (presumably 16 floats — confirm).
    pub fn ASurfaceTexture_getTransformMatrix(st: *mut ASurfaceTexture, mtx: *mut f32);
}
extern "C" {
    pub fn ASurfaceTexture_getTimestamp(st: *mut ASurfaceTexture) -> i64;
}
extern "C" {
    /// Converts a Java `SurfaceTexture` object into a native handle.
    pub fn ASurfaceTexture_fromSurfaceTexture(
        env: *mut JNIEnv,
        surfacetexture: jobject,
    ) -> *mut ASurfaceTexture;
}
/// Binding for the kernel's `struct sync_merge_data` (linux/sync_file.h).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sync_merge_data {
    pub name: [::std::os::raw::c_char; 32usize],
    pub fd2: __s32,
    pub fence: __s32,
    pub flags: __u32,
    pub pad: __u32,
}
#[test]
fn bindgen_test_layout_sync_merge_data() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<sync_merge_data> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<sync_merge_data>(), 48usize, "Size of: sync_merge_data");
    assert_eq!(::std::mem::align_of::<sync_merge_data>(), 4usize, "Alignment of sync_merge_data");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).name) as usize } - base,
        0usize,
        "Offset of field: sync_merge_data::name"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).fd2) as usize } - base,
        32usize,
        "Offset of field: sync_merge_data::fd2"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).fence) as usize } - base,
        36usize,
        "Offset of field: sync_merge_data::fence"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize } - base,
        40usize,
        "Offset of field: sync_merge_data::flags"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pad) as usize } - base,
        44usize,
        "Offset of field: sync_merge_data::pad"
    );
}
/// Binding for the kernel's `struct sync_fence_info` (linux/sync_file.h).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sync_fence_info {
    pub obj_name: [::std::os::raw::c_char; 32usize],
    pub driver_name: [::std::os::raw::c_char; 32usize],
    pub status: __s32,
    pub flags: __u32,
    pub timestamp_ns: __u64,
}
#[test]
fn bindgen_test_layout_sync_fence_info() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<sync_fence_info> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<sync_fence_info>(), 80usize, "Size of: sync_fence_info");
    assert_eq!(::std::mem::align_of::<sync_fence_info>(), 8usize, "Alignment of sync_fence_info");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).obj_name) as usize } - base,
        0usize,
        "Offset of field: sync_fence_info::obj_name"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).driver_name) as usize } - base,
        32usize,
        "Offset of field: sync_fence_info::driver_name"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).status) as usize } - base,
        64usize,
        "Offset of field: sync_fence_info::status"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize } - base,
        68usize,
        "Offset of field: sync_fence_info::flags"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).timestamp_ns) as usize } - base,
        72usize,
        "Offset of field: sync_fence_info::timestamp_ns"
    );
}
/// Binding for the kernel's `struct sync_file_info` (linux/sync_file.h).
/// `sync_fence_info` is a 64-bit kernel pointer/handle field, not the struct
/// of the same name.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sync_file_info {
    pub name: [::std::os::raw::c_char; 32usize],
    pub status: __s32,
    pub flags: __u32,
    pub num_fences: __u32,
    pub pad: __u32,
    pub sync_fence_info: __u64,
}
#[test]
fn bindgen_test_layout_sync_file_info() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<sync_file_info> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<sync_file_info>(), 56usize, "Size of: sync_file_info");
    assert_eq!(::std::mem::align_of::<sync_file_info>(), 8usize, "Alignment of sync_file_info");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).name) as usize } - base,
        0usize,
        "Offset of field: sync_file_info::name"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).status) as usize } - base,
        32usize,
        "Offset of field: sync_file_info::status"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize } - base,
        36usize,
        "Offset of field: sync_file_info::flags"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).num_fences) as usize } - base,
        40usize,
        "Offset of field: sync_file_info::num_fences"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pad) as usize } - base,
        44usize,
        "Offset of field: sync_file_info::pad"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sync_fence_info) as usize } - base,
        48usize,
        "Offset of field: sync_file_info::sync_fence_info"
    );
}
// FFI declarations for sync fences (android/sync.h) and system tracing
// (android/trace.h).
extern "C" {
    pub fn sync_merge(name: *const ::std::os::raw::c_char, fd1: i32, fd2: i32) -> i32;
}
extern "C" {
    // Returns a heap-allocated info struct; release it with
    // `sync_file_info_free` below.
    pub fn sync_file_info(fd: i32) -> *mut sync_file_info;
}
extern "C" {
    pub fn sync_file_info_free(info: *mut sync_file_info);
}
extern "C" {
    pub fn ATrace_isEnabled() -> bool;
}
extern "C" {
    pub fn ATrace_beginSection(sectionName: *const ::std::os::raw::c_char);
}
extern "C" {
    pub fn ATrace_endSection();
}
extern "C" {
    pub fn ATrace_beginAsyncSection(sectionName: *const ::std::os::raw::c_char, cookie: i32);
}
extern "C" {
    pub fn ATrace_endAsyncSection(sectionName: *const ::std::os::raw::c_char, cookie: i32);
}
extern "C" {
    pub fn ATrace_setCounter(counterName: *const ::std::os::raw::c_char, counterValue: i64);
}
// Window flags (android/native_window.h). Single-bit values (powers of two)
// intended to be OR-ed together; they mirror the Java-side
// WindowManager.LayoutParams FLAG_* constants — confirm against the NDK docs.
pub const AWINDOW_FLAG_ALLOW_LOCK_WHILE_SCREEN_ON: ::std::os::raw::c_uint = 1;
pub const AWINDOW_FLAG_DIM_BEHIND: ::std::os::raw::c_uint = 2;
pub const AWINDOW_FLAG_BLUR_BEHIND: ::std::os::raw::c_uint = 4;
pub const AWINDOW_FLAG_NOT_FOCUSABLE: ::std::os::raw::c_uint = 8;
pub const AWINDOW_FLAG_NOT_TOUCHABLE: ::std::os::raw::c_uint = 16;
pub const AWINDOW_FLAG_NOT_TOUCH_MODAL: ::std::os::raw::c_uint = 32;
pub const AWINDOW_FLAG_TOUCHABLE_WHEN_WAKING: ::std::os::raw::c_uint = 64;
pub const AWINDOW_FLAG_KEEP_SCREEN_ON: ::std::os::raw::c_uint = 128;
pub const AWINDOW_FLAG_LAYOUT_IN_SCREEN: ::std::os::raw::c_uint = 256;
pub const AWINDOW_FLAG_LAYOUT_NO_LIMITS: ::std::os::raw::c_uint = 512;
pub const AWINDOW_FLAG_FULLSCREEN: ::std::os::raw::c_uint = 1024;
pub const AWINDOW_FLAG_FORCE_NOT_FULLSCREEN: ::std::os::raw::c_uint = 2048;
pub const AWINDOW_FLAG_DITHER: ::std::os::raw::c_uint = 4096;
pub const AWINDOW_FLAG_SECURE: ::std::os::raw::c_uint = 8192;
pub const AWINDOW_FLAG_SCALED: ::std::os::raw::c_uint = 16384;
pub const AWINDOW_FLAG_IGNORE_CHEEK_PRESSES: ::std::os::raw::c_uint = 32768;
pub const AWINDOW_FLAG_LAYOUT_INSET_DECOR: ::std::os::raw::c_uint = 65536;
pub const AWINDOW_FLAG_ALT_FOCUSABLE_IM: ::std::os::raw::c_uint = 131072;
pub const AWINDOW_FLAG_WATCH_OUTSIDE_TOUCH: ::std::os::raw::c_uint = 262144;
pub const AWINDOW_FLAG_SHOW_WHEN_LOCKED: ::std::os::raw::c_uint = 524288;
pub const AWINDOW_FLAG_SHOW_WALLPAPER: ::std::os::raw::c_uint = 1048576;
pub const AWINDOW_FLAG_TURN_SCREEN_ON: ::std::os::raw::c_uint = 2097152;
pub const AWINDOW_FLAG_DISMISS_KEYGUARD: ::std::os::raw::c_uint = 4194304;
/// Anonymous C enum type carrying the `AWINDOW_FLAG_*` values.
pub type _bindgen_ty_45 = ::std::os::raw::c_uint;
/// Kernel `struct __kernel_timespec`: 64-bit seconds field (offset 8 for
/// nanoseconds, total size 16).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_timespec {
    pub tv_sec: __kernel_time64_t,
    pub tv_nsec: ::std::os::raw::c_longlong,
}
#[test]
fn bindgen_test_layout___kernel_timespec() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<__kernel_timespec> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<__kernel_timespec>(), 16usize, "Size of: __kernel_timespec");
    assert_eq!(::std::mem::align_of::<__kernel_timespec>(), 8usize, "Alignment of __kernel_timespec");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_sec) as usize } - base,
        0usize,
        "Offset of field: __kernel_timespec::tv_sec"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_nsec) as usize } - base,
        8usize,
        "Offset of field: __kernel_timespec::tv_nsec"
    );
}
/// Kernel `struct __kernel_itimerspec`: interval + initial expiration, both
/// as 64-bit-time `__kernel_timespec` values.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_itimerspec {
    pub it_interval: __kernel_timespec,
    pub it_value: __kernel_timespec,
}
#[test]
fn bindgen_test_layout___kernel_itimerspec() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<__kernel_itimerspec> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<__kernel_itimerspec>(), 32usize, "Size of: __kernel_itimerspec");
    assert_eq!(::std::mem::align_of::<__kernel_itimerspec>(), 8usize, "Alignment of __kernel_itimerspec");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_interval) as usize } - base,
        0usize,
        "Offset of field: __kernel_itimerspec::it_interval"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_value) as usize } - base,
        16usize,
        "Offset of field: __kernel_itimerspec::it_value"
    );
}
/// Kernel `struct __kernel_old_timespec`: legacy 32-bit-time variant
/// (4-byte seconds on this target).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_old_timespec {
    pub tv_sec: __kernel_old_time_t,
    pub tv_nsec: ::std::os::raw::c_long,
}
#[test]
fn bindgen_test_layout___kernel_old_timespec() {
    // Layout generated for a 32-bit target (c_long is 4 bytes). Offsets
    // probed with `addr_of!`; the former null-deref pattern is UB.
    const UNINIT: ::std::mem::MaybeUninit<__kernel_old_timespec> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<__kernel_old_timespec>(), 8usize, "Size of: __kernel_old_timespec");
    assert_eq!(::std::mem::align_of::<__kernel_old_timespec>(), 4usize, "Alignment of __kernel_old_timespec");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_sec) as usize } - base,
        0usize,
        "Offset of field: __kernel_old_timespec::tv_sec"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_nsec) as usize } - base,
        4usize,
        "Offset of field: __kernel_old_timespec::tv_nsec"
    );
}
/// Kernel `struct __kernel_sock_timeval`: socket timeout with 64-bit fields.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __kernel_sock_timeval {
    pub tv_sec: __s64,
    pub tv_usec: __s64,
}
#[test]
fn bindgen_test_layout___kernel_sock_timeval() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<__kernel_sock_timeval> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<__kernel_sock_timeval>(), 16usize, "Size of: __kernel_sock_timeval");
    assert_eq!(::std::mem::align_of::<__kernel_sock_timeval>(), 8usize, "Alignment of __kernel_sock_timeval");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_sec) as usize } - base,
        0usize,
        "Offset of field: __kernel_sock_timeval::tv_sec"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_usec) as usize } - base,
        8usize,
        "Offset of field: __kernel_sock_timeval::tv_usec"
    );
}
/// Userspace `struct timespec` (same field layout as `__kernel_old_timespec`
/// on this target).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct timespec {
    pub tv_sec: __kernel_old_time_t,
    pub tv_nsec: ::std::os::raw::c_long,
}
#[test]
fn bindgen_test_layout_timespec() {
    // Layout generated for a 32-bit target. Offsets probed with `addr_of!`;
    // the former null-deref pattern is UB.
    const UNINIT: ::std::mem::MaybeUninit<timespec> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<timespec>(), 8usize, "Size of: timespec");
    assert_eq!(::std::mem::align_of::<timespec>(), 4usize, "Alignment of timespec");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_sec) as usize } - base,
        0usize,
        "Offset of field: timespec::tv_sec"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_nsec) as usize } - base,
        4usize,
        "Offset of field: timespec::tv_nsec"
    );
}
/// Userspace `struct timeval`: seconds + microseconds.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct timeval {
    pub tv_sec: __kernel_old_time_t,
    pub tv_usec: __kernel_suseconds_t,
}
#[test]
fn bindgen_test_layout_timeval() {
    // Layout generated for a 32-bit target. Offsets probed with `addr_of!`;
    // the former null-deref pattern is UB.
    const UNINIT: ::std::mem::MaybeUninit<timeval> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<timeval>(), 8usize, "Size of: timeval");
    assert_eq!(::std::mem::align_of::<timeval>(), 4usize, "Alignment of timeval");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_sec) as usize } - base,
        0usize,
        "Offset of field: timeval::tv_sec"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tv_usec) as usize } - base,
        4usize,
        "Offset of field: timeval::tv_usec"
    );
}
/// `struct itimerspec`: timer interval + initial expiration as `timespec`s.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct itimerspec {
    pub it_interval: timespec,
    pub it_value: timespec,
}
#[test]
fn bindgen_test_layout_itimerspec() {
    // Layout generated for a 32-bit target. Offsets probed with `addr_of!`;
    // the former null-deref pattern is UB.
    const UNINIT: ::std::mem::MaybeUninit<itimerspec> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<itimerspec>(), 16usize, "Size of: itimerspec");
    assert_eq!(::std::mem::align_of::<itimerspec>(), 4usize, "Alignment of itimerspec");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_interval) as usize } - base,
        0usize,
        "Offset of field: itimerspec::it_interval"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_value) as usize } - base,
        8usize,
        "Offset of field: itimerspec::it_value"
    );
}
/// `struct itimerval`: timer interval + initial expiration as `timeval`s.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct itimerval {
    pub it_interval: timeval,
    pub it_value: timeval,
}
#[test]
fn bindgen_test_layout_itimerval() {
    // Layout generated for a 32-bit target. Offsets probed with `addr_of!`;
    // the former null-deref pattern is UB.
    const UNINIT: ::std::mem::MaybeUninit<itimerval> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<itimerval>(), 16usize, "Size of: itimerval");
    assert_eq!(::std::mem::align_of::<itimerval>(), 4usize, "Alignment of itimerval");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_interval) as usize } - base,
        0usize,
        "Offset of field: itimerval::it_interval"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).it_value) as usize } - base,
        8usize,
        "Offset of field: itimerval::it_value"
    );
}
/// `struct timezone` as passed to `gettimeofday`-style calls: minutes west
/// of Greenwich plus a DST flag.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct timezone {
    pub tz_minuteswest: ::std::os::raw::c_int,
    pub tz_dsttime: ::std::os::raw::c_int,
}
#[test]
fn bindgen_test_layout_timezone() {
    // Offsets probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<timezone> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<timezone>(), 8usize, "Size of: timezone");
    assert_eq!(::std::mem::align_of::<timezone>(), 4usize, "Alignment of timezone");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tz_minuteswest) as usize } - base,
        0usize,
        "Offset of field: timezone::tz_minuteswest"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tz_dsttime) as usize } - base,
        4usize,
        "Offset of field: timezone::tz_dsttime"
    );
}
/// Saved CPU state delivered with a signal on 32-bit ARM: 21 consecutive
/// `c_ulong` fields (trap info, r0-r10, fp, ip, sp, lr, pc, cpsr, fault
/// address).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sigcontext {
    pub trap_no: ::std::os::raw::c_ulong,
    pub error_code: ::std::os::raw::c_ulong,
    pub oldmask: ::std::os::raw::c_ulong,
    pub arm_r0: ::std::os::raw::c_ulong,
    pub arm_r1: ::std::os::raw::c_ulong,
    pub arm_r2: ::std::os::raw::c_ulong,
    pub arm_r3: ::std::os::raw::c_ulong,
    pub arm_r4: ::std::os::raw::c_ulong,
    pub arm_r5: ::std::os::raw::c_ulong,
    pub arm_r6: ::std::os::raw::c_ulong,
    pub arm_r7: ::std::os::raw::c_ulong,
    pub arm_r8: ::std::os::raw::c_ulong,
    pub arm_r9: ::std::os::raw::c_ulong,
    pub arm_r10: ::std::os::raw::c_ulong,
    pub arm_fp: ::std::os::raw::c_ulong,
    pub arm_ip: ::std::os::raw::c_ulong,
    pub arm_sp: ::std::os::raw::c_ulong,
    pub arm_lr: ::std::os::raw::c_ulong,
    pub arm_pc: ::std::os::raw::c_ulong,
    pub arm_cpsr: ::std::os::raw::c_ulong,
    pub fault_address: ::std::os::raw::c_ulong,
}
#[test]
fn bindgen_test_layout_sigcontext() {
    // 21 c_ulong fields; generated for 32-bit ARM where c_ulong is 4 bytes,
    // so the fields sit at consecutive 4-byte offsets (size 84 = 21 * 4).
    // Offsets are probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<sigcontext> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<sigcontext>(), 84usize, "Size of: sigcontext");
    assert_eq!(::std::mem::align_of::<sigcontext>(), 4usize, "Alignment of sigcontext");
    // Field addresses in declaration order; each must land 4 bytes after the
    // previous one.
    let offsets = unsafe {
        [
            ::std::ptr::addr_of!((*ptr).trap_no) as usize,
            ::std::ptr::addr_of!((*ptr).error_code) as usize,
            ::std::ptr::addr_of!((*ptr).oldmask) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r0) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r1) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r2) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r3) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r4) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r5) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r6) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r7) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r8) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r9) as usize,
            ::std::ptr::addr_of!((*ptr).arm_r10) as usize,
            ::std::ptr::addr_of!((*ptr).arm_fp) as usize,
            ::std::ptr::addr_of!((*ptr).arm_ip) as usize,
            ::std::ptr::addr_of!((*ptr).arm_sp) as usize,
            ::std::ptr::addr_of!((*ptr).arm_lr) as usize,
            ::std::ptr::addr_of!((*ptr).arm_pc) as usize,
            ::std::ptr::addr_of!((*ptr).arm_cpsr) as usize,
            ::std::ptr::addr_of!((*ptr).fault_address) as usize,
        ]
    };
    for (i, &addr) in offsets.iter().enumerate() {
        assert_eq!(
            addr - base,
            i * 4,
            "Offset of sigcontext field #{} (declaration order)",
            i
        );
    }
}
/// Kernel-level signal set representation.
pub type sigset_t = ::std::os::raw::c_ulong;
/// Plain signal handler: receives only the signal number.
pub type __signalfn_t = ::std::option::Option<unsafe extern "C" fn(arg1: ::std::os::raw::c_int)>;
pub type __sighandler_t = __signalfn_t;
/// Signal-trampoline restorer function (no arguments, no return).
pub type __restorefn_t = ::std::option::Option<unsafe extern "C" fn()>;
pub type __sigrestore_t = __restorefn_t;
/// Signal action as laid out for the kernel ABI (`struct sigaction`).
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __kernel_sigaction {
    /// Handler storage: plain handler or siginfo-style handler (union below).
    pub _u: __kernel_sigaction__bindgen_ty_1,
    pub sa_mask: sigset_t,
    pub sa_flags: ::std::os::raw::c_ulong,
    pub sa_restorer: ::std::option::Option<unsafe extern "C" fn()>,
}
/// Overlapping storage for the two signal-handler flavors.
#[repr(C)]
#[derive(Copy, Clone)]
pub union __kernel_sigaction__bindgen_ty_1 {
    pub _sa_handler: __sighandler_t,
    pub _sa_sigaction: ::std::option::Option<
        unsafe extern "C" fn(
            arg1: ::std::os::raw::c_int,
            arg2: *mut siginfo,
            arg3: *mut ::std::os::raw::c_void,
        ),
    >,
}
#[test]
fn bindgen_test_layout___kernel_sigaction__bindgen_ty_1() {
    // Both variants are function pointers (4 bytes on this 32-bit target).
    // Offsets are probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<__kernel_sigaction__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(
        ::std::mem::size_of::<__kernel_sigaction__bindgen_ty_1>(),
        4usize,
        "Size of: __kernel_sigaction__bindgen_ty_1"
    );
    assert_eq!(
        ::std::mem::align_of::<__kernel_sigaction__bindgen_ty_1>(),
        4usize,
        "Alignment of __kernel_sigaction__bindgen_ty_1"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr)._sa_handler) as usize } - base,
        0usize,
        "Offset of field: __kernel_sigaction__bindgen_ty_1::_sa_handler"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr)._sa_sigaction) as usize } - base,
        0usize,
        "Offset of field: __kernel_sigaction__bindgen_ty_1::_sa_sigaction"
    );
}
#[test]
fn bindgen_test_layout___kernel_sigaction() {
    // Layout generated for a 32-bit target (4-byte pointers/ulongs).
    // Offsets are probed with `addr_of!` on a MaybeUninit base pointer; the
    // original `&(*ptr::null()).field` pattern dereferences null (UB).
    const UNINIT: ::std::mem::MaybeUninit<__kernel_sigaction> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    let base = ptr as usize;
    assert_eq!(::std::mem::size_of::<__kernel_sigaction>(), 16usize, "Size of: __kernel_sigaction");
    assert_eq!(::std::mem::align_of::<__kernel_sigaction>(), 4usize, "Alignment of __kernel_sigaction");
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr)._u) as usize } - base,
        0usize,
        "Offset of field: __kernel_sigaction::_u"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sa_mask) as usize } - base,
        4usize,
        "Offset of field: __kernel_sigaction::sa_mask"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sa_flags) as usize } - base,
        8usize,
        "Offset of field: __kernel_sigaction::sa_flags"
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sa_restorer) as usize } - base,
        12usize,
        "Offset of field: __kernel_sigaction::sa_restorer"
    );
}
/// Alternate signal-stack descriptor (aliased as `stack_t` below).
/// Per the layout test, the whole struct is 12 bytes, so pointers and
/// `size_t` are 4 bytes on this target (32-bit).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sigaltstack {
    pub ss_sp: *mut ::std::os::raw::c_void,
    pub ss_flags: ::std::os::raw::c_int,
    pub ss_size: size_t,
}
#[test]
fn bindgen_test_layout_sigaltstack() {
assert_eq!(
::std::mem::size_of::<sigaltstack>(),
12usize,
concat!("Size of: ", stringify!(sigaltstack))
);
assert_eq!(
::std::mem::align_of::<sigaltstack>(),
4usize,
concat!("Alignment of ", stringify!(sigaltstack))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaltstack>())).ss_sp as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigaltstack),
"::",
stringify!(ss_sp)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaltstack>())).ss_flags as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(sigaltstack),
"::",
stringify!(ss_flags)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaltstack>())).ss_size as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(sigaltstack),
"::",
stringify!(ss_size)
)
);
}
pub type stack_t = sigaltstack; // POSIX-style alias for the struct above
/// Value delivered with a queued signal: either a plain int or a pointer.
/// Both members share storage (the layout test asserts both at offset 0).
#[repr(C)]
#[derive(Copy, Clone)]
pub union sigval {
    pub sival_int: ::std::os::raw::c_int,
    pub sival_ptr: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout_sigval() {
assert_eq!(
::std::mem::size_of::<sigval>(),
4usize,
concat!("Size of: ", stringify!(sigval))
);
assert_eq!(
::std::mem::align_of::<sigval>(),
4usize,
concat!("Alignment of ", stringify!(sigval))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigval>())).sival_int as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigval),
"::",
stringify!(sival_int)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigval>())).sival_ptr as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigval),
"::",
stringify!(sival_ptr)
)
);
}
pub type sigval_t = sigval; // POSIX-style alias for the union above
/// Signal-specific payload of `siginfo`. Only one variant is meaningful for
/// any given delivered signal; which one applies is not expressed in the
/// types (it depends on the signal/`si_code` — the C header's discriminated
/// union convention).
#[repr(C)]
#[derive(Copy, Clone)]
pub union __sifields {
    pub _kill: __sifields__bindgen_ty_1,     // sender pid/uid
    pub _timer: __sifields__bindgen_ty_2,    // POSIX timer expiry
    pub _rt: __sifields__bindgen_ty_3,       // realtime signal w/ sigval
    pub _sigchld: __sifields__bindgen_ty_4,  // child status change
    pub _sigfault: __sifields__bindgen_ty_5, // faulting address + details
    pub _sigpoll: __sifields__bindgen_ty_6,  // poll/IO event
    pub _sigsys: __sifields__bindgen_ty_7,   // bad syscall (seccomp)
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_1 {
pub _pid: __kernel_pid_t,
pub _uid: __kernel_uid32_t,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_1>(),
8usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_1))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_1>()))._pid as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_1),
"::",
stringify!(_pid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_1>()))._uid as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_1),
"::",
stringify!(_uid)
)
);
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __sifields__bindgen_ty_2 {
pub _tid: __kernel_timer_t,
pub _overrun: ::std::os::raw::c_int,
pub _sigval: sigval_t,
pub _sys_private: ::std::os::raw::c_int,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_2() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_2>(),
16usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_2))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_2>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_2))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_2>()))._tid as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_2),
"::",
stringify!(_tid)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_2>()))._overrun as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_2),
"::",
stringify!(_overrun)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_2>()))._sigval as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_2),
"::",
stringify!(_sigval)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_2>()))._sys_private as *const _ as usize
},
12usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_2),
"::",
stringify!(_sys_private)
)
);
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __sifields__bindgen_ty_3 {
pub _pid: __kernel_pid_t,
pub _uid: __kernel_uid32_t,
pub _sigval: sigval_t,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_3() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_3>(),
12usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_3))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_3>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_3))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_3>()))._pid as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_3),
"::",
stringify!(_pid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_3>()))._uid as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_3),
"::",
stringify!(_uid)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_3>()))._sigval as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_3),
"::",
stringify!(_sigval)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_4 {
pub _pid: __kernel_pid_t,
pub _uid: __kernel_uid32_t,
pub _status: ::std::os::raw::c_int,
pub _utime: __kernel_clock_t,
pub _stime: __kernel_clock_t,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_4() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_4>(),
20usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_4))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_4>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_4))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_4>()))._pid as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_4),
"::",
stringify!(_pid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_4>()))._uid as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_4),
"::",
stringify!(_uid)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_4>()))._status as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_4),
"::",
stringify!(_status)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_4>()))._utime as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_4),
"::",
stringify!(_utime)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_4>()))._stime as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_4),
"::",
stringify!(_stime)
)
);
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __sifields__bindgen_ty_5 {
pub _addr: *mut ::std::os::raw::c_void,
pub __bindgen_anon_1: __sifields__bindgen_ty_5__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union __sifields__bindgen_ty_5__bindgen_ty_1 {
pub _trapno: ::std::os::raw::c_int,
pub _addr_lsb: ::std::os::raw::c_short,
pub _addr_bnd: __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1,
pub _addr_pkey: __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2,
pub _perf: __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1 {
pub _dummy_bnd: [::std::os::raw::c_char; 4usize],
pub _lower: *mut ::std::os::raw::c_void,
pub _upper: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1>(),
12usize,
concat!(
"Size of: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1>(),
4usize,
concat!(
"Alignment of ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1>()))
._dummy_bnd as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_dummy_bnd)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1>()))._lower
as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_lower)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1>()))._upper
as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_upper)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2 {
pub _dummy_pkey: [::std::os::raw::c_char; 4usize],
pub _pkey: __u32,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2>(),
8usize,
concat!(
"Size of: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2)
)
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2>(),
4usize,
concat!(
"Alignment of ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2>()))
._dummy_pkey as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2),
"::",
stringify!(_dummy_pkey)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2>()))._pkey
as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_2),
"::",
stringify!(_pkey)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3 {
pub _data: ::std::os::raw::c_ulong,
pub _type: __u32,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3>(),
8usize,
concat!(
"Size of: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3)
)
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3>(),
4usize,
concat!(
"Alignment of ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3>()))._data
as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3),
"::",
stringify!(_data)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3>()))._type
as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1__bindgen_ty_3),
"::",
stringify!(_type)
)
);
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_5__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_5__bindgen_ty_1>(),
12usize,
concat!(
"Size of: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1)
)
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_5__bindgen_ty_1>(),
4usize,
concat!(
"Alignment of ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1>()))._trapno as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1),
"::",
stringify!(_trapno)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1>()))._addr_lsb as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1),
"::",
stringify!(_addr_lsb)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1>()))._addr_bnd as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1),
"::",
stringify!(_addr_bnd)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1>()))._addr_pkey
as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1),
"::",
stringify!(_addr_pkey)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_5__bindgen_ty_1>()))._perf as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5__bindgen_ty_1),
"::",
stringify!(_perf)
)
);
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_5() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_5>(),
16usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_5))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_5>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_5))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_5>()))._addr as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_5),
"::",
stringify!(_addr)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_6 {
pub _band: ::std::os::raw::c_long,
pub _fd: ::std::os::raw::c_int,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_6() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_6>(),
8usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_6))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_6>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_6))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_6>()))._band as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_6),
"::",
stringify!(_band)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_6>()))._fd as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_6),
"::",
stringify!(_fd)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __sifields__bindgen_ty_7 {
pub _call_addr: *mut ::std::os::raw::c_void,
pub _syscall: ::std::os::raw::c_int,
pub _arch: ::std::os::raw::c_uint,
}
#[test]
fn bindgen_test_layout___sifields__bindgen_ty_7() {
assert_eq!(
::std::mem::size_of::<__sifields__bindgen_ty_7>(),
12usize,
concat!("Size of: ", stringify!(__sifields__bindgen_ty_7))
);
assert_eq!(
::std::mem::align_of::<__sifields__bindgen_ty_7>(),
4usize,
concat!("Alignment of ", stringify!(__sifields__bindgen_ty_7))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_7>()))._call_addr as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_7),
"::",
stringify!(_call_addr)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<__sifields__bindgen_ty_7>()))._syscall as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_7),
"::",
stringify!(_syscall)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields__bindgen_ty_7>()))._arch as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(__sifields__bindgen_ty_7),
"::",
stringify!(_arch)
)
);
}
#[test]
fn bindgen_test_layout___sifields() {
assert_eq!(
::std::mem::size_of::<__sifields>(),
20usize,
concat!("Size of: ", stringify!(__sifields))
);
assert_eq!(
::std::mem::align_of::<__sifields>(),
4usize,
concat!("Alignment of ", stringify!(__sifields))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._kill as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_kill)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._timer as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_timer)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._rt as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_rt)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._sigchld as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_sigchld)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._sigfault as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_sigfault)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._sigpoll as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_sigpoll)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__sifields>()))._sigsys as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__sifields),
"::",
stringify!(_sigsys)
)
);
}
/// Signal information block (`siginfo_t`). The single anonymous member wraps
/// a union padded to 128 bytes (see `siginfo__bindgen_ty_1` and its layout
/// test below).
#[repr(C)]
#[derive(Copy, Clone)]
pub struct siginfo {
    pub __bindgen_anon_1: siginfo__bindgen_ty_1,
}
/// Either the structured header+payload view, or raw padding. `_si_pad`
/// (32 x c_int = 128 bytes) is what fixes the union's overall size.
#[repr(C)]
#[derive(Copy, Clone)]
pub union siginfo__bindgen_ty_1 {
    pub __bindgen_anon_1: siginfo__bindgen_ty_1__bindgen_ty_1,
    pub _si_pad: [::std::os::raw::c_int; 32usize],
}
/// Structured view of `siginfo`: the common header (signal number, errno,
/// code) followed by the signal-specific `__sifields` payload at offset 12.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct siginfo__bindgen_ty_1__bindgen_ty_1 {
    pub si_signo: ::std::os::raw::c_int,
    pub si_errno: ::std::os::raw::c_int,
    pub si_code: ::std::os::raw::c_int,
    pub _sifields: __sifields,
}
#[test]
fn bindgen_test_layout_siginfo__bindgen_ty_1__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<siginfo__bindgen_ty_1__bindgen_ty_1>(),
32usize,
concat!("Size of: ", stringify!(siginfo__bindgen_ty_1__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<siginfo__bindgen_ty_1__bindgen_ty_1>(),
4usize,
concat!(
"Alignment of ",
stringify!(siginfo__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<siginfo__bindgen_ty_1__bindgen_ty_1>())).si_signo as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(siginfo__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(si_signo)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<siginfo__bindgen_ty_1__bindgen_ty_1>())).si_errno as *const _
as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(siginfo__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(si_errno)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<siginfo__bindgen_ty_1__bindgen_ty_1>())).si_code as *const _
as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(siginfo__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(si_code)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<siginfo__bindgen_ty_1__bindgen_ty_1>()))._sifields as *const _
as usize
},
12usize,
concat!(
"Offset of field: ",
stringify!(siginfo__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_sifields)
)
);
}
#[test]
fn bindgen_test_layout_siginfo__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<siginfo__bindgen_ty_1>(),
128usize,
concat!("Size of: ", stringify!(siginfo__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<siginfo__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(siginfo__bindgen_ty_1))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<siginfo__bindgen_ty_1>()))._si_pad as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(siginfo__bindgen_ty_1),
"::",
stringify!(_si_pad)
)
);
}
#[test]
fn bindgen_test_layout_siginfo() {
    // NOTE(review): the original line was truncated to a bare `|` (no
    // function body), which does not compile. Reconstructed in the style of
    // the surrounding generated tests. The expected values are grounded in
    // the sibling test for `siginfo__bindgen_ty_1` (size 128, align 4):
    // `siginfo`'s only member is that union, at offset 0.
    assert_eq!(
        ::std::mem::size_of::<siginfo>(),
        128usize,
        concat!("Size of: ", stringify!(siginfo))
    );
    assert_eq!(
        ::std::mem::align_of::<siginfo>(),
        4usize,
        concat!("Alignment of ", stringify!(siginfo))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<siginfo>())).__bindgen_anon_1 as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(siginfo),
            "::",
            stringify!(__bindgen_anon_1)
        )
    );
}
pub type siginfo_t = siginfo; // POSIX-style alias for the struct above
/// Asynchronous-notification descriptor (`sigevent_t`): a value, a signal
/// number, the notification method, and a union of method-specific data.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct sigevent {
    pub sigev_value: sigval_t,
    pub sigev_signo: ::std::os::raw::c_int,
    pub sigev_notify: ::std::os::raw::c_int,
    pub _sigev_un: sigevent__bindgen_ty_1,
}
/// Method-specific data for `sigevent`: padding (which fixes the size at
/// 52 bytes per the layout test), a target thread id, or a thread-callback
/// descriptor. All members overlay at offset 0.
#[repr(C)]
#[derive(Copy, Clone)]
pub union sigevent__bindgen_ty_1 {
    pub _pad: [::std::os::raw::c_int; 13usize],
    pub _tid: ::std::os::raw::c_int,
    pub _sigev_thread: sigevent__bindgen_ty_1__bindgen_ty_1,
}
/// Thread-notification variant: a callback receiving the `sigval`, plus an
/// opaque attribute pointer (presumably a pthread attr — TODO confirm
/// against the C header).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sigevent__bindgen_ty_1__bindgen_ty_1 {
    pub _function: ::std::option::Option<unsafe extern "C" fn(arg1: sigval_t)>,
    pub _attribute: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout_sigevent__bindgen_ty_1__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<sigevent__bindgen_ty_1__bindgen_ty_1>(),
8usize,
concat!(
"Size of: ",
stringify!(sigevent__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
::std::mem::align_of::<sigevent__bindgen_ty_1__bindgen_ty_1>(),
4usize,
concat!(
"Alignment of ",
stringify!(sigevent__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigevent__bindgen_ty_1__bindgen_ty_1>()))._function as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigevent__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_function)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigevent__bindgen_ty_1__bindgen_ty_1>()))._attribute as *const _
as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(sigevent__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(_attribute)
)
);
}
#[test]
fn bindgen_test_layout_sigevent__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<sigevent__bindgen_ty_1>(),
52usize,
concat!("Size of: ", stringify!(sigevent__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<sigevent__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(sigevent__bindgen_ty_1))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent__bindgen_ty_1>()))._pad as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigevent__bindgen_ty_1),
"::",
stringify!(_pad)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent__bindgen_ty_1>()))._tid as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigevent__bindgen_ty_1),
"::",
stringify!(_tid)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigevent__bindgen_ty_1>()))._sigev_thread as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigevent__bindgen_ty_1),
"::",
stringify!(_sigev_thread)
)
);
}
#[test]
fn bindgen_test_layout_sigevent() {
assert_eq!(
::std::mem::size_of::<sigevent>(),
64usize,
concat!("Size of: ", stringify!(sigevent))
);
assert_eq!(
::std::mem::align_of::<sigevent>(),
4usize,
concat!("Alignment of ", stringify!(sigevent))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent>())).sigev_value as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(sigevent),
"::",
stringify!(sigev_value)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent>())).sigev_signo as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(sigevent),
"::",
stringify!(sigev_signo)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent>())).sigev_notify as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(sigevent),
"::",
stringify!(sigev_notify)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigevent>()))._sigev_un as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(sigevent),
"::",
stringify!(_sigev_un)
)
);
}
pub type sigevent_t = sigevent; // POSIX-style alias
// Type for variables safely accessible from signal handlers.
pub type sig_atomic_t = ::std::os::raw::c_int;
// Two conventional spellings of the plain-handler pointer type.
pub type sig_t = __sighandler_t;
pub type sighandler_t = __sighandler_t;
/// 64-bit signal set: two words (8 bytes total per the layout test, so each
/// `c_ulong` is 4 bytes on this target). Used by `sigaction64` below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct sigset64_t {
    pub __bits: [::std::os::raw::c_ulong; 2usize],
}
// Generated layout check: size/alignment/field-offset assertions produced by
// bindgen. NOTE(review): the `&(*null()).field` idiom is the historical
// bindgen offset-of pattern; newer bindgen emits a MaybeUninit/addr_of!
// form instead — presumably fine as-is, but worth refreshing on regeneration.
#[test]
fn bindgen_test_layout_sigset64_t() {
    assert_eq!(
        ::std::mem::size_of::<sigset64_t>(),
        8usize,
        concat!("Size of: ", stringify!(sigset64_t))
    );
    assert_eq!(
        ::std::mem::align_of::<sigset64_t>(),
        4usize,
        concat!("Alignment of ", stringify!(sigset64_t))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<sigset64_t>())).__bits as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(sigset64_t),
            "::",
            stringify!(__bits)
        )
    );
}
/// Userspace `sigaction` with the 32-bit `sigset_t` mask. Field order here
/// is handler, mask, flags, restorer (contrast with `sigaction64` below,
/// which moves the mask to the end).
#[repr(C)]
#[derive(Copy, Clone)]
pub struct sigaction {
    // Handler union: `sa_handler` or `sa_sigaction`.
    pub __bindgen_anon_1: sigaction__bindgen_ty_1,
    pub sa_mask: sigset_t,
    pub sa_flags: ::std::os::raw::c_int,
    // Optional sigreturn trampoline; None encodes a NULL function pointer.
    pub sa_restorer: ::std::option::Option<unsafe extern "C" fn()>,
}
/// Overlapping storage for the two handler flavors of `sigaction`:
/// signal-number-only, or the three-argument siginfo-style handler.
#[repr(C)]
#[derive(Copy, Clone)]
pub union sigaction__bindgen_ty_1 {
    pub sa_handler: sighandler_t,
    pub sa_sigaction: ::std::option::Option<
        unsafe extern "C" fn(
            arg1: ::std::os::raw::c_int,
            arg2: *mut siginfo,
            arg3: *mut ::std::os::raw::c_void,
        ),
    >,
}
#[test]
fn bindgen_test_layout_sigaction__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<sigaction__bindgen_ty_1>(),
4usize,
concat!("Size of: ", stringify!(sigaction__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<sigaction__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(sigaction__bindgen_ty_1))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigaction__bindgen_ty_1>())).sa_handler as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigaction__bindgen_ty_1),
"::",
stringify!(sa_handler)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigaction__bindgen_ty_1>())).sa_sigaction as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigaction__bindgen_ty_1),
"::",
stringify!(sa_sigaction)
)
);
}
#[test]
fn bindgen_test_layout_sigaction() {
assert_eq!(
::std::mem::size_of::<sigaction>(),
16usize,
concat!("Size of: ", stringify!(sigaction))
);
assert_eq!(
::std::mem::align_of::<sigaction>(),
4usize,
concat!("Alignment of ", stringify!(sigaction))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction>())).sa_mask as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(sigaction),
"::",
stringify!(sa_mask)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction>())).sa_flags as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(sigaction),
"::",
stringify!(sa_flags)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction>())).sa_restorer as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(sigaction),
"::",
stringify!(sa_restorer)
)
);
}
/// `sigaction` variant carrying the 64-bit signal mask (`sigset64_t`).
/// Note the different field order from `sigaction`: the mask comes LAST
/// (offset 12 per the layout test), after flags and restorer.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct sigaction64 {
    // Handler union: `sa_handler` or `sa_sigaction`.
    pub __bindgen_anon_1: sigaction64__bindgen_ty_1,
    pub sa_flags: ::std::os::raw::c_int,
    // Optional sigreturn trampoline; None encodes a NULL function pointer.
    pub sa_restorer: ::std::option::Option<unsafe extern "C" fn()>,
    pub sa_mask: sigset64_t,
}
/// Overlapping storage for the two handler flavors of `sigaction64`;
/// identical shape to `sigaction__bindgen_ty_1`.
#[repr(C)]
#[derive(Copy, Clone)]
pub union sigaction64__bindgen_ty_1 {
    pub sa_handler: sighandler_t,
    pub sa_sigaction: ::std::option::Option<
        unsafe extern "C" fn(
            arg1: ::std::os::raw::c_int,
            arg2: *mut siginfo,
            arg3: *mut ::std::os::raw::c_void,
        ),
    >,
}
#[test]
fn bindgen_test_layout_sigaction64__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<sigaction64__bindgen_ty_1>(),
4usize,
concat!("Size of: ", stringify!(sigaction64__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<sigaction64__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(sigaction64__bindgen_ty_1))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigaction64__bindgen_ty_1>())).sa_handler as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigaction64__bindgen_ty_1),
"::",
stringify!(sa_handler)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<sigaction64__bindgen_ty_1>())).sa_sigaction as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(sigaction64__bindgen_ty_1),
"::",
stringify!(sa_sigaction)
)
);
}
#[test]
fn bindgen_test_layout_sigaction64() {
assert_eq!(
::std::mem::size_of::<sigaction64>(),
20usize,
concat!("Size of: ", stringify!(sigaction64))
);
assert_eq!(
::std::mem::align_of::<sigaction64>(),
4usize,
concat!("Alignment of ", stringify!(sigaction64))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction64>())).sa_flags as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(sigaction64),
"::",
stringify!(sa_flags)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction64>())).sa_restorer as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(sigaction64),
"::",
stringify!(sa_restorer)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<sigaction64>())).sa_mask as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(sigaction64),
"::",
stringify!(sa_mask)
)
);
}
/// Floating-point register block: eight packed 12-byte registers
/// (`user_fpregs_fp_reg`), an 8-byte raw bitfield unit, one type byte per
/// register, and an init flag. Presumably the ARM FPA layout exposed via the
/// ptrace user area — TODO confirm against the kernel headers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct user_fpregs {
    pub fpregs: [user_fpregs_fp_reg; 8usize],
    pub _bitfield_align_1: [u32; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 8usize]>,
    pub ftype: [::std::os::raw::c_uchar; 8usize],
    pub init_flag: ::std::os::raw::c_uint,
}
/// One 96-bit (12-byte) floating-point register stored entirely as packed
/// bitfields; the individual fields (sign/exponent/mantissa) are reached via
/// the generated accessors in the `impl` block below.
#[repr(C)]
#[repr(align(4))]
#[derive(Debug, Copy, Clone)]
pub struct user_fpregs_fp_reg {
    pub _bitfield_align_1: [u32; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 12usize]>,
}
#[test]
fn bindgen_test_layout_user_fpregs_fp_reg() {
assert_eq!(
::std::mem::size_of::<user_fpregs_fp_reg>(),
12usize,
concat!("Size of: ", stringify!(user_fpregs_fp_reg))
);
assert_eq!(
::std::mem::align_of::<user_fpregs_fp_reg>(),
4usize,
concat!("Alignment of ", stringify!(user_fpregs_fp_reg))
);
}
// Bitfield accessors generated by bindgen. Each getter/setter passes a
// (bit offset, bit width) pair to the bitfield unit; the pairs below map out
// the register's packed layout.
impl user_fpregs_fp_reg {
    // bits [0, 1): sign1
    #[inline]
    pub fn sign1(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_sign1(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    // bits [1, 16): unused
    #[inline]
    pub fn unused(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 15u8) as u32) }
    }
    #[inline]
    pub fn set_unused(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(1usize, 15u8, val as u64)
        }
    }
    // bits [16, 17): sign2
    #[inline]
    pub fn sign2(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_sign2(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(16usize, 1u8, val as u64)
        }
    }
    // bits [17, 31): exponent
    #[inline]
    pub fn exponent(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(17usize, 14u8) as u32) }
    }
    #[inline]
    pub fn set_exponent(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(17usize, 14u8, val as u64)
        }
    }
    // bits [31, 32): j
    #[inline]
    pub fn j(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(31usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_j(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(31usize, 1u8, val as u64)
        }
    }
    // bits [32, 63): mantissa1
    #[inline]
    pub fn mantissa1(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(32usize, 31u8) as u32) }
    }
    #[inline]
    pub fn set_mantissa1(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(32usize, 31u8, val as u64)
        }
    }
    // bits [64, 96): mantissa0
    #[inline]
    pub fn mantissa0(&self) -> ::std::os::raw::c_uint {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(64usize, 32u8) as u32) }
    }
    #[inline]
    pub fn set_mantissa0(&mut self, val: ::std::os::raw::c_uint) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(64usize, 32u8, val as u64)
        }
    }
    // Packs all fields into a fresh bitfield unit in one call (used by
    // generated constructors elsewhere).
    #[inline]
    pub fn new_bitfield_1(
        sign1: ::std::os::raw::c_uint,
        unused: ::std::os::raw::c_uint,
        sign2: ::std::os::raw::c_uint,
        exponent: ::std::os::raw::c_uint,
        j: ::std::os::raw::c_uint,
        mantissa1: ::std::os::raw::c_uint,
        mantissa0: ::std::os::raw::c_uint,
    ) -> __BindgenBitfieldUnit<[u8; 12usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 12usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let sign1: u32 = unsafe { ::std::mem::transmute(sign1) };
            sign1 as u64
        });
        __bindgen_bitfield_unit.set(1usize, 15u8, {
            let unused: u32 = unsafe { ::std::mem::transmute(unused) };
            unused as u64
        });
        __bindgen_bitfield_unit.set(16usize, 1u8, {
            let sign2: u32 = unsafe { ::std::mem::transmute(sign2) };
            sign2 as u64
        });
        __bindgen_bitfield_unit.set(17usize, 14u8, {
            let exponent: u32 = unsafe { ::std::mem::transmute(exponent) };
            exponent as u64
        });
        __bindgen_bitfield_unit.set(31usize, 1u8, {
            let j: u32 = unsafe { ::std::mem::transmute(j) };
            j as u64
        });
        __bindgen_bitfield_unit.set(32usize, 31u8, {
            let mantissa1: u32 = unsafe { ::std::mem::transmute(mantissa1) };
            mantissa1 as u64
        });
        __bindgen_bitfield_unit.set(64usize, 32u8, {
            let mantissa0: u32 = unsafe { ::std::mem::transmute(mantissa0) };
            mantissa0 as u64
        });
        __bindgen_bitfield_unit
    }
}
#[test]
fn bindgen_test_layout_user_fpregs() {
assert_eq!(
::std::mem::size_of::<user_fpregs>(),
116usize,
concat!("Size of: ", stringify!(user_fpregs))
);
assert_eq!(
::std::mem::align_of::<user_fpregs>(),
4usize,
concat!("Alignment of ", stringify!(user_fpregs))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_fpregs>())).fpregs as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(user_fpregs),
"::",
stringify!(fpregs)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_fpregs>())).ftype as *const _ as usize },
104usize,
concat!(
"Offset of field: ",
stringify!(user_fpregs),
"::",
stringify!(ftype)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_fpregs>())).init_flag as *const _ as usize },
112usize,
concat!(
"Offset of field: ",
stringify!(user_fpregs),
"::",
stringify!(init_flag)
)
);
}
impl user_fpregs {
#[inline]
pub fn fpsr(&self) -> ::std::os::raw::c_uint {
unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 32u8) as u32) }
}
#[inline]
pub fn set_fpsr(&mut self, val: ::std::os::raw::c_uint) {
unsafe {
let val: u32 = ::std::mem::transmute(val);
self._bitfield_1.set(0usize, 32u8, val as u64)
}
}
#[inline]
pub fn fpcr(&self) -> ::std::os::raw::c_uint {
unsafe { ::std::mem::transmute(self._bitfield_1.get(32usize, 32u8) as u32) }
}
#[inline]
pub fn set_fpcr(&mut self, val: ::std::os::raw::c_uint) {
unsafe {
let val: u32 = ::std::mem::transmute(val);
self._bitfield_1.set(32usize, 32u8, val as u64)
}
}
#[inline]
pub fn new_bitfield_1(
fpsr: ::std::os::raw::c_uint,
fpcr: ::std::os::raw::c_uint,
) -> __BindgenBitfieldUnit<[u8; 8usize]> {
let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 8usize]> = Default::default();
__bindgen_bitfield_unit.set(0usize, 32u8, {
let fpsr: u32 = unsafe { ::std::mem::transmute(fpsr) };
fpsr as u64
});
__bindgen_bitfield_unit.set(32usize, 32u8, {
let fpcr: u32 = unsafe { ::std::mem::transmute(fpcr) };
fpcr as u64
});
__bindgen_bitfield_unit
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct user_regs {
pub uregs: [::std::os::raw::c_ulong; 18usize],
}
#[test]
fn bindgen_test_layout_user_regs() {
assert_eq!(
::std::mem::size_of::<user_regs>(),
72usize,
concat!("Size of: ", stringify!(user_regs))
);
assert_eq!(
::std::mem::align_of::<user_regs>(),
4usize,
concat!("Alignment of ", stringify!(user_regs))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_regs>())).uregs as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(user_regs),
"::",
stringify!(uregs)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct user_vfp {
pub fpregs: [::std::os::raw::c_ulonglong; 32usize],
pub fpscr: ::std::os::raw::c_ulong,
}
#[test]
fn bindgen_test_layout_user_vfp() {
assert_eq!(
::std::mem::size_of::<user_vfp>(),
264usize,
concat!("Size of: ", stringify!(user_vfp))
);
assert_eq!(
::std::mem::align_of::<user_vfp>(),
8usize,
concat!("Alignment of ", stringify!(user_vfp))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_vfp>())).fpregs as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(user_vfp),
"::",
stringify!(fpregs)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_vfp>())).fpscr as *const _ as usize },
256usize,
concat!(
"Offset of field: ",
stringify!(user_vfp),
"::",
stringify!(fpscr)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct user_vfp_exc {
pub fpexc: ::std::os::raw::c_ulong,
pub fpinst: ::std::os::raw::c_ulong,
pub fpinst2: ::std::os::raw::c_ulong,
}
#[test]
fn bindgen_test_layout_user_vfp_exc() {
assert_eq!(
::std::mem::size_of::<user_vfp_exc>(),
12usize,
concat!("Size of: ", stringify!(user_vfp_exc))
);
assert_eq!(
::std::mem::align_of::<user_vfp_exc>(),
4usize,
concat!("Alignment of ", stringify!(user_vfp_exc))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_vfp_exc>())).fpexc as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(user_vfp_exc),
"::",
stringify!(fpexc)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_vfp_exc>())).fpinst as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(user_vfp_exc),
"::",
stringify!(fpinst)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user_vfp_exc>())).fpinst2 as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(user_vfp_exc),
"::",
stringify!(fpinst2)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct user {
pub regs: user_regs,
pub u_fpvalid: ::std::os::raw::c_int,
pub u_tsize: ::std::os::raw::c_ulong,
pub u_dsize: ::std::os::raw::c_ulong,
pub u_ssize: ::std::os::raw::c_ulong,
pub start_code: ::std::os::raw::c_ulong,
pub start_stack: ::std::os::raw::c_ulong,
pub signal: ::std::os::raw::c_long,
pub reserved: ::std::os::raw::c_int,
pub u_ar0: *mut user_regs,
pub magic: ::std::os::raw::c_ulong,
pub u_comm: [::std::os::raw::c_char; 32usize],
pub u_debugreg: [::std::os::raw::c_int; 8usize],
pub u_fp: user_fpregs,
pub u_fp0: *mut user_fpregs,
}
#[test]
fn bindgen_test_layout_user() {
assert_eq!(
::std::mem::size_of::<user>(),
296usize,
concat!("Size of: ", stringify!(user))
);
assert_eq!(
::std::mem::align_of::<user>(),
4usize,
concat!("Alignment of ", stringify!(user))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).regs as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(regs)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_fpvalid as *const _ as usize },
72usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_fpvalid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_tsize as *const _ as usize },
76usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_tsize)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_dsize as *const _ as usize },
80usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_dsize)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_ssize as *const _ as usize },
84usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_ssize)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).start_code as *const _ as usize },
88usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(start_code)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).start_stack as *const _ as usize },
92usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(start_stack)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).signal as *const _ as usize },
96usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(signal)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).reserved as *const _ as usize },
100usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(reserved)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_ar0 as *const _ as usize },
104usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_ar0)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).magic as *const _ as usize },
108usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(magic)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_comm as *const _ as usize },
112usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_comm)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_debugreg as *const _ as usize },
144usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_debugreg)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_fp as *const _ as usize },
176usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_fp)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<user>())).u_fp0 as *const _ as usize },
292usize,
concat!(
"Offset of field: ",
stringify!(user),
"::",
stringify!(u_fp0)
)
);
}
pub const REG_R0: ::std::os::raw::c_uint = 0;
pub const REG_R1: ::std::os::raw::c_uint = 1;
pub const REG_R2: ::std::os::raw::c_uint = 2;
pub const REG_R3: ::std::os::raw::c_uint = 3;
pub const REG_R4: ::std::os::raw::c_uint = 4;
pub const REG_R5: ::std::os::raw::c_uint = 5;
pub const REG_R6: ::std::os::raw::c_uint = 6;
pub const REG_R7: ::std::os::raw::c_uint = 7;
pub const REG_R8: ::std::os::raw::c_uint = 8;
pub const REG_R9: ::std::os::raw::c_uint = 9;
pub const REG_R10: ::std::os::raw::c_uint = 10;
pub const REG_R11: ::std::os::raw::c_uint = 11;
pub const REG_R12: ::std::os::raw::c_uint = 12;
pub const REG_R13: ::std::os::raw::c_uint = 13;
pub const REG_R14: ::std::os::raw::c_uint = 14;
pub const REG_R15: ::std::os::raw::c_uint = 15;
pub type _bindgen_ty_46 = ::std::os::raw::c_uint;
pub type greg_t = ::std::os::raw::c_int;
pub type gregset_t = [greg_t; 18usize];
pub type fpregset_t = user_fpregs;
pub type mcontext_t = sigcontext;
#[repr(C)]
#[repr(align(8))]
#[derive(Copy, Clone)]
pub struct ucontext {
pub uc_flags: ::std::os::raw::c_ulong,
pub uc_link: *mut ucontext,
pub uc_stack: stack_t,
pub uc_mcontext: mcontext_t,
pub __bindgen_anon_1: ucontext__bindgen_ty_1,
pub __padding: [::std::os::raw::c_char; 120usize],
pub uc_regspace: [::std::os::raw::c_ulong; 128usize],
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union ucontext__bindgen_ty_1 {
pub __bindgen_anon_1: ucontext__bindgen_ty_1__bindgen_ty_1,
pub uc_sigmask64: sigset64_t,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ucontext__bindgen_ty_1__bindgen_ty_1 {
pub uc_sigmask: sigset_t,
pub __padding_rt_sigset: u32,
}
#[test]
fn bindgen_test_layout_ucontext__bindgen_ty_1__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<ucontext__bindgen_ty_1__bindgen_ty_1>(),
8usize,
concat!(
"Size of: ",
stringify!(ucontext__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
::std::mem::align_of::<ucontext__bindgen_ty_1__bindgen_ty_1>(),
4usize,
concat!(
"Alignment of ",
stringify!(ucontext__bindgen_ty_1__bindgen_ty_1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ucontext__bindgen_ty_1__bindgen_ty_1>())).uc_sigmask as *const _
as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(ucontext__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(uc_sigmask)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ucontext__bindgen_ty_1__bindgen_ty_1>())).__padding_rt_sigset
as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(ucontext__bindgen_ty_1__bindgen_ty_1),
"::",
stringify!(__padding_rt_sigset)
)
);
}
#[test]
fn bindgen_test_layout_ucontext__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<ucontext__bindgen_ty_1>(),
8usize,
concat!("Size of: ", stringify!(ucontext__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<ucontext__bindgen_ty_1>(),
4usize,
concat!("Alignment of ", stringify!(ucontext__bindgen_ty_1))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ucontext__bindgen_ty_1>())).uc_sigmask64 as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(ucontext__bindgen_ty_1),
"::",
stringify!(uc_sigmask64)
)
);
}
#[test]
fn bindgen_test_layout_ucontext() {
assert_eq!(
::std::mem::size_of::<ucontext>(),
744usize,
concat!("Size of: ", stringify!(ucontext))
);
assert_eq!(
::std::mem::align_of::<ucontext>(),
8usize,
concat!("Alignment of ", stringify!(ucontext))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).uc_flags as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(uc_flags)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).uc_link as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(uc_link)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).uc_stack as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(uc_stack)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).uc_mcontext as *const _ as usize },
20usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(uc_mcontext)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).__padding as *const _ as usize },
112usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(__padding)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<ucontext>())).uc_regspace as *const _ as usize },
232usize,
concat!(
"Offset of field: ",
stringify!(ucontext),
"::",
stringify!(uc_regspace)
)
);
}
pub type ucontext_t = ucontext;
extern "C" {
pub fn __libc_current_sigrtmin() -> ::std::os::raw::c_int;
}
extern "C" {
pub fn __libc_current_sigrtmax() -> ::std::os::raw::c_int;
}
extern "C" {
pub static sys_siglist: [*const ::std::os::raw::c_char; 65usize];
}
extern "C" {
pub static sys_signame: [*const ::std::os::raw::c_char; 65usize];
}
extern "C" {
pub fn sigaction(
__signal: ::std::os::raw::c_int,
__new_action: *const sigaction,
__old_action: *mut sigaction,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigaction64(
__signal: ::std::os::raw::c_int,
__new_action: *const sigaction64,
__old_action: *mut sigaction64,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn siginterrupt(
__signal: ::std::os::raw::c_int,
__flag: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn signal(__signal: ::std::os::raw::c_int, __handler: sighandler_t) -> sighandler_t;
}
extern "C" {
pub fn sigaddset(
__set: *mut sigset_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigaddset64(
__set: *mut sigset64_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigdelset(
__set: *mut sigset_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigdelset64(
__set: *mut sigset64_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigemptyset(__set: *mut sigset_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigemptyset64(__set: *mut sigset64_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigfillset(__set: *mut sigset_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigfillset64(__set: *mut sigset64_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigismember(
__set: *const sigset_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigismember64(
__set: *const sigset64_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigpending(__set: *mut sigset_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigpending64(__set: *mut sigset64_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigprocmask(
__how: ::std::os::raw::c_int,
__new_set: *const sigset_t,
__old_set: *mut sigset_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigprocmask64(
__how: ::std::os::raw::c_int,
__new_set: *const sigset64_t,
__old_set: *mut sigset64_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigsuspend(__mask: *const sigset_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigsuspend64(__mask: *const sigset64_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigwait(
__set: *const sigset_t,
__signal: *mut ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigwait64(
__set: *const sigset64_t,
__signal: *mut ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sighold(__signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigignore(__signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigpause(__signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigrelse(__signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigset(__signal: ::std::os::raw::c_int, __handler: sighandler_t) -> sighandler_t;
}
extern "C" {
pub fn raise(__signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn kill(__pid: pid_t, __signal: ::std::os::raw::c_int) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn killpg(
__pgrp: ::std::os::raw::c_int,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn tgkill(
__tgid: ::std::os::raw::c_int,
__tid: ::std::os::raw::c_int,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigaltstack(
__new_signal_stack: *const stack_t,
__old_signal_stack: *mut stack_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn psiginfo(__info: *const siginfo_t, __msg: *const ::std::os::raw::c_char);
}
extern "C" {
pub fn psignal(__signal: ::std::os::raw::c_int, __msg: *const ::std::os::raw::c_char);
}
extern "C" {
pub fn pthread_kill(
__pthread: pthread_t,
__signal: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn pthread_sigmask(
__how: ::std::os::raw::c_int,
__new_set: *const sigset_t,
__old_set: *mut sigset_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn pthread_sigmask64(
__how: ::std::os::raw::c_int,
__new_set: *const sigset64_t,
__old_set: *mut sigset64_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigqueue(
__pid: pid_t,
__signal: ::std::os::raw::c_int,
__value: sigval,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigtimedwait(
__set: *const sigset_t,
__info: *mut siginfo_t,
__timeout: *const timespec,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigtimedwait64(
__set: *const sigset64_t,
__info: *mut siginfo_t,
__timeout: *const timespec,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigwaitinfo(__set: *const sigset_t, __info: *mut siginfo_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn sigwaitinfo64(__set: *const sigset64_t, __info: *mut siginfo_t)
-> ::std::os::raw::c_int;
}
pub type fd_mask = ::std::os::raw::c_ulong;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct fd_set {
pub fds_bits: [fd_mask; 32usize],
}
#[test]
fn bindgen_test_layout_fd_set() {
assert_eq!(
::std::mem::size_of::<fd_set>(),
128usize,
concat!("Size of: ", stringify!(fd_set))
);
assert_eq!(
::std::mem::align_of::<fd_set>(),
4usize,
concat!("Alignment of ", stringify!(fd_set))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<fd_set>())).fds_bits as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(fd_set),
"::",
stringify!(fds_bits)
)
);
}
extern "C" {
pub fn __FD_CLR_chk(arg1: ::std::os::raw::c_int, arg2: *mut fd_set, arg3: size_t);
}
extern "C" {
pub fn __FD_SET_chk(arg1: ::std::os::raw::c_int, arg2: *mut fd_set, arg3: size_t);
}
extern "C" {
pub fn __FD_ISSET_chk(
arg1: ::std::os::raw::c_int,
arg2: *const fd_set,
arg3: size_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn select(
__max_fd_plus_one: ::std::os::raw::c_int,
__read_fds: *mut fd_set,
__write_fds: *mut fd_set,
__exception_fds: *mut fd_set,
__timeout: *mut timeval,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn pselect(
__max_fd_plus_one: ::std::os::raw::c_int,
__read_fds: *mut fd_set,
__write_fds: *mut fd_set,
__exception_fds: *mut fd_set,
__timeout: *const timespec,
__mask: *const sigset_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn pselect64(
__max_fd_plus_one: ::std::os::raw::c_int,
__read_fds: *mut fd_set,
__write_fds: *mut fd_set,
__exception_fds: *mut fd_set,
__timeout: *const timespec,
__mask: *const sigset64_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn gettimeofday(__tv: *mut timeval, __tz: *mut timezone) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn settimeofday(__tv: *const timeval, __tz: *const timezone) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn getitimer(
__which: ::std::os::raw::c_int,
__current_value: *mut itimerval,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn setitimer(
__which: ::std::os::raw::c_int,
__new_value: *const itimerval,
__old_value: *mut itimerval,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn utimes(
__path: *const ::std::os::raw::c_char,
__times: *const timeval,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub static mut tzname: [*mut ::std::os::raw::c_char; 0usize];
}
extern "C" {
pub static mut daylight: ::std::os::raw::c_int;
}
extern "C" {
pub static mut timezone: ::std::os::raw::c_long;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct tm {
pub tm_sec: ::std::os::raw::c_int,
pub tm_min: ::std::os::raw::c_int,
pub tm_hour: ::std::os::raw::c_int,
pub tm_mday: ::std::os::raw::c_int,
pub tm_mon: ::std::os::raw::c_int,
pub tm_year: ::std::os::raw::c_int,
pub tm_wday: ::std::os::raw::c_int,
pub tm_yday: ::std::os::raw::c_int,
pub tm_isdst: ::std::os::raw::c_int,
pub tm_gmtoff: ::std::os::raw::c_long,
pub tm_zone: *const ::std::os::raw::c_char,
}
#[test]
fn bindgen_test_layout_tm() {
assert_eq!(
::std::mem::size_of::<tm>(),
44usize,
concat!("Size of: ", stringify!(tm))
);
assert_eq!(
::std::mem::align_of::<tm>(),
4usize,
concat!("Alignment of ", stringify!(tm))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_sec as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_sec)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_min as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_min)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_hour as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_hour)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_mday as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_mday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_mon as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_mon)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_year as *const _ as usize },
20usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_year)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_wday as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_wday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_yday as *const _ as usize },
28usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_yday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_isdst as *const _ as usize },
32usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_isdst)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_gmtoff as *const _ as usize },
36usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_gmtoff)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_zone as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_zone)
)
);
}
extern "C" {
pub fn time(__t: *mut time_t) -> time_t;
}
extern "C" {
pub fn nanosleep(
__request: *const timespec,
__remainder: *mut timespec,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn asctime(__tm: *const tm) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn asctime_r(
__tm: *const tm,
__buf: *mut ::std::os::raw::c_char,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn difftime(__lhs: time_t, __rhs: time_t) -> f64;
}
extern "C" {
pub fn mktime(__tm: *mut tm) -> time_t;
}
extern "C" {
pub fn localtime(__t: *const time_t) -> *mut tm;
}
extern "C" {
pub fn localtime_r(__t: *const time_t, __tm: *mut tm) -> *mut tm;
}
extern "C" {
pub fn gmtime(__t: *const time_t) -> *mut tm;
}
extern "C" {
pub fn gmtime_r(__t: *const time_t, __tm: *mut tm) -> *mut tm;
}
extern "C" {
pub fn strptime(
__s: *const ::std::os::raw::c_char,
__fmt: *const ::std::os::raw::c_char,
__tm: *mut tm,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn strptime_l(
__s: *const ::std::os::raw::c_char,
__fmt: *const ::std::os::raw::c_char,
__tm: *mut tm,
__l: locale_t,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn strftime(
__buf: *mut ::std::os::raw::c_char,
__n: size_t,
__fmt: *const ::std::os::raw::c_char,
__tm: *const tm,
) -> size_t;
}
extern "C" {
pub fn strftime_l(
__buf: *mut ::std::os::raw::c_char,
__n: size_t,
__fmt: *const ::std::os::raw::c_char,
__tm: *const tm,
__l: locale_t,
) -> size_t;
}
extern "C" {
pub fn ctime(__t: *const time_t) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn ctime_r(
__t: *const time_t,
__buf: *mut ::std::os::raw::c_char,
) -> *mut ::std::os::raw::c_char;
}
extern "C" {
pub fn tzset();
}
extern "C" {
pub fn clock() -> clock_t;
}
extern "C" {
pub fn clock_getcpuclockid(__pid: pid_t, __clock: *mut clockid_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn clock_getres(__clock: clockid_t, __resolution: *mut timespec) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn clock_gettime(__clock: clockid_t, __ts: *mut timespec) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn clock_nanosleep(
__clock: clockid_t,
__flags: ::std::os::raw::c_int,
__request: *const timespec,
__remainder: *mut timespec,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn clock_settime(__clock: clockid_t, __ts: *const timespec) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timer_create(
__clock: clockid_t,
__event: *mut sigevent,
__timer_ptr: *mut timer_t,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timer_delete(__timer: timer_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timer_settime(
__timer: timer_t,
__flags: ::std::os::raw::c_int,
__new_value: *const itimerspec,
__old_value: *mut itimerspec,
) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timer_gettime(__timer: timer_t, __ts: *mut itimerspec) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timer_getoverrun(__timer: timer_t) -> ::std::os::raw::c_int;
}
extern "C" {
pub fn timelocal(__tm: *mut tm) -> time_t;
}
extern "C" {
pub fn timegm(__tm: *mut tm) -> time_t;
}
extern "C" {
pub fn timespec_get(
__ts: *mut timespec,
__base: ::std::os::raw::c_int,
) -> ::std::os::raw::c_int;
}
pub const AAUDIO_DIRECTION_OUTPUT: ::std::os::raw::c_uint = 0;
pub const AAUDIO_DIRECTION_INPUT: ::std::os::raw::c_uint = 1;
pub type _bindgen_ty_47 = ::std::os::raw::c_uint;
pub type aaudio_direction_t = i32;
pub const AAUDIO_FORMAT_INVALID: ::std::os::raw::c_int = -1;
pub const AAUDIO_FORMAT_UNSPECIFIED: ::std::os::raw::c_int = 0;
pub const AAUDIO_FORMAT_PCM_I16: ::std::os::raw::c_int = 1;
pub const AAUDIO_FORMAT_PCM_FLOAT: ::std::os::raw::c_int = 2;
pub const AAUDIO_FORMAT_PCM_I24_PACKED: ::std::os::raw::c_int = 3;
pub const AAUDIO_FORMAT_PCM_I32: ::std::os::raw::c_int = 4;
pub type _bindgen_ty_48 = ::std::os::raw::c_int;
pub type aaudio_format_t = i32;
pub const AAUDIO_OK: ::std::os::raw::c_int = 0;
pub const AAUDIO_ERROR_BASE: ::std::os::raw::c_int = -900;
pub const AAUDIO_ERROR_DISCONNECTED: ::std::os::raw::c_int = -899;
pub const AAUDIO_ERROR_ILLEGAL_ARGUMENT: ::std::os::raw::c_int = -898;
pub const AAUDIO_ERROR_INTERNAL: ::std::os::raw::c_int = -896;
pub const AAUDIO_ERROR_INVALID_STATE: ::std::os::raw::c_int = -895;
pub const AAUDIO_ERROR_INVALID_HANDLE: ::std::os::raw::c_int = -892;
pub const AAUDIO_ERROR_UNIMPLEMENTED: ::std::os::raw::c_int = -890;
pub const AAUDIO_ERROR_UNAVAILABLE: ::std::os::raw::c_int = -889;
pub const AAUDIO_ERROR_NO_FREE_HANDLES: ::std::os::raw::c_int = -888;
pub const AAUDIO_ERROR_NO_MEMORY: ::std::os::raw::c_int = -887;
pub const AAUDIO_ERROR_NULL: ::std::os::raw::c_int = -886;
pub const AAUDIO_ERROR_TIMEOUT: ::std::os::raw::c_int = -885;
pub const AAUDIO_ERROR_WOULD_BLOCK: ::std::os::raw::c_int = -884;
pub const AAUDIO_ERROR_INVALID_FORMAT: ::std::os::raw::c_int = -883;
pub const AAUDIO_ERROR_OUT_OF_RANGE: ::std::os::raw::c_int = -882;
pub const AAUDIO_ERROR_NO_SERVICE: ::std::os::raw::c_int = -881;
pub const AAUDIO_ERROR_INVALID_RATE: ::std::os::raw::c_int = -880;
pub type _bindgen_ty_49 = ::std::os::raw::c_int;
pub type aaudio_result_t = i32;
pub const AAUDIO_STREAM_STATE_UNINITIALIZED: ::std::os::raw::c_uint = 0;
pub const AAUDIO_STREAM_STATE_UNKNOWN: ::std::os::raw::c_uint = 1;
pub const AAUDIO_STREAM_STATE_OPEN: ::std::os::raw::c_uint = 2;
pub const AAUDIO_STREAM_STATE_STARTING: ::std::os::raw::c_uint = 3;
pub const AAUDIO_STREAM_STATE_STARTED: ::std::os::raw::c_uint = 4;
pub const AAUDIO_STREAM_STATE_PAUSING: ::std::os::raw::c_uint = 5;
pub const AAUDIO_STREAM_STATE_PAUSED: ::std::os::raw::c_uint = 6;
pub const AAUDIO_STREAM_STATE_FLUSHING: ::std::os::raw::c_uint = 7;
pub const AAUDIO_STREAM_STATE_FLUSHED: ::std::os::raw::c_uint = 8;
pub const AAUDIO_STREAM_STATE_STOPPING: ::std::os::raw::c_uint = 9;
pub const AAUDIO_STREAM_STATE_STOPPED: ::std::os::raw::c_uint = 10;
pub const AAUDIO_STREAM_STATE_CLOSING: ::std::os::raw::c_uint = 11;
pub const AAUDIO_STREAM_STATE_CLOSED: ::std::os::raw::c_uint = 12;
pub const AAUDIO_STREAM_STATE_DISCONNECTED: ::std::os::raw::c_uint = 13;
pub type _bindgen_ty_50 = ::std::os::raw::c_uint;
pub type aaudio_stream_state_t = i32;
pub const AAUDIO_SHARING_MODE_EXCLUSIVE: ::std::os::raw::c_uint = 0;
pub const AAUDIO_SHARING_MODE_SHARED: ::std::os::raw::c_uint = 1;
pub type _bindgen_ty_51 = ::std::os::raw::c_uint;
pub type aaudio_sharing_mode_t = i32;
pub const AAUDIO_PERFORMANCE_MODE_NONE: ::std::os::raw::c_uint = 10;
pub const AAUDIO_PERFORMANCE_MODE_POWER_SAVING: ::std::os::raw::c_uint = 11;
// ---------------------------------------------------------------------------
// AAudio enum constants (bindgen-generated from the NDK AAudio header).
// The `_bindgen_ty_*` aliases are bindgen's names for anonymous C enums; the
// `aaudio_*_t` aliases are the typedefs the C API actually uses in signatures
// (note they are `i32` even though the constants are emitted as `c_uint`/
// `c_int` — this mirrors the C header and must not be "fixed" by hand).
// Values are ABI: do not edit; regenerate with bindgen instead.
// ---------------------------------------------------------------------------

// Performance mode. Earlier variants of this enum begin before this chunk.
pub const AAUDIO_PERFORMANCE_MODE_LOW_LATENCY: ::std::os::raw::c_uint = 12;
pub type _bindgen_ty_52 = ::std::os::raw::c_uint;
pub type aaudio_performance_mode_t = i32;
// Stream usage hints. Values are non-contiguous (e.g. 6 -> 10), matching the
// gaps in the underlying C header.
pub const AAUDIO_USAGE_MEDIA: ::std::os::raw::c_uint = 1;
pub const AAUDIO_USAGE_VOICE_COMMUNICATION: ::std::os::raw::c_uint = 2;
pub const AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING: ::std::os::raw::c_uint = 3;
pub const AAUDIO_USAGE_ALARM: ::std::os::raw::c_uint = 4;
pub const AAUDIO_USAGE_NOTIFICATION: ::std::os::raw::c_uint = 5;
pub const AAUDIO_USAGE_NOTIFICATION_RINGTONE: ::std::os::raw::c_uint = 6;
pub const AAUDIO_USAGE_NOTIFICATION_EVENT: ::std::os::raw::c_uint = 10;
pub const AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY: ::std::os::raw::c_uint = 11;
pub const AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE: ::std::os::raw::c_uint = 12;
pub const AAUDIO_USAGE_ASSISTANCE_SONIFICATION: ::std::os::raw::c_uint = 13;
pub const AAUDIO_USAGE_GAME: ::std::os::raw::c_uint = 14;
pub const AAUDIO_USAGE_ASSISTANT: ::std::os::raw::c_uint = 16;
// System-reserved usages occupy a separate 1000+ range.
pub const AAUDIO_SYSTEM_USAGE_EMERGENCY: ::std::os::raw::c_uint = 1000;
pub const AAUDIO_SYSTEM_USAGE_SAFETY: ::std::os::raw::c_uint = 1001;
pub const AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS: ::std::os::raw::c_uint = 1002;
pub const AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT: ::std::os::raw::c_uint = 1003;
pub type _bindgen_ty_53 = ::std::os::raw::c_uint;
pub type aaudio_usage_t = i32;
// Content type of the audio signal.
pub const AAUDIO_CONTENT_TYPE_SPEECH: ::std::os::raw::c_uint = 1;
pub const AAUDIO_CONTENT_TYPE_MUSIC: ::std::os::raw::c_uint = 2;
pub const AAUDIO_CONTENT_TYPE_MOVIE: ::std::os::raw::c_uint = 3;
pub const AAUDIO_CONTENT_TYPE_SONIFICATION: ::std::os::raw::c_uint = 4;
pub type _bindgen_ty_54 = ::std::os::raw::c_uint;
pub type aaudio_content_type_t = i32;
// Input (recording) preset selection.
pub const AAUDIO_INPUT_PRESET_GENERIC: ::std::os::raw::c_uint = 1;
pub const AAUDIO_INPUT_PRESET_CAMCORDER: ::std::os::raw::c_uint = 5;
pub const AAUDIO_INPUT_PRESET_VOICE_RECOGNITION: ::std::os::raw::c_uint = 6;
pub const AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION: ::std::os::raw::c_uint = 7;
pub const AAUDIO_INPUT_PRESET_UNPROCESSED: ::std::os::raw::c_uint = 9;
pub const AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE: ::std::os::raw::c_uint = 10;
pub type _bindgen_ty_55 = ::std::os::raw::c_uint;
pub type aaudio_input_preset_t = i32;
// Policy controlling whether other apps may capture this stream's output.
pub const AAUDIO_ALLOW_CAPTURE_BY_ALL: ::std::os::raw::c_uint = 1;
pub const AAUDIO_ALLOW_CAPTURE_BY_SYSTEM: ::std::os::raw::c_uint = 2;
pub const AAUDIO_ALLOW_CAPTURE_BY_NONE: ::std::os::raw::c_uint = 3;
pub type _bindgen_ty_56 = ::std::os::raw::c_uint;
pub type aaudio_allowed_capture_policy_t = i32;
// Audio session id: -1 = no session, 0 = ask the system to allocate one.
// This enum is signed (`c_int`), unlike the ones above.
pub const AAUDIO_SESSION_ID_NONE: ::std::os::raw::c_int = -1;
pub const AAUDIO_SESSION_ID_ALLOCATE: ::std::os::raw::c_int = 0;
pub type _bindgen_ty_57 = ::std::os::raw::c_int;
pub type aaudio_session_id_t = i32;
// Opaque handle for an AAudio stream. The zero-sized `_unused` field is the
// standard bindgen pattern for a C type whose layout is hidden: it can only
// ever be used behind a raw pointer, never constructed or dereferenced here.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AAudioStreamStruct {
    _unused: [u8; 0],
}
pub type AAudioStream = AAudioStreamStruct;
// Opaque handle for an AAudio stream builder (same zero-sized opaque-type
// pattern as `AAudioStreamStruct`); obtained via `AAudio_createStreamBuilder`
// and only ever passed around as `*mut AAudioStreamBuilder`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AAudioStreamBuilderStruct {
    _unused: [u8; 0],
}
pub type AAudioStreamBuilder = AAudioStreamBuilderStruct;
// ---------------------------------------------------------------------------
// AAudio helper and stream-builder FFI declarations. These are direct,
// bindgen-generated bindings to libaaudio; all are `unsafe` to call and the
// raw pointers must obey the C API's ownership rules (the builder setters
// take a mutable builder pointer and return nothing; errors surface later
// from `AAudioStreamBuilder_openStream`).
// ---------------------------------------------------------------------------

// Convert an `aaudio_result_t` error code to a human-readable C string.
extern "C" {
    pub fn AAudio_convertResultToText(returnCode: aaudio_result_t)
        -> *const ::std::os::raw::c_char;
}
// Convert a stream state to a human-readable C string.
extern "C" {
    pub fn AAudio_convertStreamStateToText(
        state: aaudio_stream_state_t,
    ) -> *const ::std::os::raw::c_char;
}
// Allocate a new builder; on success `*builder` points at the new object.
extern "C" {
    pub fn AAudio_createStreamBuilder(builder: *mut *mut AAudioStreamBuilder) -> aaudio_result_t;
}
// --- Builder setters: each configures one property before `openStream`. ---
extern "C" {
    pub fn AAudioStreamBuilder_setDeviceId(builder: *mut AAudioStreamBuilder, deviceId: i32);
}
extern "C" {
    pub fn AAudioStreamBuilder_setPackageName(
        builder: *mut AAudioStreamBuilder,
        packageName: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setAttributionTag(
        builder: *mut AAudioStreamBuilder,
        attributionTag: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setSampleRate(builder: *mut AAudioStreamBuilder, sampleRate: i32);
}
extern "C" {
    pub fn AAudioStreamBuilder_setChannelCount(
        builder: *mut AAudioStreamBuilder,
        channelCount: i32,
    );
}
// NOTE(review): the C header treats samplesPerFrame as a legacy synonym for
// channel count — confirm against the NDK docs before relying on it.
extern "C" {
    pub fn AAudioStreamBuilder_setSamplesPerFrame(
        builder: *mut AAudioStreamBuilder,
        samplesPerFrame: i32,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setFormat(
        builder: *mut AAudioStreamBuilder,
        format: aaudio_format_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setSharingMode(
        builder: *mut AAudioStreamBuilder,
        sharingMode: aaudio_sharing_mode_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setDirection(
        builder: *mut AAudioStreamBuilder,
        direction: aaudio_direction_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setBufferCapacityInFrames(
        builder: *mut AAudioStreamBuilder,
        numFrames: i32,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setPerformanceMode(
        builder: *mut AAudioStreamBuilder,
        mode: aaudio_performance_mode_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setUsage(builder: *mut AAudioStreamBuilder, usage: aaudio_usage_t);
}
extern "C" {
    pub fn AAudioStreamBuilder_setContentType(
        builder: *mut AAudioStreamBuilder,
        contentType: aaudio_content_type_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setInputPreset(
        builder: *mut AAudioStreamBuilder,
        inputPreset: aaudio_input_preset_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setAllowedCapturePolicy(
        builder: *mut AAudioStreamBuilder,
        capturePolicy: aaudio_allowed_capture_policy_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setSessionId(
        builder: *mut AAudioStreamBuilder,
        sessionId: aaudio_session_id_t,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setPrivacySensitive(
        builder: *mut AAudioStreamBuilder,
        privacySensitive: bool,
    );
}
// ---------------------------------------------------------------------------
// AAudio callback types and their builder setters. Callbacks are nullable C
// function pointers, hence `Option<unsafe extern "C" fn(...)>` — `None`
// corresponds to a NULL callback on the C side.
// ---------------------------------------------------------------------------

// Return values for the data callback: keep streaming or stop.
pub const AAUDIO_CALLBACK_RESULT_CONTINUE: ::std::os::raw::c_uint = 0;
pub const AAUDIO_CALLBACK_RESULT_STOP: ::std::os::raw::c_uint = 1;
pub type _bindgen_ty_58 = ::std::os::raw::c_uint;
pub type aaudio_data_callback_result_t = i32;
// Invoked by AAudio to move `numFrames` of audio through `audioData`;
// `userData` is the opaque pointer registered with the setter below.
pub type AAudioStream_dataCallback = ::std::option::Option<
    unsafe extern "C" fn(
        stream: *mut AAudioStream,
        userData: *mut ::std::os::raw::c_void,
        audioData: *mut ::std::os::raw::c_void,
        numFrames: i32,
    ) -> aaudio_data_callback_result_t,
>;
extern "C" {
    pub fn AAudioStreamBuilder_setDataCallback(
        builder: *mut AAudioStreamBuilder,
        callback: AAudioStream_dataCallback,
        userData: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AAudioStreamBuilder_setFramesPerDataCallback(
        builder: *mut AAudioStreamBuilder,
        numFrames: i32,
    );
}
// Invoked when the stream hits an error (e.g. device disconnect); receives
// the failing stream, the registered user data, and the error code.
pub type AAudioStream_errorCallback = ::std::option::Option<
    unsafe extern "C" fn(
        stream: *mut AAudioStream,
        userData: *mut ::std::os::raw::c_void,
        error: aaudio_result_t,
    ),
>;
extern "C" {
    pub fn AAudioStreamBuilder_setErrorCallback(
        builder: *mut AAudioStreamBuilder,
        callback: AAudioStream_errorCallback,
        userData: *mut ::std::os::raw::c_void,
    );
}
// ---------------------------------------------------------------------------
// AAudio stream lifecycle, blocking I/O, and property-query FFI declarations.
// `openStream` consumes the builder configuration and yields a stream handle;
// the `request*` functions drive the stream state machine; the `get*`
// functions are read-only queries on an open stream.
// ---------------------------------------------------------------------------

// Open a stream configured by `builder`; on success `*stream` is set.
extern "C" {
    pub fn AAudioStreamBuilder_openStream(
        builder: *mut AAudioStreamBuilder,
        stream: *mut *mut AAudioStream,
    ) -> aaudio_result_t;
}
// Free the builder itself (streams opened from it are unaffected).
extern "C" {
    pub fn AAudioStreamBuilder_delete(builder: *mut AAudioStreamBuilder) -> aaudio_result_t;
}
// Release vs close: two-step teardown exposed by the C API.
extern "C" {
    pub fn AAudioStream_release(stream: *mut AAudioStream) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_close(stream: *mut AAudioStream) -> aaudio_result_t;
}
// --- Asynchronous state-change requests (start/pause/flush/stop). ---
extern "C" {
    pub fn AAudioStream_requestStart(stream: *mut AAudioStream) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_requestPause(stream: *mut AAudioStream) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_requestFlush(stream: *mut AAudioStream) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_requestStop(stream: *mut AAudioStream) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_getState(stream: *mut AAudioStream) -> aaudio_stream_state_t;
}
// Block (up to `timeoutNanoseconds`) until the state leaves `inputState`;
// the observed new state is written through `nextState`.
extern "C" {
    pub fn AAudioStream_waitForStateChange(
        stream: *mut AAudioStream,
        inputState: aaudio_stream_state_t,
        nextState: *mut aaudio_stream_state_t,
        timeoutNanoseconds: i64,
    ) -> aaudio_result_t;
}
// Blocking read/write of `numFrames` frames with a nanosecond timeout; the
// result encodes frames transferred or a negative error.
extern "C" {
    pub fn AAudioStream_read(
        stream: *mut AAudioStream,
        buffer: *mut ::std::os::raw::c_void,
        numFrames: i32,
        timeoutNanoseconds: i64,
    ) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_write(
        stream: *mut AAudioStream,
        buffer: *const ::std::os::raw::c_void,
        numFrames: i32,
        timeoutNanoseconds: i64,
    ) -> aaudio_result_t;
}
// --- Buffer tuning and read-only property queries. ---
extern "C" {
    pub fn AAudioStream_setBufferSizeInFrames(
        stream: *mut AAudioStream,
        numFrames: i32,
    ) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_getBufferSizeInFrames(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getFramesPerBurst(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getBufferCapacityInFrames(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getFramesPerDataCallback(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getXRunCount(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getSampleRate(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getChannelCount(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getSamplesPerFrame(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getDeviceId(stream: *mut AAudioStream) -> i32;
}
extern "C" {
    pub fn AAudioStream_getFormat(stream: *mut AAudioStream) -> aaudio_format_t;
}
extern "C" {
    pub fn AAudioStream_getSharingMode(stream: *mut AAudioStream) -> aaudio_sharing_mode_t;
}
extern "C" {
    pub fn AAudioStream_getPerformanceMode(stream: *mut AAudioStream) -> aaudio_performance_mode_t;
}
extern "C" {
    pub fn AAudioStream_getDirection(stream: *mut AAudioStream) -> aaudio_direction_t;
}
extern "C" {
    pub fn AAudioStream_getFramesWritten(stream: *mut AAudioStream) -> i64;
}
extern "C" {
    pub fn AAudioStream_getFramesRead(stream: *mut AAudioStream) -> i64;
}
extern "C" {
    pub fn AAudioStream_getSessionId(stream: *mut AAudioStream) -> aaudio_session_id_t;
}
// Sample a matched (frame position, time) pair for the given clock; both
// out-parameters are written on success.
extern "C" {
    pub fn AAudioStream_getTimestamp(
        stream: *mut AAudioStream,
        clockid: clockid_t,
        framePosition: *mut i64,
        timeNanoseconds: *mut i64,
    ) -> aaudio_result_t;
}
extern "C" {
    pub fn AAudioStream_getUsage(stream: *mut AAudioStream) -> aaudio_usage_t;
}
extern "C" {
    pub fn AAudioStream_getContentType(stream: *mut AAudioStream) -> aaudio_content_type_t;
}
extern "C" {
    pub fn AAudioStream_getInputPreset(stream: *mut AAudioStream) -> aaudio_input_preset_t;
}
extern "C" {
    pub fn AAudioStream_getAllowedCapturePolicy(
        stream: *mut AAudioStream,
    ) -> aaudio_allowed_capture_policy_t;
}
extern "C" {
    pub fn AAudioStream_isPrivacySensitive(stream: *mut AAudioStream) -> bool;
}
// ---------------------------------------------------------------------------
// media_status_t: status codes shared by the NDK media APIs (AMedia*, AMidi,
// AImageReader). 0 = success; error codes are grouped by negative base:
// -10000 general media, -20000 DRM, -30000 image reader. Positive 11xx codes
// are codec resource conditions. Values are ABI — do not edit.
// ---------------------------------------------------------------------------
pub const media_status_t_AMEDIA_OK: media_status_t = 0;
pub const media_status_t_AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE: media_status_t = 1100;
pub const media_status_t_AMEDIACODEC_ERROR_RECLAIMED: media_status_t = 1101;
// Note: ERROR_BASE and ERROR_UNKNOWN intentionally share the value -10000.
pub const media_status_t_AMEDIA_ERROR_BASE: media_status_t = -10000;
pub const media_status_t_AMEDIA_ERROR_UNKNOWN: media_status_t = -10000;
pub const media_status_t_AMEDIA_ERROR_MALFORMED: media_status_t = -10001;
pub const media_status_t_AMEDIA_ERROR_UNSUPPORTED: media_status_t = -10002;
pub const media_status_t_AMEDIA_ERROR_INVALID_OBJECT: media_status_t = -10003;
pub const media_status_t_AMEDIA_ERROR_INVALID_PARAMETER: media_status_t = -10004;
pub const media_status_t_AMEDIA_ERROR_INVALID_OPERATION: media_status_t = -10005;
pub const media_status_t_AMEDIA_ERROR_END_OF_STREAM: media_status_t = -10006;
pub const media_status_t_AMEDIA_ERROR_IO: media_status_t = -10007;
pub const media_status_t_AMEDIA_ERROR_WOULD_BLOCK: media_status_t = -10008;
pub const media_status_t_AMEDIA_DRM_ERROR_BASE: media_status_t = -20000;
pub const media_status_t_AMEDIA_DRM_NOT_PROVISIONED: media_status_t = -20001;
pub const media_status_t_AMEDIA_DRM_RESOURCE_BUSY: media_status_t = -20002;
pub const media_status_t_AMEDIA_DRM_DEVICE_REVOKED: media_status_t = -20003;
pub const media_status_t_AMEDIA_DRM_SHORT_BUFFER: media_status_t = -20004;
pub const media_status_t_AMEDIA_DRM_SESSION_NOT_OPENED: media_status_t = -20005;
pub const media_status_t_AMEDIA_DRM_TAMPER_DETECTED: media_status_t = -20006;
pub const media_status_t_AMEDIA_DRM_VERIFY_FAILED: media_status_t = -20007;
pub const media_status_t_AMEDIA_DRM_NEED_KEY: media_status_t = -20008;
pub const media_status_t_AMEDIA_DRM_LICENSE_EXPIRED: media_status_t = -20009;
pub const media_status_t_AMEDIA_IMGREADER_ERROR_BASE: media_status_t = -30000;
pub const media_status_t_AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE: media_status_t = -30001;
pub const media_status_t_AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED: media_status_t = -30002;
pub const media_status_t_AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE: media_status_t = -30003;
pub const media_status_t_AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE: media_status_t = -30004;
pub const media_status_t_AMEDIA_IMGREADER_IMAGE_NOT_LOCKED: media_status_t = -30005;
pub type media_status_t = ::std::os::raw::c_int;
// ---------------------------------------------------------------------------
// AMidi opaque handle types and constants (NDK native MIDI API). The structs
// use bindgen's zero-sized opaque pattern and are only used behind pointers.
// ---------------------------------------------------------------------------

// Opaque MIDI device handle (obtained from Java via `AMidiDevice_fromJava`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMidiDevice {
    _unused: [u8; 0],
}
// Opaque handle for a port this app sends MIDI data INTO.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMidiInputPort {
    _unused: [u8; 0],
}
// Opaque handle for a port this app receives MIDI data FROM.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMidiOutputPort {
    _unused: [u8; 0],
}
// Opcodes reported by `AMidiOutputPort_receive`: payload data vs. flush.
pub const AMIDI_OPCODE_DATA: ::std::os::raw::c_uint = 1;
pub const AMIDI_OPCODE_FLUSH: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_59 = ::std::os::raw::c_uint;
// Device transport types returned by `AMidiDevice_getType`.
pub const AMIDI_DEVICE_TYPE_USB: ::std::os::raw::c_uint = 1;
pub const AMIDI_DEVICE_TYPE_VIRTUAL: ::std::os::raw::c_uint = 2;
pub const AMIDI_DEVICE_TYPE_BLUETOOTH: ::std::os::raw::c_uint = 3;
pub type _bindgen_ty_60 = ::std::os::raw::c_uint;
// ---------------------------------------------------------------------------
// AMidi FFI declarations. Functions returning `media_status_t` report errors
// via that code; `ssize_t` returns encode a count or a negative error.
// ---------------------------------------------------------------------------

// Wrap a Java `android.media.midi.MidiDevice` object as a native handle;
// writes the new device pointer through `outDevicePtrPtr`.
extern "C" {
    pub fn AMidiDevice_fromJava(
        env: *mut JNIEnv,
        midiDeviceObj: jobject,
        outDevicePtrPtr: *mut *mut AMidiDevice,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMidiDevice_release(midiDevice: *const AMidiDevice) -> media_status_t;
}
// Returns one of the AMIDI_DEVICE_TYPE_* constants (or a negative error).
extern "C" {
    pub fn AMidiDevice_getType(device: *const AMidiDevice) -> i32;
}
extern "C" {
    pub fn AMidiDevice_getNumInputPorts(device: *const AMidiDevice) -> ssize_t;
}
extern "C" {
    pub fn AMidiDevice_getNumOutputPorts(device: *const AMidiDevice) -> ssize_t;
}
// --- Output ports: the app reads MIDI coming out of the device. ---
extern "C" {
    pub fn AMidiOutputPort_open(
        device: *const AMidiDevice,
        portNumber: i32,
        outOutputPortPtr: *mut *mut AMidiOutputPort,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMidiOutputPort_close(outputPort: *const AMidiOutputPort);
}
// Non-blocking receive: fills `buffer` (up to `maxBytes`) and reports the
// opcode, byte count, and timestamp through the out-pointers.
extern "C" {
    pub fn AMidiOutputPort_receive(
        outputPort: *const AMidiOutputPort,
        opcodePtr: *mut i32,
        buffer: *mut u8,
        maxBytes: size_t,
        numBytesReceivedPtr: *mut size_t,
        outTimestampPtr: *mut i64,
    ) -> ssize_t;
}
// --- Input ports: the app sends MIDI into the device. ---
extern "C" {
    pub fn AMidiInputPort_open(
        device: *const AMidiDevice,
        portNumber: i32,
        outInputPortPtr: *mut *mut AMidiInputPort,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMidiInputPort_send(
        inputPort: *const AMidiInputPort,
        buffer: *const u8,
        numBytes: size_t,
    ) -> ssize_t;
}
extern "C" {
    pub fn AMidiInputPort_sendWithTimestamp(
        inputPort: *const AMidiInputPort,
        buffer: *const u8,
        numBytes: size_t,
        timestamp: i64,
    ) -> ssize_t;
}
extern "C" {
    pub fn AMidiInputPort_sendFlush(inputPort: *const AMidiInputPort) -> media_status_t;
}
extern "C" {
    pub fn AMidiInputPort_close(inputPort: *const AMidiInputPort);
}
// ---------------------------------------------------------------------------
// camera_status_t: NDK Camera2 API status codes. 0 = success; errors descend
// from -10000. ERROR_BASE and ERROR_UNKNOWN intentionally share -10000.
// Values are ABI — do not edit.
// ---------------------------------------------------------------------------
pub const camera_status_t_ACAMERA_OK: camera_status_t = 0;
pub const camera_status_t_ACAMERA_ERROR_BASE: camera_status_t = -10000;
pub const camera_status_t_ACAMERA_ERROR_UNKNOWN: camera_status_t = -10000;
pub const camera_status_t_ACAMERA_ERROR_INVALID_PARAMETER: camera_status_t = -10001;
pub const camera_status_t_ACAMERA_ERROR_CAMERA_DISCONNECTED: camera_status_t = -10002;
pub const camera_status_t_ACAMERA_ERROR_NOT_ENOUGH_MEMORY: camera_status_t = -10003;
pub const camera_status_t_ACAMERA_ERROR_METADATA_NOT_FOUND: camera_status_t = -10004;
pub const camera_status_t_ACAMERA_ERROR_CAMERA_DEVICE: camera_status_t = -10005;
pub const camera_status_t_ACAMERA_ERROR_CAMERA_SERVICE: camera_status_t = -10006;
pub const camera_status_t_ACAMERA_ERROR_SESSION_CLOSED: camera_status_t = -10007;
pub const camera_status_t_ACAMERA_ERROR_INVALID_OPERATION: camera_status_t = -10008;
pub const camera_status_t_ACAMERA_ERROR_STREAM_CONFIGURE_FAIL: camera_status_t = -10009;
pub const camera_status_t_ACAMERA_ERROR_CAMERA_IN_USE: camera_status_t = -10010;
pub const camera_status_t_ACAMERA_ERROR_MAX_CAMERA_IN_USE: camera_status_t = -10011;
pub const camera_status_t_ACAMERA_ERROR_CAMERA_DISABLED: camera_status_t = -10012;
pub const camera_status_t_ACAMERA_ERROR_PERMISSION_DENIED: camera_status_t = -10013;
pub const camera_status_t_ACAMERA_ERROR_UNSUPPORTED_OPERATION: camera_status_t = -10014;
pub type camera_status_t = ::std::os::raw::c_int;
// ---------------------------------------------------------------------------
// acamera_metadata_section: top-level camera metadata section indices.
// Each section N owns the tag range starting at N << 16 (see the *_START
// constants that follow this enum). SECTION_COUNT = 30 is the number of
// framework sections; VENDOR (0x8000) marks vendor-extension space.
// ---------------------------------------------------------------------------
pub const acamera_metadata_section_ACAMERA_COLOR_CORRECTION: acamera_metadata_section = 0;
pub const acamera_metadata_section_ACAMERA_CONTROL: acamera_metadata_section = 1;
pub const acamera_metadata_section_ACAMERA_DEMOSAIC: acamera_metadata_section = 2;
pub const acamera_metadata_section_ACAMERA_EDGE: acamera_metadata_section = 3;
pub const acamera_metadata_section_ACAMERA_FLASH: acamera_metadata_section = 4;
pub const acamera_metadata_section_ACAMERA_FLASH_INFO: acamera_metadata_section = 5;
pub const acamera_metadata_section_ACAMERA_HOT_PIXEL: acamera_metadata_section = 6;
pub const acamera_metadata_section_ACAMERA_JPEG: acamera_metadata_section = 7;
pub const acamera_metadata_section_ACAMERA_LENS: acamera_metadata_section = 8;
pub const acamera_metadata_section_ACAMERA_LENS_INFO: acamera_metadata_section = 9;
pub const acamera_metadata_section_ACAMERA_NOISE_REDUCTION: acamera_metadata_section = 10;
pub const acamera_metadata_section_ACAMERA_QUIRKS: acamera_metadata_section = 11;
pub const acamera_metadata_section_ACAMERA_REQUEST: acamera_metadata_section = 12;
pub const acamera_metadata_section_ACAMERA_SCALER: acamera_metadata_section = 13;
pub const acamera_metadata_section_ACAMERA_SENSOR: acamera_metadata_section = 14;
pub const acamera_metadata_section_ACAMERA_SENSOR_INFO: acamera_metadata_section = 15;
pub const acamera_metadata_section_ACAMERA_SHADING: acamera_metadata_section = 16;
pub const acamera_metadata_section_ACAMERA_STATISTICS: acamera_metadata_section = 17;
pub const acamera_metadata_section_ACAMERA_STATISTICS_INFO: acamera_metadata_section = 18;
pub const acamera_metadata_section_ACAMERA_TONEMAP: acamera_metadata_section = 19;
pub const acamera_metadata_section_ACAMERA_LED: acamera_metadata_section = 20;
pub const acamera_metadata_section_ACAMERA_INFO: acamera_metadata_section = 21;
pub const acamera_metadata_section_ACAMERA_BLACK_LEVEL: acamera_metadata_section = 22;
pub const acamera_metadata_section_ACAMERA_SYNC: acamera_metadata_section = 23;
pub const acamera_metadata_section_ACAMERA_REPROCESS: acamera_metadata_section = 24;
pub const acamera_metadata_section_ACAMERA_DEPTH: acamera_metadata_section = 25;
pub const acamera_metadata_section_ACAMERA_LOGICAL_MULTI_CAMERA: acamera_metadata_section = 26;
pub const acamera_metadata_section_ACAMERA_DISTORTION_CORRECTION: acamera_metadata_section = 27;
pub const acamera_metadata_section_ACAMERA_HEIC: acamera_metadata_section = 28;
pub const acamera_metadata_section_ACAMERA_HEIC_INFO: acamera_metadata_section = 29;
pub const acamera_metadata_section_ACAMERA_SECTION_COUNT: acamera_metadata_section = 30;
pub const acamera_metadata_section_ACAMERA_VENDOR: acamera_metadata_section = 32768;
pub type acamera_metadata_section = ::std::os::raw::c_uint;
// Re-export under the `_t` name used by the C API's typedef.
pub use self::acamera_metadata_section as acamera_metadata_section_t;
// ---------------------------------------------------------------------------
// acamera_metadata_section_start: first tag value of each metadata section.
// Every value is the corresponding section index shifted left 16 bits
// (e.g. CONTROL = 1 -> 65536, SENSOR = 14 -> 917504). VENDOR_START is
// 0x8000 << 16, which wraps to -2147483648 in this signed (`c_int`) enum.
// Values are ABI — do not edit.
// ---------------------------------------------------------------------------
pub const acamera_metadata_section_start_ACAMERA_COLOR_CORRECTION_START:
    acamera_metadata_section_start = 0;
pub const acamera_metadata_section_start_ACAMERA_CONTROL_START: acamera_metadata_section_start =
    65536;
pub const acamera_metadata_section_start_ACAMERA_DEMOSAIC_START: acamera_metadata_section_start =
    131072;
pub const acamera_metadata_section_start_ACAMERA_EDGE_START: acamera_metadata_section_start =
    196608;
pub const acamera_metadata_section_start_ACAMERA_FLASH_START: acamera_metadata_section_start =
    262144;
pub const acamera_metadata_section_start_ACAMERA_FLASH_INFO_START: acamera_metadata_section_start =
    327680;
pub const acamera_metadata_section_start_ACAMERA_HOT_PIXEL_START: acamera_metadata_section_start =
    393216;
pub const acamera_metadata_section_start_ACAMERA_JPEG_START: acamera_metadata_section_start =
    458752;
pub const acamera_metadata_section_start_ACAMERA_LENS_START: acamera_metadata_section_start =
    524288;
pub const acamera_metadata_section_start_ACAMERA_LENS_INFO_START: acamera_metadata_section_start =
    589824;
pub const acamera_metadata_section_start_ACAMERA_NOISE_REDUCTION_START:
    acamera_metadata_section_start = 655360;
pub const acamera_metadata_section_start_ACAMERA_QUIRKS_START: acamera_metadata_section_start =
    720896;
pub const acamera_metadata_section_start_ACAMERA_REQUEST_START: acamera_metadata_section_start =
    786432;
pub const acamera_metadata_section_start_ACAMERA_SCALER_START: acamera_metadata_section_start =
    851968;
pub const acamera_metadata_section_start_ACAMERA_SENSOR_START: acamera_metadata_section_start =
    917504;
pub const acamera_metadata_section_start_ACAMERA_SENSOR_INFO_START: acamera_metadata_section_start =
    983040;
pub const acamera_metadata_section_start_ACAMERA_SHADING_START: acamera_metadata_section_start =
    1048576;
pub const acamera_metadata_section_start_ACAMERA_STATISTICS_START: acamera_metadata_section_start =
    1114112;
pub const acamera_metadata_section_start_ACAMERA_STATISTICS_INFO_START:
    acamera_metadata_section_start = 1179648;
pub const acamera_metadata_section_start_ACAMERA_TONEMAP_START: acamera_metadata_section_start =
    1245184;
pub const acamera_metadata_section_start_ACAMERA_LED_START: acamera_metadata_section_start =
    1310720;
pub const acamera_metadata_section_start_ACAMERA_INFO_START: acamera_metadata_section_start =
    1376256;
pub const acamera_metadata_section_start_ACAMERA_BLACK_LEVEL_START: acamera_metadata_section_start =
    1441792;
pub const acamera_metadata_section_start_ACAMERA_SYNC_START: acamera_metadata_section_start =
    1507328;
pub const acamera_metadata_section_start_ACAMERA_REPROCESS_START: acamera_metadata_section_start =
    1572864;
pub const acamera_metadata_section_start_ACAMERA_DEPTH_START: acamera_metadata_section_start =
    1638400;
pub const acamera_metadata_section_start_ACAMERA_LOGICAL_MULTI_CAMERA_START:
    acamera_metadata_section_start = 1703936;
pub const acamera_metadata_section_start_ACAMERA_DISTORTION_CORRECTION_START:
    acamera_metadata_section_start = 1769472;
pub const acamera_metadata_section_start_ACAMERA_HEIC_START: acamera_metadata_section_start =
    1835008;
pub const acamera_metadata_section_start_ACAMERA_HEIC_INFO_START: acamera_metadata_section_start =
    1900544;
pub const acamera_metadata_section_start_ACAMERA_VENDOR_START: acamera_metadata_section_start =
    -2147483648;
pub type acamera_metadata_section_start = ::std::os::raw::c_int;
// Re-export under the `_t` name used by the C API's typedef.
pub use self::acamera_metadata_section_start as acamera_metadata_section_start_t;
// ---------------------------------------------------------------------------
// acamera_metadata_tag: individual camera metadata tag ids. Each tag's value
// is its section's *_START base plus an offset; gaps in the offsets mirror
// tags that the NDK does not expose. The `*_END` constants mark one past the
// last tag of a section. The enum continues beyond this chunk.
// Values are ABI — do not edit.
// ---------------------------------------------------------------------------

// --- COLOR_CORRECTION section (base 0). ---
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_MODE: acamera_metadata_tag = 0;
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_TRANSFORM: acamera_metadata_tag = 1;
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_GAINS: acamera_metadata_tag = 2;
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_ABERRATION_MODE: acamera_metadata_tag = 3;
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES:
    acamera_metadata_tag = 4;
pub const acamera_metadata_tag_ACAMERA_COLOR_CORRECTION_END: acamera_metadata_tag = 5;
// --- CONTROL section (base 65536). ---
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_ANTIBANDING_MODE: acamera_metadata_tag = 65536;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION: acamera_metadata_tag =
    65537;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_LOCK: acamera_metadata_tag = 65538;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_MODE: acamera_metadata_tag = 65539;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_REGIONS: acamera_metadata_tag = 65540;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_TARGET_FPS_RANGE: acamera_metadata_tag = 65541;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER: acamera_metadata_tag = 65542;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_MODE: acamera_metadata_tag = 65543;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_REGIONS: acamera_metadata_tag = 65544;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_TRIGGER: acamera_metadata_tag = 65545;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_LOCK: acamera_metadata_tag = 65546;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_MODE: acamera_metadata_tag = 65547;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_REGIONS: acamera_metadata_tag = 65548;
pub const acamera_metadata_tag_ACAMERA_CONTROL_CAPTURE_INTENT: acamera_metadata_tag = 65549;
pub const acamera_metadata_tag_ACAMERA_CONTROL_EFFECT_MODE: acamera_metadata_tag = 65550;
pub const acamera_metadata_tag_ACAMERA_CONTROL_MODE: acamera_metadata_tag = 65551;
pub const acamera_metadata_tag_ACAMERA_CONTROL_SCENE_MODE: acamera_metadata_tag = 65552;
pub const acamera_metadata_tag_ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE: acamera_metadata_tag =
    65553;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES:
    acamera_metadata_tag = 65554;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_AVAILABLE_MODES: acamera_metadata_tag = 65555;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES:
    acamera_metadata_tag = 65556;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_COMPENSATION_RANGE: acamera_metadata_tag = 65557;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_COMPENSATION_STEP: acamera_metadata_tag = 65558;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_AVAILABLE_MODES: acamera_metadata_tag = 65559;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_EFFECTS: acamera_metadata_tag = 65560;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_SCENE_MODES: acamera_metadata_tag = 65561;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES:
    acamera_metadata_tag = 65562;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_AVAILABLE_MODES: acamera_metadata_tag = 65563;
pub const acamera_metadata_tag_ACAMERA_CONTROL_MAX_REGIONS: acamera_metadata_tag = 65564;
// Offsets 65565-65566, 65569, 65571 are skipped (tags not exposed in the NDK).
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_STATE: acamera_metadata_tag = 65567;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_STATE: acamera_metadata_tag = 65568;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_STATE: acamera_metadata_tag = 65570;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AE_LOCK_AVAILABLE: acamera_metadata_tag = 65572;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AWB_LOCK_AVAILABLE: acamera_metadata_tag = 65573;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_MODES: acamera_metadata_tag = 65574;
pub const acamera_metadata_tag_ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE:
    acamera_metadata_tag = 65575;
pub const acamera_metadata_tag_ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST: acamera_metadata_tag =
    65576;
pub const acamera_metadata_tag_ACAMERA_CONTROL_ENABLE_ZSL: acamera_metadata_tag = 65577;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AF_SCENE_CHANGE: acamera_metadata_tag = 65578;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES:
    acamera_metadata_tag = 65579;
pub const acamera_metadata_tag_ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES:
    acamera_metadata_tag = 65580;
pub const acamera_metadata_tag_ACAMERA_CONTROL_EXTENDED_SCENE_MODE: acamera_metadata_tag = 65581;
pub const acamera_metadata_tag_ACAMERA_CONTROL_ZOOM_RATIO_RANGE: acamera_metadata_tag = 65582;
pub const acamera_metadata_tag_ACAMERA_CONTROL_ZOOM_RATIO: acamera_metadata_tag = 65583;
pub const acamera_metadata_tag_ACAMERA_CONTROL_END: acamera_metadata_tag = 65584;
// --- EDGE section (base 196608). ---
pub const acamera_metadata_tag_ACAMERA_EDGE_MODE: acamera_metadata_tag = 196608;
pub const acamera_metadata_tag_ACAMERA_EDGE_AVAILABLE_EDGE_MODES: acamera_metadata_tag = 196610;
pub const acamera_metadata_tag_ACAMERA_EDGE_END: acamera_metadata_tag = 196611;
// --- FLASH and FLASH_INFO sections (bases 262144 / 327680). ---
pub const acamera_metadata_tag_ACAMERA_FLASH_MODE: acamera_metadata_tag = 262146;
pub const acamera_metadata_tag_ACAMERA_FLASH_STATE: acamera_metadata_tag = 262149;
pub const acamera_metadata_tag_ACAMERA_FLASH_END: acamera_metadata_tag = 262150;
pub const acamera_metadata_tag_ACAMERA_FLASH_INFO_AVAILABLE: acamera_metadata_tag = 327680;
pub const acamera_metadata_tag_ACAMERA_FLASH_INFO_END: acamera_metadata_tag = 327681;
// --- HOT_PIXEL section (base 393216). ---
pub const acamera_metadata_tag_ACAMERA_HOT_PIXEL_MODE: acamera_metadata_tag = 393216;
pub const acamera_metadata_tag_ACAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES: acamera_metadata_tag =
    393217;
pub const acamera_metadata_tag_ACAMERA_HOT_PIXEL_END: acamera_metadata_tag = 393218;
// --- JPEG section (base 458752). ---
pub const acamera_metadata_tag_ACAMERA_JPEG_GPS_COORDINATES: acamera_metadata_tag = 458752;
pub const acamera_metadata_tag_ACAMERA_JPEG_GPS_PROCESSING_METHOD: acamera_metadata_tag = 458753;
pub const acamera_metadata_tag_ACAMERA_JPEG_GPS_TIMESTAMP: acamera_metadata_tag = 458754;
pub const acamera_metadata_tag_ACAMERA_JPEG_ORIENTATION: acamera_metadata_tag = 458755;
pub const acamera_metadata_tag_ACAMERA_JPEG_QUALITY: acamera_metadata_tag = 458756;
pub const acamera_metadata_tag_ACAMERA_JPEG_THUMBNAIL_QUALITY: acamera_metadata_tag = 458757;
pub const acamera_metadata_tag_ACAMERA_JPEG_THUMBNAIL_SIZE: acamera_metadata_tag = 458758;
pub const acamera_metadata_tag_ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES: acamera_metadata_tag =
    458759;
pub const acamera_metadata_tag_ACAMERA_JPEG_END: acamera_metadata_tag = 458760;
// --- LENS and LENS_INFO sections (bases 524288 / 589824). ---
pub const acamera_metadata_tag_ACAMERA_LENS_APERTURE: acamera_metadata_tag = 524288;
pub const acamera_metadata_tag_ACAMERA_LENS_FILTER_DENSITY: acamera_metadata_tag = 524289;
pub const acamera_metadata_tag_ACAMERA_LENS_FOCAL_LENGTH: acamera_metadata_tag = 524290;
pub const acamera_metadata_tag_ACAMERA_LENS_FOCUS_DISTANCE: acamera_metadata_tag = 524291;
pub const acamera_metadata_tag_ACAMERA_LENS_OPTICAL_STABILIZATION_MODE: acamera_metadata_tag =
    524292;
pub const acamera_metadata_tag_ACAMERA_LENS_FACING: acamera_metadata_tag = 524293;
pub const acamera_metadata_tag_ACAMERA_LENS_POSE_ROTATION: acamera_metadata_tag = 524294;
pub const acamera_metadata_tag_ACAMERA_LENS_POSE_TRANSLATION: acamera_metadata_tag = 524295;
pub const acamera_metadata_tag_ACAMERA_LENS_FOCUS_RANGE: acamera_metadata_tag = 524296;
pub const acamera_metadata_tag_ACAMERA_LENS_STATE: acamera_metadata_tag = 524297;
pub const acamera_metadata_tag_ACAMERA_LENS_INTRINSIC_CALIBRATION: acamera_metadata_tag = 524298;
pub const acamera_metadata_tag_ACAMERA_LENS_RADIAL_DISTORTION: acamera_metadata_tag = 524299;
pub const acamera_metadata_tag_ACAMERA_LENS_POSE_REFERENCE: acamera_metadata_tag = 524300;
pub const acamera_metadata_tag_ACAMERA_LENS_DISTORTION: acamera_metadata_tag = 524301;
pub const acamera_metadata_tag_ACAMERA_LENS_DISTORTION_MAXIMUM_RESOLUTION: acamera_metadata_tag =
    524302;
pub const acamera_metadata_tag_ACAMERA_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION:
    acamera_metadata_tag = 524303;
pub const acamera_metadata_tag_ACAMERA_LENS_END: acamera_metadata_tag = 524304;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_AVAILABLE_APERTURES: acamera_metadata_tag = 589824;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES: acamera_metadata_tag =
    589825;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS: acamera_metadata_tag =
    589826;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION:
    acamera_metadata_tag = 589827;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE: acamera_metadata_tag = 589828;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE: acamera_metadata_tag =
    589829;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_SHADING_MAP_SIZE: acamera_metadata_tag = 589830;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION: acamera_metadata_tag =
    589831;
pub const acamera_metadata_tag_ACAMERA_LENS_INFO_END: acamera_metadata_tag = 589832;
// --- NOISE_REDUCTION section (base 655360). ---
pub const acamera_metadata_tag_ACAMERA_NOISE_REDUCTION_MODE: acamera_metadata_tag = 655360;
pub const acamera_metadata_tag_ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES:
    acamera_metadata_tag = 655362;
pub const acamera_metadata_tag_ACAMERA_NOISE_REDUCTION_END: acamera_metadata_tag = 655363;
// --- REQUEST section (base 786432). ---
pub const acamera_metadata_tag_ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS: acamera_metadata_tag =
    786438;
pub const acamera_metadata_tag_ACAMERA_REQUEST_PIPELINE_DEPTH: acamera_metadata_tag = 786441;
pub const acamera_metadata_tag_ACAMERA_REQUEST_PIPELINE_MAX_DEPTH: acamera_metadata_tag = 786442;
pub const acamera_metadata_tag_ACAMERA_REQUEST_PARTIAL_RESULT_COUNT: acamera_metadata_tag = 786443;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES: acamera_metadata_tag =
    786444;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS: acamera_metadata_tag =
    786445;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS: acamera_metadata_tag = 786446;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS:
    acamera_metadata_tag = 786447;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS: acamera_metadata_tag =
    786448;
pub const acamera_metadata_tag_ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS:
    acamera_metadata_tag = 786449;
pub const acamera_metadata_tag_ACAMERA_REQUEST_END: acamera_metadata_tag = 786450;
// --- SCALER section (base 851968). ---
pub const acamera_metadata_tag_ACAMERA_SCALER_CROP_REGION: acamera_metadata_tag = 851968;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM: acamera_metadata_tag =
    851972;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
    acamera_metadata_tag = 851978;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS: acamera_metadata_tag =
    851979;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS: acamera_metadata_tag =
    851980;
pub const acamera_metadata_tag_ACAMERA_SCALER_CROPPING_TYPE: acamera_metadata_tag = 851981;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS:
    acamera_metadata_tag = 851982;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP:
    acamera_metadata_tag = 851983;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES:
    acamera_metadata_tag = 851984;
pub const acamera_metadata_tag_ACAMERA_SCALER_ROTATE_AND_CROP: acamera_metadata_tag = 851985;
pub const acamera_metadata_tag_ACAMERA_SCALER_DEFAULT_SECURE_IMAGE_SIZE: acamera_metadata_tag =
    851986;
// (This over-long identifier forced bindgen's formatter into spaced layout;
// kept verbatim since the bytes come from the generator.)
pub const acamera_metadata_tag_ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS : acamera_metadata_tag = 851987 ;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION:
    acamera_metadata_tag = 851988;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION:
    acamera_metadata_tag = 851989;
pub const acamera_metadata_tag_ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION:
    acamera_metadata_tag = 851990;
pub const acamera_metadata_tag_ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED:
    acamera_metadata_tag = 851992;
pub const acamera_metadata_tag_ACAMERA_SCALER_END: acamera_metadata_tag = 851993;
// --- SENSOR section (base 917504; continues past this chunk). ---
pub const acamera_metadata_tag_ACAMERA_SENSOR_EXPOSURE_TIME: acamera_metadata_tag = 917504;
pub const acamera_metadata_tag_ACAMERA_SENSOR_FRAME_DURATION: acamera_metadata_tag = 917505;
pub const acamera_metadata_tag_ACAMERA_SENSOR_SENSITIVITY: acamera_metadata_tag = 917506;
pub const acamera_metadata_tag_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1: acamera_metadata_tag = 917507;
pub const acamera_metadata_tag_ACAMERA_SENSOR_REFERENCE_ILLUMINANT2: acamera_metadata_tag = 917508;
pub const acamera_metadata_tag_ACAMERA_SENSOR_CALIBRATION_TRANSFORM1: acamera_metadata_tag = 917509;
pub const acamera_metadata_tag_ACAMERA_SENSOR_CALIBRATION_TRANSFORM2: acamera_metadata_tag = 917510;
pub const acamera_metadata_tag_ACAMERA_SENSOR_COLOR_TRANSFORM1: acamera_metadata_tag = 917511;
pub const acamera_metadata_tag_ACAMERA_SENSOR_COLOR_TRANSFORM2: acamera_metadata_tag = 917512;
pub const acamera_metadata_tag_ACAMERA_SENSOR_FORWARD_MATRIX1: acamera_metadata_tag = 917513;
pub const acamera_metadata_tag_ACAMERA_SENSOR_FORWARD_MATRIX2: acamera_metadata_tag = 917514;
pub const acamera_metadata_tag_ACAMERA_SENSOR_BLACK_LEVEL_PATTERN: acamera_metadata_tag = 917516;
pub const acamera_metadata_tag_ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY: acamera_metadata_tag = 917517;
pub const acamera_metadata_tag_ACAMERA_SENSOR_ORIENTATION: acamera_metadata_tag = 917518;
pub const acamera_metadata_tag_ACAMERA_SENSOR_TIMESTAMP: acamera_metadata_tag = 917520;
pub const acamera_metadata_tag_ACAMERA_SENSOR_NEUTRAL_COLOR_POINT: acamera_metadata_tag = 917522;
pub const acamera_metadata_tag_ACAMERA_SENSOR_NOISE_PROFILE: acamera_metadata_tag = 917523;
pub const acamera_metadata_tag_ACAMERA_SENSOR_GREEN_SPLIT: acamera_metadata_tag = 917526;
pub const acamera_metadata_tag_ACAMERA_SENSOR_TEST_PATTERN_DATA: acamera_metadata_tag = 917527;
pub const acamera_metadata_tag_ACAMERA_SENSOR_TEST_PATTERN_MODE: acamera_metadata_tag = 917528;
pub const acamera_metadata_tag_ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES: acamera_metadata_tag =
917529;
pub const acamera_metadata_tag_ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW: acamera_metadata_tag = 917530;
pub const acamera_metadata_tag_ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS: acamera_metadata_tag = 917531;
pub const acamera_metadata_tag_ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL: acamera_metadata_tag = 917532;
pub const acamera_metadata_tag_ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL: acamera_metadata_tag = 917533;
pub const acamera_metadata_tag_ACAMERA_SENSOR_PIXEL_MODE: acamera_metadata_tag = 917536;
pub const acamera_metadata_tag_ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED: acamera_metadata_tag =
917537;
pub const acamera_metadata_tag_ACAMERA_SENSOR_END: acamera_metadata_tag = 917538;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE: acamera_metadata_tag = 983040;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE: acamera_metadata_tag = 983041;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT: acamera_metadata_tag =
983042;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE: acamera_metadata_tag =
983043;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION: acamera_metadata_tag =
983044;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_PHYSICAL_SIZE: acamera_metadata_tag = 983045;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE: acamera_metadata_tag = 983046;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_WHITE_LEVEL: acamera_metadata_tag = 983047;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE: acamera_metadata_tag = 983048;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED: acamera_metadata_tag =
983049;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
acamera_metadata_tag = 983050;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION:
acamera_metadata_tag = 983051;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION:
acamera_metadata_tag = 983052;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION : acamera_metadata_tag = 983053 ;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_BINNING_FACTOR: acamera_metadata_tag = 983054;
pub const acamera_metadata_tag_ACAMERA_SENSOR_INFO_END: acamera_metadata_tag = 983055;
pub const acamera_metadata_tag_ACAMERA_SHADING_MODE: acamera_metadata_tag = 1048576;
pub const acamera_metadata_tag_ACAMERA_SHADING_AVAILABLE_MODES: acamera_metadata_tag = 1048578;
pub const acamera_metadata_tag_ACAMERA_SHADING_END: acamera_metadata_tag = 1048579;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_FACE_DETECT_MODE: acamera_metadata_tag = 1114112;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE: acamera_metadata_tag =
1114115;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_FACE_IDS: acamera_metadata_tag = 1114116;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_FACE_LANDMARKS: acamera_metadata_tag = 1114117;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_FACE_RECTANGLES: acamera_metadata_tag = 1114118;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_FACE_SCORES: acamera_metadata_tag = 1114119;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_LENS_SHADING_MAP: acamera_metadata_tag = 1114123;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_SCENE_FLICKER: acamera_metadata_tag = 1114126;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_HOT_PIXEL_MAP: acamera_metadata_tag = 1114127;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE: acamera_metadata_tag =
1114128;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_OIS_DATA_MODE: acamera_metadata_tag = 1114129;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_OIS_TIMESTAMPS: acamera_metadata_tag = 1114130;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_OIS_X_SHIFTS: acamera_metadata_tag = 1114131;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_OIS_Y_SHIFTS: acamera_metadata_tag = 1114132;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_END: acamera_metadata_tag = 1114133;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES:
acamera_metadata_tag = 1179648;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT: acamera_metadata_tag =
1179650;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES:
acamera_metadata_tag = 1179654;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES:
acamera_metadata_tag = 1179655;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES:
acamera_metadata_tag = 1179656;
pub const acamera_metadata_tag_ACAMERA_STATISTICS_INFO_END: acamera_metadata_tag = 1179657;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_CURVE_BLUE: acamera_metadata_tag = 1245184;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_CURVE_GREEN: acamera_metadata_tag = 1245185;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_CURVE_RED: acamera_metadata_tag = 1245186;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_MODE: acamera_metadata_tag = 1245187;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_MAX_CURVE_POINTS: acamera_metadata_tag = 1245188;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES: acamera_metadata_tag =
1245189;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_GAMMA: acamera_metadata_tag = 1245190;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_PRESET_CURVE: acamera_metadata_tag = 1245191;
pub const acamera_metadata_tag_ACAMERA_TONEMAP_END: acamera_metadata_tag = 1245192;
pub const acamera_metadata_tag_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL: acamera_metadata_tag =
1376256;
pub const acamera_metadata_tag_ACAMERA_INFO_VERSION: acamera_metadata_tag = 1376257;
pub const acamera_metadata_tag_ACAMERA_INFO_END: acamera_metadata_tag = 1376258;
pub const acamera_metadata_tag_ACAMERA_BLACK_LEVEL_LOCK: acamera_metadata_tag = 1441792;
pub const acamera_metadata_tag_ACAMERA_BLACK_LEVEL_END: acamera_metadata_tag = 1441793;
pub const acamera_metadata_tag_ACAMERA_SYNC_FRAME_NUMBER: acamera_metadata_tag = 1507328;
pub const acamera_metadata_tag_ACAMERA_SYNC_MAX_LATENCY: acamera_metadata_tag = 1507329;
pub const acamera_metadata_tag_ACAMERA_SYNC_END: acamera_metadata_tag = 1507330;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
acamera_metadata_tag = 1638401;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
acamera_metadata_tag = 1638402;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS: acamera_metadata_tag =
1638403;
pub const acamera_metadata_tag_ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE: acamera_metadata_tag = 1638404;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS:
acamera_metadata_tag = 1638405;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
acamera_metadata_tag = 1638406;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
acamera_metadata_tag = 1638407;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
acamera_metadata_tag = 1638408;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1638409 ;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1638410 ;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION:
acamera_metadata_tag = 1638411;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1638412 ;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1638413 ;
pub const acamera_metadata_tag_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1638414 ;
pub const acamera_metadata_tag_ACAMERA_DEPTH_END: acamera_metadata_tag = 1638415;
pub const acamera_metadata_tag_ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: acamera_metadata_tag =
1703936;
pub const acamera_metadata_tag_ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE: acamera_metadata_tag =
1703937;
pub const acamera_metadata_tag_ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID:
acamera_metadata_tag = 1703938;
pub const acamera_metadata_tag_ACAMERA_LOGICAL_MULTI_CAMERA_END: acamera_metadata_tag = 1703939;
pub const acamera_metadata_tag_ACAMERA_DISTORTION_CORRECTION_MODE: acamera_metadata_tag = 1769472;
pub const acamera_metadata_tag_ACAMERA_DISTORTION_CORRECTION_AVAILABLE_MODES: acamera_metadata_tag =
1769473;
pub const acamera_metadata_tag_ACAMERA_DISTORTION_CORRECTION_END: acamera_metadata_tag = 1769474;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
acamera_metadata_tag = 1835008;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
acamera_metadata_tag = 1835009;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS: acamera_metadata_tag =
1835010;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1835011 ;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION : acamera_metadata_tag = 1835012 ;
pub const acamera_metadata_tag_ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION:
acamera_metadata_tag = 1835013;
pub const acamera_metadata_tag_ACAMERA_HEIC_END: acamera_metadata_tag = 1835014;
pub type acamera_metadata_tag = ::std::os::raw::c_uint;
pub use self::acamera_metadata_tag as acamera_metadata_tag_t;
pub const acamera_metadata_enum_acamera_color_correction_mode_ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX : acamera_metadata_enum_acamera_color_correction_mode = 0 ;
pub const acamera_metadata_enum_acamera_color_correction_mode_ACAMERA_COLOR_CORRECTION_MODE_FAST:
acamera_metadata_enum_acamera_color_correction_mode = 1;
pub const acamera_metadata_enum_acamera_color_correction_mode_ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY : acamera_metadata_enum_acamera_color_correction_mode = 2 ;
pub type acamera_metadata_enum_acamera_color_correction_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_color_correction_mode as acamera_metadata_enum_android_color_correction_mode_t;
pub const acamera_metadata_enum_acamera_color_correction_aberration_mode_ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_OFF : acamera_metadata_enum_acamera_color_correction_aberration_mode = 0 ;
pub const acamera_metadata_enum_acamera_color_correction_aberration_mode_ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_FAST : acamera_metadata_enum_acamera_color_correction_aberration_mode = 1 ;
pub const acamera_metadata_enum_acamera_color_correction_aberration_mode_ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY : acamera_metadata_enum_acamera_color_correction_aberration_mode = 2 ;
pub type acamera_metadata_enum_acamera_color_correction_aberration_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_color_correction_aberration_mode as acamera_metadata_enum_android_color_correction_aberration_mode_t;
pub const acamera_metadata_enum_acamera_control_ae_antibanding_mode_ACAMERA_CONTROL_AE_ANTIBANDING_MODE_OFF : acamera_metadata_enum_acamera_control_ae_antibanding_mode = 0 ;
pub const acamera_metadata_enum_acamera_control_ae_antibanding_mode_ACAMERA_CONTROL_AE_ANTIBANDING_MODE_50HZ : acamera_metadata_enum_acamera_control_ae_antibanding_mode = 1 ;
pub const acamera_metadata_enum_acamera_control_ae_antibanding_mode_ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ : acamera_metadata_enum_acamera_control_ae_antibanding_mode = 2 ;
pub const acamera_metadata_enum_acamera_control_ae_antibanding_mode_ACAMERA_CONTROL_AE_ANTIBANDING_MODE_AUTO : acamera_metadata_enum_acamera_control_ae_antibanding_mode = 3 ;
pub type acamera_metadata_enum_acamera_control_ae_antibanding_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_antibanding_mode as acamera_metadata_enum_android_control_ae_antibanding_mode_t;
pub const acamera_metadata_enum_acamera_control_ae_lock_ACAMERA_CONTROL_AE_LOCK_OFF:
acamera_metadata_enum_acamera_control_ae_lock = 0;
pub const acamera_metadata_enum_acamera_control_ae_lock_ACAMERA_CONTROL_AE_LOCK_ON:
acamera_metadata_enum_acamera_control_ae_lock = 1;
pub type acamera_metadata_enum_acamera_control_ae_lock = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_lock as acamera_metadata_enum_android_control_ae_lock_t;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_OFF:
acamera_metadata_enum_acamera_control_ae_mode = 0;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_ON:
acamera_metadata_enum_acamera_control_ae_mode = 1;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH:
acamera_metadata_enum_acamera_control_ae_mode = 2;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
acamera_metadata_enum_acamera_control_ae_mode = 3;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE : acamera_metadata_enum_acamera_control_ae_mode = 4 ;
pub const acamera_metadata_enum_acamera_control_ae_mode_ACAMERA_CONTROL_AE_MODE_ON_EXTERNAL_FLASH : acamera_metadata_enum_acamera_control_ae_mode = 5 ;
pub type acamera_metadata_enum_acamera_control_ae_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_mode as acamera_metadata_enum_android_control_ae_mode_t;
pub const acamera_metadata_enum_acamera_control_ae_precapture_trigger_ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE : acamera_metadata_enum_acamera_control_ae_precapture_trigger = 0 ;
pub const acamera_metadata_enum_acamera_control_ae_precapture_trigger_ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START : acamera_metadata_enum_acamera_control_ae_precapture_trigger = 1 ;
pub const acamera_metadata_enum_acamera_control_ae_precapture_trigger_ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL : acamera_metadata_enum_acamera_control_ae_precapture_trigger = 2 ;
pub type acamera_metadata_enum_acamera_control_ae_precapture_trigger = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_precapture_trigger as acamera_metadata_enum_android_control_ae_precapture_trigger_t;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_OFF:
acamera_metadata_enum_acamera_control_af_mode = 0;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_AUTO:
acamera_metadata_enum_acamera_control_af_mode = 1;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_MACRO:
acamera_metadata_enum_acamera_control_af_mode = 2;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
acamera_metadata_enum_acamera_control_af_mode = 3;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE : acamera_metadata_enum_acamera_control_af_mode = 4 ;
pub const acamera_metadata_enum_acamera_control_af_mode_ACAMERA_CONTROL_AF_MODE_EDOF:
acamera_metadata_enum_acamera_control_af_mode = 5;
pub type acamera_metadata_enum_acamera_control_af_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_af_mode as acamera_metadata_enum_android_control_af_mode_t;
pub const acamera_metadata_enum_acamera_control_af_trigger_ACAMERA_CONTROL_AF_TRIGGER_IDLE:
acamera_metadata_enum_acamera_control_af_trigger = 0;
pub const acamera_metadata_enum_acamera_control_af_trigger_ACAMERA_CONTROL_AF_TRIGGER_START:
acamera_metadata_enum_acamera_control_af_trigger = 1;
pub const acamera_metadata_enum_acamera_control_af_trigger_ACAMERA_CONTROL_AF_TRIGGER_CANCEL:
acamera_metadata_enum_acamera_control_af_trigger = 2;
pub type acamera_metadata_enum_acamera_control_af_trigger = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_af_trigger as acamera_metadata_enum_android_control_af_trigger_t;
pub const acamera_metadata_enum_acamera_control_awb_lock_ACAMERA_CONTROL_AWB_LOCK_OFF:
acamera_metadata_enum_acamera_control_awb_lock = 0;
pub const acamera_metadata_enum_acamera_control_awb_lock_ACAMERA_CONTROL_AWB_LOCK_ON:
acamera_metadata_enum_acamera_control_awb_lock = 1;
pub type acamera_metadata_enum_acamera_control_awb_lock = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_awb_lock as acamera_metadata_enum_android_control_awb_lock_t;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_OFF:
acamera_metadata_enum_acamera_control_awb_mode = 0;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_AUTO:
acamera_metadata_enum_acamera_control_awb_mode = 1;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_INCANDESCENT:
acamera_metadata_enum_acamera_control_awb_mode = 2;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_FLUORESCENT:
acamera_metadata_enum_acamera_control_awb_mode = 3;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_WARM_FLUORESCENT : acamera_metadata_enum_acamera_control_awb_mode = 4 ;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_DAYLIGHT:
acamera_metadata_enum_acamera_control_awb_mode = 5;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT : acamera_metadata_enum_acamera_control_awb_mode = 6 ;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_TWILIGHT:
acamera_metadata_enum_acamera_control_awb_mode = 7;
pub const acamera_metadata_enum_acamera_control_awb_mode_ACAMERA_CONTROL_AWB_MODE_SHADE:
acamera_metadata_enum_acamera_control_awb_mode = 8;
pub type acamera_metadata_enum_acamera_control_awb_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_awb_mode as acamera_metadata_enum_android_control_awb_mode_t;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM : acamera_metadata_enum_acamera_control_capture_intent = 0 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW : acamera_metadata_enum_acamera_control_capture_intent = 1 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE : acamera_metadata_enum_acamera_control_capture_intent = 2 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD : acamera_metadata_enum_acamera_control_capture_intent = 3 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT : acamera_metadata_enum_acamera_control_capture_intent = 4 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG : acamera_metadata_enum_acamera_control_capture_intent = 5 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL : acamera_metadata_enum_acamera_control_capture_intent = 6 ;
pub const acamera_metadata_enum_acamera_control_capture_intent_ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING : acamera_metadata_enum_acamera_control_capture_intent = 7 ;
pub type acamera_metadata_enum_acamera_control_capture_intent = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_capture_intent as acamera_metadata_enum_android_control_capture_intent_t;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_OFF:
acamera_metadata_enum_acamera_control_effect_mode = 0;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_MONO:
acamera_metadata_enum_acamera_control_effect_mode = 1;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_NEGATIVE:
acamera_metadata_enum_acamera_control_effect_mode = 2;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_SOLARIZE:
acamera_metadata_enum_acamera_control_effect_mode = 3;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_SEPIA:
acamera_metadata_enum_acamera_control_effect_mode = 4;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_POSTERIZE : acamera_metadata_enum_acamera_control_effect_mode = 5 ;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_WHITEBOARD : acamera_metadata_enum_acamera_control_effect_mode = 6 ;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_BLACKBOARD : acamera_metadata_enum_acamera_control_effect_mode = 7 ;
pub const acamera_metadata_enum_acamera_control_effect_mode_ACAMERA_CONTROL_EFFECT_MODE_AQUA:
acamera_metadata_enum_acamera_control_effect_mode = 8;
pub type acamera_metadata_enum_acamera_control_effect_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_effect_mode as acamera_metadata_enum_android_control_effect_mode_t;
pub const acamera_metadata_enum_acamera_control_mode_ACAMERA_CONTROL_MODE_OFF:
acamera_metadata_enum_acamera_control_mode = 0;
pub const acamera_metadata_enum_acamera_control_mode_ACAMERA_CONTROL_MODE_AUTO:
acamera_metadata_enum_acamera_control_mode = 1;
pub const acamera_metadata_enum_acamera_control_mode_ACAMERA_CONTROL_MODE_USE_SCENE_MODE:
acamera_metadata_enum_acamera_control_mode = 2;
pub const acamera_metadata_enum_acamera_control_mode_ACAMERA_CONTROL_MODE_OFF_KEEP_STATE:
acamera_metadata_enum_acamera_control_mode = 3;
pub const acamera_metadata_enum_acamera_control_mode_ACAMERA_CONTROL_MODE_USE_EXTENDED_SCENE_MODE : acamera_metadata_enum_acamera_control_mode = 4 ;
pub type acamera_metadata_enum_acamera_control_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_mode as acamera_metadata_enum_android_control_mode_t;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_DISABLED:
acamera_metadata_enum_acamera_control_scene_mode = 0;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_FACE_PRIORITY : acamera_metadata_enum_acamera_control_scene_mode = 1 ;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_ACTION:
acamera_metadata_enum_acamera_control_scene_mode = 2;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_PORTRAIT:
acamera_metadata_enum_acamera_control_scene_mode = 3;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_LANDSCAPE:
acamera_metadata_enum_acamera_control_scene_mode = 4;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_NIGHT:
acamera_metadata_enum_acamera_control_scene_mode = 5;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_NIGHT_PORTRAIT : acamera_metadata_enum_acamera_control_scene_mode = 6 ;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_THEATRE:
acamera_metadata_enum_acamera_control_scene_mode = 7;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_BEACH:
acamera_metadata_enum_acamera_control_scene_mode = 8;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_SNOW:
acamera_metadata_enum_acamera_control_scene_mode = 9;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_SUNSET:
acamera_metadata_enum_acamera_control_scene_mode = 10;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_STEADYPHOTO : acamera_metadata_enum_acamera_control_scene_mode = 11 ;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_FIREWORKS:
acamera_metadata_enum_acamera_control_scene_mode = 12;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_SPORTS:
acamera_metadata_enum_acamera_control_scene_mode = 13;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_PARTY:
acamera_metadata_enum_acamera_control_scene_mode = 14;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_CANDLELIGHT : acamera_metadata_enum_acamera_control_scene_mode = 15 ;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_BARCODE:
acamera_metadata_enum_acamera_control_scene_mode = 16;
pub const acamera_metadata_enum_acamera_control_scene_mode_ACAMERA_CONTROL_SCENE_MODE_HDR:
acamera_metadata_enum_acamera_control_scene_mode = 18;
pub type acamera_metadata_enum_acamera_control_scene_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_scene_mode as acamera_metadata_enum_android_control_scene_mode_t;
pub const acamera_metadata_enum_acamera_control_video_stabilization_mode_ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_OFF : acamera_metadata_enum_acamera_control_video_stabilization_mode = 0 ;
pub const acamera_metadata_enum_acamera_control_video_stabilization_mode_ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON : acamera_metadata_enum_acamera_control_video_stabilization_mode = 1 ;
pub type acamera_metadata_enum_acamera_control_video_stabilization_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_video_stabilization_mode as acamera_metadata_enum_android_control_video_stabilization_mode_t;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_INACTIVE:
acamera_metadata_enum_acamera_control_ae_state = 0;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_SEARCHING:
acamera_metadata_enum_acamera_control_ae_state = 1;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_CONVERGED:
acamera_metadata_enum_acamera_control_ae_state = 2;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_LOCKED:
acamera_metadata_enum_acamera_control_ae_state = 3;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED:
acamera_metadata_enum_acamera_control_ae_state = 4;
pub const acamera_metadata_enum_acamera_control_ae_state_ACAMERA_CONTROL_AE_STATE_PRECAPTURE:
acamera_metadata_enum_acamera_control_ae_state = 5;
pub type acamera_metadata_enum_acamera_control_ae_state = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_state as acamera_metadata_enum_android_control_ae_state_t;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_INACTIVE:
acamera_metadata_enum_acamera_control_af_state = 0;
// NOTE(review): bindgen-style generated bindings — each C enum variant is a
// standalone `pub const`, the enum type itself is a `c_uint` alias, and a
// `pub use` re-exports it under the `android_*_t` spelling. This looks
// machine-generated; prefer regenerating the bindings over hand-editing
// names or values.
// ACAMERA_CONTROL_AF_STATE variants (values 1-6 here; the value-0 variant is
// presumably declared just above this section — confirm before assuming the
// list is complete).
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_PASSIVE_SCAN:
acamera_metadata_enum_acamera_control_af_state = 1;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED : acamera_metadata_enum_acamera_control_af_state = 2 ;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_ACTIVE_SCAN:
acamera_metadata_enum_acamera_control_af_state = 3;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED:
acamera_metadata_enum_acamera_control_af_state = 4;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED : acamera_metadata_enum_acamera_control_af_state = 5 ;
pub const acamera_metadata_enum_acamera_control_af_state_ACAMERA_CONTROL_AF_STATE_PASSIVE_UNFOCUSED : acamera_metadata_enum_acamera_control_af_state = 6 ;
pub type acamera_metadata_enum_acamera_control_af_state = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_af_state as acamera_metadata_enum_android_control_af_state_t;
// ACAMERA_CONTROL_AWB_STATE variants (values 0-3).
pub const acamera_metadata_enum_acamera_control_awb_state_ACAMERA_CONTROL_AWB_STATE_INACTIVE:
acamera_metadata_enum_acamera_control_awb_state = 0;
pub const acamera_metadata_enum_acamera_control_awb_state_ACAMERA_CONTROL_AWB_STATE_SEARCHING:
acamera_metadata_enum_acamera_control_awb_state = 1;
pub const acamera_metadata_enum_acamera_control_awb_state_ACAMERA_CONTROL_AWB_STATE_CONVERGED:
acamera_metadata_enum_acamera_control_awb_state = 2;
pub const acamera_metadata_enum_acamera_control_awb_state_ACAMERA_CONTROL_AWB_STATE_LOCKED:
acamera_metadata_enum_acamera_control_awb_state = 3;
pub type acamera_metadata_enum_acamera_control_awb_state = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_awb_state as acamera_metadata_enum_android_control_awb_state_t;
// ACAMERA_CONTROL_AE_LOCK_AVAILABLE: boolean-style enum, FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_control_ae_lock_available_ACAMERA_CONTROL_AE_LOCK_AVAILABLE_FALSE : acamera_metadata_enum_acamera_control_ae_lock_available = 0 ;
pub const acamera_metadata_enum_acamera_control_ae_lock_available_ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE : acamera_metadata_enum_acamera_control_ae_lock_available = 1 ;
pub type acamera_metadata_enum_acamera_control_ae_lock_available = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_ae_lock_available as acamera_metadata_enum_android_control_ae_lock_available_t;
// ACAMERA_CONTROL_AWB_LOCK_AVAILABLE: boolean-style enum, FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_control_awb_lock_available_ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_FALSE : acamera_metadata_enum_acamera_control_awb_lock_available = 0 ;
pub const acamera_metadata_enum_acamera_control_awb_lock_available_ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE : acamera_metadata_enum_acamera_control_awb_lock_available = 1 ;
pub type acamera_metadata_enum_acamera_control_awb_lock_available = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_awb_lock_available as acamera_metadata_enum_android_control_awb_lock_available_t;
// ACAMERA_CONTROL_ENABLE_ZSL: boolean-style enum, FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_control_enable_zsl_ACAMERA_CONTROL_ENABLE_ZSL_FALSE:
acamera_metadata_enum_acamera_control_enable_zsl = 0;
pub const acamera_metadata_enum_acamera_control_enable_zsl_ACAMERA_CONTROL_ENABLE_ZSL_TRUE:
acamera_metadata_enum_acamera_control_enable_zsl = 1;
pub type acamera_metadata_enum_acamera_control_enable_zsl = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_enable_zsl as acamera_metadata_enum_android_control_enable_zsl_t;
// ACAMERA_CONTROL_AF_SCENE_CHANGE: NOT_DETECTED = 0 / DETECTED = 1.
pub const acamera_metadata_enum_acamera_control_af_scene_change_ACAMERA_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED : acamera_metadata_enum_acamera_control_af_scene_change = 0 ;
pub const acamera_metadata_enum_acamera_control_af_scene_change_ACAMERA_CONTROL_AF_SCENE_CHANGE_DETECTED : acamera_metadata_enum_acamera_control_af_scene_change = 1 ;
pub type acamera_metadata_enum_acamera_control_af_scene_change = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_af_scene_change as acamera_metadata_enum_android_control_af_scene_change_t;
// ACAMERA_CONTROL_EXTENDED_SCENE_MODE variants (values 0-2).
pub const acamera_metadata_enum_acamera_control_extended_scene_mode_ACAMERA_CONTROL_EXTENDED_SCENE_MODE_DISABLED : acamera_metadata_enum_acamera_control_extended_scene_mode = 0 ;
pub const acamera_metadata_enum_acamera_control_extended_scene_mode_ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE : acamera_metadata_enum_acamera_control_extended_scene_mode = 1 ;
pub const acamera_metadata_enum_acamera_control_extended_scene_mode_ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS : acamera_metadata_enum_acamera_control_extended_scene_mode = 2 ;
pub type acamera_metadata_enum_acamera_control_extended_scene_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_control_extended_scene_mode as acamera_metadata_enum_android_control_extended_scene_mode_t;
// ACAMERA_EDGE_MODE variants (values 0-3). Generated binding pattern:
// per-variant `pub const`, `c_uint` alias for the enum, `pub use` rename to
// the `android_*_t` spelling. Do not hand-edit values.
pub const acamera_metadata_enum_acamera_edge_mode_ACAMERA_EDGE_MODE_OFF:
acamera_metadata_enum_acamera_edge_mode = 0;
pub const acamera_metadata_enum_acamera_edge_mode_ACAMERA_EDGE_MODE_FAST:
acamera_metadata_enum_acamera_edge_mode = 1;
pub const acamera_metadata_enum_acamera_edge_mode_ACAMERA_EDGE_MODE_HIGH_QUALITY:
acamera_metadata_enum_acamera_edge_mode = 2;
pub const acamera_metadata_enum_acamera_edge_mode_ACAMERA_EDGE_MODE_ZERO_SHUTTER_LAG:
acamera_metadata_enum_acamera_edge_mode = 3;
pub type acamera_metadata_enum_acamera_edge_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_edge_mode as acamera_metadata_enum_android_edge_mode_t;
// ACAMERA_FLASH_MODE variants (values 0-2).
pub const acamera_metadata_enum_acamera_flash_mode_ACAMERA_FLASH_MODE_OFF:
acamera_metadata_enum_acamera_flash_mode = 0;
pub const acamera_metadata_enum_acamera_flash_mode_ACAMERA_FLASH_MODE_SINGLE:
acamera_metadata_enum_acamera_flash_mode = 1;
pub const acamera_metadata_enum_acamera_flash_mode_ACAMERA_FLASH_MODE_TORCH:
acamera_metadata_enum_acamera_flash_mode = 2;
pub type acamera_metadata_enum_acamera_flash_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_flash_mode as acamera_metadata_enum_android_flash_mode_t;
// ACAMERA_FLASH_STATE variants (values 0-4).
pub const acamera_metadata_enum_acamera_flash_state_ACAMERA_FLASH_STATE_UNAVAILABLE:
acamera_metadata_enum_acamera_flash_state = 0;
pub const acamera_metadata_enum_acamera_flash_state_ACAMERA_FLASH_STATE_CHARGING:
acamera_metadata_enum_acamera_flash_state = 1;
pub const acamera_metadata_enum_acamera_flash_state_ACAMERA_FLASH_STATE_READY:
acamera_metadata_enum_acamera_flash_state = 2;
pub const acamera_metadata_enum_acamera_flash_state_ACAMERA_FLASH_STATE_FIRED:
acamera_metadata_enum_acamera_flash_state = 3;
pub const acamera_metadata_enum_acamera_flash_state_ACAMERA_FLASH_STATE_PARTIAL:
acamera_metadata_enum_acamera_flash_state = 4;
pub type acamera_metadata_enum_acamera_flash_state = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_flash_state as acamera_metadata_enum_android_flash_state_t;
// ACAMERA_FLASH_INFO_AVAILABLE: boolean-style enum, FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_flash_info_available_ACAMERA_FLASH_INFO_AVAILABLE_FALSE:
acamera_metadata_enum_acamera_flash_info_available = 0;
pub const acamera_metadata_enum_acamera_flash_info_available_ACAMERA_FLASH_INFO_AVAILABLE_TRUE:
acamera_metadata_enum_acamera_flash_info_available = 1;
pub type acamera_metadata_enum_acamera_flash_info_available = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_flash_info_available as acamera_metadata_enum_android_flash_info_available_t;
// ACAMERA_HOT_PIXEL_MODE variants (values 0-2).
pub const acamera_metadata_enum_acamera_hot_pixel_mode_ACAMERA_HOT_PIXEL_MODE_OFF:
acamera_metadata_enum_acamera_hot_pixel_mode = 0;
pub const acamera_metadata_enum_acamera_hot_pixel_mode_ACAMERA_HOT_PIXEL_MODE_FAST:
acamera_metadata_enum_acamera_hot_pixel_mode = 1;
pub const acamera_metadata_enum_acamera_hot_pixel_mode_ACAMERA_HOT_PIXEL_MODE_HIGH_QUALITY:
acamera_metadata_enum_acamera_hot_pixel_mode = 2;
pub type acamera_metadata_enum_acamera_hot_pixel_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_hot_pixel_mode as acamera_metadata_enum_android_hot_pixel_mode_t;
// ACAMERA_LENS_OPTICAL_STABILIZATION_MODE: OFF = 0 / ON = 1.
pub const acamera_metadata_enum_acamera_lens_optical_stabilization_mode_ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_OFF : acamera_metadata_enum_acamera_lens_optical_stabilization_mode = 0 ;
pub const acamera_metadata_enum_acamera_lens_optical_stabilization_mode_ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_ON : acamera_metadata_enum_acamera_lens_optical_stabilization_mode = 1 ;
pub type acamera_metadata_enum_acamera_lens_optical_stabilization_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_lens_optical_stabilization_mode as acamera_metadata_enum_android_lens_optical_stabilization_mode_t;
// ACAMERA_LENS_FACING variants (FRONT = 0, BACK = 1, EXTERNAL = 2).
pub const acamera_metadata_enum_acamera_lens_facing_ACAMERA_LENS_FACING_FRONT:
acamera_metadata_enum_acamera_lens_facing = 0;
pub const acamera_metadata_enum_acamera_lens_facing_ACAMERA_LENS_FACING_BACK:
acamera_metadata_enum_acamera_lens_facing = 1;
pub const acamera_metadata_enum_acamera_lens_facing_ACAMERA_LENS_FACING_EXTERNAL:
acamera_metadata_enum_acamera_lens_facing = 2;
pub type acamera_metadata_enum_acamera_lens_facing = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_lens_facing as acamera_metadata_enum_android_lens_facing_t;
// ACAMERA_LENS_STATE: STATIONARY = 0 / MOVING = 1.
pub const acamera_metadata_enum_acamera_lens_state_ACAMERA_LENS_STATE_STATIONARY:
acamera_metadata_enum_acamera_lens_state = 0;
pub const acamera_metadata_enum_acamera_lens_state_ACAMERA_LENS_STATE_MOVING:
acamera_metadata_enum_acamera_lens_state = 1;
pub type acamera_metadata_enum_acamera_lens_state = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_lens_state as acamera_metadata_enum_android_lens_state_t;
// ACAMERA_LENS_POSE_REFERENCE variants (values 0-2).
pub const acamera_metadata_enum_acamera_lens_pose_reference_ACAMERA_LENS_POSE_REFERENCE_PRIMARY_CAMERA : acamera_metadata_enum_acamera_lens_pose_reference = 0 ;
pub const acamera_metadata_enum_acamera_lens_pose_reference_ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE : acamera_metadata_enum_acamera_lens_pose_reference = 1 ;
pub const acamera_metadata_enum_acamera_lens_pose_reference_ACAMERA_LENS_POSE_REFERENCE_UNDEFINED : acamera_metadata_enum_acamera_lens_pose_reference = 2 ;
pub type acamera_metadata_enum_acamera_lens_pose_reference = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_lens_pose_reference as acamera_metadata_enum_android_lens_pose_reference_t;
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION variants (values 0-2).
pub const acamera_metadata_enum_acamera_lens_info_focus_distance_calibration_ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED : acamera_metadata_enum_acamera_lens_info_focus_distance_calibration = 0 ;
pub const acamera_metadata_enum_acamera_lens_info_focus_distance_calibration_ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE : acamera_metadata_enum_acamera_lens_info_focus_distance_calibration = 1 ;
pub const acamera_metadata_enum_acamera_lens_info_focus_distance_calibration_ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED : acamera_metadata_enum_acamera_lens_info_focus_distance_calibration = 2 ;
pub type acamera_metadata_enum_acamera_lens_info_focus_distance_calibration =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_lens_info_focus_distance_calibration as acamera_metadata_enum_android_lens_info_focus_distance_calibration_t;
// ACAMERA_NOISE_REDUCTION_MODE variants (values 0-4).
pub const acamera_metadata_enum_acamera_noise_reduction_mode_ACAMERA_NOISE_REDUCTION_MODE_OFF:
acamera_metadata_enum_acamera_noise_reduction_mode = 0;
pub const acamera_metadata_enum_acamera_noise_reduction_mode_ACAMERA_NOISE_REDUCTION_MODE_FAST:
acamera_metadata_enum_acamera_noise_reduction_mode = 1;
pub const acamera_metadata_enum_acamera_noise_reduction_mode_ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY : acamera_metadata_enum_acamera_noise_reduction_mode = 2 ;
pub const acamera_metadata_enum_acamera_noise_reduction_mode_ACAMERA_NOISE_REDUCTION_MODE_MINIMAL : acamera_metadata_enum_acamera_noise_reduction_mode = 3 ;
pub const acamera_metadata_enum_acamera_noise_reduction_mode_ACAMERA_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG : acamera_metadata_enum_acamera_noise_reduction_mode = 4 ;
pub type acamera_metadata_enum_acamera_noise_reduction_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_noise_reduction_mode as acamera_metadata_enum_android_noise_reduction_mode_t;
// ACAMERA_REQUEST_AVAILABLE_CAPABILITIES variants. Values are intentionally
// non-contiguous (4, 7, 9, and 15 are absent here) — they mirror the C enum's
// assigned values, so do not renumber or "fill gaps".
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE : acamera_metadata_enum_acamera_request_available_capabilities = 0 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR : acamera_metadata_enum_acamera_request_available_capabilities = 1 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING : acamera_metadata_enum_acamera_request_available_capabilities = 2 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_RAW : acamera_metadata_enum_acamera_request_available_capabilities = 3 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS : acamera_metadata_enum_acamera_request_available_capabilities = 5 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE : acamera_metadata_enum_acamera_request_available_capabilities = 6 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT : acamera_metadata_enum_acamera_request_available_capabilities = 8 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING : acamera_metadata_enum_acamera_request_available_capabilities = 10 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA : acamera_metadata_enum_acamera_request_available_capabilities = 11 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME : acamera_metadata_enum_acamera_request_available_capabilities = 12 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA : acamera_metadata_enum_acamera_request_available_capabilities = 13 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA : acamera_metadata_enum_acamera_request_available_capabilities = 14 ;
pub const acamera_metadata_enum_acamera_request_available_capabilities_ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR : acamera_metadata_enum_acamera_request_available_capabilities = 16 ;
pub type acamera_metadata_enum_acamera_request_available_capabilities = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_request_available_capabilities as acamera_metadata_enum_android_request_available_capabilities_t;
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: OUTPUT = 0 / INPUT = 1.
pub const acamera_metadata_enum_acamera_scaler_available_stream_configurations_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT : acamera_metadata_enum_acamera_scaler_available_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_scaler_available_stream_configurations_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT : acamera_metadata_enum_acamera_scaler_available_stream_configurations = 1 ;
pub type acamera_metadata_enum_acamera_scaler_available_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_available_stream_configurations as acamera_metadata_enum_android_scaler_available_stream_configurations_t;
// ACAMERA_SCALER_CROPPING_TYPE: CENTER_ONLY = 0 / FREEFORM = 1.
pub const acamera_metadata_enum_acamera_scaler_cropping_type_ACAMERA_SCALER_CROPPING_TYPE_CENTER_ONLY : acamera_metadata_enum_acamera_scaler_cropping_type = 0 ;
pub const acamera_metadata_enum_acamera_scaler_cropping_type_ACAMERA_SCALER_CROPPING_TYPE_FREEFORM : acamera_metadata_enum_acamera_scaler_cropping_type = 1 ;
pub type acamera_metadata_enum_acamera_scaler_cropping_type = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_cropping_type as acamera_metadata_enum_android_scaler_cropping_type_t;
// ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS variants
// (values 0-7, then VENDOR_START jumps to 24 — mirrors the C enum; keep as-is).
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 1 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 2 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 3 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 4 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 5 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_LOW_LATENCY_SNAPSHOT : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 6 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 7 ;
pub const acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations_ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START : acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations = 24 ;
pub type acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_available_recommended_stream_configurations as acamera_metadata_enum_android_scaler_available_recommended_stream_configurations_t;
// ACAMERA_SCALER_ROTATE_AND_CROP variants (NONE, 90, 180, 270, AUTO = 0-4).
pub const acamera_metadata_enum_acamera_scaler_rotate_and_crop_ACAMERA_SCALER_ROTATE_AND_CROP_NONE : acamera_metadata_enum_acamera_scaler_rotate_and_crop = 0 ;
pub const acamera_metadata_enum_acamera_scaler_rotate_and_crop_ACAMERA_SCALER_ROTATE_AND_CROP_90:
acamera_metadata_enum_acamera_scaler_rotate_and_crop = 1;
pub const acamera_metadata_enum_acamera_scaler_rotate_and_crop_ACAMERA_SCALER_ROTATE_AND_CROP_180 : acamera_metadata_enum_acamera_scaler_rotate_and_crop = 2 ;
pub const acamera_metadata_enum_acamera_scaler_rotate_and_crop_ACAMERA_SCALER_ROTATE_AND_CROP_270 : acamera_metadata_enum_acamera_scaler_rotate_and_crop = 3 ;
pub const acamera_metadata_enum_acamera_scaler_rotate_and_crop_ACAMERA_SCALER_ROTATE_AND_CROP_AUTO : acamera_metadata_enum_acamera_scaler_rotate_and_crop = 4 ;
pub type acamera_metadata_enum_acamera_scaler_rotate_and_crop = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_rotate_and_crop as acamera_metadata_enum_android_scaler_rotate_and_crop_t;
// ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS:
// OUTPUT = 0 / INPUT = 1.
pub const acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations_ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_OUTPUT : acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations_ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_INPUT : acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations = 1 ;
pub type acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations as acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t;
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION:
// OUTPUT = 0 / INPUT = 1.
pub const acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT : acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution = 0 ;
pub const acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution_ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT : acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution = 1 ;
pub type acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution as acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t;
// ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED: FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported_ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_FALSE : acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported = 0 ;
pub const acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported_ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE : acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported = 1 ;
pub type acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported as acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 variants. Values are non-contiguous
// (1-4, 9-15, 17-24) — they mirror the C enum's assigned values (which in turn
// presumably follow the EXIF LightSource numbering; confirm against the NDK
// header before relying on that). Do not renumber.
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 1 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 2 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 3 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLASH : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 4 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 9 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 10 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_SHADE : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 11 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 12 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 13 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 14 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 15 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 17 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 18 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 19 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D55 : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 20 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D65 : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 21 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D75 : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 22 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D50 : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 23 ;
pub const acamera_metadata_enum_acamera_sensor_reference_illuminant1_ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN : acamera_metadata_enum_acamera_sensor_reference_illuminant1 = 24 ;
pub type acamera_metadata_enum_acamera_sensor_reference_illuminant1 = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_reference_illuminant1 as acamera_metadata_enum_android_sensor_reference_illuminant1_t;
// ACAMERA_SENSOR_TEST_PATTERN_MODE variants (0-4, plus CUSTOM1 = 256 —
// mirrors the C enum's deliberate gap for custom patterns).
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_OFF : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 0 ;
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 1 ;
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 2 ;
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 3 ;
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_PN9 : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 4 ;
pub const acamera_metadata_enum_acamera_sensor_test_pattern_mode_ACAMERA_SENSOR_TEST_PATTERN_MODE_CUSTOM1 : acamera_metadata_enum_acamera_sensor_test_pattern_mode = 256 ;
pub type acamera_metadata_enum_acamera_sensor_test_pattern_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_test_pattern_mode as acamera_metadata_enum_android_sensor_test_pattern_mode_t;
// ACAMERA_SENSOR_PIXEL_MODE: DEFAULT = 0 / MAXIMUM_RESOLUTION = 1.
pub const acamera_metadata_enum_acamera_sensor_pixel_mode_ACAMERA_SENSOR_PIXEL_MODE_DEFAULT:
acamera_metadata_enum_acamera_sensor_pixel_mode = 0;
pub const acamera_metadata_enum_acamera_sensor_pixel_mode_ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION : acamera_metadata_enum_acamera_sensor_pixel_mode = 1 ;
pub type acamera_metadata_enum_acamera_sensor_pixel_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_pixel_mode as acamera_metadata_enum_android_sensor_pixel_mode_t;
// ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED — note TRUE = 0 and FALSE = 1 here,
// the opposite ordering from the other boolean-style enums in this file.
// This matches the upstream C enum; do not "fix" it.
pub const acamera_metadata_enum_acamera_sensor_raw_binning_factor_used_ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_TRUE : acamera_metadata_enum_acamera_sensor_raw_binning_factor_used = 0 ;
pub const acamera_metadata_enum_acamera_sensor_raw_binning_factor_used_ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_FALSE : acamera_metadata_enum_acamera_sensor_raw_binning_factor_used = 1 ;
pub type acamera_metadata_enum_acamera_sensor_raw_binning_factor_used = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_raw_binning_factor_used as acamera_metadata_enum_android_sensor_raw_binning_factor_used_t;
// ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT variants (values 0-6).
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 0 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 1 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 2 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 3 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 4 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 5 ;
pub const acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement_ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR : acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement = 6 ;
pub type acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement as acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t;
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE: UNKNOWN = 0 / REALTIME = 1.
pub const acamera_metadata_enum_acamera_sensor_info_timestamp_source_ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN : acamera_metadata_enum_acamera_sensor_info_timestamp_source = 0 ;
pub const acamera_metadata_enum_acamera_sensor_info_timestamp_source_ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME : acamera_metadata_enum_acamera_sensor_info_timestamp_source = 1 ;
pub type acamera_metadata_enum_acamera_sensor_info_timestamp_source = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_info_timestamp_source as acamera_metadata_enum_android_sensor_info_timestamp_source_t;
// ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED: FALSE = 0 / TRUE = 1.
pub const acamera_metadata_enum_acamera_sensor_info_lens_shading_applied_ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE : acamera_metadata_enum_acamera_sensor_info_lens_shading_applied = 0 ;
pub const acamera_metadata_enum_acamera_sensor_info_lens_shading_applied_ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE : acamera_metadata_enum_acamera_sensor_info_lens_shading_applied = 1 ;
pub type acamera_metadata_enum_acamera_sensor_info_lens_shading_applied = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_sensor_info_lens_shading_applied as acamera_metadata_enum_android_sensor_info_lens_shading_applied_t;
// ACAMERA_SHADING_MODE variants (OFF = 0, FAST = 1, HIGH_QUALITY = 2).
pub const acamera_metadata_enum_acamera_shading_mode_ACAMERA_SHADING_MODE_OFF:
acamera_metadata_enum_acamera_shading_mode = 0;
pub const acamera_metadata_enum_acamera_shading_mode_ACAMERA_SHADING_MODE_FAST:
acamera_metadata_enum_acamera_shading_mode = 1;
pub const acamera_metadata_enum_acamera_shading_mode_ACAMERA_SHADING_MODE_HIGH_QUALITY:
acamera_metadata_enum_acamera_shading_mode = 2;
pub type acamera_metadata_enum_acamera_shading_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_shading_mode as acamera_metadata_enum_android_shading_mode_t;
// ACAMERA_STATISTICS_FACE_DETECT_MODE variants (OFF = 0, SIMPLE = 1, FULL = 2).
pub const acamera_metadata_enum_acamera_statistics_face_detect_mode_ACAMERA_STATISTICS_FACE_DETECT_MODE_OFF : acamera_metadata_enum_acamera_statistics_face_detect_mode = 0 ;
pub const acamera_metadata_enum_acamera_statistics_face_detect_mode_ACAMERA_STATISTICS_FACE_DETECT_MODE_SIMPLE : acamera_metadata_enum_acamera_statistics_face_detect_mode = 1 ;
pub const acamera_metadata_enum_acamera_statistics_face_detect_mode_ACAMERA_STATISTICS_FACE_DETECT_MODE_FULL : acamera_metadata_enum_acamera_statistics_face_detect_mode = 2 ;
pub type acamera_metadata_enum_acamera_statistics_face_detect_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_statistics_face_detect_mode as acamera_metadata_enum_android_statistics_face_detect_mode_t;
// ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE: OFF = 0 / ON = 1.
pub const acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode_ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_OFF : acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode = 0 ;
pub const acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode_ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_ON : acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode = 1 ;
pub type acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode as acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t;
// ACAMERA_STATISTICS_SCENE_FLICKER: NONE = 0, 50HZ = 1, 60HZ = 2.
pub const acamera_metadata_enum_acamera_statistics_scene_flicker_ACAMERA_STATISTICS_SCENE_FLICKER_NONE : acamera_metadata_enum_acamera_statistics_scene_flicker = 0 ;
pub const acamera_metadata_enum_acamera_statistics_scene_flicker_ACAMERA_STATISTICS_SCENE_FLICKER_50HZ : acamera_metadata_enum_acamera_statistics_scene_flicker = 1 ;
pub const acamera_metadata_enum_acamera_statistics_scene_flicker_ACAMERA_STATISTICS_SCENE_FLICKER_60HZ : acamera_metadata_enum_acamera_statistics_scene_flicker = 2 ;
pub type acamera_metadata_enum_acamera_statistics_scene_flicker = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_statistics_scene_flicker as acamera_metadata_enum_android_statistics_scene_flicker_t;
// ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE: OFF = 0 / ON = 1.
pub const acamera_metadata_enum_acamera_statistics_lens_shading_map_mode_ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_OFF : acamera_metadata_enum_acamera_statistics_lens_shading_map_mode = 0 ;
pub const acamera_metadata_enum_acamera_statistics_lens_shading_map_mode_ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON : acamera_metadata_enum_acamera_statistics_lens_shading_map_mode = 1 ;
pub type acamera_metadata_enum_acamera_statistics_lens_shading_map_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_statistics_lens_shading_map_mode as acamera_metadata_enum_android_statistics_lens_shading_map_mode_t;
// ACAMERA_STATISTICS_OIS_DATA_MODE: OFF = 0 / ON = 1.
pub const acamera_metadata_enum_acamera_statistics_ois_data_mode_ACAMERA_STATISTICS_OIS_DATA_MODE_OFF : acamera_metadata_enum_acamera_statistics_ois_data_mode = 0 ;
pub const acamera_metadata_enum_acamera_statistics_ois_data_mode_ACAMERA_STATISTICS_OIS_DATA_MODE_ON : acamera_metadata_enum_acamera_statistics_ois_data_mode = 1 ;
pub type acamera_metadata_enum_acamera_statistics_ois_data_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_statistics_ois_data_mode as acamera_metadata_enum_android_statistics_ois_data_mode_t;
pub const acamera_metadata_enum_acamera_tonemap_mode_ACAMERA_TONEMAP_MODE_CONTRAST_CURVE:
acamera_metadata_enum_acamera_tonemap_mode = 0;
pub const acamera_metadata_enum_acamera_tonemap_mode_ACAMERA_TONEMAP_MODE_FAST:
acamera_metadata_enum_acamera_tonemap_mode = 1;
pub const acamera_metadata_enum_acamera_tonemap_mode_ACAMERA_TONEMAP_MODE_HIGH_QUALITY:
acamera_metadata_enum_acamera_tonemap_mode = 2;
pub const acamera_metadata_enum_acamera_tonemap_mode_ACAMERA_TONEMAP_MODE_GAMMA_VALUE:
acamera_metadata_enum_acamera_tonemap_mode = 3;
pub const acamera_metadata_enum_acamera_tonemap_mode_ACAMERA_TONEMAP_MODE_PRESET_CURVE:
acamera_metadata_enum_acamera_tonemap_mode = 4;
pub type acamera_metadata_enum_acamera_tonemap_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_tonemap_mode as acamera_metadata_enum_android_tonemap_mode_t;
pub const acamera_metadata_enum_acamera_tonemap_preset_curve_ACAMERA_TONEMAP_PRESET_CURVE_SRGB:
acamera_metadata_enum_acamera_tonemap_preset_curve = 0;
pub const acamera_metadata_enum_acamera_tonemap_preset_curve_ACAMERA_TONEMAP_PRESET_CURVE_REC709:
acamera_metadata_enum_acamera_tonemap_preset_curve = 1;
pub type acamera_metadata_enum_acamera_tonemap_preset_curve = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_tonemap_preset_curve as acamera_metadata_enum_android_tonemap_preset_curve_t;
pub const acamera_metadata_enum_acamera_info_supported_hardware_level_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED : acamera_metadata_enum_acamera_info_supported_hardware_level = 0 ;
pub const acamera_metadata_enum_acamera_info_supported_hardware_level_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : acamera_metadata_enum_acamera_info_supported_hardware_level = 1 ;
pub const acamera_metadata_enum_acamera_info_supported_hardware_level_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY : acamera_metadata_enum_acamera_info_supported_hardware_level = 2 ;
pub const acamera_metadata_enum_acamera_info_supported_hardware_level_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3 : acamera_metadata_enum_acamera_info_supported_hardware_level = 3 ;
pub const acamera_metadata_enum_acamera_info_supported_hardware_level_ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL : acamera_metadata_enum_acamera_info_supported_hardware_level = 4 ;
pub type acamera_metadata_enum_acamera_info_supported_hardware_level = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_info_supported_hardware_level as acamera_metadata_enum_android_info_supported_hardware_level_t;
pub const acamera_metadata_enum_acamera_black_level_lock_ACAMERA_BLACK_LEVEL_LOCK_OFF:
acamera_metadata_enum_acamera_black_level_lock = 0;
pub const acamera_metadata_enum_acamera_black_level_lock_ACAMERA_BLACK_LEVEL_LOCK_ON:
acamera_metadata_enum_acamera_black_level_lock = 1;
pub type acamera_metadata_enum_acamera_black_level_lock = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_black_level_lock as acamera_metadata_enum_android_black_level_lock_t;
pub const acamera_metadata_enum_acamera_sync_frame_number_ACAMERA_SYNC_FRAME_NUMBER_CONVERGING:
acamera_metadata_enum_acamera_sync_frame_number = -1;
pub const acamera_metadata_enum_acamera_sync_frame_number_ACAMERA_SYNC_FRAME_NUMBER_UNKNOWN:
acamera_metadata_enum_acamera_sync_frame_number = -2;
pub type acamera_metadata_enum_acamera_sync_frame_number = ::std::os::raw::c_int;
pub use self::acamera_metadata_enum_acamera_sync_frame_number as acamera_metadata_enum_android_sync_frame_number_t;
pub const acamera_metadata_enum_acamera_sync_max_latency_ACAMERA_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : acamera_metadata_enum_acamera_sync_max_latency = 0 ;
pub const acamera_metadata_enum_acamera_sync_max_latency_ACAMERA_SYNC_MAX_LATENCY_UNKNOWN:
acamera_metadata_enum_acamera_sync_max_latency = -1;
pub type acamera_metadata_enum_acamera_sync_max_latency = ::std::os::raw::c_int;
pub use self::acamera_metadata_enum_acamera_sync_max_latency as acamera_metadata_enum_android_sync_max_latency_t;
pub const acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT : acamera_metadata_enum_acamera_depth_available_depth_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT : acamera_metadata_enum_acamera_depth_available_depth_stream_configurations = 1 ;
pub type acamera_metadata_enum_acamera_depth_available_depth_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_depth_available_depth_stream_configurations as acamera_metadata_enum_android_depth_available_depth_stream_configurations_t;
pub const acamera_metadata_enum_acamera_depth_depth_is_exclusive_ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE : acamera_metadata_enum_acamera_depth_depth_is_exclusive = 0 ;
pub const acamera_metadata_enum_acamera_depth_depth_is_exclusive_ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE : acamera_metadata_enum_acamera_depth_depth_is_exclusive = 1 ;
pub type acamera_metadata_enum_acamera_depth_depth_is_exclusive = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_depth_depth_is_exclusive as acamera_metadata_enum_android_depth_depth_is_exclusive_t;
pub const acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_OUTPUT : acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_INPUT : acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations = 1 ;
pub type acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations as acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_t;
pub const acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT : acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution = 0 ;
pub const acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution_ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT : acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution = 1 ;
pub type acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution as acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t;
pub const acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT : acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution = 0 ;
pub const acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution_ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT : acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution = 1 ;
pub type acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution as acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t;
pub const acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type_ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE : acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type = 0 ;
pub const acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type_ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED : acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type = 1 ;
pub type acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type as acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t;
pub const acamera_metadata_enum_acamera_distortion_correction_mode_ACAMERA_DISTORTION_CORRECTION_MODE_OFF : acamera_metadata_enum_acamera_distortion_correction_mode = 0 ;
pub const acamera_metadata_enum_acamera_distortion_correction_mode_ACAMERA_DISTORTION_CORRECTION_MODE_FAST : acamera_metadata_enum_acamera_distortion_correction_mode = 1 ;
pub const acamera_metadata_enum_acamera_distortion_correction_mode_ACAMERA_DISTORTION_CORRECTION_MODE_HIGH_QUALITY : acamera_metadata_enum_acamera_distortion_correction_mode = 2 ;
pub type acamera_metadata_enum_acamera_distortion_correction_mode = ::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_distortion_correction_mode as acamera_metadata_enum_android_distortion_correction_mode_t;
pub const acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_OUTPUT : acamera_metadata_enum_acamera_heic_available_heic_stream_configurations = 0 ;
pub const acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT : acamera_metadata_enum_acamera_heic_available_heic_stream_configurations = 1 ;
pub type acamera_metadata_enum_acamera_heic_available_heic_stream_configurations =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_heic_available_heic_stream_configurations as acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
pub const acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT : acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution = 0 ;
pub const acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution_ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT : acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution = 1 ;
pub type acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution =
::std::os::raw::c_uint;
pub use self::acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution as acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
/// Opaque handle to a camera metadata buffer (zero-sized placeholder; only
/// ever used behind a pointer across the FFI boundary).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraMetadata {
    _unused: [u8; 0],
}
// Metadata entry data types (mirrors the C `ACAMERA_TYPE_*` enumerators);
// `ACameraMetadata_entry::type_` holds one of these values.
pub const ACAMERA_TYPE_BYTE: ::std::os::raw::c_uint = 0;
pub const ACAMERA_TYPE_INT32: ::std::os::raw::c_uint = 1;
pub const ACAMERA_TYPE_FLOAT: ::std::os::raw::c_uint = 2;
pub const ACAMERA_TYPE_INT64: ::std::os::raw::c_uint = 3;
pub const ACAMERA_TYPE_DOUBLE: ::std::os::raw::c_uint = 4;
pub const ACAMERA_TYPE_RATIONAL: ::std::os::raw::c_uint = 5;
pub const ACAMERA_NUM_TYPES: ::std::os::raw::c_uint = 6;
// bindgen-synthesized alias for the anonymous C enum above.
pub type _bindgen_ty_61 = ::std::os::raw::c_uint;
/// Rational number as used by camera metadata entries
/// (value = numerator / denominator).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraMetadata_rational {
    pub numerator: i32,
    pub denominator: i32,
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of `ACameraMetadata_rational` against the C ABI.
#[test]
fn bindgen_test_layout_ACameraMetadata_rational() {
    // Probe field offsets through a valid (uninitialized) allocation.
    // The previous `&(*::std::ptr::null::<T>()).field` pattern creates a
    // reference from a null pointer, which is undefined behavior even inside
    // `unsafe`; `addr_of!` through a dangling-but-aligned pointer derived
    // from `MaybeUninit` is the well-defined replacement used by modern
    // bindgen.
    const UNINIT: ::std::mem::MaybeUninit<ACameraMetadata_rational> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraMetadata_rational>(),
        8usize,
        concat!("Size of: ", stringify!(ACameraMetadata_rational))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraMetadata_rational>(),
        4usize,
        concat!("Alignment of ", stringify!(ACameraMetadata_rational))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).numerator) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_rational),
            "::",
            stringify!(numerator)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).denominator) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_rational),
            "::",
            stringify!(denominator)
        )
    );
}
/// One mutable metadata entry: a tag, the element type (`ACAMERA_TYPE_*`),
/// the element count, and a pointer to the data interpreted per `type_`.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ACameraMetadata_entry {
    pub tag: u32,
    pub type_: u8,
    pub count: u32,
    pub data: ACameraMetadata_entry__bindgen_ty_1,
}
/// Untagged union over the possible entry payload pointer types; the active
/// variant is selected by `ACameraMetadata_entry::type_`.
#[repr(C)]
#[derive(Copy, Clone)]
pub union ACameraMetadata_entry__bindgen_ty_1 {
    pub u8_: *mut u8,
    pub i32_: *mut i32,
    pub f: *mut f32,
    pub i64_: *mut i64,
    pub d: *mut f64,
    pub r: *mut ACameraMetadata_rational,
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of the entry data union (all union members start at offset 0;
/// size 4 reflects the 32-bit pointer width this binding was generated for).
#[test]
fn bindgen_test_layout_ACameraMetadata_entry__bindgen_ty_1() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraMetadata_entry__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraMetadata_entry__bindgen_ty_1>(),
        4usize,
        concat!("Size of: ", stringify!(ACameraMetadata_entry__bindgen_ty_1))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraMetadata_entry__bindgen_ty_1>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).u8_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(u8_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).i32_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(i32_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).f) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(f)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).i64_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(i64_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).d) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(d)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).r) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry__bindgen_ty_1),
            "::",
            stringify!(r)
        )
    );
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of `ACameraMetadata_entry` against the C ABI.
#[test]
fn bindgen_test_layout_ACameraMetadata_entry() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraMetadata_entry> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraMetadata_entry>(),
        16usize,
        concat!("Size of: ", stringify!(ACameraMetadata_entry))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraMetadata_entry>(),
        4usize,
        concat!("Alignment of ", stringify!(ACameraMetadata_entry))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tag) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry),
            "::",
            stringify!(tag)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).type_) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry),
            "::",
            stringify!(type_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).count) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry),
            "::",
            stringify!(count)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_entry),
            "::",
            stringify!(data)
        )
    );
}
/// Read-only variant of `ACameraMetadata_entry`: identical layout, but the
/// payload pointers are `*const` because the entry belongs to an immutable
/// metadata buffer.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ACameraMetadata_const_entry {
    pub tag: u32,
    pub type_: u8,
    pub count: u32,
    pub data: ACameraMetadata_const_entry__bindgen_ty_1,
}
/// Untagged union over the possible read-only payload pointer types; the
/// active variant is selected by `ACameraMetadata_const_entry::type_`.
#[repr(C)]
#[derive(Copy, Clone)]
pub union ACameraMetadata_const_entry__bindgen_ty_1 {
    pub u8_: *const u8,
    pub i32_: *const i32,
    pub f: *const f32,
    pub i64_: *const i64,
    pub d: *const f64,
    pub r: *const ACameraMetadata_rational,
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of the const-entry data union (all members at offset 0).
#[test]
fn bindgen_test_layout_ACameraMetadata_const_entry__bindgen_ty_1() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraMetadata_const_entry__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraMetadata_const_entry__bindgen_ty_1>(),
        4usize,
        concat!(
            "Size of: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraMetadata_const_entry__bindgen_ty_1>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).u8_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(u8_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).i32_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(i32_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).f) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(f)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).i64_) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(i64_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).d) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(d)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).r) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry__bindgen_ty_1),
            "::",
            stringify!(r)
        )
    );
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of `ACameraMetadata_const_entry` against the C ABI.
#[test]
fn bindgen_test_layout_ACameraMetadata_const_entry() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraMetadata_const_entry> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraMetadata_const_entry>(),
        16usize,
        concat!("Size of: ", stringify!(ACameraMetadata_const_entry))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraMetadata_const_entry>(),
        4usize,
        concat!("Alignment of ", stringify!(ACameraMetadata_const_entry))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tag) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry),
            "::",
            stringify!(tag)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).type_) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry),
            "::",
            stringify!(type_)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).count) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry),
            "::",
            stringify!(count)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).data) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraMetadata_const_entry),
            "::",
            stringify!(data)
        )
    );
}
// FFI declarations for the NdkCameraMetadata API. Signatures must match the
// C headers exactly; do not hand-edit.
extern "C" {
    /// Looks up a single read-only entry by tag.
    pub fn ACameraMetadata_getConstEntry(
        metadata: *const ACameraMetadata,
        tag: u32,
        entry: *mut ACameraMetadata_const_entry,
    ) -> camera_status_t;
}
extern "C" {
    /// Lists all tags present in the metadata buffer.
    pub fn ACameraMetadata_getAllTags(
        metadata: *const ACameraMetadata,
        numEntries: *mut i32,
        tags: *mut *const u32,
    ) -> camera_status_t;
}
extern "C" {
    /// Returns a newly allocated copy; release it with `ACameraMetadata_free`.
    pub fn ACameraMetadata_copy(src: *const ACameraMetadata) -> *mut ACameraMetadata;
}
extern "C" {
    pub fn ACameraMetadata_free(metadata: *mut ACameraMetadata);
}
extern "C" {
    pub fn ACameraMetadata_isLogicalMultiCamera(
        staticMetadata: *const ACameraMetadata,
        numPhysicalCameras: *mut size_t,
        physicalCameraIds: *mut *const *const ::std::os::raw::c_char,
    ) -> bool;
}
extern "C" {
    /// Converts a Java `android.hardware.camera2.CameraMetadata` object into
    /// a native handle via JNI.
    pub fn ACameraMetadata_fromCameraMetadata(
        env: *mut JNIEnv,
        cameraMetadata: jobject,
    ) -> *mut ACameraMetadata;
}
/// Output window type used by the capture APIs (alias of `ANativeWindow`).
pub type ACameraWindowType = ANativeWindow;
/// Opaque handle (zero-sized; only used behind a pointer).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraOutputTargets {
    _unused: [u8; 0],
}
/// Opaque handle (zero-sized; only used behind a pointer).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraOutputTarget {
    _unused: [u8; 0],
}
/// Opaque handle (zero-sized; only used behind a pointer).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACaptureRequest {
    _unused: [u8; 0],
}
// FFI declarations for the NdkCaptureRequest API. The `setEntry_*` family
// writes `count` elements of the given primitive type under `tag`; the
// `_physicalCamera` variants additionally target one physical camera of a
// logical multi-camera by id. Signatures must match the C headers exactly;
// do not hand-edit.
extern "C" {
    pub fn ACameraOutputTarget_create(
        window: *mut ACameraWindowType,
        output: *mut *mut ACameraOutputTarget,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACameraOutputTarget_free(output: *mut ACameraOutputTarget);
}
extern "C" {
    pub fn ACaptureRequest_addTarget(
        request: *mut ACaptureRequest,
        output: *const ACameraOutputTarget,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_removeTarget(
        request: *mut ACaptureRequest,
        output: *const ACameraOutputTarget,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_getConstEntry(
        request: *const ACaptureRequest,
        tag: u32,
        entry: *mut ACameraMetadata_const_entry,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_getAllTags(
        request: *const ACaptureRequest,
        numTags: *mut i32,
        tags: *mut *const u32,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_u8(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const u8,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_i32(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const i32,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_float(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const f32,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_i64(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const i64,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_double(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const f64,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_rational(
        request: *mut ACaptureRequest,
        tag: u32,
        count: u32,
        data: *const ACameraMetadata_rational,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_free(request: *mut ACaptureRequest);
}
extern "C" {
    /// Attaches an arbitrary user pointer to the request (retrievable in
    /// capture callbacks via `ACaptureRequest_getUserContext`).
    pub fn ACaptureRequest_setUserContext(
        request: *mut ACaptureRequest,
        context: *mut ::std::os::raw::c_void,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_getUserContext(
        request: *const ACaptureRequest,
        context: *mut *mut ::std::os::raw::c_void,
    ) -> camera_status_t;
}
extern "C" {
    /// Returns a newly allocated copy; release it with `ACaptureRequest_free`.
    pub fn ACaptureRequest_copy(src: *const ACaptureRequest) -> *mut ACaptureRequest;
}
extern "C" {
    pub fn ACaptureRequest_getConstEntry_physicalCamera(
        request: *const ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        entry: *mut ACameraMetadata_const_entry,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_u8(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const u8,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_i32(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const i32,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_float(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const f32,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_i64(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const i64,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_double(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const f64,
    ) -> camera_status_t;
}
extern "C" {
    pub fn ACaptureRequest_setEntry_physicalCamera_rational(
        request: *mut ACaptureRequest,
        physicalId: *const ::std::os::raw::c_char,
        tag: u32,
        count: u32,
        data: *const ACameraMetadata_rational,
    ) -> camera_status_t;
}
/// Opaque handle to a capture session (zero-sized; only used behind a
/// pointer).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraCaptureSession {
    _unused: [u8; 0],
}
/// C callback invoked with the user `context` and the session whose state
/// changed; `None` disables the callback.
pub type ACameraCaptureSession_stateCallback = ::std::option::Option<
    unsafe extern "C" fn(context: *mut ::std::os::raw::c_void, session: *mut ACameraCaptureSession),
>;
/// Session lifecycle callbacks passed to the NDK when creating a session.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraCaptureSession_stateCallbacks {
    pub context: *mut ::std::os::raw::c_void,
    pub onClosed: ACameraCaptureSession_stateCallback,
    pub onReady: ACameraCaptureSession_stateCallback,
    pub onActive: ACameraCaptureSession_stateCallback,
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of `ACameraCaptureSession_stateCallbacks` (4-byte pointers; this
/// binding was generated for a 32-bit target).
#[test]
fn bindgen_test_layout_ACameraCaptureSession_stateCallbacks() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraCaptureSession_stateCallbacks> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraCaptureSession_stateCallbacks>(),
        16usize,
        concat!(
            "Size of: ",
            stringify!(ACameraCaptureSession_stateCallbacks)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraCaptureSession_stateCallbacks>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraCaptureSession_stateCallbacks)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_stateCallbacks),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onClosed) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_stateCallbacks),
            "::",
            stringify!(onClosed)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onReady) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_stateCallbacks),
            "::",
            stringify!(onReady)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onActive) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_stateCallbacks),
            "::",
            stringify!(onActive)
        )
    );
}
// Values for `ACameraCaptureFailure::reason` (anonymous C enum).
pub const CAPTURE_FAILURE_REASON_FLUSHED: ::std::os::raw::c_uint = 0;
pub const CAPTURE_FAILURE_REASON_ERROR: ::std::os::raw::c_uint = 1;
// bindgen-synthesized alias for the anonymous C enum above.
pub type _bindgen_ty_62 = ::std::os::raw::c_uint;
/// Details passed to `onCaptureFailed` describing a failed capture.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraCaptureFailure {
    pub frameNumber: i64,
    pub reason: ::std::os::raw::c_int,
    pub sequenceId: ::std::os::raw::c_int,
    pub wasImageCaptured: bool,
}
/// Layout check generated by bindgen: verifies size, alignment, and field
/// offsets of `ACameraCaptureFailure` against the C ABI (8-byte alignment
/// from the `i64` field; trailing padding brings the size to 24).
#[test]
fn bindgen_test_layout_ACameraCaptureFailure() {
    // `addr_of!` through a MaybeUninit-derived pointer replaces the former
    // null-pointer dereference, which was undefined behavior.
    const UNINIT: ::std::mem::MaybeUninit<ACameraCaptureFailure> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraCaptureFailure>(),
        24usize,
        concat!("Size of: ", stringify!(ACameraCaptureFailure))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraCaptureFailure>(),
        8usize,
        concat!("Alignment of ", stringify!(ACameraCaptureFailure))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).frameNumber) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureFailure),
            "::",
            stringify!(frameNumber)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reason) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureFailure),
            "::",
            stringify!(reason)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).sequenceId) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureFailure),
            "::",
            stringify!(sequenceId)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).wasImageCaptured) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureFailure),
            "::",
            stringify!(wasImageCaptured)
        )
    );
}
// Per-capture C callback signatures; `None` disables the corresponding
// notification. All receive the user `context` pointer first.
/// Called when the sensor starts exposing for a request.
pub type ACameraCaptureSession_captureCallback_start = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *const ACaptureRequest,
        timestamp: i64,
    ),
>;
/// Called with partial or final capture result metadata.
pub type ACameraCaptureSession_captureCallback_result = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *mut ACaptureRequest,
        result: *const ACameraMetadata,
    ),
>;
/// Called when a capture fails; details are in `ACameraCaptureFailure`.
pub type ACameraCaptureSession_captureCallback_failed = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *mut ACaptureRequest,
        failure: *mut ACameraCaptureFailure,
    ),
>;
/// Called when a capture sequence finishes.
pub type ACameraCaptureSession_captureCallback_sequenceEnd = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        sequenceId: ::std::os::raw::c_int,
        frameNumber: i64,
    ),
>;
/// Called when a capture sequence is aborted.
pub type ACameraCaptureSession_captureCallback_sequenceAbort = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        sequenceId: ::std::os::raw::c_int,
    ),
>;
/// Called when an output buffer for `window` is lost for a frame.
pub type ACameraCaptureSession_captureCallback_bufferLost = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *mut ACaptureRequest,
        window: *mut ACameraWindowType,
        frameNumber: i64,
    ),
>;
/// Bundle of per-capture callbacks passed to the capture/repeat APIs.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraCaptureSession_captureCallbacks {
    pub context: *mut ::std::os::raw::c_void,
    pub onCaptureStarted: ACameraCaptureSession_captureCallback_start,
    pub onCaptureProgressed: ACameraCaptureSession_captureCallback_result,
    pub onCaptureCompleted: ACameraCaptureSession_captureCallback_result,
    pub onCaptureFailed: ACameraCaptureSession_captureCallback_failed,
    pub onCaptureSequenceCompleted: ACameraCaptureSession_captureCallback_sequenceEnd,
    pub onCaptureSequenceAborted: ACameraCaptureSession_captureCallback_sequenceAbort,
    pub onCaptureBufferLost: ACameraCaptureSession_captureCallback_bufferLost,
}
#[test]
fn bindgen_test_layout_ACameraCaptureSession_captureCallbacks() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: the bindings target a 32-bit platform).
    //
    // Field offsets are computed with `MaybeUninit` + `addr_of!` instead of
    // the original `&(*::std::ptr::null::<T>()).field` expression, which
    // dereferences a null pointer and is undefined behavior (flagged by
    // Miri; this is the fix adopted by upstream bindgen).
    const UNINIT: ::std::mem::MaybeUninit<ACameraCaptureSession_captureCallbacks> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraCaptureSession_captureCallbacks>(),
        32usize,
        concat!(
            "Size of: ",
            stringify!(ACameraCaptureSession_captureCallbacks)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraCaptureSession_captureCallbacks>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraCaptureSession_captureCallbacks)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureStarted) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureStarted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureProgressed) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureProgressed)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureCompleted) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureCompleted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureFailed) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureFailed)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureSequenceCompleted) as usize - ptr as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureSequenceCompleted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureSequenceAborted) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureSequenceAborted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureBufferLost) as usize - ptr as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_captureCallbacks),
            "::",
            stringify!(onCaptureBufferLost)
        )
    );
}
// Sentinel capture-sequence id meaning "no sequence".
pub const CAPTURE_SEQUENCE_ID_NONE: ::std::os::raw::c_int = -1;
pub type _bindgen_ty_63 = ::std::os::raw::c_int;
extern "C" {
    /// Closes the capture session. Raw FFI declaration.
    pub fn ACameraCaptureSession_close(session: *mut ACameraCaptureSession);
}
/// Opaque handle to a camera device; fields are hidden on the C side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraDevice {
    _unused: [u8; 0],
}
extern "C" {
    /// Retrieves the `ACameraDevice` backing `session` through the `device`
    /// out-parameter.
    pub fn ACameraCaptureSession_getDevice(
        session: *mut ACameraCaptureSession,
        device: *mut *mut ACameraDevice,
    ) -> camera_status_t;
}
extern "C" {
    /// Submits a burst of `numRequests` capture requests; the assigned
    /// sequence id is written through `captureSequenceId`.
    pub fn ACameraCaptureSession_capture(
        session: *mut ACameraCaptureSession,
        callbacks: *mut ACameraCaptureSession_captureCallbacks,
        numRequests: ::std::os::raw::c_int,
        requests: *mut *mut ACaptureRequest,
        captureSequenceId: *mut ::std::os::raw::c_int,
    ) -> camera_status_t;
}
extern "C" {
    /// Like `ACameraCaptureSession_capture`, but the request list repeats
    /// until stopped or replaced.
    pub fn ACameraCaptureSession_setRepeatingRequest(
        session: *mut ACameraCaptureSession,
        callbacks: *mut ACameraCaptureSession_captureCallbacks,
        numRequests: ::std::os::raw::c_int,
        requests: *mut *mut ACaptureRequest,
        captureSequenceId: *mut ::std::os::raw::c_int,
    ) -> camera_status_t;
}
extern "C" {
    /// Stops a previously configured repeating request.
    pub fn ACameraCaptureSession_stopRepeating(
        session: *mut ACameraCaptureSession,
    ) -> camera_status_t;
}
extern "C" {
    /// Aborts captures currently in flight on the session.
    pub fn ACameraCaptureSession_abortCaptures(
        session: *mut ACameraCaptureSession,
    ) -> camera_status_t;
}
/// Opaque handle describing one output target of a capture session.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACaptureSessionOutput {
    _unused: [u8; 0],
}
extern "C" {
    /// Updates a shared session output with a new configuration.
    pub fn ACameraCaptureSession_updateSharedOutput(
        session: *mut ACameraCaptureSession,
        output: *mut ACaptureSessionOutput,
    ) -> camera_status_t;
}
/// Nullable C callback: logical-camera variant of the capture-result
/// callback; in addition to the combined `result`, it receives parallel
/// arrays of `physicalResultCount` per-physical-camera ids and results.
pub type ACameraCaptureSession_logicalCamera_captureCallback_result = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *mut ACaptureRequest,
        result: *const ACameraMetadata,
        physicalResultCount: size_t,
        physicalCameraIds: *mut *const ::std::os::raw::c_char,
        physicalResults: *mut *const ACameraMetadata,
    ),
>;
/// Capture failure for a logical multi-camera: wraps the plain
/// `ACameraCaptureFailure` plus the id of the physical camera involved.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ALogicalCameraCaptureFailure {
    pub captureFailure: ACameraCaptureFailure,
    // NUL-terminated C string; NOTE(review): presumably owned by the
    // framework for the duration of the callback — confirm against NDK docs.
    pub physicalCameraId: *const ::std::os::raw::c_char,
}
#[test]
fn bindgen_test_layout_ALogicalCameraCaptureFailure() {
    // Layout check against the values bindgen recorded at generation time.
    // Offsets use `MaybeUninit` + `addr_of!` instead of the original
    // null-pointer dereference, which is undefined behavior (upstream
    // bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<ALogicalCameraCaptureFailure> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ALogicalCameraCaptureFailure>(),
        32usize,
        concat!("Size of: ", stringify!(ALogicalCameraCaptureFailure))
    );
    assert_eq!(
        ::std::mem::align_of::<ALogicalCameraCaptureFailure>(),
        8usize,
        concat!("Alignment of ", stringify!(ALogicalCameraCaptureFailure))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).captureFailure) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ALogicalCameraCaptureFailure),
            "::",
            stringify!(captureFailure)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).physicalCameraId) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(ALogicalCameraCaptureFailure),
            "::",
            stringify!(physicalCameraId)
        )
    );
}
/// Nullable C callback: logical-camera variant of the capture-failure
/// callback, carrying an `ALogicalCameraCaptureFailure`.
pub type ACameraCaptureSession_logicalCamera_captureCallback_failed = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        session: *mut ACameraCaptureSession,
        request: *mut ACaptureRequest,
        failure: *mut ALogicalCameraCaptureFailure,
    ),
>;
/// Callback table for logical multi-camera captures (taken by
/// `ACameraCaptureSession_logicalCamera_capture` / `_setRepeatingRequest`).
/// Same shape as `ACameraCaptureSession_captureCallbacks`, but the
/// completed/failed slots use the logical-camera callback types.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraCaptureSession_logicalCamera_captureCallbacks {
    // Opaque user data, passed back as the first argument of every callback.
    pub context: *mut ::std::os::raw::c_void,
    pub onCaptureStarted: ACameraCaptureSession_captureCallback_start,
    pub onCaptureProgressed: ACameraCaptureSession_captureCallback_result,
    pub onLogicalCameraCaptureCompleted: ACameraCaptureSession_logicalCamera_captureCallback_result,
    pub onLogicalCameraCaptureFailed: ACameraCaptureSession_logicalCamera_captureCallback_failed,
    pub onCaptureSequenceCompleted: ACameraCaptureSession_captureCallback_sequenceEnd,
    pub onCaptureSequenceAborted: ACameraCaptureSession_captureCallback_sequenceAbort,
    pub onCaptureBufferLost: ACameraCaptureSession_captureCallback_bufferLost,
}
#[test]
fn bindgen_test_layout_ACameraCaptureSession_logicalCamera_captureCallbacks() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<ACameraCaptureSession_logicalCamera_captureCallbacks> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraCaptureSession_logicalCamera_captureCallbacks>(),
        32usize,
        concat!(
            "Size of: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraCaptureSession_logicalCamera_captureCallbacks>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureStarted) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onCaptureStarted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureProgressed) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onCaptureProgressed)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onLogicalCameraCaptureCompleted) as usize - ptr as usize
        },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onLogicalCameraCaptureCompleted)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onLogicalCameraCaptureFailed) as usize - ptr as usize
        },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onLogicalCameraCaptureFailed)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onCaptureSequenceCompleted) as usize - ptr as usize
        },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onCaptureSequenceCompleted)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onCaptureSequenceAborted) as usize - ptr as usize
        },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onCaptureSequenceAborted)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCaptureBufferLost) as usize - ptr as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraCaptureSession_logicalCamera_captureCallbacks),
            "::",
            stringify!(onCaptureBufferLost)
        )
    );
}
extern "C" {
    /// Logical-camera variant of `ACameraCaptureSession_capture`; takes the
    /// logical-camera callback table.
    pub fn ACameraCaptureSession_logicalCamera_capture(
        session: *mut ACameraCaptureSession,
        callbacks: *mut ACameraCaptureSession_logicalCamera_captureCallbacks,
        numRequests: ::std::os::raw::c_int,
        requests: *mut *mut ACaptureRequest,
        captureSequenceId: *mut ::std::os::raw::c_int,
    ) -> camera_status_t;
}
extern "C" {
    /// Logical-camera variant of `ACameraCaptureSession_setRepeatingRequest`.
    pub fn ACameraCaptureSession_logicalCamera_setRepeatingRequest(
        session: *mut ACameraCaptureSession,
        callbacks: *mut ACameraCaptureSession_logicalCamera_captureCallbacks,
        numRequests: ::std::os::raw::c_int,
        requests: *mut *mut ACaptureRequest,
        captureSequenceId: *mut ::std::os::raw::c_int,
    ) -> camera_status_t;
}
/// C-layout list of camera ids: `numCameras` entries pointed to by
/// `cameraIds` (array of NUL-terminated C strings).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraIdList {
    pub numCameras: ::std::os::raw::c_int,
    pub cameraIds: *mut *const ::std::os::raw::c_char,
}
#[test]
fn bindgen_test_layout_ACameraIdList() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<ACameraIdList> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraIdList>(),
        8usize,
        concat!("Size of: ", stringify!(ACameraIdList))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraIdList>(),
        4usize,
        concat!("Alignment of ", stringify!(ACameraIdList))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).numCameras) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraIdList),
            "::",
            stringify!(numCameras)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cameraIds) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraIdList),
            "::",
            stringify!(cameraIds)
        )
    );
}
// Error codes delivered to `ACameraDevice_ErrorStateCallback`.
pub const ERROR_CAMERA_IN_USE: ::std::os::raw::c_uint = 1;
pub const ERROR_MAX_CAMERAS_IN_USE: ::std::os::raw::c_uint = 2;
pub const ERROR_CAMERA_DISABLED: ::std::os::raw::c_uint = 3;
pub const ERROR_CAMERA_DEVICE: ::std::os::raw::c_uint = 4;
pub const ERROR_CAMERA_SERVICE: ::std::os::raw::c_uint = 5;
pub type _bindgen_ty_64 = ::std::os::raw::c_uint;
/// Nullable C callback: device state change (receives the user context and
/// the affected `ACameraDevice`).
pub type ACameraDevice_StateCallback = ::std::option::Option<
    unsafe extern "C" fn(context: *mut ::std::os::raw::c_void, device: *mut ACameraDevice),
>;
/// Nullable C callback: device error; `error` is one of the
/// `ERROR_CAMERA_*` codes above.
pub type ACameraDevice_ErrorStateCallback = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        device: *mut ACameraDevice,
        error: ::std::os::raw::c_int,
    ),
>;
/// Callback table for camera-device lifecycle events
/// (taken by `ACameraManager_openCamera`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraDevice_StateCallbacks {
    // Opaque user data, passed back to both callbacks.
    pub context: *mut ::std::os::raw::c_void,
    pub onDisconnected: ACameraDevice_StateCallback,
    pub onError: ACameraDevice_ErrorStateCallback,
}
fn bindgen_test_layout_ACameraDevice_StateCallbacks() {
assert_eq!(
::std::mem::size_of::<ACameraDevice_StateCallbacks>(),
12usize,
concat!("Size of: ", stringify!(ACameraDevice_StateCallbacks))
);
assert_eq!(
::std::mem::align_of::<ACameraDevice_StateCallbacks>(),
4usize,
concat!("Alignment of ", stringify!(ACameraDevice_StateCallbacks))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ACameraDevice_StateCallbacks>())).context as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(ACameraDevice_StateCallbacks),
"::",
stringify!(context)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ACameraDevice_StateCallbacks>())).onDisconnected as *const _
as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(ACameraDevice_StateCallbacks),
"::",
stringify!(onDisconnected)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<ACameraDevice_StateCallbacks>())).onError as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(ACameraDevice_StateCallbacks),
"::",
stringify!(onError)
)
);
}
/// Lower-cased alias kept for C-header compatibility.
pub type ACameraDevice_stateCallbacks = ACameraDevice_StateCallbacks;
extern "C" {
    /// Closes the camera device handle.
    pub fn ACameraDevice_close(device: *mut ACameraDevice) -> camera_status_t;
}
extern "C" {
    /// Returns the camera id of `device` as a NUL-terminated C string.
    pub fn ACameraDevice_getId(device: *const ACameraDevice) -> *const ::std::os::raw::c_char;
}
// Request-template ids selecting preset capture settings.
pub const ACameraDevice_request_template_TEMPLATE_PREVIEW: ACameraDevice_request_template = 1;
pub const ACameraDevice_request_template_TEMPLATE_STILL_CAPTURE: ACameraDevice_request_template = 2;
pub const ACameraDevice_request_template_TEMPLATE_RECORD: ACameraDevice_request_template = 3;
pub const ACameraDevice_request_template_TEMPLATE_VIDEO_SNAPSHOT: ACameraDevice_request_template =
    4;
pub const ACameraDevice_request_template_TEMPLATE_ZERO_SHUTTER_LAG: ACameraDevice_request_template =
    5;
pub const ACameraDevice_request_template_TEMPLATE_MANUAL: ACameraDevice_request_template = 6;
pub type ACameraDevice_request_template = ::std::os::raw::c_uint;
extern "C" {
    /// Creates a capture request from one of the `TEMPLATE_*` ids; the new
    /// request is returned through the `request` out-parameter.
    pub fn ACameraDevice_createCaptureRequest(
        device: *const ACameraDevice,
        templateId: ACameraDevice_request_template,
        request: *mut *mut ACaptureRequest,
    ) -> camera_status_t;
}
/// Opaque container holding the set of `ACaptureSessionOutput`s for a
/// capture session.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACaptureSessionOutputContainer {
    _unused: [u8; 0],
}
extern "C" {
    /// Allocates a new, empty output container.
    pub fn ACaptureSessionOutputContainer_create(
        container: *mut *mut ACaptureSessionOutputContainer,
    ) -> camera_status_t;
}
extern "C" {
    /// Frees a container previously created with `_create`.
    pub fn ACaptureSessionOutputContainer_free(container: *mut ACaptureSessionOutputContainer);
}
extern "C" {
    /// Creates a session output wrapping the native window `anw`.
    pub fn ACaptureSessionOutput_create(
        anw: *mut ACameraWindowType,
        output: *mut *mut ACaptureSessionOutput,
    ) -> camera_status_t;
}
extern "C" {
    /// Frees a session output previously created with `_create`.
    pub fn ACaptureSessionOutput_free(output: *mut ACaptureSessionOutput);
}
extern "C" {
    /// Adds an output to the container.
    pub fn ACaptureSessionOutputContainer_add(
        container: *mut ACaptureSessionOutputContainer,
        output: *const ACaptureSessionOutput,
    ) -> camera_status_t;
}
extern "C" {
    /// Removes an output from the container.
    pub fn ACaptureSessionOutputContainer_remove(
        container: *mut ACaptureSessionOutputContainer,
        output: *const ACaptureSessionOutput,
    ) -> camera_status_t;
}
extern "C" {
    /// Creates a capture session on `device` targeting the given outputs;
    /// the session is returned through the `session` out-parameter.
    pub fn ACameraDevice_createCaptureSession(
        device: *mut ACameraDevice,
        outputs: *const ACaptureSessionOutputContainer,
        callbacks: *const ACameraCaptureSession_stateCallbacks,
        session: *mut *mut ACameraCaptureSession,
    ) -> camera_status_t;
}
extern "C" {
    /// Creates a *shared* session output (one that can later accept more
    /// windows via `ACaptureSessionSharedOutput_add`).
    pub fn ACaptureSessionSharedOutput_create(
        anw: *mut ACameraWindowType,
        output: *mut *mut ACaptureSessionOutput,
    ) -> camera_status_t;
}
extern "C" {
    /// Adds another native window to a shared session output.
    pub fn ACaptureSessionSharedOutput_add(
        output: *mut ACaptureSessionOutput,
        anw: *mut ACameraWindowType,
    ) -> camera_status_t;
}
extern "C" {
    /// Removes a native window from a shared session output.
    pub fn ACaptureSessionSharedOutput_remove(
        output: *mut ACaptureSessionOutput,
        anw: *mut ACameraWindowType,
    ) -> camera_status_t;
}
extern "C" {
    /// Variant of `ACameraDevice_createCaptureSession` that also takes
    /// initial session parameters (an `ACaptureRequest`).
    pub fn ACameraDevice_createCaptureSessionWithSessionParameters(
        device: *mut ACameraDevice,
        outputs: *const ACaptureSessionOutputContainer,
        sessionParameters: *const ACaptureRequest,
        callbacks: *const ACameraCaptureSession_stateCallbacks,
        session: *mut *mut ACameraCaptureSession,
    ) -> camera_status_t;
}
extern "C" {
    /// Creates a session output bound to a specific physical camera id of a
    /// logical multi-camera.
    pub fn ACaptureSessionPhysicalOutput_create(
        anw: *mut ACameraWindowType,
        physicalId: *const ::std::os::raw::c_char,
        output: *mut *mut ACaptureSessionOutput,
    ) -> camera_status_t;
}
extern "C" {
    /// Creates a capture request that also carries per-physical-camera
    /// settings for the ids in `physicalIdList`.
    pub fn ACameraDevice_createCaptureRequest_withPhysicalIds(
        device: *const ACameraDevice,
        templateId: ACameraDevice_request_template,
        physicalIdList: *const ACameraIdList,
        request: *mut *mut ACaptureRequest,
    ) -> camera_status_t;
}
extern "C" {
    /// Queries whether the given output configuration is supported by the
    /// device.
    pub fn ACameraDevice_isSessionConfigurationSupported(
        device: *const ACameraDevice,
        sessionOutputContainer: *const ACaptureSessionOutputContainer,
    ) -> camera_status_t;
}
/// Opaque handle to the camera manager (entry point for enumerating and
/// opening cameras).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraManager {
    _unused: [u8; 0],
}
extern "C" {
    /// Allocates a camera-manager instance.
    pub fn ACameraManager_create() -> *mut ACameraManager;
}
extern "C" {
    /// Destroys a camera-manager instance created with `_create`.
    pub fn ACameraManager_delete(manager: *mut ACameraManager);
}
extern "C" {
    /// Retrieves the list of available camera ids; free it with
    /// `ACameraManager_deleteCameraIdList`.
    pub fn ACameraManager_getCameraIdList(
        manager: *mut ACameraManager,
        cameraIdList: *mut *mut ACameraIdList,
    ) -> camera_status_t;
}
extern "C" {
    /// Frees a list returned by `ACameraManager_getCameraIdList`.
    pub fn ACameraManager_deleteCameraIdList(cameraIdList: *mut ACameraIdList);
}
/// Nullable C callback: camera availability change for the camera named by
/// `cameraId`.
pub type ACameraManager_AvailabilityCallback = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        cameraId: *const ::std::os::raw::c_char,
    ),
>;
/// Nullable C callback: availability change of a physical camera belonging
/// to the logical camera `cameraId`.
pub type ACameraManager_PhysicalCameraAvailabilityCallback = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        cameraId: *const ::std::os::raw::c_char,
        physicalCameraId: *const ::std::os::raw::c_char,
    ),
>;
/// Callback table for camera availability events (registered via
/// `ACameraManager_registerAvailabilityCallback`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraManager_AvailabilityListener {
    // Opaque user data, passed back to both callbacks.
    pub context: *mut ::std::os::raw::c_void,
    pub onCameraAvailable: ACameraManager_AvailabilityCallback,
    pub onCameraUnavailable: ACameraManager_AvailabilityCallback,
}
#[test]
fn bindgen_test_layout_ACameraManager_AvailabilityListener() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<ACameraManager_AvailabilityListener> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraManager_AvailabilityListener>(),
        12usize,
        concat!("Size of: ", stringify!(ACameraManager_AvailabilityListener))
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraManager_AvailabilityListener>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraManager_AvailabilityListener)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_AvailabilityListener),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCameraAvailable) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_AvailabilityListener),
            "::",
            stringify!(onCameraAvailable)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onCameraUnavailable) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_AvailabilityListener),
            "::",
            stringify!(onCameraUnavailable)
        )
    );
}
/// Alias kept for C-header compatibility.
pub type ACameraManager_AvailabilityCallbacks = ACameraManager_AvailabilityListener;
extern "C" {
    /// Registers an availability-callback table with the manager.
    pub fn ACameraManager_registerAvailabilityCallback(
        manager: *mut ACameraManager,
        callback: *const ACameraManager_AvailabilityCallbacks,
    ) -> camera_status_t;
}
extern "C" {
    /// Unregisters a callback table previously registered with
    /// `ACameraManager_registerAvailabilityCallback`.
    pub fn ACameraManager_unregisterAvailabilityCallback(
        manager: *mut ACameraManager,
        callback: *const ACameraManager_AvailabilityCallbacks,
    ) -> camera_status_t;
}
extern "C" {
    /// Retrieves the static characteristics (`ACameraMetadata`) of the
    /// camera named by `cameraId`.
    pub fn ACameraManager_getCameraCharacteristics(
        manager: *mut ACameraManager,
        cameraId: *const ::std::os::raw::c_char,
        characteristics: *mut *mut ACameraMetadata,
    ) -> camera_status_t;
}
extern "C" {
    /// Opens the camera named by `cameraId`; the device handle is returned
    /// through the `device` out-parameter and state events go to `callback`.
    pub fn ACameraManager_openCamera(
        manager: *mut ACameraManager,
        cameraId: *const ::std::os::raw::c_char,
        callback: *mut ACameraDevice_StateCallbacks,
        device: *mut *mut ACameraDevice,
    ) -> camera_status_t;
}
/// Nullable C callback: camera access priorities changed (context only).
pub type ACameraManager_AccessPrioritiesChangedCallback =
    ::std::option::Option<unsafe extern "C" fn(context: *mut ::std::os::raw::c_void)>;
/// Extended availability-callback table: embeds the basic
/// `ACameraManager_AvailabilityCallbacks` and adds access-priority and
/// physical-camera availability callbacks, plus reserved padding slots.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ACameraManager_ExtendedAvailabilityListener {
    pub availabilityCallbacks: ACameraManager_AvailabilityCallbacks,
    pub onCameraAccessPrioritiesChanged: ACameraManager_AccessPrioritiesChangedCallback,
    pub onPhysicalCameraAvailable: ACameraManager_PhysicalCameraAvailabilityCallback,
    pub onPhysicalCameraUnavailable: ACameraManager_PhysicalCameraAvailabilityCallback,
    // Reserved for future callbacks; keeps the C struct size stable.
    pub reserved: [*mut ::std::os::raw::c_void; 4usize],
}
#[test]
fn bindgen_test_layout_ACameraManager_ExtendedAvailabilityListener() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<ACameraManager_ExtendedAvailabilityListener> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<ACameraManager_ExtendedAvailabilityListener>(),
        40usize,
        concat!(
            "Size of: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<ACameraManager_ExtendedAvailabilityListener>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(ACameraManager_ExtendedAvailabilityListener)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).availabilityCallbacks) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener),
            "::",
            stringify!(availabilityCallbacks)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onCameraAccessPrioritiesChanged) as usize - ptr as usize
        },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener),
            "::",
            stringify!(onCameraAccessPrioritiesChanged)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onPhysicalCameraAvailable) as usize - ptr as usize
        },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener),
            "::",
            stringify!(onPhysicalCameraAvailable)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).onPhysicalCameraUnavailable) as usize - ptr as usize
        },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener),
            "::",
            stringify!(onPhysicalCameraUnavailable)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(ACameraManager_ExtendedAvailabilityListener),
            "::",
            stringify!(reserved)
        )
    );
}
/// Alias kept for C-header compatibility.
pub type ACameraManager_ExtendedAvailabilityCallbacks = ACameraManager_ExtendedAvailabilityListener;
extern "C" {
    /// Registers the extended availability-callback table.
    pub fn ACameraManager_registerExtendedAvailabilityCallback(
        manager: *mut ACameraManager,
        callback: *const ACameraManager_ExtendedAvailabilityCallbacks,
    ) -> camera_status_t;
}
extern "C" {
    /// Unregisters a table previously registered with
    /// `ACameraManager_registerExtendedAvailabilityCallback`.
    pub fn ACameraManager_unregisterExtendedAvailabilityCallback(
        manager: *mut ACameraManager,
        callback: *const ACameraManager_ExtendedAvailabilityCallbacks,
    ) -> camera_status_t;
}
/// Opaque handle to a single image acquired from an `AImageReader`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImage {
    _unused: [u8; 0],
}
// Pixel-format ids for `AImage` (values match the C `AIMAGE_FORMAT_*` enum;
// note they are not contiguous — some encode FourCC-style constants).
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RGBA_8888: AIMAGE_FORMATS = 1;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RGBX_8888: AIMAGE_FORMATS = 2;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RGB_888: AIMAGE_FORMATS = 3;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RGB_565: AIMAGE_FORMATS = 4;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RGBA_FP16: AIMAGE_FORMATS = 22;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_YUV_420_888: AIMAGE_FORMATS = 35;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_JPEG: AIMAGE_FORMATS = 256;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RAW16: AIMAGE_FORMATS = 32;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RAW_PRIVATE: AIMAGE_FORMATS = 36;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RAW10: AIMAGE_FORMATS = 37;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_RAW12: AIMAGE_FORMATS = 38;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_DEPTH16: AIMAGE_FORMATS = 1144402265;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_DEPTH_POINT_CLOUD: AIMAGE_FORMATS = 257;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_PRIVATE: AIMAGE_FORMATS = 34;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_Y8: AIMAGE_FORMATS = 538982489;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_HEIC: AIMAGE_FORMATS = 1212500294;
pub const AIMAGE_FORMATS_AIMAGE_FORMAT_DEPTH_JPEG: AIMAGE_FORMATS = 1768253795;
pub type AIMAGE_FORMATS = ::std::os::raw::c_uint;
/// C-layout crop rectangle in pixel coordinates (filled by
/// `AImage_getCropRect`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageCropRect {
    pub left: i32,
    pub top: i32,
    pub right: i32,
    pub bottom: i32,
}
#[test]
fn bindgen_test_layout_AImageCropRect() {
    // Layout check against the values bindgen recorded at generation time.
    // Offsets use `MaybeUninit` + `addr_of!` instead of the original
    // null-pointer dereference, which is undefined behavior (upstream
    // bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<AImageCropRect> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AImageCropRect>(),
        16usize,
        concat!("Size of: ", stringify!(AImageCropRect))
    );
    assert_eq!(
        ::std::mem::align_of::<AImageCropRect>(),
        4usize,
        concat!("Alignment of ", stringify!(AImageCropRect))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).left) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageCropRect),
            "::",
            stringify!(left)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).top) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageCropRect),
            "::",
            stringify!(top)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).right) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageCropRect),
            "::",
            stringify!(right)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bottom) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageCropRect),
            "::",
            stringify!(bottom)
        )
    );
}
extern "C" {
    /// Releases an image back to its reader.
    pub fn AImage_delete(image: *mut AImage);
}
extern "C" {
    /// Writes the image width in pixels through `width`.
    pub fn AImage_getWidth(image: *const AImage, width: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the image height in pixels through `height`.
    pub fn AImage_getHeight(image: *const AImage, height: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the image format (an `AIMAGE_FORMAT_*` value) through `format`.
    pub fn AImage_getFormat(image: *const AImage, format: *mut i32) -> media_status_t;
}
extern "C" {
    /// Fills `rect` with the image's crop rectangle.
    pub fn AImage_getCropRect(image: *const AImage, rect: *mut AImageCropRect) -> media_status_t;
}
extern "C" {
    /// Writes the image timestamp in nanoseconds through `timestampNs`.
    pub fn AImage_getTimestamp(image: *const AImage, timestampNs: *mut i64) -> media_status_t;
}
extern "C" {
    /// Writes the number of image planes through `numPlanes`.
    pub fn AImage_getNumberOfPlanes(image: *const AImage, numPlanes: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the pixel stride of plane `planeIdx` through `pixelStride`.
    pub fn AImage_getPlanePixelStride(
        image: *const AImage,
        planeIdx: ::std::os::raw::c_int,
        pixelStride: *mut i32,
    ) -> media_status_t;
}
extern "C" {
    /// Writes the row stride of plane `planeIdx` through `rowStride`.
    pub fn AImage_getPlaneRowStride(
        image: *const AImage,
        planeIdx: ::std::os::raw::c_int,
        rowStride: *mut i32,
    ) -> media_status_t;
}
extern "C" {
    /// Returns a pointer to the raw bytes of plane `planeIdx` (via `data`)
    /// together with the byte count (via `dataLength`).
    pub fn AImage_getPlaneData(
        image: *const AImage,
        planeIdx: ::std::os::raw::c_int,
        data: *mut *mut u8,
        dataLength: *mut ::std::os::raw::c_int,
    ) -> media_status_t;
}
extern "C" {
    /// Asynchronous `AImage_delete`: release is gated on `releaseFenceFd`.
    pub fn AImage_deleteAsync(image: *mut AImage, releaseFenceFd: ::std::os::raw::c_int);
}
extern "C" {
    /// Retrieves the `AHardwareBuffer` backing the image.
    pub fn AImage_getHardwareBuffer(
        image: *const AImage,
        buffer: *mut *mut AHardwareBuffer,
    ) -> media_status_t;
}
/// Opaque handle to an image reader that produces `AImage`s.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageReader {
    _unused: [u8; 0],
}
extern "C" {
    /// Creates a reader producing images of the given size/format, holding
    /// at most `maxImages` acquired images at once.
    pub fn AImageReader_new(
        width: i32,
        height: i32,
        format: i32,
        maxImages: i32,
        reader: *mut *mut AImageReader,
    ) -> media_status_t;
}
extern "C" {
    /// Destroys a reader created with `AImageReader_new`/`_newWithUsage`.
    pub fn AImageReader_delete(reader: *mut AImageReader);
}
extern "C" {
    /// Retrieves the `ANativeWindow` that producers render into.
    pub fn AImageReader_getWindow(
        reader: *mut AImageReader,
        window: *mut *mut ANativeWindow,
    ) -> media_status_t;
}
extern "C" {
    /// Writes the configured width through `width`.
    pub fn AImageReader_getWidth(reader: *const AImageReader, width: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the configured height through `height`.
    pub fn AImageReader_getHeight(reader: *const AImageReader, height: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the configured format through `format`.
    pub fn AImageReader_getFormat(reader: *const AImageReader, format: *mut i32) -> media_status_t;
}
extern "C" {
    /// Writes the configured maximum image count through `maxImages`.
    pub fn AImageReader_getMaxImages(
        reader: *const AImageReader,
        maxImages: *mut i32,
    ) -> media_status_t;
}
extern "C" {
    /// Acquires the next available image from the reader.
    pub fn AImageReader_acquireNextImage(
        reader: *mut AImageReader,
        image: *mut *mut AImage,
    ) -> media_status_t;
}
extern "C" {
    /// Acquires the most recent image, skipping older pending ones.
    pub fn AImageReader_acquireLatestImage(
        reader: *mut AImageReader,
        image: *mut *mut AImage,
    ) -> media_status_t;
}
/// Nullable C callback: a new image is available on `reader`.
pub type AImageReader_ImageCallback = ::std::option::Option<
    unsafe extern "C" fn(context: *mut ::std::os::raw::c_void, reader: *mut AImageReader),
>;
/// Listener passed to `AImageReader_setImageListener`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageReader_ImageListener {
    // Opaque user data, passed back to the callback.
    pub context: *mut ::std::os::raw::c_void,
    pub onImageAvailable: AImageReader_ImageCallback,
}
#[test]
fn bindgen_test_layout_AImageReader_ImageListener() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<AImageReader_ImageListener> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AImageReader_ImageListener>(),
        8usize,
        concat!("Size of: ", stringify!(AImageReader_ImageListener))
    );
    assert_eq!(
        ::std::mem::align_of::<AImageReader_ImageListener>(),
        4usize,
        concat!("Alignment of ", stringify!(AImageReader_ImageListener))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageReader_ImageListener),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onImageAvailable) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageReader_ImageListener),
            "::",
            stringify!(onImageAvailable)
        )
    );
}
extern "C" {
    /// Installs (or clears, with null) the image-available listener.
    pub fn AImageReader_setImageListener(
        reader: *mut AImageReader,
        listener: *mut AImageReader_ImageListener,
    ) -> media_status_t;
}
extern "C" {
    /// Variant of `AImageReader_new` that also takes hardware-buffer usage
    /// flags.
    pub fn AImageReader_newWithUsage(
        width: i32,
        height: i32,
        format: i32,
        usage: u64,
        maxImages: i32,
        reader: *mut *mut AImageReader,
    ) -> media_status_t;
}
extern "C" {
    /// Asynchronous `acquireNextImage`: also returns an acquire fence fd.
    pub fn AImageReader_acquireNextImageAsync(
        reader: *mut AImageReader,
        image: *mut *mut AImage,
        acquireFenceFd: *mut ::std::os::raw::c_int,
    ) -> media_status_t;
}
extern "C" {
    /// Asynchronous `acquireLatestImage`: also returns an acquire fence fd.
    pub fn AImageReader_acquireLatestImageAsync(
        reader: *mut AImageReader,
        image: *mut *mut AImage,
        acquireFenceFd: *mut ::std::os::raw::c_int,
    ) -> media_status_t;
}
/// Nullable C callback: an `AHardwareBuffer` was removed from the reader.
pub type AImageReader_BufferRemovedCallback = ::std::option::Option<
    unsafe extern "C" fn(
        context: *mut ::std::os::raw::c_void,
        reader: *mut AImageReader,
        buffer: *mut AHardwareBuffer,
    ),
>;
/// Listener passed to `AImageReader_setBufferRemovedListener`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AImageReader_BufferRemovedListener {
    // Opaque user data, passed back to the callback.
    pub context: *mut ::std::os::raw::c_void,
    pub onBufferRemoved: AImageReader_BufferRemovedCallback,
}
#[test]
fn bindgen_test_layout_AImageReader_BufferRemovedListener() {
    // Layout check against the values bindgen recorded at generation time
    // (4-byte pointers: 32-bit target). Offsets use `MaybeUninit` +
    // `addr_of!` instead of the original null-pointer dereference, which is
    // undefined behavior (upstream bindgen adopted the same fix).
    const UNINIT: ::std::mem::MaybeUninit<AImageReader_BufferRemovedListener> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AImageReader_BufferRemovedListener>(),
        8usize,
        concat!("Size of: ", stringify!(AImageReader_BufferRemovedListener))
    );
    assert_eq!(
        ::std::mem::align_of::<AImageReader_BufferRemovedListener>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(AImageReader_BufferRemovedListener)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).context) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageReader_BufferRemovedListener),
            "::",
            stringify!(context)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onBufferRemoved) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AImageReader_BufferRemovedListener),
            "::",
            stringify!(onBufferRemoved)
        )
    );
}
extern "C" {
    pub fn AImageReader_setBufferRemovedListener(
        reader: *mut AImageReader,
        listener: *mut AImageReader_BufferRemovedListener,
    ) -> media_status_t;
}
// Opaque handle to an NDK AMediaCrypto instance; only ever used behind a raw
// pointer. The zero-sized array keeps the type unconstructible from Rust.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaCrypto {
    _unused: [u8; 0],
}
// 16-byte DRM scheme UUID, as used by the AMediaCrypto/AMediaDrm APIs.
pub type AMediaUUID = [u8; 16usize];
extern "C" {
    pub fn AMediaCrypto_isCryptoSchemeSupported(uuid: *mut u8) -> bool;
}
extern "C" {
    pub fn AMediaCrypto_requiresSecureDecoderComponent(mime: *const ::std::os::raw::c_char)
        -> bool;
}
extern "C" {
    pub fn AMediaCrypto_new(
        uuid: *mut u8,
        initData: *const ::std::os::raw::c_void,
        initDataSize: size_t,
    ) -> *mut AMediaCrypto;
}
extern "C" {
    pub fn AMediaCrypto_delete(crypto: *mut AMediaCrypto);
}
// Opaque handle to an NDK AMediaFormat (key/value media description).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaFormat {
    _unused: [u8; 0],
}
// Typed getters/setters below read and write entries keyed by the
// AMEDIAFORMAT_KEY_* C string constants declared further down. Getters return
// `true` when the key exists with the requested type.
extern "C" {
    pub fn AMediaFormat_new() -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaFormat_delete(arg1: *mut AMediaFormat) -> media_status_t;
}
extern "C" {
    pub fn AMediaFormat_toString(arg1: *mut AMediaFormat) -> *const ::std::os::raw::c_char;
}
extern "C" {
    pub fn AMediaFormat_getInt32(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut i32,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getInt64(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut i64,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getFloat(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut f32,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getSize(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut size_t,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getBuffer(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        data: *mut *mut ::std::os::raw::c_void,
        size: *mut size_t,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getString(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut *const ::std::os::raw::c_char,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_setInt32(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: i32,
    );
}
extern "C" {
    pub fn AMediaFormat_setInt64(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: i64,
    );
}
extern "C" {
    pub fn AMediaFormat_setFloat(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: f32,
    );
}
extern "C" {
    pub fn AMediaFormat_setString(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AMediaFormat_setBuffer(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        data: *const ::std::os::raw::c_void,
        size: size_t,
    );
}
// C string constants naming AMediaFormat entries (the `name` arguments to the
// AMediaFormat getters/setters above). Exported by libmediandk; bindgen maps
// each `const char*` to a mutable static pointer.
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_PROFILE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AAC_SBR_MODE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AUDIO_SESSION_ID: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_BITRATE_MODE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_BIT_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CAPTURE_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CHANNEL_COUNT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CHANNEL_MASK: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COLOR_FORMAT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COLOR_RANGE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COLOR_STANDARD: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COLOR_TRANSFER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COMPLEXITY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD_0: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD_1: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD_2: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DISPLAY_CROP: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DISPLAY_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DISPLAY_WIDTH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DURATION: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_FRAME_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_GRID_COLUMNS: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_GRID_ROWS: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_HDR_STATIC_INFO: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_IS_ADTS: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_IS_AUTOSELECT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_IS_DEFAULT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_I_FRAME_INTERVAL: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LANGUAGE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LATENCY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LEVEL: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_INPUT_SIZE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_WIDTH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MIME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MPEG_USER_DATA: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_OPERATING_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PCM_ENCODING: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PRIORITY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PROFILE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ROTATION: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SAMPLE_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SEI: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SLICE_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_STRIDE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TEMPORAL_LAYERING: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TILE_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TILE_WIDTH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TIME_US: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TRACK_ID: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TRACK_INDEX: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_WIDTH: *const ::std::os::raw::c_char;
}
// Additional AMediaFormat accessors: double/rect getters and setters, plus
// whole-format clear/copy operations.
extern "C" {
    pub fn AMediaFormat_getDouble(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        out: *mut f64,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_getRect(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        left: *mut i32,
        top: *mut i32,
        right: *mut i32,
        bottom: *mut i32,
    ) -> bool;
}
extern "C" {
    pub fn AMediaFormat_setDouble(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: f64,
    );
}
extern "C" {
    pub fn AMediaFormat_setSize(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        value: size_t,
    );
}
extern "C" {
    pub fn AMediaFormat_setRect(
        arg1: *mut AMediaFormat,
        name: *const ::std::os::raw::c_char,
        left: i32,
        top: i32,
        right: i32,
        bottom: i32,
    );
}
extern "C" {
    pub fn AMediaFormat_clear(arg1: *mut AMediaFormat);
}
extern "C" {
    pub fn AMediaFormat_copy(to: *mut AMediaFormat, from: *mut AMediaFormat) -> media_status_t;
}
// Extended format keys: track metadata, crypto parameters, thumbnails, and
// miscellaneous encoder/decoder tuning entries.
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ALBUM: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ALBUMART: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ALBUMARTIST: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ARTIST: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID:
        *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_AUTHOR: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_BITS_PER_SAMPLE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CDTRACKNUMBER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COMPILATION: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_COMPOSER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_IV: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_KEY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_MODE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD_AVC: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_CSD_HEVC: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_D263: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_DISCNUMBER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ENCODER_DELAY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ENCODER_PADDING: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ESDS: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_EXIF_OFFSET: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_EXIF_SIZE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_FRAME_COUNT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_GENRE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_ICC_PROFILE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_IS_SYNC_FRAME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LOCATION: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LOOP: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LYRICIST: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MANUFACTURER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_BIT_RATE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_PSSH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SAR_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SAR_WIDTH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TARGET_TIME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_THUMBNAIL_TIME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_TITLE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_VALID_SAMPLES: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_YEAR: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LOW_LATENCY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_HDR10_PLUS_INFO: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_XMP_OFFSET: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_XMP_SIZE: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND: *const ::std::os::raw::c_char;
}
// Video quantization-parameter (QP) bound keys, split per frame type (I/P/B).
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_B_MAX: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_B_MIN: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_I_MAX: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_I_MIN: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_MAX: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_MIN: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_P_MAX: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIAFORMAT_VIDEO_QP_P_MIN: *const ::std::os::raw::c_char;
}
// Opaque handle to an NDK media codec instance; only used behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaCodec {
    _unused: [u8; 0],
}
// Per-buffer metadata returned by AMediaCodec_dequeueOutputBuffer: byte offset
// and size of the payload, presentation timestamp (field name says it is in
// microseconds), and AMEDIACODEC_BUFFER_FLAG_* bits.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaCodecBufferInfo {
    pub offset: i32,
    pub size: i32,
    pub presentationTimeUs: i64,
    pub flags: u32,
}
#[test]
fn bindgen_test_layout_AMediaCodecBufferInfo() {
    // Layout check for AMediaCodecBufferInfo. Offsets are probed via
    // MaybeUninit + `addr_of!` rather than dereferencing a null pointer,
    // which is undefined behavior (newer bindgen emits this same pattern).
    // All asserted values are unchanged from the generated originals.
    const UNINIT: ::std::mem::MaybeUninit<AMediaCodecBufferInfo> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AMediaCodecBufferInfo>(),
        24usize,
        concat!("Size of: ", stringify!(AMediaCodecBufferInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<AMediaCodecBufferInfo>(),
        8usize,
        concat!("Alignment of ", stringify!(AMediaCodecBufferInfo))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).offset) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecBufferInfo),
            "::",
            stringify!(offset)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).size) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecBufferInfo),
            "::",
            stringify!(size)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).presentationTimeUs) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecBufferInfo),
            "::",
            stringify!(presentationTimeUs)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecBufferInfo),
            "::",
            stringify!(flags)
        )
    );
}
// Opaque handle describing per-buffer encryption metadata for secure decode.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaCodecCryptoInfo {
    _unused: [u8; 0],
}
// Buffer/configure flag bits and special return codes used by the
// AMediaCodec dequeue APIs (negative values are status codes, not indices).
pub const AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG: ::std::os::raw::c_int = 2;
pub const AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM: ::std::os::raw::c_int = 4;
pub const AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME: ::std::os::raw::c_int = 8;
pub const AMEDIACODEC_CONFIGURE_FLAG_ENCODE: ::std::os::raw::c_int = 1;
pub const AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED: ::std::os::raw::c_int = -3;
pub const AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED: ::std::os::raw::c_int = -2;
pub const AMEDIACODEC_INFO_TRY_AGAIN_LATER: ::std::os::raw::c_int = -1;
// Anonymous C enum carrier type generated by bindgen for the constants above.
pub type _bindgen_ty_65 = ::std::os::raw::c_int;
// Callback signatures for asynchronous codec notification (input available,
// output available, output format changed, error); each receives the opaque
// `userdata` registered via AMediaCodec_setAsyncNotifyCallback.
pub type AMediaCodecOnAsyncInputAvailable = ::std::option::Option<
    unsafe extern "C" fn(
        codec: *mut AMediaCodec,
        userdata: *mut ::std::os::raw::c_void,
        index: i32,
    ),
>;
pub type AMediaCodecOnAsyncOutputAvailable = ::std::option::Option<
    unsafe extern "C" fn(
        codec: *mut AMediaCodec,
        userdata: *mut ::std::os::raw::c_void,
        index: i32,
        bufferInfo: *mut AMediaCodecBufferInfo,
    ),
>;
pub type AMediaCodecOnAsyncFormatChanged = ::std::option::Option<
    unsafe extern "C" fn(
        codec: *mut AMediaCodec,
        userdata: *mut ::std::os::raw::c_void,
        format: *mut AMediaFormat,
    ),
>;
pub type AMediaCodecOnAsyncError = ::std::option::Option<
    unsafe extern "C" fn(
        codec: *mut AMediaCodec,
        userdata: *mut ::std::os::raw::c_void,
        error: media_status_t,
        actionCode: i32,
        detail: *const ::std::os::raw::c_char,
    ),
>;
// C-layout bundle of the four async callbacks above.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaCodecOnAsyncNotifyCallback {
    pub onAsyncInputAvailable: AMediaCodecOnAsyncInputAvailable,
    pub onAsyncOutputAvailable: AMediaCodecOnAsyncOutputAvailable,
    pub onAsyncFormatChanged: AMediaCodecOnAsyncFormatChanged,
    pub onAsyncError: AMediaCodecOnAsyncError,
}
#[test]
fn bindgen_test_layout_AMediaCodecOnAsyncNotifyCallback() {
    // Layout check for AMediaCodecOnAsyncNotifyCallback. Offsets are probed
    // via MaybeUninit + `addr_of!` rather than dereferencing a null pointer,
    // which is undefined behavior (newer bindgen emits this same pattern).
    // Expected values (size 16, align 4) assume a 32-bit target ABI.
    const UNINIT: ::std::mem::MaybeUninit<AMediaCodecOnAsyncNotifyCallback> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AMediaCodecOnAsyncNotifyCallback>(),
        16usize,
        concat!("Size of: ", stringify!(AMediaCodecOnAsyncNotifyCallback))
    );
    assert_eq!(
        ::std::mem::align_of::<AMediaCodecOnAsyncNotifyCallback>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(AMediaCodecOnAsyncNotifyCallback)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onAsyncInputAvailable) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecOnAsyncNotifyCallback),
            "::",
            stringify!(onAsyncInputAvailable)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onAsyncOutputAvailable) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecOnAsyncNotifyCallback),
            "::",
            stringify!(onAsyncOutputAvailable)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onAsyncFormatChanged) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecOnAsyncNotifyCallback),
            "::",
            stringify!(onAsyncFormatChanged)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).onAsyncError) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaCodecOnAsyncNotifyCallback),
            "::",
            stringify!(onAsyncError)
        )
    );
}
// Raw FFI declarations for the NDK AMediaCodec lifecycle and buffer APIs
// (create/configure/start/stop, synchronous dequeue/queue, surfaces, async
// callbacks). bindgen-generated; do not hand-edit.
extern "C" {
    pub fn AMediaCodec_createCodecByName(name: *const ::std::os::raw::c_char) -> *mut AMediaCodec;
}
extern "C" {
    pub fn AMediaCodec_createDecoderByType(
        mime_type: *const ::std::os::raw::c_char,
    ) -> *mut AMediaCodec;
}
extern "C" {
    pub fn AMediaCodec_createEncoderByType(
        mime_type: *const ::std::os::raw::c_char,
    ) -> *mut AMediaCodec;
}
extern "C" {
    pub fn AMediaCodec_delete(arg1: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_configure(
        arg1: *mut AMediaCodec,
        format: *const AMediaFormat,
        surface: *mut ANativeWindow,
        crypto: *mut AMediaCrypto,
        flags: u32,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_start(arg1: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_stop(arg1: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_flush(arg1: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_getInputBuffer(
        arg1: *mut AMediaCodec,
        idx: size_t,
        out_size: *mut size_t,
    ) -> *mut u8;
}
extern "C" {
    pub fn AMediaCodec_getOutputBuffer(
        arg1: *mut AMediaCodec,
        idx: size_t,
        out_size: *mut size_t,
    ) -> *mut u8;
}
extern "C" {
    pub fn AMediaCodec_dequeueInputBuffer(arg1: *mut AMediaCodec, timeoutUs: i64) -> ssize_t;
}
// Bionic libc assertion-failure entry points, presumably pulled in by the
// generated header set rather than part of the media API proper.
extern "C" {
    pub fn __assert(
        __file: *const ::std::os::raw::c_char,
        __line: ::std::os::raw::c_int,
        __msg: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn __assert2(
        __file: *const ::std::os::raw::c_char,
        __line: ::std::os::raw::c_int,
        __function: *const ::std::os::raw::c_char,
        __msg: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    pub fn AMediaCodec_queueInputBuffer(
        arg1: *mut AMediaCodec,
        idx: size_t,
        offset: off_t,
        size: size_t,
        time: u64,
        flags: u32,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_queueSecureInputBuffer(
        arg1: *mut AMediaCodec,
        idx: size_t,
        offset: off_t,
        arg2: *mut AMediaCodecCryptoInfo,
        time: u64,
        flags: u32,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_dequeueOutputBuffer(
        arg1: *mut AMediaCodec,
        info: *mut AMediaCodecBufferInfo,
        timeoutUs: i64,
    ) -> ssize_t;
}
extern "C" {
    pub fn AMediaCodec_getOutputFormat(arg1: *mut AMediaCodec) -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaCodec_releaseOutputBuffer(
        arg1: *mut AMediaCodec,
        idx: size_t,
        render: bool,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_setOutputSurface(
        arg1: *mut AMediaCodec,
        surface: *mut ANativeWindow,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_releaseOutputBufferAtTime(
        mData: *mut AMediaCodec,
        idx: size_t,
        timestampNs: i64,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_createInputSurface(
        mData: *mut AMediaCodec,
        surface: *mut *mut ANativeWindow,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_createPersistentInputSurface(
        surface: *mut *mut ANativeWindow,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_setInputSurface(
        mData: *mut AMediaCodec,
        surface: *mut ANativeWindow,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_setParameters(
        mData: *mut AMediaCodec,
        params: *const AMediaFormat,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_signalEndOfInputStream(mData: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_getBufferFormat(arg1: *mut AMediaCodec, index: size_t) -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaCodec_getName(
        arg1: *mut AMediaCodec,
        out_name: *mut *mut ::std::os::raw::c_char,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_releaseName(arg1: *mut AMediaCodec, name: *mut ::std::os::raw::c_char);
}
extern "C" {
    pub fn AMediaCodec_setAsyncNotifyCallback(
        arg1: *mut AMediaCodec,
        callback: AMediaCodecOnAsyncNotifyCallback,
        userdata: *mut ::std::os::raw::c_void,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_releaseCrypto(arg1: *mut AMediaCodec) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodec_getInputFormat(arg1: *mut AMediaCodec) -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaCodecActionCode_isRecoverable(actionCode: i32) -> bool;
}
extern "C" {
    pub fn AMediaCodecActionCode_isTransient(actionCode: i32) -> bool;
}
// Encryption modes for AMediaCodecCryptoInfo (C enum flattened by bindgen).
pub const cryptoinfo_mode_t_AMEDIACODECRYPTOINFO_MODE_CLEAR: cryptoinfo_mode_t = 0;
pub const cryptoinfo_mode_t_AMEDIACODECRYPTOINFO_MODE_AES_CTR: cryptoinfo_mode_t = 1;
pub const cryptoinfo_mode_t_AMEDIACODECRYPTOINFO_MODE_AES_WV: cryptoinfo_mode_t = 2;
pub const cryptoinfo_mode_t_AMEDIACODECRYPTOINFO_MODE_AES_CBC: cryptoinfo_mode_t = 3;
pub type cryptoinfo_mode_t = ::std::os::raw::c_uint;
// Pattern-encryption block counts consumed by AMediaCodecCryptoInfo_setPattern.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct cryptoinfo_pattern_t {
    pub encryptBlocks: i32,
    pub skipBlocks: i32,
}
#[test]
fn bindgen_test_layout_cryptoinfo_pattern_t() {
    // Layout check for cryptoinfo_pattern_t. Offsets are probed via
    // MaybeUninit + `addr_of!` rather than dereferencing a null pointer,
    // which is undefined behavior (newer bindgen emits this same pattern).
    const UNINIT: ::std::mem::MaybeUninit<cryptoinfo_pattern_t> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<cryptoinfo_pattern_t>(),
        8usize,
        concat!("Size of: ", stringify!(cryptoinfo_pattern_t))
    );
    assert_eq!(
        ::std::mem::align_of::<cryptoinfo_pattern_t>(),
        4usize,
        concat!("Alignment of ", stringify!(cryptoinfo_pattern_t))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).encryptBlocks) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(cryptoinfo_pattern_t),
            "::",
            stringify!(encryptBlocks)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).skipBlocks) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(cryptoinfo_pattern_t),
            "::",
            stringify!(skipBlocks)
        )
    );
}
// AMediaCodecCryptoInfo constructor/accessors, plus the AMEDIACODEC_KEY_*
// C string constants used with AMediaCodec_setParameters.
extern "C" {
    pub fn AMediaCodecCryptoInfo_new(
        numsubsamples: ::std::os::raw::c_int,
        key: *mut u8,
        iv: *mut u8,
        mode: cryptoinfo_mode_t,
        clearbytes: *mut size_t,
        encryptedbytes: *mut size_t,
    ) -> *mut AMediaCodecCryptoInfo;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_delete(arg1: *mut AMediaCodecCryptoInfo) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_setPattern(
        info: *mut AMediaCodecCryptoInfo,
        pattern: *mut cryptoinfo_pattern_t,
    );
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getNumSubSamples(arg1: *mut AMediaCodecCryptoInfo) -> size_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getKey(
        arg1: *mut AMediaCodecCryptoInfo,
        dst: *mut u8,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getIV(
        arg1: *mut AMediaCodecCryptoInfo,
        dst: *mut u8,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getMode(arg1: *mut AMediaCodecCryptoInfo) -> cryptoinfo_mode_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getClearBytes(
        arg1: *mut AMediaCodecCryptoInfo,
        dst: *mut size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaCodecCryptoInfo_getEncryptedBytes(
        arg1: *mut AMediaCodecCryptoInfo,
        dst: *mut size_t,
    ) -> media_status_t;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_HDR10_PLUS_INFO: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_LOW_LATENCY: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_OFFSET_TIME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_REQUEST_SYNC_FRAME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_SUSPEND: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_SUSPEND_TIME: *const ::std::os::raw::c_char;
}
extern "C" {
    pub static mut AMEDIACODEC_KEY_VIDEO_BITRATE: *const ::std::os::raw::c_char;
}
// Opaque handle for a user-supplied media data source; behavior is provided
// through the callback setters below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaDataSource {
    _unused: [u8; 0],
}
// User callbacks for a custom data source: positioned read, total size query,
// and close notification. Each receives the opaque `userdata` registered via
// AMediaDataSource_setUserdata.
pub type AMediaDataSourceReadAt = ::std::option::Option<
    unsafe extern "C" fn(
        userdata: *mut ::std::os::raw::c_void,
        offset: off64_t,
        buffer: *mut ::std::os::raw::c_void,
        size: size_t,
    ) -> ssize_t,
>;
pub type AMediaDataSourceGetSize =
    ::std::option::Option<unsafe extern "C" fn(userdata: *mut ::std::os::raw::c_void) -> ssize_t>;
pub type AMediaDataSourceClose =
    ::std::option::Option<unsafe extern "C" fn(userdata: *mut ::std::os::raw::c_void)>;
extern "C" {
    pub fn AMediaDataSource_new() -> *mut AMediaDataSource;
}
pub type AMediaDataSourceGetAvailableSize = ::std::option::Option<
    unsafe extern "C" fn(userdata: *mut ::std::os::raw::c_void, offset: off64_t) -> ssize_t,
>;
extern "C" {
    pub fn AMediaDataSource_newUri(
        uri: *const ::std::os::raw::c_char,
        numheaders: ::std::os::raw::c_int,
        key_values: *const *const ::std::os::raw::c_char,
    ) -> *mut AMediaDataSource;
}
extern "C" {
    pub fn AMediaDataSource_delete(arg1: *mut AMediaDataSource);
}
extern "C" {
    pub fn AMediaDataSource_setUserdata(
        arg1: *mut AMediaDataSource,
        userdata: *mut ::std::os::raw::c_void,
    );
}
extern "C" {
    pub fn AMediaDataSource_setReadAt(arg1: *mut AMediaDataSource, arg2: AMediaDataSourceReadAt);
}
extern "C" {
    pub fn AMediaDataSource_setGetSize(arg1: *mut AMediaDataSource, arg2: AMediaDataSourceGetSize);
}
extern "C" {
    pub fn AMediaDataSource_setClose(arg1: *mut AMediaDataSource, arg2: AMediaDataSourceClose);
}
extern "C" {
    pub fn AMediaDataSource_close(arg1: *mut AMediaDataSource);
}
extern "C" {
    pub fn AMediaDataSource_setGetAvailableSize(
        arg1: *mut AMediaDataSource,
        arg2: AMediaDataSourceGetAvailableSize,
    );
}
// Opaque handle to an NDK AMediaDrm instance.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaDrm {
    _unused: [u8; 0],
}
// Borrowed byte span used throughout the AMediaDrm API (session ids, key set
// ids, secure stops, ...); aliased below under role-specific names.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaDrmByteArray {
    pub ptr: *const u8,
    pub length: size_t,
}
#[test]
fn bindgen_test_layout_AMediaDrmByteArray() {
    // Layout check for AMediaDrmByteArray. Offsets are probed via
    // MaybeUninit + `addr_of!` rather than dereferencing a null pointer,
    // which is undefined behavior (newer bindgen emits this same pattern).
    // Expected values (size 8, align 4) assume a 32-bit target ABI.
    const UNINIT: ::std::mem::MaybeUninit<AMediaDrmByteArray> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AMediaDrmByteArray>(),
        8usize,
        concat!("Size of: ", stringify!(AMediaDrmByteArray))
    );
    assert_eq!(
        ::std::mem::align_of::<AMediaDrmByteArray>(),
        4usize,
        concat!("Alignment of ", stringify!(AMediaDrmByteArray))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).ptr) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaDrmByteArray),
            "::",
            stringify!(ptr)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).length) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaDrmByteArray),
            "::",
            stringify!(length)
        )
    );
}
// Role-specific aliases for the common byte-span type above.
pub type AMediaDrmSessionId = AMediaDrmByteArray;
pub type AMediaDrmScope = AMediaDrmByteArray;
pub type AMediaDrmKeySetId = AMediaDrmByteArray;
pub type AMediaDrmSecureStop = AMediaDrmByteArray;
pub type AMediaDrmKeyId = AMediaDrmByteArray;
// DRM event types delivered to the event listener (C enum flattened by bindgen).
pub const AMediaDrmEventType_EVENT_PROVISION_REQUIRED: AMediaDrmEventType = 1;
pub const AMediaDrmEventType_EVENT_KEY_REQUIRED: AMediaDrmEventType = 2;
pub const AMediaDrmEventType_EVENT_KEY_EXPIRED: AMediaDrmEventType = 3;
pub const AMediaDrmEventType_EVENT_VENDOR_DEFINED: AMediaDrmEventType = 4;
pub const AMediaDrmEventType_EVENT_SESSION_RECLAIMED: AMediaDrmEventType = 5;
pub type AMediaDrmEventType = ::std::os::raw::c_uint;
// Key request categories (streaming vs. offline vs. release).
pub const AMediaDrmKeyType_KEY_TYPE_STREAMING: AMediaDrmKeyType = 1;
pub const AMediaDrmKeyType_KEY_TYPE_OFFLINE: AMediaDrmKeyType = 2;
pub const AMediaDrmKeyType_KEY_TYPE_RELEASE: AMediaDrmKeyType = 3;
pub type AMediaDrmKeyType = ::std::os::raw::c_uint;
// C-string key/value pair used for DRM property and option maps.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaDrmKeyValuePair {
    pub mKey: *const ::std::os::raw::c_char,
    pub mValue: *const ::std::os::raw::c_char,
}
#[test]
fn bindgen_test_layout_AMediaDrmKeyValuePair() {
    // Layout check for AMediaDrmKeyValuePair. Offsets are probed via
    // MaybeUninit + `addr_of!` rather than dereferencing a null pointer,
    // which is undefined behavior (newer bindgen emits this same pattern).
    // Expected values (size 8, align 4) assume a 32-bit target ABI.
    const UNINIT: ::std::mem::MaybeUninit<AMediaDrmKeyValuePair> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<AMediaDrmKeyValuePair>(),
        8usize,
        concat!("Size of: ", stringify!(AMediaDrmKeyValuePair))
    );
    assert_eq!(
        ::std::mem::align_of::<AMediaDrmKeyValuePair>(),
        4usize,
        concat!("Alignment of ", stringify!(AMediaDrmKeyValuePair))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).mKey) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaDrmKeyValuePair),
            "::",
            stringify!(mKey)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).mValue) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(AMediaDrmKeyValuePair),
            "::",
            stringify!(mValue)
        )
    );
}
pub type AMediaDrmKeyValue = AMediaDrmKeyValuePair;
pub const AMediaKeyStatusType_KEY_STATUS_TYPE_USABLE: AMediaKeyStatusType = 0;
pub const AMediaKeyStatusType_KEY_STATUS_TYPE_EXPIRED: AMediaKeyStatusType = 1;
pub const AMediaKeyStatusType_KEY_STATUS_TYPE_OUTPUTNOTALLOWED: AMediaKeyStatusType = 2;
pub const AMediaKeyStatusType_KEY_STATUS_TYPE_STATUSPENDING: AMediaKeyStatusType = 3;
pub const AMediaKeyStatusType_KEY_STATUS_TYPE_INTERNALERROR: AMediaKeyStatusType = 4;
pub type AMediaKeyStatusType = ::std::os::raw::c_uint;
pub use self::AMediaKeyStatusType as AMediaDrmKeyStatusType;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaDrmKeyStatus {
pub keyId: AMediaDrmKeyId,
pub keyType: AMediaDrmKeyStatusType,
}
#[test]
fn bindgen_test_layout_AMediaDrmKeyStatus() {
assert_eq!(
::std::mem::size_of::<AMediaDrmKeyStatus>(),
12usize,
concat!("Size of: ", stringify!(AMediaDrmKeyStatus))
);
assert_eq!(
::std::mem::align_of::<AMediaDrmKeyStatus>(),
4usize,
concat!("Alignment of ", stringify!(AMediaDrmKeyStatus))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<AMediaDrmKeyStatus>())).keyId as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(AMediaDrmKeyStatus),
"::",
stringify!(keyId)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<AMediaDrmKeyStatus>())).keyType as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(AMediaDrmKeyStatus),
"::",
stringify!(keyType)
)
);
}
pub type AMediaDrmEventListener = ::std::option::Option<
unsafe extern "C" fn(
arg1: *mut AMediaDrm,
sessionId: *const AMediaDrmSessionId,
eventType: AMediaDrmEventType,
extra: ::std::os::raw::c_int,
data: *const u8,
dataSize: size_t,
),
>;
pub type AMediaDrmExpirationUpdateListener = ::std::option::Option<
unsafe extern "C" fn(
arg1: *mut AMediaDrm,
sessionId: *const AMediaDrmSessionId,
expiryTimeInMS: i64,
),
>;
pub type AMediaDrmKeysChangeListener = ::std::option::Option<
unsafe extern "C" fn(
arg1: *mut AMediaDrm,
sessionId: *const AMediaDrmSessionId,
keyStatus: *const AMediaDrmKeyStatus,
numKeys: size_t,
hasNewUsableKey: bool,
),
>;
// --- AMediaDrm: scheme query, construction and teardown -------------------
extern "C" {
    pub fn AMediaDrm_isCryptoSchemeSupported(
        uuid: *const u8,
        mimeType: *const ::std::os::raw::c_char,
    ) -> bool;
}
extern "C" {
    pub fn AMediaDrm_createByUUID(uuid: *const u8) -> *mut AMediaDrm;
}
extern "C" {
    pub fn AMediaDrm_release(arg1: *mut AMediaDrm);
}
// --- Listener registration -------------------------------------------------
extern "C" {
    pub fn AMediaDrm_setOnEventListener(
        arg1: *mut AMediaDrm,
        listener: AMediaDrmEventListener,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_setOnExpirationUpdateListener(
        arg1: *mut AMediaDrm,
        listener: AMediaDrmExpirationUpdateListener,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_setOnKeysChangeListener(
        arg1: *mut AMediaDrm,
        listener: AMediaDrmKeysChangeListener,
    ) -> media_status_t;
}
// --- Session lifecycle -----------------------------------------------------
extern "C" {
    pub fn AMediaDrm_openSession(
        arg1: *mut AMediaDrm,
        sessionId: *mut AMediaDrmSessionId,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_closeSession(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
    ) -> media_status_t;
}
// --- Key requests and key management ---------------------------------------
extern "C" {
    pub fn AMediaDrm_getKeyRequest(
        arg1: *mut AMediaDrm,
        scope: *const AMediaDrmScope,
        init: *const u8,
        initSize: size_t,
        mimeType: *const ::std::os::raw::c_char,
        keyType: AMediaDrmKeyType,
        optionalParameters: *const AMediaDrmKeyValue,
        numOptionalParameters: size_t,
        keyRequest: *mut *const u8,
        keyRequestSize: *mut size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_provideKeyResponse(
        arg1: *mut AMediaDrm,
        scope: *const AMediaDrmScope,
        response: *const u8,
        responseSize: size_t,
        keySetId: *mut AMediaDrmKeySetId,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_restoreKeys(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        keySetId: *const AMediaDrmKeySetId,
    ) -> media_status_t;
}
// NOTE(review): bindgen typed `keySetId` as `*const AMediaDrmSessionId` here
// while the name suggests a key-set id; both are aliases of the same
// byte-array struct, so the binding is still ABI-correct.
extern "C" {
    pub fn AMediaDrm_removeKeys(
        arg1: *mut AMediaDrm,
        keySetId: *const AMediaDrmSessionId,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_queryKeyStatus(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        keyValuePairs: *mut AMediaDrmKeyValue,
        numPairs: *mut size_t,
    ) -> media_status_t;
}
// --- Provisioning -----------------------------------------------------------
extern "C" {
    pub fn AMediaDrm_getProvisionRequest(
        arg1: *mut AMediaDrm,
        provisionRequest: *mut *const u8,
        provisionRequestSize: *mut size_t,
        serverUrl: *mut *const ::std::os::raw::c_char,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_provideProvisionResponse(
        arg1: *mut AMediaDrm,
        response: *const u8,
        responseSize: size_t,
    ) -> media_status_t;
}
// --- Secure stops -----------------------------------------------------------
extern "C" {
    pub fn AMediaDrm_getSecureStops(
        arg1: *mut AMediaDrm,
        secureStops: *mut AMediaDrmSecureStop,
        numSecureStops: *mut size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_releaseSecureStops(
        arg1: *mut AMediaDrm,
        ssRelease: *const AMediaDrmSecureStop,
    ) -> media_status_t;
}
// --- Property access --------------------------------------------------------
extern "C" {
    pub fn AMediaDrm_getPropertyString(
        arg1: *mut AMediaDrm,
        propertyName: *const ::std::os::raw::c_char,
        propertyValue: *mut *const ::std::os::raw::c_char,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_getPropertyByteArray(
        arg1: *mut AMediaDrm,
        propertyName: *const ::std::os::raw::c_char,
        propertyValue: *mut AMediaDrmByteArray,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_setPropertyString(
        arg1: *mut AMediaDrm,
        propertyName: *const ::std::os::raw::c_char,
        value: *const ::std::os::raw::c_char,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_setPropertyByteArray(
        arg1: *mut AMediaDrm,
        propertyName: *const ::std::os::raw::c_char,
        value: *const u8,
        valueSize: size_t,
    ) -> media_status_t;
}
// --- In-session crypto primitives -------------------------------------------
// The expected lengths of `keyId` / `iv` are not carried in the signatures;
// TODO(review): confirm the buffer-size requirements against NdkMediaDrm.h.
extern "C" {
    pub fn AMediaDrm_encrypt(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        cipherAlgorithm: *const ::std::os::raw::c_char,
        keyId: *mut u8,
        iv: *mut u8,
        input: *const u8,
        output: *mut u8,
        dataSize: size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_decrypt(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        cipherAlgorithm: *const ::std::os::raw::c_char,
        keyId: *mut u8,
        iv: *mut u8,
        input: *const u8,
        output: *mut u8,
        dataSize: size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_sign(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        macAlgorithm: *const ::std::os::raw::c_char,
        keyId: *mut u8,
        message: *mut u8,
        messageSize: size_t,
        signature: *mut u8,
        signatureSize: *mut size_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaDrm_verify(
        arg1: *mut AMediaDrm,
        sessionId: *const AMediaDrmSessionId,
        macAlgorithm: *const ::std::os::raw::c_char,
        keyId: *mut u8,
        message: *const u8,
        messageSize: size_t,
        signature: *const u8,
        signatureSize: size_t,
    ) -> media_status_t;
}
/// Opaque handle to a native `AMediaExtractor`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaExtractor {
    _unused: [u8; 0],
}
// --- AMediaExtractor: lifecycle and data-source setup -----------------------
extern "C" {
    pub fn AMediaExtractor_new() -> *mut AMediaExtractor;
}
extern "C" {
    pub fn AMediaExtractor_delete(arg1: *mut AMediaExtractor) -> media_status_t;
}
extern "C" {
    pub fn AMediaExtractor_setDataSourceFd(
        arg1: *mut AMediaExtractor,
        fd: ::std::os::raw::c_int,
        offset: off64_t,
        length: off64_t,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaExtractor_setDataSource(
        arg1: *mut AMediaExtractor,
        location: *const ::std::os::raw::c_char,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaExtractor_setDataSourceCustom(
        arg1: *mut AMediaExtractor,
        src: *mut AMediaDataSource,
    ) -> media_status_t;
}
// --- Track enumeration and selection ----------------------------------------
extern "C" {
    pub fn AMediaExtractor_getTrackCount(arg1: *mut AMediaExtractor) -> size_t;
}
extern "C" {
    pub fn AMediaExtractor_getTrackFormat(
        arg1: *mut AMediaExtractor,
        idx: size_t,
    ) -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaExtractor_selectTrack(arg1: *mut AMediaExtractor, idx: size_t) -> media_status_t;
}
extern "C" {
    pub fn AMediaExtractor_unselectTrack(arg1: *mut AMediaExtractor, idx: size_t)
        -> media_status_t;
}
// --- Sample access -----------------------------------------------------------
extern "C" {
    pub fn AMediaExtractor_readSampleData(
        arg1: *mut AMediaExtractor,
        buffer: *mut u8,
        capacity: size_t,
    ) -> ssize_t;
}
extern "C" {
    pub fn AMediaExtractor_getSampleFlags(arg1: *mut AMediaExtractor) -> u32;
}
extern "C" {
    pub fn AMediaExtractor_getSampleTrackIndex(arg1: *mut AMediaExtractor)
        -> ::std::os::raw::c_int;
}
extern "C" {
    pub fn AMediaExtractor_getSampleTime(arg1: *mut AMediaExtractor) -> i64;
}
extern "C" {
    pub fn AMediaExtractor_advance(arg1: *mut AMediaExtractor) -> bool;
}
// Seek anchor modes for `AMediaExtractor_seekTo`.
pub const SeekMode_AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC: SeekMode = 0;
pub const SeekMode_AMEDIAEXTRACTOR_SEEK_NEXT_SYNC: SeekMode = 1;
pub const SeekMode_AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC: SeekMode = 2;
pub type SeekMode = ::std::os::raw::c_uint;
extern "C" {
    pub fn AMediaExtractor_seekTo(
        arg1: *mut AMediaExtractor,
        seekPosUs: i64,
        mode: SeekMode,
    ) -> media_status_t;
}
/// One PSSH (protection scheme specific header) entry: scheme UUID plus blob.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct PsshEntry {
    pub uuid: AMediaUUID,
    pub datalen: size_t,
    pub data: *mut ::std::os::raw::c_void,
}
/// bindgen layout check: `datalen` at offset 16 confirms `AMediaUUID` is
/// 16 bytes; `data` at offset 20 again implies 4-byte (32-bit) pointers.
#[test]
fn bindgen_test_layout_PsshEntry() {
    assert_eq!(
        ::std::mem::size_of::<PsshEntry>(),
        24usize,
        concat!("Size of: ", stringify!(PsshEntry))
    );
    assert_eq!(
        ::std::mem::align_of::<PsshEntry>(),
        4usize,
        concat!("Alignment of ", stringify!(PsshEntry))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<PsshEntry>())).uuid as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(PsshEntry),
            "::",
            stringify!(uuid)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<PsshEntry>())).datalen as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(PsshEntry),
            "::",
            stringify!(datalen)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<PsshEntry>())).data as *const _ as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(PsshEntry),
            "::",
            stringify!(data)
        )
    );
}
/// Count-prefixed list of `PsshEntry`. The C flexible array member maps to
/// bindgen's `__IncompleteArrayField`, which is why the struct is not `Copy`.
#[repr(C)]
#[derive(Debug)]
pub struct PsshInfo {
    pub numentries: size_t,
    pub entries: __IncompleteArrayField<PsshEntry>,
}
/// bindgen layout check: the reported size (4) covers only `numentries`;
/// the incomplete-array field contributes zero size.
#[test]
fn bindgen_test_layout_PsshInfo() {
    assert_eq!(
        ::std::mem::size_of::<PsshInfo>(),
        4usize,
        concat!("Size of: ", stringify!(PsshInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<PsshInfo>(),
        4usize,
        concat!("Alignment of ", stringify!(PsshInfo))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<PsshInfo>())).numentries as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(PsshInfo),
            "::",
            stringify!(numentries)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<PsshInfo>())).entries as *const _ as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(PsshInfo),
            "::",
            stringify!(entries)
        )
    );
}
// --- DRM-related extractor queries -------------------------------------------
extern "C" {
    pub fn AMediaExtractor_getPsshInfo(arg1: *mut AMediaExtractor) -> *mut PsshInfo;
}
extern "C" {
    pub fn AMediaExtractor_getSampleCryptoInfo(
        arg1: *mut AMediaExtractor,
    ) -> *mut AMediaCodecCryptoInfo;
}
// Bit flags returned by `AMediaExtractor_getSampleFlags`.
pub const AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC: ::std::os::raw::c_uint = 1;
pub const AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED: ::std::os::raw::c_uint = 2;
pub type _bindgen_ty_66 = ::std::os::raw::c_uint;
// --- Misc extractor queries ---------------------------------------------------
extern "C" {
    pub fn AMediaExtractor_getFileFormat(arg1: *mut AMediaExtractor) -> *mut AMediaFormat;
}
extern "C" {
    pub fn AMediaExtractor_getSampleSize(arg1: *mut AMediaExtractor) -> ssize_t;
}
extern "C" {
    pub fn AMediaExtractor_getCachedDuration(arg1: *mut AMediaExtractor) -> i64;
}
extern "C" {
    pub fn AMediaExtractor_getSampleFormat(
        ex: *mut AMediaExtractor,
        fmt: *mut AMediaFormat,
    ) -> media_status_t;
}
/// Opaque handle to a native `AMediaMuxer`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct AMediaMuxer {
    _unused: [u8; 0],
}
// Container formats accepted by `AMediaMuxer_new`.
pub const OutputFormat_AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4: OutputFormat = 0;
pub const OutputFormat_AMEDIAMUXER_OUTPUT_FORMAT_WEBM: OutputFormat = 1;
pub const OutputFormat_AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP: OutputFormat = 2;
pub type OutputFormat = ::std::os::raw::c_uint;
// Behaviour selector for `AMediaMuxer_append`.
pub const AppendMode_AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP: AppendMode = 0;
pub const AppendMode_AMEDIAMUXER_APPEND_TO_EXISTING_DATA: AppendMode = 1;
pub type AppendMode = ::std::os::raw::c_uint;
// --- AMediaMuxer: lifecycle ---------------------------------------------------
extern "C" {
    pub fn AMediaMuxer_new(fd: ::std::os::raw::c_int, format: OutputFormat) -> *mut AMediaMuxer;
}
extern "C" {
    pub fn AMediaMuxer_delete(arg1: *mut AMediaMuxer) -> media_status_t;
}
// --- Output metadata ----------------------------------------------------------
extern "C" {
    pub fn AMediaMuxer_setLocation(
        arg1: *mut AMediaMuxer,
        latitude: f32,
        longitude: f32,
    ) -> media_status_t;
}
extern "C" {
    pub fn AMediaMuxer_setOrientationHint(
        arg1: *mut AMediaMuxer,
        degrees: ::std::os::raw::c_int,
    ) -> media_status_t;
}
// --- Track setup and sample writing -------------------------------------------
extern "C" {
    pub fn AMediaMuxer_addTrack(arg1: *mut AMediaMuxer, format: *const AMediaFormat) -> ssize_t;
}
extern "C" {
    pub fn AMediaMuxer_start(arg1: *mut AMediaMuxer) -> media_status_t;
}
extern "C" {
    pub fn AMediaMuxer_stop(arg1: *mut AMediaMuxer) -> media_status_t;
}
extern "C" {
    pub fn AMediaMuxer_writeSampleData(
        muxer: *mut AMediaMuxer,
        trackIdx: size_t,
        data: *const u8,
        info: *const AMediaCodecBufferInfo,
    ) -> media_status_t;
}
// --- Append mode and introspection --------------------------------------------
extern "C" {
    pub fn AMediaMuxer_append(fd: ::std::os::raw::c_int, mode: AppendMode) -> *mut AMediaMuxer;
}
extern "C" {
    pub fn AMediaMuxer_getTrackCount(arg1: *mut AMediaMuxer) -> ssize_t;
}
extern "C" {
    pub fn AMediaMuxer_getTrackFormat(muxer: *mut AMediaMuxer, idx: size_t) -> *mut AMediaFormat;
}
// `va_list` as seen on this target: a single opaque pointer.
// NOTE(review): this single-pointer shape matches a 32-bit ARM-style ABI —
// confirm against the target ABI documentation before relying on it.
pub type __builtin_va_list = __va_list;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __va_list {
    pub __ap: *mut ::std::os::raw::c_void,
}
/// bindgen layout check for `__va_list` (one 4-byte pointer).
#[test]
fn bindgen_test_layout___va_list() {
    assert_eq!(
        ::std::mem::size_of::<__va_list>(),
        4usize,
        concat!("Size of: ", stringify!(__va_list))
    );
    assert_eq!(
        ::std::mem::align_of::<__va_list>(),
        4usize,
        concat!("Alignment of ", stringify!(__va_list))
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<__va_list>())).__ap as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(__va_list),
            "::",
            stringify!(__ap)
        )
    );
}
| {
assert_eq!(
::std::mem::size_of::<siginfo>(),
128usize,
concat!("Size of: ", stringify!(siginfo))
);
assert_eq!(
::std::mem::align_of::<siginfo>(),
4usize,
concat!("Alignment of ", stringify!(siginfo))
);
} |
conf.py | # -*- coding: utf-8 -*-
#
# relate documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 26 18:41:17 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# Point Django at the project settings and initialise it, so that autodoc can
# import project modules that depend on a configured Django environment.
os.environ["DJANGO_SETTINGS_MODULE"] = "relate.settings"
import django
django.setup()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions enabled for this project; autodoc extracts API
# documentation from the project's docstrings.
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'RELATE'
copyright = u'2014, Andreas Kloeckner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2015.1'
# The full version, including alpha/beta/rc tags.
# (Kept identical to `version`; there are no pre-release tags here.)
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# Use sphinx_bootstrap_theme when it is installed; otherwise fall back to the
# Sphinx default theme and tell the user how to install the preferred one.
try:
    import sphinx_bootstrap_theme
except ImportError:
    # Catch only ImportError: the previous bare `except:` also swallowed
    # SystemExit/KeyboardInterrupt and would hide unrelated bugs raised while
    # importing the theme package.
    from warnings import warn
    warn("I would like to use the sphinx bootstrap theme, but can't find it.\n"
            "'pip install sphinx_bootstrap_theme' to fix.")
else:
    # Activate the theme.
    html_theme = 'bootstrap'
    html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()

    # Theme options are theme-specific and customize the look and feel of a
    # theme further. For a list of options available for each theme, see the
    # documentation.
    html_theme_options = {
        "navbar_fixed_top": "true",
        "navbar_site_name": "Contents",
        'bootstrap_version': '3',
        'source_link_position': 'footer',
    }
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# (Used by the HTML builder; see also `html_extra_path` below.)
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'relatedoc'


# -- Options for LaTeX output ---------------------------------------------

# All LaTeX settings are left at their Sphinx defaults; uncomment an entry
# below to override it.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'relate.tex', u'RELATE Documentation',
   u'Andreas Kloeckner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# A single man page, built from the master document into manual section 1.
man_pages = [
    ('index', 'relate', u'RELATE Documentation',
     [u'Andreas Kloeckner'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [ |
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False | ('index', 'relate', u'RELATE Documentation',
u'Andreas Kloeckner', 'relate', 'One line description of project.',
'Miscellaneous'),
] |
array_reader.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::any::Any;
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use std::marker::PhantomData;
use std::mem::size_of;
use std::result::Result::Ok;
use std::sync::Arc;
use std::vec::Vec;
use parquet_format::TimeUnit;
use arrow::array::{
new_empty_array, Array, ArrayData, ArrayDataBuilder, ArrayRef, BinaryArray,
BinaryBuilder, BooleanArray, BooleanBufferBuilder, BooleanBuilder, DecimalBuilder,
FixedSizeBinaryArray, FixedSizeBinaryBuilder, GenericListArray, Int16BufferBuilder,
Int32Array, Int64Array, MapArray, OffsetSizeTrait, PrimitiveArray, PrimitiveBuilder,
StringArray, StringBuilder, StructArray,
};
use arrow::buffer::{Buffer, MutableBuffer};
use arrow::datatypes::{
ArrowPrimitiveType, BooleanType as ArrowBooleanType, DataType as ArrowType,
Date32Type as ArrowDate32Type, Date64Type as ArrowDate64Type,
DurationMicrosecondType as ArrowDurationMicrosecondType,
DurationMillisecondType as ArrowDurationMillisecondType,
DurationNanosecondType as ArrowDurationNanosecondType,
DurationSecondType as ArrowDurationSecondType, Field,
Float32Type as ArrowFloat32Type, Float64Type as ArrowFloat64Type,
Int16Type as ArrowInt16Type, Int32Type as ArrowInt32Type,
Int64Type as ArrowInt64Type, Int8Type as ArrowInt8Type, IntervalUnit, Schema,
SchemaRef, Time32MillisecondType as ArrowTime32MillisecondType,
Time32SecondType as ArrowTime32SecondType,
Time64MicrosecondType as ArrowTime64MicrosecondType,
Time64NanosecondType as ArrowTime64NanosecondType, TimeUnit as ArrowTimeUnit,
TimestampMicrosecondType as ArrowTimestampMicrosecondType,
TimestampMillisecondType as ArrowTimestampMillisecondType,
TimestampNanosecondType as ArrowTimestampNanosecondType,
TimestampSecondType as ArrowTimestampSecondType, ToByteSlice,
TimeUnit::{Microsecond, Nanosecond, Millisecond},
UInt16Type as ArrowUInt16Type, UInt32Type as ArrowUInt32Type,
UInt64Type as ArrowUInt64Type, UInt8Type as ArrowUInt8Type,
};
use arrow::util::bit_util;
use crate::arrow::converter::{
Converter, DecimalArrayConverter, DecimalConverter, FixedLenBinaryConverter,
FixedSizeArrayConverter, Int96ArrayConverter, Int96Converter,
IntervalDayTimeArrayConverter, IntervalDayTimeConverter,
IntervalYearMonthArrayConverter, IntervalYearMonthConverter,
};
use crate::arrow::record_reader::buffer::{ScalarValue, ValuesBuffer};
use crate::arrow::record_reader::{GenericRecordReader, RecordReader};
use crate::arrow::schema::parquet_to_arrow_field;
use crate::basic::{ConvertedType, Repetition, Type as PhysicalType, LogicalType};
use crate::column::page::PageIterator;
use crate::column::reader::decoder::ColumnValueDecoder;
use crate::column::reader::ColumnReaderImpl;
use crate::data_type::{
BoolType, DataType, DoubleType, FixedLenByteArrayType, FloatType, Int32Type,
Int64Type, Int96Type,
};
use crate::errors::{ParquetError, ParquetError::ArrowError, Result};
use crate::file::reader::{FilePageIterator, FileReader};
use crate::schema::types::{
ColumnDescPtr, ColumnDescriptor, ColumnPath, SchemaDescPtr, Type, TypePtr,
};
use crate::schema::visitor::TypeVisitor;
mod byte_array;
mod byte_array_dictionary;
mod dictionary_buffer;
mod offset_buffer;
#[cfg(test)]
mod test_util;
pub use byte_array::make_byte_array_reader;
pub use byte_array_dictionary::make_byte_array_dictionary_reader;
/// Array reader reads parquet data into arrow array.
pub trait ArrayReader: Send {
    /// Returns `self` as [`Any`] so callers can downcast to a concrete reader.
    fn as_any(&self) -> &dyn Any;
    /// Returns the arrow type of this array reader.
    fn get_data_type(&self) -> &ArrowType;
    /// Reads at most `batch_size` records into an arrow array and return it.
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef>;
    /// If this array has a non-zero definition level, i.e. has a nullable parent
    /// array, returns the definition levels of data from the last call of `next_batch`
    ///
    /// Otherwise returns None
    ///
    /// This is used by parent [`ArrayReader`] to compute their null bitmaps
    fn get_def_levels(&self) -> Option<&[i16]>;
    /// If this array has a non-zero repetition level, i.e. has a repeated parent
    /// array, returns the repetition levels of data from the last call of `next_batch`
    ///
    /// Otherwise returns None
    ///
    /// This is used by parent [`ArrayReader`] to compute their array offsets
    fn get_rep_levels(&self) -> Option<&[i16]>;
}
/// A collection of row groups
pub trait RowGroupCollection {
    /// Get schema of parquet file.
    fn schema(&self) -> Result<SchemaDescPtr>;
    /// Returns an iterator over the column chunks for particular column
    /// (`i` is the column index within the schema).
    fn column_chunks(&self, i: usize) -> Result<Box<dyn PageIterator>>;
}
impl RowGroupCollection for Arc<dyn FileReader> {
    /// Schema of the underlying parquet file, taken from the file metadata.
    fn schema(&self) -> Result<SchemaDescPtr> {
        let descriptor = self.metadata().file_metadata().schema_descr_ptr();
        Ok(descriptor)
    }
    /// Page iterator over the chunks of the column at `column_index`.
    fn column_chunks(&self, column_index: usize) -> Result<Box<dyn PageIterator>> {
        FilePageIterator::new(column_index, Arc::clone(self))
            .map(|iterator| Box::new(iterator) as Box<dyn PageIterator>)
    }
}
/// Drives `record_reader` against `pages` until `batch_size` records have
/// been accumulated or the page iterator is exhausted.
///
/// Returns the number of records actually read; this is smaller than
/// `batch_size` only when `pages` ran out of column chunks.
fn read_records<V, CV>(
    record_reader: &mut GenericRecordReader<V, CV>,
    pages: &mut dyn PageIterator,
    batch_size: usize,
) -> Result<usize>
where
    V: ValuesBuffer + Default,
    CV: ColumnValueDecoder<Slice = V::Slice>,
{
    let mut total_read = 0usize;
    loop {
        let remaining = batch_size - total_read;
        if remaining == 0 {
            // Batch is full.
            break;
        }
        let read_now = record_reader.read_records(remaining)?;
        total_read += read_now;
        if read_now == remaining {
            continue;
        }
        // Current column chunk is drained; advance to the next one, if any.
        match pages.next() {
            Some(page_reader) => record_reader.set_page_reader(page_reader?)?,
            None => break, // no more pages: return a short batch
        }
    }
    Ok(total_read)
}
/// A NullArrayReader reads Parquet columns stored as null int32s with an Arrow
/// NullArray type.
pub struct NullArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    // Always `ArrowType::Null` (set by `new`).
    data_type: ArrowType,
    pages: Box<dyn PageIterator>,
    // Definition/repetition levels captured from the last `next_batch` call.
    def_levels_buffer: Option<Buffer>,
    rep_levels_buffer: Option<Buffer>,
    column_desc: ColumnDescPtr,
    record_reader: RecordReader<T>,
    // NOTE(review): `record_reader: RecordReader<T>` already mentions `T`, so
    // this marker looks redundant — confirm before removing.
    _type_marker: PhantomData<T>,
}
impl<T> NullArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    /// Construct null array reader.
    ///
    /// The arrow type is fixed to `ArrowType::Null`; the level buffers start
    /// empty and are populated by `next_batch`.
    pub fn new(pages: Box<dyn PageIterator>, column_desc: ColumnDescPtr) -> Result<Self> {
        let record_reader = RecordReader::<T>::new(column_desc.clone());
        Ok(Self {
            data_type: ArrowType::Null,
            pages,
            def_levels_buffer: None,
            rep_levels_buffer: None,
            column_desc,
            record_reader,
            _type_marker: PhantomData,
        })
    }
}
/// `ArrayReader` implementation for `NullArrayReader`.
/// (The original comment said "primitive array reader", which was copied from
/// the block below; this impl is for the null reader.)
impl<T> ArrayReader for NullArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Returns data type of this reader (always `ArrowType::Null`).
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    /// Reads at most `batch_size` records into array.
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        let records_read =
            read_records(&mut self.record_reader, self.pages.as_mut(), batch_size)?;
        // convert to arrays: a NullArray only needs the record count
        let array = arrow::array::NullArray::new(records_read);
        // save definition and repetition buffers so parents can query them
        // via `get_def_levels` / `get_rep_levels`
        self.def_levels_buffer = self.record_reader.consume_def_levels()?;
        self.rep_levels_buffer = self.record_reader.consume_rep_levels()?;
        // Must consume bitmap buffer before resetting the record reader
        self.record_reader.consume_bitmap_buffer()?;
        self.record_reader.reset();
        Ok(Arc::new(array))
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        self.def_levels_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        self.rep_levels_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
}
/// Primitive array readers are leaves of array reader tree. They accept page iterator
/// and read them into primitive arrays.
pub struct PrimitiveArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    // Target arrow type; either supplied by the caller or derived from the
    // parquet column descriptor in the constructor.
    data_type: ArrowType,
    pages: Box<dyn PageIterator>,
    // Levels captured from the most recent batch, if any.
    def_levels_buffer: Option<Buffer>,
    rep_levels_buffer: Option<Buffer>,
    column_desc: ColumnDescPtr,
    record_reader: RecordReader<T>,
}
impl<T> PrimitiveArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    /// Construct primitive array reader.
    pub fn new(
        pages: Box<dyn PageIterator>,
        column_desc: ColumnDescPtr,
        arrow_type: Option<ArrowType>,
    ) -> Result<Self> {
        Self::new_with_options(pages, column_desc, arrow_type, false)
    }
    /// Construct primitive array reader with ability to only compute null mask and not
    /// buffer level data
    pub fn new_with_options(
        pages: Box<dyn PageIterator>,
        column_desc: ColumnDescPtr,
        arrow_type: Option<ArrowType>,
        null_mask_only: bool,
    ) -> Result<Self> {
        // Check if Arrow type is specified, else create it from Parquet type
        let data_type = match arrow_type {
            Some(t) => t,
            None => parquet_to_arrow_field(column_desc.as_ref())?
                .data_type()
                .clone(),
        };
        let record_reader =
            RecordReader::<T>::new_with_options(column_desc.clone(), null_mask_only);
        // Fix: the struct initializer was truncated/garbled; restore the full
        // field list (level buffers start empty, matching NullArrayReader::new).
        Ok(Self {
            data_type,
            pages,
            def_levels_buffer: None,
            rep_levels_buffer: None,
            column_desc,
            record_reader,
        })
    }
}
/// Implementation of primitive array reader.
impl<T> ArrayReader for PrimitiveArrayReader<T>
where
    T: DataType,
    T::T: ScalarValue,
{
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Returns data type of primitive array.
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    /// Reads at most `batch_size` records into array.
    ///
    /// Steps: (1) decode records with the record reader, (2) build an Arrow
    /// array using the Parquet *physical* type, (3) cast it to the requested
    /// `target_type`, (4) stash the level buffers for `get_def_levels` /
    /// `get_rep_levels`, then reset the record reader.
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        // The number of records read is tracked inside the record reader;
        // the return value is not needed here.
        read_records(&mut self.record_reader, self.pages.as_mut(), batch_size)?;
        let target_type = self.get_data_type().clone();
        // Map the Parquet physical type to the Arrow type used for the initial
        // (pre-cast) array.
        let arrow_data_type = match T::get_physical_type() {
            PhysicalType::BOOLEAN => ArrowBooleanType::DATA_TYPE,
            PhysicalType::INT32 => {
                match target_type {
                    ArrowType::UInt32 => {
                        // follow C++ implementation and use overflow/reinterpret cast from i32 to u32 which will map
                        // `i32::MIN..0` to `(i32::MAX as u32)..u32::MAX`
                        ArrowUInt32Type::DATA_TYPE
                    }
                    _ => ArrowInt32Type::DATA_TYPE,
                }
            }
            PhysicalType::INT64 => {
                match target_type {
                    ArrowType::UInt64 => {
                        // follow C++ implementation and use overflow/reinterpret cast from i64 to u64 which will map
                        // `i64::MIN..0` to `(i64::MAX as u64)..u64::MAX`
                        ArrowUInt64Type::DATA_TYPE
                    }
                    _ => ArrowInt64Type::DATA_TYPE,
                }
            }
            PhysicalType::FLOAT => ArrowFloat32Type::DATA_TYPE,
            PhysicalType::DOUBLE => ArrowFloat64Type::DATA_TYPE,
            PhysicalType::INT96
            | PhysicalType::BYTE_ARRAY
            | PhysicalType::FIXED_LEN_BYTE_ARRAY => {
                unreachable!(
                    "PrimitiveArrayReaders don't support complex physical types"
                );
            }
        };
        // Convert to arrays by using the Parquet physical type.
        // The physical types are then cast to Arrow types if necessary
        let mut record_data = self.record_reader.consume_record_data()?;
        // Booleans are decoded one byte per value; repack them into the
        // bit-packed representation Arrow expects.
        if T::get_physical_type() == PhysicalType::BOOLEAN {
            let mut boolean_buffer = BooleanBufferBuilder::new(record_data.len());
            for e in record_data.as_slice() {
                boolean_buffer.append(*e > 0);
            }
            record_data = boolean_buffer.finish();
        }
        let mut array_data = ArrayDataBuilder::new(arrow_data_type)
            .len(self.record_reader.num_values())
            .add_buffer(record_data);
        if let Some(b) = self.record_reader.consume_bitmap_buffer()? {
            array_data = array_data.null_bit_buffer(b);
        }
        // Safety: relies on the record reader having produced buffers consistent
        // with `arrow_data_type`; validation is intentionally skipped here.
        let array_data = unsafe { array_data.build_unchecked() };
        let array = match T::get_physical_type() {
            PhysicalType::BOOLEAN => Arc::new(BooleanArray::from(array_data)) as ArrayRef,
            PhysicalType::INT32 => {
                Arc::new(PrimitiveArray::<ArrowInt32Type>::from(array_data)) as ArrayRef
            }
            PhysicalType::INT64 => {
                Arc::new(PrimitiveArray::<ArrowInt64Type>::from(array_data)) as ArrayRef
            }
            PhysicalType::FLOAT => {
                Arc::new(PrimitiveArray::<ArrowFloat32Type>::from(array_data)) as ArrayRef
            }
            PhysicalType::DOUBLE => {
                Arc::new(PrimitiveArray::<ArrowFloat64Type>::from(array_data)) as ArrayRef
            }
            PhysicalType::INT96
            | PhysicalType::BYTE_ARRAY
            | PhysicalType::FIXED_LEN_BYTE_ARRAY => {
                unreachable!(
                    "PrimitiveArrayReaders don't support complex physical types"
                );
            }
        };
        // cast to Arrow type
        // We make a strong assumption here that the casts should be infallible.
        // If the cast fails because of incompatible datatypes, then there might
        // be a bigger problem with how Arrow schemas are converted to Parquet.
        //
        // As there is not always a 1:1 mapping between Arrow and Parquet, there
        // are datatypes which we must convert explicitly.
        // These are:
        // - date64: we should cast int32 to date32, then date32 to date64.
        let array = match target_type {
            ArrowType::Date64 => {
                // this is cheap as it internally reinterprets the data
                let a = arrow::compute::cast(&array, &ArrowType::Date32)?;
                arrow::compute::cast(&a, &target_type)?
            }
            ArrowType::Decimal(p, s) => {
                // Decimals are rebuilt value-by-value from the int32/int64
                // physical representation; other source types are rejected.
                let mut builder = DecimalBuilder::new(array.len(), p, s);
                match array.data_type() {
                    ArrowType::Int32 => {
                        let values = array.as_any().downcast_ref::<Int32Array>().unwrap();
                        for maybe_value in values.iter() {
                            match maybe_value {
                                Some(value) => builder.append_value(value as i128)?,
                                None => builder.append_null()?,
                            }
                        }
                    }
                    ArrowType::Int64 => {
                        let values = array.as_any().downcast_ref::<Int64Array>().unwrap();
                        for maybe_value in values.iter() {
                            match maybe_value {
                                Some(value) => builder.append_value(value as i128)?,
                                None => builder.append_null()?,
                            }
                        }
                    }
                    _ => {
                        return Err(ArrowError(format!(
                            "Cannot convert {:?} to decimal",
                            array.data_type()
                        )))
                    }
                }
                Arc::new(builder.finish()) as ArrayRef
            }
            ArrowType::Timestamp(ArrowTimeUnit::Nanosecond, ref tz) => {
                // Nanosecond timestamps are first cast through the unit recorded
                // in the Parquet logical type to avoid misinterpreting raw values.
                if let Some(LogicalType::TIMESTAMP(t)) = self.column_desc.logical_type() {
                    match t.unit {
                        TimeUnit::MICROS(_) => {
                            let a = arrow::compute::cast(&array, &ArrowType::Timestamp(Microsecond, tz.clone()))?;
                            arrow::compute::cast(&a, &ArrowType::Timestamp(Nanosecond, tz.clone()))?
                        }
                        TimeUnit::MILLIS(_) => {
                            let a = arrow::compute::cast(&array, &ArrowType::Timestamp(Millisecond, tz.clone()))?;
                            arrow::compute::cast(&a, &ArrowType::Timestamp(Nanosecond, tz.clone()))?
                        }
                        _ => arrow::compute::cast(&array, &target_type.clone())?
                    }
                }
                else {
                    arrow::compute::cast(&array, &target_type.clone())?
                }
            }
            _ => arrow::compute::cast(&array, &target_type)?,
        };
        // save definition and repetition buffers
        self.def_levels_buffer = self.record_reader.consume_def_levels()?;
        self.rep_levels_buffer = self.record_reader.consume_rep_levels()?;
        self.record_reader.reset();
        Ok(array)
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        // The buffer was filled with i16 definition levels by the record reader.
        self.def_levels_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        // The buffer was filled with i16 repetition levels by the record reader.
        self.rep_levels_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
}
/// Array reader for Parquet columns whose values cannot be read as plain
/// primitives and instead go through a [`Converter`] that turns the decoded
/// `Vec<Option<T::T>>` into an Arrow `ArrayRef`.
/// (The previous comment here was copy-pasted from the primitive reader.)
pub struct ComplexObjectArrayReader<T, C>
where
    T: DataType,
    C: Converter<Vec<Option<T::T>>, ArrayRef> + 'static,
{
    // Arrow type produced after conversion (and optional dictionary cast).
    data_type: ArrowType,
    // Source of Parquet pages for the column being read.
    pages: Box<dyn PageIterator>,
    // Levels of the most recent batch, served via get_def_levels/get_rep_levels.
    def_levels_buffer: Option<Vec<i16>>,
    rep_levels_buffer: Option<Vec<i16>>,
    // Parquet column descriptor for the column being read.
    column_desc: ColumnDescPtr,
    // Reader for the current column chunk; replaced as pages are exhausted.
    column_reader: Option<ColumnReaderImpl<T>>,
    // Converts decoded Parquet values into the target Arrow array.
    converter: C,
    // NOTE(review): these markers are redundant — T and C already appear in the
    // fields above — but removing them would change struct construction.
    _parquet_type_marker: PhantomData<T>,
    _converter_marker: PhantomData<C>,
}
/// `ArrayReader` implementation that decodes values chunk by chunk and hands
/// them to the converter once `batch_size` values (or end of input) is reached.
impl<T, C> ArrayReader for ComplexObjectArrayReader<T, C>
where
    T: DataType,
    C: Converter<Vec<Option<T::T>>, ArrayRef> + Send + 'static,
{
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    /// Reads up to `batch_size` values, converts them to an Arrow array and
    /// records the definition/repetition levels of the batch.
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        // Try to initialize column reader
        if self.column_reader.is_none() {
            self.next_column_reader()?;
        }
        // Pre-sized scratch buffers for values and (optional) levels.
        let mut data_buffer: Vec<T::T> = Vec::with_capacity(batch_size);
        data_buffer.resize_with(batch_size, T::T::default);
        let mut def_levels_buffer = if self.column_desc.max_def_level() > 0 {
            let mut buf: Vec<i16> = Vec::with_capacity(batch_size);
            buf.resize_with(batch_size, || 0);
            Some(buf)
        } else {
            None
        };
        let mut rep_levels_buffer = if self.column_desc.max_rep_level() > 0 {
            let mut buf: Vec<i16> = Vec::with_capacity(batch_size);
            buf.resize_with(batch_size, || 0);
            Some(buf)
        } else {
            None
        };
        let mut num_read = 0;
        // Keep reading across column chunks until the batch is full or input ends.
        while self.column_reader.is_some() && num_read < batch_size {
            let num_to_read = batch_size - num_read;
            let cur_data_buf = &mut data_buffer[num_read..];
            let cur_def_levels_buf =
                def_levels_buffer.as_mut().map(|b| &mut b[num_read..]);
            let cur_rep_levels_buf =
                rep_levels_buffer.as_mut().map(|b| &mut b[num_read..]);
            let (data_read, levels_read) =
                self.column_reader.as_mut().unwrap().read_batch(
                    num_to_read,
                    cur_def_levels_buf,
                    cur_rep_levels_buf,
                    cur_data_buf,
                )?;
            // Fill space
            // More levels than values means some slots are null: walk backwards
            // and move each value to the slot whose def level equals the max,
            // leaving gaps where nulls belong. (`Option::iter` runs the closure
            // only when def levels exist; note it shadows `def_levels_buffer`.)
            if levels_read > data_read {
                def_levels_buffer.iter().for_each(|def_levels_buffer| {
                    let (mut level_pos, mut data_pos) = (levels_read, data_read);
                    while level_pos > 0 && data_pos > 0 {
                        if def_levels_buffer[num_read + level_pos - 1]
                            == self.column_desc.max_def_level()
                        {
                            cur_data_buf.swap(level_pos - 1, data_pos - 1);
                            level_pos -= 1;
                            data_pos -= 1;
                        } else {
                            level_pos -= 1;
                        }
                    }
                });
            }
            let values_read = max(levels_read, data_read);
            num_read += values_read;
            // current page exhausted && page iterator exhausted
            if values_read < num_to_read && !self.next_column_reader()? {
                break;
            }
        }
        // Shrink the scratch buffers to what was actually read.
        data_buffer.truncate(num_read);
        def_levels_buffer
            .iter_mut()
            .for_each(|buf| buf.truncate(num_read));
        rep_levels_buffer
            .iter_mut()
            .for_each(|buf| buf.truncate(num_read));
        self.def_levels_buffer = def_levels_buffer;
        self.rep_levels_buffer = rep_levels_buffer;
        // Pair each value with its def level to decide Some/None per slot.
        let data: Vec<Option<T::T>> = if self.def_levels_buffer.is_some() {
            data_buffer
                .into_iter()
                .zip(self.def_levels_buffer.as_ref().unwrap().iter())
                .map(|(t, def_level)| {
                    if *def_level == self.column_desc.max_def_level() {
                        Some(t)
                    } else {
                        None
                    }
                })
                .collect()
        } else {
            data_buffer.into_iter().map(Some).collect()
        };
        let mut array = self.converter.convert(data)?;
        // Dictionary targets need an extra cast after conversion.
        if let ArrowType::Dictionary(_, _) = self.data_type {
            array = arrow::compute::cast(&array, &self.data_type)?;
        }
        Ok(array)
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        self.def_levels_buffer.as_deref()
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        self.rep_levels_buffer.as_deref()
    }
}
impl<T, C> ComplexObjectArrayReader<T, C>
where
    T: DataType,
    C: Converter<Vec<Option<T::T>>, ArrayRef> + 'static,
{
    /// Create a reader for the given column that turns decoded Parquet values
    /// into Arrow arrays with `converter`. When `arrow_type` is `None` the
    /// target Arrow type is derived from the Parquet column descriptor.
    pub fn new(
        pages: Box<dyn PageIterator>,
        column_desc: ColumnDescPtr,
        converter: C,
        arrow_type: Option<ArrowType>,
    ) -> Result<Self> {
        // Honour an explicitly requested Arrow type, otherwise infer one
        // from the Parquet schema.
        let data_type = if let Some(t) = arrow_type {
            t
        } else {
            parquet_to_arrow_field(column_desc.as_ref())?
                .data_type()
                .clone()
        };
        Ok(Self {
            data_type,
            pages,
            column_desc,
            converter,
            def_levels_buffer: None,
            rep_levels_buffer: None,
            column_reader: None,
            _parquet_type_marker: PhantomData,
            _converter_marker: PhantomData,
        })
    }
    /// Advance to the next column chunk, if any. Returns `Ok(false)` once the
    /// page iterator is exhausted.
    fn next_column_reader(&mut self) -> Result<bool> {
        if let Some(page) = self.pages.next() {
            self.column_reader =
                Some(ColumnReaderImpl::<T>::new(self.column_desc.clone(), page?));
            Ok(true)
        } else {
            Ok(false)
        }
    }
}
/// Implementation of list array reader.
pub struct ListArrayReader<OffsetSize: OffsetSizeTrait> {
    // Reader producing the flattened child items of the lists.
    item_reader: Box<dyn ArrayReader>,
    // Arrow type of the list array itself.
    data_type: ArrowType,
    // Arrow type of the list items.
    item_type: ArrowType,
    // Definition/repetition levels of the list node in the Parquet schema.
    list_def_level: i16,
    list_rep_level: i16,
    // Definition level meaning "list present but empty".
    list_empty_def_level: i16,
    // Definition level meaning "list slot is null".
    list_null_def_level: i16,
    // Level buffers served via get_def_levels/get_rep_levels.
    def_level_buffer: Option<Buffer>,
    rep_level_buffer: Option<Buffer>,
    // Ties the OffsetSize parameter (i32 vs i64 offsets) to the struct.
    _marker: PhantomData<OffsetSize>,
}
impl<OffsetSize: OffsetSizeTrait> ListArrayReader<OffsetSize> {
    /// Construct list array reader.
    ///
    /// `def_level`/`rep_level` are the levels of the list node itself;
    /// `list_null_def_level`/`list_empty_def_level` are the definition levels
    /// that encode a null list slot and an empty list respectively (see the
    /// level table in `next_batch`). All fields are stored as-is; level
    /// buffers start empty until the first batch is read.
    pub fn new(
        item_reader: Box<dyn ArrayReader>,
        data_type: ArrowType,
        item_type: ArrowType,
        def_level: i16,
        rep_level: i16,
        list_null_def_level: i16,
        list_empty_def_level: i16,
    ) -> Self {
        Self {
            item_reader,
            data_type,
            item_type,
            list_def_level: def_level,
            list_rep_level: rep_level,
            list_null_def_level,
            list_empty_def_level,
            def_level_buffer: None,
            rep_level_buffer: None,
            _marker: PhantomData,
        }
    }
}
// Rebuilds a PrimitiveArray of `$item_type`, skipping the positions listed in
// `$indices` and preserving nulls elsewhere. Evaluates to Ok(ArrayRef) or
// early-returns a ParquetError if the downcast fails.
macro_rules! remove_primitive_array_indices {
    ($arr: expr, $item_type:ty, $indices:expr) => {{
        let array_data = match $arr.as_any().downcast_ref::<PrimitiveArray<$item_type>>() {
            Some(a) => a,
            _ => return Err(ParquetError::General(format!("Error generating next batch for ListArray: {:?} cannot be downcast to PrimitiveArray", $arr))),
        };
        let mut builder = PrimitiveBuilder::<$item_type>::new($arr.len());
        for i in 0..array_data.len() {
            // Copy every element whose index is not being removed.
            if !$indices.contains(&i) {
                if array_data.is_null(i) {
                    builder.append_null()?;
                } else {
                    builder.append_value(array_data.value(i))?;
                }
            }
        }
        Ok(Arc::new(builder.finish()))
    }};
}
// Same as remove_primitive_array_indices, but for array types that need a
// dedicated builder (Boolean, Utf8, Binary, ...). `$array_type` is the concrete
// array to downcast to and `$item_builder` its builder.
// NOTE(review): the error message says "PrimitiveArray" even though the
// downcast target is `$array_type`.
macro_rules! remove_array_indices_custom_builder {
    ($arr: expr, $array_type:ty, $item_builder:ident, $indices:expr) => {{
        let array_data = match $arr.as_any().downcast_ref::<$array_type>() {
            Some(a) => a,
            _ => return Err(ParquetError::General(format!("Error generating next batch for ListArray: {:?} cannot be downcast to PrimitiveArray", $arr))),
        };
        let mut builder = $item_builder::new(array_data.len());
        for i in 0..array_data.len() {
            // Copy every element whose index is not being removed.
            if !$indices.contains(&i) {
                if array_data.is_null(i) {
                    builder.append_null()?;
                } else {
                    builder.append_value(array_data.value(i))?;
                }
            }
        }
        Ok(Arc::new(builder.finish()))
    }};
}
// Variant for FixedSizeBinary arrays, whose builder also needs the element
// byte length `$len`.
// NOTE(review): `$item_builder` is accepted but unused — the body hard-codes
// FixedSizeBinaryBuilder; the error message also says "PrimitiveArray".
macro_rules! remove_fixed_size_binary_array_indices {
    ($arr: expr, $array_type:ty, $item_builder:ident, $indices:expr, $len:expr) => {{
        let array_data = match $arr.as_any().downcast_ref::<$array_type>() {
            Some(a) => a,
            _ => return Err(ParquetError::General(format!("Error generating next batch for ListArray: {:?} cannot be downcast to PrimitiveArray", $arr))),
        };
        let mut builder = FixedSizeBinaryBuilder::new(array_data.len(), $len);
        for i in 0..array_data.len() {
            // Copy every element whose index is not being removed.
            if !$indices.contains(&i) {
                if array_data.is_null(i) {
                    builder.append_null()?;
                } else {
                    builder.append_value(array_data.value(i))?;
                }
            }
        }
        Ok(Arc::new(builder.finish()))
    }};
}
/// Returns a copy of `arr` (whose Arrow type is `item_type`) with the elements
/// at `indices` removed. Used by `ListArrayReader::next_batch` to drop child
/// slots that belong to null or empty list entries. Unsupported item types
/// produce a `ParquetError`.
fn remove_indices(
    arr: ArrayRef,
    item_type: ArrowType,
    indices: Vec<usize>,
) -> Result<ArrayRef> {
    match item_type {
        // Integer and floating point primitives.
        ArrowType::UInt8 => remove_primitive_array_indices!(arr, ArrowUInt8Type, indices),
        ArrowType::UInt16 => {
            remove_primitive_array_indices!(arr, ArrowUInt16Type, indices)
        }
        ArrowType::UInt32 => {
            remove_primitive_array_indices!(arr, ArrowUInt32Type, indices)
        }
        ArrowType::UInt64 => {
            remove_primitive_array_indices!(arr, ArrowUInt64Type, indices)
        }
        ArrowType::Int8 => remove_primitive_array_indices!(arr, ArrowInt8Type, indices),
        ArrowType::Int16 => remove_primitive_array_indices!(arr, ArrowInt16Type, indices),
        ArrowType::Int32 => remove_primitive_array_indices!(arr, ArrowInt32Type, indices),
        ArrowType::Int64 => remove_primitive_array_indices!(arr, ArrowInt64Type, indices),
        ArrowType::Float32 => {
            remove_primitive_array_indices!(arr, ArrowFloat32Type, indices)
        }
        ArrowType::Float64 => {
            remove_primitive_array_indices!(arr, ArrowFloat64Type, indices)
        }
        ArrowType::Boolean => {
            remove_array_indices_custom_builder!(
                arr,
                BooleanArray,
                BooleanBuilder,
                indices
            )
        }
        // Temporal types (dates, times, durations, timestamps).
        ArrowType::Date32 => {
            remove_primitive_array_indices!(arr, ArrowDate32Type, indices)
        }
        ArrowType::Date64 => {
            remove_primitive_array_indices!(arr, ArrowDate64Type, indices)
        }
        ArrowType::Time32(ArrowTimeUnit::Second) => {
            remove_primitive_array_indices!(arr, ArrowTime32SecondType, indices)
        }
        ArrowType::Time32(ArrowTimeUnit::Millisecond) => {
            remove_primitive_array_indices!(arr, ArrowTime32MillisecondType, indices)
        }
        ArrowType::Time64(ArrowTimeUnit::Microsecond) => {
            remove_primitive_array_indices!(arr, ArrowTime64MicrosecondType, indices)
        }
        ArrowType::Time64(ArrowTimeUnit::Nanosecond) => {
            remove_primitive_array_indices!(arr, ArrowTime64NanosecondType, indices)
        }
        ArrowType::Duration(ArrowTimeUnit::Second) => {
            remove_primitive_array_indices!(arr, ArrowDurationSecondType, indices)
        }
        ArrowType::Duration(ArrowTimeUnit::Millisecond) => {
            remove_primitive_array_indices!(arr, ArrowDurationMillisecondType, indices)
        }
        ArrowType::Duration(ArrowTimeUnit::Microsecond) => {
            remove_primitive_array_indices!(arr, ArrowDurationMicrosecondType, indices)
        }
        ArrowType::Duration(ArrowTimeUnit::Nanosecond) => {
            remove_primitive_array_indices!(arr, ArrowDurationNanosecondType, indices)
        }
        ArrowType::Timestamp(ArrowTimeUnit::Second, _) => {
            remove_primitive_array_indices!(arr, ArrowTimestampSecondType, indices)
        }
        ArrowType::Timestamp(ArrowTimeUnit::Millisecond, _) => {
            remove_primitive_array_indices!(arr, ArrowTimestampMillisecondType, indices)
        }
        ArrowType::Timestamp(ArrowTimeUnit::Microsecond, _) => {
            remove_primitive_array_indices!(arr, ArrowTimestampMicrosecondType, indices)
        }
        ArrowType::Timestamp(ArrowTimeUnit::Nanosecond, _) => {
            remove_primitive_array_indices!(arr, ArrowTimestampNanosecondType, indices)
        }
        // Variable and fixed length binary/string types.
        ArrowType::Utf8 => {
            remove_array_indices_custom_builder!(arr, StringArray, StringBuilder, indices)
        }
        ArrowType::Binary => {
            remove_array_indices_custom_builder!(arr, BinaryArray, BinaryBuilder, indices)
        }
        ArrowType::FixedSizeBinary(size) => remove_fixed_size_binary_array_indices!(
            arr,
            FixedSizeBinaryArray,
            FixedSizeBinaryBuilder,
            indices,
            size
        ),
        ArrowType::Struct(fields) => {
            let struct_array = arr
                .as_any()
                .downcast_ref::<StructArray>()
                .expect("Array should be a struct");
            // Recursively call remove indices on each of the structs fields
            let new_columns = fields
                .into_iter()
                .zip(struct_array.columns())
                .map(|(field, column)| {
                    let dt = field.data_type().clone();
                    Ok((field, remove_indices(column.clone(), dt, indices.clone())?))
                })
                .collect::<Result<Vec<_>>>()?;
            if arr.data().null_count() == 0 {
                // No nulls, nothing to do.
                Ok(Arc::new(StructArray::from(new_columns)))
            } else {
                // Construct a new validity buffer by removing `indices` from the original validity
                // map.
                let mut valid = BooleanBufferBuilder::new(arr.len() - indices.len());
                for idx in 0..arr.len() {
                    if !indices.contains(&idx) {
                        valid.append(!arr.is_null(idx));
                    }
                }
                Ok(Arc::new(StructArray::from((new_columns, valid.finish()))))
            }
        }
        _ => Err(ParquetError::General(format!(
            "ListArray of type List({:?}) is not supported by array_reader",
            item_type
        ))),
    }
}
/// Implementation of ListArrayReader. Nested lists and lists of structs are not yet supported.
impl<OffsetSize: OffsetSizeTrait> ArrayReader for ListArrayReader<OffsetSize> {
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Returns data type.
    /// This must be a List.
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    /// Reads the next batch of child items and reassembles them into a list
    /// array: child slots for null/empty lists are removed, offsets are derived
    /// from repetition levels and validity from definition levels.
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        let next_batch_array = self.item_reader.next_batch(batch_size)?;
        let item_type = self.item_reader.get_data_type().clone();
        if next_batch_array.len() == 0 {
            return Ok(new_empty_array(&self.data_type));
        }
        let def_levels = self
            .item_reader
            .get_def_levels()
            .ok_or_else(|| ArrowError("item_reader def levels are None.".to_string()))?;
        let rep_levels = self
            .item_reader
            .get_rep_levels()
            .ok_or_else(|| ArrowError("item_reader rep levels are None.".to_string()))?;
        if !((def_levels.len() == rep_levels.len())
            && (rep_levels.len() == next_batch_array.len()))
        {
            return Err(ArrowError(
                "Expected item_reader def_levels and rep_levels to be same length as batch".to_string(),
            ));
        }
        // List definitions can be encoded as 4 values:
        // - n + 0: the list slot is null
        // - n + 1: the list slot is not null, but is empty (i.e. [])
        // - n + 2: the list slot is not null, but its child is empty (i.e. [ null ])
        // - n + 3: the list slot is not null, and its child is not empty
        // Where n is the max definition level of the list's parent.
        // If a Parquet schema's only leaf is the list, then n = 0.
        // If the list index is at empty definition, the child slot is null
        let null_list_indices: Vec<usize> = def_levels
            .iter()
            .enumerate()
            .filter_map(|(index, def)| {
                if *def <= self.list_empty_def_level {
                    Some(index)
                } else {
                    None
                }
            })
            .collect();
        let batch_values = match null_list_indices.len() {
            0 => next_batch_array.clone(),
            _ => remove_indices(next_batch_array.clone(), item_type, null_list_indices)?,
        };
        // first item in each list has rep_level = 0, subsequent items have rep_level = 1
        // Each list start (rep 0 or an empty-list marker) opens a new offset;
        // only real child values advance the running offset.
        let mut offsets: Vec<OffsetSize> = Vec::new();
        let mut cur_offset = OffsetSize::zero();
        def_levels.iter().zip(rep_levels).for_each(|(d, r)| {
            if *r == 0 || d == &self.list_empty_def_level {
                offsets.push(cur_offset);
            }
            if d > &self.list_empty_def_level {
                cur_offset += OffsetSize::one();
            }
        });
        offsets.push(cur_offset);
        let num_bytes = bit_util::ceil(offsets.len(), 8);
        // TODO: A useful optimization is to use the null count to fill with
        // 0 or null, to reduce individual bits set in a loop.
        // To favour dense data, set every slot to true, then unset
        let mut null_buf = MutableBuffer::new(num_bytes).with_bitset(num_bytes, true);
        let null_slice = null_buf.as_slice_mut();
        let mut list_index = 0;
        for i in 0..rep_levels.len() {
            // If the level is lower than empty, then the slot is null.
            // When a list is non-nullable, its empty level = null level,
            // so this automatically factors that in.
            if rep_levels[i] == 0 && def_levels[i] < self.list_empty_def_level {
                bit_util::unset_bit(null_slice, list_index);
            }
            if rep_levels[i] == 0 {
                list_index += 1;
            }
        }
        let value_offsets = Buffer::from(&offsets.to_byte_slice());
        // NOTE(review): the list's offset is taken from the *child* array's
        // offset — confirm this is intended rather than 0.
        let list_data = ArrayData::builder(self.get_data_type().clone())
            .len(offsets.len() - 1)
            .add_buffer(value_offsets)
            .add_child_data(batch_values.data().clone())
            .null_bit_buffer(null_buf.into())
            .offset(next_batch_array.offset());
        // Safety: buffers were constructed above to match the list layout;
        // validation is intentionally skipped.
        let list_data = unsafe { list_data.build_unchecked() };
        let result_array = GenericListArray::<OffsetSize>::from(list_data);
        Ok(Arc::new(result_array))
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.def_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.rep_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
}
/// Implementation of a map array reader.
pub struct MapArrayReader {
    // Readers for the flattened map keys and values (must yield equal lengths).
    key_reader: Box<dyn ArrayReader>,
    value_reader: Box<dyn ArrayReader>,
    // Arrow type of the map array; expected to be ArrowType::Map.
    data_type: ArrowType,
    // Definition/repetition levels of the map entries in the Parquet schema.
    map_def_level: i16,
    map_rep_level: i16,
    // Level buffers served via get_def_levels/get_rep_levels.
    def_level_buffer: Option<Buffer>,
    rep_level_buffer: Option<Buffer>,
}
impl MapArrayReader {
    /// Construct a map array reader from readers for the map's keys and values.
    ///
    /// `def_level` and `rep_level` are the definition and repetition levels of
    /// the map entries; `next_batch` uses them to derive offsets and validity.
    pub fn new(
        key_reader: Box<dyn ArrayReader>,
        value_reader: Box<dyn ArrayReader>,
        data_type: ArrowType,
        def_level: i16,
        rep_level: i16,
    ) -> Self {
        Self {
            key_reader,
            value_reader,
            data_type,
            // Fix: these two fields were previously assigned crosswise
            // (`map_def_level: rep_level` and `map_rep_level: def_level`),
            // making `next_batch` compare definition levels against the
            // repetition level. Store each argument in its matching field.
            map_def_level: def_level,
            map_rep_level: rep_level,
            def_level_buffer: None,
            rep_level_buffer: None,
        }
    }
}
/// `ArrayReader` implementation that zips key and value batches into a
/// `MapArray`, deriving entry offsets and validity from the key reader's
/// definition/repetition levels.
impl ArrayReader for MapArrayReader {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        let key_array = self.key_reader.next_batch(batch_size)?;
        let value_array = self.value_reader.next_batch(batch_size)?;
        // Check that key and value have the same lengths
        let key_length = key_array.len();
        if key_length != value_array.len() {
            return Err(general_err!(
                "Map key and value should have the same lengths."
            ));
        }
        // Levels are taken from the key reader only; keys and values are
        // assumed to share the same level structure.
        let def_levels = self
            .key_reader
            .get_def_levels()
            .ok_or_else(|| ArrowError("item_reader def levels are None.".to_string()))?;
        let rep_levels = self
            .key_reader
            .get_rep_levels()
            .ok_or_else(|| ArrowError("item_reader rep levels are None.".to_string()))?;
        if !((def_levels.len() == rep_levels.len()) && (rep_levels.len() == key_length)) {
            return Err(ArrowError(
                "Expected item_reader def_levels and rep_levels to be same length as batch".to_string(),
            ));
        }
        // The inner struct type (key_value entries) comes from the Map field.
        let entry_data_type = if let ArrowType::Map(field, _) = &self.data_type {
            field.data_type().clone()
        } else {
            return Err(ArrowError("Expected a map arrow type".to_string()));
        };
        let entry_data = ArrayDataBuilder::new(entry_data_type)
            .len(key_length)
            .add_child_data(key_array.data().clone())
            .add_child_data(value_array.data().clone());
        // Safety: child data was built from the key/value arrays above;
        // validation is intentionally skipped.
        let entry_data = unsafe { entry_data.build_unchecked() };
        let entry_len = rep_levels.iter().filter(|level| **level == 0).count();
        // first item in each list has rep_level = 0, subsequent items have rep_level = 1
        // NOTE(review): the comparisons below assume `map_def_level` holds the
        // definition level of the map entries — confirm how it is assigned in
        // `MapArrayReader::new`.
        let mut offsets: Vec<i32> = Vec::new();
        let mut cur_offset = 0;
        def_levels.iter().zip(rep_levels).for_each(|(d, r)| {
            if *r == 0 || d == &self.map_def_level {
                offsets.push(cur_offset);
            }
            if d > &self.map_def_level {
                cur_offset += 1;
            }
        });
        offsets.push(cur_offset);
        let num_bytes = bit_util::ceil(offsets.len(), 8);
        // TODO: A useful optimization is to use the null count to fill with
        // 0 or null, to reduce individual bits set in a loop.
        // To favour dense data, set every slot to true, then unset
        let mut null_buf = MutableBuffer::new(num_bytes).with_bitset(num_bytes, true);
        let null_slice = null_buf.as_slice_mut();
        let mut list_index = 0;
        for i in 0..rep_levels.len() {
            // If the level is lower than empty, then the slot is null.
            // When a list is non-nullable, its empty level = null level,
            // so this automatically factors that in.
            if rep_levels[i] == 0 && def_levels[i] < self.map_def_level {
                // should be empty list
                bit_util::unset_bit(null_slice, list_index);
            }
            if rep_levels[i] == 0 {
                list_index += 1;
            }
        }
        let value_offsets = Buffer::from(&offsets.to_byte_slice());
        // Now we can build array data
        let array_data = ArrayDataBuilder::new(self.data_type.clone())
            .len(entry_len)
            .add_buffer(value_offsets)
            .null_bit_buffer(null_buf.into())
            .add_child_data(entry_data);
        // Safety: layout was constructed above; validation intentionally skipped.
        let array_data = unsafe { array_data.build_unchecked() };
        Ok(Arc::new(MapArray::from(array_data)))
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.def_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.rep_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
}
/// Implementation of struct array reader.
pub struct StructArrayReader {
    // One reader per struct field, in field order.
    children: Vec<Box<dyn ArrayReader>>,
    // Arrow type of the struct array; expected to be ArrowType::Struct.
    data_type: ArrowType,
    // Definition/repetition levels of the struct node in the Parquet schema.
    struct_def_level: i16,
    struct_rep_level: i16,
    // Level buffers served via get_def_levels/get_rep_levels.
    def_level_buffer: Option<Buffer>,
    rep_level_buffer: Option<Buffer>,
}
impl StructArrayReader {
    /// Construct struct array reader.
    ///
    /// `children` are the per-field readers; `def_level`/`rep_level` are the
    /// struct node's levels, used by `next_batch` to compute validity. Level
    /// buffers start empty until the first batch is read.
    pub fn new(
        data_type: ArrowType,
        children: Vec<Box<dyn ArrayReader>>,
        def_level: i16,
        rep_level: i16,
    ) -> Self {
        Self {
            data_type,
            children,
            struct_def_level: def_level,
            struct_rep_level: rep_level,
            def_level_buffer: None,
            rep_level_buffer: None,
        }
    }
}
impl ArrayReader for StructArrayReader {
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Returns data type.
    /// This must be a struct.
    fn get_data_type(&self) -> &ArrowType {
        &self.data_type
    }
    /// Read `batch_size` struct records.
    ///
    /// Definition levels of struct array is calculated as following:
    /// ```ignore
    /// def_levels[i] = min(child1_def_levels[i], child2_def_levels[i], ...,
    /// childn_def_levels[i]);
    /// ```
    ///
    /// Repetition levels of struct array is calculated as following:
    /// ```ignore
    /// rep_levels[i] = child1_rep_levels[i];
    /// ```
    ///
    /// The null bitmap of struct array is calculated from def_levels:
    /// ```ignore
    /// null_bitmap[i] = (def_levels[i] >= self.def_level);
    /// ```
    fn next_batch(&mut self, batch_size: usize) -> Result<ArrayRef> {
        // A struct with no fields yields an empty StructArray and clears levels.
        if self.children.is_empty() {
            self.def_level_buffer = None;
            self.rep_level_buffer = None;
            return Ok(Arc::new(StructArray::from(Vec::new())));
        }
        // Read a batch from every child, short-circuiting on the first error.
        let children_array = self
            .children
            .iter_mut()
            .map(|reader| reader.next_batch(batch_size))
            .try_fold(
                Vec::new(),
                |mut result, child_array| -> Result<Vec<ArrayRef>> {
                    result.push(child_array?);
                    Ok(result)
                },
            )?;
        // check that array child data has same size
        let children_array_len =
            children_array.first().map(|arr| arr.len()).ok_or_else(|| {
                general_err!("Struct array reader should have at least one child!")
            })?;
        let all_children_len_eq = children_array
            .iter()
            .all(|arr| arr.len() == children_array_len);
        if !all_children_len_eq {
            return Err(general_err!("Not all children array length are the same!"));
        }
        // Now we can build array data
        let mut array_data_builder = ArrayDataBuilder::new(self.data_type.clone())
            .len(children_array_len)
            .child_data(
                children_array
                    .iter()
                    .map(|x| x.data().clone())
                    .collect::<Vec<ArrayData>>(),
            );
        if self.struct_def_level != 0 {
            // calculate struct def level data
            let buffer_size = children_array_len * size_of::<i16>();
            let mut def_level_data_buffer = MutableBuffer::new(buffer_size);
            def_level_data_buffer.resize(buffer_size, 0);
            // Safety: the buffer is always treated as `i16` in the code below
            // (it was sized with size_of::<i16>() above).
            let def_level_data = unsafe { def_level_data_buffer.typed_data_mut() };
            // Start every slot at the struct's own def level, then take the
            // minimum across all children that report def levels.
            def_level_data
                .iter_mut()
                .for_each(|v| *v = self.struct_def_level);
            for child in &self.children {
                if let Some(current_child_def_levels) = child.get_def_levels() {
                    if current_child_def_levels.len() != children_array_len {
                        return Err(general_err!("Child array length are not equal!"));
                    } else {
                        for i in 0..children_array_len {
                            def_level_data[i] =
                                min(def_level_data[i], current_child_def_levels[i]);
                        }
                    }
                }
            }
            // calculate bitmap for current array
            let mut bitmap_builder = BooleanBufferBuilder::new(children_array_len);
            for def_level in def_level_data {
                let not_null = *def_level >= self.struct_def_level;
                bitmap_builder.append(not_null);
            }
            array_data_builder =
                array_data_builder.null_bit_buffer(bitmap_builder.finish());
            self.def_level_buffer = Some(def_level_data_buffer.into());
        }
        // Safety: children were validated above to share one length;
        // further validation is intentionally skipped.
        let array_data = unsafe { array_data_builder.build_unchecked() };
        if self.struct_rep_level != 0 {
            // calculate struct rep level data, since struct doesn't add to repetition
            // levels, here we just need to keep repetition levels of first array
            // TODO: Verify that all children array reader has same repetition levels
            let rep_level_data = self
                .children
                .first()
                .ok_or_else(|| {
                    general_err!("Struct array reader should have at least one child!")
                })?
                .get_rep_levels()
                .map(|data| -> Result<Buffer> {
                    let mut buffer = Int16BufferBuilder::new(children_array_len);
                    buffer.append_slice(data);
                    Ok(buffer.finish())
                })
                .transpose()?;
            self.rep_level_buffer = rep_level_data;
        }
        Ok(Arc::new(StructArray::from(array_data)))
    }
    fn get_def_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.def_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
    fn get_rep_levels(&self) -> Option<&[i16]> {
        // Buffer holds i16 levels when present.
        self.rep_level_buffer
            .as_ref()
            .map(|buf| unsafe { buf.typed_data() })
    }
}
/// Create array reader from parquet schema, column indices, and parquet file reader.
///
/// Builds a pruned projection of the Parquet root schema containing only the
/// root fields touched by `column_indices`, then delegates to
/// `ArrayReaderBuilder`. Errors if no columns are selected.
pub fn build_array_reader<T>(
    parquet_schema: SchemaDescPtr,
    arrow_schema: SchemaRef,
    column_indices: T,
    row_groups: Box<dyn RowGroupCollection>,
) -> Result<Box<dyn ArrayReader>>
where
    T: IntoIterator<Item = usize>,
{
    // Leaf columns are keyed by the *pointer identity* of their schema node;
    // the builder later matches nodes by the same raw pointer.
    let mut leaves = HashMap::<*const Type, usize>::new();
    let mut filtered_root_names = HashSet::<String>::new();
    for c in column_indices {
        let column = parquet_schema.column(c).self_type() as *const Type;
        leaves.insert(column, c);
        let root = parquet_schema.get_column_root_ptr(c);
        filtered_root_names.insert(root.name().to_string());
    }
    if leaves.is_empty() {
        return Err(general_err!("Can't build array reader without columns!"));
    }
    // Only pass root fields that take part in the projection
    // to avoid traversal of columns that are not read.
    // TODO: also prune unread parts of the tree in child structures
    let filtered_root_fields = parquet_schema
        .root_schema()
        .get_fields()
        .iter()
        .filter(|field| filtered_root_names.contains(field.name()))
        .cloned()
        .collect::<Vec<_>>();
    // Synthetic group type acting as the projected root schema.
    let proj = Type::GroupType {
        basic_info: parquet_schema.root_schema().get_basic_info().clone(),
        fields: filtered_root_fields,
    };
    ArrayReaderBuilder::new(Arc::new(proj), arrow_schema, Arc::new(leaves), row_groups)
        .build_array_reader()
}
/// Used to build array reader.
struct ArrayReaderBuilder {
    // Projected Parquet root schema to traverse.
    root_schema: TypePtr,
    // Arrow schema used to resolve target Arrow types.
    arrow_schema: Arc<Schema>,
    // Key: columns that need to be included in final array builder
    // Value: column index in schema
    // (keys are raw schema-node pointers; see `build_array_reader`)
    columns_included: Arc<HashMap<*const Type, usize>>,
    // Source of row group data for constructing page iterators.
    row_groups: Box<dyn RowGroupCollection>,
}
/// Used in type visitor.
#[derive(Clone)]
struct ArrayReaderBuilderContext {
    // Accumulated definition level at the current position in the schema tree.
    def_level: i16,
    // Accumulated repetition level at the current position in the schema tree.
    rep_level: i16,
    // Dot-path of field names from the root to the current node.
    path: ColumnPath,
}
impl Default for ArrayReaderBuilderContext {
    /// Root context: no definition/repetition nesting yet and an empty path.
    fn default() -> Self {
        let path = ColumnPath::new(Vec::new());
        Self {
            path,
            def_level: 0,
            rep_level: 0,
        }
    }
}
/// Create array reader by visiting schema.
impl<'a> TypeVisitor<Option<Box<dyn ArrayReader>>, &'a ArrayReaderBuilderContext>
for ArrayReaderBuilder
{
    /// Build array reader for primitive type.
    ///
    /// Returns `Ok(None)` if the column is not part of the projection. For
    /// included columns it extends the level context according to the field's
    /// repetition, builds the reader, and rejects REPEATED primitives (not
    /// supported yet).
    fn visit_primitive(
        &mut self,
        cur_type: TypePtr,
        context: &'a ArrayReaderBuilderContext,
    ) -> Result<Option<Box<dyn ArrayReader>>> {
        if self.is_included(cur_type.as_ref()) {
            let mut new_context = context.clone();
            new_context.path.append(vec![cur_type.name().to_string()]);
            let null_mask_only = match cur_type.get_basic_info().repetition() {
                Repetition::REPEATED => {
                    new_context.def_level += 1;
                    new_context.rep_level += 1;
                    false
                }
                Repetition::OPTIONAL => {
                    new_context.def_level += 1;
                    // Can just compute null mask if no parent
                    context.def_level == 0 && context.rep_level == 0
                }
                _ => false,
            };
            let reader = self.build_for_primitive_type_inner(
                cur_type.clone(),
                &new_context,
                null_mask_only,
            )?;
            // NOTE(review): the reader is built before this check, so work is
            // done even for the unsupported REPEATED case — confirm whether
            // the check can safely run first.
            if cur_type.get_basic_info().repetition() == Repetition::REPEATED {
                Err(ArrowError(
                    "Reading repeated field is not supported yet!".to_string(),
                ))
            } else {
                Ok(Some(reader))
            }
        } else {
            Ok(None)
        }
    }
    /// Build array reader for struct type.
    ///
    /// Extends the level context by the struct's own repetition (when it has
    /// one), builds readers for included children, and rejects REPEATED
    /// structs (not supported yet). Returns `Ok(None)` when no child of the
    /// struct is included in the projection.
    fn visit_struct(
        &mut self,
        cur_type: Arc<Type>,
        context: &'a ArrayReaderBuilderContext,
    ) -> Result<Option<Box<dyn ArrayReader>>> {
        let mut new_context = context.clone();
        new_context.path.append(vec![cur_type.name().to_string()]);
        // The root schema node may carry no repetition; only adjust levels
        // when one is present.
        if cur_type.get_basic_info().has_repetition() {
            match cur_type.get_basic_info().repetition() {
                Repetition::REPEATED => {
                    new_context.def_level += 1;
                    new_context.rep_level += 1;
                }
                Repetition::OPTIONAL => {
                    new_context.def_level += 1;
                }
                _ => (),
            }
        }
        if let Some(reader) = self.build_for_struct_type_inner(&cur_type, &new_context)? {
            if cur_type.get_basic_info().has_repetition()
                && cur_type.get_basic_info().repetition() == Repetition::REPEATED
            {
                Err(ArrowError(
                    "Reading repeated field is not supported yet!".to_string(),
                ))
            } else {
                Ok(Some(reader))
            }
        } else {
            Ok(None)
        }
    }
/// Build array reader for map type.
fn visit_map(
&mut self,
map_type: Arc<Type>,
context: &'a ArrayReaderBuilderContext,
) -> Result<Option<Box<dyn ArrayReader>>> {
// Add map type to context
let mut new_context = context.clone();
new_context.path.append(vec![map_type.name().to_string()]);
if let Repetition::OPTIONAL = map_type.get_basic_info().repetition() {
new_context.def_level += 1;
}
// Add map entry (key_value) to context
let map_key_value = map_type.get_fields().first().ok_or_else(|| {
ArrowError("Map field must have a key_value entry".to_string())
})?;
new_context
.path
.append(vec![map_key_value.name().to_string()]);
new_context.rep_level += 1;
// Get key and value, and create context for each
let map_key = map_key_value
.get_fields()
.first()
.ok_or_else(|| ArrowError("Map entry must have a key".to_string()))?;
let map_value = map_key_value
.get_fields()
.get(1)
.ok_or_else(|| ArrowError("Map entry must have a value".to_string()))?;
let key_reader = {
let mut key_context = new_context.clone();
key_context.def_level += 1;
key_context.path.append(vec![map_key.name().to_string()]);
self.dispatch(map_key.clone(), &key_context)?.unwrap()
};
let value_reader = {
let mut value_context = new_context.clone();
if let Repetition::OPTIONAL = map_value.get_basic_info().repetition() {
value_context.def_level += 1;
}
self.dispatch(map_value.clone(), &value_context)?.unwrap()
};
let arrow_type = self
.arrow_schema
.field_with_name(map_type.name())
.ok()
.map(|f| f.data_type().to_owned())
.unwrap_or_else(|| {
ArrowType::Map(
Box::new(Field::new(
map_key_value.name(),
ArrowType::Struct(vec![
Field::new(
map_key.name(),
key_reader.get_data_type().clone(),
false,
),
Field::new(
map_value.name(),
value_reader.get_data_type().clone(),
map_value.is_optional(),
),
]),
map_type.is_optional(),
)),
false,
)
});
let key_array_reader: Box<dyn ArrayReader> = Box::new(MapArrayReader::new(
key_reader,
value_reader,
arrow_type,
new_context.def_level,
new_context.rep_level,
));
Ok(Some(key_array_reader))
}
/// Build array reader for list type.
fn visit_list_with_item(
&mut self,
list_type: Arc<Type>,
item_type: Arc<Type>,
context: &'a ArrayReaderBuilderContext,
) -> Result<Option<Box<dyn ArrayReader>>> {
let mut list_child = &list_type
.get_fields()
.first()
.ok_or_else(|| ArrowError("List field must have a child.".to_string()))?
.clone();
let mut new_context = context.clone();
new_context.path.append(vec![list_type.name().to_string()]);
// We need to know at what definition a list or its child is null
let list_null_def = new_context.def_level;
let mut list_empty_def = new_context.def_level;
// If the list's root is nullable
if let Repetition::OPTIONAL = list_type.get_basic_info().repetition() {
new_context.def_level += 1;
// current level is nullable, increment to get level for empty list slot
list_empty_def += 1;
}
match list_child.get_basic_info().repetition() {
Repetition::REPEATED => {
new_context.def_level += 1;
new_context.rep_level += 1;
}
Repetition::OPTIONAL => {
new_context.def_level += 1;
}
_ => (),
}
let item_reader = self
.dispatch(item_type.clone(), &new_context)
.unwrap()
.unwrap();
let item_reader_type = item_reader.get_data_type().clone();
match item_reader_type {
ArrowType::List(_)
| ArrowType::FixedSizeList(_, _)
| ArrowType::Dictionary(_, _) => Err(ArrowError(format!(
"reading List({:?}) into arrow not supported yet",
item_type
))),
_ => {
// a list is a group type with a single child. The list child's
// name comes from the child's field name.
// if the child's name is "list" and it has a child, then use this child
if list_child.name() == "list" && !list_child.get_fields().is_empty() {
list_child = list_child.get_fields().first().unwrap();
}
let arrow_type = self
.arrow_schema
.field_with_name(list_type.name())
.ok()
.map(|f| f.data_type().to_owned())
.unwrap_or_else(|| {
ArrowType::List(Box::new(Field::new(
list_child.name(),
item_reader_type.clone(),
list_child.is_optional(),
)))
});
let list_array_reader: Box<dyn ArrayReader> = match arrow_type {
ArrowType::List(_) => Box::new(ListArrayReader::<i32>::new(
item_reader,
arrow_type,
item_reader_type,
new_context.def_level,
new_context.rep_level,
list_null_def,
list_empty_def,
)),
ArrowType::LargeList(_) => Box::new(ListArrayReader::<i64>::new(
item_reader,
arrow_type,
item_reader_type,
new_context.def_level,
new_context.rep_level,
list_null_def,
list_empty_def,
)),
_ => {
return Err(ArrowError(format!(
"creating ListArrayReader with type {:?} should be unreachable",
arrow_type
)))
}
};
Ok(Some(list_array_reader))
}
}
}
}
impl<'a> ArrayReaderBuilder {
    /// Construct array reader builder.
    fn new(
        root_schema: TypePtr,
        arrow_schema: Arc<Schema>,
        columns_included: Arc<HashMap<*const Type, usize>>,
        file_reader: Box<dyn RowGroupCollection>,
    ) -> Self {
        Self {
            root_schema,
            arrow_schema,
            columns_included,
            row_groups: file_reader,
        }
    }

    /// Main entry point.
    ///
    /// Visits the root schema as a struct; errors if nothing could be built
    /// (e.g. no column was included in the projection).
    fn build_array_reader(&mut self) -> Result<Box<dyn ArrayReader>> {
        let context = ArrayReaderBuilderContext::default();

        self.visit_struct(self.root_schema.clone(), &context)
            .and_then(|reader_opt| {
                reader_opt.ok_or_else(|| general_err!("Failed to build array reader!"))
            })
    }

    // Utility functions

    /// Check whether one column in included in this array reader builder.
    // Identity (pointer) comparison: the same `Type` node instances that were
    // used to populate `columns_included` must be passed here.
    fn is_included(&self, t: &Type) -> bool {
        self.columns_included.contains_key(&(t as *const Type))
    }

    /// Creates primitive array reader for each primitive type.
    ///
    /// Dispatches on the parquet physical type (and, for fixed-length byte
    /// arrays, the converted type) to pick a concrete reader implementation.
    fn build_for_primitive_type_inner(
        &self,
        cur_type: TypePtr,
        context: &'a ArrayReaderBuilderContext,
        null_mask_only: bool,
    ) -> Result<Box<dyn ArrayReader>> {
        let column_desc = Arc::new(ColumnDescriptor::new(
            cur_type.clone(),
            context.def_level,
            context.rep_level,
            context.path.clone(),
        ));
        let page_iterator = self
            .row_groups
            .column_chunks(self.columns_included[&(cur_type.as_ref() as *const Type)])?;

        // The arrow type hint (if the field exists in the arrow schema).
        let arrow_type: Option<ArrowType> = self
            .get_arrow_field(&cur_type, context)
            .map(|f| f.data_type().clone());

        match cur_type.get_physical_type() {
            PhysicalType::BOOLEAN => Ok(Box::new(
                PrimitiveArrayReader::<BoolType>::new_with_options(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                )?,
            )),
            PhysicalType::INT32 => {
                // An arrow Null column is stored as INT32; use the dedicated
                // null reader in that case.
                if let Some(ArrowType::Null) = arrow_type {
                    Ok(Box::new(NullArrayReader::<Int32Type>::new(
                        page_iterator,
                        column_desc,
                    )?))
                } else {
                    Ok(Box::new(
                        PrimitiveArrayReader::<Int32Type>::new_with_options(
                            page_iterator,
                            column_desc,
                            arrow_type,
                            null_mask_only,
                        )?,
                    ))
                }
            }
            PhysicalType::INT64 => Ok(Box::new(
                PrimitiveArrayReader::<Int64Type>::new_with_options(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                )?,
            )),
            PhysicalType::INT96 => {
                // get the optional timezone information from arrow type
                let timezone = arrow_type
                    .as_ref()
                    .map(|data_type| {
                        if let ArrowType::Timestamp(_, tz) = data_type {
                            tz.clone()
                        } else {
                            None
                        }
                    })
                    .flatten();
                let converter = Int96Converter::new(Int96ArrayConverter { timezone });
                Ok(Box::new(ComplexObjectArrayReader::<
                    Int96Type,
                    Int96Converter,
                >::new(
                    page_iterator,
                    column_desc,
                    converter,
                    arrow_type,
                )?))
            }
            PhysicalType::FLOAT => Ok(Box::new(
                PrimitiveArrayReader::<FloatType>::new_with_options(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                )?,
            )),
            PhysicalType::DOUBLE => Ok(Box::new(
                PrimitiveArrayReader::<DoubleType>::new_with_options(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                )?,
            )),
            PhysicalType::BYTE_ARRAY => match arrow_type {
                // Dictionary-typed target gets a reader that preserves the
                // dictionary encoding.
                Some(ArrowType::Dictionary(_, _)) => make_byte_array_dictionary_reader(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                ),
                _ => make_byte_array_reader(
                    page_iterator,
                    column_desc,
                    arrow_type,
                    null_mask_only,
                ),
            },
            PhysicalType::FIXED_LEN_BYTE_ARRAY
                if cur_type.get_basic_info().converted_type()
                    == ConvertedType::DECIMAL =>
            {
                // Fixed-length DECIMAL: decode with precision/scale from the
                // parquet schema.
                let converter = DecimalConverter::new(DecimalArrayConverter::new(
                    cur_type.get_precision(),
                    cur_type.get_scale(),
                ));
                Ok(Box::new(ComplexObjectArrayReader::<
                    FixedLenByteArrayType,
                    DecimalConverter,
                >::new(
                    page_iterator,
                    column_desc,
                    converter,
                    arrow_type,
                )?))
            }
            PhysicalType::FIXED_LEN_BYTE_ARRAY => {
                let byte_width = match *cur_type {
                    Type::PrimitiveType {
                        ref type_length, ..
                    } => *type_length,
                    _ => {
                        return Err(ArrowError(
                            "Expected a physical type, not a group type".to_string(),
                        ))
                    }
                };
                if cur_type.get_basic_info().converted_type() == ConvertedType::INTERVAL {
                    // Parquet INTERVAL is a fixed 12-byte value.
                    if byte_width != 12 {
                        return Err(ArrowError(format!(
                            "Parquet interval type should have length of 12, found {}",
                            byte_width
                        )));
                    }
                    match arrow_type {
                        Some(ArrowType::Interval(IntervalUnit::DayTime)) => {
                            let converter = IntervalDayTimeConverter::new(
                                IntervalDayTimeArrayConverter {},
                            );
                            Ok(Box::new(ComplexObjectArrayReader::<
                                FixedLenByteArrayType,
                                IntervalDayTimeConverter,
                            >::new(
                                page_iterator,
                                column_desc,
                                converter,
                                arrow_type,
                            )?))
                        }
                        Some(ArrowType::Interval(IntervalUnit::YearMonth)) => {
                            let converter = IntervalYearMonthConverter::new(
                                IntervalYearMonthArrayConverter {},
                            );
                            Ok(Box::new(ComplexObjectArrayReader::<
                                FixedLenByteArrayType,
                                IntervalYearMonthConverter,
                            >::new(
                                page_iterator,
                                column_desc,
                                converter,
                                arrow_type,
                            )?))
                        }
                        Some(t) => Err(ArrowError(format!(
                            "Cannot write a Parquet interval to {:?}",
                            t
                        ))),
                        None => {
                            // we do not support an interval not matched to an Arrow type,
                            // because we risk data loss as we won't know which of the 12 bytes
                            // are or should be populated
                            Err(ArrowError(
                                "Cannot write a Parquet interval with no Arrow type specified.
                                There is a risk of data loss as Arrow either supports YearMonth or
                                DayTime precision. Without the Arrow type, we cannot infer the type.
                                ".to_string()
                            ))
                        }
                    }
                } else {
                    // Plain fixed-size binary.
                    let converter = FixedLenBinaryConverter::new(
                        FixedSizeArrayConverter::new(byte_width),
                    );
                    Ok(Box::new(ComplexObjectArrayReader::<
                        FixedLenByteArrayType,
                        FixedLenBinaryConverter,
                    >::new(
                        page_iterator,
                        column_desc,
                        converter,
                        arrow_type,
                    )?))
                }
            }
        }
    }

    /// Constructs struct array reader without considering repetition.
    ///
    /// Children that are not included in the projection are silently skipped;
    /// returns `Ok(None)` when no child produced a reader.
    fn build_for_struct_type_inner(
        &mut self,
        cur_type: &Type,
        context: &'a ArrayReaderBuilderContext,
    ) -> Result<Option<Box<dyn ArrayReader>>> {
        let mut fields = Vec::with_capacity(cur_type.get_fields().len());
        let mut children_reader = Vec::with_capacity(cur_type.get_fields().len());

        for child in cur_type.get_fields() {
            let mut struct_context = context.clone();
            if let Some(child_reader) = self.dispatch(child.clone(), context)? {
                // TODO: this results in calling get_arrow_field twice, it could be reused
                // from child_reader above, by making child_reader carry its `Field`
                struct_context.path.append(vec![child.name().to_string()]);
                let field = match self.get_arrow_field(child, &struct_context) {
                    Some(f) => f.clone(),
                    _ => Field::new(
                        child.name(),
                        child_reader.get_data_type().clone(),
                        child.is_optional(),
                    ),
                };
                fields.push(field);
                children_reader.push(child_reader);
            }
        }

        if !fields.is_empty() {
            let arrow_type = ArrowType::Struct(fields);
            Ok(Some(Box::new(StructArrayReader::new(
                arrow_type,
                children_reader,
                context.def_level,
                context.rep_level,
            ))))
        } else {
            Ok(None)
        }
    }

    /// Look up the arrow `Field` matching `cur_type` at the position described
    /// by `context.path`, walking nested struct fields when necessary.
    fn get_arrow_field(
        &self,
        cur_type: &Type,
        context: &'a ArrayReaderBuilderContext,
    ) -> Option<&Field> {
        let parts: Vec<&str> = context
            .path
            .parts()
            .iter()
            .map(|x| -> &str { x })
            .collect::<Vec<&str>>();

        // If the parts length is one it'll have the top level "schema" type. If
        // it's two then it'll be a top-level type that we can get from the arrow
        // schema directly.
        if parts.len() <= 2 {
            self.arrow_schema.field_with_name(cur_type.name()).ok()
        } else {
            // If it's greater than two then we need to traverse the type path
            // until we find the actual field we're looking for.
            let mut field: Option<&Field> = None;

            for (i, part) in parts.iter().enumerate().skip(1) {
                if i == 1 {
                    field = self.arrow_schema.field_with_name(part).ok();
                } else if let Some(f) = field {
                    // Descend only through struct fields; any other type ends
                    // the search.
                    if let ArrowType::Struct(fields) = f.data_type() {
                        field = fields.iter().find(|f| f.name() == part)
                    } else {
                        field = None
                    }
                } else {
                    field = None
                }
            }
            field
        }
    }
}
#[cfg(test)]
mod tests {
use std::any::Any;
use std::collections::VecDeque;
use std::sync::Arc;
use rand::distributions::uniform::SampleUniform;
use rand::{thread_rng, Rng};
use arrow::array::{
Array, ArrayRef, LargeListArray, ListArray, PrimitiveArray, StringArray,
StructArray,
};
use arrow::datatypes::{
ArrowPrimitiveType, DataType as ArrowType, Date32Type as ArrowDate32, Field,
Int32Type as ArrowInt32, Int64Type as ArrowInt64,
Time32MillisecondType as ArrowTime32MillisecondArray,
Time64MicrosecondType as ArrowTime64MicrosecondArray,
TimestampMicrosecondType as ArrowTimestampMicrosecondType,
TimestampMillisecondType as ArrowTimestampMillisecondType,
};
use crate::arrow::converter::{Utf8ArrayConverter, Utf8Converter};
use crate::arrow::schema::parquet_to_arrow_schema;
use crate::basic::{Encoding, Type as PhysicalType};
use crate::column::page::{Page, PageReader};
use crate::data_type::{ByteArray, ByteArrayType, DataType, Int32Type, Int64Type};
use crate::errors::Result;
use crate::file::reader::{FileReader, SerializedFileReader};
use crate::schema::parser::parse_message_type;
use crate::schema::types::{ColumnDescPtr, SchemaDescriptor};
use crate::util::test_common::page_util::{
DataPageBuilder, DataPageBuilderImpl, InMemoryPageIterator,
};
use crate::util::test_common::{get_test_file, make_pages};
use super::*;
    /// Generate `num_chunks` column chunks for `column_desc`, each containing
    /// `num_levels` levels with values in `[min_value, max_value]` (see
    /// `make_pages`). Definition levels, repetition levels and values are
    /// appended to the out-parameters; each chunk's pages are pushed onto
    /// `page_lists`.
    fn make_column_chunks<T: DataType>(
        column_desc: ColumnDescPtr,
        encoding: Encoding,
        num_levels: usize,
        min_value: T::T,
        max_value: T::T,
        def_levels: &mut Vec<i16>,
        rep_levels: &mut Vec<i16>,
        values: &mut Vec<T::T>,
        page_lists: &mut Vec<Vec<Page>>,
        use_v2: bool,
        num_chunks: usize,
    ) where
        T::T: PartialOrd + SampleUniform + Copy,
    {
        for _i in 0..num_chunks {
            let mut pages = VecDeque::new();
            let mut data = Vec::new();
            let mut page_def_levels = Vec::new();
            let mut page_rep_levels = Vec::new();

            make_pages::<T>(
                column_desc.clone(),
                encoding,
                1,
                num_levels,
                min_value,
                max_value,
                &mut page_def_levels,
                &mut page_rep_levels,
                &mut data,
                &mut pages,
                use_v2,
            );

            def_levels.append(&mut page_def_levels);
            rep_levels.append(&mut page_rep_levels);
            values.append(&mut data);
            page_lists.push(Vec::from(pages));
        }
    }
#[test]
fn test_primitive_array_reader_empty_pages() {
// Construct column schema
let message_type = "
message test_schema {
REQUIRED INT32 leaf;
}
";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
let page_iterator = EmptyPageIterator::new(schema);
let mut array_reader = PrimitiveArrayReader::<Int32Type>::new(
Box::new(page_iterator),
column_desc,
None,
)
.unwrap();
// expect no values to be read
let array = array_reader.next_batch(50).unwrap();
assert!(array.is_empty());
}
#[test]
fn test_primitive_array_reader_data() {
// Construct column schema
let message_type = "
message test_schema {
REQUIRED INT32 leaf;
}
";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
// Construct page iterator
{
let mut data = Vec::new();
let mut page_lists = Vec::new();
make_column_chunks::<Int32Type>(
column_desc.clone(),
Encoding::PLAIN,
100,
1,
200,
&mut Vec::new(),
&mut Vec::new(),
&mut data,
&mut page_lists,
true,
2,
);
let page_iterator =
InMemoryPageIterator::new(schema, column_desc.clone(), page_lists);
let mut array_reader = PrimitiveArrayReader::<Int32Type>::new(
Box::new(page_iterator),
column_desc,
None,
)
.unwrap();
// Read first 50 values, which are all from the first column chunk
let array = array_reader.next_batch(50).unwrap();
let array = array
.as_any()
.downcast_ref::<PrimitiveArray<ArrowInt32>>()
.unwrap();
assert_eq!(
&PrimitiveArray::<ArrowInt32>::from(data[0..50].to_vec()),
array
);
// Read next 100 values, the first 50 ones are from the first column chunk,
// and the last 50 ones are from the second column chunk
let array = array_reader.next_batch(100).unwrap();
let array = array
.as_any()
.downcast_ref::<PrimitiveArray<ArrowInt32>>()
.unwrap();
assert_eq!(
&PrimitiveArray::<ArrowInt32>::from(data[50..150].to_vec()),
array
);
// Try to read 100 values, however there are only 50 values
let array = array_reader.next_batch(100).unwrap();
let array = array
.as_any()
.downcast_ref::<PrimitiveArray<ArrowInt32>>()
.unwrap();
assert_eq!(
&PrimitiveArray::<ArrowInt32>::from(data[150..200].to_vec()),
array
);
}
}
macro_rules! test_primitive_array_reader_one_type {
($arrow_parquet_type:ty, $physical_type:expr, $converted_type_str:expr, $result_arrow_type:ty, $result_arrow_cast_type:ty, $result_primitive_type:ty) => {{
let message_type = format!(
"
message test_schema {{
REQUIRED {:?} leaf ({});
}}
",
$physical_type, $converted_type_str
);
let schema = parse_message_type(&message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
// Construct page iterator
{
let mut data = Vec::new();
let mut page_lists = Vec::new();
make_column_chunks::<$arrow_parquet_type>(
column_desc.clone(),
Encoding::PLAIN,
100,
1,
200,
&mut Vec::new(),
&mut Vec::new(),
&mut data,
&mut page_lists,
true,
2,
);
let page_iterator = InMemoryPageIterator::new(
schema.clone(),
column_desc.clone(),
page_lists,
);
let mut array_reader = PrimitiveArrayReader::<$arrow_parquet_type>::new(
Box::new(page_iterator),
column_desc.clone(),
None,
)
.expect("Unable to get array reader");
let array = array_reader
.next_batch(50)
.expect("Unable to get batch from reader");
let result_data_type = <$result_arrow_type>::DATA_TYPE;
let array = array
.as_any()
.downcast_ref::<PrimitiveArray<$result_arrow_type>>()
.expect(
format!(
"Unable to downcast {:?} to {:?}",
array.data_type(),
result_data_type
)
.as_str(),
);
// create expected array as primitive, and cast to result type
let expected = PrimitiveArray::<$result_arrow_cast_type>::from(
data[0..50]
.iter()
.map(|x| *x as $result_primitive_type)
.collect::<Vec<$result_primitive_type>>(),
);
let expected = Arc::new(expected) as ArrayRef;
let expected = arrow::compute::cast(&expected, &result_data_type)
.expect("Unable to cast expected array");
assert_eq!(expected.data_type(), &result_data_type);
let expected = expected
.as_any()
.downcast_ref::<PrimitiveArray<$result_arrow_type>>()
.expect(
format!(
"Unable to downcast expected {:?} to {:?}",
expected.data_type(),
result_data_type
)
.as_str(),
);
assert_eq!(expected, array);
}
}};
}
#[test]
fn test_primitive_array_reader_temporal_types() {
test_primitive_array_reader_one_type!(
Int32Type,
PhysicalType::INT32,
"DATE",
ArrowDate32,
ArrowInt32,
i32
);
test_primitive_array_reader_one_type!(
Int32Type,
PhysicalType::INT32,
"TIME_MILLIS",
ArrowTime32MillisecondArray,
ArrowInt32,
i32
);
test_primitive_array_reader_one_type!(
Int64Type,
PhysicalType::INT64,
"TIME_MICROS",
ArrowTime64MicrosecondArray,
ArrowInt64,
i64
);
test_primitive_array_reader_one_type!(
Int64Type,
PhysicalType::INT64,
"TIMESTAMP_MILLIS",
ArrowTimestampMillisecondType,
ArrowInt64,
i64
);
test_primitive_array_reader_one_type!(
Int64Type,
PhysicalType::INT64,
"TIMESTAMP_MICROS",
ArrowTimestampMicrosecondType,
ArrowInt64,
i64
);
}
#[test]
fn test_primitive_array_reader_def_and_rep_levels() {
// Construct column schema
let message_type = "
message test_schema {
REPEATED Group test_mid {
OPTIONAL INT32 leaf;
}
}
";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
// Construct page iterator
{
let mut def_levels = Vec::new();
let mut rep_levels = Vec::new();
let mut page_lists = Vec::new();
make_column_chunks::<Int32Type>(
column_desc.clone(),
Encoding::PLAIN,
100,
1,
200,
&mut def_levels,
&mut rep_levels,
&mut Vec::new(),
&mut page_lists,
true,
2,
);
let page_iterator =
InMemoryPageIterator::new(schema, column_desc.clone(), page_lists);
let mut array_reader = PrimitiveArrayReader::<Int32Type>::new(
Box::new(page_iterator),
column_desc,
None,
)
.unwrap();
let mut accu_len: usize = 0;
// Read first 50 values, which are all from the first column chunk
let array = array_reader.next_batch(50).unwrap();
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
accu_len += array.len();
// Read next 100 values, the first 50 ones are from the first column chunk,
// and the last 50 ones are from the second column chunk
let array = array_reader.next_batch(100).unwrap();
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
accu_len += array.len();
// Try to read 100 values, however there are only 50 values
let array = array_reader.next_batch(100).unwrap();
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
}
}
#[test]
fn test_complex_array_reader_no_pages() {
let message_type = "
message test_schema {
REPEATED Group test_mid {
OPTIONAL BYTE_ARRAY leaf (UTF8);
}
}
";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
let pages: Vec<Vec<Page>> = Vec::new();
let page_iterator = InMemoryPageIterator::new(schema, column_desc.clone(), pages);
let converter = Utf8Converter::new(Utf8ArrayConverter {});
let mut array_reader =
ComplexObjectArrayReader::<ByteArrayType, Utf8Converter>::new(
Box::new(page_iterator),
column_desc,
converter,
None,
)
.unwrap();
let values_per_page = 100; // this value is arbitrary in this test - the result should always be an array of 0 length
let array = array_reader.next_batch(values_per_page).unwrap();
assert_eq!(array.len(), 0);
}
#[test]
fn test_complex_array_reader_def_and_rep_levels() {
// Construct column schema
let message_type = "
message test_schema {
REPEATED Group test_mid {
OPTIONAL BYTE_ARRAY leaf (UTF8);
}
}
";
let num_pages = 2;
let values_per_page = 100;
let str_base = "Hello World";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let max_def_level = schema.column(0).max_def_level();
let max_rep_level = schema.column(0).max_rep_level();
assert_eq!(max_def_level, 2);
assert_eq!(max_rep_level, 1);
let mut rng = thread_rng();
let column_desc = schema.column(0);
let mut pages: Vec<Vec<Page>> = Vec::new();
let mut rep_levels = Vec::with_capacity(num_pages * values_per_page);
let mut def_levels = Vec::with_capacity(num_pages * values_per_page);
let mut all_values = Vec::with_capacity(num_pages * values_per_page);
for i in 0..num_pages {
let mut values = Vec::with_capacity(values_per_page);
for _ in 0..values_per_page {
let def_level = rng.gen_range(0..max_def_level + 1);
let rep_level = rng.gen_range(0..max_rep_level + 1);
if def_level == max_def_level {
let len = rng.gen_range(1..str_base.len());
let slice = &str_base[..len];
values.push(ByteArray::from(slice));
all_values.push(Some(slice.to_string()));
} else {
all_values.push(None)
}
rep_levels.push(rep_level);
def_levels.push(def_level)
}
let range = i * values_per_page..(i + 1) * values_per_page;
let mut pb =
DataPageBuilderImpl::new(column_desc.clone(), values.len() as u32, true);
pb.add_rep_levels(max_rep_level, &rep_levels.as_slice()[range.clone()]);
pb.add_def_levels(max_def_level, &def_levels.as_slice()[range]);
pb.add_values::<ByteArrayType>(Encoding::PLAIN, values.as_slice());
let data_page = pb.consume();
pages.push(vec![data_page]);
}
let page_iterator = InMemoryPageIterator::new(schema, column_desc.clone(), pages);
let converter = Utf8Converter::new(Utf8ArrayConverter {});
let mut array_reader =
ComplexObjectArrayReader::<ByteArrayType, Utf8Converter>::new(
Box::new(page_iterator),
column_desc,
converter,
None,
)
.unwrap();
let mut accu_len: usize = 0;
let array = array_reader.next_batch(values_per_page / 2).unwrap();
assert_eq!(array.len(), values_per_page / 2);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
accu_len += array.len();
// Read next values_per_page values, the first values_per_page/2 ones are from the first column chunk,
// and the last values_per_page/2 ones are from the second column chunk
let array = array_reader.next_batch(values_per_page).unwrap();
assert_eq!(array.len(), values_per_page);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
let strings = array.as_any().downcast_ref::<StringArray>().unwrap();
for i in 0..array.len() {
if array.is_valid(i) {
assert_eq!(
all_values[i + accu_len].as_ref().unwrap().as_str(),
strings.value(i)
)
} else {
assert_eq!(all_values[i + accu_len], None)
}
}
accu_len += array.len();
// Try to read values_per_page values, however there are only values_per_page/2 values
let array = array_reader.next_batch(values_per_page).unwrap();
assert_eq!(array.len(), values_per_page / 2);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
}
#[test]
fn test_complex_array_reader_dict_enc_string() {
use crate::encodings::encoding::{DictEncoder, Encoder};
use crate::util::memory::MemTracker;
// Construct column schema
let message_type = "
message test_schema {
REPEATED Group test_mid {
OPTIONAL BYTE_ARRAY leaf (UTF8);
}
}
";
let num_pages = 2;
let values_per_page = 100;
let str_base = "Hello World";
let schema = parse_message_type(message_type)
.map(|t| Arc::new(SchemaDescriptor::new(Arc::new(t))))
.unwrap();
let column_desc = schema.column(0);
let max_def_level = column_desc.max_def_level();
let max_rep_level = column_desc.max_rep_level();
assert_eq!(max_def_level, 2);
assert_eq!(max_rep_level, 1);
let mut rng = thread_rng();
let mut pages: Vec<Vec<Page>> = Vec::new();
let mut rep_levels = Vec::with_capacity(num_pages * values_per_page);
let mut def_levels = Vec::with_capacity(num_pages * values_per_page);
let mut all_values = Vec::with_capacity(num_pages * values_per_page);
for i in 0..num_pages {
let mem_tracker = Arc::new(MemTracker::new());
let mut dict_encoder =
DictEncoder::<ByteArrayType>::new(column_desc.clone(), mem_tracker);
// add data page
let mut values = Vec::with_capacity(values_per_page);
for _ in 0..values_per_page {
let def_level = rng.gen_range(0..max_def_level + 1);
let rep_level = rng.gen_range(0..max_rep_level + 1);
if def_level == max_def_level {
let len = rng.gen_range(1..str_base.len());
let slice = &str_base[..len];
values.push(ByteArray::from(slice));
all_values.push(Some(slice.to_string()));
} else {
all_values.push(None)
}
rep_levels.push(rep_level);
def_levels.push(def_level)
}
let range = i * values_per_page..(i + 1) * values_per_page;
let mut pb =
DataPageBuilderImpl::new(column_desc.clone(), values.len() as u32, true);
pb.add_rep_levels(max_rep_level, &rep_levels.as_slice()[range.clone()]);
pb.add_def_levels(max_def_level, &def_levels.as_slice()[range]);
let _ = dict_encoder.put(&values);
let indices = dict_encoder
.write_indices()
.expect("write_indices() should be OK");
pb.add_indices(indices);
let data_page = pb.consume();
// for each page log num_values vs actual values in page
// println!("page num_values: {}, values.len(): {}", data_page.num_values(), values.len());
// add dictionary page
let dict = dict_encoder
.write_dict()
.expect("write_dict() should be OK");
let dict_page = Page::DictionaryPage {
buf: dict,
num_values: dict_encoder.num_entries() as u32,
encoding: Encoding::RLE_DICTIONARY,
is_sorted: false,
};
pages.push(vec![dict_page, data_page]);
}
let page_iterator = InMemoryPageIterator::new(schema, column_desc.clone(), pages);
let converter = Utf8Converter::new(Utf8ArrayConverter {});
let mut array_reader =
ComplexObjectArrayReader::<ByteArrayType, Utf8Converter>::new(
Box::new(page_iterator),
column_desc,
converter,
None,
)
.unwrap();
let mut accu_len: usize = 0;
// println!("---------- reading a batch of {} values ----------", values_per_page / 2);
let array = array_reader.next_batch(values_per_page / 2).unwrap();
assert_eq!(array.len(), values_per_page / 2);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
accu_len += array.len();
// Read next values_per_page values, the first values_per_page/2 ones are from the first column chunk,
// and the last values_per_page/2 ones are from the second column chunk
// println!("---------- reading a batch of {} values ----------", values_per_page);
let array = array_reader.next_batch(values_per_page).unwrap();
assert_eq!(array.len(), values_per_page);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
let strings = array.as_any().downcast_ref::<StringArray>().unwrap();
for i in 0..array.len() {
if array.is_valid(i) {
assert_eq!(
all_values[i + accu_len].as_ref().unwrap().as_str(),
strings.value(i)
)
} else {
assert_eq!(all_values[i + accu_len], None)
}
}
accu_len += array.len();
// Try to read values_per_page values, however there are only values_per_page/2 values
// println!("---------- reading a batch of {} values ----------", values_per_page);
let array = array_reader.next_batch(values_per_page).unwrap();
assert_eq!(array.len(), values_per_page / 2);
assert_eq!(
Some(&def_levels[accu_len..(accu_len + array.len())]),
array_reader.get_def_levels()
);
assert_eq!(
Some(&rep_levels[accu_len..(accu_len + array.len())]),
array_reader.get_rep_levels()
);
}
    /// Array reader for test.
    ///
    /// Returns a fixed array (and fixed def/rep levels) from every
    /// `next_batch` call, regardless of the requested batch size.
    struct InMemoryArrayReader {
        /// Arrow type reported by `get_data_type`.
        data_type: ArrowType,
        /// The array returned from every `next_batch` call.
        array: ArrayRef,
        /// Definition levels reported by `get_def_levels`, if any.
        def_levels: Option<Vec<i16>>,
        /// Repetition levels reported by `get_rep_levels`, if any.
        rep_levels: Option<Vec<i16>>,
    }
    impl InMemoryArrayReader {
        /// Create a reader that always yields `array` with the given levels.
        pub fn new(
            data_type: ArrowType,
            array: ArrayRef,
            def_levels: Option<Vec<i16>>,
            rep_levels: Option<Vec<i16>>,
        ) -> Self {
            Self {
                data_type,
                array,
                def_levels,
                rep_levels,
            }
        }
    }
    impl ArrayReader for InMemoryArrayReader {
        fn as_any(&self) -> &dyn Any {
            self
        }

        fn get_data_type(&self) -> &ArrowType {
            &self.data_type
        }

        // Note: ignores `batch_size` and returns the whole fixture array.
        fn next_batch(&mut self, _batch_size: usize) -> Result<ArrayRef> {
            Ok(self.array.clone())
        }

        fn get_def_levels(&self) -> Option<&[i16]> {
            self.def_levels.as_deref()
        }

        fn get_rep_levels(&self) -> Option<&[i16]> {
            self.rep_levels.as_deref()
        }
    }
    /// Iterator for testing reading empty columns
    struct EmptyPageIterator {
        /// Schema descriptor returned by the `PageIterator` impl.
        schema: SchemaDescPtr,
    }
impl EmptyPageIterator {
fn new(schema: SchemaDescPtr) -> Self {
EmptyPageIterator { schema }
}
}
    impl Iterator for EmptyPageIterator {
        type Item = Result<Box<dyn PageReader>>;

        // Always exhausted: no page readers are ever produced.
        fn next(&mut self) -> Option<Self::Item> {
            None
        }
    }
    impl PageIterator for EmptyPageIterator {
        fn schema(&mut self) -> Result<SchemaDescPtr> {
            Ok(self.schema.clone())
        }

        // Always describes the first (and only expected) column.
        fn column_schema(&mut self) -> Result<ColumnDescPtr> {
            Ok(self.schema.column(0))
        }
    }
/// Builds a `StructArrayReader` over two in-memory child readers and checks
/// the merged nulls, definition levels and repetition levels of the result.
#[test]
fn test_struct_array_reader() {
    // Child 1: five Int32 values with its own definition levels.
    let array_1 = Arc::new(PrimitiveArray::<ArrowInt32>::from(vec![1, 2, 3, 4, 5]));
    let array_reader_1 = InMemoryArrayReader::new(
        ArrowType::Int32,
        array_1.clone(),
        Some(vec![0, 1, 2, 3, 1]),
        Some(vec![1, 1, 1, 1, 1]),
    );
    // Child 2: same length, different definition levels.
    let array_2 = Arc::new(PrimitiveArray::<ArrowInt32>::from(vec![5, 4, 3, 2, 1]));
    let array_reader_2 = InMemoryArrayReader::new(
        ArrowType::Int32,
        array_2.clone(),
        Some(vec![0, 1, 3, 1, 2]),
        Some(vec![1, 1, 1, 1, 1]),
    );
    let struct_type = ArrowType::Struct(vec![
        Field::new("f1", array_1.data_type().clone(), true),
        Field::new("f2", array_2.data_type().clone(), true),
    ]);
    // Struct reader with definition level 1 and repetition level 1.
    let mut struct_array_reader = StructArrayReader::new(
        struct_type,
        vec![Box::new(array_reader_1), Box::new(array_reader_2)],
        1,
        1,
    );
    let struct_array = struct_array_reader.next_batch(5).unwrap();
    let struct_array = struct_array.as_any().downcast_ref::<StructArray>().unwrap();
    assert_eq!(5, struct_array.len());
    // Only the first entry is expected to be null.
    assert_eq!(
        vec![true, false, false, false, false],
        (0..5)
            .map(|idx| struct_array.data_ref().is_null(idx))
            .collect::<Vec<bool>>()
    );
    // Expected merged definition levels for the struct itself.
    assert_eq!(
        Some(vec![0, 1, 1, 1, 1].as_slice()),
        struct_array_reader.get_def_levels()
    );
    assert_eq!(
        Some(vec![1, 1, 1, 1, 1].as_slice()),
        struct_array_reader.get_rep_levels()
    );
}
/// Reads a real test file and checks that `build_array_reader` reports the
/// expected nested struct Arrow type for the selected leaf column.
#[test]
fn test_create_array_reader() {
    let file = get_test_file("nulls.snappy.parquet");
    let file_reader: Arc<dyn FileReader> =
        Arc::new(SerializedFileReader::new(file).unwrap());
    let file_metadata = file_reader.metadata().file_metadata();
    let arrow_schema = parquet_to_arrow_schema(
        file_metadata.schema_descr(),
        file_metadata.key_value_metadata(),
    )
    .unwrap();
    // Build a reader over leaf column index 0 only.
    let array_reader = build_array_reader(
        file_reader.metadata().file_metadata().schema_descr_ptr(),
        Arc::new(arrow_schema),
        vec![0usize].into_iter(),
        Box::new(file_reader),
    )
    .unwrap();
    // Create arrow types
    let arrow_type = ArrowType::Struct(vec![Field::new(
        "b_struct",
        ArrowType::Struct(vec![Field::new("b_c_int", ArrowType::Int32, true)]),
        true,
    )]);
    assert_eq!(array_reader.get_data_type(), &arrow_type);
}
/// Checks that `ListArrayReader::<i32>` reconstructs `[[1, null, 2], null, [3, 4]]`
/// from flat values plus definition/repetition levels.
#[test]
fn test_list_array_reader() {
    // [[1, null, 2], null, [3, 4]]
    let array = Arc::new(PrimitiveArray::<ArrowInt32>::from(vec![
        Some(1),
        None,
        Some(2),
        None,
        Some(3),
        Some(4),
    ]));
    let item_array_reader = InMemoryArrayReader::new(
        ArrowType::Int32,
        array,
        Some(vec![3, 2, 3, 0, 3, 3]),
        Some(vec![0, 1, 1, 0, 0, 1]),
    );
    let mut list_array_reader = ListArrayReader::<i32>::new(
        Box::new(item_array_reader),
        ArrowType::List(Box::new(Field::new("item", ArrowType::Int32, true))),
        ArrowType::Int32,
        1,
        1,
        0,
        1,
    );
    let next_batch = list_array_reader.next_batch(1024).unwrap();
    let list_array = next_batch.as_any().downcast_ref::<ListArray>().unwrap();
    assert_eq!(3, list_array.len());
    // Exactly one null entry: the second list is null.
    assert_eq!(1, list_array.null_count());
    assert_eq!(
        list_array
            .value(0)
            .as_any()
            .downcast_ref::<PrimitiveArray<ArrowInt32>>()
            .unwrap(),
        &PrimitiveArray::<ArrowInt32>::from(vec![Some(1), None, Some(2)])
    );
    assert!(list_array.is_null(1));
    assert_eq!(
        list_array
            .value(2)
            .as_any()
            .downcast_ref::<PrimitiveArray<ArrowInt32>>()
            .unwrap(),
        &PrimitiveArray::<ArrowInt32>::from(vec![Some(3), Some(4)])
    );
}
/// Checks that `ListArrayReader::<i64>` (LargeList) reconstructs
/// `[[1, null, 2], null, [3, 4]]` from flat values plus
/// definition/repetition levels — the i64 counterpart of
/// `test_list_array_reader`.
#[test]
fn test_large_list_array_reader() {
    // [[1, null, 2], null, [3, 4]]
    let array = Arc::new(PrimitiveArray::<ArrowInt32>::from(vec![
        Some(1),
        None,
        Some(2),
        None,
        Some(3),
        Some(4),
    ]));
    let item_array_reader = InMemoryArrayReader::new(
        ArrowType::Int32,
        array,
        Some(vec![3, 2, 3, 0, 3, 3]),
        Some(vec![0, 1, 1, 0, 0, 1]),
    );
    let mut list_array_reader = ListArrayReader::<i64>::new(
        Box::new(item_array_reader),
        ArrowType::LargeList(Box::new(Field::new("item", ArrowType::Int32, true))),
        ArrowType::Int32,
        1,
        1,
        0,
        1,
    );
    let next_batch = list_array_reader.next_batch(1024).unwrap();
    let list_array = next_batch
        .as_any()
        .downcast_ref::<LargeListArray>()
        .unwrap();
    assert_eq!(3, list_array.len());
    // Mirror the i32 list test: the second entry must be the only null.
    // (This assertion was missing here while present in
    // `test_list_array_reader`.)
    assert_eq!(1, list_array.null_count());
    assert_eq!(
        list_array
            .value(0)
            .as_any()
            .downcast_ref::<PrimitiveArray<ArrowInt32>>()
            .unwrap(),
        &PrimitiveArray::<ArrowInt32>::from(vec![Some(1), None, Some(2)])
    );
    assert!(list_array.is_null(1));
    assert_eq!(
        list_array
            .value(2)
            .as_any()
            .downcast_ref::<PrimitiveArray<ArrowInt32>>()
            .unwrap(),
        &PrimitiveArray::<ArrowInt32>::from(vec![Some(3), Some(4)])
    );
}
} | def_levels_buffer: None,
rep_levels_buffer: None,
column_desc,
record_reader, |
export_and_import.py | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""
Tests for the export and import routines.
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import io
import six
from six.moves import range, zip
from aiida.backends.testbase import AiidaTestCase
from aiida.orm.importexport import import_data
from aiida import orm
class TestSpecificImport(AiidaTestCase):
    """Regression tests targeting specific historical import bugs."""

    def setUp(self):
        """Start each test from an empty database populated with base fixtures."""
        super(TestSpecificImport, self).setUp()
        self.clean_db()
        self.insert_data()

    def test_simple_import(self):
        """
        This is a very simple test which checks that an export file with nodes
        that are not associated to a computer is imported correctly. In Django
        when such nodes are exported, there is an empty set for computers
        in the export file. In SQLA there is such a set only when a computer is
        associated with the exported nodes. When an empty computer set is
        found at the export file (when imported to an SQLA profile), the SQLA
        import code used to crash. This test demonstrates this problem.
        """
        import tempfile
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.importexport import export, import_data
        from aiida.orm.node import Node
        from aiida.orm.querybuilder import QueryBuilder
        # A ParameterData node is deliberately not associated with any
        # computer, which reproduces the empty computer set in the archive.
        parameters = ParameterData(dict={
            'Pr': {
                'cutoff': 50.0,
                'pseudo_type': 'Wentzcovitch',
                'dual': 8,
                'cutoff_units': 'Ry'
            },
            'Ru': {
                'cutoff': 40.0,
                'pseudo_type': 'SG15',
                'dual': 4,
                'cutoff_units': 'Ry'
            },
        }).store()
        with tempfile.NamedTemporaryFile() as handle:
            nodes = [parameters]
            export(nodes, outfile=handle.name, overwrite=True, silent=True)
            # Check that we have the expected number of nodes in the database
            self.assertEquals(QueryBuilder().append(Node).count(), len(nodes))
            # Clean the database and verify there are no nodes left
            self.clean_db()
            self.assertEquals(QueryBuilder().append(Node).count(), 0)
            # After importing we should have the original number of nodes again
            import_data(handle.name, silent=True)
            self.assertEquals(QueryBuilder().append(Node).count(), len(nodes))

    def test_cycle_structure_data(self):
        """
        Create an export with some Calculation and Data nodes and import it after having
        cleaned the database. Verify that the nodes and their attributes are restored
        properly after importing the created export archive
        """
        import tempfile
        from aiida.common.links import LinkType
        from aiida.orm.calculation import Calculation
        from aiida.orm.data.structure import StructureData
        from aiida.orm.data.remote import RemoteData
        from aiida.orm.importexport import export, import_data
        from aiida.orm.node import Node
        from aiida.orm.querybuilder import QueryBuilder
        test_label = 'Test structure'
        test_cell = [
            [8.34, 0.0, 0.0],
            [0.298041701839357, 8.53479766274308, 0.0],
            [0.842650688117053, 0.47118495164127, 10.6965192730702]
        ]
        test_kinds = [
            {
                'symbols': [u'Fe'],
                'weights': [1.0],
                'mass': 55.845,
                'name': u'Fe'
            },
            {
                'symbols': [u'S'],
                'weights': [1.0],
                'mass': 32.065,
                'name': u'S'
            }
        ]
        structure = StructureData(cell=test_cell)
        structure.append_atom(symbols=['Fe'], position=[0, 0, 0])
        structure.append_atom(symbols=['S'], position=[2, 2, 2])
        structure.label = test_label
        structure.store()
        parent_calculation = Calculation()
        parent_calculation._set_attr('key', 'value')
        parent_calculation.store()
        child_calculation = Calculation()
        child_calculation._set_attr('key', 'value')
        child_calculation.store()
        remote_folder = RemoteData(computer=self.computer, remote_path='/').store()
        # Wire up the graph: parent calc -> remote folder -> child calc -> structure
        remote_folder.add_link_from(parent_calculation, link_type=LinkType.CREATE)
        child_calculation.add_link_from(remote_folder, link_type=LinkType.INPUT)
        structure.add_link_from(child_calculation, link_type=LinkType.CREATE)
        with tempfile.NamedTemporaryFile() as handle:
            nodes = [structure, child_calculation, parent_calculation, remote_folder]
            export(nodes, outfile=handle.name, overwrite=True, silent=True)
            # Check that we have the expected number of nodes in the database
            self.assertEquals(QueryBuilder().append(Node).count(), len(nodes))
            # Clean the database and verify there are no nodes left
            self.clean_db()
            self.assertEquals(QueryBuilder().append(Node).count(), 0)
            # After importing we should have the original number of nodes again
            import_data(handle.name, silent=True)
            self.assertEquals(QueryBuilder().append(Node).count(), len(nodes))
            # Verify that Calculations have non-empty attribute dictionaries
            qb = QueryBuilder().append(Calculation)
            for [calculation] in qb.iterall():
                self.assertIsInstance(calculation.get_attrs(), dict)
                self.assertNotEquals(len(calculation.get_attrs()), 0)
            # Verify that the structure data maintained its label, cell and kinds
            qb = QueryBuilder().append(StructureData)
            for [structure] in qb.iterall():
                self.assertEquals(structure.label, test_label)
                self.assertEquals(structure.cell, test_cell)
            qb = QueryBuilder().append(StructureData, project=['attributes.kinds'])
            for [kinds] in qb.iterall():
                self.assertEqual(len(kinds), 2)
                for kind in kinds:
                    self.assertIn(kind, test_kinds)
            # Check that there is a StructureData that is an output of a Calculation
            qb = QueryBuilder()
            qb.append(Calculation, project=['uuid'], tag='calculation')
            qb.append(StructureData, output_of='calculation')
            self.assertGreater(len(qb.all()), 0)
            # Check that there is a RemoteData that is a child and parent of a Calculation
            qb = QueryBuilder()
            qb.append(Calculation, tag='parent')
            qb.append(RemoteData, project=['uuid'], output_of='parent', tag='remote')
            qb.append(Calculation, output_of='remote')
            self.assertGreater(len(qb.all()), 0)
class TestSimple(AiidaTestCase):
    """End-to-end export/import round-trip tests for basic node graphs."""

    def setUp(self):
        """Start each test from an empty database with base fixtures."""
        self.clean_db()
        self.insert_data()

    def tearDown(self):
        """Nothing to clean up: setUp resets the database for every test."""
        pass

    def test_0(self):
        """Check that base-type nodes (Str, Int, Float, Bool) survive a round-trip."""
        import os
        import shutil
        import tempfile
        from aiida.orm import load_node
        from aiida.orm.data.base import Str, Int, Float, Bool
        from aiida.orm.importexport import export
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            # producing values for each base type
            values = ("Hello", 6, -1.2399834e12, False)  # , ["Bla", 1, 1e-10])
            filename = os.path.join(temp_folder, "export.tar.gz")
            # producing nodes:
            nodes = [cls(val).store() for val, cls in zip(values, (Str, Int, Float, Bool))]
            # my uuid - list to reload the node:
            uuids = [n.uuid for n in nodes]
            # exporting the nodes:
            export(nodes, outfile=filename, silent=True)
            # cleaning:
            self.clean_db()
            # Importing back the data:
            import_data(filename, silent=True)
            # Checking whether values are preserved:
            for uuid, refval in zip(uuids, values):
                self.assertEquals(load_node(uuid).value, refval)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_1(self):
        """Check that node attributes are preserved across export/import."""
        import os
        import shutil
        import tempfile
        from aiida.orm import DataFactory
        from aiida.orm import load_node
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.importexport import export
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()
            calc = JobCalculation()
            calc.set_computer(self.computer)
            calc.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            calc.store()
            calc.add_link_from(sd)
            pks = [sd.pk, calc.pk]
            # Snapshot every attribute of every node, keyed by node uuid
            attrs = {}
            for pk in pks:
                node = load_node(pk)
                attrs[node.uuid] = dict()
                for k in node.attrs():
                    attrs[node.uuid][k] = node.get_attr(k)
            filename = os.path.join(temp_folder, "export.tar.gz")
            export([calc], outfile=filename, silent=True)
            self.clean_db()
            # NOTE: it is better to load new nodes by uuid, rather than assuming
            # that they will have the first 3 pks. In fact, a recommended policy in
            # databases is that pk always increment, even if you've deleted elements
            import_data(filename, silent=True)
            for uuid in attrs.keys():
                node = load_node(uuid)
                for k in attrs[uuid].keys():
                    self.assertEquals(attrs[uuid][k], node.get_attr(k))
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_2(self):
        """
        Test the check for the export format version.
        """
        import tarfile
        import os
        import shutil
        import tempfile
        from aiida.common import exceptions
        from aiida.orm import DataFactory
        from aiida.orm.importexport import export
        import aiida.utils.json as json
        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()
            filename = os.path.join(export_file_tmp_folder, "export.tar.gz")
            export([sd], outfile=filename, silent=True)
            # Unpack the archive, downgrade its version field to an
            # unsupported value, and repack it.
            with tarfile.open(filename, "r:gz", format=tarfile.PAX_FORMAT) as tar:
                tar.extractall(unpack_tmp_folder)
            with io.open(os.path.join(unpack_tmp_folder,
                                      'metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
            metadata['export_version'] = 0.0
            with io.open(os.path.join(unpack_tmp_folder, 'metadata.json'),
                         'wb') as fhandle:
                json.dump(metadata, fhandle)
            with tarfile.open(filename, "w:gz", format=tarfile.PAX_FORMAT) as tar:
                tar.add(unpack_tmp_folder, arcname="")
            self.tearDownClass()
            self.setUpClass()
            # Importing an archive with an unsupported version must fail loudly
            with self.assertRaises(exceptions.IncompatibleArchiveVersionError):
                import_data(filename, silent=True)
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)

    def test_3(self):
        """
        Test importing of nodes, that have links to unknown nodes.
        """
        import tarfile
        import os
        import shutil
        import tempfile
        from aiida.orm.importexport import export
        from aiida.common.folders import SandboxFolder
        from aiida.orm.data.structure import StructureData
        from aiida.orm import load_node
        import aiida.utils.json as json
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            node_label = "Test structure data"
            sd = StructureData()
            sd.label = str(node_label)
            sd.store()
            filename = os.path.join(temp_folder, "export.tar.gz")
            export([sd], outfile=filename, silent=True)
            # Unpack the archive and inject a link whose input uuid does not
            # exist in the archive, then repack.
            unpack = SandboxFolder()
            with tarfile.open(
                    filename, "r:gz", format=tarfile.PAX_FORMAT) as tar:
                tar.extractall(unpack.abspath)
            with io.open(unpack.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
            metadata['links_uuid'].append({
                'output': sd.uuid,
                'input': 'non-existing-uuid',
                'label': 'parent'
            })
            with io.open(unpack.get_abs_path('data.json'), 'wb') as fhandle:
                json.dump(metadata, fhandle)
            with tarfile.open(
                    filename, "w:gz", format=tarfile.PAX_FORMAT) as tar:
                tar.add(unpack.abspath, arcname="")
            self.clean_db()
            # By default a dangling link is an error ...
            with self.assertRaises(ValueError):
                import_data(filename, silent=True)
            # ... but it can be explicitly ignored.
            import_data(filename, ignore_unknown_nodes=True, silent=True)
            self.assertEquals(load_node(sd.uuid).label, node_label)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_4(self):
        """
        Test control of licenses.
        """
        from aiida.common.exceptions import LicensingException
        from aiida.common.folders import SandboxFolder
        from aiida.orm.importexport import export_tree
        from aiida.orm import DataFactory
        StructureData = DataFactory('structure')
        sd = StructureData()
        sd.source = {'license': 'GPL'}
        sd.store()
        folder = SandboxFolder()
        export_tree([sd], folder=folder, silent=True,
                    allowed_licenses=['GPL'])
        # Folder should contain two files of metadata + nodes/
        self.assertEquals(len(folder.get_content_list()), 3)
        folder = SandboxFolder()
        export_tree([sd], folder=folder, silent=True,
                    forbidden_licenses=['Academic'])
        # Folder should contain two files of metadata + nodes/
        self.assertEquals(len(folder.get_content_list()), 3)
        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        allowed_licenses=['CC0'])
        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        forbidden_licenses=['GPL'])

        # Licenses may also be given as filter callables.
        def cc_filter(license):
            return license.startswith('CC')

        def gpl_filter(license):
            return license == 'GPL'

        def crashing_filter(license):
            raise NotImplementedError("not implemented yet")

        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        allowed_licenses=cc_filter)
        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        forbidden_licenses=gpl_filter)
        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        allowed_licenses=crashing_filter)
        folder = SandboxFolder()
        with self.assertRaises(LicensingException):
            export_tree([sd], folder=folder, silent=True,
                        forbidden_licenses=crashing_filter)

    def test_5(self):
        """
        This test checks that nodes belonging to different users are correctly
        exported & imported.
        """
        import os
        import shutil
        import tempfile
        from aiida.orm import load_node
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.data.structure import StructureData
        from aiida.orm.importexport import export
        from aiida.common.datastructures import calc_states
        from aiida.common.links import LinkType
        from aiida.common.utils import get_configured_user_email
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            # Create another user
            new_email = "[email protected]"
            user = orm.User(email=new_email, backend=self.backend).store()
            # Create a structure data node that has a calculation as output
            sd1 = StructureData()
            sd1.set_user(user)
            sd1.label = 'sd1'
            sd1.store()
            jc1 = JobCalculation()
            jc1.set_computer(self.computer)
            jc1.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc1.set_user(user)
            jc1.label = 'jc1'
            jc1.store()
            jc1.add_link_from(sd1)
            jc1._set_state(calc_states.PARSING)
            # Create some nodes from a different user
            sd2 = StructureData()
            sd2.set_user(user)
            sd2.label = 'sd2'
            sd2.store()
            sd2.add_link_from(jc1, label='l1', link_type=LinkType.CREATE)  # I assume jc1 CREATED sd2
            jc2 = JobCalculation()
            jc2.set_computer(self.computer)
            jc2.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc2.label = 'jc2'
            jc2.store()
            jc2.add_link_from(sd2, label='l2')
            jc2._set_state(calc_states.PARSING)
            sd3 = StructureData()
            sd3.label = 'sd3'
            sd3.store()
            sd3.add_link_from(jc2, label='l3', link_type=LinkType.CREATE)
            uuids_u1 = [sd1.uuid, jc1.uuid, sd2.uuid]
            uuids_u2 = [jc2.uuid, sd3.uuid]
            filename = os.path.join(temp_folder, "export.tar.gz")
            export([sd3], outfile=filename, silent=True)
            self.clean_db()
            import_data(filename, silent=True)
            # Check that the imported nodes are correctly imported and that
            # the user assigned to the nodes is the right one
            for uuid in uuids_u1:
                node = load_node(uuid=uuid)
                self.assertEquals(node.get_user().email, new_email)
            for uuid in uuids_u2:
                self.assertEquals(load_node(uuid).get_user().email,
                                  get_configured_user_email())
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_6(self):
        """
        This test checks that nodes belonging to user A (which is not the
        default user) can be correctly exported, imported, enriched with nodes
        from the default user, re-exported & re-imported and that in the end
        all the nodes that have been finally imported belonging to the right
        users.
        """
        import os
        import shutil
        import tempfile
        from aiida.orm import load_node
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.data.structure import StructureData
        from aiida.orm.importexport import export
        from aiida.common.datastructures import calc_states
        from aiida.common.links import LinkType
        from aiida.common.utils import get_configured_user_email
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            # Create another user
            new_email = "[email protected]"
            user = orm.User(email=new_email, backend=self.backend).store()
            # Create a structure data node that has a calculation as output
            sd1 = StructureData()
            sd1.set_user(user)
            sd1.label = 'sd1'
            sd1.store()
            jc1 = JobCalculation()
            jc1.set_computer(self.computer)
            jc1.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc1.set_user(user)
            jc1.label = 'jc1'
            jc1.store()
            jc1.add_link_from(sd1)
            jc1._set_state(calc_states.PARSING)
            # Create some nodes from a different user
            sd2 = StructureData()
            sd2.set_user(user)
            sd2.label = 'sd2'
            sd2.store()
            sd2.add_link_from(jc1, label='l1', link_type=LinkType.CREATE)
            # Set the jc1 to FINISHED
            jc1._set_state(calc_states.FINISHED)
            # At this point we export the generated data
            filename1 = os.path.join(temp_folder, "export1.tar.gz")
            export([sd2], outfile=filename1, silent=True)
            uuids1 = [sd1.uuid, jc1.uuid, sd2.uuid]
            self.clean_db()
            self.insert_data()
            import_data(filename1, silent=True)
            # Check that the imported nodes are correctly imported and that
            # the user assigned to the nodes is the right one
            for uuid in uuids1:
                self.assertEquals(load_node(uuid).get_user().email, new_email)
            # Now we continue to generate more data based on the imported
            # data
            sd2_imp = load_node(sd2.uuid)
            jc2 = JobCalculation()
            jc2.set_computer(self.computer)
            jc2.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc2.label = 'jc2'
            jc2.store()
            jc2.add_link_from(sd2_imp, label='l2')
            jc2._set_state(calc_states.PARSING)
            sd3 = StructureData()
            sd3.label = 'sd3'
            sd3.store()
            sd3.add_link_from(jc2, label='l3', link_type=LinkType.CREATE)
            # Set the jc2 to FINISHED
            jc2._set_state(calc_states.FINISHED)
            # Store the UUIDs of the nodes that should be checked
            # if they can be imported correctly.
            uuids2 = [jc2.uuid, sd3.uuid]
            filename2 = os.path.join(temp_folder, "export2.tar.gz")
            export([sd3], outfile=filename2, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(filename2, silent=True)
            # Check that the imported nodes are correctly imported and that
            # the user assigned to the nodes is the right one
            for uuid in uuids1:
                self.assertEquals(load_node(uuid).get_user().email, new_email)
            for uuid in uuids2:
                self.assertEquals(load_node(uuid).get_user().email,
                                  get_configured_user_email())
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_7(self):
        """
        This test checks that nodes that belong to a specific group are
        correctly imported and exported.
        """
        import os
        import shutil
        import tempfile
        from aiida.orm import load_node
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.data.structure import StructureData
        from aiida.orm.importexport import export
        from aiida.common.datastructures import calc_states
        from aiida.orm.querybuilder import QueryBuilder
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            # Create another user
            new_email = "[email protected]"
            user = orm.User(email=new_email, backend=self.backend)
            user.store()
            # Create a structure data node that has a calculation as output
            sd1 = StructureData()
            sd1.set_user(user)
            sd1.label = 'sd1'
            sd1.store()
            jc1 = JobCalculation()
            jc1.set_computer(self.computer)
            jc1.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc1.set_user(user)
            jc1.label = 'jc1'
            jc1.store()
            jc1.add_link_from(sd1)
            jc1._set_state(calc_states.PARSING)
            # Create a group and add the data inside
            from aiida.orm.group import Group
            g1 = Group(name="node_group")
            g1.store()
            g1.add_nodes([sd1, jc1])
            g1_uuid = g1.uuid
            # At this point we export the generated data
            filename1 = os.path.join(temp_folder, "export1.tar.gz")
            export([sd1, jc1, g1], outfile=filename1,
                   silent=True)
            n_uuids = [sd1.uuid, jc1.uuid]
            self.clean_db()
            self.insert_data()
            import_data(filename1, silent=True)
            # Check that the imported nodes are correctly imported and that
            # the user assigned to the nodes is the right one
            for uuid in n_uuids:
                self.assertEquals(load_node(uuid).get_user().email, new_email)
            # Check that the exported group is imported correctly
            qb = QueryBuilder()
            qb.append(Group, filters={'uuid': {'==': g1_uuid}})
            self.assertEquals(qb.count(), 1, "The group was not found.")
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_group_export(self):
        """
        Test that when exporting just a group, its nodes are also exported
        """
        import os
        import shutil
        import tempfile
        from aiida.orm import load_node
        from aiida.orm.data.structure import StructureData
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            # Create another user
            new_email = "[email protected]"
            user = orm.User(email=new_email, backend=self.backend)
            user.store()
            # Create a structure data node
            sd1 = StructureData()
            sd1.set_user(user)
            sd1.label = 'sd1'
            sd1.store()
            # Create a group and add the data inside
            from aiida.orm.group import Group
            g1 = Group(name="node_group")
            g1.store()
            g1.add_nodes([sd1])
            g1_uuid = g1.uuid
            # At this point we export the generated data
            filename1 = os.path.join(temp_folder, "export1.tar.gz")
            export([g1], outfile=filename1, silent=True)
            n_uuids = [sd1.uuid]
            self.clean_db()
            self.insert_data()
            import_data(filename1, silent=True)
            # Check that the imported nodes are correctly imported and that
            # the user assigned to the nodes is the right one
            for uuid in n_uuids:
                self.assertEquals(load_node(uuid).get_user().email, new_email)
            # Check that the exported group is imported correctly
            qb = QueryBuilder()
            qb.append(Group, filters={'uuid': {'==': g1_uuid}})
            self.assertEquals(qb.count(), 1, "The group was not found.")
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_workfunction_1(self):
        """Check that only the provenance of the selected result is exported."""
        import shutil, os, tempfile
        from aiida.work.workfunctions import workfunction
        from aiida.orm.data.float import Float
        from aiida.orm import load_node
        from aiida.orm.importexport import export
        from aiida.common.exceptions import NotExistent
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()

        @workfunction
        def add(a, b):
            """Add 2 numbers"""
            return {'res': Float(a + b)}

        def max_(**kwargs):
            """select the max value"""
            max_val = max([(v.value, v) for v in kwargs.values()])
            return {'res': max_val[1]}

        try:
            # I'm creating a bunch of numbers
            a, b, c, d, e = (Float(i) for i in range(5))
            # this adds the maximum number between bcde to a.
            res = add(a=a, b=max_(b=b, c=c, d=d, e=e)['res'])['res']
            # These are the uuids that would be exported as well (as parents) if I wanted the final result
            uuids_values = [(a.uuid, a.value), (e.uuid, e.value), (res.uuid, res.value)]
            # These are the uuids that shouldn't be exported since it's a selection.
            not_wanted_uuids = [v.uuid for v in (b, c, d)]
            # At this point we export the generated data
            filename1 = os.path.join(temp_folder, "export1.tar.gz")
            export([res], outfile=filename1, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(filename1, silent=True)
            # Check that the imported nodes are correctly imported and that the value is preserved
            for uuid, value in uuids_values:
                self.assertEquals(load_node(uuid).value, value)
            for uuid in not_wanted_uuids:
                with self.assertRaises(NotExistent):
                    load_node(uuid)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_workcalculation_2(self):
        """Check that a WorkCalculation output survives an export/import round-trip."""
        import shutil, os, tempfile
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.orm.data.float import Float
        from aiida.orm.data.int import Int
        from aiida.orm import load_node
        from aiida.common.links import LinkType
        from aiida.orm.importexport import export
        from aiida.common.exceptions import NotExistent
        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            master = WorkCalculation().store()
            slave = WorkCalculation().store()
            input_1 = Int(3).store()
            input_2 = Int(5).store()
            output_1 = Int(2).store()
            master.add_link_from(input_1, 'input_1', link_type=LinkType.INPUT)
            slave.add_link_from(master, 'CALL', link_type=LinkType.CALL)
            slave.add_link_from(input_2, 'input_2', link_type=LinkType.INPUT)
            output_1.add_link_from(master, 'CREATE', link_type=LinkType.CREATE)
            uuids_values = [(v.uuid, v.value) for v in (output_1,)]
            filename1 = os.path.join(temp_folder, "export1.tar.gz")
            export([output_1], outfile=filename1, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(filename1, silent=True)
            for uuid, value in uuids_values:
                self.assertEquals(load_node(uuid).value, value)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)

    def test_reexport(self):
        """
        Export something, import and reexport and check if everything is valid.
        The export is rather easy::

             ___       ___          ___
            |   | INP |   | CREATE |   |
            | p | --> | c | -----> | a |
            |___|     |___|        |___|

        """
        import os, shutil, tempfile, numpy as np, string, random
        from datetime import datetime
        from aiida.orm import Calculation, load_node, Group
        from aiida.orm.data.array import ArrayData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.importexport import export
        from aiida.common.hashing import make_hash
        from aiida.common.links import LinkType

        def get_hash_from_db_content(groupname):
            """Hash of everything that must survive an export/import cycle."""
            qb = QueryBuilder()
            qb.append(ParameterData, tag='p', project='*')
            qb.append(Calculation, tag='c', project='*', edge_tag='p2c', edge_project=('label', 'type'))
            qb.append(ArrayData, tag='a', project='*', edge_tag='c2a', edge_project=('label', 'type'))
            qb.append(Group, filters={'name': groupname}, project='*', tag='g', group_of='a')
            # I want the query to contain something!
            self.assertTrue(qb.count() > 0)
            # The hash is given from the preservable entries in an export-import cycle,
            # uuids, attributes, labels, descriptions, arrays, link-labels, link-types:
            hash_ = make_hash([(
                item['p']['*'].get_attrs(),
                item['p']['*'].uuid,
                item['p']['*'].label,
                item['p']['*'].description,
                item['c']['*'].uuid,
                item['c']['*'].get_attrs(),
                item['a']['*'].get_attrs(),
                [item['a']['*'].get_array(name) for name in item['a']['*'].get_arraynames()],
                item['a']['*'].uuid,
                item['g']['*'].uuid,
                item['g']['*'].name,
                item['p2c']['label'],
                item['p2c']['type'],
                item['c2a']['label'],
                item['c2a']['type'],
                item['g']['*'].name,
            ) for item in qb.dict()])
            return hash_

        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        chars = string.ascii_uppercase + string.digits
        size = 10
        groupname = 'test-group'
        try:
            nparr = np.random.random((4, 3, 2))
            trial_dict = {}
            # give some integers:
            trial_dict.update({str(k): np.random.randint(100) for k in range(10)})
            # give some floats:
            trial_dict.update({str(k): np.random.random() for k in range(10, 20)})
            # give some booleans:
            trial_dict.update({str(k): bool(np.random.randint(1)) for k in range(20, 30)})
            # give some datetime:
            trial_dict.update({str(k): datetime(
                year=2017,
                month=np.random.randint(1, 12),
                day=np.random.randint(1, 28)) for k in range(30, 40)})
            # give some text (note: this overwrites the boolean keys 20-29):
            trial_dict.update({str(k): ''.join(random.choice(chars) for _ in range(size)) for k in range(20, 30)})
            p = ParameterData(dict=trial_dict)
            p.label = str(datetime.now())
            p.description = 'd_' + str(datetime.now())
            p.store()
            c = Calculation()
            # Also set the trial dict as attributes of the calculation, with
            # randomized keys. BUGFIX: this used to be a generator expression
            # that was never consumed, so no attribute was ever actually set.
            for k, v in trial_dict.items():
                c._set_attr(str(int(k) + np.random.randint(10)), v)
            c.store()
            a = ArrayData()
            a.set_array('array', nparr)
            a.store()
            # LINKS
            # the calculation has input the parameters-instance
            c.add_link_from(p, label='input_parameters', link_type=LinkType.INPUT)
            # I want the array to be an output of the calculation
            a.add_link_from(c, label='output_array', link_type=LinkType.CREATE)
            g = Group(name=groupname)
            g.store()
            g.add_nodes(a)
            hash_from_dbcontent = get_hash_from_db_content(groupname)
            # I export and reimport 3 times in a row:
            for i in range(3):
                # Always new filename:
                filename = os.path.join(temp_folder, "export-{}.zip".format(i))
                # Loading the group from the string
                g = Group.get_from_string(groupname)
                # exporting based on all members of the group
                # this also checks if group memberships are preserved!
                export([g] + [n for n in g.nodes], outfile=filename, silent=True)
                # cleaning the DB!
                self.clean_db()
                # reimporting the data from the file
                import_data(filename, silent=True, ignore_unknown_nodes=True)
                # creating the hash from db content
                new_hash = get_hash_from_db_content(groupname)
                # I check for equality against the first hash created, which implies that hashes
                # are equal in all iterations of this process
                self.assertEqual(hash_from_dbcontent, new_hash)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
class TestComplex(AiidaTestCase):
    """Round-trip export/import test for a small graph mixing several node types."""
    def test_complex_graph_import_export(self):
        """
        This test checks that a small and bit complex graph can be correctly
        exported and imported.
        It will create the graph, store it to the database, export it to a file
        and import it. In the end it will check if the initial nodes are present
        at the imported graph.
        """
        import tempfile
        import shutil
        import os
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.data.folder import FolderData
        from aiida.orm.data.parameter import ParameterData
        from aiida.orm.data.remote import RemoteData
        from aiida.common.links import LinkType
        from aiida.orm.importexport import export, import_data
        from aiida.orm.utils import load_node
        from aiida.common.exceptions import NotExistent
        temp_folder = tempfile.mkdtemp()
        try:
            # Build the graph: calc1 -> rd1 -> calc2 -> fd1, with pd1/pd2 as
            # extra inputs of calc2.
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            calc1.label = "calc1"
            calc1.store()
            calc1._set_state(u'RETRIEVING')
            pd1 = ParameterData()
            pd1.label = "pd1"
            pd1.store()
            pd2 = ParameterData()
            pd2.label = "pd2"
            pd2.store()
            rd1 = RemoteData()
            rd1.label = "rd1"
            rd1.set_remote_path("/x/y.py")
            rd1.set_computer(self.computer)
            rd1.store()
            rd1.add_link_from(calc1, link_type=LinkType.CREATE)
            calc2 = JobCalculation()
            calc2.set_computer(self.computer)
            calc2.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            calc2.label = "calc2"
            calc2.store()
            calc2.add_link_from(pd1, link_type=LinkType.INPUT)
            calc2.add_link_from(pd2, link_type=LinkType.INPUT)
            calc2.add_link_from(rd1, link_type=LinkType.INPUT)
            calc2._set_state(u'SUBMITTING')
            fd1 = FolderData()
            fd1.label = "fd1"
            fd1.store()
            fd1.add_link_from(calc2, link_type=LinkType.CREATE)
            # Remember every node by UUID so we can verify them after re-import.
            node_uuids_labels = {calc1.uuid: calc1.label, pd1.uuid: pd1.label,
                                 pd2.uuid: pd2.label, rd1.uuid: rd1.label,
                                 calc2.uuid: calc2.label, fd1.uuid: fd1.label}
            filename = os.path.join(temp_folder, "export.tar.gz")
            # Exporting the leaf node should pull in the whole provenance graph.
            export([fd1], outfile=filename, silent=True)
            self.clean_db()
            import_data(filename, silent=True, ignore_unknown_nodes=True)
            # Every node of the original graph must exist after the import.
            for uuid, label in node_uuids_labels.items():
                try:
                    load_node(uuid)
                except NotExistent:
                    self.fail("Node with UUID {} and label {} was not "
                              "found.".format(uuid, label))
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
class TestComputer(AiidaTestCase):
    """Tests of how Computer entities behave across export/import cycles
    (deduplication, renaming on collision, metadata/transport params).

    NOTE(review): these methods call ``import_data`` and ``six`` without
    local imports, so both are presumably imported at module level (not
    visible in this chunk) -- confirm against the file header.
    """
    def setUp(self):
        # Start every test from a pristine database with the default fixtures.
        self.clean_db()
        self.insert_data()
    def tearDown(self):
        pass
    def test_same_computer_import(self):
        """
        Test that you can import nodes in steps without any problems. In this
        test we will import a first calculation and then a second one. The
        import should work as expected and have in the end two job
        calculations.
        Each calculation is related to the same computer. In the end we should
        have only one computer
        """
        import os
        import shutil
        import tempfile
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.computers import Computer
        from aiida.orm.calculation.job import JobCalculation
        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            # Store two job calculations related to the same computer
            calc1_label = "calc1"
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_option('resources', {"num_machines": 1,
                                           "num_mpiprocs_per_machine": 1})
            calc1.label = calc1_label
            calc1.store()
            calc1._set_state(u'RETRIEVING')
            calc2_label = "calc2"
            calc2 = JobCalculation()
            calc2.set_computer(self.computer)
            calc2.set_option('resources', {"num_machines": 2,
                                           "num_mpiprocs_per_machine": 2})
            calc2.label = calc2_label
            calc2.store()
            calc2._set_state(u'RETRIEVING')
            # Store locally the computer name
            comp_name = six.text_type(self.computer.name)
            comp_uuid = six.text_type(self.computer.uuid)
            # Export the first job calculation
            filename1 = os.path.join(export_file_tmp_folder, "export1.tar.gz")
            export([calc1], outfile=filename1, silent=True)
            # Export the second job calculation
            filename2 = os.path.join(export_file_tmp_folder, "export2.tar.gz")
            export([calc2], outfile=filename2, silent=True)
            # Clean the local database
            self.clean_db()
            # Check that there are no computers
            qb = QueryBuilder()
            qb.append(Computer, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any computers"
                                            "in the database at this point.")
            # Check that there are no calculations
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any "
                                            "calculations in the database at "
                                            "this point.")
            # Import the first calculation
            import_data(filename1, silent=True)
            # Check that the calculation computer is imported correctly.
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['label'])
            self.assertEqual(qb.count(), 1, "Only one calculation should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), calc1_label,
                             "The calculation label is not correct.")
            # Check that the referenced computer is imported correctly.
            qb = QueryBuilder()
            qb.append(Computer, project=['name', 'uuid', 'id'])
            self.assertEqual(qb.count(), 1, "Only one computer should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), comp_name,
                             "The computer name is not correct.")
            self.assertEqual(six.text_type(qb.first()[1]), comp_uuid,
                             "The computer uuid is not correct.")
            # Store the id of the computer
            comp_id = qb.first()[2]
            # Import the second calculation
            import_data(filename2, silent=True)
            # Check that the number of computers remains the same and its data
            # did not change.
            qb = QueryBuilder()
            qb.append(Computer, project=['name', 'uuid', 'id'])
            self.assertEqual(qb.count(), 1, "Only one computer should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), comp_name,
                             "The computer name is not correct.")
            self.assertEqual(six.text_type(qb.first()[1]), comp_uuid,
                             "The computer uuid is not correct.")
            self.assertEqual(qb.first()[2], comp_id,
                             "The computer id is not correct.")
            # Check that now you have two calculations attached to the same
            # computer.
            qb = QueryBuilder()
            qb.append(Computer, tag='comp')
            qb.append(JobCalculation, has_computer='comp', project=['label'])
            self.assertEqual(qb.count(), 2, "Two calculations should be "
                                            "found.")
            ret_labels = set(_ for [_] in qb.all())
            self.assertEqual(ret_labels, set([calc1_label, calc2_label]),
                             "The labels of the calculations are not correct.")
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)
    def test_same_computer_different_name_import(self):
        """
        This test checks that if the computer is re-imported with a different
        name to the same database, then the original computer will not be
        renamed. It also checks that the names were correctly imported (without
        any change since there is no computer name collision)
        """
        import os
        import shutil
        import tempfile
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.computers import Computer
        from aiida.orm.calculation.job import JobCalculation
        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            # Store a calculation
            calc1_label = "calc1"
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_option('resources', {"num_machines": 1,
                                           "num_mpiprocs_per_machine": 1})
            calc1.label = calc1_label
            calc1.store()
            calc1._set_state(u'RETRIEVING')
            # Store locally the computer name
            comp1_name = six.text_type(self.computer.name)
            # Export the first job calculation
            filename1 = os.path.join(export_file_tmp_folder, "export1.tar.gz")
            export([calc1], outfile=filename1, silent=True)
            # Rename the computer
            self.computer.set_name(comp1_name + "_updated")
            # Store a second calculation
            calc2_label = "calc2"
            calc2 = JobCalculation()
            calc2.set_computer(self.computer)
            calc2.set_option('resources', {"num_machines": 2,
                                           "num_mpiprocs_per_machine": 2})
            calc2.label = calc2_label
            calc2.store()
            calc2._set_state(u'RETRIEVING')
            # Export the second job calculation
            filename2 = os.path.join(export_file_tmp_folder, "export2.tar.gz")
            export([calc2], outfile=filename2, silent=True)
            # Clean the local database
            self.clean_db()
            # Check that there are no computers
            qb = QueryBuilder()
            qb.append(Computer, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any computers"
                                            "in the database at this point.")
            # Check that there are no calculations
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any "
                                            "calculations in the database at "
                                            "this point.")
            # Import the first calculation
            import_data(filename1, silent=True)
            # Check that the calculation computer is imported correctly.
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['label'])
            self.assertEqual(qb.count(), 1, "Only one calculation should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), calc1_label,
                             "The calculation label is not correct.")
            # Check that the referenced computer is imported correctly.
            qb = QueryBuilder()
            qb.append(Computer, project=['name', 'uuid', 'id'])
            self.assertEqual(qb.count(), 1, "Only one computer should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), comp1_name,
                             "The computer name is not correct.")
            # Import the second calculation
            import_data(filename2, silent=True)
            # Check that the number of computers remains the same and its data
            # did not change.
            qb = QueryBuilder()
            qb.append(Computer, project=['name'])
            self.assertEqual(qb.count(), 1, "Only one computer should be "
                                            "found.")
            self.assertEqual(six.text_type(qb.first()[0]), comp1_name,
                             "The computer name is not correct.")
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)
    def test_different_computer_same_name_import(self):
        """
        This test checks that if there is a name collision, the imported
        computers are renamed accordingly.
        """
        import os
        import shutil
        import tempfile
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.computers import Computer
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.importexport import COMP_DUPL_SUFFIX
        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            # Set the computer name
            comp1_name = "localhost_1"
            self.computer.set_name(comp1_name)
            # Store a calculation
            calc1_label = "calc1"
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_option('resources', {"num_machines": 1,
                                           "num_mpiprocs_per_machine": 1})
            calc1.label = calc1_label
            calc1.store()
            calc1._set_state(u'RETRIEVING')
            # Export the first job calculation
            filename1 = os.path.join(export_file_tmp_folder, "export1.tar.gz")
            export([calc1], outfile=filename1, silent=True)
            # Reset the database, so the next computer is a *different* entity
            # that happens to carry the same name.
            self.clean_db()
            self.insert_data()
            # Set the computer name to the same name as before
            self.computer.set_name(comp1_name)
            # Store a second calculation
            calc2_label = "calc2"
            calc2 = JobCalculation()
            calc2.set_computer(self.computer)
            calc2.set_option('resources', {"num_machines": 2,
                                           "num_mpiprocs_per_machine": 2})
            calc2.label = calc2_label
            calc2.store()
            calc2._set_state(u'RETRIEVING')
            # Export the second job calculation
            filename2 = os.path.join(export_file_tmp_folder, "export2.tar.gz")
            export([calc2], outfile=filename2, silent=True)
            # Reset the database
            self.clean_db()
            self.insert_data()
            # Set the computer name to the same name as before
            self.computer.set_name(comp1_name)
            # Store a third calculation
            calc3_label = "calc3"
            calc3 = JobCalculation()
            calc3.set_computer(self.computer)
            calc3.set_option('resources', {"num_machines": 2,
                                           "num_mpiprocs_per_machine": 2})
            calc3.label = calc3_label
            calc3.store()
            calc3._set_state(u'RETRIEVING')
            # Export the third job calculation
            filename3 = os.path.join(export_file_tmp_folder, "export3.tar.gz")
            export([calc3], outfile=filename3, silent=True)
            # Clean the local database
            self.clean_db()
            # Check that there are no computers
            qb = QueryBuilder()
            qb.append(Computer, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any computers"
                                            "in the database at this point.")
            # Check that there are no calculations
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['*'])
            self.assertEqual(qb.count(), 0, "There should not be any "
                                            "calculations in the database at "
                                            "this point.")
            # Import all the calculations
            import_data(filename1, silent=True)
            import_data(filename2, silent=True)
            import_data(filename3, silent=True)
            # Retrieve the calculation-computer pairs
            qb = QueryBuilder()
            qb.append(JobCalculation, project=['label'], tag='jcalc')
            qb.append(Computer, project=['name'],
                      computer_of='jcalc')
            self.assertEqual(qb.count(), 3, "Three combinations expected.")
            res = qb.all()
            # First computer keeps its name; the colliding ones get the
            # deduplication suffix appended.
            self.assertIn([calc1_label, comp1_name], res,
                          "Calc-Computer combination not found.")
            self.assertIn([calc2_label,
                           comp1_name + COMP_DUPL_SUFFIX.format(0)], res,
                          "Calc-Computer combination not found.")
            self.assertIn([calc3_label,
                           comp1_name + COMP_DUPL_SUFFIX.format(1)], res,
                          "Calc-Computer combination not found.")
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)
    def test_correct_import_of_computer_json_params(self):
        """
        This test checks that the metadata and transport params are
        exported and imported correctly in both backends.
        """
        import os
        import shutil
        import tempfile
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.computers import Computer
        from aiida.orm.calculation.job import JobCalculation
        # Creating a folder for the import/export files
        export_file_tmp_folder = tempfile.mkdtemp()
        unpack_tmp_folder = tempfile.mkdtemp()
        try:
            # Set the computer name
            comp1_name = "localhost_1"
            comp1_metadata = {
                u'workdir': u'/tmp/aiida'
            }
            comp1_transport_params = {
                u'key1': u'value1',
                u'key2': 2
            }
            self.computer.set_name(comp1_name)
            self.computer._set_metadata(comp1_metadata)
            self.computer.set_transport_params(comp1_transport_params)
            # Store a calculation
            calc1_label = "calc1"
            calc1 = JobCalculation()
            calc1.set_computer(self.computer)
            calc1.set_option('resources', {"num_machines": 1,
                                           "num_mpiprocs_per_machine": 1})
            calc1.label = calc1_label
            calc1.store()
            calc1._set_state(u'RETRIEVING')
            # Export the first job calculation
            filename1 = os.path.join(export_file_tmp_folder, "export1.tar.gz")
            export([calc1], outfile=filename1, silent=True)
            # Clean the local database
            self.clean_db()
            # Import the data
            import_data(filename1, silent=True)
            qb = QueryBuilder()
            qb.append(Computer, project=['transport_params', '_metadata'],
                      tag="comp")
            self.assertEqual(qb.count(), 1, "Expected only one computer")
            res = qb.dict()[0]
            self.assertEqual(res['comp']['transport_params'],
                             comp1_transport_params,
                             "Not the expected transport parameters "
                             "were found")
            self.assertEqual(res['comp']['_metadata'],
                             comp1_metadata,
                             "Not the expected metadata were found")
        finally:
            # Deleting the created temporary folders
            shutil.rmtree(export_file_tmp_folder, ignore_errors=True)
            shutil.rmtree(unpack_tmp_folder, ignore_errors=True)
    def test_import_of_django_sqla_export_file(self):
        """
        Check why sqla import manages to import the django export file correctly
        """
        from aiida.backends.tests.utils.fixtures import import_archive_fixture
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.computers import Computer
        # Run the same verification against an archive produced by each backend.
        for archive in ['export/compare/django.aiida', 'export/compare/sqlalchemy.aiida']:
            # Clean the database
            self.clean_db()
            # Import the needed data
            import_archive_fixture(archive)
            # The expected metadata & transport parameters
            comp1_metadata = {
                u'workdir': u'/tmp/aiida'
            }
            comp1_transport_params = {
                u'key1': u'value1',
                u'key2': 2
            }
            # Check that we got the correct metadata & transport parameters
            qb = QueryBuilder()
            qb.append(Computer, project=['transport_params', '_metadata'], tag="comp")
            self.assertEqual(qb.count(), 1, "Expected only one computer")
            res = qb.dict()[0]
            self.assertEqual(res['comp']['transport_params'], comp1_transport_params)
            self.assertEqual(res['comp']['_metadata'], comp1_metadata)
class TestLinks(AiidaTestCase):
    """Tests that provenance links (INPUT, CREATE, RETURN, CALL) survive
    export/import and that export follows the right links.

    NOTE(review): ``import_data`` is used without a local import, so it is
    presumably imported at module level (not visible in this chunk).
    """
    def setUp(self):
        # Start every test from a pristine database with the default fixtures.
        self.clean_db()
        self.insert_data()
    def tearDown(self):
        pass
    def get_all_node_links(self):
        """
        Return all links in the database as a list of
        ``[input_uuid, output_uuid, link_label, link_type]`` rows.
        """
        from aiida.orm import Node
        from aiida.orm.querybuilder import QueryBuilder
        qb = QueryBuilder()
        qb.append(Node, project='uuid', tag='input')
        qb.append(Node, project='uuid', tag='output',
                  edge_project=['label', 'type'], output_of='input')
        return qb.all()
    def test_input_and_create_links(self):
        """
        Simple test that will verify that INPUT and CREATE links are properly exported and
        correctly recreated upon import.
        """
        import os, shutil, tempfile
        from aiida.orm.data.int import Int
        from aiida.orm.importexport import export
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.common.links import LinkType
        tmp_folder = tempfile.mkdtemp()
        try:
            node_work = WorkCalculation().store()
            node_input = Int(1).store()
            node_output = Int(2).store()
            node_work.add_link_from(node_input, 'input', link_type=LinkType.INPUT)
            node_output.add_link_from(node_work, 'output', link_type=LinkType.CREATE)
            # Snapshot the links before the DB is wiped, for later comparison.
            export_links = self.get_all_node_links()
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            export([node_output], outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            import_links = self.get_all_node_links()
            export_set = [tuple(_) for _ in export_links]
            import_set = [tuple(_) for _ in import_links]
            self.assertEquals(set(export_set), set(import_set))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def construct_complex_graph(self, export_combination=0):
        """
        This method creates a "complex" graph with all available link types
        (INPUT, CREATE, RETURN and CALL) and returns the nodes of the graph. It
        also returns various combinations of nodes that need to be extracted
        but also the final expected set of nodes (after adding the expected
        predecessors and successors).
        """
        from aiida.orm.data.base import Int
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.common.datastructures import calc_states
        from aiida.common.links import LinkType
        # Only 9 combinations (indices 0..8) are defined below.
        if export_combination < 0 or export_combination > 8:
            return None
        # Node creation
        d1 = Int(1).store()
        d2 = Int(1).store()
        wc1 = WorkCalculation().store()
        wc2 = WorkCalculation().store()
        pw1 = JobCalculation()
        pw1.set_computer(self.computer)
        pw1.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
        pw1.store()
        d3 = Int(1).store()
        d4 = Int(1).store()
        pw2 = JobCalculation()
        pw2.set_computer(self.computer)
        pw2.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
        pw2.store()
        d5 = Int(1).store()
        d6 = Int(1).store()
        # Link creation
        wc1.add_link_from(d1, 'input1', link_type=LinkType.INPUT)
        wc1.add_link_from(d2, 'input2', link_type=LinkType.INPUT)
        wc2.add_link_from(d1, 'input', link_type=LinkType.INPUT)
        wc2.add_link_from(wc1, 'call', link_type=LinkType.CALL)
        pw1.add_link_from(d1, 'input', link_type=LinkType.INPUT)
        pw1.add_link_from(wc2, 'call', link_type=LinkType.CALL)
        pw1._set_state(calc_states.PARSING)
        d3.add_link_from(pw1, 'create', link_type=LinkType.CREATE)
        d3.add_link_from(wc2, 'return', link_type=LinkType.RETURN)
        d4.add_link_from(pw1, 'create', link_type=LinkType.CREATE)
        d4.add_link_from(wc2, 'return', link_type=LinkType.RETURN)
        pw2.add_link_from(d4, 'input', link_type=LinkType.INPUT)
        pw2._set_state(calc_states.PARSING)
        d5.add_link_from(pw2, 'create', link_type=LinkType.CREATE)
        d6.add_link_from(pw2, 'create', link_type=LinkType.CREATE)
        # Return the generated nodes
        graph_nodes = [d1, d2, d3, d4, d5, d6, pw1, pw2, wc1, wc2]
        # Create various combinations of nodes that should be exported
        # and the final set of nodes that are exported in each case, following
        # predecessor/successor links.
        export_list = [
            (wc1, [d1, d2, d3, d4, pw1, wc1, wc2]),
            (wc2, [d1, d3, d4, pw1, wc2]),
            (d3, [d1, d3, d4, pw1]),
            (d4, [d1, d3, d4, pw1]),
            (d5, [d1, d3, d4, d5, d6, pw1, pw2]),
            (d6, [d1, d3, d4, d5, d6, pw1, pw2]),
            (pw2, [d1, d3, d4, d5, d6, pw1, pw2]),
            (d1, [d1]),
            (d2, [d2])
        ]
        return graph_nodes, export_list[export_combination]
    def test_data_create_reversed_false(self):
        """Verify that create_reversed = False is respected when only exporting Data nodes."""
        import os
        import shutil
        import tempfile
        from aiida.common.datastructures import calc_states
        from aiida.orm import Data, Group
        from aiida.orm.data.base import Int
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.importexport import export
        from aiida.common.links import LinkType
        from aiida.orm.querybuilder import QueryBuilder
        tmp_folder = tempfile.mkdtemp()
        try:
            data_input = Int(1).store()
            data_output = Int(2).store()
            calc = JobCalculation()
            calc.set_computer(self.computer)
            calc.set_option('resources', {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            calc.store()
            calc.add_link_from(data_input, 'input', link_type=LinkType.INPUT)
            calc._set_state(calc_states.PARSING)
            data_output.add_link_from(calc, 'create', link_type=LinkType.CREATE)
            group = Group.create(name='test_group')
            group.add_nodes(data_output)
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            # With create_reversed=False the creating calculation (and thus
            # its inputs) must NOT be dragged into the export.
            export([group], outfile=export_file, silent=True, create_reversed=False)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            builder = QueryBuilder()
            builder.append(Data)
            self.assertEqual(builder.count(), 1, 'Expected a single Data node but got {}'.format(builder.count()))
            self.assertEqual(builder.all()[0][0].uuid, data_output.uuid)
            builder = QueryBuilder()
            builder.append(JobCalculation)
            self.assertEqual(builder.count(), 0, 'Expected no Calculation nodes')
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_complex_workflow_graph_links(self):
        """
        This test checks that all the needed links are correctly exported and
        imported. More precisely, it checks that INPUT, CREATE, RETURN and CALL
        links connecting Data nodes, JobCalculations and WorkCalculations are
        exported and imported correctly.
        """
        import os, shutil, tempfile
        from aiida.orm import Node
        from aiida.orm.importexport import export
        from aiida.common.links import LinkType
        from aiida.orm.querybuilder import QueryBuilder
        tmp_folder = tempfile.mkdtemp()
        try:
            graph_nodes, _ = self.construct_complex_graph()
            # Getting the input, create, return and call links
            qb = QueryBuilder()
            qb.append(Node, project='uuid')
            qb.append(Node, project='uuid',
                      edge_project=['label', 'type'],
                      edge_filters={'type': {'in': (LinkType.INPUT.value,
                                                    LinkType.CREATE.value,
                                                    LinkType.RETURN.value,
                                                    LinkType.CALL.value)}})
            export_links = qb.all()
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            export(graph_nodes, outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            import_links = self.get_all_node_links()
            export_set = [tuple(_) for _ in export_links]
            import_set = [tuple(_) for _ in import_links]
            self.assertEquals(set(export_set), set(import_set))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_complex_workflow_graph_export_set_expansion(self):
        """
        For each export combination of :meth:`construct_complex_graph`, check
        that exporting the chosen node pulls in exactly the expected set of
        predecessor/successor nodes.
        """
        import os, shutil, tempfile
        from aiida.orm.importexport import export
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm import Node
        for export_conf in range(0, 8):
            graph_nodes, (export_node, export_target) = (
                self.construct_complex_graph(export_conf))
            tmp_folder = tempfile.mkdtemp()
            try:
                export_file = os.path.join(tmp_folder, 'export.tar.gz')
                export([export_node], outfile=export_file, silent=True)
                export_node_str = str(export_node)
                self.clean_db()
                self.insert_data()
                import_data(export_file, silent=True)
                # Get all the nodes of the database
                qb = QueryBuilder()
                qb.append(Node, project='uuid')
                imported_node_uuids = set(str(_[0]) for _ in qb.all())
                export_target_uuids = set(str(_.uuid) for _ in export_target)
                from aiida.orm.utils import load_node
                self.assertEquals(
                    export_target_uuids,
                    imported_node_uuids,
                    "Problem in comparison of export node: " +
                    str(export_node_str) + "\n" +
                    "Expected set: " + str(export_target_uuids) + "\n" +
                    "Imported set: " + str(imported_node_uuids) + "\n" +
                    "Difference: " + str([load_node(_) for _ in
                                          export_target_uuids.symmetric_difference(
                                              imported_node_uuids)])
                )
            finally:
                shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_recursive_export_input_and_create_links_proper(self):
        """
        Check that CALL, INPUT, RETURN and CREATE links are followed
        recursively.
        """
        import os, shutil, tempfile
        from aiida.orm import Node
        from aiida.orm.data.base import Int
        from aiida.orm.importexport import export
        from aiida.orm.calculation.inline import InlineCalculation
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.common.links import LinkType
        from aiida.orm.querybuilder import QueryBuilder
        tmp_folder = tempfile.mkdtemp()
        try:
            wc2 = WorkCalculation().store()
            wc1 = WorkCalculation().store()
            c1 = InlineCalculation().store()
            ni1 = Int(1).store()
            ni2 = Int(2).store()
            no1 = Int(1).store()
            no2 = Int(2).store()
            # Create the connections between workcalculations and calculations
            wc1.add_link_from(wc2, 'call', link_type=LinkType.CALL)
            c1.add_link_from(wc1, 'call', link_type=LinkType.CALL)
            # Connect the first data node to wc1 & c1
            wc1.add_link_from(ni1, 'ni1-to-wc1',
                              link_type=LinkType.INPUT)
            c1.add_link_from(ni1, 'ni1-to-c1',
                             link_type=LinkType.INPUT)
            # Connect the second data node to wc1 & c1
            wc1.add_link_from(ni2, 'ni2-to-wc1',
                              link_type=LinkType.INPUT)
            c1.add_link_from(ni2, 'ni2-to-c1',
                             link_type=LinkType.INPUT)
            # Connecting the first output node to wc1 & c1
            no1.add_link_from(wc1, 'output',
                              link_type=LinkType.RETURN)
            no1.add_link_from(c1, 'output',
                              link_type=LinkType.CREATE)
            # Connecting the second output node to wc1 & c1
            no2.add_link_from(wc1, 'output',
                              link_type=LinkType.RETURN)
            no2.add_link_from(c1, 'output',
                              link_type=LinkType.CREATE)
            # Getting the input, create, return and call links
            qb = QueryBuilder()
            qb.append(Node, project='uuid')
            qb.append(Node, project='uuid',
                      edge_project=['label', 'type'],
                      edge_filters={'type': {'in': (LinkType.INPUT.value,
                                                    LinkType.CREATE.value,
                                                    LinkType.RETURN.value,
                                                    LinkType.CALL.value)}})
            export_links = qb.all()
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            # Exporting the top-level caller must recursively pull in the
            # whole graph and all its links.
            export([wc2], outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            import_links = self.get_all_node_links()
            export_set = [tuple(_) for _ in export_links]
            import_set = [tuple(_) for _ in import_links]
            self.assertEquals(set(export_set), set(import_set))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_links_for_workflows(self):
        """
        Check that CALL links are not followed in the export procedure, and the only creation
        is followed for data::
                 ____          ____          ____
                |    |  INP   |    |  CALL  |    |
                | i1 | -----> | w1 | <----- | w2 |
                |____|        |____|        |____|
                               |  |
                       CREATE  v  v  RETURN
                               ____
                              |    |
                              | o1 |
                              |____|
        """
        import os, shutil, tempfile
        from aiida.orm.data.base import Int
        from aiida.orm.importexport import export
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.common.links import LinkType
        tmp_folder = tempfile.mkdtemp()
        try:
            w1 = WorkCalculation().store()
            w2 = WorkCalculation().store()
            i1 = Int(1).store()
            o1 = Int(2).store()
            w1.add_link_from(i1, 'input-i1', link_type=LinkType.INPUT)
            w1.add_link_from(w2, 'call', link_type=LinkType.CALL)
            o1.add_link_from(w1, 'output', link_type=LinkType.CREATE)
            o1.add_link_from(w1, 'return', link_type=LinkType.RETURN)
            # Only non-CALL links are expected to survive the round trip.
            links_wanted = [l for l in self.get_all_node_links() if l[3] in
                            (LinkType.CREATE.value,
                             LinkType.INPUT.value,
                             LinkType.RETURN.value)]
            export_file_1 = os.path.join(tmp_folder, 'export-1.tar.gz')
            export_file_2 = os.path.join(tmp_folder, 'export-2.tar.gz')
            export([o1], outfile=export_file_1, silent=True)
            export([w1], outfile=export_file_2, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file_1, silent=True)
            links_in_db = self.get_all_node_links()
            self.assertEquals(sorted(links_wanted), sorted(links_in_db))
            self.clean_db()
            self.insert_data()
            import_data(export_file_2, silent=True)
            links_in_db = self.get_all_node_links()
            self.assertEquals(sorted(links_wanted), sorted(links_in_db))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_double_return_links_for_workflows(self):
        """
        This test checks that double return links to a node can be exported
        and imported without problems.
        """
        # These stdlib modules are used below (mkdtemp/path.join/rmtree) but
        # were not imported in this method, unlike every sibling test method.
        import os, shutil, tempfile
        from aiida.orm.data.base import Int
        from aiida.orm.importexport import export
        from aiida.orm.calculation.work import WorkCalculation
        from aiida.common.links import LinkType
        from aiida.orm.querybuilder import QueryBuilder
        from aiida.orm.node import Node
        tmp_folder = tempfile.mkdtemp()
        try:
            w1 = WorkCalculation().store()
            w2 = WorkCalculation().store()
            i1 = Int(1).store()
            o1 = Int(2).store()
            w1.add_link_from(i1, 'input-i1', link_type=LinkType.INPUT)
            w1.add_link_from(w2, 'call', link_type=LinkType.CALL)
            o1.add_link_from(w1, 'output', link_type=LinkType.CREATE)
            o1.add_link_from(w1, 'return', link_type=LinkType.RETURN)
            # Second RETURN link to the same output node (the case under test).
            o1.add_link_from(w2, 'return', link_type=LinkType.RETURN)
            uuids_wanted = set(_.uuid for _ in (w1, o1, i1, w2))
            links_wanted = [l for l in self.get_all_node_links() if l[3] in (
                'createlink', 'inputlink', 'returnlink', 'calllink')]
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            export([o1, w1, w2, i1],
                   outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            uuids_in_db = [str(uuid) for [uuid] in
                           QueryBuilder().append(Node, project='uuid').all()]
            self.assertEquals(sorted(uuids_wanted), sorted(uuids_in_db))
            links_in_db = self.get_all_node_links()
            self.assertEquals(sorted(links_wanted), sorted(links_in_db))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_that_solo_code_is_exported_correctly(self):
        """
        This test checks that when a calculation is exported then the
        corresponding code is also exported.
        """
        import os, shutil, tempfile
        from aiida.orm.utils import load_node
        from aiida.orm.importexport import export
        from aiida.orm.code import Code
        tmp_folder = tempfile.mkdtemp()
        try:
            code_label = 'test_code1'
            code = Code()
            code.set_remote_computer_exec((self.computer, '/bin/true'))
            code.label = code_label
            code.store()
            code_uuid = code.uuid
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            export([code], outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            self.assertEquals(load_node(code_uuid).label, code_label)
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
    def test_that_input_code_is_exported_correctly(self):
        """
        This test checks that when a calculation is exported then the
        corresponding code is also exported. It also checks that the links
        are also in place after the import.
        """
        import os, shutil, tempfile
        from aiida.orm.utils import load_node
        from aiida.orm.importexport import export
        from aiida.common.links import LinkType
        from aiida.orm.calculation.job import JobCalculation
        from aiida.orm.code import Code
        from aiida.orm.querybuilder import QueryBuilder
        tmp_folder = tempfile.mkdtemp()
        try:
            code_label = 'test_code1'
            code = Code()
            code.set_remote_computer_exec((self.computer, '/bin/true'))
            code.label = code_label
            code.store()
            code_uuid = code.uuid
            jc = JobCalculation()
            jc.set_computer(self.computer)
            jc.set_option('resources',
                          {"num_machines": 1, "num_mpiprocs_per_machine": 1})
            jc.store()
            jc.add_link_from(code, 'code', link_type=LinkType.INPUT)
            export_file = os.path.join(tmp_folder, 'export.tar.gz')
            export([jc], outfile=export_file, silent=True)
            self.clean_db()
            self.insert_data()
            import_data(export_file, silent=True)
            # Check that the node is there
            self.assertEquals(load_node(code_uuid).label, code_label)
            # Check that the link is in place
            qb = QueryBuilder()
            qb.append(Code, project='uuid')
            qb.append(JobCalculation, project='uuid',
                      edge_project=['label', 'type'],
                      edge_filters={'type': {'==': LinkType.INPUT.value}})
            self.assertEquals(qb.count(), 1,
                              "Expected to find one and only one link from "
                              "code to the calculation node. {} found."
                              .format(qb.count()))
        finally:
            shutil.rmtree(tmp_folder, ignore_errors=True)
system.ts | import {
css,
propNames,
ResponsiveValue,
SystemProps,
SystemStyleObject,
} from "@chakra-ui/styled-system"
import { get, objectFilter, objectAssign, Dict } from "@chakra-ui/utils"
import createStyled, {
CSSObject,
FunctionInterpolation,
Interpolation,
} from "@emotion/styled"
import { shouldForwardProp } from "./should-forward-prop"
import { As, ChakraComponent } from "./system.types"
import { domElements, DOMElements } from "./system.utils"
/**
* Convert propNames array to object to faster lookup perf
*/
/**
 * Convert propNames array to object (keymirror) so prop membership checks
 * are O(1) lookups instead of repeated array scans.
 */
const stylePropNames = propNames.reduce(function (keymirror, key) {
  // `typeof` always yields a string, so use strict inequality — identical
  // semantics to the previous loose `!=`, but lint-clean and intent-explicit.
  if (typeof key !== "object" && typeof key !== "function") keymirror[key] = key
  return keymirror
}, {})
/**
 * Props understood by the style resolver: every system (style) prop plus the
 * chakra-specific escape hatches (`__css`, `sx`, raw `css`) and the text
 * truncation helpers (`noOfLines`, `isTruncated`).
 */
interface StyleResolverProps extends SystemProps {
  __css?: SystemStyleObject
  sx?: SystemStyleObject
  theme: Dict
  css?: CSSObject
  noOfLines?: ResponsiveValue<number>
  isTruncated?: boolean
  layerStyle?: string
  textStyle?: string
  apply?: ResponsiveValue<string>
}

/**
 * Factory signature: given an optional base style, return an emotion
 * interpolation function that resolves the final CSS for a component.
 */
type StyleResolver = (params: {
  baseStyle?: SystemStyleObject
}) => FunctionInterpolation<StyleResolverProps>
/**
* Style resolver function that manages how style props are merged
* in combination with other possible ways of defining styles.
*
* For example, take a component defined this way:
* ```jsx
* <Box fontSize="24px" sx={{ fontSize: "40px" }}></Box>
* ```
*
* We want to manage the priority of the styles properly to prevent unwanted
* behaviors. Right now, the `sx` prop has the highest priority so the resolved
* fontSize will be `40px`
*/
export const styleResolver: StyleResolver = ({ baseStyle }) => (props) => {
  const {
    theme,
    layerStyle,
    textStyle,
    apply,
    noOfLines,
    isTruncated,
    css: cssProp,
    __css,
    sx,
    ...rest
  } = props

  // Resolve named styles from the theme; default to empty objects so the
  // later merge is a no-op when these props are not supplied.
  const _layerStyle = get(theme, `layerStyles.${layerStyle}`, {})
  const _textStyle = get(theme, `textStyles.${textStyle}`, {})

  // filter out props that aren't style props
  const styleProps = objectFilter(rest, (_, prop) => prop in stylePropNames)

  let truncateStyle: any = {}
  if (noOfLines != null) {
    // Multiline clamp; takes precedence over `isTruncated` when both are set.
    truncateStyle = {
      overflow: "hidden",
      textOverflow: "ellipsis",
      display: "-webkit-box",
      WebkitBoxOrient: "vertical",
      WebkitLineClamp: noOfLines,
    }
  } else if (isTruncated) {
    // Single-line truncation.
    truncateStyle = {
      overflow: "hidden",
      textOverflow: "ellipsis",
      whiteSpace: "nowrap",
    }
  }

  /**
   * The computed, theme-aware style object. The order of the properties
   * within `objectAssign` determines how styles are overridden: later
   * entries win, so `sx` has the highest priority.
   */
  const finalStyles = objectAssign(
    {},
    __css,
    baseStyle,
    { apply },
    _layerStyle,
    _textStyle,
    truncateStyle,
    styleProps,
    sx,
  )

  // Converts theme-aware style object to real css object
  const computedCSS = css(finalStyles)(props.theme)

  // Merge the computed css object with styles in css prop
  const cssObject = objectAssign(computedCSS, cssProp)
  return cssObject as Interpolation<StyleResolverProps>
}
/** Options accepted by `styled` on top of emotion's own styled options. */
interface StyledOptions {
  // Predicate deciding which props reach the underlying DOM element.
  shouldForwardProp?(prop: string): boolean
  // Emotion debug label for the generated class name.
  label?: string
  // Component-level default styles, merged below everything else.
  baseStyle?: SystemStyleObject
}
export function | <T extends As, P = {}>(
component: T,
options?: StyledOptions,
) {
const { baseStyle, ...styledOptions } = options ?? {}
const opts = { ...styledOptions, shouldForwardProp }
const _styled = createStyled(component as React.ComponentType<any>, opts)
const interpolation = styleResolver({ baseStyle })
const StyledComponent = _styled(interpolation)
return StyledComponent as ChakraComponent<T, P>
}
/** Map of every supported DOM tag to its chakra-enabled component type. */
type ChakraJSXElements = {
  [Tag in DOMElements]: ChakraComponent<Tag, {}>
}

/** Callable form of `chakra`: a factory with the same shape as `styled`. */
type CreateChakraComponent = {
  <T extends As, P = {}>(
    component: T,
    options?: StyledOptions,
  ): ChakraComponent<T, P>
}

// `chakra` is both callable (chakra(Component)) and element-indexed
// (chakra.div); the double cast stitches those two types together.
export const chakra = (styled as unknown) as CreateChakraComponent &
  ChakraJSXElements

// Pre-build chakra.<tag> for every known DOM element.
domElements.forEach((tag) => {
  //@ts-ignore
  chakra[tag] = chakra(tag)
})
| styled |
date-es-SV.js | /**
* @overview datejs
* @version 1.0.0-rc3
* @author Gregory Wild-Smith <[email protected]>
* @copyright 2014 Gregory Wild-Smith
* @license MIT
* @homepage https://github.com/abritinthebay/datejs
*/
/*
* DateJS Culture String File
* Country Code: es-SV
* Name: Spanish (El Salvador)
* Format: "key" : "value"
* Key is the en-US term, Value is the Key in the current language.
*/
Date.CultureStrings = Date.CultureStrings || {};
Date.CultureStrings["es-SV"] = {
"name": "es-SV",
"englishName": "Spanish (El Salvador)",
"nativeName": "Español (El Salvador)",
"Sunday": "domingo",
"Monday": "lunes",
"Tuesday": "martes",
"Wednesday": "miércoles",
"Thursday": "jueves",
"Friday": "viernes",
"Saturday": "sábado",
"Sun": "dom",
"Mon": "lun",
"Tue": "mar",
"Wed": "mié",
"Thu": "jue",
"Fri": "vie",
"Sat": "sáb",
"Su": "do",
"Mo": "lu",
"Tu": "ma",
"We": "mi",
"Th": "ju",
"Fr": "vi",
"Sa": "sá",
"S_Sun_Initial": "d",
"M_Mon_Initial": "l",
"T_Tue_Initial": "m",
"W_Wed_Initial": "m",
"T_Thu_Initial": "j",
"F_Fri_Initial": "v",
"S_Sat_Initial": "s",
"January": "enero",
"February": "febrero",
"March": "marzo",
"April": "abril",
"May": "mayo",
"June": "junio",
"July": "julio",
"August": "agosto",
"September": "septiembre",
"October": "octubre",
"November": "noviembre",
"December": "diciembre",
"Jan_Abbr": "ene",
"Feb_Abbr": "feb",
"Mar_Abbr": "mar",
"Apr_Abbr": "abr",
"May_Abbr": "may",
"Jun_Abbr": "jun",
"Jul_Abbr": "jul",
"Aug_Abbr": "ago",
"Sep_Abbr": "sep",
"Oct_Abbr": "oct",
"Nov_Abbr": "nov",
"Dec_Abbr": "dic",
"AM": "a.m.",
"PM": "p.m.",
"firstDayOfWeek": 0,
"twoDigitYearMax": 2029,
"mdy": "dmy",
"M/d/yyyy": "dd/MM/yyyy",
"dddd, MMMM dd, yyyy": "dddd, dd' [de] 'MMMM' [de] 'yyyy",
"h:mm tt": "hh:mm tt",
"h:mm:ss tt": "hh:mm:ss tt",
"dddd, MMMM dd, yyyy h:mm:ss tt": "dddd, dd' [de] 'MMMM' [de] 'yyyy hh:mm:ss tt",
"yyyy-MM-ddTHH:mm:ss": "yyyy-MM-ddTHH:mm:ss",
"yyyy-MM-dd HH:mm:ssZ": "yyyy-MM-dd HH:mm:ssZ",
"ddd, dd MMM yyyy HH:mm:ss": "ddd, dd MMM yyyy HH:mm:ss",
"MMMM dd": "dd MMMM",
"MMMM, yyyy": "MMMM' [de] 'yyyy",
"/jan(uary)?/": "ene(ro)?",
"/feb(ruary)?/": "feb(rero)?",
"/mar(ch)?/": "mar(zo)?",
"/apr(il)?/": "abr(il)?",
"/may/": "may(o)?",
"/jun(e)?/": "jun(io)?",
"/jul(y)?/": "jul(io)?",
"/aug(ust)?/": "ago(sto)?",
"/sep(t(ember)?)?/": "sep(tiembre)?",
"/oct(ober)?/": "oct(ubre)?",
"/nov(ember)?/": "nov(iembre)?",
"/dec(ember)?/": "dic(iembre)?",
"/^su(n(day)?)?/": "^do(m(ingo)?)?",
"/^mo(n(day)?)?/": "^lu(n(es)?)?",
"/^tu(e(s(day)?)?)?/": "^ma(r(tes)?)?",
"/^we(d(nesday)?)?/": "^mi(é(rcoles)?)?",
"/^th(u(r(s(day)?)?)?)?/": "^ju(e(ves)?)?",
"/^fr(i(day)?)?/": "^vi(e(rnes)?)?",
"/^sa(t(urday)?)?/": "^sá(b(ado)?)?",
"/^next/": "^next",
"/^last|past|prev(ious)?/": "^last|past|prev(ious)?",
"/^(\\+|aft(er)?|from|hence)/": "^(\\+|aft(er)?|from|hence)",
"/^(\\-|bef(ore)?|ago)/": "^(\\-|bef(ore)?|ago)",
"/^yes(terday)?/": "^yes(terday)?",
"/^t(od(ay)?)?/": "^t(od(ay)?)?",
"/^tom(orrow)?/": "^tom(orrow)?",
"/^n(ow)?/": "^n(ow)?",
"/^ms|milli(second)?s?/": "^ms|milli(second)?s?",
"/^sec(ond)?s?/": "^sec(ond)?s?",
"/^mn|min(ute)?s?/": "^mn|min(ute)?s?",
"/^h(our)?s?/": "^h(our)?s?",
"/^w(eek)?s?/": "^w(eek)?s?",
"/^m(onth)?s?/": "^m(onth)?s?",
"/^d(ay)?s?/": "^d(ay)?s?",
"/^y(ear)?s?/": "^y(ear)?s?",
"/^(a|p)/": "^(a|p)",
"/^(a\\.?m?\\.?|p\\.?m?\\.?)/": "^(a\\.?m?\\.?|p\\.?m?\\.?)",
"/^((e(s|d)t|c(s|d)t|m(s|d)t|p(s|d)t)|((gmt)?\\s*(\\+|\\-)\\s*\\d\\d\\d\\d?)|gmt|utc)/": "^((e(s|d)t|c(s|d)t|m(s|d)t|p(s|d)t)|((gmt)?\\s*(\\+|\\-)\\s*\\d\\d\\d\\d?)|gmt|utc)",
"/^\\s*(st|nd|rd|th)/": "^\\s*(st|nd|rd|th)",
"/^\\s*(\\:|a(?!u|p)|p)/": "^\\s*(\\:|a(?!u|p)|p)",
"LINT": "LINT",
"TOT": "TOT",
"CHAST": "CHAST",
"NZST": "NZST",
"NFT": "NFT",
"SBT": "SBT",
"AEST": "AEST",
"ACST": "ACST",
"JST": "JST",
"CWST": "CWST",
"CT": "CT",
"ICT": "ICT",
"MMT": "MMT",
"BIOT": "BST",
"NPT": "NPT",
"IST": "IST",
"PKT": "PKT",
"AFT": "AFT",
"MSK": "MSK",
"IRST": "IRST",
"FET": "FET",
"EET": "EET",
"CET": "CET",
"UTC": "UTC",
"GMT": "GMT",
"CVT": "CVT",
"GST": "GST",
"BRT": "BRT",
"NST": "NST",
"AST": "AST",
"EST": "EST",
"CST": "CST",
"MST": "MST",
"PST": "PST",
"AKST": "AKST",
"MIT": "MIT",
"HST": "HST",
"SST": "SST",
"BIT": "BIT",
"CHADT": "CHADT",
"NZDT": "NZDT",
"AEDT": "AEDT",
"ACDT": "ACDT",
"AZST": "AZST",
"IRDT": "IRDT",
"EEST": "EEST",
"CEST": "CEST",
"BST": "BST",
"PMDT": "PMDT",
"ADT": "ADT",
"NDT": "NDT",
"EDT": "EDT",
"CDT": "CDT",
"MDT": "MDT",
"PDT": "PDT",
"AKDT": "AKDT",
"HADT": "HADT"
};
Date.CultureStrings.lang = "es-SV";
/**
* @overview datejs
* @version 1.0.0-rc3
* @author Gregory Wild-Smith <[email protected]>
* @copyright 2014 Gregory Wild-Smith
* @license MIT
* @homepage https://github.com/abritinthebay/datejs
*/(function () {
var $D = Date;
var lang = Date.CultureStrings ? Date.CultureStrings.lang : null;
var loggedKeys = {}; // for debug purposes.
var getText = {
getFromKey: function (key, countryCode) {
var output;
if (Date.CultureStrings && Date.CultureStrings[countryCode] && Date.CultureStrings[countryCode][key]) {
output = Date.CultureStrings[countryCode][key];
} else {
output = getText.buildFromDefault(key);
}
if (key.charAt(0) === "/") { // Assume it's a regex
output = getText.buildFromRegex(key, countryCode);
}
return output;
},
getFromObjectValues: function (obj, countryCode) {
var key, output = {};
for(key in obj) {
if (obj.hasOwnProperty(key)) {
output[key] = getText.getFromKey(obj[key], countryCode);
}
}
return output;
},
getFromObjectKeys: function (obj, countryCode) {
var key, output = {};
for(key in obj) {
if (obj.hasOwnProperty(key)) {
output[getText.getFromKey(key, countryCode)] = obj[key];
}
}
return output;
},
getFromArray: function (arr, countryCode) {
var output = [];
for (var i=0; i < arr.length; i++){
if (i in arr) {
output[i] = getText.getFromKey(arr[i], countryCode);
}
}
return output;
},
buildFromDefault: function (key) {
var output, length, split, last;
switch(key) {
case "name":
output = "en-US";
break;
case "englishName":
output = "English (United States)";
break;
case "nativeName":
output = "English (United States)";
break;
case "twoDigitYearMax":
output = 2049;
break;
case "firstDayOfWeek":
output = 0;
break;
default:
output = key;
split = key.split("_");
length = split.length;
if (length > 1 && key.charAt(0) !== "/") {
// if the key isn't a regex and it has a split.
last = split[(length - 1)].toLowerCase();
if (last === "initial" || last === "abbr") {
output = split[0];
}
}
break;
}
return output;
},
buildFromRegex: function (key, countryCode) {
var output;
if (Date.CultureStrings && Date.CultureStrings[countryCode] && Date.CultureStrings[countryCode][key]) {
output = new RegExp(Date.CultureStrings[countryCode][key], "i");
} else {
output = new RegExp(key.replace(new RegExp("/", "g"),""), "i");
}
return output;
}
};
var shallowMerge = function (obj1, obj2) {
for (var attrname in obj2) {
if (obj2.hasOwnProperty(attrname)) {
obj1[attrname] = obj2[attrname];
}
}
};
/**
 * Core translation lookup. Accepts a single key, an array of keys, or an
 * object (whose *keys* are translated), and dispatches to the matching
 * getText helper for the active — or explicitly supplied — language.
 * @param key  String key, array of keys, or object keyed by i18n keys.
 * @param language  Optional IETF tag; falls back to the module-level `lang`.
 */
var __ = function (key, language) {
    var countryCode = (language) ? language : lang;
    // Debug aid: record every key ever requested.
    // NOTE(review): object/array keys are coerced to strings here — appears
    // intentional for rough logging; confirm if per-key logging matters.
    loggedKeys[key] = key;
    if (typeof key === "object") {
        if (key instanceof Array) {
            return getText.getFromArray(key, countryCode);
        } else {
            return getText.getFromObjectKeys(key, countryCode);
        }
    } else {
        return getText.getFromKey(key, countryCode);
    }
};
/**
 * Dynamically loads an i18n culture file by injecting a <script> tag
 * (patterned after jQuery's getScript). Browser-only: requires `document`
 * and a configured Date.Config.i18n base URL.
 * @param {String} code IETF language tag, e.g. "es-SV".
 * @return {Object} handle exposing `done(cb)`, invoked after the script loads.
 */
var loadI18nScript = function (code) {
    // patterned after jQuery's getScript.
    var url = Date.Config.i18n + code + ".js";
    var head = document.getElementsByTagName("head")[0] || document.documentElement;
    var script = document.createElement("script");
    script.src = url;
    var completed = false;
    var events = {
        done: function (){} // placeholder function
    };
    // Attach handlers for all browsers; the readyState checks cover legacy IE,
    // which fires onreadystatechange instead of onload.
    script.onload = script.onreadystatechange = function() {
        if ( !completed && (!this.readyState || this.readyState === "loaded" || this.readyState === "complete") ) {
            events.done();
            head.removeChild(script);
        }
    };
    setTimeout(function() {
        head.insertBefore(script, head.firstChild);
    }, 0); // allows return to execute first
    return {
        done: function (cb) {
            // Register (replace) the completion callback; run async post-load.
            events.done = function() {
                if (cb) {
                    setTimeout(cb,0);
                }
            };
        }
    };
};
var buildInfo = {
buildFromMethodHash: function (obj) {
var key;
for(key in obj) {
if (obj.hasOwnProperty(key)) {
obj[key] = buildInfo[obj[key]]();
}
}
return obj;
},
timeZoneDST: function () {
var DST = {
"CHADT": "+1345",
"NZDT": "+1300",
"AEDT": "+1100",
"ACDT": "+1030",
"AZST": "+0500",
"IRDT": "+0430",
"EEST": "+0300",
"CEST": "+0200",
"BST": "+0100",
"PMDT": "-0200",
"ADT": "-0300",
"NDT": "-0230",
"EDT": "-0400",
"CDT": "-0500",
"MDT": "-0600",
"PDT": "-0700",
"AKDT": "-0800",
"HADT": "-0900"
};
return __(DST);
},
timeZoneStandard: function () {
var standard = {
"LINT": "+1400",
"TOT": "+1300",
"CHAST": "+1245",
"NZST": "+1200",
"NFT": "+1130",
"SBT": "+1100",
"AEST": "+1000",
"ACST": "+0930",
"JST": "+0900",
"CWST": "+0845",
"CT": "+0800",
"ICT": "+0700",
"MMT": "+0630",
"BST": "+0600",
"NPT": "+0545",
"IST": "+0530",
"PKT": "+0500",
"AFT": "+0430",
"MSK": "+0400",
"IRST": "+0330",
"FET": "+0300",
"EET": "+0200",
"CET": "+0100",
"GMT": "+0000",
"UTC": "+0000",
"CVT": "-0100",
"GST": "-0200",
"BRT": "-0300",
"NST": "-0330",
"AST": "-0400",
"EST": "-0500",
"CST": "-0600",
"MST": "-0700",
"PST": "-0800",
"AKST": "-0900",
"MIT": "-0930",
"HST": "-1000",
"SST": "-1100",
"BIT": "-1200"
};
return __(standard);
},
timeZones: function (data) {
var zone;
data.timezones = [];
for (zone in data.abbreviatedTimeZoneStandard) {
if (data.abbreviatedTimeZoneStandard.hasOwnProperty(zone)) {
data.timezones.push({ name: zone, offset: data.abbreviatedTimeZoneStandard[zone]});
}
}
for (zone in data.abbreviatedTimeZoneDST) {
if (data.abbreviatedTimeZoneDST.hasOwnProperty(zone)) {
data.timezones.push({ name: zone, offset: data.abbreviatedTimeZoneDST[zone], dst: true});
}
}
return data.timezones;
},
days: function () {
return __(["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]);
},
dayAbbr: function () {
return __(["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]);
},
dayShortNames: function () {
return __(["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"]);
},
dayFirstLetters: function () {
return __(["S_Sun_Initial", "M_Mon_Initial", "T_Tues_Initial", "W_Wed_Initial", "T_Thu_Initial", "F_Fri_Initial", "S_Sat_Initial"]);
},
months: function () {
return __(["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]);
},
monthAbbr: function () {
return __(["Jan_Abbr", "Feb_Abbr", "Mar_Abbr", "Apr_Abbr", "May_Abbr", "Jun_Abbr", "Jul_Abbr", "Aug_Abbr", "Sep_Abbr", "Oct_Abbr", "Nov_Abbr", "Dec_Abbr"]);
},
formatPatterns: function () {
return getText.getFromObjectValues({
shortDate: "M/d/yyyy",
longDate: "dddd, MMMM dd, yyyy",
shortTime: "h:mm tt",
longTime: "h:mm:ss tt",
fullDateTime: "dddd, MMMM dd, yyyy h:mm:ss tt",
sortableDateTime: "yyyy-MM-ddTHH:mm:ss",
universalSortableDateTime: "yyyy-MM-dd HH:mm:ssZ",
rfc1123: "ddd, dd MMM yyyy HH:mm:ss",
monthDay: "MMMM dd",
yearMonth: "MMMM, yyyy"
}, Date.i18n.currentLanguage());
},
regex: function () {
return getText.getFromObjectValues({
inTheMorning: "/( in the )(morn(ing)?)\\b/",
thisMorning: "/(this )(morn(ing)?)\\b/",
amThisMorning: "/(\b\\d(am)? )(this )(morn(ing)?)/",
inTheEvening: "/( in the )(even(ing)?)\\b/",
thisEvening: "/(this )(even(ing)?)\\b/",
pmThisEvening: "/(\b\\d(pm)? )(this )(even(ing)?)/",
jan: "/jan(uary)?/",
feb: "/feb(ruary)?/",
mar: "/mar(ch)?/",
apr: "/apr(il)?/",
may: "/may/",
jun: "/jun(e)?/",
jul: "/jul(y)?/",
aug: "/aug(ust)?/",
sep: "/sep(t(ember)?)?/",
oct: "/oct(ober)?/",
nov: "/nov(ember)?/",
dec: "/dec(ember)?/",
sun: "/^su(n(day)?)?/",
mon: "/^mo(n(day)?)?/",
tue: "/^tu(e(s(day)?)?)?/",
wed: "/^we(d(nesday)?)?/",
thu: "/^th(u(r(s(day)?)?)?)?/",
fri: "/fr(i(day)?)?/",
sat: "/^sa(t(urday)?)?/",
future: "/^next/",
past: "/^last|past|prev(ious)?/",
add: "/^(\\+|aft(er)?|from|hence)/",
subtract: "/^(\\-|bef(ore)?|ago)/",
yesterday: "/^yes(terday)?/",
today: "/^t(od(ay)?)?/",
tomorrow: "/^tom(orrow)?/",
now: "/^n(ow)?/",
millisecond: "/^ms|milli(second)?s?/",
second: "/^sec(ond)?s?/",
minute: "/^mn|min(ute)?s?/",
hour: "/^h(our)?s?/",
week: "/^w(eek)?s?/",
month: "/^m(onth)?s?/",
day: "/^d(ay)?s?/",
year: "/^y(ear)?s?/",
shortMeridian: "/^(a|p)/",
longMeridian: "/^(a\\.?m?\\.?|p\\.?m?\\.?)/",
timezone: "/^((e(s|d)t|c(s|d)t|m(s|d)t|p(s|d)t)|((gmt)?\\s*(\\+|\\-)\\s*\\d\\d\\d\\d?)|gmt|utc)/",
ordinalSuffix: "/^\\s*(st|nd|rd|th)/",
timeContext: "/^\\s*(\\:|a(?!u|p)|p)/"
}, Date.i18n.currentLanguage());
}
};
var CultureInfo = function () {
var info = getText.getFromObjectValues({
name: "name",
englishName: "englishName",
nativeName: "nativeName",
amDesignator: "AM",
pmDesignator: "PM",
firstDayOfWeek: "firstDayOfWeek",
twoDigitYearMax: "twoDigitYearMax",
dateElementOrder: "mdy"
}, Date.i18n.currentLanguage());
var constructedInfo = buildInfo.buildFromMethodHash({
dayNames: "days",
abbreviatedDayNames: "dayAbbr",
shortestDayNames: "dayShortNames",
firstLetterDayNames: "dayFirstLetters",
monthNames: "months",
abbreviatedMonthNames: "monthAbbr",
formatPatterns: "formatPatterns",
regexPatterns: "regex",
abbreviatedTimeZoneDST: "timeZoneDST",
abbreviatedTimeZoneStandard: "timeZoneStandard"
});
shallowMerge(info, constructedInfo);
buildInfo.timeZones(info);
return info;
};
$D.i18n = {
__: function (key, lang) {
return __(key, lang);
},
currentLanguage: function () {
return lang || "en-US";
},
setLanguage: function (code, force, cb) {
var async = false;
if (force || code === "en-US" || (!!Date.CultureStrings && !!Date.CultureStrings[code])) {
lang = code;
Date.CultureStrings = Date.CultureStrings || {};
Date.CultureStrings.lang = code;
Date.CultureInfo = new CultureInfo();
} else {
if (!(!!Date.CultureStrings && !!Date.CultureStrings[code])) {
if (typeof exports !== "undefined" && this.exports !== exports) {
// we're in a Node enviroment, load it using require
try {
require("../i18n/" + code + ".js");
lang = code;
Date.CultureStrings.lang = code;
Date.CultureInfo = new CultureInfo();
} catch (e) {
// var str = "The language for '" + code + "' could not be loaded by Node. It likely does not exist.";
throw new Error("The DateJS IETF language tag '" + code + "' could not be loaded by Node. It likely does not exist.");
}
} else if (Date.Config && Date.Config.i18n) {
// we know the location of the files, so lets load them
async = true;
loadI18nScript(code).done(function(){
lang = code;
Date.CultureStrings = Date.CultureStrings || {};
Date.CultureStrings.lang = code;
Date.CultureInfo = new CultureInfo();
$D.Parsing.Normalizer.buildReplaceData(); // because this is async
if ($D.Grammar) {
$D.Grammar.buildGrammarFormats(); // so we can parse those strings...
}
if (cb) {
setTimeout(cb,0);
}
});
} else {
Date.console.error("The DateJS IETF language tag '" + code + "' is not available and has not been loaded.");
return false;
}
}
}
$D.Parsing.Normalizer.buildReplaceData(); // rebuild normalizer strings
if ($D.Grammar) {
$D.Grammar.buildGrammarFormats(); // so we can parse those strings...
}
if (!async && cb) {
setTimeout(cb,0);
}
},
getLoggedKeys: function () {
return loggedKeys;
},
updateCultureInfo: function () {
Date.CultureInfo = new CultureInfo();
}
};
$D.i18n.updateCultureInfo(); // run automatically
}());
(function () {
var $D = Date,
$P = $D.prototype,
p = function (s, l) {
if (!l) {
l = 2;
}
return ("000" + s).slice(l * -1);
};
if (typeof window !== "undefined" && typeof window.console !== "undefined" && typeof window.console.log !== "undefined") {
$D.console = console; // used only to raise non-critical errors if available
} else {
// set mock so we don't give errors.
$D.console = {
log: function(){},
error: function(){}
};
}
$D.Config = $D.Config || {};
$D.initOverloads = function() {
/**
* Overload of Date.now. Allows an alternate call for Date.now where it returns the
* current Date as an object rather than just milliseconds since the Unix Epoch.
*
* Also provides an implementation of now() for browsers (IE<9) that don't have it.
*
* Backwards compatible so with work with either:
* Date.now() [returns ms]
* or
* Date.now(true) [returns Date]
*/
if (!$D.now) {
$D._now = function now() {
return new Date().getTime();
};
} else if (!$D._now) {
$D._now = $D.now;
}
$D.now = function (returnObj) {
if (returnObj) {
return $D.present();
} else {
return $D._now();
}
};
if ( !$P.toISOString ) {
$P.toISOString = function() {
return this.getUTCFullYear() +
"-" + p(this.getUTCMonth() + 1) +
"-" + p(this.getUTCDate()) +
"T" + p(this.getUTCHours()) +
":" + p(this.getUTCMinutes()) +
":" + p(this.getUTCSeconds()) +
"." + String( (this.getUTCMilliseconds()/1000).toFixed(3)).slice(2, 5) +
"Z";
};
}
// private
if ( $P._toString === undefined ){
$P._toString = $P.toString;
}
};
$D.initOverloads();
/**
* Gets a date that is set to the current date. The time is set to the start of the day (00:00 or 12:00 AM).
* @return {Date} The current date.
*/
$D.today = function () {
return new Date().clearTime();
};
/**
* Gets a date that is set to the current date and time (same as new Date, but chainable)
* @return {Date} The current date.
*/
$D.present = function () {
return new Date();
};
/**
* Compares the first date to the second date and returns an number indication of their relative values.
* @param {Date} First Date object to compare [Required].
* @param {Date} Second Date object to compare to [Required].
* @return {Number} -1 = date1 is lessthan date2. 0 = values are equal. 1 = date1 is greaterthan date2.
*/
/**
 * Compares two Date objects and returns their relative order.
 * @param {Date} date1 First Date object to compare [Required].
 * @param {Date} date2 Second Date object to compare to [Required].
 * @return {Number} -1 when date1 < date2, 0 when equal, 1 when date1 > date2.
 * @throws {Error} when either argument is NaN-valued (e.g. an Invalid Date).
 * @throws {TypeError} when either argument is not a Date instance.
 */
$D.compare = function (date1, date2) {
    if (isNaN(date1) || isNaN(date2)) {
        throw new Error(date1 + " - " + date2);
    }
    if (date1 instanceof Date && date2 instanceof Date) {
        if (date1 < date2) {
            return -1;
        }
        return (date1 > date2) ? 1 : 0;
    }
    throw new TypeError(date1 + " - " + date2);
};
/**
* Compares the first Date object to the second Date object and returns true if they are equal.
* @param {Date} First Date object to compare [Required]
* @param {Date} Second Date object to compare to [Required]
* @return {Boolean} true if dates are equal. false if they are not equal.
*/
$D.equals = function (date1, date2) {
return (date1.compareTo(date2) === 0);
};
/**
* Gets the language appropriate day name when given the day number(0-6)
* eg - 0 == Sunday
* @return {String} The day name
*/
$D.getDayName = function (n) {
return Date.CultureInfo.dayNames[n];
};
/**
* Gets the day number (0-6) if given a CultureInfo specific string which is a valid dayName, abbreviatedDayName or shortestDayName (two char).
* @param {String} The name of the day (eg. "Monday, "Mon", "tuesday", "tue", "We", "we").
* @return {Number} The day number
*/
$D.getDayNumberFromName = function (name) {
var n = Date.CultureInfo.dayNames, m = Date.CultureInfo.abbreviatedDayNames, o = Date.CultureInfo.shortestDayNames, s = name.toLowerCase();
for (var i = 0; i < n.length; i++) {
if (n[i].toLowerCase() === s || m[i].toLowerCase() === s || o[i].toLowerCase() === s) {
return i;
}
}
return -1;
};
/**
* Gets the month number (0-11) if given a Culture Info specific string which is a valid monthName or abbreviatedMonthName.
* @param {String} The name of the month (eg. "February, "Feb", "october", "oct").
* @return {Number} The day number
*/
$D.getMonthNumberFromName = function (name) {
var n = Date.CultureInfo.monthNames, m = Date.CultureInfo.abbreviatedMonthNames, s = name.toLowerCase();
for (var i = 0; i < n.length; i++) {
if (n[i].toLowerCase() === s || m[i].toLowerCase() === s) {
return i;
}
}
return -1;
};
/**
* Gets the language appropriate month name when given the month number(0-11)
* eg - 0 == January
* @return {String} The month name
*/
$D.getMonthName = function (n) {
return Date.CultureInfo.monthNames[n];
};
/**
* Determines if the current date instance is within a LeapYear.
* @param {Number} The year.
* @return {Boolean} true if date is within a LeapYear, otherwise false.
*/
/**
 * Reports whether the given year is a leap year (Gregorian rules:
 * divisible by 4, except centuries, which must be divisible by 400).
 * @param {Number} year The full year to test.
 * @return {Boolean} true if the year is a leap year, otherwise false.
 */
$D.isLeapYear = function (year) {
    if (year % 400 === 0) {
        return true;
    }
    return year % 4 === 0 && year % 100 !== 0;
};
/**
* Gets the number of days in the month, given a year and month value. Automatically corrects for LeapYear.
* @param {Number} The year.
* @param {Number} The month (0-11).
* @return {Number} The number of days in the month.
*/
/**
 * Gets the number of days in the month, given a year and month value.
 * Automatically corrects for LeapYear. May also be called with a single
 * argument (a month, 0-11), in which case the current year is assumed.
 * @param {Number} year  The year (or the month, in the one-argument form).
 * @param {Number} month The month (0-11); omit for the one-argument form.
 * @return {Number} The number of days in the month.
 */
$D.getDaysInMonth = function (year, month) {
    // Use `== null` rather than the previous `!month`: with `!month`, an
    // explicit January (month === 0) combined with a year in 0-11 was
    // misrouted into the one-argument branch, silently swapping arguments.
    if (month == null && $D.validateMonth(year)) {
        month = year;
        year = Date.today().getFullYear();
    }
    return [31, ($D.isLeapYear(year) ? 29 : 28), 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month];
};
$P.getDaysInMonth = function () {
return $D.getDaysInMonth(this.getFullYear(), this.getMonth());
};
$D.getTimezoneAbbreviation = function (offset, dst) {
var p, n = (dst || false) ? Date.CultureInfo.abbreviatedTimeZoneDST : Date.CultureInfo.abbreviatedTimeZoneStandard;
for (p in n) {
if (n.hasOwnProperty(p)) {
if (n[p] === offset) {
return p;
}
}
}
return null;
};
$D.getTimezoneOffset = function (name, dst) {
var i, a =[], z = Date.CultureInfo.timezones;
if (!name) { name = (new Date()).getTimezone();}
for (i = 0; i < z.length; i++) {
if (z[i].name === name.toUpperCase()) {
a.push(i);
}
}
if (!z[a[0]]) {
return null;
}
if (a.length === 1 || !dst) {
return z[a[0]].offset;
} else {
for (i=0; i < a.length; i++) {
if (z[a[i]].dst) {
return z[a[i]].offset;
}
}
}
};
/**
 * Gets the 1-based calendar quarter (1-4) for the supplied date.
 * @param {Date} d Optional date; defaults to "now" when omitted.
 * @return {Number} 1, 2, 3 or 4.
 */
$D.getQuarter = function (d) {
    d = d || new Date(); // If no date supplied, use today
    var q = [1,2,3,4];
    return q[Math.floor(d.getMonth() / 3)]; // three months per quarter
};
$D.getDaysLeftInQuarter = function (d) {
d = d || new Date();
var qEnd = new Date(d);
qEnd.setMonth(qEnd.getMonth() + 3 - qEnd.getMonth() % 3, 0);
return Math.floor((qEnd - d) / 8.64e7);
};
// private
var validate = function (n, min, max, name) {
name = name ? name : "Object";
if (typeof n === "undefined") {
return false;
} else if (typeof n !== "number") {
throw new TypeError(n + " is not a Number.");
} else if (n < min || n > max) {
// As failing validation is *not* an exceptional circumstance
// lets not throw a RangeError Exception here.
// It's semantically correct but it's not sensible.
return false;
}
return true;
};
/**
* Validates the number is within an acceptable range for milliseconds [0-999].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateMillisecond = function (value) {
return validate(value, 0, 999, "millisecond");
};
/**
* Validates the number is within an acceptable range for seconds [0-59].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateSecond = function (value) {
return validate(value, 0, 59, "second");
};
/**
* Validates the number is within an acceptable range for minutes [0-59].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateMinute = function (value) {
return validate(value, 0, 59, "minute");
};
/**
* Validates the number is within an acceptable range for hours [0-23].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateHour = function (value) {
return validate(value, 0, 23, "hour");
};
/**
* Validates the number is within an acceptable range for the days in a month [0-MaxDaysInMonth].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateDay = function (value, year, month) {
if (year === undefined || year === null || month === undefined || month === null) { return false;}
return validate(value, 1, $D.getDaysInMonth(year, month), "day");
};
/**
* Validates the number is within an acceptable range for months [0-11].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateWeek = function (value) {
return validate(value, 0, 53, "week");
};
/**
* Validates the number is within an acceptable range for months [0-11].
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateMonth = function (value) {
return validate(value, 0, 11, "month");
};
/**
* Validates the number is within an acceptable range for years.
* @param {Number} The number to check if within range.
* @return {Boolean} true if within range, otherwise false.
*/
$D.validateYear = function (value) {
/**
* Per ECMAScript spec the range of times supported by Date objects is
* exactly -100,000,000 days to +100,000,000 days measured relative to
* midnight at the beginning of 01 January, 1970 UTC.
* This gives a range of 8,640,000,000,000,000 milliseconds to either
* side of 01 January, 1970 UTC.
*
* Earliest possible date: Tue, 20 Apr 271,822 B.C. 00:00:00 UTC
* Latest possible date: Sat, 13 Sep 275,760 00:00:00 UTC
*/
return validate(value, -271822, 275760, "year");
};
$D.validateTimezone = function(value) {
var timezones = {"ACDT":1,"ACST":1,"ACT":1,"ADT":1,"AEDT":1,"AEST":1,"AFT":1,"AKDT":1,"AKST":1,"AMST":1,"AMT":1,"ART":1,"AST":1,"AWDT":1,"AWST":1,"AZOST":1,"AZT":1,"BDT":1,"BIOT":1,"BIT":1,"BOT":1,"BRT":1,"BST":1,"BTT":1,"CAT":1,"CCT":1,"CDT":1,"CEDT":1,"CEST":1,"CET":1,"CHADT":1,"CHAST":1,"CHOT":1,"ChST":1,"CHUT":1,"CIST":1,"CIT":1,"CKT":1,"CLST":1,"CLT":1,"COST":1,"COT":1,"CST":1,"CT":1,"CVT":1,"CWST":1,"CXT":1,"DAVT":1,"DDUT":1,"DFT":1,"EASST":1,"EAST":1,"EAT":1,"ECT":1,"EDT":1,"EEDT":1,"EEST":1,"EET":1,"EGST":1,"EGT":1,"EIT":1,"EST":1,"FET":1,"FJT":1,"FKST":1,"FKT":1,"FNT":1,"GALT":1,"GAMT":1,"GET":1,"GFT":1,"GILT":1,"GIT":1,"GMT":1,"GST":1,"GYT":1,"HADT":1,"HAEC":1,"HAST":1,"HKT":1,"HMT":1,"HOVT":1,"HST":1,"ICT":1,"IDT":1,"IOT":1,"IRDT":1,"IRKT":1,"IRST":1,"IST":1,"JST":1,"KGT":1,"KOST":1,"KRAT":1,"KST":1,"LHST":1,"LINT":1,"MAGT":1,"MART":1,"MAWT":1,"MDT":1,"MET":1,"MEST":1,"MHT":1,"MIST":1,"MIT":1,"MMT":1,"MSK":1,"MST":1,"MUT":1,"MVT":1,"MYT":1,"NCT":1,"NDT":1,"NFT":1,"NPT":1,"NST":1,"NT":1,"NUT":1,"NZDT":1,"NZST":1,"OMST":1,"ORAT":1,"PDT":1,"PET":1,"PETT":1,"PGT":1,"PHOT":1,"PHT":1,"PKT":1,"PMDT":1,"PMST":1,"PONT":1,"PST":1,"PYST":1,"PYT":1,"RET":1,"ROTT":1,"SAKT":1,"SAMT":1,"SAST":1,"SBT":1,"SCT":1,"SGT":1,"SLST":1,"SRT":1,"SST":1,"SYOT":1,"TAHT":1,"THA":1,"TFT":1,"TJT":1,"TKT":1,"TLT":1,"TMT":1,"TOT":1,"TVT":1,"UCT":1,"ULAT":1,"UTC":1,"UYST":1,"UYT":1,"UZT":1,"VET":1,"VLAT":1,"VOLT":1,"VOST":1,"VUT":1,"WAKT":1,"WAST":1,"WAT":1,"WEDT":1,"WEST":1,"WET":1,"WST":1,"YAKT":1,"YEKT":1,"Z":1};
return (timezones[value] === 1);
};
$D.validateTimezoneOffset= function(value) {
// timezones go from +14hrs to -12hrs, the +X hours are negative offsets.
return (value > -841 && value < 721);
};
}());
(function () {
var $D = Date,
$P = $D.prototype,
p = function (s, l) {
if (!l) {
l = 2;
}
return ("000" + s).slice(l * -1);
};
var validateConfigObject = function (obj) {
var result = {}, self = this, prop, testFunc;
testFunc = function (prop, func, value) {
if (prop === "day") {
var month = (obj.month !== undefined) ? obj.month : self.getMonth();
var year = (obj.year !== undefined) ? obj.year : self.getFullYear();
return $D[func](value, year, month);
} else {
return $D[func](value);
}
};
for (prop in obj) {
if (hasOwnProperty.call(obj, prop)) {
var func = "validate" + prop.charAt(0).toUpperCase() + prop.slice(1);
if ($D[func] && obj[prop] !== null && testFunc(prop, func, obj[prop])) {
result[prop] = obj[prop];
}
}
}
return result;
};
/**
* Resets the time of this Date object to 12:00 AM (00:00), which is the start of the day.
* @param {Boolean} .clone() this date instance before clearing Time
* @return {Date} this
*/
/**
 * Resets the time of this Date object to 12:00 AM (00:00), the start of the day.
 * Uses the multi-argument form of setHours, which clears hours, minutes,
 * seconds and milliseconds in one call — identical result to the previous
 * four separate setter calls.
 * @return {Date} this
 */
$P.clearTime = function () {
    this.setHours(0, 0, 0, 0);
    return this;
};
/**
* Resets the time of this Date object to the current time ('now').
* @return {Date} this
*/
$P.setTimeToNow = function () {
var n = new Date();
this.setHours(n.getHours());
this.setMinutes(n.getMinutes());
this.setSeconds(n.getSeconds());
this.setMilliseconds(n.getMilliseconds());
return this;
};
/**
* Returns a new Date object that is an exact date and time copy of the original instance.
* @return {Date} A new Date instance
*/
$P.clone = function () {
return new Date(this.getTime());
};
/**
* Compares this instance to a Date object and returns an number indication of their relative values.
* @param {Date} Date object to compare [Required]
* @return {Number} -1 = this is lessthan date. 0 = values are equal. 1 = this is greaterthan date.
*/
$P.compareTo = function (date) {
return Date.compare(this, date);
};
/**
* Compares this instance to another Date object and returns true if they are equal.
* @param {Date} Date object to compare. If no date to compare, new Date() [now] is used.
* @return {Boolean} true if dates are equal. false if they are not equal.
*/
$P.equals = function (date) {
return Date.equals(this, (date !== undefined ? date : new Date()));
};
/**
* Determines if this instance is between a range of two dates or equal to either the start or end dates.
* @param {Date} Start of range [Required]
* @param {Date} End of range [Required]
* @return {Boolean} true is this is between or equal to the start and end dates, else false
*/
$P.between = function (start, end) {
return this.getTime() >= start.getTime() && this.getTime() <= end.getTime();
};
/**
* Determines if this date occurs after the date to compare to.
* @param {Date} Date object to compare. If no date to compare, new Date() ("now") is used.
* @return {Boolean} true if this date instance is greater than the date to compare to (or "now"), otherwise false.
*/
$P.isAfter = function (date) {
return this.compareTo(date || new Date()) === 1;
};
/**
* Determines if this date occurs before the date to compare to.
* @param {Date} Date object to compare. If no date to compare, new Date() ("now") is used.
* @return {Boolean} true if this date instance is less than the date to compare to (or "now").
*/
$P.isBefore = function (date) {
return (this.compareTo(date || new Date()) === -1);
};
/**
* Determines if the current Date instance occurs today.
* @return {Boolean} true if this date instance is 'today', otherwise false.
*/
/**
* Determines if the current Date instance occurs on the same Date as the supplied 'date'.
* If no 'date' to compare to is provided, the current Date instance is compared to 'today'.
* @param {date} Date object to compare. If no date to compare, the current Date ("now") is used.
* @return {Boolean} true if this Date instance occurs on the same Day as the supplied 'date'.
*/
$P.isToday = $P.isSameDay = function (date) {
return this.clone().clearTime().equals((date || new Date()).clone().clearTime());
};
/**
* Adds the specified number of milliseconds to this instance.
* @param {Number} The number of milliseconds to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addMilliseconds = function (value) {
if (!value) { return this; }
this.setTime(this.getTime() + value * 1);
return this;
};
/**
* Adds the specified number of seconds to this instance.
* @param {Number} The number of seconds to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addSeconds = function (value) {
if (!value) { return this; }
return this.addMilliseconds(value * 1000);
};
/**
* Adds the specified number of seconds to this instance.
* @param {Number} The number of seconds to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addMinutes = function (value) {
if (!value) { return this; }
return this.addMilliseconds(value * 60000); // 60*1000
};
/**
* Adds the specified number of hours to this instance.
* @param {Number} The number of hours to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addHours = function (value) {
if (!value) { return this; }
return this.addMilliseconds(value * 3600000); // 60*60*1000
};
/**
* Adds the specified number of days to this instance.
* @param {Number} The number of days to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addDays = function (value) {
if (!value) { return this; }
this.setDate(this.getDate() + value * 1);
return this;
};
/**
* Adds the specified number of weekdays (ie - not sat or sun) to this instance.
* @param {Number} The number of days to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addWeekdays = function (value) {
if (!value) { return this; }
var day = this.getDay();
var weeks = (Math.ceil(Math.abs(value)/7));
if (day === 0 || day === 6) {
if (value > 0) {
this.next().monday();
this.addDays(-1);
day = this.getDay();
}
}
if (value < 0) {
while (value < 0) {
this.addDays(-1);
day = this.getDay();
if (day !== 0 && day !== 6) {
value++;
}
}
return this;
} else if (value > 5 || (6-day) <= value) {
value = value + (weeks * 2);
}
return this.addDays(value);
};
/**
* Adds the specified number of weeks to this instance.
* @param {Number} The number of weeks to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addWeeks = function (value) {
if (!value) { return this; }
return this.addDays(value * 7);
};
/**
* Adds the specified number of months to this instance.
* @param {Number} The number of months to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addMonths = function (value) {
if (!value) { return this; }
var n = this.getDate();
this.setDate(1);
this.setMonth(this.getMonth() + value * 1);
this.setDate(Math.min(n, $D.getDaysInMonth(this.getFullYear(), this.getMonth())));
return this;
};
$P.addQuarters = function (value) {
if (!value) { return this; }
// note this will take you to the same point in the quarter as you are now.
// i.e. - if you are 15 days into the quarter you'll be 15 days into the resulting one.
// bonus: this allows adding fractional quarters
return this.addMonths(value * 3);
};
/**
* Adds the specified number of years to this instance.
* @param {Number} The number of years to add. The number can be positive or negative [Required]
* @return {Date} this
*/
$P.addYears = function (value) {
if (!value) { return this; }
return this.addMonths(value * 12);
};
/**
* Adds (or subtracts) to the value of the years, months, weeks, days, hours, minutes, seconds, milliseconds of the date instance using given configuration object. Positive and Negative values allowed.
* Example
<pre><code>
Date.today().add( { days: 1, months: 1 } )
new Date().add( { years: -1 } )
</code></pre>
* @param {Object} Configuration object containing attributes (months, days, etc.)
* @return {Date} this
*/
$P.add = function (config) {
if (typeof config === "number") {
this._orient = config;
return this;
}
var x = config;
if (x.day) {
// If we should be a different date than today (eg: for 'tomorrow -1d', etc).
// Should only effect parsing, not direct usage (eg, Finish and FinishExact)
if ((x.day - this.getDate()) !== 0) {
this.setDate(x.day);
}
}
if (x.milliseconds) {
this.addMilliseconds(x.milliseconds);
}
if (x.seconds) {
this.addSeconds(x.seconds);
}
if (x.minutes) {
this.addMinutes(x.minutes);
}
if (x.hours) {
this.addHours(x.hours);
}
if (x.weeks) {
this.addWeeks(x.weeks);
}
if (x.months) {
this.addMonths(x.months);
}
if (x.years) {
this.addYears(x.years);
}
if (x.days) {
this.addDays(x.days);
}
return this;
};
/**
* Get the week number. Week one (1) is the week which contains the first Thursday of the year. Monday is considered the first day of the week.
* The .getWeek() function does NOT convert the date to UTC. The local datetime is used.
* Please use .getISOWeek() to get the week of the UTC converted date.
* @return {Number} 1 to 53
*/
$P.getWeek = function (utc) {
// Create a copy of this date object
var self, target = new Date(this.valueOf());
if (utc) {
target.addMinutes(target.getTimezoneOffset());
self = target.clone();
} else {
self = this;
}
// ISO week date weeks start on monday
// so correct the day number
var dayNr = (self.getDay() + 6) % 7;
// ISO 8601 states that week 1 is the week
// with the first thursday of that year.
// Set the target date to the thursday in the target week
target.setDate(target.getDate() - dayNr + 3);
// Store the millisecond value of the target date
var firstThursday = target.valueOf();
// Set the target to the first thursday of the year
// First set the target to january first
target.setMonth(0, 1);
// Not a thursday? Correct the date to the next thursday
if (target.getDay() !== 4) {
target.setMonth(0, 1 + ((4 - target.getDay()) + 7) % 7);
}
// The weeknumber is the number of weeks between the
// first thursday of the year and the thursday in the target week
return 1 + Math.ceil((firstThursday - target) / 604800000); // 604800000 = 7 * 24 * 3600 * 1000
};
/**
* Get the ISO 8601 week number. Week one ("01") is the week which contains the first Thursday of the year. Monday is considered the first day of the week.
* The .getISOWeek() function does convert the date to it's UTC value. Please use .getWeek() to get the week of the local date.
* @return {String} "01" to "53"
*/
$P.getISOWeek = function () {
return p(this.getWeek(true));
};
/**
* Moves the date to Monday of the week set. Week one (1) is the week which contains the first Thursday of the year.
* @param {Number} A Number (1 to 53) that represents the week of the year.
* @return {Date} this
*/
$P.setWeek = function (n) {
if ((n - this.getWeek()) === 0) {
if (this.getDay() !== 1) {
return this.moveToDayOfWeek(1, (this.getDay() > 1 ? -1 : 1));
} else {
return this;
}
} else {
return this.moveToDayOfWeek(1, (this.getDay() > 1 ? -1 : 1)).addWeeks(n - this.getWeek());
}
};
$P.setQuarter = function (qtr) {
var month = Math.abs(((qtr-1) * 3) + 1);
return this.setMonth(month, 1);
};
$P.getQuarter = function () {
return Date.getQuarter(this);
};
$P.getDaysLeftInQuarter = function () {
return Date.getDaysLeftInQuarter(this);
};
/**
* Moves the date to the next n'th occurrence of the dayOfWeek starting from the beginning of the month. The number (-1) is a magic number and will return the last occurrence of the dayOfWeek in the month.
* @param {Number} The dayOfWeek to move to
* @param {Number} The n'th occurrence to move to. Use (-1) to return the last occurrence in the month
* @return {Date} this
*/
$P.moveToNthOccurrence = function (dayOfWeek, occurrence) {
if (dayOfWeek === "Weekday") {
if (occurrence > 0) {
this.moveToFirstDayOfMonth();
if (this.is().weekday()) {
occurrence -= 1;
}
} else if (occurrence < 0) {
this.moveToLastDayOfMonth();
if (this.is().weekday()) {
occurrence += 1;
}
} else {
return this;
}
return this.addWeekdays(occurrence);
}
var shift = 0;
if (occurrence > 0) {
shift = occurrence - 1;
}
else if (occurrence === -1) {
this.moveToLastDayOfMonth();
if (this.getDay() !== dayOfWeek) {
this.moveToDayOfWeek(dayOfWeek, -1);
}
return this;
}
return this.moveToFirstDayOfMonth().addDays(-1).moveToDayOfWeek(dayOfWeek, +1).addWeeks(shift);
};
var moveToN = function (getFunc, addFunc, nVal) {
return function (value, orient) {
var diff = (value - this[getFunc]() + nVal * (orient || +1)) % nVal;
return this[addFunc]((diff === 0) ? diff += nVal * (orient || +1) : diff);
};
};
/**
* Move to the next or last dayOfWeek based on the orient value.
* @param {Number} The dayOfWeek to move to
* @param {Number} Forward (+1) or Back (-1). Defaults to +1. [Optional]
* @return {Date} this
*/
$P.moveToDayOfWeek = moveToN("getDay", "addDays", 7);
/**
* Move to the next or last month based on the orient value.
* @param {Number} The month to move to. 0 = January, 11 = December
* @param {Number} Forward (+1) or Back (-1). Defaults to +1. [Optional]
* @return {Date} this
*/
$P.moveToMonth = moveToN("getMonth", "addMonths", 12);
/**
* Get the Ordinate of the current day ("th", "st", "rd").
* @return {String}
*/
$P.getOrdinate = function () {
var num = this.getDate();
return ord(num);
};
/**
* Get the Ordinal day (numeric day number) of the year, adjusted for leap year.
* @return {Number} 1 through 365 (366 in leap years)
*/
$P.getOrdinalNumber = function () {
return Math.ceil((this.clone().clearTime() - new Date(this.getFullYear(), 0, 1)) / 86400000) + 1;
};
/**
* Get the time zone abbreviation of the current date.
* @return {String} The abbreviated time zone name (e.g. "EST")
*/
$P.getTimezone = function () {
return $D.getTimezoneAbbreviation(this.getUTCOffset(), this.isDaylightSavingTime());
};
$P.setTimezoneOffset = function (offset) {
var here = this.getTimezoneOffset(), there = Number(offset) * -6 / 10;
return (there || there === 0) ? this.addMinutes(there - here) : this;
};
$P.setTimezone = function (offset) {
return this.setTimezoneOffset($D.getTimezoneOffset(offset));
};
/**
* Indicates whether Daylight Saving Time is observed in the current time zone.
* @return {Boolean} true|false
*/
$P.hasDaylightSavingTime = function () {
return (Date.today().set({month: 0, day: 1}).getTimezoneOffset() !== Date.today().set({month: 6, day: 1}).getTimezoneOffset());
};
/**
* Indicates whether this Date instance is within the Daylight Saving Time range for the current time zone.
* @return {Boolean} true|false
*/
$P.isDaylightSavingTime = function () {
return Date.today().set({month: 0, day: 1}).getTimezoneOffset() !== this.getTimezoneOffset();
};
/**
* Get the offset from UTC of the current date.
* @return {String} The 4-character offset string prefixed with + or - (e.g. "-0500")
*/
$P.getUTCOffset = function (offset) {
var n = (offset || this.getTimezoneOffset()) * -10 / 6, r;
if (n < 0) {
r = (n - 10000).toString();
return r.charAt(0) + r.substr(2);
} else {
r = (n + 10000).toString();
return "+" + r.substr(1);
}
};
/**
* Returns the number of milliseconds between this date and date.
* @param {Date} Defaults to now
* @return {Number} The diff in milliseconds
*/
$P.getElapsed = function (date) {
return (date || new Date()) - this;
};
/**
* Set the value of year, month, day, hour, minute, second, millisecond of date instance using given configuration object.
* Example
<pre><code>
Date.today().set( { day: 20, month: 1 } )
new Date().set( { millisecond: 0 } )
</code></pre>
*
* @param {Object} Configuration object containing attributes (month, day, etc.)
* @return {Date} this
*/
$P.set = function (config) {
config = validateConfigObject.call(this, config);
var key;
for (key in config) {
if (hasOwnProperty.call(config, key)) {
var name = key.charAt(0).toUpperCase() + key.slice(1);
var addFunc, getFunc;
if (key !== "week" && key !== "month" && key !== "timezone" && key !== "timezoneOffset") {
name += "s";
}
addFunc = "add" + name;
getFunc = "get" + name;
if (key === "month") {
addFunc = addFunc + "s";
} else if (key === "year"){
getFunc = "getFullYear";
}
if (key !== "day" && key !== "timezone" && key !== "timezoneOffset" && key !== "week" && key !== "hour") {
this[addFunc](config[key] - this[getFunc]());
} else if ( key === "timezone"|| key === "timezoneOffset" || key === "week" || key === "hour") {
this["set"+name](config[key]);
}
}
}
// day has to go last because you can't validate the day without first knowing the month
if (config.day) {
this.addDays(config.day - this.getDate());
}
return this;
};
/**
* Moves the date to the first day of the month.
* @return {Date} this
*/
$P.moveToFirstDayOfMonth = function () {
return this.set({ day: 1 });
};
/**
* Moves the date to the last day of the month.
* @return {Date} this
*/
$P.moveToLastDayOfMonth = function () {
return this.set({ day: $D.getDaysInMonth(this.getFullYear(), this.getMonth())});
};
/**
* Converts the value of the current Date object to its equivalent string representation.
* Format Specifiers
* CUSTOM DATE AND TIME FORMAT STRINGS
* Format Description Example
* ------ --------------------------------------------------------------------------- -----------------------
* s The seconds of the minute between 0-59. "0" to "59"
* ss The seconds of the minute with leading zero if required. "00" to "59"
*
* m The minute of the hour between 0-59. "0" or "59"
* mm The minute of the hour with leading zero if required. "00" or "59"
*
* h The hour of the day between 1-12. "1" to "12"
* hh The hour of the day with leading zero if required. "01" to "12"
*
* H The hour of the day between 0-23. "0" to "23"
* HH The hour of the day with leading zero if required. "00" to "23"
*
* d The day of the month between 1 and 31. "1" to "31"
* dd The day of the month with leading zero if required. "01" to "31"
* ddd Abbreviated day name. Date.CultureInfo.abbreviatedDayNames. "Mon" to "Sun"
* dddd The full day name. Date.CultureInfo.dayNames. "Monday" to "Sunday"
*
* M The month of the year between 1-12. "1" to "12"
* MM The month of the year with leading zero if required. "01" to "12"
* MMM Abbreviated month name. Date.CultureInfo.abbreviatedMonthNames. "Jan" to "Dec"
* MMMM The full month name. Date.CultureInfo.monthNames. "January" to "December"
*
* yy The year as a two-digit number. "99" or "08"
* yyyy The full four digit year. "1999" or "2008"
*
* t Displays the first character of the A.M./P.M. designator. "A" or "P"
* Date.CultureInfo.amDesignator or Date.CultureInfo.pmDesignator
* tt Displays the A.M./P.M. designator. "AM" or "PM"
* Date.CultureInfo.amDesignator or Date.CultureInfo.pmDesignator
*
* S The ordinal suffix ("st, "nd", "rd" or "th") of the current day. "st, "nd", "rd" or "th"
*
* STANDARD DATE AND TIME FORMAT STRINGS
* Format Description Example
*------ --------------------------------------------------------------------------- -----------------------
* d The CultureInfo shortDate Format Pattern "M/d/yyyy"
* D The CultureInfo longDate Format Pattern "dddd, MMMM dd, yyyy"
* F The CultureInfo fullDateTime Format Pattern "dddd, MMMM dd, yyyy h:mm:ss tt"
* m The CultureInfo monthDay Format Pattern "MMMM dd"
* r The CultureInfo rfc1123 Format Pattern "ddd, dd MMM yyyy HH:mm:ss GMT"
* s The CultureInfo sortableDateTime Format Pattern "yyyy-MM-ddTHH:mm:ss"
* t The CultureInfo shortTime Format Pattern "h:mm tt"
* T The CultureInfo longTime Format Pattern "h:mm:ss tt"
* u The CultureInfo universalSortableDateTime Format Pattern "yyyy-MM-dd HH:mm:ssZ"
* y The CultureInfo yearMonth Format Pattern "MMMM, yyyy"
*
* @param {String} A format string consisting of one or more format spcifiers [Optional].
* @return {String} A string representation of the current Date object.
*/
var ord = function (n) {
switch (n * 1) {
case 1:
case 21:
case 31:
return "st";
case 2:
case 22:
return "nd";
case 3:
case 23:
return "rd";
default:
return "th";
}
};
var parseStandardFormats = function (format) {
var y, c = Date.CultureInfo.formatPatterns;
switch (format) {
case "d":
return this.toString(c.shortDate);
case "D":
return this.toString(c.longDate);
case "F":
return this.toString(c.fullDateTime);
case "m":
return this.toString(c.monthDay);
case "r":
case "R":
y = this.clone().addMinutes(this.getTimezoneOffset());
return y.toString(c.rfc1123) + " GMT";
case "s":
return this.toString(c.sortableDateTime);
case "t":
return this.toString(c.shortTime);
case "T":
return this.toString(c.longTime);
case "u":
y = this.clone().addMinutes(this.getTimezoneOffset());
return y.toString(c.universalSortableDateTime);
case "y":
return this.toString(c.yearMonth);
default:
return false;
}
};
var parseFormatStringsClosure = function (context) {
return function (m) {
if (m.charAt(0) === "\\") {
return m.replace("\\", "");
}
switch (m) {
case "hh":
return p(context.getHours() < 13 ? (context.getHours() === 0 ? 12 : context.getHours()) : (context.getHours() - 12));
case "h":
return context.getHours() < 13 ? (context.getHours() === 0 ? 12 : context.getHours()) : (context.getHours() - 12);
case "HH":
return p(context.getHours());
case "H":
return context.getHours();
case "mm":
return p(context.getMinutes());
case "m":
return context.getMinutes();
case "ss":
return p(context.getSeconds());
case "s":
return context.getSeconds();
case "yyyy":
return p(context.getFullYear(), 4);
case "yy":
return p(context.getFullYear());
case "y":
return context.getFullYear();
case "E":
case "dddd":
return Date.CultureInfo.dayNames[context.getDay()];
case "ddd":
return Date.CultureInfo.abbreviatedDayNames[context.getDay()];
case "dd":
return p(context.getDate());
case "d":
return context.getDate();
case "MMMM":
return Date.CultureInfo.monthNames[context.getMonth()];
case "MMM":
return Date.CultureInfo.abbreviatedMonthNames[context.getMonth()];
case "MM":
return p((context.getMonth() + 1));
case "M":
return context.getMonth() + 1;
case "t":
return context.getHours() < 12 ? Date.CultureInfo.amDesignator.substring(0, 1) : Date.CultureInfo.pmDesignator.substring(0, 1);
case "tt":
return context.getHours() < 12 ? Date.CultureInfo.amDesignator : Date.CultureInfo.pmDesignator;
case "S":
return ord(context.getDate());
case "W":
return context.getWeek();
case "WW":
return context.getISOWeek();
case "Q":
return "Q" + context.getQuarter();
case "q":
return String(context.getQuarter());
case "z":
return context.getTimezone();
case "Z":
case "X":
return Date.getTimezoneOffset(context.getTimezone());
case "ZZ": // Timezone offset in seconds
return context.getTimezoneOffset() * -60;
case "u":
return context.getDay();
case "L":
return ($D.isLeapYear(context.getFullYear())) ? 1 : 0;
case "B":
// Swatch Internet Time (.beats)
return "@"+((context.getUTCSeconds() + (context.getUTCMinutes()*60) + ((context.getUTCHours()+1)*3600))/86.4);
default:
return m;
}
};
};
$P.toString = function (format, ignoreStandards) {
// Standard Date and Time Format Strings. Formats pulled from CultureInfo file and
// may vary by culture.
if (!ignoreStandards && format && format.length === 1) {
output = parseStandardFormats.call(this, format);
if (output) {
return output;
}
}
var parseFormatStrings = parseFormatStringsClosure(this);
return format ? format.replace(/((\\)?(dd?d?d?|MM?M?M?|yy?y?y?|hh?|HH?|mm?|ss?|tt?|S|q|Q|WW?W?W?)(?![^\[]*\]))/g, parseFormatStrings).replace(/\[|\]/g, "") : this._toString();
};
}());
/*************************************************************
* SugarPak - Domain Specific Language - Syntactical Sugar *
*************************************************************/
(function () {
var $D = Date, $P = $D.prototype, $N = Number.prototype;
// private
$P._orient = +1;
// private
$P._nth = null;
// private
$P._is = false;
// private
$P._same = false;
// private
$P._isSecond = false;
// private
$N._dateElement = "days";
/**
* Moves the date to the next instance of a date as specified by the subsequent date element function (eg. .day(), .month()), month name function (eg. .january(), .jan()) or day name function (eg. .friday(), fri()).
* Example
<pre><code>
Date.today().next().friday();
Date.today().next().fri();
Date.today().next().march();
Date.today().next().mar();
Date.today().next().week();
</code></pre>
*
* @return {Date} date
*/
$P.next = function () {
this._move = true;
this._orient = +1;
return this;
};
/**
* Creates a new Date (Date.today()) and moves the date to the next instance of the date as specified by the subsequent date element function (eg. .day(), .month()), month name function (eg. .january(), .jan()) or day name function (eg. .friday(), fri()).
* Example
<pre><code>
Date.next().friday();
Date.next().fri();
Date.next().march();
Date.next().mar();
Date.next().week();
</code></pre>
*
* @return {Date} date
*/
$D.next = function () {
return $D.today().next();
};
/**
* Moves the date to the previous instance of a date as specified by the subsequent date element function (eg. .day(), .month()), month name function (eg. .january(), .jan()) or day name function (eg. .friday(), fri()).
* Example
<pre><code>
Date.today().last().friday();
Date.today().last().fri();
Date.today().last().march();
Date.today().last().mar();
Date.today().last().week();
</code></pre>
*
* @return {Date} date
*/
$P.last = $P.prev = $P.previous = function () {
this._move = true;
this._orient = -1;
return this;
};
/**
* Creates a new Date (Date.today()) and moves the date to the previous instance of the date as specified by the subsequent date element function (eg. .day(), .month()), month name function (eg. .january(), .jan()) or day name function (eg. .friday(), fri()).
* Example
<pre><code>
Date.last().friday();
Date.last().fri();
Date.previous().march();
Date.prev().mar();
Date.last().week();
</code></pre>
*
* @return {Date} date
*/
$D.last = $D.prev = $D.previous = function () {
return $D.today().last();
};
/**
* Performs a equality check when followed by either a month name, day name or .weekday() function.
* Example
<pre><code>
Date.today().is().friday(); // true|false
Date.today().is().fri();
Date.today().is().march();
Date.today().is().mar();
</code></pre>
*
* @return {Boolean} true|false
*/
$P.is = function () {
this._is = true;
return this;
};
/**
* Determines if two date objects occur on/in exactly the same instance of the subsequent date part function.
* The function .same() must be followed by a date part function (example: .day(), .month(), .year(), etc).
*
* An optional Date can be passed in the date part function. If now date is passed as a parameter, 'Now' is used.
*
* The following example demonstrates how to determine if two dates fall on the exact same day.
*
* Example
<pre><code>
var d1 = Date.today(); // today at 00:00
var d2 = new Date(); // exactly now.
// Do they occur on the same day?
d1.same().day(d2); // true
// Do they occur on the same hour?
d1.same().hour(d2); // false, unless d2 hour is '00' (midnight).
// What if it's the same day, but one year apart?
var nextYear = Date.today().add(1).year();
d1.same().day(nextYear); // false, because the dates must occur on the exact same day.
</code></pre>
*
* Scenario: Determine if a given date occurs during some week period 2 months from now.
*
* Example
<pre><code>
var future = Date.today().add(2).months();
return someDate.same().week(future); // true|false;
</code></pre>
*
* @return {Boolean} true|false
*/
$P.same = function () {
this._same = true;
this._isSecond = false;
return this;
};
/**
* Determines if the current date/time occurs during Today. Must be preceded by the .is() function.
* Example
<pre><code>
someDate.is().today(); // true|false
new Date().is().today(); // true
Date.today().is().today();// true
Date.today().add(-1).day().is().today(); // false
</code></pre>
*
* @return {Boolean} true|false
*/
$P.today = function () {
return this.same().day();
};
/**
* Determines if the current date is a weekday. This function must be preceded by the .is() function.
* Example
<pre><code>
Date.today().is().weekday(); // true|false
</code></pre>
*
* @return {Boolean} true|false
*/
$P.weekday = function () {
if (this._nth) {
return df("Weekday").call(this);
}
if (this._move) {
return this.addWeekdays(this._orient);
}
if (this._is) {
this._is = false;
return (!this.is().sat() && !this.is().sun());
}
return false;
};
/**
* Determines if the current date is on the weekend. This function must be preceded by the .is() function.
* Example
<pre><code>
Date.today().is().weekend(); // true|false
</code></pre>
*
* @return {Boolean} true|false
*/
$P.weekend = function () {
if (this._is) {
this._is = false;
return (this.is().sat() || this.is().sun());
}
return false;
};
/**
* Sets the Time of the current Date instance. A string "6:15 pm" or config object {hour:18, minute:15} are accepted.
* Example
<pre><code>
// Set time to 6:15pm with a String
Date.today().at("6:15pm");
// Set time to 6:15pm with a config object
Date.today().at({hour:18, minute:15});
</code></pre>
*
* @return {Date} date
*/
$P.at = function (time) {
return (typeof time === "string") ? $D.parse(this.toString("d") + " " + time) : this.set(time);
};
/**
* Creates a new Date() and adds this (Number) to the date based on the preceding date element function (eg. second|minute|hour|day|month|year).
* Example
<pre><code>
// Undeclared Numbers must be wrapped with parentheses. Requirment of JavaScript.
(3).days().fromNow();
(6).months().fromNow();
// Declared Number variables do not require parentheses.
var n = 6;
n.months().fromNow();
</code></pre>
*
* @return {Date} A new Date instance
*/
$N.fromNow = $N.after = function (date) {
var c = {};
c[this._dateElement] = this;
return ((!date) ? new Date() : date.clone()).add(c);
};
/**
* Creates a new Date() and subtract this (Number) from the date based on the preceding date element function (eg. second|minute|hour|day|month|year).
* Example
<pre><code>
// Undeclared Numbers must be wrapped with parentheses. Requirment of JavaScript.
(3).days().ago();
(6).months().ago();
// Declared Number variables do not require parentheses.
var n = 6;
n.months().ago();
</code></pre>
*
* @return {Date} A new Date instance
*/
$N.ago = $N.before = function (date) {
var c = {},
s = (this._dateElement[this._dateElement.length-1] !== "s") ? this._dateElement + "s" : this._dateElement;
c[s] = this * -1;
return ((!date) ? new Date() : date.clone()).add(c);
};
// Do NOT modify the following string tokens. These tokens are used to build dynamic functions.
// All culture-specific strings can be found in the CultureInfo files.
var dx = ("sunday monday tuesday wednesday thursday friday saturday").split(/\s/),
mx = ("january february march april may june july august september october november december").split(/\s/),
px = ("Millisecond Second Minute Hour Day Week Month Year Quarter Weekday").split(/\s/),
pxf = ("Milliseconds Seconds Minutes Hours Date Week Month FullYear Quarter").split(/\s/),
nth = ("final first second third fourth fifth").split(/\s/),
de;
/**
* Returns an object literal of all the date parts.
* Example
<pre><code>
var o = new Date().toObject();
// { year: 2008, month: 4, week: 20, day: 13, hour: 18, minute: 9, second: 32, millisecond: 812 }
// The object properties can be referenced directly from the object.
alert(o.day); // alerts "13"
alert(o.year); // alerts "2008"
</code></pre>
*
* @return {Date} An object literal representing the original date object.
*/
$P.toObject = function () {
var o = {};
for (var i = 0; i < px.length; i++) {
if (this["get" + pxf[i]]) {
o[px[i].toLowerCase()] = this["get" + pxf[i]]();
}
}
return o;
};
/**
* Returns a date created from an object literal. Ignores the .week property if set in the config.
* Example
<pre><code>
var o = new Date().toObject();
return Date.fromObject(o); // will return the same date.
var o2 = {month: 1, day: 20, hour: 18}; // birthday party!
Date.fromObject(o2);
</code></pre>
*
* @return {Date} An object literal representing the original date object.
*/
$D.fromObject = function(config) {
config.week = null;
return Date.today().set(config);
};
// Create day name functions and abbreviated day name functions (eg. monday(), friday(), fri()).
var df = function (n) {
return function () {
if (this._is) {
this._is = false;
return this.getDay() === n;
}
if (this._move) { this._move = null; }
if (this._nth !== null) {
// If the .second() function was called earlier, remove the _orient
// from the date, and then continue.
// This is required because 'second' can be used in two different context.
//
// Example
//
// Date.today().add(1).second();
// Date.march().second().monday();
//
// Things get crazy with the following...
// Date.march().add(1).second().second().monday(); // but it works!!
//
if (this._isSecond) {
this.addSeconds(this._orient * -1);
}
// make sure we reset _isSecond
this._isSecond = false;
var ntemp = this._nth;
this._nth = null;
var temp = this.clone().moveToLastDayOfMonth();
this.moveToNthOccurrence(n, ntemp);
if (this > temp) {
throw new RangeError($D.getDayName(n) + " does not occur " + ntemp + " times in the month of " + $D.getMonthName(temp.getMonth()) + " " + temp.getFullYear() + ".");
}
return this;
}
return this.moveToDayOfWeek(n, this._orient);
};
};
var sdf = function (n) {
return function () {
var t = $D.today(), shift = n - t.getDay();
if (n === 0 && Date.CultureInfo.firstDayOfWeek === 1 && t.getDay() !== 0) {
shift = shift + 7;
}
return t.addDays(shift);
};
};
// Create month name functions and abbreviated month name functions (eg. january(), march(), mar()).
var month_instance_functions = function (n) {
return function () {
if (this._is) {
this._is = false;
return this.getMonth() === n;
}
return this.moveToMonth(n, this._orient);
};
};
var month_static_functions = function (n) {
return function () {
return $D.today().set({ month: n, day: 1 });
};
};
var processTerms = function (names, staticFunc, instanceFunc) {
for (var i = 0; i < names.length; i++) {
// Create constant static Name variables.
$D[names[i].toUpperCase()] = $D[names[i].toUpperCase().substring(0, 3)] = i;
// Create Name functions.
$D[names[i]] = $D[names[i].substring(0, 3)] = staticFunc(i);
// Create Name instance functions.
$P[names[i]] = $P[names[i].substring(0, 3)] = instanceFunc(i);
}
};
processTerms(dx, sdf, df);
processTerms(mx, month_static_functions, month_instance_functions);
// Create date element functions and plural date element functions used with Date (eg. day(), days(), months()).
var ef = function (j) {
return function () {
// if the .second() function was called earlier, the _orient
// has alread been added. Just return this and reset _isSecond.
if (this._isSecond) {
this._isSecond = false;
return this;
}
if (this._same) {
this._same = this._is = false;
var o1 = this.toObject(),
o2 = (arguments[0] || new Date()).toObject(),
v = "",
k = j.toLowerCase();
// the substr trick with -1 doesn't work in IE8 or less
k = (k[k.length-1] === "s") ? k.substring(0,k.length-1) : k;
for (var m = (px.length - 1); m > -1; m--) {
v = px[m].toLowerCase();
if (o1[v] !== o2[v]) {
return false;
}
if (k === v) {
break;
}
}
return true;
}
if (j.substring(j.length - 1) !== "s") {
j += "s";
}
if (this._move) { this._move = null; }
return this["add" + j](this._orient);
};
};
var nf = function (n) {
return function () {
this._dateElement = n;
return this;
};
};
for (var k = 0; k < px.length; k++) {
de = px[k].toLowerCase();
if(de !== "weekday") {
// Create date element functions and plural date element functions used with Date (eg. day(), days(), months()).
$P[de] = $P[de + "s"] = ef(px[k]);
// Create date element functions and plural date element functions used with Number (eg. day(), days(), months()).
$N[de] = $N[de + "s"] = nf(de + "s");
}
}
$P._ss = ef("Second");
    // nth-occurrence functions (first(), second(), third(), ... last()).
    // n is the occurrence index; -1 means "last".
    var nthfn = function (n) {
        return function (dayOfWeek) {
            if (this._same) {
                // inside a .same() chain, "second" means the time unit
                return this._ss(arguments[0]);
            }
            if (dayOfWeek || dayOfWeek === 0) {
                return this.moveToNthOccurrence(dayOfWeek, n);
            }
            this._nth = n;
            // if the operator is 'second' add the _orient, then deal with it later...
            if (n === 2 && (dayOfWeek === undefined || dayOfWeek === null)) {
                this._isSecond = true;
                return this.addSeconds(this._orient);
            }
            return this;
        };
    };
    // index 0 is "last" (occurrence -1); the rest map to their index
    for (var l = 0; l < nth.length; l++) {
        $P[nth[l]] = (l === 0) ? nthfn(-1) : nthfn(l);
    }
}());
(function () {
    "use strict";
    // Date.Parsing: low-level parsers (ISO 8601, bare numerics) plus input
    // normalisation shared by the natural-language grammar.
    Date.Parsing = {
        Exception: function (s) {
            this.message = "Parse error at '" + s.substring(0, 10) + " ...'";
        }
    };
    var $P = Date.Parsing;
    // cumulative day counts at the start of each month
    var dayOffsets = {
        standard: [0,31,59,90,120,151,181,212,243,273,304,334],
        leap: [0,31,60,91,121,152,182,213,244,274,305,335]
    };
    // Gregorian leap-year rule
    $P.isLeapYear = function(year) {
        return ((year % 4 === 0) && (year % 100 !== 0)) || (year % 400 === 0);
    };
var utils = {
multiReplace : function (str, hash ) {
var key;
for (key in hash) {
if (Object.prototype.hasOwnProperty.call(hash, key)) {
var regex;
if (typeof hash[key] === "function") {
} else {
regex = (hash[key] instanceof RegExp) ? hash[key] : new RegExp(hash[key], "g");
}
str = str.replace(regex, key);
}
}
return str;
},
getDayOfYearFromWeek : function (obj) {
var d, jan4, offset;
obj.weekDay = (!obj.weekDay && obj.weekDay !== 0) ? 1 : obj.weekDay;
d = new Date(obj.year, 0, 4);
jan4 = d.getDay() === 0 ? 7 : d.getDay(); // JS is 0 indexed on Sunday.
offset = jan4+3;
obj.dayOfYear = ((obj.week * 7) + (obj.weekDay === 0 ? 7 : obj.weekDay))-offset;
return obj;
},
getDayOfYear : function (obj, dayOffset) {
if (!obj.dayOfYear) {
obj = utils.getDayOfYearFromWeek(obj);
}
for (var i=0;i <= dayOffset.length;i++) {
if (obj.dayOfYear < dayOffset[i] || i === dayOffset.length) {
obj.day = obj.day ? obj.day : (obj.dayOfYear - dayOffset[i-1]);
break;
} else {
obj.month = i;
}
}
return obj;
},
adjustForTimeZone : function (obj, date) {
var offset;
if (obj.zone.toUpperCase() === "Z" || (obj.zone_hours === 0 && obj.zone_minutes === 0)) {
// it's UTC/GML so work out the current timeszone offset
offset = -date.getTimezoneOffset();
} else {
offset = (obj.zone_hours*60) + (obj.zone_minutes || 0);
if (obj.zone_sign === "+") {
offset *= -1;
}
offset -= date.getTimezoneOffset();
}
date.setMinutes(date.getMinutes()+offset);
return date;
},
setDefaults : function (obj) {
obj.year = obj.year || Date.today().getFullYear();
obj.hours = obj.hours || 0;
obj.minutes = obj.minutes || 0;
obj.seconds = obj.seconds || 0;
obj.milliseconds = obj.milliseconds || 0;
if (!(!obj.month && (obj.week || obj.dayOfYear))) {
// if we have a month, or if we don't but don't have the day calculation data
obj.month = obj.month || 0;
obj.day = obj.day || 1;
}
return obj;
},
dataNum: function (data, mod, explict, postProcess) {
var dataNum = data*1;
if (mod) {
if (postProcess) {
return data ? mod(data)*1 : data;
} else {
return data ? mod(dataNum) : data;
}
} else if (!explict){
return data ? dataNum : data;
} else {
return (data && typeof data !== "undefined") ? dataNum : data;
}
},
timeDataProcess: function (obj) {
var timeObj = {};
for (var x in obj.data) {
if (obj.data.hasOwnProperty(x)) {
timeObj[x] = obj.ignore[x] ? obj.data[x] : utils.dataNum(obj.data[x], obj.mods[x], obj.explict[x], obj.postProcess[x]);
}
}
if (obj.data.secmins) {
obj.data.secmins = obj.data.secmins.replace(",", ".") * 60;
if (!timeObj.minutes) {
timeObj.minutes = obj.data.secmins;
} else if (!timeObj.seconds) {
timeObj.seconds = obj.data.secmins;
}
delete obj.secmins;
}
return timeObj;
},
buildTimeObjectFromData: function (data) {
var time = utils.timeDataProcess({
data: {
year : data[1],
month : data[5],
day : data[7],
week : data[8],
dayOfYear : data[10],
hours : data[15],
zone_hours : data[23],
zone_minutes : data[24],
zone : data[21],
zone_sign : data[22],
weekDay : data[9],
minutes: data[16],
seconds: data[19],
milliseconds: data[20],
secmins: data[18]
},
mods: {
month: function(data) {
return data-1;
},
weekDay: function (data) {
data = Math.abs(data);
return (data === 7 ? 0 : data);
},
minutes: function (data) {
return data.replace(":","");
},
seconds: function (data) {
return Math.floor( (data.replace(":","").replace(",","."))*1 );
},
milliseconds: function (data) {
return (data.replace(",",".")*1000);
}
},
postProcess: {
minutes: true,
seconds: true,
milliseconds: true
},
explict: {
zone_hours: true,
zone_minutes: true
},
ignore: {
zone: true,
zone_sign: true,
secmins: true
}
});
return time;
},
addToHash: function (hash, keys, data) {
keys = keys;
data = data;
var len = keys.length;
for (var i = 0; i < len; i++) {
hash[keys[i]] = data[i];
}
return hash;
},
combineRegex: function (r1, r2) {
return new RegExp("(("+r1.source+")\\s("+r2.source+"))");
},
getDateNthString: function(add, last, inc){
if (add) {
return Date.today().addDays(inc).toString("d");
} else if (last) {
return Date.today().last()[inc]().toString("d");
}
},
buildRegexData: function (array) {
var arr = [];
var len = array.length;
for (var i=0; i < len; i++) {
if (Object.prototype.toString.call(array[i]) === '[object Array]') { // oldIE compat version of Array.isArray
arr.push(this.combineRegex(array[i][0], array[i][1]));
} else {
arr.push(array[i]);
}
}
return arr;
}
};
$P.processTimeObject = function (obj) {
var date, dayOffset;
utils.setDefaults(obj);
dayOffset = ($P.isLeapYear(obj.year)) ? dayOffsets.leap : dayOffsets.standard;
if (!obj.month && (obj.week || obj.dayOfYear)) {
utils.getDayOfYear(obj, dayOffset);
} else {
obj.dayOfYear = dayOffset[obj.month] + obj.day;
}
date = new Date(obj.year, obj.month, obj.day, obj.hours, obj.minutes, obj.seconds, obj.milliseconds);
if (obj.zone) {
utils.adjustForTimeZone(obj, date); // adjust (and calculate) for timezone
}
return date;
};
$P.ISO = {
regex : /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-4])((:?)[0-5]\d)?|24\:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?\s?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/,
parse : function (s) {
var time, data = s.match(this.regex);
if (!data || !data.length) {
return null;
}
time = utils.buildTimeObjectFromData(data);
if (!time.year || (!time.year && (!time.month && !time.day) && (!time.week && !time.dayOfYear)) ) {
return null;
}
return $P.processTimeObject(time);
}
};
$P.Numeric = {
isNumeric: function (e){return!isNaN(parseFloat(e))&&isFinite(e);},
regex: /\b([0-1]?[0-9])([0-3]?[0-9])([0-2]?[0-9]?[0-9][0-9])\b/i,
parse: function (s) {
var data, i,
time = {},
order = Date.CultureInfo.dateElementOrder.split("");
if (!(this.isNumeric(s)) || // if it's non-numeric OR
(s[0] === "+" && s[0] === "-")) { // It's an arithmatic string (eg +/-1000)
return null;
}
if (s.length < 5 && s.indexOf(".") < 0 && s.indexOf("/") < 0) { // assume it's just a year.
time.year = s;
return $P.processTimeObject(time);
}
data = s.match(this.regex);
if (!data || !data.length) {
return null;
}
for (i=0; i < order.length; i++) {
switch(order[i]) {
case "d":
time.day = data[i+1];
break;
case "m":
time.month = (data[i+1]-1);
break;
case "y":
time.year = data[i+1];
break;
}
}
return $P.processTimeObject(time);
}
};
    // Normalizer: rewrites natural-language fragments ("tomorrow", "last
    // tuesday", month names, "this morning") into concrete short dates and
    // canonical tokens before the grammar proper runs.
    $P.Normalizer = {
        // patterns (or [past, weekday] pairs) for relative phrases; paired
        // positionally with the dates produced by keys()
        regexData: function () {
            var $R = Date.CultureInfo.regexPatterns;
            return utils.buildRegexData([
                $R.tomorrow,
                $R.yesterday,
                [$R.past, $R.mon],
                [$R.past, $R.tue],
                [$R.past, $R.wed],
                [$R.past, $R.thu],
                [$R.past, $R.fri],
                [$R.past, $R.sat],
                [$R.past, $R.sun]
            ]);
        },
        // canonical-token -> pattern table (the key is the replacement text)
        basicReplaceHash : function() {
            var $R = Date.CultureInfo.regexPatterns;
            return {
                "January": $R.jan.source,
                "February": $R.feb,
                "March": $R.mar,
                "April": $R.apr,
                "May": $R.may,
                "June": $R.jun,
                "July": $R.jul,
                "August": $R.aug,
                "September": $R.sep,
                "October": $R.oct,
                "November": $R.nov,
                "December": $R.dec,
                "": /\bat\b/gi,
                " ": /\s{2,}/,
                "am": $R.inTheMorning,
                "9am": $R.thisMorning,
                "pm": $R.inTheEvening,
                "7pm":$R.thisEvening
            };
        },
        // replacement short-date strings, paired positionally with regexData()
        keys : function(){
            return [
                utils.getDateNthString(true, false, 1), // tomorrow
                utils.getDateNthString(true, false, -1), // yesterday
                utils.getDateNthString(false, true, "monday"), //last mon
                utils.getDateNthString(false, true, "tuesday"), //last tues
                utils.getDateNthString(false, true, "wednesday"), //last wed
                utils.getDateNthString(false, true, "thursday"), //last thurs
                utils.getDateNthString(false, true, "friday"), //last fri
                utils.getDateNthString(false, true, "saturday"), //last sat
                utils.getDateNthString(false, true, "sunday") //last sun
            ];
        },
        // function-based replacements that need capture groups
        buildRegexFunctions: function () {
            var $R = Date.CultureInfo.regexPatterns;
            var __ = Date.i18n.__;
            var tomorrowRE = new RegExp("(\\b\\d\\d?("+__("AM")+"|"+__("PM")+")? )("+$R.tomorrow.source.slice(1)+")", "i"); // adapted tomorrow regex for AM PM relative dates
            var todayRE = new RegExp($R.today.source + "(?!\\s*([+-]))\\b"); // today, but excludes the math operators (eg "today + 2h")
            this.replaceFuncs = [
                [todayRE, function (full) {
                    return (full.length > 1) ? Date.today().toString("d") : full;
                }],
                [tomorrowRE,
                function(full, m1) {
                    var t = Date.today().addDays(1).toString("d");
                    return (t + " " + m1);
                }],
                [$R.amThisMorning, function(str, am){return am;}],
                [$R.pmThisEvening, function(str, pm){return pm;}]
            ];
        },
        // rebuilt whenever the culture/language changes
        buildReplaceData: function () {
            this.buildRegexFunctions();
            this.replaceHash = utils.addToHash(this.basicReplaceHash(), this.keys(), this.regexData());
        },
        stringReplaceFuncs: function (s) {
            for (var i=0; i < this.replaceFuncs.length; i++) {
                s = s.replace(this.replaceFuncs[i][0], this.replaceFuncs[i][1]);
            }
            return s;
        },
        parse: function (s) {
            s = this.stringReplaceFuncs(s);
            s = utils.multiReplace(s, this.replaceHash);
            try {
                // "x/2004"-style partial dates: normalise for dmy cultures
                var n = s.split(/([\s\-\.\,\/\x27]+)/);
                if (n.length === 3 &&
                    $P.Numeric.isNumeric(n[0]) &&
                    $P.Numeric.isNumeric(n[2]) &&
                    (n[2].length >= 4)) {
                    // ok, so we're dealing with x/year. But that's not a full date.
                    // This fixes wonky dateElementOrder parsing when set to dmy order.
                    if (Date.CultureInfo.dateElementOrder[0] === "d") {
                        s = "1/" + n[0] + "/" + n[2]; // set to 1st of month and normalize the seperator
                    }
                }
            } catch (e) {}
            return s;
        }
    };
    $P.Normalizer.buildReplaceData();
}());
(function () {
    // Parser-combinator toolkit. Every operator takes/returns "rules":
    // functions from an input string s to [match, remainder] that throw
    // $P.Exception on failure.
    var $P = Date.Parsing;
    var _ = $P.Operators = {
        //
        // Tokenizers
        //
        rtoken: function (r) { // regex token
            return function (s) {
                var mx = s.match(r);
                if (mx) {
                    return ([ mx[0], s.substring(mx[0].length) ]);
                } else {
                    throw new $P.Exception(s);
                }
            };
        },
        token: function () { // whitespace-eating token
            return function (s) {
                return _.rtoken(new RegExp("^\\s*" + s + "\\s*"))(s);
            };
        },
        stoken: function (s) { // string token
            return _.rtoken(new RegExp("^" + s));
        },
        // Atomic Operators
        // until(p): NOTE(review) — the try/catch looks inverted: on a failed
        // parse it dereferences the *previous* rx (null on the first pass,
        // which would throw a TypeError). Preserved as-is since this operator
        // is not exercised in this excerpt; confirm against the intended spec.
        until: function (p) {
            return function (s) {
                var qx = [], rx = null;
                while (s.length) {
                    try {
                        rx = p.call(this, s);
                    } catch (e) {
                        qx.push(rx[0]);
                        s = rx[1];
                        continue;
                    }
                    break;
                }
                return [ qx, s ];
            };
        },
many: function (p) {
return function (s) {
var rx = [], r = null;
while (s.length) {
try {
r = p.call(this, s);
} catch (e) {
return [ rx, s ];
}
rx.push(r[0]);
s = r[1];
}
return [ rx, s ];
};
},
// generator operators -- see below
optional: function (p) {
return function (s) {
var r = null;
try {
r = p.call(this, s);
} catch (e) {
return [ null, s ];
}
return [ r[0], r[1] ];
};
},
not: function (p) {
return function (s) {
try {
p.call(this, s);
} catch (e) {
return [null, s];
}
throw new $P.Exception(s);
};
},
ignore: function (p) {
return p ?
function (s) {
var r = null;
r = p.call(this, s);
return [null, r[1]];
} : null;
},
        // product(px, q1, q2, ...): for each rule in the first argument,
        // builds each(rule, [q1, q2, ...]); relies on `each` being
        // vectorised (accepting an array argument) further below.
        product: function () {
            var px = arguments[0],
                qx = Array.prototype.slice.call(arguments, 1), rx = [];
            for (var i = 0 ; i < px.length ; i++) {
                rx.push(_.each(px[i], qx));
            }
            return rx;
        },
        // cache(rule): memoises rule results (including failures, stored as
        // the thrown Exception) per input string, evicting a handful of the
        // oldest keys once CACHE_MAX entries is reached.
        cache: function (rule) {
            var cache = {}, cache_length = 0, cache_keys = [], CACHE_MAX = Date.Config.CACHE_MAX || 100000, r = null;
            var cacheCheck = function () {
                if (cache_length === CACHE_MAX) {
                    // kill several keys, don't want to have to do this all the time...
                    for (var i=0; i < 10; i++) {
                        var key = cache_keys.shift();
                        if (key) {
                            delete cache[key];
                            cache_length--;
                        }
                    }
                }
            };
            return function (s) {
                cacheCheck();
                try {
                    r = cache[s] = (cache[s] || rule.call(this, s));
                } catch (e) {
                    r = cache[s] = e;
                }
                // NOTE(review): the counter/keys grow on cache *hits* too, so
                // eviction can fire early and delete live keys — harmless for
                // correctness (a miss just re-parses) but worth confirming.
                cache_length++;
                cache_keys.push(s);
                if (r instanceof $P.Exception) {
                    throw r;
                } else {
                    return r;
                }
            };
        },
// vector operators -- see below
any: function () {
var px = arguments;
return function (s) {
var r = null;
for (var i = 0; i < px.length; i++) {
if (px[i] == null) {
continue;
}
try {
r = (px[i].call(this, s));
} catch (e) {
r = null;
}
if (r) {
return r;
}
}
throw new $P.Exception(s);
};
},
each: function () {
var px = arguments;
return function (s) {
var rx = [], r = null;
for (var i = 0; i < px.length ; i++) {
if (px[i] == null) {
continue;
}
try {
r = (px[i].call(this, s));
} catch (e) {
throw new $P.Exception(s);
}
rx.push(r[0]);
s = r[1];
}
return [ rx, s];
};
},
all: function () {
var px = arguments, _ = _;
return _.each(_.optional(px));
},
// delimited operators
sequence: function (px, d, c) {
d = d || _.rtoken(/^\s*/);
c = c || null;
if (px.length === 1) {
return px[0];
}
return function (s) {
var r = null, q = null;
var rx = [];
for (var i = 0; i < px.length ; i++) {
try {
r = px[i].call(this, s);
} catch (e) {
break;
}
rx.push(r[0]);
try {
q = d.call(this, r[1]);
} catch (ex) {
q = null;
break;
}
s = q[1];
}
if (!r) {
throw new $P.Exception(s);
}
if (q) {
throw new $P.Exception(q[1]);
}
if (c) {
try {
r = c.call(this, r[1]);
} catch (ey) {
throw new $P.Exception(r[1]);
}
}
return [ rx, (r?r[1]:s) ];
};
},
//
// Composite Operators
//
between: function (d1, p, d2) {
d2 = d2 || d1;
var _fn = _.each(_.ignore(d1), p, _.ignore(d2));
return function (s) {
var rx = _fn.call(this, s);
return [[rx[0][0], r[0][2]], rx[1]];
};
},
list: function (p, d, c) {
d = d || _.rtoken(/^\s*/);
c = c || null;
return (p instanceof Array ?
_.each(_.product(p.slice(0, -1), _.ignore(d)), p.slice(-1), _.ignore(c)) :
_.each(_.many(_.each(p, _.ignore(d))), px, _.ignore(c)));
},
        // set(px, d, c): matches any subset of the rules in px, in any order,
        // separated by delimiter d, preferring whichever combination consumes
        // the most input. Optional closing rule c must match the remainder.
        set: function (px, d, c) {
            d = d || _.rtoken(/^\s*/);
            c = c || null;
            return function (s) {
                // r is the current match, best the current 'best' match
                // which means it parsed the most amount of input
                var r = null, p = null, q = null, rx = null, best = [[], s], last = false;
                // go through the rules in the given set
                for (var i = 0; i < px.length ; i++) {
                    // last is a flag indicating whether this must be the last element
                    // if there is only 1 element, then it MUST be the last one
                    q = null;
                    p = null;
                    r = null;
                    last = (px.length === 1);
                    // first, we try simply to match the current pattern
                    // if not, try the next pattern
                    try {
                        r = px[i].call(this, s);
                    } catch (e) {
                        continue;
                    }
                    // since we are matching against a set of elements, the first
                    // thing to do is to add r[0] to matched elements
                    rx = [[r[0]], r[1]];
                    // if we matched and there is still input to parse and
                    // we don't already know this is the last element,
                    // we're going to next check for the delimiter ...
                    // if there's none, or if there's no input left to parse
                    // than this must be the last element after all ...
                    if (r[1].length > 0 && ! last) {
                        try {
                            q = d.call(this, r[1]);
                        } catch (ex) {
                            last = true;
                        }
                    } else {
                        last = true;
                    }
                    // if we parsed the delimiter and now there's no more input,
                    // that means we shouldn't have parsed the delimiter at all
                    // so don't update r and mark this as the last element ...
                    if (!last && q[1].length === 0) {
                        last = true;
                    }
                    // so, if this isn't the last element, we're going to see if
                    // we can get any more matches from the remaining (unmatched)
                    // elements ...
                    if (!last) {
                        // build a list of the remaining rules we can match against,
                        // i.e., all but the one we just matched against
                        var qx = [];
                        for (var j = 0; j < px.length ; j++) {
                            if (i !== j) {
                                qx.push(px[j]);
                            }
                        }
                        // now invoke recursively set with the remaining input
                        // note that we don't include the closing delimiter ...
                        // we'll check for that ourselves at the end
                        p = _.set(qx, d).call(this, q[1]);
                        // if we got a non-empty set as a result ...
                        // (otw rx already contains everything we want to match)
                        if (p[0].length > 0) {
                            // update current result, which is stored in rx ...
                            // basically, pick up the remaining text from p[1]
                            // and concat the result from p[0] so that we don't
                            // get endless nesting ...
                            rx[0] = rx[0].concat(p[0]);
                            rx[1] = p[1];
                        }
                    }
                    // at this point, rx either contains the last matched element
                    // or the entire matched set that starts with this element.
                    // now we just check to see if this variation is better than
                    // our best so far, in terms of how much of the input is parsed
                    if (rx[1].length < best[1].length) {
                        best = rx;
                    }
                    // if we've parsed all the input, then we're finished
                    if (best[1].length === 0) {
                        break;
                    }
                }
                // so now we've either gone through all the patterns trying them
                // as the initial match; or we found one that parsed the entire
                // input string ...
                // if best has no matches, just return empty set ...
                if (best[0].length === 0) {
                    return best;
                }
                // if a closing delimiter is provided, then we have to check it also
                if (c) {
                    // we try this even if there is no remaining input because the pattern
                    // may well be optional or match empty input ...
                    try {
                        q = c.call(this, best[1]);
                    } catch (ey) {
                        throw new $P.Exception(best[1]);
                    }
                    // it parsed ... be sure to update the best match remaining input
                    best[1] = q[1];
                }
                // if we're here, either there was no closing delimiter or we parsed it
                // so now we have the best match; just return it!
                return best;
            };
        },
forward: function (gr, fname) {
return function (s) {
return gr[fname].call(this, s);
};
},
//
// Translation Operators
//
replace: function (rule, repl) {
return function (s) {
var r = rule.call(this, s);
return [repl, r[1]];
};
},
process: function (rule, fn) {
return function (s) {
var r = rule.call(this, s);
return [fn.call(this, r[0]), r[1]];
};
},
min: function (min, rule) {
return function (s) {
var rx = rule.call(this, s);
if (rx[0].length < min) {
throw new $P.Exception(s);
}
return rx;
};
}
};
// Generator Operators And Vector Operators
// Generators are operators that have a signature of F(R) => R,
// taking a given rule and returning another rule, such as
// ignore, which parses a given rule and throws away the result.
// Vector operators are those that have a signature of F(R1,R2,...) => R,
// take a list of rules and returning a new rule, such as each.
// Generator operators are converted (via the following _generator
// function) into functions that can also take a list or array of rules
// and return an array of new rules as though the function had been
// called on each rule in turn (which is what actually happens).
// This allows generators to be used with vector operators more easily.
// Example:
// each(ignore(foo, bar)) instead of each(ignore(foo), ignore(bar))
// This also turns generators into vector operators, which allows
// constructs like:
// not(cache(foo, bar))
var _generator = function (op) {
function gen() {
| var args = null, rx = [], px, i;
if (arguments.length > 1) {
args = Array.prototype.slice.call(arguments);
} else if (arguments[0] instanceof Array) {
args = arguments[0];
}
if (args) {
px = args.shift();
if (px.length > 0) {
args.unshift(px[i]);
rx.push(op.apply(null, args));
args.shift();
return rx;
}
} else {
return op.apply(null, arguments);
}
}
return gen;
};
    // Vectorise the generator operators (see the comment block above).
    var gx = "optional not ignore cache".split(/\s/);
    for (var i = 0 ; i < gx.length ; i++) {
        _[gx[i]] = _generator(_[gx[i]]);
    }
    // Vector operators accept either an argument list or a single array.
    var _vector = function (op) {
        return function () {
            if (arguments[0] instanceof Array) {
                return op.apply(null, arguments[0]);
            } else {
                return op.apply(null, arguments);
            }
        };
    };
    var vx = "each any all".split(/\s/);
    for (var j = 0 ; j < vx.length ; j++) {
        _[vx[j]] = _vector(_[vx[j]]);
    }
}());
// Natural-language translation: callbacks that accumulate parsed fields on a
// shared state object, then "finish" that state into a concrete Date.
(function () {
    var $D = Date;
var flattenAndCompact = function (ax) {
var rx = [];
for (var i = 0; i < ax.length; i++) {
if (ax[i] instanceof Array) {
rx = rx.concat(flattenAndCompact(ax[i]));
} else {
if (ax[i]) {
rx.push(ax[i]);
}
}
}
return rx;
};
var parseMeridian = function () {
if (this.meridian && (this.hour || this.hour === 0)) {
if (this.meridian === "a" && this.hour > 11 && Date.Config.strict24hr){
throw "Invalid hour and meridian combination";
} else if (this.meridian === "p" && this.hour < 12 && Date.Config.strict24hr){
throw "Invalid hour and meridian combination";
} else if (this.meridian === "p" && this.hour < 12) {
this.hour = this.hour + 12;
} else if (this.meridian === "a" && this.hour === 12) {
this.hour = 0;
}
}
};
var setDefaults = function () {
var now = new Date();
if ((this.hour || this.minute) && (!this.month && !this.year && !this.day)) {
this.day = now.getDate();
}
if (!this.year) {
this.year = now.getFullYear();
}
if (!this.month && this.month !== 0) {
this.month = now.getMonth();
}
if (!this.day) {
this.day = 1;
}
if (!this.hour) {
this.hour = 0;
}
if (!this.minute) {
this.minute = 0;
}
if (!this.second) {
this.second = 0;
}
if (!this.millisecond) {
this.millisecond = 0;
}
};
var finishUtils = {
getToday: function () {
if (this.now || "hour minute second".indexOf(this.unit) !== -1) {
return new Date();
} else {
return $D.today();
}
},
setDaysFromWeekday: function (today, orient){
var gap;
orient = orient || 1;
this.unit = "day";
gap = ($D.getDayNumberFromName(this.weekday) - today.getDay());
this.days = gap ? ((gap + (orient * 7)) % 7) : (orient * 7);
return this;
},
setMonthsFromMonth: function (today, orient) {
var gap;
orient = orient || 1;
this.unit = "month";
gap = (this.month - today.getMonth());
this.months = gap ? ((gap + (orient * 12)) % 12) : (orient * 12);
this.month = null;
return this;
},
setDMYFromWeekday: function () {
var d = Date[this.weekday]();
this.day = d.getDate();
if (!this.month) {
this.month = d.getMonth();
}
this.year = d.getFullYear();
return this;
},
setUnitValue: function (orient) {
if (!this.value && this.operator && this.operator !== null && this[this.unit + "s"] && this[this.unit + "s"] !== null) {
this[this.unit + "s"] = this[this.unit + "s"] + ((this.operator === "add") ? 1 : -1) + (this.value||0) * orient;
} else if (this[this.unit + "s"] == null || this.operator != null) {
if (!this.value) {
this.value = 1;
}
this[this.unit + "s"] = this.value * orient;
}
},
generateDateFromWeeks: function () {
var weekday = (this.weekday !== undefined) ? this.weekday : "today";
var d = Date[weekday]().addWeeks(this.weeks);
if (this.now) {
d.setTimeToNow();
}
return d;
}
};
    // Translator: callbacks invoked by the grammar; each returns a closure
    // that records the captured value on the shared parse-state object.
    $D.Translator = {
        hour: function (s) {
            return function () {
                this.hour = Number(s);
            };
        },
        minute: function (s) {
            return function () {
                this.minute = Number(s);
            };
        },
        second: function (s) {
            return function () {
                this.second = Number(s);
            };
        },
        /* for ss.s format */
        secondAndMillisecond: function (s) {
            return function () {
                var mx = s.match(/^([0-5][0-9])\.([0-9]{1,3})/);
                this.second = Number(mx[1]);
                this.millisecond = Number(mx[2]);
            };
        },
        meridian: function (s) {
            return function () {
                // keep just the leading "a"/"p"
                this.meridian = s.slice(0, 1).toLowerCase();
            };
        },
        timezone: function (s) {
            return function () {
                // numeric offsets ("+0500") vs named zones ("gmt")
                var n = s.replace(/[^\d\+\-]/g, "");
                if (n.length) {
                    this.timezoneOffset = Number(n);
                } else {
                    this.timezone = s.toLowerCase();
                }
            };
        },
        // x is [token, optional-ordinal-suffix]; only the token is used
        day: function (x) {
            var s = x[0];
            return function () {
                this.day = Number(s.match(/\d+/)[0]);
                if (this.day < 1) {
                    throw "invalid day";
                }
            };
        },
        month: function (s) {
            return function () {
                // 3-letter names resolve by position in the packed list
                this.month = (s.length === 3) ? "jan feb mar apr may jun jul aug sep oct nov dec".indexOf(s)/4 : Number(s) - 1;
                if (this.month < 0) {
                    throw "invalid month";
                }
            };
        },
        year: function (s) {
            return function () {
                var n = Number(s);
                // two-digit years pivot on CultureInfo.twoDigitYearMax
                this.year = ((s.length > 2) ? n :
                    (n + (((n + 2000) < Date.CultureInfo.twoDigitYearMax) ? 2000 : 1900)));
            };
        },
        // relative-day keywords; "now" also flags time-of-day anchoring
        rday: function (s) {
            return function () {
                switch (s) {
                    case "yesterday":
                        this.days = -1;
                        break;
                    case "tomorrow":
                        this.days = 1;
                        break;
                    case "today":
                        this.days = 0;
                        break;
                    case "now":
                        this.days = 0;
                        this.now = true;
                        break;
                }
            };
        },
        // Runs the captured callbacks for an exact (format-driven) parse and
        // materialises the Date, validating day-of-month and applying zones.
        finishExact: function (x) {
            var d;
            x = (x instanceof Array) ? x : [x];
            for (var i = 0 ; i < x.length ; i++) {
                if (x[i]) {
                    x[i].call(this);
                }
            }
            setDefaults.call(this);
            parseMeridian.call(this);
            if (this.day > $D.getDaysInMonth(this.year, this.month)) {
                throw new RangeError(this.day + " is not a valid value for days.");
            }
            d = new Date(this.year, this.month, this.day, this.hour, this.minute, this.second, this.millisecond);
            if (this.year < 100) {
                d.setFullYear(this.year); // means years less that 100 are process correctly. JS will parse it otherwise as 1900-1999.
            }
            if (this.timezone) {
                d.set({ timezone: this.timezone });
            } else if (this.timezoneOffset) {
                d.set({ timezoneOffset: this.timezoneOffset });
            }
            return d;
        },
        // Resolves a natural-language parse: interprets the accumulated state
        // (units, operators, weekday/month names, orientation) and either
        // offsets today/now by the computed deltas or sets absolute parts.
        finish: function (x) {
            var today, expression, orient, temp;
            x = (x instanceof Array) ? flattenAndCompact(x) : [ x ];
            if (x.length === 0) {
                return null;
            }
            // replay the captured translator callbacks onto `this`
            for (var i = 0 ; i < x.length ; i++) {
                if (typeof x[i] === "function") {
                    x[i].call(this);
                }
            }
            if (this.now && !this.unit && !this.operator) {
                return new Date();
            } else {
                today = finishUtils.getToday.call(this);
            }
            // expression = relative arithmetic; otherwise an absolute set
            expression = !!(this.days && this.days !== null || this.orient || this.operator);
            orient = ((this.orient === "past" || this.operator === "subtract") ? -1 : 1);
            // "march week 2" style: the month token is actually the value
            if (this.month && this.unit === "week") {
                this.value = this.month + 1;
                delete this.month;
                delete this.day;
            }
            if ((this.month || this.month === 0) && "year day hour minute second".indexOf(this.unit) !== -1) {
                if (!this.value) {
                    this.value = this.month + 1;
                }
                this.month = null;
                expression = true;
            }
            if (!expression && this.weekday && !this.day && !this.days) {
                finishUtils.setDMYFromWeekday.call(this);
            }
            if (expression && this.weekday && this.unit !== "month" && this.unit !== "week") {
                finishUtils.setDaysFromWeekday.call(this, today, orient);
            }
            if (this.weekday && this.unit !== "week" && !this.day && !this.days) {
                temp = Date[this.weekday]();
                this.day = temp.getDate();
                if (temp.getMonth() !== today.getMonth()) {
                    this.month = temp.getMonth();
                }
            }
            if (this.month && this.unit === "day" && this.operator) {
                if (!this.value) {
                    this.value = (this.month + 1);
                }
                this.month = null;
            }
            if (this.value != null && this.month != null && this.year != null) {
                this.day = this.value * 1;
            }
            if (this.month && !this.day && this.value) {
                today.set({ day: this.value * 1 });
                if (!expression) {
                    this.day = this.value * 1;
                }
            }
            if (!this.month && this.value && this.unit === "month" && !this.now) {
                this.month = this.value;
                expression = true;
            }
            if (expression && (this.month || this.month === 0) && this.unit !== "year") {
                finishUtils.setMonthsFromMonth.call(this, today, orient);
            }
            if (!this.unit) {
                this.unit = "day";
            }
            finishUtils.setUnitValue.call(this, orient);
            parseMeridian.call(this);
            if ((this.month || this.month === 0) && !this.day) {
                this.day = 1;
            }
            if (!this.orient && !this.operator && this.unit === "week" && this.value && !this.day && !this.month) {
                return Date.today().setWeek(this.value);
            }
            if (this.unit === "week" && this.weeks && !this.day && !this.month) {
                return finishUtils.generateDateFromWeeks.call(this);
            }
            if (expression && this.timezone && this.day && this.days) {
                this.day = this.days;
            }
            // apply: relative expressions add deltas, absolute parses set parts
            if (expression){
                today.add(this);
            } else {
                today.set(this);
            }
            if (this.timezone) {
                this.timezone = this.timezone.toUpperCase();
                var offset = $D.getTimezoneOffset(this.timezone);
                var timezone;
                if (today.hasDaylightSavingTime()) {
                    // lets check that we're being sane with timezone setting
                    timezone = $D.getTimezoneAbbreviation(offset, today.isDaylightSavingTime());
                    if (timezone !== this.timezone) {
                        // bugger, we're in a place where things like EST vs EDT matters.
                        if (today.isDaylightSavingTime()) {
                            today.addHours(-1);
                        } else {
                            today.addHours(1);
                        }
                    }
                }
                today.setTimezoneOffset(offset);
            }
            return today;
        }
    };
}());
(function () {
    // Grammar wiring: builds the culture-aware token rules used by the
    // natural-language parser from the combinator operators.
    var $D = Date;
    $D.Grammar = {};
    var _ = $D.Parsing.Operators, g = $D.Grammar, t = $D.Translator, _fn;
    // Allow rolling up into general purpose rules
    _fn = function () {
        return _.each(_.any.apply(null, arguments), _.not(g.ctoken2("timeContext")));
    };
    g.datePartDelimiter = _.rtoken(/^([\s\-\.\,\/\x27]+)/);
    g.timePartDelimiter = _.stoken(":");
    g.whiteSpace = _.rtoken(/^\s*/);
    g.generalDelimiter = _.rtoken(/^(([\s\,]|at|@|on)+)/);
    // per-culture compiled token cache (reset by buildGrammarFormats)
    var _C = {};
    g.ctoken = function (keys) {
        var fn = _C[keys];
        if (! fn) {
            var c = Date.CultureInfo.regexPatterns;
            var kx = keys.split(/\s+/), px = [];
            for (var i = 0; i < kx.length ; i++) {
                // each culture token matches its localized pattern but
                // yields the canonical key ("jan", "past", ...)
                px.push(_.replace(_.rtoken(c[kx[i]]), kx[i]));
            }
            fn = _C[keys] = _.any.apply(null, px);
        }
        return fn;
    };
    g.ctoken2 = function (key) {
        return _.rtoken(Date.CultureInfo.regexPatterns[key]);
    };
var cacheProcessRtoken = function (key, token, type, eachToken) {
if (eachToken) {
g[key] = _.cache(_.process(_.each(_.rtoken(token),_.optional(g.ctoken2(eachToken))), type));
} else {
g[key] = _.cache(_.process(_.rtoken(token), type));
}
};
var cacheProcessCtoken = function (token, type) {
return _.cache(_.process(g.ctoken2(token), type));
};
var _F = {}; //function cache
var _get = function (f) {
_F[f] = (_F[f] || g.format(f)[0]);
return _F[f];
};
g.allformats = function (fx) {
var rx = [];
if (fx instanceof Array) {
for (var i = 0; i < fx.length; i++) {
rx.push(_get(fx[i]));
}
} else {
rx.push(_get(fx));
}
return rx;
};
g.formats = function (fx) {
if (fx instanceof Array) {
var rx = [];
for (var i = 0 ; i < fx.length ; i++) {
rx.push(_get(fx[i]));
}
return _.any.apply(null, rx);
} else {
return _get(fx);
}
};
var grammarFormats = {
timeFormats: function(){
var i,
RTokenKeys = [
"h",
"hh",
"H",
"HH",
"m",
"mm",
"s",
"ss",
"ss.s",
"z",
"zz"
],
RToken = [
/^(0[0-9]|1[0-2]|[1-9])/,
/^(0[0-9]|1[0-2])/,
/^([0-1][0-9]|2[0-3]|[0-9])/,
/^([0-1][0-9]|2[0-3])/,
/^([0-5][0-9]|[0-9])/,
/^[0-5][0-9]/,
/^([0-5][0-9]|[0-9])/,
/^[0-5][0-9]/,
/^[0-5][0-9]\.[0-9]{1,3}/,
/^((\+|\-)\s*\d\d\d\d)|((\+|\-)\d\d\:?\d\d)/,
/^((\+|\-)\s*\d\d\d\d)|((\+|\-)\d\d\:?\d\d)/
],
tokens = [
t.hour,
t.hour,
t.hour,
t.minute,
t.minute,
t.second,
t.second,
t.secondAndMillisecond,
t.timezone,
t.timezone,
t.timezone
];
for (i=0; i < RTokenKeys.length; i++) {
cacheProcessRtoken(RTokenKeys[i], RToken[i], tokens[i]);
}
g.hms = _.cache(_.sequence([g.H, g.m, g.s], g.timePartDelimiter));
g.t = cacheProcessCtoken("shortMeridian", t.meridian);
g.tt = cacheProcessCtoken("longMeridian", t.meridian);
g.zzz = cacheProcessCtoken("timezone", t.timezone);
g.timeSuffix = _.each(_.ignore(g.whiteSpace), _.set([ g.tt, g.zzz ]));
g.time = _.each(_.optional(_.ignore(_.stoken("T"))), g.hms, g.timeSuffix);
},
dateFormats: function () {
// pre-loaded rules for different date part order preferences
var _setfn = function () {
return _.set(arguments, g.datePartDelimiter);
};
var i,
RTokenKeys = [
"d",
"dd",
"M",
"MM",
"y",
"yy",
"yyy",
"yyyy"
],
RToken = [
/^([0-2]\d|3[0-1]|\d)/,
/^([0-2]\d|3[0-1])/,
/^(1[0-2]|0\d|\d)/,
/^(1[0-2]|0\d)/,
/^(\d+)/,
/^(\d\d)/,
/^(\d\d?\d?\d?)/,
/^(\d\d\d\d)/
],
tokens = [
t.day,
t.day,
t.month,
t.month,
t.year,
t.year,
t.year,
t.year
],
eachToken = [
"ordinalSuffix",
"ordinalSuffix"
];
for (i=0; i < RTokenKeys.length; i++) {
cacheProcessRtoken(RTokenKeys[i], RToken[i], tokens[i], eachToken[i]);
}
g.MMM = g.MMMM = _.cache(_.process(g.ctoken("jan feb mar apr may jun jul aug sep oct nov dec"), t.month));
g.ddd = g.dddd = _.cache(_.process(g.ctoken("sun mon tue wed thu fri sat"),
function (s) {
return function () {
this.weekday = s;
};
}
));
g.day = _fn(g.d, g.dd);
g.month = _fn(g.M, g.MMM);
g.year = _fn(g.yyyy, g.yy);
g.mdy = _setfn(g.ddd, g.month, g.day, g.year);
g.ymd = _setfn(g.ddd, g.year, g.month, g.day);
g.dmy = _setfn(g.ddd, g.day, g.month, g.year);
g.date = function (s) {
return ((g[Date.CultureInfo.dateElementOrder] || g.mdy).call(this, s));
};
},
relative: function () {
// relative date / time expressions
g.orientation = _.process(g.ctoken("past future"),
function (s) {
return function () {
this.orient = s;
};
}
);
g.operator = _.process(g.ctoken("add subtract"),
function (s) {
return function () {
this.operator = s;
};
}
);
g.rday = _.process(g.ctoken("yesterday tomorrow today now"), t.rday);
g.unit = _.process(g.ctoken("second minute hour day week month year"),
function (s) {
return function () {
this.unit = s;
};
}
);
}
};
g.buildGrammarFormats = function () {
// these need to be rebuilt every time the language changes.
_C = {};
grammarFormats.timeFormats();
grammarFormats.dateFormats();
grammarFormats.relative();
g.value = _.process(_.rtoken(/^([-+]?\d+)?(st|nd|rd|th)?/),
function (s) {
return function () {
this.value = s.replace(/\D/g, "");
};
}
);
g.expression = _.set([g.rday, g.operator, g.value, g.unit, g.orientation, g.ddd, g.MMM ]);
g.format = _.process(_.many(
_.any(
// translate format specifiers into grammar rules
_.process(
_.rtoken(/^(dd?d?d?(?!e)|MM?M?M?|yy?y?y?|hh?|HH?|mm?|ss?|tt?|zz?z?)/),
function (fmt) {
if (g[fmt]) {
return g[fmt];
} else {
throw $D.Parsing.Exception(fmt);
}
}
),
// translate separator tokens into token rules
_.process(_.rtoken(/^[^dMyhHmstz]+/), // all legal separators
function (s) {
return _.ignore(_.stoken(s));
}
)
)
),
// construct the parser ...
function (rules) {
return _.process(_.each.apply(null, rules), t.finishExact);
}
);
// starting rule for general purpose grammar
g._start = _.process(_.set([ g.date, g.time, g.expression ],
g.generalDelimiter, g.whiteSpace), t.finish);
};
g.buildGrammarFormats();
// parsing date format specifiers - ex: "h:m:s tt"
// this little guy will generate a custom parser based
// on the format string, ex: g.format("h:m:s tt")
// check for these formats first
g._formats = g.formats([
"\"yyyy-MM-ddTHH:mm:ssZ\"",
"yyyy-MM-ddTHH:mm:ss.sz",
"yyyy-MM-ddTHH:mm:ssZ",
"yyyy-MM-ddTHH:mm:ssz",
"yyyy-MM-ddTHH:mm:ss",
"yyyy-MM-ddTHH:mmZ",
"yyyy-MM-ddTHH:mmz",
"yyyy-MM-ddTHH:mm",
"ddd, MMM dd, yyyy H:mm:ss tt",
"ddd MMM d yyyy HH:mm:ss zzz",
"MMddyyyy",
"ddMMyyyy",
"Mddyyyy",
"ddMyyyy",
"Mdyyyy",
"dMyyyy",
"yyyy",
"Mdyy",
"dMyy",
"d"
]);
// real starting rule: tries selected formats first,
// then general purpose rule
g.start = function (s) {
try {
var r = g._formats.call({}, s);
if (r[1].length === 0) {
return r;
}
} catch (e) {}
return g._start.call({}, s);
};
}());
(function () {
var $D = Date;
/**
* @desc Converts the specified string value into its JavaScript Date equivalent using CultureInfo specific format information.
*
* Example
<pre><code>
///////////
// Dates //
///////////
// 15-Oct-2004
var d1 = Date.parse("10/15/2004");
// 15-Oct-2004
var d1 = Date.parse("15-Oct-2004");
// 15-Oct-2004
var d1 = Date.parse("2004.10.15");
//Fri Oct 15, 2004
var d1 = Date.parse("Fri Oct 15, 2004");
///////////
// Times //
///////////
// Today at 10 PM.
var d1 = Date.parse("10 PM");
// Today at 10:30 PM.
var d1 = Date.parse("10:30 P.M.");
// Today at 6 AM.
var d1 = Date.parse("06am");
/////////////////////
// Dates and Times //
/////////////////////
// 8-July-2004 @ 10:30 PM
var d1 = Date.parse("July 8th, 2004, 10:30 PM");
// 1-July-2004 @ 10:30 PM
var d1 = Date.parse("2004-07-01T22:30:00");
////////////////////
// Relative Dates //
////////////////////
// Returns today's date. The string "today" is culture specific.
var d1 = Date.parse("today");
// Returns yesterday's date. The string "yesterday" is culture specific.
var d1 = Date.parse("yesterday");
// Returns the date of the next thursday.
var d1 = Date.parse("Next thursday");
// Returns the date of the most previous monday.
var d1 = Date.parse("last monday");
// Returns today's day + one year.
var d1 = Date.parse("next year");
///////////////
// Date Math //
///////////////
// Today + 2 days
var d1 = Date.parse("t+2");
// Today + 2 days
var d1 = Date.parse("today + 2 days");
// Today + 3 months
var d1 = Date.parse("t+3m");
// Today - 1 year
var d1 = Date.parse("today - 1 year");
// Today - 1 year
var d1 = Date.parse("t-1y");
/////////////////////////////
// Partial Dates and Times //
/////////////////////////////
// July 15th of this year.
var d1 = Date.parse("July 15");
// 15th day of current day and year.
var d1 = Date.parse("15");
// July 1st of current year at 10pm.
var d1 = Date.parse("7/1 10pm");
</code></pre>
*
* @param {String} The string value to convert into a Date object [Required]
* @return {Date} A Date object or null if the string cannot be converted into a Date.
*/
var parseUtils = {
removeOrds: function (s) {
ords = s.match(/\b(\d+)(?:st|nd|rd|th)\b/); // find ordinal matches
s = ((ords && ords.length === 2) ? s.replace(ords[0], ords[1]) : s);
return s;
},
grammarParser: function (s) {
var r = null;
try {
r = $D.Grammar.start.call({}, s.replace(/^\s*(\S*(\s+\S+)*)\s*$/, "$1"));
} catch (e) {
return null;
}
return ((r[1].length === 0) ? r[0] : null);
},
nativeFallback: function(s) {
var t;
try {
// ok we haven't parsed it, last ditch attempt with the built-in parser.
t = Date._parse(s);
return (t || t === 0) ? new Date(t) : null;
} catch (e) {
return null;
}
}
};
function parse (s) {
var d;
if (!s) {
return null;
}
if (s instanceof Date) {
return s.clone();
}
if (s.length >= 4 && s.charAt(0) !== "0" && s.charAt(0) !== "+"&& s.charAt(0) !== "-") { // ie: 2004 will pass, 0800 won't.
// Start with specific formats
d = $D.Parsing.ISO.parse(s) || $D.Parsing.Numeric.parse(s);
}
if (d instanceof Date && !isNaN(d.getTime())) {
return d;
} else {
// find ordinal dates (1st, 3rd, 8th, etc and remove them as they cause parsing issues)
s = $D.Parsing.Normalizer.parse(parseUtils.removeOrds(s));
d = parseUtils.grammarParser(s);
if (d !== null) {
return d;
} else {
return parseUtils.nativeFallback(s);
}
}
}
if (!$D._parse) {
$D._parse = $D.parse;
}
$D.parse = parse;
Date.getParseFunction = function (fx) {
var fns = Date.Grammar.allformats(fx);
return function (s) {
var r = null;
for (var i = 0; i < fns.length; i++) {
try {
r = fns[i].call({}, s);
} catch (e) {
continue;
}
if (r[1].length === 0) {
return r[0];
}
}
return null;
};
};
/**
* Converts the specified string value into its JavaScript Date equivalent using the specified format {String} or formats {Array} and the CultureInfo specific format information.
* The format of the string value must match one of the supplied formats exactly.
*
* Example
<pre><code>
// 15-Oct-2004
var d1 = Date.parseExact("10/15/2004", "M/d/yyyy");
// 15-Oct-2004
var d1 = Date.parse("15-Oct-2004", "M-ddd-yyyy");
// 15-Oct-2004
var d1 = Date.parse("2004.10.15", "yyyy.MM.dd");
// Multiple formats
var d1 = Date.parseExact("10/15/2004", ["M/d/yyyy", "MMMM d, yyyy"]);
</code></pre>
*
* @param {String} The string value to convert into a Date object [Required].
* @param {Object} The expected format {String} or an array of expected formats {Array} of the date string [Required].
* @return {Date} A Date object or null if the string cannot be converted into a Date.
*/
$D.parseExact = function (s, fx) {
return $D.getParseFunction(fx)(s);
};
}());
(function () {
var $D = Date,
$P = $D.prototype,
// $C = $D.CultureInfo, // not used atm
p = function (s, l) {
if (!l) {
l = 2;
}
return ("000" + s).slice(l * -1);
};
/**
* Converts a PHP format string to Java/.NET format string.
* A PHP format string can be used with ._format or .format.
* A Java/.NET format string can be used with .toString().
* The .parseExact function will only accept a Java/.NET format string
*
* Example
* var f1 = "%m/%d/%y"
* var f2 = Date.normalizeFormat(f1); // "MM/dd/yy"
*
* new Date().format(f1); // "04/13/08"
* new Date()._format(f1); // "04/13/08"
* new Date().toString(f2); // "04/13/08"
*
* var date = Date.parseExact("04/13/08", f2); // Sun Apr 13 2008
*
* @param {String} A PHP format string consisting of one or more format spcifiers.
* @return {String} The PHP format converted to a Java/.NET format string.
*/
var normalizerSubstitutions = {
"d" : "dd",
"%d": "dd",
"D" : "ddd",
"%a": "ddd",
"j" : "dddd",
"l" : "dddd",
"%A": "dddd",
"S" : "S",
"F" : "MMMM",
"%B": "MMMM",
"m" : "MM",
"%m": "MM",
"M" : "MMM",
"%b": "MMM",
"%h": "MMM",
"n" : "M",
"Y" : "yyyy",
"%Y": "yyyy",
"y" : "yy",
"%y": "yy",
"g" : "h",
"%I": "h",
"G" : "H",
"h" : "hh",
"H" : "HH",
"%H": "HH",
"i" : "mm",
"%M": "mm",
"s" : "ss",
"%S": "ss",
"%r": "hh:mm tt",
"%R": "H:mm",
"%T": "H:mm:ss",
"%X": "t",
"%x": "d",
"%e": "d",
"%D": "MM/dd/yy",
"%n": "\\n",
"%t": "\\t",
"e" : "z",
"T" : "z",
"%z": "z",
"%Z": "z",
"Z" : "ZZ",
"N" : "u",
"w" : "u",
"%w": "u",
"W" : "W",
"%V": "W"
};
var normalizer = {
substitutes: function (m) {
return normalizerSubstitutions[m];
},
interpreted: function (m, x) {
var y;
switch (m) {
case "%u":
return x.getDay() + 1;
case "z":
return x.getOrdinalNumber();
case "%j":
return p(x.getOrdinalNumber(), 3);
case "%U":
var d1 = x.clone().set({month: 0, day: 1}).addDays(-1).moveToDayOfWeek(0),
d2 = x.clone().addDays(1).moveToDayOfWeek(0, -1);
return (d2 < d1) ? "00" : p((d2.getOrdinalNumber() - d1.getOrdinalNumber()) / 7 + 1);
case "%W":
return p(x.getWeek());
case "t":
return $D.getDaysInMonth(x.getFullYear(), x.getMonth());
case "o":
case "%G":
return x.setWeek(x.getISOWeek()).toString("yyyy");
case "%g":
return x._format("%G").slice(-2);
case "a":
case "%p":
return t("tt").toLowerCase();
case "A":
return t("tt").toUpperCase();
case "u":
return p(x.getMilliseconds(), 3);
case "I":
return (x.isDaylightSavingTime()) ? 1 : 0;
case "O":
return x.getUTCOffset();
case "P":
y = x.getUTCOffset();
return y.substring(0, y.length - 2) + ":" + y.substring(y.length - 2);
case "B":
var now = new Date();
return Math.floor(((now.getHours() * 3600) + (now.getMinutes() * 60) + now.getSeconds() + (now.getTimezoneOffset() + 60) * 60) / 86.4);
case "c":
return x.toISOString().replace(/\"/g, "");
case "U":
return $D.strtotime("now");
case "%c":
return t("d") + " " + t("t");
case "%C":
return Math.floor(x.getFullYear() / 100 + 1);
}
},
shouldOverrideDefaults: function (m) {
switch (m) {
case "%e":
return true;
default:
return false;
}
},
parse: function (m, context) {
var formatString, c = context || new Date();
formatString = normalizer.substitutes(m);
if (formatString) {
return formatString;
}
formatString = normalizer.interpreted(m, c);
if (formatString) {
return formatString;
} else {
return m;
}
}
};
$D.normalizeFormat = function (format, context) {
return format.replace(/(%|\\)?.|%%/g, function(t){
return normalizer.parse(t, context);
});
};
/**
* Format a local Unix timestamp according to locale settings
*
* Example:
* Date.strftime("%m/%d/%y", new Date()); // "04/13/08"
* Date.strftime("c", "2008-04-13T17:52:03Z"); // "04/13/08"
*
* @param {String} A format string consisting of one or more format spcifiers [Optional].
* @param {Number|String} The number representing the number of seconds that have elapsed since January 1, 1970 (local time).
* @return {String} A string representation of the current Date object.
*/
$D.strftime = function (format, time) {
var d = Date.parse(time);
return d._format(format);
};
/**
* Parse any textual datetime description into a Unix timestamp.
* A Unix timestamp is the number of seconds that have elapsed since January 1, 1970 (midnight UTC/GMT).
*
* Example:
* Date.strtotime("04/13/08"); // 1208044800
* Date.strtotime("1970-01-01T00:00:00Z"); // 0
*
* @param {String} A format string consisting of one or more format spcifiers [Optional].
* @param {Object} A string or date object.
* @return {String} A string representation of the current Date object.
*/
$D.strtotime = function (time) {
var d = $D.parse(time);
return Math.round($D.UTC(d.getUTCFullYear(), d.getUTCMonth(), d.getUTCDate(), d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(), d.getUTCMilliseconds()) / 1000);
};
/**
* Converts the value of the current Date object to its equivalent string representation using a PHP/Unix style of date format specifiers.
* Format Specifiers
* Format Description Example
* ------ --------------------------------------------------------------------------- -----------------------
* %a abbreviated weekday name according to the current localed "Mon" through "Sun"
* %A full weekday name according to the current localed "Sunday" through "Saturday"
* %b abbreviated month name according to the current localed "Jan" through "Dec"
* %B full month name according to the current locale "January" through "December"
* %c preferred date and time representation for the current locale "4/13/2008 12:33 PM"
* %C century number (the year divided by 100 and truncated to an integer) "00" to "99"
* %d day of the month as a decimal number "01" to "31"
* %D same as %m/%d/%y "04/13/08"
* %e day of the month as a decimal number, a single digit is preceded by a space "1" to "31"
* %g like %G, but without the century "08"
* %G The 4-digit year corresponding to the ISO week number (see %V). "2008"
* This has the same format and value as %Y, except that if the ISO week number
* belongs to the previous or next year, that year is used instead.
* %h same as %b "Jan" through "Dec"
* %H hour as a decimal number using a 24-hour clock. "00" to "23"
* %I hour as a decimal number using a 12-hour clock. "01" to "12"
* %j day of the year as a decimal number. "001" to "366"
* %m month as a decimal number. "01" to "12"
* %M minute as a decimal number. "00" to "59"
* %n newline character "\n"
* %p either "am" or "pm" according to the given time value, or the "am" or "pm"
* corresponding strings for the current locale.
* %r time in a.m. and p.m. notation "8:44 PM"
* %R time in 24 hour notation "20:44"
* %S second as a decimal number "00" to "59"
* %t tab character "\t"
* %T current time, equal to %H:%M:%S "12:49:11"
* %u weekday as a decimal number ["1", "7"], with "1" representing Monday "1" to "7"
* %U week number of the current year as a decimal number, starting with the "0" to ("52" or "53")
* first Sunday as the first day of the first week
* %V The ISO 8601:1988 week number of the current year as a decimal number, "00" to ("52" or "53")
* range 01 to 53, where week 1 is the first week that has at least 4 days
* in the current year, and with Monday as the first day of the week.
* (Use %G or %g for the year component that corresponds to the week number
* for the specified timestamp.)
* %W week number of the current year as a decimal number, starting with the "00" to ("52" or "53")
* first Monday as the first day of the first week
* %w day of the week as a decimal, Sunday being "0" "0" to "6"
* %x preferred date representation for the current locale without the time "4/13/2008"
* %X preferred time representation for the current locale without the date "12:53:05"
* %y year as a decimal number without a century "00" "99"
* %Y year as a decimal number including the century "2008"
* %Z time zone or name or abbreviation "UTC", "EST", "PST"
* %z same as %Z
* %% a literal "%" characters "%"
* d Day of the month, 2 digits with leading zeros "01" to "31"
* D A textual representation of a day, three letters "Mon" through "Sun"
* j Day of the month without leading zeros "1" to "31"
* l A full textual representation of the day of the week (lowercase "L") "Sunday" through "Saturday"
* N ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) "1" (for Monday) through "7" (for Sunday)
* S English ordinal suffix for the day of the month, 2 characters "st", "nd", "rd" or "th". Works well with j
* w Numeric representation of the day of the week "0" (for Sunday) through "6" (for Saturday)
* z The day of the year (starting from "0") "0" through "365"
* W ISO-8601 week number of year, weeks starting on Monday "00" to ("52" or "53")
* F A full textual representation of a month, such as January or March "January" through "December"
* m Numeric representation of a month, with leading zeros "01" through "12"
* M A short textual representation of a month, three letters "Jan" through "Dec"
* n Numeric representation of a month, without leading zeros "1" through "12"
* t Number of days in the given month "28" through "31"
* L Whether it's a leap year "1" if it is a leap year, "0" otherwise
* o ISO-8601 year number. This has the same value as Y, except that if the "2008"
* ISO week number (W) belongs to the previous or next year, that year
* is used instead.
* Y A full numeric representation of a year, 4 digits "2008"
* y A two digit representation of a year "08"
* a Lowercase Ante meridiem and Post meridiem "am" or "pm"
* A Uppercase Ante meridiem and Post meridiem "AM" or "PM"
* B Swatch Internet time "000" through "999"
* g 12-hour format of an hour without leading zeros "1" through "12"
* G 24-hour format of an hour without leading zeros "0" through "23"
* h 12-hour format of an hour with leading zeros "01" through "12"
* H 24-hour format of an hour with leading zeros "00" through "23"
* i Minutes with leading zeros "00" to "59"
* s Seconds, with leading zeros "00" through "59"
* u Milliseconds "54321"
* e Timezone identifier "UTC", "EST", "PST"
* I Whether or not the date is in daylight saving time (uppercase i) "1" if Daylight Saving Time, "0" otherwise
* O Difference to Greenwich time (GMT) in hours "+0200", "-0600"
* P Difference to Greenwich time (GMT) with colon between hours and minutes "+02:00", "-06:00"
* T Timezone abbreviation "UTC", "EST", "PST"
* Z Timezone offset in seconds. The offset for timezones west of UTC is "-43200" through "50400"
* always negative, and for those east of UTC is always positive.
* c ISO 8601 date "2004-02-12T15:19:21+00:00"
* r RFC 2822 formatted date "Thu, 21 Dec 2000 16:01:07 +0200"
* U Seconds since the Unix Epoch (January 1 1970 00:00:00 GMT) "0"
* @param {String} A format string consisting of one or more format spcifiers [Optional].
* @return {String} A string representation of the current Date object.
*/
var formatReplace = function (context) {
return function (m) {
var formatString, override = false;
if (m.charAt(0) === "\\" || m.substring(0, 2) === "%%") {
return m.replace("\\", "").replace("%%", "%");
}
override = normalizer.shouldOverrideDefaults(m);
formatString = $D.normalizeFormat(m, context);
if (formatString) {
return context.toString(formatString, override);
}
};
};
$P._format = function (format) {
var formatter = formatReplace(this);
if (!format) {
return this._toString();
} else {
return format.replace(/(%|\\)?.|%%/g, formatter);
}
};
if (!$P.format) {
$P.format = $P._format;
}
}());
(function () {
"use strict";
var gFn = function (attr) {
return function () {
return this[attr];
};
};
var sFn = function (attr) {
return function (val) {
this[attr] = val;
return this;
};
};
var attrs = ["years", "months", "days", "hours", "minutes", "seconds", "milliseconds"];
var addSetFuncs = function (context, attrs) {
for (var i = 0; i < attrs.length ; i++) {
var $a = attrs[i], $b = $a.slice(0, 1).toUpperCase() + $a.slice(1);
context.prototype[$a] = 0;
context.prototype["get" + $b] = gFn($a);
context.prototype["set" + $b] = sFn($a);
}
};
/**
* new TimeSpan(milliseconds);
* new TimeSpan(days, hours, minutes, seconds);
* new TimeSpan(days, hours, minutes, seconds, milliseconds);
*/
var TimeSpan = function (days, hours, minutes, seconds, milliseconds) {
if (arguments.length === 1 && typeof days === "number") {
var orient = (days < 0) ? -1 : +1;
var millsLeft = Math.abs(days);
this.setDays(Math.floor(millsLeft / 86400000) * orient);
millsLeft = millsLeft % 86400000;
this.setHours(Math.floor(millsLeft / 3600000) * orient);
millsLeft = millsLeft % 3600000;
this.setMinutes(Math.floor(millsLeft / 60000) * orient);
millsLeft = millsLeft % 60000;
this.setSeconds(Math.floor(millsLeft / 1000) * orient);
millsLeft = millsLeft % 1000;
this.setMilliseconds(millsLeft * orient);
} else {
this.set(days, hours, minutes, seconds, milliseconds);
}
this.getTotalMilliseconds = function () {
return (this.getDays() * 86400000) +
(this.getHours() * 3600000) +
(this.getMinutes() * 60000) +
(this.getSeconds() * 1000);
};
this.compareTo = function (time) {
var t1 = new Date(1970, 1, 1, this.getHours(), this.getMinutes(), this.getSeconds()), t2;
if (time === null) {
t2 = new Date(1970, 1, 1, 0, 0, 0);
}
else {
t2 = new Date(1970, 1, 1, time.getHours(), time.getMinutes(), time.getSeconds());
}
return (t1 < t2) ? -1 : (t1 > t2) ? 1 : 0;
};
this.equals = function (time) {
return (this.compareTo(time) === 0);
};
this.add = function (time) {
return (time === null) ? this : this.addSeconds(time.getTotalMilliseconds() / 1000);
};
this.subtract = function (time) {
return (time === null) ? this : this.addSeconds(-time.getTotalMilliseconds() / 1000);
};
this.addDays = function (n) {
return new TimeSpan(this.getTotalMilliseconds() + (n * 86400000));
};
this.addHours = function (n) {
return new TimeSpan(this.getTotalMilliseconds() + (n * 3600000));
};
this.addMinutes = function (n) {
return new TimeSpan(this.getTotalMilliseconds() + (n * 60000));
};
this.addSeconds = function (n) {
return new TimeSpan(this.getTotalMilliseconds() + (n * 1000));
};
this.addMilliseconds = function (n) {
return new TimeSpan(this.getTotalMilliseconds() + n);
};
this.get12HourHour = function () {
return (this.getHours() > 12) ? this.getHours() - 12 : (this.getHours() === 0) ? 12 : this.getHours();
};
this.getDesignator = function () {
return (this.getHours() < 12) ? Date.CultureInfo.amDesignator : Date.CultureInfo.pmDesignator;
};
this.toString = function (format) {
this._toString = function () {
if (this.getDays() !== null && this.getDays() > 0) {
return this.getDays() + "." + this.getHours() + ":" + this.p(this.getMinutes()) + ":" + this.p(this.getSeconds());
} else {
return this.getHours() + ":" + this.p(this.getMinutes()) + ":" + this.p(this.getSeconds());
}
};
this.p = function (s) {
return (s.toString().length < 2) ? "0" + s : s;
};
var me = this;
return format ? format.replace(/dd?|HH?|hh?|mm?|ss?|tt?/g,
function (format) {
switch (format) {
case "d":
return me.getDays();
case "dd":
return me.p(me.getDays());
case "H":
return me.getHours();
case "HH":
return me.p(me.getHours());
case "h":
return me.get12HourHour();
case "hh":
return me.p(me.get12HourHour());
case "m":
return me.getMinutes();
case "mm":
return me.p(me.getMinutes());
case "s":
return me.getSeconds();
case "ss":
return me.p(me.getSeconds());
case "t":
return ((me.getHours() < 12) ? Date.CultureInfo.amDesignator : Date.CultureInfo.pmDesignator).substring(0, 1);
case "tt":
return (me.getHours() < 12) ? Date.CultureInfo.amDesignator : Date.CultureInfo.pmDesignator;
}
}
) : this._toString();
};
return this;
};
addSetFuncs(TimeSpan, attrs.slice(2));
TimeSpan.prototype.set = function (days, hours, minutes, seconds, milliseconds){
this.setDays(days || this.getDays());
this.setHours(hours || this.getHours());
this.setMinutes(minutes || this.getMinutes());
this.setSeconds(seconds || this.getSeconds());
this.setMilliseconds(milliseconds || this.getMilliseconds());
};
/**
* Gets the time of day for this date instances.
* @return {TimeSpan} TimeSpan
*/
Date.prototype.getTimeOfDay = function () {
return new TimeSpan(0, this.getHours(), this.getMinutes(), this.getSeconds(), this.getMilliseconds());
};
Date.TimeSpan = TimeSpan;
if (typeof window !== "undefined" ) {
// keeping API compatible for v1.x
window.TimeSpan = TimeSpan;
}
}());
(function () {
"use strict";
var attrs = ["years", "months", "days", "hours", "minutes", "seconds", "milliseconds"];
var gFn = function (attr) {
return function () {
return this[attr];
};
};
var sFn = function (attr) {
return function (val) {
this[attr] = val;
return this;
};
};
var addSetFuncs = function (context, attrs) {
for (var i = 0; i < attrs.length ; i++) {
var $a = attrs[i], $b = $a.slice(0, 1).toUpperCase() + $a.slice(1);
context.prototype[$a] = 0;
context.prototype["get" + $b] = gFn($a);
context.prototype["set" + $b] = sFn($a);
}
};
var setMonthsAndYears = function (orient, d1, d2, context) {
function inc() {
d1.addMonths(-orient);
context.months++;
if (context.months === 12) {
context.years++;
context.months = 0;
}
}
if (orient === +1) {
while (d1 > d2) {
inc();
}
} else {
while (d1 < d2) {
inc();
}
}
context.months--;
context.months *= orient;
context.years *= orient;
};
var adjustForDST = function(orient, startDate, endDate) {
var hasDSTMismatch = (false === (startDate.isDaylightSavingTime() === endDate.isDaylightSavingTime()));
if (hasDSTMismatch && orient === 1) {
startDate.addHours(-1);
} else if (hasDSTMismatch) {
startDate.addHours(1);
}
};
/**
* TimePeriod(startDate, endDate);
* TimePeriod(years, months, days, hours, minutes, seconds, milliseconds);
*/
var TimePeriod = function (years, months, days, hours, minutes, seconds, milliseconds) {
if (arguments.length === 7) {
this.set(years, months, days, hours, minutes, seconds, milliseconds);
} else if (arguments.length === 2 && arguments[0] instanceof Date && arguments[1] instanceof Date) {
var startDate = arguments[0].clone();
var endDate = arguments[1].clone();
var orient = (startDate > endDate) ? +1 : -1;
this.dates = {
start: arguments[0].clone(),
end: arguments[1].clone()
};
setMonthsAndYears(orient, startDate, endDate, this);
adjustForDST(orient, startDate, endDate);
// // TODO - adjust for DST
var diff = endDate - startDate;
if (diff !== 0) {
var ts = new TimeSpan(diff);
this.set(this.years, this.months, ts.getDays(), ts.getHours(), ts.getMinutes(), ts.getSeconds(), ts.getMilliseconds());
}
}
return this;
};
// create all the set functions.
addSetFuncs(TimePeriod, attrs);
TimePeriod.prototype.set = function (years, months, days, hours, minutes, seconds, milliseconds){
this.setYears(years || this.getYears());
this.setMonths(months || this.getMonths());
this.setDays(days || this.getDays());
this.setHours(hours || this.getHours());
this.setMinutes(minutes || this.getMinutes());
this.setSeconds(seconds || this.getSeconds());
this.setMilliseconds(milliseconds || this.getMilliseconds());
};
Date.TimePeriod = TimePeriod;
if (typeof window !== "undefined") {
// keeping API compatible for v1.x
window.TimePeriod = TimePeriod;
}
}()); | |
PyColorize.py | # -*- coding: utf-8 -*-
"""
Class and program to colorize python source code for ANSI terminals.
Based on an HTML code highlighter by Jurgen Hermann found at:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52298
Modifications by Fernando Perez ([email protected]).
Information on the original HTML highlighter follows:
MoinMoin - Python Source Parser
Title: Colorize Python source using the built-in tokenizer
Submitter: Jurgen Hermann
Last Updated: 2001/04/06
Version no: 1.2
Description:
This code is part of MoinMoin (http://moin.sourceforge.net/) and converts
Python source code to HTML markup, rendering comments, keywords,
operators, numeric and string literals in different colors.
It shows how to use the built-in keyword, token and tokenize modules to
scan Python source code and re-emit it with no changes to its original
formatting (which is the hard part).
"""
from __future__ import print_function
from __future__ import unicode_literals
__all__ = ['ANSICodeColors','Parser']
# Default color scheme name used when none is requested explicitly.
_scheme_default = 'Linux'
# Imports
import StringIO
import keyword
import os
import optparse
import sys
import token
import tokenize
try:
    generate_tokens = tokenize.generate_tokens
except AttributeError:
    # Python 3. Note that we use the undocumented _tokenize because it expects
    # strings, not bytes. See also Python issue #9969.
    generate_tokens = tokenize._tokenize
from IPython.utils.coloransi import *
#############################################################################
### Python Source Parser (does Hilighting)
#############################################################################
# Synthetic token types (beyond the stdlib token module's range) used to
# color keywords and plain text distinctly from NAME tokens.
_KEYWORD = token.NT_OFFSET + 1
_TEXT = token.NT_OFFSET + 2
#****************************************************************************
# Builtin color schemes
Colors = TermColors # just a shorthand
# Build a few color schemes
# Each scheme maps token types to ANSI colors; 'normal' is the reset color
# emitted after every token.
NoColor = ColorScheme(
    'NoColor',{
    token.NUMBER : Colors.NoColor,
    token.OP : Colors.NoColor,
    token.STRING : Colors.NoColor,
    tokenize.COMMENT : Colors.NoColor,
    token.NAME : Colors.NoColor,
    token.ERRORTOKEN : Colors.NoColor,
    _KEYWORD : Colors.NoColor,
    _TEXT : Colors.NoColor,
    'normal' : Colors.NoColor # color off (usu. Colors.Normal)
    } )
LinuxColors = ColorScheme(
    'Linux',{
    token.NUMBER : Colors.LightCyan,
    token.OP : Colors.Yellow,
    token.STRING : Colors.LightBlue,
    tokenize.COMMENT : Colors.LightRed,
    token.NAME : Colors.Normal,
    token.ERRORTOKEN : Colors.Red,
    _KEYWORD : Colors.LightGreen,
    _TEXT : Colors.Yellow,
    'normal' : Colors.Normal # color off (usu. Colors.Normal)
    } )
LightBGColors = ColorScheme(
    'LightBG',{
    token.NUMBER : Colors.Cyan,
    token.OP : Colors.Blue,
    token.STRING : Colors.Blue,
    tokenize.COMMENT : Colors.Red,
    token.NAME : Colors.Normal,
    token.ERRORTOKEN : Colors.Red,
    _KEYWORD : Colors.Green,
    _TEXT : Colors.Blue,
    'normal' : Colors.Normal # color off (usu. Colors.Normal)
    } )
# Build table of color schemes (needed by the parser)
ANSICodeColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors],
                                  _scheme_default)
class Parser:
    """ Format colored Python source.
    """

    def __init__(self, color_table=None,out = sys.stdout):
        """ Create a parser with a specified color table and output channel.

        Call format() to process code.
        """
        self.color_table = color_table and color_table or ANSICodeColors
        self.out = out

    def format(self, raw, out = None, scheme = ''):
        """Colorize `raw` source; thin wrapper around format2 that drops the
        error flag and returns only the formatted text (or None)."""
        return self.format2(raw, out, scheme)[0]

    def format2(self, raw, out = None, scheme = ''):
        """ Parse and send the colored source.

        If out and scheme are not specified, the defaults (given to
        constructor) are used.

        out should be a file-type object. Optionally, out can be given as the
        string 'str' and the parser will automatically return the output in a
        string.

        Returns a (output, error) pair: `output` is the colorized string when
        string output was requested (None otherwise) and `error` is True when
        tokenization failed partway through.
        """
        string_output = 0
        if out == 'str' or self.out == 'str' or \
           isinstance(self.out,StringIO.StringIO):
            # XXX - I don't really like this state handling logic, but at this
            # point I don't want to make major changes, so adding the
            # isinstance() check is the simplest I can do to ensure correct
            # behavior.
            out_old = self.out
            self.out = StringIO.StringIO()
            string_output = 1
        elif out is not None:
            self.out = out
        # Fast return of the unmodified input for NoColor scheme
        if scheme == 'NoColor':
            error = False
            self.out.write(raw)
            if string_output:
                return raw,error
            else:
                return None,error
        # local shorthands
        colors = self.color_table[scheme].colors
        self.colors = colors # put in object so __call__ sees it
        # Remove trailing whitespace and normalize tabs
        self.raw = raw.expandtabs().rstrip()
        # store line offsets in self.lines
        self.lines = [0, 0]
        pos = 0
        raw_find = self.raw.find
        lines_append = self.lines.append
        while 1:
            pos = raw_find('\n', pos) + 1
            if not pos: break
            lines_append(pos)
        lines_append(len(self.raw))
        # parse the source and write it
        self.pos = 0
        text = StringIO.StringIO(self.raw)
        error = False
        try:
            for atoken in generate_tokens(text.readline):
                self(*atoken)
        except tokenize.TokenError as ex:
            msg = ex.args[0]
            line = ex.args[1][0]
            self.out.write("%s\n\n*** ERROR: %s%s%s\n" %
                           (colors[token.ERRORTOKEN],
                            msg, self.raw[self.lines[line]:],
                            colors.normal)
                           )
            error = True
        self.out.write(colors.normal+'\n')
        if string_output:
            output = self.out.getvalue()
            self.out = out_old
            return (output, error)
        return (None, error)

    def __call__(self, toktype, toktext, start_pos, end_pos, line):
        """ Token handler, with syntax highlighting."""
        (srow,scol) = start_pos
        (erow,ecol) = end_pos
        colors = self.colors
        owrite = self.out.write
        # line separator, so this works across platforms
        linesep = os.linesep
        # calculate new positions
        oldpos = self.pos
        newpos = self.lines[srow] + scol
        self.pos = newpos + len(toktext)
        # send the original whitespace, if needed
        if newpos > oldpos:
            owrite(self.raw[oldpos:newpos])
        # skip indenting tokens
        if toktype in [token.INDENT, token.DEDENT]:
            # FIX: restore the body of this guard (it was truncated to an
            # empty suite, a syntax error): INDENT/DEDENT carry no text, so
            # rewind the position and emit nothing for them.
            self.pos = newpos
            return
        # map token type to a color group
        if token.LPAR <= toktype and toktype <= token.OP:
            toktype = token.OP
        elif toktype == token.NAME and keyword.iskeyword(toktext):
            toktype = _KEYWORD
        color = colors.get(toktype, colors[_TEXT])
        #print '<%s>' % toktext, # dbg
        # Triple quoted strings must be handled carefully so that backtracking
        # in pagers works correctly. We need color terminators on _each_ line.
        if linesep in toktext:
            toktext = toktext.replace(linesep, '%s%s%s' %
                                      (colors.normal,linesep,color))
        # send text
        owrite('%s%s%s' % (color,toktext,colors.normal))
def main(argv=None):
"""Run as a command-line script: colorize a python file or stdin using ANSI
color escapes and print to stdout.
Inputs:
- argv(None): a list of strings like sys.argv[1:] giving the command-line
arguments. If None, use sys.argv[1:].
"""
usage_msg = """%prog [options] [filename]
Colorize a python file or stdin using ANSI color escapes and print to stdout.
If no filename is given, or if filename is -, read standard input."""
parser = optparse.OptionParser(usage=usage_msg)
newopt = parser.add_option
newopt('-s','--scheme',metavar='NAME',dest='scheme_name',action='store',
choices=['Linux','LightBG','NoColor'],default=_scheme_default,
help="give the color scheme to use. Currently only 'Linux'\
(default) and 'LightBG' and 'NoColor' are implemented (give without\
quotes)")
opts,args = parser.parse_args(argv)
if len(args) > 1:
parser.error("you must give at most one filename.")
if len(args) == 0:
fname = '-' # no filename given; setup to read from stdin
else:
fname = args[0]
if fname == '-':
stream = sys.stdin
else:
try:
stream = open(fname)
except IOError as msg:
print(msg, file=sys.stderr)
sys.exit(1)
parser = Parser()
# we need nested try blocks because pre-2.5 python doesn't support unified
# try-except-finally
try:
try:
# write colorized version to stdout
parser.format(stream.read(),scheme=opts.scheme_name)
except IOError as msg:
# if user reads through a pager and quits, don't print traceback
if msg.args != (32,'Broken pipe'):
raise
finally:
if stream is not sys.stdin:
stream.close() # in case a non-handled exception happened above
if __name__ == "__main__":
main()
| self.pos = newpos
return |
service.go | package password
import (
"github.com/go-pg/pg/v9"
"github.com/go-pg/pg/v9/orm"
"github.com/labstack/echo"
gorsk "github.com/soldevx/androbk"
"github.com/soldevx/androbk/pkg/api/password/platform/pgsql"
)
| }
// New creates new password application service
func New(db *pg.DB, udb UserDB, rbac RBAC, sec Securer) Password {
return Password{
db: db,
udb: udb,
rbac: rbac,
sec: sec,
}
}
// Initialize initalizes password application service with defaults
func Initialize(db *pg.DB, rbac RBAC, sec Securer) Password {
return New(db, pgsql.User{}, rbac, sec)
}
// Password represents password application service
type Password struct {
db *pg.DB
udb UserDB
rbac RBAC
sec Securer
}
// UserDB represents user repository interface
type UserDB interface {
View(orm.DB, int) (gorsk.User, error)
Update(orm.DB, gorsk.User) error
}
// Securer represents security interface
type Securer interface {
Hash(string) string
HashMatchesPassword(string, string) bool
Password(string, ...string) bool
}
// RBAC represents role-based-access-control interface
type RBAC interface {
EnforceUser(echo.Context, int) error
} | // Service represents password application interface
type Service interface {
Change(echo.Context, int, string, string) error |
mod.rs | // Copyright 2022 Cartesi Pte. Ltd.
//
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
pub mod complete;
pub mod pristine;
pub mod proof;
use snafu::Snafu;
use crate::hash::{Digest, Hash, Hasher};
#[derive(Debug, Snafu, PartialEq)]
pub enum Error {
#[snafu(display("log2_target_size is greater than log2_root_size"))]
TargetSizeGreaterThanRootSize,
#[snafu(display("log2_leaf_size is greater than log2_root_size"))]
LeafSizeGreaterThanRootSize,
#[snafu(display("log2_word_size is greater than log2_leaf_size"))]
WordSizeGreaterThanLeafSize,
#[snafu(display("log2_word_size is greater than log2_root_size"))]
WordSizeGreaterThanRootSize,
#[snafu(display("tree is too large for address type"))]
TreeTooLarge,
#[snafu(display("tree is full"))]
TreeIsFull,
#[snafu(display("too many leaves"))]
TooManyLeaves,
#[snafu(display("log2_size is out of range"))]
SizeOutOfRange,
#[snafu(display("address is misaligned"))]
MisalignedAddress,
}
fn get_concat_hash(hasher: &mut Hasher, left: &Hash, right: &Hash) -> Hash | {
hasher.reset();
hasher.update(left.data());
hasher.update(right.data());
hasher.finalize_reset().into()
} |
|
inputfile.rs | use std::fs::File;
use std::io::{self, BufRead, BufReader};
use content_inspector::{self, ContentType};
use crate::errors::*;
pub struct | <'a> {
inner: Box<dyn BufRead + 'a>,
pub first_line: Vec<u8>,
pub content_type: ContentType,
}
impl<'a> InputFileReader<'a> {
fn new<R: BufRead + 'a>(mut reader: R) -> InputFileReader<'a> {
let mut first_line = vec![];
reader.read_until(b'\n', &mut first_line).ok();
let content_type = content_inspector::inspect(&first_line[..]);
if content_type == ContentType::UTF_16LE {
reader.read_until(0x00, &mut first_line).ok();
}
InputFileReader {
inner: Box::new(reader),
first_line,
content_type,
}
}
pub fn read_line(&mut self, buf: &mut Vec<u8>) -> io::Result<bool> {
if self.first_line.is_empty() {
let res = self.inner.read_until(b'\n', buf).map(|size| size > 0)?;
if self.content_type == ContentType::UTF_16LE {
self.inner.read_until(0x00, buf).ok();
}
Ok(res)
} else {
buf.append(&mut self.first_line);
Ok(true)
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum InputFile {
StdIn,
Ordinary(String),
String(String),
}
impl InputFile {
pub fn get_reader(&self) -> Result<InputFileReader> {
match self {
InputFile::Ordinary(filename) => {
let file = File::open(filename)?;
if file.metadata()?.is_dir() {
return Err(format!("'{}' is a directory.", filename).into());
}
Ok(InputFileReader::new(BufReader::new(file)))
}
InputFile::String(s) => Ok(InputFileReader::new(s.as_bytes())),
_ => unimplemented!(), // Used to be InputFile::Stdin
}
}
}
#[test]
fn basic() {
let content = b"#!/bin/bash\necho hello";
let mut reader = InputFileReader::new(&content[..]);
assert_eq!(b"#!/bin/bash\n", &reader.first_line[..]);
let mut buffer = vec![];
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(true, res.unwrap());
assert_eq!(b"#!/bin/bash\n", &buffer[..]);
buffer.clear();
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(true, res.unwrap());
assert_eq!(b"echo hello", &buffer[..]);
buffer.clear();
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(false, res.unwrap());
assert!(buffer.is_empty());
}
#[test]
fn utf16le() {
let content = b"\xFF\xFE\x73\x00\x0A\x00\x64\x00";
let mut reader = InputFileReader::new(&content[..]);
assert_eq!(b"\xFF\xFE\x73\x00\x0A\x00", &reader.first_line[..]);
let mut buffer = vec![];
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(true, res.unwrap());
assert_eq!(b"\xFF\xFE\x73\x00\x0A\x00", &buffer[..]);
buffer.clear();
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(true, res.unwrap());
assert_eq!(b"\x64\x00", &buffer[..]);
buffer.clear();
let res = reader.read_line(&mut buffer);
assert!(res.is_ok());
assert_eq!(false, res.unwrap());
assert!(buffer.is_empty());
}
| InputFileReader |
tcp_relay.go | package relay
import (
"github.com/SUCHMOKUO/falcon-tun/dns"
"github.com/SUCHMOKUO/falcon-tun/nat"
"io"
"log"
"net"
"strconv"
)
// TCPConnHandler is the type alias of connection handler function.
type TCPConnHandler = func(host, port string, conn io.ReadWriteCloser)
// TCPRelay represent a relay for tcp connection.
type TCPRelay struct {
NAT *nat.NAT4
Addr *net.TCPAddr
HandleConn TCPConnHandler
}
// NewTCPRelay return a new instance of TCPRelay.
func | (addr *net.TCPAddr) *TCPRelay {
relay := new(TCPRelay)
relay.NAT = nat.New()
relay.Addr = addr
return relay
}
// Run create a new tcp relay.
func (tr *TCPRelay) Run() {
l, err := net.ListenTCP("tcp4", tr.Addr)
if err != nil {
log.Fatalln("TCP relay error:", err)
}
for {
conn, err := l.Accept()
if err != nil {
continue
}
host, port := tr.getTargetInfo(conn)
tcpRelayConn := &TCPRelayConn{
relay: tr,
Conn: conn,
}
go tr.HandleConn(host, port, tcpRelayConn)
}
}
func (tr *TCPRelay) getTargetInfo(conn net.Conn) (domain, port string) {
addr := conn.RemoteAddr().String()
ipStr, natPortStr, err := net.SplitHostPort(addr)
if err != nil {
log.Fatalln("Get remote address error:", err)
}
natPort, err := strconv.Atoi(natPortStr)
if err != nil {
log.Fatalln("Get remote address error:", err)
}
// get target port.
_, _, dstPort := tr.NAT.GetRecord(uint16(natPort))
port = strconv.Itoa(int(dstPort))
// get target domain.
domain, err = dns.GetDomain(ipStr)
if err != nil {
log.Fatalln("Get remote address error:", err)
}
return
} | NewTCPRelay |
impl.go | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apollo
import (
"fmt"
"regexp"
"strings"
"sync"
)
import (
"github.com/pkg/errors"
"github.com/zouyx/agollo"
)
import (
"github.com/apache/dubbo-go/common"
"github.com/apache/dubbo-go/common/constant"
. "github.com/apache/dubbo-go/config_center"
"github.com/apache/dubbo-go/config_center/parser"
"github.com/apache/dubbo-go/remoting"
)
const (
apolloProtocolPrefix = "http://"
apolloConfigFormat = "%s.%s"
)
type apolloConfiguration struct {
url *common.URL
listeners sync.Map
appConf *agollo.AppConfig
parser parser.ConfigurationParser
}
func | (url *common.URL) (*apolloConfiguration, error) {
c := &apolloConfiguration{
url: url,
}
configAddr := c.getAddressWithProtocolPrefix(url)
configCluster := url.GetParam(constant.CONFIG_CLUSTER_KEY, "")
appId := url.GetParam(constant.CONFIG_APP_ID_KEY, "")
namespaces := getProperties(url.GetParam(constant.CONFIG_NAMESPACE_KEY, DEFAULT_GROUP))
c.appConf = &agollo.AppConfig{
AppId: appId,
Cluster: configCluster,
NamespaceName: namespaces,
Ip: configAddr,
}
agollo.InitCustomConfig(func() (*agollo.AppConfig, error) {
return c.appConf, nil
})
return c, agollo.Start()
}
func getChangeType(change agollo.ConfigChangeType) remoting.EventType {
switch change {
case agollo.ADDED:
return remoting.EventTypeAdd
case agollo.DELETED:
return remoting.EventTypeDel
default:
return remoting.EventTypeUpdate
}
}
func (c *apolloConfiguration) AddListener(key string, listener ConfigurationListener, opts ...Option) {
k := &Options{}
for _, opt := range opts {
opt(k)
}
key = k.Group + key
l, _ := c.listeners.LoadOrStore(key, NewApolloListener())
l.(*apolloListener).AddListener(listener)
}
func (c *apolloConfiguration) RemoveListener(key string, listener ConfigurationListener, opts ...Option) {
k := &Options{}
for _, opt := range opts {
opt(k)
}
key = k.Group + key
l, ok := c.listeners.Load(key)
if ok {
l.(*apolloListener).RemoveListener(listener)
}
}
func getProperties(namespace string) string {
return getNamespaceName(namespace, agollo.Properties)
}
func getNamespaceName(namespace string, configFileFormat agollo.ConfigFileFormat) string {
return fmt.Sprintf(apolloConfigFormat, namespace, configFileFormat)
}
func (c *apolloConfiguration) GetInternalProperty(key string, opts ...Option) (string, error) {
config := agollo.GetConfig(c.appConf.NamespaceName)
if config == nil {
return "", errors.New(fmt.Sprintf("nothing in namespace:%s ", key))
}
return config.GetStringValue(key, ""), nil
}
func (c *apolloConfiguration) GetRule(key string, opts ...Option) (string, error) {
return c.GetInternalProperty(key, opts...)
}
func (c *apolloConfiguration) GetProperties(key string, opts ...Option) (string, error) {
/**
* when group is not null, we are getting startup configs(config file) from Config Center, for example:
* key=dubbo.propertie
*/
config := agollo.GetConfig(key)
if config == nil {
return "", errors.New(fmt.Sprintf("nothing in namespace:%s ", key))
}
return config.GetContent(agollo.Properties), nil
}
func (c *apolloConfiguration) getAddressWithProtocolPrefix(url *common.URL) string {
address := url.Location
converted := address
if len(address) != 0 {
reg := regexp.MustCompile("\\s+")
address = reg.ReplaceAllString(address, "")
parts := strings.Split(address, ",")
addrs := make([]string, 0)
for _, part := range parts {
addr := part
if !strings.HasPrefix(part, apolloProtocolPrefix) {
addr = apolloProtocolPrefix + part
}
addrs = append(addrs, addr)
}
converted = strings.Join(addrs, ",")
}
return converted
}
func (c *apolloConfiguration) Parser() parser.ConfigurationParser {
return c.parser
}
func (c *apolloConfiguration) SetParser(p parser.ConfigurationParser) {
c.parser = p
}
| newApolloConfiguration |
AlertProps.interface.ts | interface AlertProps {
title: string
description?: string
alternative1?: string
alternative1Function?: () => void | alternative2Function?: () => void
}
export default AlertProps | alternative2?: string |
default_unix.go | // +build !windows
/*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package platforms
import (
"github.com/containerd/containerd/platforms"
)
// Default returns the current platform's default platform specification.
func | () platforms.MatchComparer {
return platforms.Default()
}
| Default |
sc-testcomp-p.component.spec.ts | import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ScTestcompPComponent } from './sc-testcomp-p.component';
describe('Component: ScTestcompPComponent', () => {
let component: ScTestcompPComponent;
let fixture: ComponentFixture<ScTestcompPComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ ScTestcompPComponent ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(ScTestcompPComponent);
component = fixture.componentInstance; | });
it('Should create', () => {
expect(component).toBeTruthy();
});
}); | fixture.detectChanges(); |
ccpd.py | import os
import numpy as np
from addict import Dict
from PIL import Image
from .reader import Reader
from .builder import READER
__all__ = ['CCPD2019FolderReader']
@READER.register_module()
class CCPD2019FolderReader(Reader):
def __init__(self, root, **kwargs):
super(CCPD2019FolderReader, self).__init__(**kwargs)
self.root = root
self.chars = ('京', '沪', '津', '渝', '冀', '晋', '蒙', '辽', '吉', '黑',
'苏', '浙', '皖', '闽', '赣', '鲁', '豫', '鄂', '湘', '粤',
'桂', '琼', '川', '贵', '云', '藏', '陕', '甘', '青', '宁',
'新',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
'W', 'X', 'Y', 'Z', 'I', 'O', '-')
self.img_paths = sorted(os.listdir(kwargs['root']))
assert len(self.img_paths) > 0
def get_dataset_info(self):
return range(len(self.img_paths)), Dict({'chars': self.chars})
def get_data_info(self, index):
img = Image.open(self.img_paths[index][0])
w, h = img.size
return dict(h=h, w=w)
def __call__(self, index):
# index = data_dict
# img = Image.open(os.path.join(self.root, self.img_paths[index])).convert('RGB')
img = self.read_image(os.path.join(self.root, self.img_paths[index]))
w, h = img.size
path = os.path.join(self.root, self.img_paths[index])
base_name = os.path.basename(self.img_paths[index])
img_name, suffix = os.path.splitext(base_name)
img_name = img_name.split("-")[0].split("_")[0]
# if len(img_name) == 8:
# print(path, 'a')
# if img_name[2] != 'D' and img_name[2] != 'F' and img_name[-1] != 'D' and img_name[-1] != 'F':
# print(path)
# raise ValueError
words = []
for c in img_name:
words.append(self.chars.index(c))
# return {'image': img, 'ori_size': np.array([h, w]).astype(np.float32), 'path': path, 'seq': words, 'seq_length': len(words)}
return dict(
image=img,
ori_size=np.array([h, w]).astype(np.float32),
path=path,
seq=words,
seq_length=len(words)
)
def __repr__(self):
return 'CCPD2019FolderReader(root={}, {})'.format(self.root, s | uper(CCPD2019FolderReader, self).__repr__())
|
|
platform_spec.rs | // Copyright (c) The cargo-guppy Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0
#[allow(unused_imports)]
use crate::platform::EnabledTernary;
use crate::{errors::TargetSpecError, platform::Platform};
use std::sync::Arc;
/// A specifier for a single platform, or for a range of platforms.
///
/// Some uses of `guppy` care about a single platform, and others care about queries against the
/// intersection of all hypothetical platforms, or against a union of any of them. `PlatformSpec`
/// handles the
///
/// `PlatformSpec` does not currently support expressions, but it might in the future, using an
/// [SMT solver](https://en.wikipedia.org/wiki/Satisfiability_modulo_theories).
#[derive(Clone, Debug)]
#[non_exhaustive]
pub enum PlatformSpec {
/// The intersection of all platforms.
///
/// Dependency queries performed against this variant will return [`EnabledTernary::Enabled`] if
/// and only if a dependency is not platform-dependent. They can never return
/// [`EnabledTernary::Unknown`].
///
/// This variant does not currently understand expressions that always evaluate to true
/// (tautologies), like `cfg(any(unix, not(unix)))` or `cfg(all())`. In the future, an SMT
/// solver would be able to handle such expressions.
Always,
/// An individual platform.
///
/// Dependency queries performed against this variant will return [`EnabledTernary::Enabled`] if
/// and only if a dependency is enabled on this platform. They may also return
/// [`EnabledTernary::Unknown`] if a platform is not enabled.
Platform(Arc<Platform>),
/// The union of all platforms.
///
/// Dependency queries performed against this variant will return [`EnabledTernary::Enabled`] if
/// a dependency is enabled on any platform.
///
/// This variant does not currently understand expressions that always evaluate to false
/// (contradictions), like `cfg(all(unix, not(unix)))` or `cfg(any())`. In the future, an SMT
/// solver would be able to handle such expressions.
Any,
}
impl PlatformSpec {
/// Returns a `PlatformSpec` corresponding to the current platform, as detected at build time.
///
/// Returns an error if the current platform was unknown to the version of `target-spec` in
/// use.
pub fn current() -> Result<Self, TargetSpecError> {
Ok(PlatformSpec::Platform(Arc::new(Platform::current()?)))
}
}
impl<T: Into<Arc<Platform>>> From<T> for PlatformSpec {
#[inline]
fn from(platform: T) -> Self |
}
| {
PlatformSpec::Platform(platform.into())
} |
shared_dressed_commoner_artisan_bith_male_01.py | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
| result = Creature()
result.template = "object/mobile/shared_dressed_commoner_artisan_bith_male_01.iff"
result.attribute_template_id = 9
result.stfName("npc_name","bith_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
|
feature.rs | use std::ffi::CString;
use libc::{c_double, c_int};
use vector::Defn;
use utils::{_string, _last_null_pointer_err};
use gdal_sys::{self, OGRErr, OGRFeatureH, OGRFieldType};
use vector::geometry::Geometry;
use vector::layer::Layer;
#[cfg(feature = "datetime")]
use chrono::{Date, FixedOffset, DateTime, TimeZone, Datelike, Timelike};
use errors::*;
/// OGR Feature
pub struct Feature<'a> {
_defn: &'a Defn,
c_feature: OGRFeatureH,
geometry: Vec<Geometry>,
}
impl<'a> Feature<'a> {
pub fn new(defn: &'a Defn) -> Result<Feature> {
let c_feature = unsafe { gdal_sys::OGR_F_Create(defn.c_defn()) };
if c_feature.is_null() {
Err(_last_null_pointer_err("OGR_F_Create"))?;
};
Ok(Feature {
_defn: defn,
c_feature,
geometry: Feature::_lazy_feature_geometries(defn),
})
}
pub unsafe fn _with_c_feature(defn: &'a Defn, c_feature: OGRFeatureH) -> Feature {
Feature {
_defn: defn,
c_feature,
geometry: Feature::_lazy_feature_geometries(defn),
}
}
pub fn _lazy_feature_geometries(defn: &'a Defn) -> Vec<Geometry> {
let geom_field_count = unsafe { gdal_sys::OGR_FD_GetGeomFieldCount(defn.c_defn()) } as isize;
(0..geom_field_count).map(|_| unsafe { Geometry::lazy_feature_geometry() }).collect()
}
/// Get the value of a named field. If the field exists, it returns a
/// `FieldValue` wrapper, that you need to unpack to a base type
/// (string, float, etc). If the field is missing, returns `None`.
pub fn field(&self, name: &str) -> Result<FieldValue> {
let c_name = CString::new(name)?;
let field_id = unsafe { gdal_sys::OGR_F_GetFieldIndex(self.c_feature, c_name.as_ptr()) };
if field_id == -1 {
Err(ErrorKind::InvalidFieldName{field_name: name.to_string(), method_name: "OGR_F_GetFieldIndex"})?;
}
let field_defn = unsafe { gdal_sys::OGR_F_GetFieldDefnRef(self.c_feature, field_id) };
let field_type = unsafe { gdal_sys::OGR_Fld_GetType(field_defn) };
match field_type {
OGRFieldType::OFTString => {
let rv = unsafe { gdal_sys::OGR_F_GetFieldAsString(self.c_feature, field_id) };
Ok(FieldValue::StringValue(_string(rv)))
},
OGRFieldType::OFTReal => {
let rv = unsafe { gdal_sys::OGR_F_GetFieldAsDouble(self.c_feature, field_id) };
Ok(FieldValue::RealValue(rv as f64))
},
OGRFieldType::OFTInteger => {
let rv = unsafe { gdal_sys::OGR_F_GetFieldAsInteger(self.c_feature, field_id) };
Ok(FieldValue::IntegerValue(rv as i32))
},
#[cfg(feature = "datetime")]
OGRFieldType::OFTDateTime => {
Ok(FieldValue::DateTimeValue(self.get_field_datetime(field_id)?))
},
#[cfg(feature = "datetime")]
OGRFieldType::OFTDate => {
Ok(FieldValue::DateValue(self.get_field_datetime(field_id)?.date()))
},
_ => Err(ErrorKind::UnhandledFieldType{field_type, method_name: "OGR_Fld_GetType"})?
}
}
#[cfg(feature = "datetime")]
fn get_field_datetime(&self, field_id: c_int) -> Result<DateTime<FixedOffset>> {
let mut year: c_int = 0;
let mut month: c_int = 0;
let mut day: c_int = 0;
let mut hour: c_int = 0;
let mut minute: c_int = 0;
let mut second: c_int = 0;
let mut tzflag: c_int = 0;
let success = unsafe {
gdal_sys::OGR_F_GetFieldAsDateTime( | )
};
if success == 0 {
Err(ErrorKind::OgrError { err: OGRErr::OGRERR_FAILURE, method_name: "OGR_F_GetFieldAsDateTime" })?;
}
// from https://github.com/OSGeo/gdal/blob/33a8a0edc764253b582e194d330eec3b83072863/gdal/ogr/ogrutils.cpp#L1309
let tzoffset_secs = if tzflag == 0 || tzflag == 100 {
0
} else {
(tzflag as i32 - 100) * 15 * 60
};
let rv = FixedOffset::east(tzoffset_secs)
.ymd(year as i32, month as u32, day as u32)
.and_hms(hour as u32, minute as u32, second as u32);
Ok(rv)
}
/// Get the field's geometry.
pub fn geometry(&self) -> &Geometry {
if !self.geometry[0].has_gdal_ptr() {
let c_geom = unsafe { gdal_sys::OGR_F_GetGeometryRef(self.c_feature) };
unsafe { self.geometry[0].set_c_geometry(c_geom) };
}
&self.geometry[0]
}
pub fn geometry_by_name(&self, field_name: &str) -> Result<&Geometry> {
let c_str_field_name = CString::new(field_name)?;
let idx = unsafe { gdal_sys::OGR_F_GetGeomFieldIndex(self.c_feature, c_str_field_name.as_ptr())};
if idx == -1 {
Err(ErrorKind::InvalidFieldName{field_name: field_name.to_string(), method_name: "geometry_by_name"})?
} else {
self.geometry_by_index(idx as usize)
}
}
pub fn geometry_by_index(&self, idx: usize) -> Result<&Geometry> {
if idx >= self.geometry.len() {
Err(ErrorKind::InvalidFieldIndex{index: idx, method_name: "geometry_by_name"})?;
}
if ! self.geometry[idx].has_gdal_ptr() {
let c_geom = unsafe { gdal_sys::OGR_F_GetGeomFieldRef(self.c_feature, idx as i32) };
if c_geom.is_null() {
Err(_last_null_pointer_err("OGR_F_GetGeomFieldRef"))?;
}
unsafe { self.geometry[idx].set_c_geometry(c_geom) };
}
Ok(&self.geometry[idx])
}
pub fn create(&self, lyr: &Layer) -> Result<()> {
let rv = unsafe { gdal_sys::OGR_L_CreateFeature(lyr.c_layer(), self.c_feature) };
if rv != OGRErr::OGRERR_NONE {
Err(ErrorKind::OgrError{err: rv, method_name: "OGR_L_CreateFeature"})?;
}
Ok(())
}
pub fn set_field_string(&self, field_name: &str, value: &str) -> Result<()> {
let c_str_field_name = CString::new(field_name)?;
let c_str_value = CString::new(value)?;
let idx = unsafe { gdal_sys::OGR_F_GetFieldIndex(self.c_feature, c_str_field_name.as_ptr())};
if idx == -1 {
Err(ErrorKind::InvalidFieldName{field_name: field_name.to_string(), method_name: "OGR_F_GetFieldIndex"})?;
}
unsafe { gdal_sys::OGR_F_SetFieldString(self.c_feature, idx, c_str_value.as_ptr()) };
Ok(())
}
pub fn set_field_double(&self, field_name: &str, value: f64) -> Result<()> {
let c_str_field_name = CString::new(field_name)?;
let idx = unsafe { gdal_sys::OGR_F_GetFieldIndex(self.c_feature, c_str_field_name.as_ptr())};
if idx == -1 {
Err(ErrorKind::InvalidFieldName{field_name: field_name.to_string(), method_name: "OGR_F_GetFieldIndex"})?;
}
unsafe { gdal_sys::OGR_F_SetFieldDouble(self.c_feature, idx, value as c_double) };
Ok(())
}
pub fn set_field_integer(&self, field_name: &str, value: i32) -> Result<()> {
let c_str_field_name = CString::new(field_name)?;
let idx = unsafe { gdal_sys::OGR_F_GetFieldIndex(self.c_feature, c_str_field_name.as_ptr())};
if idx == -1 {
Err(ErrorKind::InvalidFieldName{field_name: field_name.to_string(), method_name: "OGR_F_GetFieldIndex"})?;
}
unsafe { gdal_sys::OGR_F_SetFieldInteger(self.c_feature, idx, value as c_int) };
Ok(())
}
#[cfg(feature = "datetime")]
pub fn set_field_datetime(&self, field_name: &str, value: DateTime<FixedOffset>) -> Result<()> {
let c_str_field_name = CString::new(field_name)?;
let idx = unsafe { gdal_sys::OGR_F_GetFieldIndex(self.c_feature, c_str_field_name.as_ptr())};
if idx == -1 {
Err(ErrorKind::InvalidFieldName{field_name: field_name.to_string(), method_name: "OGR_F_GetFieldIndex"})?;
}
let year = value.year() as c_int;
let month = value.month() as c_int;
let day = value.day() as c_int;
let hour= value.hour() as c_int;
let minute = value.minute() as c_int;
let second = value.second() as c_int;
let tzflag: c_int = if value.offset().local_minus_utc() == 0 {
0
} else {
100 + (value.offset().local_minus_utc() / (15 * 60))
};
unsafe { gdal_sys::OGR_F_SetFieldDateTime(self.c_feature, idx, year, month, day, hour, minute, second, tzflag) };
Ok(())
}
pub fn set_field(&self, field_name: &str, value: &FieldValue) -> Result<()> {
match *value {
FieldValue::RealValue(value) => self.set_field_double(field_name, value),
FieldValue::StringValue(ref value) => self.set_field_string(field_name, value.as_str()),
FieldValue::IntegerValue(value) => self.set_field_integer(field_name, value),
#[cfg(feature = "datetime")]
FieldValue::DateTimeValue(value) => self.set_field_datetime(field_name, value),
#[cfg(feature = "datetime")]
FieldValue::DateValue(value) => self.set_field_datetime(field_name, value.and_hms(0, 0, 0)),
}
}
pub fn set_geometry(&mut self, geom: Geometry) -> Result<()> {
let rv = unsafe { gdal_sys::OGR_F_SetGeometry(self.c_feature, geom.c_geometry()) };
if rv != OGRErr::OGRERR_NONE {
Err(ErrorKind::OgrError{err: rv, method_name: "OGR_G_SetGeometry"})?;
}
self.geometry[0] = geom;
Ok(())
}
}
impl<'a> Drop for Feature<'a> {
fn drop(&mut self) {
unsafe { gdal_sys::OGR_F_Destroy(self.c_feature); }
}
}
pub enum FieldValue {
IntegerValue(i32),
StringValue(String),
RealValue(f64),
#[cfg(feature = "datetime")]
DateValue(Date<FixedOffset>),
#[cfg(feature = "datetime")]
DateTimeValue(DateTime<FixedOffset>),
}
impl FieldValue {
/// Interpret the value as `String`. Panics if the value is something else.
pub fn into_string(self) -> Option<String> {
match self {
FieldValue::StringValue(rv) => Some(rv),
_ => None
}
}
/// Interpret the value as `f64`. Panics if the value is something else.
pub fn into_real(self) -> Option<f64> {
match self {
FieldValue::RealValue(rv) => Some(rv),
_ => None
}
}
/// Interpret the value as `i32`. Panics if the value is something else.
pub fn into_int(self) -> Option<i32> {
match self {
FieldValue::IntegerValue(rv) => Some(rv),
_ => None
}
}
/// Interpret the value as `Date`.
#[cfg(feature = "datetime")]
pub fn into_date(self) -> Option<Date<FixedOffset>> {
match self {
FieldValue::DateValue(rv) => Some(rv),
FieldValue::DateTimeValue(rv) => Some(rv.date()),
_ => None
}
}
/// Interpret the value as `DateTime`.
#[cfg(feature = "datetime")]
pub fn into_datetime(self) -> Option<DateTime<FixedOffset>> {
match self {
FieldValue::DateTimeValue(rv) => Some(rv),
_ => None
}
}
} | self.c_feature, field_id,
&mut year, &mut month, &mut day, &mut hour, &mut minute, &mut second, &mut tzflag |
func.js | $(document).ready(function() {
$('.mobileNav').click( function(){
if($('.nav ul').hasClass('open') ){$('.nav ul').removeClass('open'); }
else{ $('.nav ul').addClass('open');} | })
}); |
|
reagent.rs | extern crate reagent;
|
fn main() {
let addr = "0.0.0.0:6567".parse().unwrap();
let mut srv = Server::new(&addr).unwrap();
srv.run().unwrap();
} | use reagent::{Server}; |
headers.ts | import { isPlainObject } from './util'
function normalizeHeaderName(headers: any, normalizeName: string): void {
if (!headers) {
return
}
Object.keys(headers).forEach(name => {
if (name !== normalizeName && name.toUpperCase() === normalizeName.toUpperCase()) {
headers[normalizeName] = headers[name]
delete headers[name]
}
})
}
export function | (headers: any, data: any): any {
normalizeHeaderName(headers, 'Content-Type')
if (isPlainObject(data)) {
if (headers && !headers['Content-Type']) {
headers['Content-Type'] = 'application/json;charset=utf-8'
}
}
return headers
}
export function parseHeaders(headers: string): any {
// let parsed = {}
let parsed = Object.create(null)
if (!headers) {
return parsed
}
headers.split('\r\n').forEach(line => {
let [key, value] = line.split(':')
key = key.trim().toLowerCase()
if (!key) {
return
}
if (value) {
value = value.trim()
}
parsed[key] = value
})
return parsed
}
| processHeaders |
manage.py | #!/usr/bin/env python
# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/
# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/
# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py
# https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/runtests/runtests.py
import os
import sys
import warnings
warnings.filterwarnings("ignore")
# fix sys path so we don't need to setup PYTHONPATH
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) | engine = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
if engine.startswith('mysql'):
engine = 'django.db.backends.mysql'
elif engine.startswith('postgre'):
engine = 'django.db.backends.postgresql_psycopg2'
else:
engine = 'django.db.backends.sqlite3'
try:
import django
except SyntaxError:
sys.stderr.write('Unable to import django (older python version)\n')
exit(0)
PYPY = hasattr(sys, 'pypy_version_info')
version = sys.version_info[:2]
PY3 = version[0] == 3
if PYPY and engine.endswith('psycopg2') and bytes != str:
sys.stderr.write('PyPy3 does not have a psycopg implementation\n')
exit(0)
if PY3 and django.VERSION[:2] >= (1, 9) and version <= (3, 3):
sys.stderr.write('Django>=1.9 does not support Python<=3.3\n')
exit(0)
if PY3 and django.VERSION[:2] <= (1, 8) and version >= (3, 5):
sys.stderr.write('Django<=1.8 does not support Python>=3.5\n')
exit(0)
if PY3 and django.VERSION[:2] == (1, 8) and version <= (3, 3):
sys.stderr.write('Django 1.8 does not support Python<=3.3\n')
exit(0)
if django.VERSION[:2] <= (1, 4) and PY3:
sys.stderr.write('Django<=1.4 does not support Python3\n')
exit(0)
if version == (2, 6) and django.VERSION[:2] >= (1, 7):
sys.stderr.write('Django>=1.7 does not support Python2.6\n')
exit(0)
os.environ['DATABASE_ENGINE'] = engine
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
try:
import pymysql
pymysql.install_as_MySQLdb()
except ImportError:
pass
try:
django.setup()
except AttributeError:
pass
if __name__ == '__main__':
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv) | os.environ['DJANGO_SETTINGS_MODULE'] = 'actstream.runtests.settings'
|
generated.go | // Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package dataloader
import (
"bytes"
"context"
"errors"
"strconv"
"sync"
"sync/atomic"
"time"
"github.com/99designs/gqlgen/graphql"
"github.com/99designs/gqlgen/graphql/introspection"
gqlparser "github.com/vektah/gqlparser/v2"
"github.com/vektah/gqlparser/v2/ast"
)
// region ************************** generated!.gotpl **************************
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func | (cfg Config) graphql.ExecutableSchema {
return &executableSchema{
resolvers: cfg.Resolvers,
directives: cfg.Directives,
complexity: cfg.Complexity,
}
}
type Config struct {
Resolvers ResolverRoot
Directives DirectiveRoot
Complexity ComplexityRoot
}
type ResolverRoot interface {
Customer() CustomerResolver
Order() OrderResolver
Query() QueryResolver
}
type DirectiveRoot struct {
}
type ComplexityRoot struct {
Address struct {
Country func(childComplexity int) int
ID func(childComplexity int) int
Street func(childComplexity int) int
}
Customer struct {
Address func(childComplexity int) int
ID func(childComplexity int) int
Name func(childComplexity int) int
Orders func(childComplexity int) int
}
Item struct {
Name func(childComplexity int) int
}
Order struct {
Amount func(childComplexity int) int
Date func(childComplexity int) int
ID func(childComplexity int) int
Items func(childComplexity int) int
}
Query struct {
Customers func(childComplexity int) int
Torture1d func(childComplexity int, customerIds []int) int
Torture2d func(childComplexity int, customerIds [][]int) int
}
}
type CustomerResolver interface {
Address(ctx context.Context, obj *Customer) (*Address, error)
Orders(ctx context.Context, obj *Customer) ([]*Order, error)
}
type OrderResolver interface {
Items(ctx context.Context, obj *Order) ([]*Item, error)
}
type QueryResolver interface {
Customers(ctx context.Context) ([]*Customer, error)
Torture1d(ctx context.Context, customerIds []int) ([]*Customer, error)
Torture2d(ctx context.Context, customerIds [][]int) ([][]*Customer, error)
}
type executableSchema struct {
resolvers ResolverRoot
directives DirectiveRoot
complexity ComplexityRoot
}
func (e *executableSchema) Schema() *ast.Schema {
return parsedSchema
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
ec := executionContext{nil, e}
_ = ec
switch typeName + "." + field {
case "Address.country":
if e.complexity.Address.Country == nil {
break
}
return e.complexity.Address.Country(childComplexity), true
case "Address.id":
if e.complexity.Address.ID == nil {
break
}
return e.complexity.Address.ID(childComplexity), true
case "Address.street":
if e.complexity.Address.Street == nil {
break
}
return e.complexity.Address.Street(childComplexity), true
case "Customer.address":
if e.complexity.Customer.Address == nil {
break
}
return e.complexity.Customer.Address(childComplexity), true
case "Customer.id":
if e.complexity.Customer.ID == nil {
break
}
return e.complexity.Customer.ID(childComplexity), true
case "Customer.name":
if e.complexity.Customer.Name == nil {
break
}
return e.complexity.Customer.Name(childComplexity), true
case "Customer.orders":
if e.complexity.Customer.Orders == nil {
break
}
return e.complexity.Customer.Orders(childComplexity), true
case "Item.name":
if e.complexity.Item.Name == nil {
break
}
return e.complexity.Item.Name(childComplexity), true
case "Order.amount":
if e.complexity.Order.Amount == nil {
break
}
return e.complexity.Order.Amount(childComplexity), true
case "Order.date":
if e.complexity.Order.Date == nil {
break
}
return e.complexity.Order.Date(childComplexity), true
case "Order.id":
if e.complexity.Order.ID == nil {
break
}
return e.complexity.Order.ID(childComplexity), true
case "Order.items":
if e.complexity.Order.Items == nil {
break
}
return e.complexity.Order.Items(childComplexity), true
case "Query.customers":
if e.complexity.Query.Customers == nil {
break
}
return e.complexity.Query.Customers(childComplexity), true
case "Query.torture1d":
if e.complexity.Query.Torture1d == nil {
break
}
args, err := ec.field_Query_torture1d_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.Torture1d(childComplexity, args["customerIds"].([]int)), true
case "Query.torture2d":
if e.complexity.Query.Torture2d == nil {
break
}
args, err := ec.field_Query_torture2d_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.Torture2d(childComplexity, args["customerIds"].([][]int)), true
}
return 0, false
}
func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler {
rc := graphql.GetOperationContext(ctx)
ec := executionContext{rc, e}
first := true
switch rc.Operation.Operation {
case ast.Query:
return func(ctx context.Context) *graphql.Response {
if !first {
return nil
}
first = false
data := ec._Query(ctx, rc.Operation.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return &graphql.Response{
Data: buf.Bytes(),
}
}
default:
return graphql.OneShot(graphql.ErrorResponse(ctx, "unsupported GraphQL operation"))
}
}
type executionContext struct {
*graphql.OperationContext
*executableSchema
}
func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapSchema(parsedSchema), nil
}
func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
}
var sources = []*ast.Source{
{Name: "schema.graphql", Input: `type Query {
customers: [Customer!]
# these methods are here to test code generation of nested arrays
torture1d(customerIds: [Int!]): [Customer!]
torture2d(customerIds: [[Int!]]): [[Customer!]]
}
type Customer {
id: Int!
name: String!
address: Address
orders: [Order!]
}
type Address {
id: Int!
street: String!
country: String!
}
type Order {
id: Int!
date: Time!
amount: Float!
items: [Item!]
}
type Item {
name: String!
}
scalar Time
`, BuiltIn: false},
}
var parsedSchema = gqlparser.MustLoadSchema(sources...)
// endregion ************************** generated!.gotpl **************************
// region ***************************** args.gotpl *****************************
func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 string
if tmp, ok := rawArgs["name"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name"))
arg0, err = ec.unmarshalNString2string(ctx, tmp)
if err != nil {
return nil, err
}
}
args["name"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_torture1d_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 []int
if tmp, ok := rawArgs["customerIds"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("customerIds"))
arg0, err = ec.unmarshalOInt2ᚕintᚄ(ctx, tmp)
if err != nil {
return nil, err
}
}
args["customerIds"] = arg0
return args, nil
}
func (ec *executionContext) field_Query_torture2d_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 [][]int
if tmp, ok := rawArgs["customerIds"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("customerIds"))
arg0, err = ec.unmarshalOInt2ᚕᚕint(ctx, tmp)
if err != nil {
return nil, err
}
}
args["customerIds"] = arg0
return args, nil
}
func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 bool
if tmp, ok := rawArgs["includeDeprecated"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated"))
arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp)
if err != nil {
return nil, err
}
}
args["includeDeprecated"] = arg0
return args, nil
}
func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
var err error
args := map[string]interface{}{}
var arg0 bool
if tmp, ok := rawArgs["includeDeprecated"]; ok {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated"))
arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp)
if err != nil {
return nil, err
}
}
args["includeDeprecated"] = arg0
return args, nil
}
// endregion ***************************** args.gotpl *****************************
// region ************************** directives.gotpl **************************
// endregion ************************** directives.gotpl **************************
// region **************************** field.gotpl *****************************
func (ec *executionContext) _Address_id(ctx context.Context, field graphql.CollectedField, obj *Address) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Address",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.ID, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _Address_street(ctx context.Context, field graphql.CollectedField, obj *Address) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Address",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Street, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Address_country(ctx context.Context, field graphql.CollectedField, obj *Address) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Address",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Country, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Customer_id(ctx context.Context, field graphql.CollectedField, obj *Customer) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Customer",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.ID, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _Customer_name(ctx context.Context, field graphql.CollectedField, obj *Customer) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Customer",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Name, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Customer_address(ctx context.Context, field graphql.CollectedField, obj *Customer) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Customer",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Customer().Address(rctx, obj)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*Address)
fc.Result = res
return ec.marshalOAddress2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐAddress(ctx, field.Selections, res)
}
func (ec *executionContext) _Customer_orders(ctx context.Context, field graphql.CollectedField, obj *Customer) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Customer",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Customer().Orders(rctx, obj)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([]*Order)
fc.Result = res
return ec.marshalOOrder2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐOrderᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Item_name(ctx context.Context, field graphql.CollectedField, obj *Item) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Item",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Name, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) _Order_id(ctx context.Context, field graphql.CollectedField, obj *Order) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Order",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.ID, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(int)
fc.Result = res
return ec.marshalNInt2int(ctx, field.Selections, res)
}
func (ec *executionContext) _Order_date(ctx context.Context, field graphql.CollectedField, obj *Order) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Order",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Date, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(time.Time)
fc.Result = res
return ec.marshalNTime2timeᚐTime(ctx, field.Selections, res)
}
func (ec *executionContext) _Order_amount(ctx context.Context, field graphql.CollectedField, obj *Order) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Order",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Amount, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(float64)
fc.Result = res
return ec.marshalNFloat2float64(ctx, field.Selections, res)
}
func (ec *executionContext) _Order_items(ctx context.Context, field graphql.CollectedField, obj *Order) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Order",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Order().Items(rctx, obj)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([]*Item)
fc.Result = res
return ec.marshalOItem2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐItemᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_customers(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().Customers(rctx)
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([]*Customer)
fc.Result = res
return ec.marshalOCustomer2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomerᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_torture1d(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_torture1d_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().Torture1d(rctx, args["customerIds"].([]int))
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([]*Customer)
fc.Result = res
return ec.marshalOCustomer2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomerᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) _Query_torture2d(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: true,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query_torture2d_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.resolvers.Query().Torture2d(rctx, args["customerIds"].([][]int))
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.([][]*Customer)
fc.Result = res
return ec.marshalOCustomer2ᚕᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomer(ctx, field.Selections, res)
}
func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
rawArgs := field.ArgumentMap(ec.Variables)
args, err := ec.field_Query___type_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
fc.Args = args
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.introspectType(args["name"].(string))
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*introspection.Type)
fc.Result = res
return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "Query",
Field: field,
Args: nil,
IsMethod: true,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return ec.introspectSchema()
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(*introspection.Schema)
fc.Result = res
return ec.marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx, field.Selections, res)
}
func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "__Directive",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Name, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalNString2string(ctx, field.Selections, res)
}
func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "__Directive",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Description, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.(string)
fc.Result = res
return ec.marshalOString2string(ctx, field.Selections, res)
}
func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "__Directive",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Locations, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]string)
fc.Result = res
return ec.marshalN__DirectiveLocation2ᚕstringᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "__Directive",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.Args, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.([]introspection.InputValue)
fc.Result = res
return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res)
}
func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = graphql.Null
}
}()
fc := &graphql.FieldContext{
Object: "__Directive",
Field: field,
Args: nil,
IsMethod: false,
IsResolver: false,
}
ctx = graphql.WithFieldContext(ctx, fc)
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
return obj.IsRepeatable, nil
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
res := resTmp.(bool)
fc.Result = res
return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
// ___EnumValue_name resolves the non-nullable __EnumValue.name introspection
// field from the struct field obj.Name.
// NOTE: generated by gqlgen (field.gotpl) — do not edit by hand.
func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) {
	// Convert a resolver panic into a GraphQL error and a null result.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__EnumValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Name, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		// Non-nullable field: report an error unless one was already recorded.
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalNString2string(ctx, field.Selections, res)
}

// ___EnumValue_description resolves the nullable __EnumValue.description
// introspection field; a nil result simply marshals to null.
func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__EnumValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Description, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalOString2string(ctx, field.Selections, res)
}

// ___EnumValue_isDeprecated resolves the non-nullable
// __EnumValue.isDeprecated introspection field via the IsDeprecated() method.
func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__EnumValue",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.IsDeprecated(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(bool)
	fc.Result = res
	return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}

// ___EnumValue_deprecationReason resolves the nullable
// __EnumValue.deprecationReason introspection field (a *string).
func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__EnumValue",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.DeprecationReason(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*string)
	fc.Result = res
	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
// ___Field_name resolves the non-nullable __Field.name introspection field.
// NOTE: generated by gqlgen (field.gotpl) — do not edit by hand.
func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	// Convert a resolver panic into a GraphQL error and a null result.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Name, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		// Non-nullable field: report an error unless one was already recorded.
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalNString2string(ctx, field.Selections, res)
}

// ___Field_description resolves the nullable __Field.description field.
func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Description, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalOString2string(ctx, field.Selections, res)
}

// ___Field_args resolves the non-nullable __Field.args list
// ([__InputValue!]!) from the struct field obj.Args.
func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Args, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.([]introspection.InputValue)
	fc.Result = res
	return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res)
}

// ___Field_type resolves the non-nullable __Field.type field (__Type!).
func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Type, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}

// ___Field_isDeprecated resolves the non-nullable __Field.isDeprecated field
// via the IsDeprecated() method.
func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.IsDeprecated(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(bool)
	fc.Result = res
	return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}

// ___Field_deprecationReason resolves the nullable
// __Field.deprecationReason field (a *string).
func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Field",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.DeprecationReason(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*string)
	fc.Result = res
	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
// ___InputValue_name resolves the non-nullable __InputValue.name field.
// NOTE: generated by gqlgen (field.gotpl) — do not edit by hand.
func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) {
	// Convert a resolver panic into a GraphQL error and a null result.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__InputValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Name, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		// Non-nullable field: report an error unless one was already recorded.
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalNString2string(ctx, field.Selections, res)
}

// ___InputValue_description resolves the nullable
// __InputValue.description field.
func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__InputValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Description, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalOString2string(ctx, field.Selections, res)
}

// ___InputValue_type resolves the non-nullable __InputValue.type field
// (__Type!).
func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__InputValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Type, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}

// ___InputValue_defaultValue resolves the nullable
// __InputValue.defaultValue field (a *string).
func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__InputValue",
		Field:      field,
		Args:       nil,
		IsMethod:   false,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.DefaultValue, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*string)
	fc.Result = res
	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
// ___Schema_types resolves the non-nullable __Schema.types list
// ([__Type!]!) via obj.Types().
// NOTE: generated by gqlgen (field.gotpl) — do not edit by hand.
func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) {
	// Convert a resolver panic into a GraphQL error and a null result.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Schema",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Types(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		// Non-nullable field: report an error unless one was already recorded.
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.([]introspection.Type)
	fc.Result = res
	return ec.marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res)
}

// ___Schema_queryType resolves the non-nullable __Schema.queryType field
// (__Type!) via obj.QueryType().
func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Schema",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.QueryType(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}

// ___Schema_mutationType resolves the nullable __Schema.mutationType field;
// schemas without a mutation root return null.
func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Schema",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.MutationType(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}

// ___Schema_subscriptionType resolves the nullable
// __Schema.subscriptionType field; schemas without a subscription root
// return null.
func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Schema",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.SubscriptionType(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}

// ___Schema_directives resolves the non-nullable __Schema.directives list
// ([__Directive!]!) via obj.Directives().
func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Schema",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Directives(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.([]introspection.Directive)
	fc.Result = res
	return ec.marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirectiveᚄ(ctx, field.Selections, res)
}
// ___Type_kind resolves the non-nullable __Type.kind field (__TypeKind!)
// via obj.Kind().
// NOTE: generated by gqlgen (field.gotpl) — do not edit by hand.
func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	// Convert a resolver panic into a GraphQL error and a null result.
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Kind(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		// Non-nullable field: report an error unless one was already recorded.
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalN__TypeKind2string(ctx, field.Selections, res)
}

// ___Type_name resolves the nullable __Type.name field; wrapper types
// (lists, non-nulls) yield nil.
func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Name(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*string)
	fc.Result = res
	return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}

// ___Type_description resolves the nullable __Type.description field.
func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Description(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(string)
	fc.Result = res
	return ec.marshalOString2string(ctx, field.Selections, res)
}

// ___Type_fields resolves the nullable __Type.fields(includeDeprecated)
// field; the boolean argument is decoded from the query before dispatch.
func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	// Decode and validate the field arguments before invoking the resolver.
	rawArgs := field.ArgumentMap(ec.Variables)
	args, err := ec.field___Type_fields_args(ctx, rawArgs)
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	fc.Args = args
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Fields(args["includeDeprecated"].(bool)), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.([]introspection.Field)
	fc.Result = res
	return ec.marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx, field.Selections, res)
}

// ___Type_interfaces resolves the nullable __Type.interfaces list.
func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.Interfaces(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.([]introspection.Type)
	fc.Result = res
	return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res)
}

// ___Type_possibleTypes resolves the nullable __Type.possibleTypes list.
func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.PossibleTypes(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.([]introspection.Type)
	fc.Result = res
	return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res)
}

// ___Type_enumValues resolves the nullable
// __Type.enumValues(includeDeprecated) field; the boolean argument is
// decoded from the query before dispatch.
func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	// Decode and validate the field arguments before invoking the resolver.
	rawArgs := field.ArgumentMap(ec.Variables)
	args, err := ec.field___Type_enumValues_args(ctx, rawArgs)
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	fc.Args = args
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.EnumValues(args["includeDeprecated"].(bool)), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.([]introspection.EnumValue)
	fc.Result = res
	return ec.marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx, field.Selections, res)
}

// ___Type_inputFields resolves the nullable __Type.inputFields list.
func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.InputFields(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.([]introspection.InputValue)
	fc.Result = res
	return ec.marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res)
}

// ___Type_ofType resolves the nullable __Type.ofType field (the wrapped
// type of a LIST or NON_NULL wrapper).
func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) {
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	fc := &graphql.FieldContext{
		Object:     "__Type",
		Field:      field,
		Args:       nil,
		IsMethod:   true,
		IsResolver: false,
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.OfType(), nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*introspection.Type)
	fc.Result = res
	return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
// endregion **************************** field.gotpl *****************************
// region **************************** input.gotpl *****************************
// endregion **************************** input.gotpl *****************************
// region ************************** interface.gotpl ***************************
// endregion ************************** interface.gotpl ***************************
// region **************************** object.gotpl ****************************
// addressImplementors lists the GraphQL type names this marshaler handles.
var addressImplementors = []string{"Address"}

// _Address marshals an Address object: it collects the requested fields,
// resolves each one synchronously, and returns null for the whole object if
// any non-nullable field produced null.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) _Address(ctx context.Context, sel ast.SelectionSet, obj *Address) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, addressImplementors)
	out := graphql.NewFieldSet(fields)
	// Counts non-nullable fields that resolved to null; plain increment is
	// used because all fields here resolve on the current goroutine.
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Address")
		case "id":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Address_id(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "street":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Address_street(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "country":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Address_country(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	// Null propagation: a null in any non-nullable field nulls the object.
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
// customerImplementors lists the GraphQL type names this marshaler handles.
var customerImplementors = []string{"Customer"}

// _Customer marshals a Customer object. Scalar fields (id, name) resolve
// synchronously; the address and orders fields are dispatched concurrently
// via out.Concurrently, so the invalids counter uses atomic increments.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) _Customer(ctx context.Context, sel ast.SelectionSet, obj *Customer) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, customerImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Customer")
		case "id":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Customer_id(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "name":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Customer_name(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "address":
			field := field // capture per-iteration value for the goroutine
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				// Recover a panic inside the concurrent resolver.
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Customer_address(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "orders":
			field := field // capture per-iteration value for the goroutine
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Customer_orders(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
// itemImplementors lists the GraphQL type names this marshaler handles.
var itemImplementors = []string{"Item"}

// _Item marshals an Item object; its single field resolves synchronously.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) _Item(ctx context.Context, sel ast.SelectionSet, obj *Item) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, itemImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Item")
		case "name":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Item_name(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	// Null propagation: a null in any non-nullable field nulls the object.
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
// orderImplementors lists the GraphQL type names this marshaler handles.
var orderImplementors = []string{"Order"}

// _Order marshals an Order object. Scalar fields resolve synchronously
// (with atomic null accounting, since the items field runs concurrently);
// the items field is dispatched via out.Concurrently.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) _Order(ctx context.Context, sel ast.SelectionSet, obj *Order) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, orderImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Order")
		case "id":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Order_id(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "date":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Order_date(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "amount":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Order_amount(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "items":
			field := field // capture per-iteration value for the goroutine
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				// Recover a panic inside the concurrent resolver.
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Order_items(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
// queryImplementors lists the GraphQL type names this marshaler handles.
var queryImplementors = []string{"Query"}

// _Query marshals the root Query object. Each root field gets its own
// RootFieldContext and is wrapped in RootResolverMiddleware; the three
// schema fields run concurrently, while the introspection fields (__type,
// __schema) resolve synchronously.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors)
	ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{
		Object: "Query",
	})
	out := graphql.NewFieldSet(fields)
	// invalids is never incremented here (all Query fields are nullable or
	// concurrent), so the final check below never fires; kept as generated.
	var invalids uint32
	for i, field := range fields {
		innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{
			Object: field.Name,
			Field:  field,
		})
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Query")
		case "customers":
			field := field // capture per-iteration value for the goroutine
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				// Recover a panic inside the concurrent resolver.
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Query_customers(ctx, field)
				return res
			}
			rrm := func(ctx context.Context) graphql.Marshaler {
				return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc)
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return rrm(innerCtx)
			})
		case "torture1d":
			field := field
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Query_torture1d(ctx, field)
				return res
			}
			rrm := func(ctx context.Context) graphql.Marshaler {
				return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc)
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return rrm(innerCtx)
			})
		case "torture2d":
			field := field
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Query_torture2d(ctx, field)
				return res
			}
			rrm := func(ctx context.Context) graphql.Marshaler {
				return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc)
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return rrm(innerCtx)
			})
		case "__type":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Query___type(ctx, field)
			}
			out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc)
		case "__schema":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec._Query___schema(ctx, field)
			}
			out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc)
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
// __DirectiveImplementors lists the GraphQL type names this marshaler handles.
var __DirectiveImplementors = []string{"__Directive"}

// ___Directive marshals a __Directive introspection object; all fields
// resolve synchronously, and null non-nullable fields null the object.
// NOTE: generated by gqlgen (object.gotpl) — do not edit by hand.
func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("__Directive")
		case "name":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec.___Directive_name(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "description":
			// Nullable field: no invalids accounting.
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec.___Directive_description(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
		case "locations":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec.___Directive_locations(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "args":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec.___Directive_args(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "isRepeatable":
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				return ec.___Directive_isRepeatable(ctx, field, obj)
			}
			out.Values[i] = innerFunc(ctx)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}
var __EnumValueImplementors = []string{"__EnumValue"}
func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("__EnumValue")
case "name":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___EnumValue_name(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "description":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___EnumValue_description(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "isDeprecated":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___EnumValue_isDeprecated(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "deprecationReason":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___EnumValue_deprecationReason(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var __FieldImplementors = []string{"__Field"}
func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("__Field")
case "name":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_name(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "description":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_description(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "args":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_args(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "type":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_type(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "isDeprecated":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_isDeprecated(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "deprecationReason":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Field_deprecationReason(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var __InputValueImplementors = []string{"__InputValue"}
func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("__InputValue")
case "name":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___InputValue_name(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "description":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___InputValue_description(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "type":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___InputValue_type(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "defaultValue":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___InputValue_defaultValue(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var __SchemaImplementors = []string{"__Schema"}
func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("__Schema")
case "types":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Schema_types(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "queryType":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Schema_queryType(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "mutationType":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Schema_mutationType(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "subscriptionType":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Schema_subscriptionType(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "directives":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Schema_directives(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
var __TypeImplementors = []string{"__Type"}
func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors)
out := graphql.NewFieldSet(fields)
var invalids uint32
for i, field := range fields {
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString("__Type")
case "kind":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_kind(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
if out.Values[i] == graphql.Null {
invalids++
}
case "name":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_name(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "description":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_description(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "fields":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_fields(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "interfaces":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_interfaces(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "possibleTypes":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_possibleTypes(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "enumValues":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_enumValues(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "inputFields":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_inputFields(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
case "ofType":
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
return ec.___Type_ofType(ctx, field, obj)
}
out.Values[i] = innerFunc(ctx)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 {
return graphql.Null
}
return out
}
// endregion **************************** object.gotpl ****************************
// region ***************************** type.gotpl *****************************
func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) {
res, err := graphql.UnmarshalBoolean(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler {
res := graphql.MarshalBoolean(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) marshalNCustomer2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomer(ctx context.Context, sel ast.SelectionSet, v *Customer) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._Customer(ctx, sel, v)
}
func (ec *executionContext) unmarshalNFloat2float64(ctx context.Context, v interface{}) (float64, error) {
res, err := graphql.UnmarshalFloatContext(ctx, v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel ast.SelectionSet, v float64) graphql.Marshaler {
res := graphql.MarshalFloatContext(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return graphql.WrapContextMarshaler(ctx, res)
}
func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) {
res, err := graphql.UnmarshalInt(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler {
res := graphql.MarshalInt(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) marshalNItem2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐItem(ctx context.Context, sel ast.SelectionSet, v *Item) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._Item(ctx, sel, v)
}
func (ec *executionContext) marshalNOrder2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐOrder(ctx context.Context, sel ast.SelectionSet, v *Order) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec._Order(ctx, sel, v)
}
func (ec *executionContext) unmarshalNString2string(ctx context.Context, v interface{}) (string, error) {
res, err := graphql.UnmarshalString(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
res := graphql.MarshalString(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) unmarshalNTime2timeᚐTime(ctx context.Context, v interface{}) (time.Time, error) {
res, err := graphql.UnmarshalTime(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalNTime2timeᚐTime(ctx context.Context, sel ast.SelectionSet, v time.Time) graphql.Marshaler {
res := graphql.MarshalTime(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler {
return ec.___Directive(ctx, sel, &v)
}
func (ec *executionContext) marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirectiveᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v interface{}) (string, error) {
res, err := graphql.UnmarshalString(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
res := graphql.MarshalString(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) unmarshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) {
var vSlice []interface{}
if v != nil {
vSlice = graphql.CoerceList(v)
}
var err error
res := make([]string, len(vSlice))
for i := range vSlice {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i))
res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i])
if err != nil {
return nil, err
}
}
return res, nil
}
func (ec *executionContext) marshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler {
return ec.___EnumValue(ctx, sel, &v)
}
func (ec *executionContext) marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler {
return ec.___Field(ctx, sel, &v)
}
func (ec *executionContext) marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler {
return ec.___InputValue(ctx, sel, &v)
}
func (ec *executionContext) marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler {
return ec.___Type(ctx, sel, &v)
}
func (ec *executionContext) marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler {
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler {
if v == nil {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
return graphql.Null
}
return ec.___Type(ctx, sel, v)
}
func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v interface{}) (string, error) {
res, err := graphql.UnmarshalString(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
res := graphql.MarshalString(v)
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "must not be null")
}
}
return res
}
func (ec *executionContext) marshalOAddress2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐAddress(ctx context.Context, sel ast.SelectionSet, v *Address) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec._Address(ctx, sel, v)
}
func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) {
res, err := graphql.UnmarshalBoolean(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler {
res := graphql.MarshalBoolean(v)
return res
}
func (ec *executionContext) unmarshalOBoolean2ᚖbool(ctx context.Context, v interface{}) (*bool, error) {
if v == nil {
return nil, nil
}
res, err := graphql.UnmarshalBoolean(v)
return &res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler {
if v == nil {
return graphql.Null
}
res := graphql.MarshalBoolean(*v)
return res
}
func (ec *executionContext) marshalOCustomer2ᚕᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomer(ctx context.Context, sel ast.SelectionSet, v [][]*Customer) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalOCustomer2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomerᚄ(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
return ret
}
func (ec *executionContext) marshalOCustomer2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomerᚄ(ctx context.Context, sel ast.SelectionSet, v []*Customer) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalNCustomer2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐCustomer(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) unmarshalOInt2ᚕintᚄ(ctx context.Context, v interface{}) ([]int, error) {
if v == nil {
return nil, nil
}
var vSlice []interface{}
if v != nil {
vSlice = graphql.CoerceList(v)
}
var err error
res := make([]int, len(vSlice))
for i := range vSlice {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i))
res[i], err = ec.unmarshalNInt2int(ctx, vSlice[i])
if err != nil {
return nil, err
}
}
return res, nil
}
func (ec *executionContext) marshalOInt2ᚕintᚄ(ctx context.Context, sel ast.SelectionSet, v []int) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
for i := range v {
ret[i] = ec.marshalNInt2int(ctx, sel, v[i])
}
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) unmarshalOInt2ᚕᚕint(ctx context.Context, v interface{}) ([][]int, error) {
if v == nil {
return nil, nil
}
var vSlice []interface{}
if v != nil {
vSlice = graphql.CoerceList(v)
}
var err error
res := make([][]int, len(vSlice))
for i := range vSlice {
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i))
res[i], err = ec.unmarshalOInt2ᚕintᚄ(ctx, vSlice[i])
if err != nil {
return nil, err
}
}
return res, nil
}
func (ec *executionContext) marshalOInt2ᚕᚕint(ctx context.Context, sel ast.SelectionSet, v [][]int) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
for i := range v {
ret[i] = ec.marshalOInt2ᚕintᚄ(ctx, sel, v[i])
}
return ret
}
func (ec *executionContext) marshalOItem2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐItemᚄ(ctx context.Context, sel ast.SelectionSet, v []*Item) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalNItem2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐItem(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalOOrder2ᚕᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐOrderᚄ(ctx context.Context, sel ast.SelectionSet, v []*Order) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalNOrder2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋexampleᚋdataloaderᚐOrder(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) {
res, err := graphql.UnmarshalString(v)
return res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
res := graphql.MarshalString(v)
return res
}
func (ec *executionContext) unmarshalOString2ᚖstring(ctx context.Context, v interface{}) (*string, error) {
if v == nil {
return nil, nil
}
res, err := graphql.UnmarshalString(v)
return &res, graphql.ErrorOnPath(ctx, err)
}
func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler {
if v == nil {
return graphql.Null
}
res := graphql.MarshalString(*v)
return res
}
func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec.___Schema(ctx, sel, v)
}
func (ec *executionContext) marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler {
if v == nil {
return graphql.Null
}
ret := make(graphql.Array, len(v))
var wg sync.WaitGroup
isLen1 := len(v) == 1
if !isLen1 {
wg.Add(len(v))
}
for i := range v {
i := i
fc := &graphql.FieldContext{
Index: &i,
Result: &v[i],
}
ctx := graphql.WithFieldContext(ctx, fc)
f := func(i int) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
if !isLen1 {
defer wg.Done()
}
ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i])
}
if isLen1 {
f(i)
} else {
go f(i)
}
}
wg.Wait()
for _, e := range ret {
if e == graphql.Null {
return graphql.Null
}
}
return ret
}
func (ec *executionContext) marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler {
if v == nil {
return graphql.Null
}
return ec.___Type(ctx, sel, v)
}
// endregion ***************************** type.gotpl *****************************
| NewExecutableSchema |
nativescript-easylink.module.ts | import { NgModule, NO_ERRORS_SCHEMA } from '@angular/core';
import { NativeScriptCommonModule, NativeScriptRouterModule } from '@nativescript/angular';
import { NativescriptEasylinkComponent } from './nativescript-easylink.component';
@NgModule({
imports: [NativeScriptCommonModule, NativeScriptRouterModule.forChild([{ path: '', component: NativescriptEasylinkComponent }])],
declarations: [NativescriptEasylinkComponent], | schemas: [NO_ERRORS_SCHEMA],
})
export class NativescriptEasylinkModule {} |
|
order.go | package poloniex
type OrderBook struct {
Asks [][]interface{} `json:"asks"`
Bids [][]interface{} `json:"bids"`
IsFrozen int `json:"isFrozen,string"`
Error string `json:"error"`
}
// This can probably be implemented using UnmarshalJSON | IsFrozen int `json:"isFrozen,string"`
}
type Orderb struct {
Rate string
Quantity float64
}
*/
type OpenOrder struct {
OrderNumber int64 `json:"orderNumber,string"`
Type string `json:"type"`
Rate float64 `json:"rate,string"`
Amount float64 `json:"amount,string"`
Total float64 `json:"total,string"`
} | /*
type OrderBook struct {
Bids []Orderb `json:"bids"`
Asks []Orderb `json:"asks"` |
select.rs | use super::errno::Errno;
use super::time::TimeVal;
use super::Result;
use libc::{self, c_int};
use std::mem;
use std::os::unix::io::RawFd;
use std::ptr::null_mut;
use libc::FD_SETSIZE;
// FIXME: Change to repr(transparent) once it's stable
#[repr(C)]
#[derive(Clone, Copy)]
#[allow(missing_debug_implementations)]
pub struct FdSet(libc::fd_set);
impl FdSet {
pub fn new() -> FdSet {
let mut fdset = unsafe { mem::MaybeUninit::uninit().assume_init() };
unsafe { libc::FD_ZERO(&mut fdset) };
FdSet(fdset)
}
pub fn insert(&mut self, fd: RawFd) {
unsafe { libc::FD_SET(fd, &mut self.0) };
}
pub fn remove(&mut self, fd: RawFd) {
unsafe { libc::FD_CLR(fd, &mut self.0) };
}
pub fn contains(&mut self, fd: RawFd) -> bool {
unsafe { libc::FD_ISSET(fd, &mut self.0) }
}
pub fn clear(&mut self) {
unsafe { libc::FD_ZERO(&mut self.0) };
}
/// Finds the highest file descriptor in the set.
///
/// Returns `None` if the set is empty.
///
/// This can be used to calculate the `nfds` parameter of the [`select`] function.
///
///
/// [`select`]: fn.select.html
pub fn highest(&mut self) -> Option<RawFd> {
for i in (0..FD_SETSIZE).rev() {
let i = i as RawFd;
if unsafe { libc::FD_ISSET(i, self as *mut _ as *mut libc::fd_set) } {
return Some(i);
}
}
None
}
}
/// Monitors file descriptors for readiness
///
/// Returns the total number of ready file descriptors in all sets. The sets are changed so that all
/// file descriptors that are ready for the given operation are set.
///
/// When this function returns, `timeout` has an implementation-defined value.
///
/// # Parameters
///
/// * `nfds`: The highest file descriptor set in any of the passed `FdSet`s, plus 1. If `None`, this
///   is calculated automatically by calling [`FdSet::highest`] on all descriptor sets and adding 1
///   to the maximum of that.
/// * `readfds`: File descriptors to check for being ready to read.
/// * `writefds`: File descriptors to check for being ready to write.
/// * `errorfds`: File descriptors to check for pending error conditions.
/// * `timeout`: Maximum time to wait for descriptors to become ready (`None` to block
///   indefinitely).
///
/// # References
///
/// [select(2)](http://pubs.opengroup.org/onlinepubs/9699919799/functions/select.html)
///
/// [`FdSet::highest`]: struct.FdSet.html#method.highest
pub fn select<'a, N, R, W, E, T>(
    nfds: N,
    readfds: R,
    writefds: W,
    errorfds: E,
    timeout: T,
) -> Result<c_int>
where
    N: Into<Option<c_int>>,
    R: Into<Option<&'a mut FdSet>>,
    W: Into<Option<&'a mut FdSet>>,
    E: Into<Option<&'a mut FdSet>>,
    T: Into<Option<&'a mut TimeVal>>,
{
    let mut readfds = readfds.into();
    let mut writefds = writefds.into();
    let mut errorfds = errorfds.into();
    let timeout = timeout.into();
    // When the caller does not supply nfds, derive it: the largest fd found in
    // any provided set (-1 for absent/empty sets), plus one.
    let nfds = nfds.into().unwrap_or_else(|| {
        readfds
            .iter_mut()
            .chain(writefds.iter_mut())
            .chain(errorfds.iter_mut())
            .map(|set| set.highest().unwrap_or(-1))
            .max()
            .unwrap_or(-1)
            + 1
    });
    // Absent sets are passed to libc::select as NULL pointers.
    let readfds = readfds
        .map(|set| set as *mut _ as *mut libc::fd_set)
        .unwrap_or(null_mut());
    let writefds = writefds
        .map(|set| set as *mut _ as *mut libc::fd_set)
        .unwrap_or(null_mut());
    let errorfds = errorfds
        .map(|set| set as *mut _ as *mut libc::fd_set)
        .unwrap_or(null_mut());
    // NULL timeout means block indefinitely, per select(2).
    let timeout = timeout
        .map(|tv| tv as *mut _ as *mut libc::timeval)
        .unwrap_or(null_mut());
    let res = unsafe { libc::select(nfds, readfds, writefds, errorfds, timeout) };
    Errno::result(res)
}
#[cfg(test)]
mod tests {
use super::*;
use std::os::unix::io::RawFd;
#[test]
fn fdset_insert() {
    let mut set = FdSet::new();
    // A freshly created set must contain no descriptors at all.
    for fd in 0..FD_SETSIZE {
        assert!(!set.contains(fd as RawFd));
    }
    // After inserting one fd, exactly that fd must be reported as present.
    set.insert(7);
    assert!(set.contains(7));
}
#[test]
fn fdset_remove() {
    let mut set = FdSet::new();
    // Sanity check: the new set starts empty.
    for fd in 0..FD_SETSIZE {
        assert!(!set.contains(fd as RawFd));
    }
    // Inserting and then removing the same fd must leave the set empty again.
    set.insert(7);
    set.remove(7);
    for fd in 0..FD_SETSIZE {
        assert!(!set.contains(fd as RawFd));
    }
}
#[test]
fn fdset_clear() {
    let mut set = FdSet::new();
    // Populate the low, middle, and high ends of the descriptor range.
    set.insert(1);
    set.insert((FD_SETSIZE / 2) as RawFd);
    set.insert((FD_SETSIZE - 1) as RawFd);
    set.clear();
    // clear() must drop every descriptor, not just those near the start.
    for fd in 0..FD_SETSIZE {
        assert!(!set.contains(fd as RawFd));
    }
}
#[test]
fn fdset_highest() |
}
| {
let mut set = FdSet::new();
assert_eq!(set.highest(), None);
set.insert(0);
assert_eq!(set.highest(), Some(0));
set.insert(90);
assert_eq!(set.highest(), Some(90));
set.remove(0);
assert_eq!(set.highest(), Some(90));
set.remove(90);
assert_eq!(set.highest(), None);
set.insert(4);
set.insert(5);
set.insert(7);
assert_eq!(set.highest(), Some(7));
} |
get_trypico2wave.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os.path
import subprocess
import uuid
from aptts import alarmpi_tts
class trypico2wave(alarmpi_tts):
def play(self, content, ramdrive='/mnt/ram/'):
if self.debug:
print "Trying pico2wave."
rval = True
p2w = self.sconfig['head']
lang =self.sconfig['lang']
if not os.path.isfile(p2w): # The executable does not exist
if self.debug:
print 'File ' + p2w + ' does not exist.'
return False
try:
tmfn = ramdrive + str(uuid.uuid4()) + self.sconfig['tail']
cmd = p2w + ' -l ' + lang + ' -w ' + tmfn + ' "' + content + '"'
if self.debug:
print cmd
print subprocess.call(cmd, shell=True)
cmd = self.sconfig['player'] + ' ' + tmfn
if self.debug:
print cmd
print subprocess.call(cmd, shell=True)
cmd = 'rm -f ' + tmfn
if self.debug:
print cmd
print subprocess.call(cmd, shell=True)
except subprocess.CalledProcessError:
rval = False
# Cleanup any ogg files created in this directory.
if self.debug:
print 'cleaning up now'
rmcmd = 'rm -f ' + ramdrive + '*' + self.sconfig['tail']
if self.debug:
print rmcmd | print subprocess.call (rmcmd, shell=True)
return rval |
|
algebraic_alternative.rs | use super::{constructor::Constructor, expression::Expression};
use crate::types::Type;
use std::collections::{HashMap, HashSet};
/// One alternative of an algebraic case expression: a constructor pattern,
/// the names its elements are bound to, and the body to evaluate on a match.
#[derive(Clone, Debug, PartialEq)]
pub struct AlgebraicAlternative {
    /// Constructor this alternative matches against.
    constructor: Constructor,
    /// Names bound to the constructor's elements inside `expression`.
    element_names: Vec<String>,
    /// Body evaluated when the constructor matches.
    expression: Expression,
}
impl AlgebraicAlternative {
/// Builds an alternative from a constructor, the names its elements are
/// bound to, and anything convertible into the body expression.
pub fn new(
    constructor: Constructor,
    element_names: Vec<String>,
    expression: impl Into<Expression>,
) -> Self {
    let expression = expression.into();
    Self {
        constructor,
        element_names,
        expression,
    }
}
pub fn constructor(&self) -> &Constructor |
/// Names bound to the constructor's elements in the body.
pub fn element_names(&self) -> &[String] {
    &self.element_names
}
/// Body expression of this alternative.
pub fn expression(&self) -> &Expression {
    &self.expression
}
/// Free variables of the body, minus the names this alternative binds.
pub(crate) fn find_variables(&self) -> HashSet<String> {
    self.expression
        .find_variables()
        .into_iter()
        .filter(|variable| !self.element_names.contains(variable))
        .collect()
}
/// Infers the body's environment after extending `variables` with the types
/// of the elements this alternative binds.
pub(crate) fn infer_environment(&self, variables: &HashMap<String, Type>) -> Self {
    let mut environment = variables.clone();
    // Bind each element name to the corresponding constructor element type.
    environment.extend(
        self.element_names
            .iter()
            .zip(self.constructor.constructor_type().elements())
            .map(|(name, type_)| (name.clone(), type_.clone())),
    );
    Self {
        constructor: self.constructor.clone(),
        element_names: self.element_names.clone(),
        expression: self.expression.infer_environment(&environment),
    }
}
/// Applies `convert` to every type in the constructor and the body,
/// leaving the bound names untouched.
pub(crate) fn convert_types(&self, convert: &impl Fn(&Type) -> Type) -> Self {
    let constructor = self.constructor.convert_types(convert);
    let expression = self.expression.convert_types(convert);
    Self {
        constructor,
        element_names: self.element_names.clone(),
        expression,
    }
}
}
| {
&self.constructor
} |
list.go | /*
Copyright (c) 2016-2017 Bitnami
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nats
import (
"fmt"
"io"
"github.com/gosuri/uitable"
"github.com/kubeless/kubeless/pkg/client/clientset/versioned"
"github.com/kubeless/kubeless/pkg/utils"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// listCmd implements "list" / "ls": it prints every NATS trigger deployed
// to Kubeless in the chosen namespace, using an out-of-cluster client.
var listCmd = &cobra.Command{
	Use:     "list FLAG",
	Aliases: []string{"ls"},
	Short:   "list all NATS triggers deployed to Kubeless",
	Long:    `list all NATS triggers deployed to Kubeless`,
	Run: func(cmd *cobra.Command, args []string) {
		// Resolve the target namespace: the explicit -n flag wins,
		// otherwise fall back to the default namespace.
		ns, err := cmd.Flags().GetString("namespace")
		if err != nil {
			logrus.Fatal(err.Error())
		}
		if ns == "" {
			ns = utils.GetDefaultNamespace()
		}
		kubelessClient, err := utils.GetKubelessClientOutCluster()
		if err != nil {
			logrus.Fatalf("Can not create out-of-cluster client: %v", err)
		}
		// Write the trigger table to the command's stdout.
		if err := doList(cmd.OutOrStdout(), kubelessClient, ns); err != nil {
			logrus.Fatal(err.Error())
		}
	},
}
// init registers the -n/--namespace flag on the list command.
func init() {
	listCmd.Flags().StringP("namespace", "n", "", "Specify namespace for the NATS trigger")
}
func | (w io.Writer, kubelessClient versioned.Interface, ns string) error {
triggersList, err := kubelessClient.KubelessV1beta1().NATSTriggers(ns).List(metav1.ListOptions{})
if err != nil {
return err
}
table := uitable.New()
table.MaxColWidth = 50
table.Wrap = true
table.AddRow("NAME", "NAMESPACE", "TOPIC", "FUNCTION SELECTOR")
for _, trigger := range triggersList.Items {
table.AddRow(trigger.Name, trigger.Namespace, trigger.Spec.Topic, metav1.FormatLabelSelector(&trigger.Spec.FunctionSelector))
}
fmt.Fprintln(w, table)
return nil
}
| doList |
authentication.rs | // Copyright (c) Microsoft. All rights reserved.
use std::cell::RefCell;
use std::sync::{Arc, Mutex};
use futures::future::{Either, IntoFuture};
use futures::{future, Future, Stream};
use hyper::service::Service;
use hyper::{Body, Request};
use log::Level;
use typed_headers::{Authorization, HeaderMapExt};
use edgelet_core::AuthId;
use edgelet_utils::log_failure;
use kube_client::{Client as KubeClient, Error as KubeClientError, TokenSource};
use crate::constants::EDGE_ORIGINAL_MODULEID;
use crate::error::Error;
use crate::KubeModuleRuntime;
/// Authenticates an incoming request against Kubernetes.
///
/// The bearer token from the request's `Authorization` header is submitted
/// to the Kubernetes TokenReview API; on a successful, authenticated review
/// the reported username is mapped back to a module identity via
/// `get_module_original_name`. Requests without a bearer token resolve to
/// `AuthId::None` rather than an error.
pub fn authenticate<T, S>(
    runtime: &KubeModuleRuntime<T, S>,
    req: &Request<Body>,
) -> impl Future<Item = AuthId, Error = Error>
where
    T: TokenSource + 'static,
    S: Service + Send + 'static,
    S::ReqBody: From<Vec<u8>>,
    S::ResBody: Stream,
    Body: From<S::ResBody>,
    S::Error: Into<KubeClientError>,
    S::Future: Send,
{
    req.headers()
        .typed_get::<Authorization>()
        .map(|auth| {
            auth.and_then(|auth| {
                auth.as_bearer().map(|token| {
                    // Cloned handles are moved into the continuation below.
                    let client_copy = runtime.client();
                    let namespace = runtime.settings().namespace().to_owned();
                    let fut = runtime
                        .client()
                        .lock()
                        .expect("Unexpected lock error")
                        .borrow_mut()
                        .token_review(runtime.settings().namespace(), token.as_str())
                        .map_err(|err| {
                            log_failure(Level::Warn, &err);
                            Error::from(err)
                        })
                        .and_then(move |token_review| {
                            // Only an authenticated review that reports a
                            // username proceeds; anything else degrades to
                            // AuthId::None instead of failing the request.
                            token_review
                                .status
                                .as_ref()
                                .filter(|status| status.authenticated.filter(|x| *x).is_some())
                                .and_then(|status| {
                                    status.user.as_ref().and_then(|user| user.username.clone())
                                })
                                .map_or(Either::A(future::ok(AuthId::None)), |name| {
                                    Either::B(get_module_original_name(
                                        &client_copy,
                                        &namespace,
                                        &name,
                                    ))
                                })
                        });
                    Either::A(fut)
                })
            })
            // Missing header or non-bearer credentials: unauthenticated, not an error.
            .unwrap_or_else(|| Either::B(future::ok(AuthId::None)))
        })
        .map_err(Error::from)
        .into_future()
        .flatten()
}
fn get_module_original_name<T, S>(
client: &Arc<Mutex<RefCell<KubeClient<T, S>>>>,
namespace: &str,
username: &str,
) -> impl Future<Item = AuthId, Error = Error>
where
T: TokenSource + 'static,
S: Service + Send + 'static,
S::ReqBody: From<Vec<u8>>,
S::ResBody: Stream,
Body: From<S::ResBody>,
S::Error: Into<KubeClientError>,
S::Future: Send,
| {
match username.split(':').last() {
Some(name) => Either::A({
let name = name.to_owned();
client
.lock()
.expect("Unexpected lock error")
.borrow_mut()
.get_service_account(namespace, &name)
.map_err(|err| {
log_failure(Level::Warn, &err);
Error::from(err)
})
.map(|service_account| {
let module_name = service_account
.metadata
.as_ref()
.and_then(|metadata| metadata.annotations.as_ref())
.and_then(|annotations| annotations.get(EDGE_ORIGINAL_MODULEID).cloned())
.unwrap_or(name);
AuthId::Value(module_name.into())
})
}),
None => Either::B(future::ok(AuthId::None)),
}
} |
|
01000_add_counties_subcounties_courts_prisons_offences.py | # Generated by Django 2.0.1 on 2018-01-28 19:30
from django.db import migrations
def add_initial_data(apps, schema_editor):
County = apps.get_model('petitions', 'County')
Court = apps.get_model('petitions', 'Court')
SubCounty = apps.get_model('petitions', 'SubCounty')
Prison = apps.get_model('petitions', 'Prison')
Offence = apps.get_model('petitions', 'Offence')
baringo = County.objects.create(name='BARINGO')
SubCounty.objects.create(name='BARINGO EAST', county=baringo)
SubCounty.objects.create(name='BARINGO WEST', county=baringo)
SubCounty.objects.create(name='BARINGO CENTRAL', county=baringo)
SubCounty.objects.create(name='MOCHONGOI', county=baringo)
SubCounty.objects.create(name='MOGOTIO', county=baringo)
SubCounty.objects.create(name='ELDAMA RAVINE', county=baringo)
bomet = County.objects.create(name='BOMET')
SubCounty.objects.create(name='SOTIK', county=bomet)
SubCounty.objects.create(name='CHEPALUNGU', county=bomet)
SubCounty.objects.create(name='BOMET EAST', county=bomet)
SubCounty.objects.create(name='BOMET CENTRAL', county=bomet)
SubCounty.objects.create(name='KONOIN', county=bomet)
bungoma = County.objects.create(name='BUNGOMA')
SubCounty.objects.create(name='MT ELGON', county=bungoma)
SubCounty.objects.create(name='SIRISIA', county=bungoma)
SubCounty.objects.create(name='KABUCHIA', county=bungoma)
SubCounty.objects.create(name='BUMULA', county=bungoma)
SubCounty.objects.create(name='KANDUNYI', county=bungoma)
SubCounty.objects.create(name='WEBUYE', county=bungoma)
SubCounty.objects.create(name='BOKOLI', county=bungoma)
SubCounty.objects.create(name='KIMILILI', county=bungoma)
SubCounty.objects.create(name='TONGAREN', county=bungoma)
busia = County.objects.create(name='BUSIA')
SubCounty.objects.create(name='TESO NORTH', county=busia)
SubCounty.objects.create(name='TESO SOUTH', county=busia)
SubCounty.objects.create(name='NAMBALE', county=busia)
SubCounty.objects.create(name='MATAYOS', county=busia)
SubCounty.objects.create(name='BUTULA', county=busia)
SubCounty.objects.create(name='FUNYULA', county=busia)
SubCounty.objects.create(name='BUDALANGI', county=busia)
elgeiyomarakwet = County.objects.create(name='ELGEYO MARAKWET')
SubCounty.objects.create(name='MARAKWET EAST', county=elgeiyomarakwet)
SubCounty.objects.create(name='MARAKWET WEST', county=elgeiyomarakwet)
SubCounty.objects.create(name='KEIYO EAST', county=elgeiyomarakwet)
SubCounty.objects.create(name='KEIYO SOUTH', county=elgeiyomarakwet)
embu = County.objects.create(name='EMBU')
SubCounty.objects.create(name='MANYATTA', county=embu)
SubCounty.objects.create(name='RUNYENJES', county=embu)
SubCounty.objects.create(name='GACHOKA', county=embu)
SubCounty.objects.create(name='SIAKAGO', county=embu)
garissa = County.objects.create(name='GARISSA')
SubCounty.objects.create(name='TAVEDUJIS', county=garissa)
SubCounty.objects.create(name='BALAMBALA', county=garissa)
SubCounty.objects.create(name='LAGDERA', county=garissa)
SubCounty.objects.create(name='DADAAB', county=garissa)
SubCounty.objects.create(name='FAFI', county=garissa)
SubCounty.objects.create(name='IJARA', county=garissa)
homabay = County.objects.create(name='HOMA BAY')
SubCounty.objects.create(name='KASIPUL', county=homabay)
SubCounty.objects.create(name='KABONDO', county=homabay)
SubCounty.objects.create(name='KARACHUONYO', county=homabay)
SubCounty.objects.create(name='RANGWE', county=homabay)
SubCounty.objects.create(name='HOMABAY TOWN', county=homabay)
SubCounty.objects.create(name='NDHIWA', county=homabay)
SubCounty.objects.create(name='MBITA', county=homabay)
SubCounty.objects.create(name='GWASSI', county=homabay)
isiolo = County.objects.create(name='ISIOLO')
SubCounty.objects.create(name='ISIOLO NORTH', county=isiolo)
SubCounty.objects.create(name='ISIOLO SOUTH', county=isiolo)
kajiado = County.objects.create(name='KAJIADO')
SubCounty.objects.create(name='KAJIADO CENTRAL', county=kajiado)
SubCounty.objects.create(name='KAJIADO NORTH', county=kajiado)
SubCounty.objects.create(name='KAJIADO SOUTH', county=kajiado)
kakamega = County.objects.create(name='KAKAMEGA')
SubCounty.objects.create(name='LUGARI', county=kakamega)
SubCounty.objects.create(name='LIKUYANI', county=kakamega)
SubCounty.objects.create(name='MALAVA', county=kakamega)
SubCounty.objects.create(name='LURAMBI', county=kakamega)
SubCounty.objects.create(name='MAKHOLO', county=kakamega)
SubCounty.objects.create(name='MUMIAS', county=kakamega)
SubCounty.objects.create(name='MUMIAS EAST', county=kakamega)
SubCounty.objects.create(name='MATUNGU', county=kakamega)
SubCounty.objects.create(name='BUTERE', county=kakamega)
SubCounty.objects.create(name='KHWISERO', county=kakamega)
SubCounty.objects.create(name='SHINYALU', county=kakamega)
SubCounty.objects.create(name='IKOLOMANI', county=kakamega)
kericho = County.objects.create(name='KERICHO')
SubCounty.objects.create(name='AINAMOI', county=kericho)
SubCounty.objects.create(name='BELGUT', county=kericho)
SubCounty.objects.create(name='KIPKELION', county=kericho)
kiambu = County.objects.create(name='KIAMBU')
SubCounty.objects.create(name='GATUNDU SOUTH', county=kiambu)
SubCounty.objects.create(name='GATUNDU NORTH', county=kiambu)
SubCounty.objects.create(name='JUJA', county=kiambu)
SubCounty.objects.create(name='THIKA TOWN', county=kiambu)
SubCounty.objects.create(name='RUIRU GITHUNGURI', county=kiambu)
SubCounty.objects.create(name='KIAMBU', county=kiambu)
SubCounty.objects.create(name='KIAMBAA', county=kiambu)
SubCounty.objects.create(name='KABETE', county=kiambu)
SubCounty.objects.create(name='KIKUYU', county=kiambu)
SubCounty.objects.create(name='LIMURU', county=kiambu)
SubCounty.objects.create(name='LARI', county=kiambu)
kilifi = County.objects.create(name='KILIFI')
SubCounty.objects.create(name='KILIFI NORTH', county=kilifi)
SubCounty.objects.create(name='KILIFI SOUTH', county=kilifi)
SubCounty.objects.create(name='KALOLENI', county=kilifi)
SubCounty.objects.create(name='RABAI', county=kilifi)
SubCounty.objects.create(name='GANZE', county=kilifi)
SubCounty.objects.create(name='MALINDI', county=kilifi)
SubCounty.objects.create(name='MAGARINI', county=kilifi)
kirinyaga = County.objects.create(name='KIRINYAGA')
SubCounty.objects.create(name='MWEA', county=kirinyaga)
SubCounty.objects.create(name='GICHUGU', county=kirinyaga)
SubCounty.objects.create(name='NDIA', county=kirinyaga)
SubCounty.objects.create(name='KIRINYAGA CENTRAL', county=kirinyaga)
kisii = County.objects.create(name='KISII')
SubCounty.objects.create(name='BONCHARI', county=kisii)
SubCounty.objects.create(name='SOUTH MUGIRANGO', county=kisii)
SubCounty.objects.create(name='BOMACHOGE', county=kisii)
SubCounty.objects.create(name='BOBASI', county=kisii)
SubCounty.objects.create(name='GUCHA', county=kisii)
SubCounty.objects.create(name='NYARIBARI MASABA', county=kisii)
SubCounty.objects.create(name='NYARIBARI CHACHE', county=kisii)
SubCounty.objects.create(name='MATRANI', county=kisii)
SubCounty.objects.create(name='MOSOCHO', county=kisii)
kisumu = County.objects.create(name='KISUMU')
SubCounty.objects.create(name='KISUMU EAST', county=kisumu)
SubCounty.objects.create(name='KISUMU WEST', county=kisumu)
SubCounty.objects.create(name='KISUMU CENTRAL', county=kisumu)
SubCounty.objects.create(name='SEME', county=kisumu)
SubCounty.objects.create(name='NYANDO', county=kisumu)
SubCounty.objects.create(name='MUHORONI', county=kisumu)
SubCounty.objects.create(name='NYAKACH', county=kisumu)
kitui = County.objects.create(name='KITUI')
SubCounty.objects.create(name='MWINGI NORTH', county=kitui)
SubCounty.objects.create(name='MWINGI CENTRAL', county=kitui)
SubCounty.objects.create(name='MWINGI SOUTH', county=kitui)
SubCounty.objects.create(name='KITUI WEST', county=kitui)
SubCounty.objects.create(name='KITUI RURAL', county=kitui)
SubCounty.objects.create(name='KITUI TOWN', county=kitui)
SubCounty.objects.create(name='MUTITU', county=kitui)
SubCounty.objects.create(name='KITUI SOUTH', county=kitui)
kwale = County.objects.create(name='KWALE')
SubCounty.objects.create(name='MSAMBWENI', county=kwale)
SubCounty.objects.create(name='LUNGA LUNGA', county=kwale)
SubCounty.objects.create(name='MATUGA', county=kwale)
SubCounty.objects.create(name='KINANGO', county=kwale)
laikipia = County.objects.create(name='LAIKIPIA')
SubCounty.objects.create(name='LAIKIPIA WEST', county=laikipia)
SubCounty.objects.create(name='LAIKIPIA EAST', county=laikipia)
SubCounty.objects.create(name='LAIKIPIA NORTH', county=laikipia)
lamu = County.objects.create(name='LAMU')
SubCounty.objects.create(name='LAMU EAST', county=lamu)
SubCounty.objects.create(name='LAMU WEST', county=lamu)
machakos = County.objects.create(name='MACHAKOS')
SubCounty.objects.create(name='MASINGA', county=machakos)
SubCounty.objects.create(name='YATTA', county=machakos)
SubCounty.objects.create(name='KANGUNDO', county=machakos)
SubCounty.objects.create(name='MATUNGULU', county=machakos)
SubCounty.objects.create(name='KATHIANI', county=machakos)
SubCounty.objects.create(name='MAVOKO', county=machakos)
SubCounty.objects.create(name='MACHAKOS TOWN', county=machakos)
SubCounty.objects.create(name='MWALA', county=machakos)
makueni = County.objects.create(name='MAKUENI')
SubCounty.objects.create(name='MBOONI', county=makueni)
SubCounty.objects.create(name='KILOME', county=makueni)
SubCounty.objects.create(name='KAITI', county=makueni)
SubCounty.objects.create(name='MAKUENI', county=makueni)
SubCounty.objects.create(name='KIBWEZI WEST', county=makueni)
SubCounty.objects.create(name='KIBWEZI EAST', county=makueni)
mandera = County.objects.create(name='MANDERA')
SubCounty.objects.create(name='MANDERA WEST', county=mandera)
SubCounty.objects.create(name='BANISA', county=mandera)
SubCounty.objects.create(name='MANDERA NORTH', county=mandera)
SubCounty.objects.create(name='MANDERA EAST', county=mandera)
SubCounty.objects.create(name='LAFEY', county=mandera)
marsabit = County.objects.create(name='MARSABIT')
SubCounty.objects.create(name='MOYALE', county=marsabit)
SubCounty.objects.create(name='NORTH HORR', county=marsabit)
SubCounty.objects.create(name='SAKU', county=marsabit)
SubCounty.objects.create(name='LAISAMIS', county=marsabit)
meru = County.objects.create(name='MERU')
SubCounty.objects.create(name='IGEMBE SOUTH', county=meru)
SubCounty.objects.create(name='IGEMBE CENTRAL', county=meru)
SubCounty.objects.create(name='IGEMBE NORTH', county=meru)
SubCounty.objects.create(name='TIGANIA WEST', county=meru)
SubCounty.objects.create(name='TIGANIA EAST', county=meru)
SubCounty.objects.create(name='NORTH IMENTI', county=meru)
SubCounty.objects.create(name='BUURI', county=meru)
SubCounty.objects.create(name='CENTRAL IMENTI', county=meru)
SubCounty.objects.create(name='SOUTH IMENTI', county=meru)
migori = County.objects.create(name='MIGORI')
SubCounty.objects.create(name='RONGO', county=migori)
SubCounty.objects.create(name='AWENDO', county=migori)
SubCounty.objects.create(name='MIGORI EAST', county=migori)
SubCounty.objects.create(name='MIGORI WEST', county=migori)
SubCounty.objects.create(name='URIRI', county=migori)
SubCounty.objects.create(name='NYATIKE', county=migori)
SubCounty.objects.create(name='KURIA EAST', county=migori)
SubCounty.objects.create(name='KURIA WEST', county=migori)
mombasa = County.objects.create(name='MOMBASA')
SubCounty.objects.create(name='CHANGAMWE', county=mombasa)
SubCounty.objects.create(name='JOMVU', county=mombasa)
SubCounty.objects.create(name='KISAUNI', county=mombasa)
SubCounty.objects.create(name='NYALI', county=mombasa)
SubCounty.objects.create(name='LIKONI', county=mombasa)
SubCounty.objects.create(name='MVITA', county=mombasa)
muranga = County.objects.create(name='MURANGA')
SubCounty.objects.create(name='KANGEMA', county=muranga)
SubCounty.objects.create(name='MATHIOYA', county=muranga)
SubCounty.objects.create(name='KIHARU', county=muranga)
SubCounty.objects.create(name='KIGUMO', county=muranga)
SubCounty.objects.create(name='MARAGWA', county=muranga)
SubCounty.objects.create(name='KANDARA', county=muranga)
SubCounty.objects.create(name='GATANGA', county=muranga)
nairobi = County.objects.create(name='NAIROBI')
SubCounty.objects.create(name='WESTLANDS', county=nairobi)
SubCounty.objects.create(name='PARKLANDS', county=nairobi)
SubCounty.objects.create(name='DAGORETTI', county=nairobi)
SubCounty.objects.create(name='KAREN / LANGATA', county=nairobi)
SubCounty.objects.create(name='KIBIRA', county=nairobi)
SubCounty.objects.create(name='ROYSAMBU', county=nairobi)
SubCounty.objects.create(name='KASARANI', county=nairobi)
SubCounty.objects.create(name='RUARAKA', county=nairobi)
SubCounty.objects.create(name='KARIOBANGI', county=nairobi)
SubCounty.objects.create(name='KAYOLE', county=nairobi)
SubCounty.objects.create(name='EMBAKASI', county=nairobi)
SubCounty.objects.create(name='MIHANG’O', county=nairobi)
SubCounty.objects.create(name='NAIROBI WEST', county=nairobi)
SubCounty.objects.create(name='MAKADARA', county=nairobi)
SubCounty.objects.create(name='KAMUKUNJI', county=nairobi)
SubCounty.objects.create(name='STAREHE', county=nairobi)
SubCounty.objects.create(name='MATHARE', county=nairobi)
nakuru = County.objects.create(name='NAKURU')
SubCounty.objects.create(name='MOLO', county=nakuru)
SubCounty.objects.create(name='NJORO', county=nakuru)
SubCounty.objects.create(name='NAIVASHA', county=nakuru)
SubCounty.objects.create(name='GILGIL', county=nakuru)
SubCounty.objects.create(name='KURESOI SOUTH', county=nakuru)
SubCounty.objects.create(name='KURESOI NORTH', county=nakuru)
SubCounty.objects.create(name='SUBUKIA', county=nakuru)
SubCounty.objects.create(name='RONGAI', county=nakuru)
SubCounty.objects.create(name='BAHATI', county=nakuru)
SubCounty.objects.create(name='NAKURU TOWN WEST', county=nakuru)
SubCounty.objects.create(name='NAKURU TOWN EAST', county=nakuru)
nandi = County.objects.create(name='NANDI')
SubCounty.objects.create(name='TINDERET', county=nandi)
SubCounty.objects.create(name='ALDAI', county=nandi)
SubCounty.objects.create(name='NANDI HILLS', county=nandi)
SubCounty.objects.create(name='EMGWEN NORTH', county=nandi)
SubCounty.objects.create(name='EMGWEN SOUTH', county=nandi)
SubCounty.objects.create(name='MOSOP', county=nandi)
narok = County.objects.create(name='NAROK')
SubCounty.objects.create(name='KILGORIS', county=narok)
SubCounty.objects.create(name='EMURUA DIKIRR', county=narok)
SubCounty.objects.create(name='NAROK NORTH', county=narok)
SubCounty.objects.create(name='KAJIADO EAST', county=narok)
SubCounty.objects.create(name='KAJIADO WEST', county=narok)
nyamira = County.objects.create(name='NYAMIRA')
SubCounty.objects.create(name='KITUTU MASABA', county=nyamira)
SubCounty.objects.create(name='NORTH MUGIRANGO', county=nyamira)
SubCounty.objects.create(name='WEST MUGIRANGO', county=nyamira)
nyandarua = County.objects.create(name='NYANDARUA')
SubCounty.objects.create(name='KINANGOP', county=nyandarua)
SubCounty.objects.create(name='KIPIPIRI', county=nyandarua)
SubCounty.objects.create(name='OL-KALOU', county=nyandarua)
SubCounty.objects.create(name='OL-JOROK', county=nyandarua)
SubCounty.objects.create(name='NDARAGWA', county=nyandarua)
nyeri = County.objects.create(name='NYERI')
SubCounty.objects.create(name='TETU', county=nyeri)
SubCounty.objects.create(name='KIENI', county=nyeri)
SubCounty.objects.create(name='MATHIRA', county=nyeri)
SubCounty.objects.create(name='OTHAYA', county=nyeri)
SubCounty.objects.create(name='MUKUWE-INI', county=nyeri)
SubCounty.objects.create(name='NYERI TOWN', county=nyeri)
samburu = County.objects.create(name='SAMBURU')
SubCounty.objects.create(name='SAMBURU WEST', county=samburu)
SubCounty.objects.create(name='SAMBURU NORTH', county=samburu)
SubCounty.objects.create(name='SAMBURU EAST', county=samburu)
siaya = County.objects.create(name='SIAYA')
SubCounty.objects.create(name='UGENYA', county=siaya)
SubCounty.objects.create(name='UGUNJA', county=siaya)
SubCounty.objects.create(name='ALEGO USONGA', county=siaya)
SubCounty.objects.create(name='GEM', county=siaya)
SubCounty.objects.create(name='BONDO', county=siaya)
SubCounty.objects.create(name='RARIEDA', county=siaya)
taitataveta = County.objects.create(name='TAITA TAVETA')
SubCounty.objects.create(name='TAVETA', county=taitataveta)
SubCounty.objects.create(name='WUNDANYI', county=taitataveta)
SubCounty.objects.create(name='MWATATE', county=taitataveta)
SubCounty.objects.create(name='VOI', county=taitataveta)
tanariver = County.objects.create(name='TANA RIVER')
SubCounty.objects.create(name='GARSEN', county=tanariver)
SubCounty.objects.create(name='GALOLE', county=tanariver)
SubCounty.objects.create(name='BURA', county=tanariver)
tharakanithi = County.objects.create(name='THARAKA NITHI')
SubCounty.objects.create(name='NITHI', county=tharakanithi)
SubCounty.objects.create(name='MAARA', county=tharakanithi)
SubCounty.objects.create(name='THARAKA', county=tharakanithi)
transnzoia = County.objects.create(name='TRANS NZOIA')
SubCounty.objects.create(name='KWANZA', county=transnzoia)
SubCounty.objects.create(name='ENDEBESS', county=transnzoia)
SubCounty.objects.create(name='SABOTI', county=transnzoia)
SubCounty.objects.create(name='KIMININI', county=transnzoia)
SubCounty.objects.create(name='CHERENGANYI', county=transnzoia)
turkana = County.objects.create(name='TURKANA')
SubCounty.objects.create(name='TURKANA NORTH', county=turkana)
SubCounty.objects.create(name='TURKANA WEST', county=turkana)
SubCounty.objects.create(name='TURKANA CENTRAL', county=turkana)
SubCounty.objects.create(name='LOIMA', county=turkana)
SubCounty.objects.create(name='TURKANA SOUTH', county=turkana)
SubCounty.objects.create(name='TURKANA EAST', county=turkana)
uasingishu = County.objects.create(name='UASIN GISHU')
SubCounty.objects.create(name='ELDORET EAST', county=uasingishu)
SubCounty.objects.create(name='ELDORET NORT', county=uasingishu)
SubCounty.objects.create(name='ELDORET SOUTH', county=uasingishu)
vihiga = County.objects.create(name='VIHIGA')
SubCounty.objects.create(name='VIHIGA', county=vihiga)
SubCounty.objects.create(name='SABATIA', county=vihiga)
SubCounty.objects.create(name='HAMISI', county=vihiga)
SubCounty.objects.create(name='EMUHAYA', county=vihiga)
SubCounty.objects.create(name='LUANDA', county=vihiga)
wajir = County.objects.create(name='WAJIR')
SubCounty.objects.create(name='WAJIR NORTH', county=wajir)
SubCounty.objects.create(name='WAJIR EAST', county=wajir)
SubCounty.objects.create(name='TARBAJ', county=wajir)
SubCounty.objects.create(name='WAJIR WEST', county=wajir)
SubCounty.objects.create(name='ELDAS', county=wajir)
SubCounty.objects.create(name='WAJIR SOUTH', county=wajir)
westpokot = County.objects.create(name='WEST POKOT')
SubCounty.objects.create(name='KAPENGURIA ', county=westpokot)
SubCounty.objects.create(name='SIGOR ', county=westpokot)
SubCounty.objects.create(name='KACHELIBA', county=westpokot)
SubCounty.objects.create(name='POKOT SOUTH ', county=westpokot)
#courts
instance = Court.objects.create(name='BARICHO MAGISTRATES\' COURT')
instance = Court.objects.create(name='BOMET LAW COURT')
instance = Court.objects.create(name='BOMET MAGISTRATES\' COURT')
instance = Court.objects.create(name='BONDO MAGISTRATES\' COURT')
instance = Court.objects.create(name='BUNGOMA LAW COURT')
instance = Court.objects.create(name='BUSIA LAW COURT')
instance = Court.objects.create(name='BUTALI MAGISTRATES\' COURT')
instance = Court.objects.create(name='BUTERE MAGISTRATES\' COURT')
instance = Court.objects.create(name='CHILDREN’S COURT NAIROBI MAGISTRATES\' COURT')
instance = Court.objects.create(name='CHUKA LAW COURT')
instance = Court.objects.create(name='CHUKA MAGISTRATES\' COURT')
instance = Court.objects.create(name='CITY COURT MAGISTRATES\' COURT')
instance = Court.objects.create(name='ELDAMA RAVINE MAGISTRATES\' COURT')
instance = Court.objects.create(name='ELDORET LAW COURT')
instance = Court.objects.create(name='ELDORET MAGISTRATES\' COURT')
instance = Court.objects.create(name='EMBU LAW COURT')
instance = Court.objects.create(name='EMBU MAGISTRATES\' COURT')
instance = Court.objects.create(name='ENGINEER MAGISTRATES\' COURT')
instance = Court.objects.create(name='GARISSA LAW COURT')
instance = Court.objects.create(name='GARISSA MAGISTRATES\' COURT')
instance = Court.objects.create(name='GARSEN LAW COURT')
instance = Court.objects.create(name='GATUNDU MAGISTRATES\' COURT')
instance = Court.objects.create(name='GICHUGU MAGISTRATES\' COURT')
instance = Court.objects.create(name='GITHUNGURI MAGISTRATES\' COURT')
instance = Court.objects.create(name='HAMISI MAGISTRATES\' COURT')
instance = Court.objects.create(name='HOLA MAGISTRATES\' COURT')
instance = Court.objects.create(name='HOMA-BAY LAW COURT')
instance = Court.objects.create(name='HOMABAY MAGISTRATES\' COURT')
instance = Court.objects.create(name='ISIOLO MAGISTRATES\' COURT')
instance = Court.objects.create(name='ITEN MAGISTRATES\' COURT')
instance = Court.objects.create(name='KABARNET LAW COURT')
instance = Court.objects.create(name='KABARNET MAGISTRATES\' COURT')
instance = Court.objects.create(name='KABARNET MAGISTRATES\' COURT')
instance = Court.objects.create(name='KADHI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KAJIADO LAW COURT')
instance = Court.objects.create(name='KAJIADO MAGISTRATES\' COURT')
instance = Court.objects.create(name='KAKAMEGA LAW COURT')
instance = Court.objects.create(name='KAKAMEGA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KALOLENI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KANDARA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KANGEMA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KANGUNDO MAGISTRATES\' COURT')
instance = Court.objects.create(name='KAPENGURIA LAW COURT')
instance = Court.objects.create(name='KAPENGURIA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KAPSABET MAGISTRATES\' COURT')
instance = Court.objects.create(name='KARATINA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KEHANCHA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KERICHO LAW COURT')
instance = Court.objects.create(name='KERICHO MAGISTRATES\' COURT')
instance = Court.objects.create(name='KEROKA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KERUGOYA LAW COURT')
instance = Court.objects.create(name='KERUGOYA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KIAMBU LAW COURT')
instance = Court.objects.create(name='KIAMBU MAGISTRATES\' COUR')
instance = Court.objects.create(name='KIBERA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KIGUMO MAGISTRATES\' COURT')
instance = Court.objects.create(name='KIKUYU MAGISTRATES\' COURT')
instance = Court.objects.create(name='KILGORIS MAGISTRATES\' COURT')
instance = Court.objects.create(name='KILIFI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KILUNGU/NUNGUNI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KIMILILI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KISII LAW COURT')
instance = Court.objects.create(name='KISII MAGISTRATES\' COURT')
instance = Court.objects.create(name='KISUMU LAW COURT')
instance = Court.objects.create(name='KISUMU MAGISTRATES\' COURT')
instance = Court.objects.create(name='KITALE LAW COURT')
instance = Court.objects.create(name='KITALE MAGISTRATES\' COURT')
instance = Court.objects.create(name='KITHIMANI/YATTA MAGISTRATES\' COURT')
instance = Court.objects.create(name='KITUI LAW COURT')
instance = Court.objects.create(name='KITUI MAGISTRATES\' COURT')
instance = Court.objects.create(name='KWALE MAGISTRATES\' COURT')
instance = Court.objects.create(name='KYUSO MAGISTRATES\' COURT')
instance = Court.objects.create(name='LAMU MAGISTRATES\' COURT')
instance = Court.objects.create(name='LIMURU MAGISTRATES\' COURT')
instance = Court.objects.create(name='LODWAR LAW COURT')
instance = Court.objects.create(name='LODWAR MAGISTRATES\' COURT')
instance = Court.objects.create(name='MACHAKOS LAW COURT')
instance = Court.objects.create(name='MACHAKOS MAGISTRATES\' COURT')
instance = Court.objects.create(name='MAKADARA MAGISTRATES\' COURT')
instance = Court.objects.create(name='MAKINDU MAGISTRATES\' COURT')
instance = Court.objects.create(name='MAKUENI LAW COURT')
instance = Court.objects.create(name='MAKUENI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MALINDI LAW COURT')
instance = Court.objects.create(name='MALINDI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MANDERA MAGISTRATES\' COURT')
instance = Court.objects.create(name='MARALAL MAGISTRATES\' COURT')
instance = Court.objects.create(name='MARIAKANI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MARIMANTI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MARSABIT LAW COURT')
instance = Court.objects.create(name='MARSABIT MAGISTRATES\' COURT')
instance = Court.objects.create(name='MASENO MAGISTRATES\' COURT')
instance = Court.objects.create(name='MAUA MAGISTRATES\' COURT')
instance = Court.objects.create(name='MAVOKO MAGISTRATES\' COURT')
instance = Court.objects.create(name='MERU LAW COURT')
instance = Court.objects.create(name='MERU MAGISTRATES\' COURT')
instance = Court.objects.create(name='MIGORI LAW COURT')
instance = Court.objects.create(name='MIGORI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MILIMANI COMMERCIAL COURT MAGISTRATES\' COURT')
instance = Court.objects.create(name='MILIMANI LAW COURT')
instance = Court.objects.create(name='MILIMANI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MOLO MAGISTRATES\' COURT')
instance = Court.objects.create(name='MOMBASA LAW COURT')
instance = Court.objects.create(name='MOMBASA MAGISTRATES\' COURT')
instance = Court.objects.create(name='MOYALE MAGISTRATES\' COURT')
instance = Court.objects.create(name='MUKURWEINI MAGISTRATES\' COURT')
instance = Court.objects.create(name='MUMIAS MAGISTRATES\' COURT')
instance = Court.objects.create(name='MURANG’A LAW COURT')
instance = Court.objects.create(name='MURANG’A MAGISTRATES\' COURT')
instance = Court.objects.create(name='MUTOMO MAGISTRATES\' COURT')
instance = Court.objects.create(name='MWINGI MAGISTRATES\' COURT')
instance = Court.objects.create(name='NAIVASHA LAW COURT')
instance = Court.objects.create(name='NAIVASHA MAGISTRATES\' COURT')
instance = Court.objects.create(name='NAKURU LAW COURT')
instance = Court.objects.create(name='NAKURU MAGISTRATES\' COURT')
instance = Court.objects.create(name='NANYUKI LAW COURT')
instance = Court.objects.create(name='NANYUKI MAGISTRATES\' COURT')
instance = Court.objects.create(name='NAROK LAW COURT')
instance = Court.objects.create(name='NAROK MAGISTRATES\' COURT')
instance = Court.objects.create(name='NDHIWA MAGISTRATES\' COURT')
instance = Court.objects.create(name='NKUBU MAGISTRATES\' COURT')
instance = Court.objects.create(name='NYAHURURU LAW COURT')
instance = Court.objects.create(name='NYAHURURU MAGISTRATES\' COURT')
instance = Court.objects.create(name='NYAMIRA LAW COURT')
instance = Court.objects.create(name='NYAMIRA MAGISTRATES\' COURT')
instance = Court.objects.create(name='NYANDO MAGISTRATES\' COURT')
instance = Court.objects.create(name='NYERI LAW COURT')
instance = Court.objects.create(name='NYERI MAGISTRATES\' COURT')
instance = Court.objects.create(name='OGEMBO MAGISTRATES\' COURT')
instance = Court.objects.create(name='OTHAYA MAGISTRATES\' COURT')
instance = Court.objects.create(name='OYUGIS MAGISTRATES\' COURT')
instance = Court.objects.create(name='RONGO MAGISTRATES\' COURT')
instance = Court.objects.create(name='RUNYENJES MAGISTRATES\' COURT')
instance = Court.objects.create(name='SHANZU MAGISTRATES\' COURT')
instance = Court.objects.create(name='SIAKAGO MAGISTRATES\' COURT')
instance = Court.objects.create(name='SIAYA LAW COURT')
instance = Court.objects.create(name='SIAYA MAGISTRATES\' COURT')
instance = Court.objects.create(name='SIRISIA MAGISTRATES\' COURT')
instance = Court.objects.create(name='SOTIK MAGISTRATES\' COURT')
instance = Court.objects.create(name='TAMU MAGISTRATES\' COURT')
instance = Court.objects.create(name='TAVETA MAGISTRATES\' COURT')
instance = Court.objects.create(name='TAWA MAGISTRATES\' COURT')
instance = Court.objects.create(name='THIKA MAGISTRATES\' COURT')
instance = Court.objects.create(name='TIGANIA MAGISTRATES\' COURT')
instance = Court.objects.create(name='UKWALA MAGISTRATES\' COURT')
instance = Court.objects.create(name='VIHIGA MAGISTRATES\' COURT')
instance = Court.objects.create(name='VOI LAW COURT')
instance = Court.objects.create(name='VOI MAGISTRATES\' COURT')
instance = Court.objects.create(name='WAJIR MAGISTRATES\' COURT')
instance = Court.objects.create(name='WANGURU MAGISTRATES\' COURT')
instance = Court.objects.create(name='WINAM MAGISTRATES\' COURT')
instance = Court.objects.create(name='WUNDANYI MAGISTRATES\' COURT')
#prisons
instance = Prison.objects.create(name='ATHI RIVER PRISON')
instance = Prison.objects.create(name='BOMET PRISON')
instance = Prison.objects.create(name='BUNGOMA')
instance = Prison.objects.create(name='BUSIA MAIN')
instance = Prison.objects.create(name='CHUKA')
instance = Prison.objects.create(name='ELDAMA RAVINE')
instance = Prison.objects.create(name='ELDORET MAIN PRISON')
instance = Prison.objects.create(name='ELDORET WOMEN PRISON')
instance = Prison.objects.create(name='EMBU MAIN')
instance = Prison.objects.create(name='EMBU WOMEN')
instance = Prison.objects.create(name='GARISSA MAIN')
instance = Prison.objects.create(name='GARISSA MEDIUM')
instance = Prison.objects.create(name='HINDI')
instance = Prison.objects.create(name='HOLA')
instance = Prison.objects.create(name='HOMABAY')
instance = Prison.objects.create(name='ISIOLO')
instance = Prison.objects.create(name='JAMUHURI PRISON')
instance = Prison.objects.create(name='KABARNET')
instance = Prison.objects.create(name='KAJIADO MAIN PRISON')
instance = Prison.objects.create(name='KAKAMEGA MAIN')
instance = Prison.objects.create(name='KAKAMEGA WOMEN')
instance = Prison.objects.create(name='KALOLENI')
instance = Prison.objects.create(name='KAMAE GIRLS PRISON')
instance = Prison.objects.create(name='KAMITI MAXIMUM SECURITY PRISON')
instance = Prison.objects.create(name='KAMITI MEDIUM PRISON')
instance = Prison.objects.create(name='KAMITI YCTC')
instance = Prison.objects.create(name='KANGETA')
instance = Prison.objects.create(name='KAPENGURIA PRISON')
instance = Prison.objects.create(name='KAPSABET')
instance = Prison.objects.create(name='KEHANCHA')
instance = Prison.objects.create(name='KERICHO MAIN')
instance = Prison.objects.create(name='KERICHO MEDIUM')
instance = Prison.objects.create(name='KERICHO WOMEN')
instance = Prison.objects.create(name='KERUGOYA PRISON')
instance = Prison.objects.create(name='KIAMBU PRISON')
instance = Prison.objects.create(name='KIBOS MAIN')
instance = Prison.objects.create(name='KIBOS MEDIUM')
instance = Prison.objects.create(name='KILGORIS')
instance = Prison.objects.create(name='KILIFI')
instance = Prison.objects.create(name='KING\'ORANI')
instance = Prison.objects.create(name='KISII MAIN')
instance = Prison.objects.create(name='KISII WOMEN')
instance = Prison.objects.create(name='KISUMU MAIN')
instance = Prison.objects.create(name='KISUMU MEDIUM')
instance = Prison.objects.create(name='KISUMU WOMEN')
instance = Prison.objects.create(name='KITALE ANNEXE')
instance = Prison.objects.create(name='KITALE MAIN')
instance = Prison.objects.create(name='KITALE MEDIUM')
instance = Prison.objects.create(name='KITALE WOMEN')
instance = Prison.objects.create(name='KITUI MAIN')
instance = Prison.objects.create(name='KITUI WOMEN')
instance = Prison.objects.create(name='KWALE MAIN')
instance = Prison.objects.create(name='KWALE WOMEN')
instance = Prison.objects.create(name='LANGATA WOMEN MAXIMUM PRISON')
instance = Prison.objects.create(name='LODWAR')
instance = Prison.objects.create(name='LOITOKTOK PRISON')
instance = Prison.objects.create(name='MACHAKOS MAIN')
instance = Prison.objects.create(name='MACHAKOS WOMEN')
instance = Prison.objects.create(name='MAKUENI REMAND')
instance = Prison.objects.create(name='MALINDI MAIN')
instance = Prison.objects.create(name='MALINDI WOMEN')
instance = Prison.objects.create(name='MANDERA')
instance = Prison.objects.create(name='MANYANI')
instance = Prison.objects.create(name='MARA')
instance = Prison.objects.create(name='MARALAL')
instance = Prison.objects.create(name='MARANJAU PRISON')
instance = Prison.objects.create(name='MARIMATI')
instance = Prison.objects.create(name='MARSABIT')
instance = Prison.objects.create(name='MAUKENI MAIN')
instance = Prison.objects.create(name='MERU MAIN')
instance = Prison.objects.create(name='MERU WOMEN')
instance = Prison.objects.create(name='MIGORI MAIN')
instance = Prison.objects.create(name='MIGORI WOMEN')
instance = Prison.objects.create(name='MOYALE')
instance = Prison.objects.create(name='MURANGA MAIN PRSION')
instance = Prison.objects.create(name='MURANGA WOMEN PRISON')
instance = Prison.objects.create(name='MUTOMO')
instance = Prison.objects.create(name='MWEA MAIN PRISON')
instance = Prison.objects.create(name='MWINGI')
instance = Prison.objects.create(name='NAIROBI MEDIUM PRISON')
instance = Prison.objects.create(name='NAIROBI REMAND AND ALLOCATION MAXIMUM PRISON')
instance = Prison.objects.create(name='NAIROBI WEST PRISON')
instance = Prison.objects.create(name='NAIVASHA MAXIMUM PRISON')
instance = Prison.objects.create(name='NAIVASHA MEDIUM PRISON')
instance = Prison.objects.create(name='NAIVASHA WOMEN PRISON')
instance = Prison.objects.create(name='NAKURU MAIN PRISON')
instance = Prison.objects.create(name='NAKURU WOMEN PRISON')
instance = Prison.objects.create(name='NANYUKI')
instance = Prison.objects.create(name='NAROK')
instance = Prison.objects.create(name='NGERIA FARM')
instance = Prison.objects.create(name='NYAMIRA')
instance = Prison.objects.create(name='NYANDARUA MAIN PRISON')
instance = Prison.objects.create(name='NYERI MAIN MAXIMUM PRISON')
instance = Prison.objects.create(name='NYERI MEDIUM PRISON')
instance = Prison.objects.create(name='NYERI WOMEN PRISON')
instance = Prison.objects.create(name='RACHUONYO')
instance = Prison.objects.create(name='RC EASTERN')
instance = Prison.objects.create(name='RUIRU PRISON')
instance = Prison.objects.create(name='RUMURUTI')
instance = Prison.objects.create(name='SHIKUSA B.I')
instance = Prison.objects.create(name='SHIKUSA FARM')
instance = Prison.objects.create(name='SHIMO B.I')
instance = Prison.objects.create(name='SHIMO MAIN')
instance = Prison.objects.create(name='SHIMO MEDIUM')
instance = Prison.objects.create(name='SHIMO WOMEN')
instance = Prison.objects.create(name='SIAYA')
instance = Prison.objects.create(name='SOTIK')
instance = Prison.objects.create(name='T/FALL WOMEN PRISON')
instance = Prison.objects.create(name='T/FALLS MAIN PRISON')
instance = Prison.objects.create(name='TAMBACH')
instance = Prison.objects.create(name='TAVETA')
instance = Prison.objects.create(name='THIKA MAIN PRISON')
instance = Prison.objects.create(name='THIKA WOMEN PRISON')
instance = Prison.objects.create(name='URUKU')
instance = Prison.objects.create(name='VIHIGA')
instance = Prison.objects.create(name='VOI')
instance = Prison.objects.create(name='WAJIR')
instance = Prison.objects.create(name='WUNDANYI')
instance = Prison.objects.create(name='YATTA')
#add few offences
instance = Offence.objects.create(name='Assault')
instance = Offence.objects.create(name='Handling of stolen goods')
instance = Offence.objects.create(name='Grevious harm')
instance = Offence.objects.create(name='Attempted defilement')
instance = Offence.objects.create(name='Robbery with violence contrary to section 296(2) of the Penal Code')
instance = Offence.objects.create(name='Murder')
instance = Offence.objects.create(name='Robbery')
instance = Offence.objects.create(name='Manslaughter')
instance = Offence.objects.create(name='Defilement')
instance = Offence.objects.create(name='Rape')
instance = Offence.objects.create(name='Attempted Rape')
instance = Offence.objects.create(name='Attempted Robbery With Violence')
class Migration(migrations.Migration):
dependen | cies = [
('petitions', '0001_initial'),
]
operations = [
migrations.RunPython(add_initial_data),
]
|
|
uri.ts | import { fetch, resolveURI } from '../utils';
export default class URI {
async getMetadata(uri: string) {
const { uri: resolvedURI, isOnChain } = resolveURI(uri);
if (isOnChain) {
return resolvedURI;
}
// check if resolvedURI is an image, if it is return the url
const isImage = await isImageURI(resolvedURI);
if (isImage) {
return { image: resolvedURI };
}
// if resolvedURI is not an image, try retrieve the data. | const response = await fetch(resolvedURI);
return await response?.data;
}
}
function isImageURI(url: string) {
return new Promise(resolve => {
fetch({ url, method: 'HEAD' })
.then(result => {
if (result.status === 200) {
// retrieve content type header to check if content is image
const contentType = result.headers['content-type'];
resolve(contentType?.startsWith('image/'));
} else {
resolve(false);
}
})
.catch(error => {
// if error is not cors related then fail
if (typeof error.response !== 'undefined') {
// in case of cors, use image api to validate if given url is an actual image
resolve(false);
return;
}
if (!globalThis.hasOwnProperty('Image')) {
// fail in NodeJS, since the error is not cors but any other network issue
resolve(false);
return;
}
const img = new Image();
img.onload = () => {
resolve(true);
};
img.onerror = () => {
resolve(false);
};
img.src = url;
});
});
} | |
segmentation_mask.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import pycocotools.mask as mask_utils
from pycocotools import mask as maskUtils
import numpy as np
# from maskrcnn_benchmark.utils.miscellaneous
# transpose
FLIP_LEFT_RIGHT = 0
FLIP_TOP_BOTTOM = 1
class Mask(object):
"""
This class is unfinished and not meant for use yet
It is supposed to contain the mask for an object as
a 2d tensor
"""
def __init__(self, masks, size, mode):
self.masks = masks
self.size = size
self.mode = mode
def transpose(self, method):
if method not in (FLIP_LEFT_RIGHT, FLIP_TOP_BOTTOM):
raise NotImplementedError(
"Only FLIP_LEFT_RIGHT and FLIP_TOP_BOTTOM implemented"
)
width, height = self.size
if method == FLIP_LEFT_RIGHT:
dim = width
idx = 2
elif method == FLIP_TOP_BOTTOM:
dim = height
idx = 1
flip_idx = list(range(dim)[::-1])
flipped_masks = self.masks.index_select(dim, flip_idx)
return Mask(flipped_masks, self.size, self.mode)
def crop(self, box):
w, h = box[2] - box[0], box[3] - box[1]
cropped_masks = self.masks[:, box[1] : box[3], box[0] : box[2]]
return Mask(cropped_masks, size=(w, h), mode=self.mode)
def resize(self, size, *args, **kwargs):
pass
class Polygons(object):
"""
This class holds a set of polygons that represents a single instance
of an object mask. The object can be represented as a set of
polygons
"""
def __init__(self, polygons, size, mode):
# assert isinstance(polygons, list), '{}'.format(polygons)
if isinstance(polygons, list):
polygons = [torch.as_tensor(p, dtype=torch.float32) for p in polygons]
elif isinstance(polygons, Polygons):
polygons = polygons.polygons
self.polygons = polygons
self.size = size
self.mode = mode
def transpose(self, method):
if method not in (FLIP_LEFT_RIGHT, FLIP_TOP_BOTTOM):
raise NotImplementedError(
"Only FLIP_LEFT_RIGHT and FLIP_TOP_BOTTOM implemented"
)
flipped_polygons = []
width, height = self.size
if method == FLIP_LEFT_RIGHT:
dim = width
idx = 0
elif method == FLIP_TOP_BOTTOM:
dim = height
idx = 1
for poly in self.polygons:
p = poly.clone()
TO_REMOVE = 1
p[idx::2] = dim - poly[idx::2] - TO_REMOVE
flipped_polygons.append(p)
return Polygons(flipped_polygons, size=self.size, mode=self.mode)
def crop(self, box):
w, h = box[2] - box[0], box[3] - box[1]
# TODO chck if necessary
w = max(w, 1)
h = max(h, 1)
cropped_polygons = []
for poly in self.polygons:
p = poly.clone()
p[0::2] = p[0::2] - box[0] # .clamp(min=0, max=w)
p[1::2] = p[1::2] - box[1] # .clamp(min=0, max=h)
cropped_polygons.append(p)
return Polygons(cropped_polygons, size=(w, h), mode=self.mode)
def resize(self, size, *args, **kwargs):
ratios = tuple(float(s) / float(s_orig) for s, s_orig in zip(size, self.size))
if ratios[0] == ratios[1]:
ratio = ratios[0]
scaled_polys = [p * ratio for p in self.polygons]
return Polygons(scaled_polys, size, mode=self.mode)
ratio_w, ratio_h = ratios
scaled_polygons = []
for poly in self.polygons:
p = poly.clone()
p[0::2] *= ratio_w
p[1::2] *= ratio_h
scaled_polygons.append(p)
return Polygons(scaled_polygons, size=size, mode=self.mode)
def convert(self, mode):
width, height = self.size
if mode == "mask":
rles = mask_utils.frPyObjects(
[p.numpy() for p in self.polygons], height, width
)
rle = mask_utils.merge(rles)
mask = mask_utils.decode(rle)
mask = torch.from_numpy(mask)
# TODO add squeeze?
return mask
def __repr__(self):
s = self.__class__.__name__ + "("
s += "num_polygons={}, ".format(len(self.polygons))
s += "image_width={}, ".format(self.size[0])
s += "image_height={}, ".format(self.size[1])
s += "mode={})".format(self.mode)
return s
class SegmentationMask(object):
"""
This class stores the segmentations for all objects in the image
"""
def __init__(self, polygons, size, mode=None):
"""
Arguments:
polygons: a list of list of lists of numbers. The first
level of the list correspond to individual instances,
the second level to all the polygons that compose the
object, and the third level to the polygon coordinates.
"""
# print(polygons)
assert isinstance(polygons, list)
if not isinstance(polygons[0], np.ndarray):
self.polygons = [Polygons(p, size, mode) for p in polygons]
else:
self.polygons = []
self.mask = polygons
self.size = size
self.mode = mode
def decode(self, h, w):
# covnert mask object to binary mask numpy array
# RLES = []
binary_mask = np.zeros((h,w))
for segm in self.polygons:
mask = segm.convert('mask')
binary_mask = binary_mask + mask.numpy()
return binary_mask
def transpose(self, method):
|
def crop(self, box):
if isinstance(self.polygons[0], Polygons):
w, h = box[2] - box[0], box[3] - box[1]
cropped = []
for polygon in self.polygons:
cropped.append(polygon.crop(box))
return SegmentationMask(cropped, size=(w, h), mode=self.mode)
else:
cropped = []
w, h = box[2] - box[0], box[3] - box[1]
for mask in self.mask:
mask = mask[box[1]:box[3], box[0]:box[2]]
cropped.append(mask)
return SegmentationMask(cropped, size = (w,h), mode =
self.mode)
def resize(self, size, *args, **kwargs):
scaled = []
for polygon in self.polygons:
scaled.append(polygon.resize(size, *args, **kwargs))
return SegmentationMask(scaled, size=size, mode=self.mode)
def to(self, *args, **kwargs):
return self
def __getitem__(self, item):
if isinstance(item, (int, slice)):
selected_polygons = [self.polygons[item]]
else:
# advanced indexing on a single dimension
selected_polygons = []
if isinstance(item, torch.Tensor) and item.dtype == torch.uint8:
item = item.nonzero()
item = item.squeeze(1) if item.numel() > 0 else item
item = item.tolist()
for i in item:
# print(self.polygons[i])
selected_polygons.append(self.polygons[i])
return SegmentationMask(selected_polygons, size=self.size, mode=self.mode)
def __iter__(self):
return iter(self.polygons)
def __repr__(self):
s = self.__class__.__name__ + "("
s += "num_instances={}, ".format(len(self.polygons))
s += "image_width={}, ".format(self.size[0])
s += "image_height={})".format(self.size[1])
return s
| if method not in (FLIP_LEFT_RIGHT, FLIP_TOP_BOTTOM):
raise NotImplementedError(
"Only FLIP_LEFT_RIGHT and FLIP_TOP_BOTTOM implemented"
)
flipped = []
for polygon in self.polygons:
flipped.append(polygon.transpose(method))
return SegmentationMask(flipped, size=self.size, mode=self.mode) |
value_metric.py | from .base_metric import BaseMetric
class ValueMetric(BaseMetric):
""" Base class for metrics that don't have state and just calculate a simple value """
def __init__(self, name):
super().__init__(name)
self._metric_value = None
def calculate(self, data_dict):
""" Calculate value of a metric based on supplied data """
self._metric_value = self._value_function(data_dict)
def reset(self):
""" Reset value of a metric """
pass
def | (self):
""" Return current value for the metric """
return self._metric_value
def _value_function(self, data_dict):
raise NotImplementedError
| value |
spawner.rs | use super::{
map::MAP_WIDTH, BlocksTile, CombatStats, Item, Monster, Name, Player, Position, Potion, Rect,
Renderable, Viewshed,
};
use rltk::{RandomNumberGenerator, RGB};
use specs::prelude::*;
pub fn player(ecs: &mut World, x: i32, y: i32) -> Entity {
ecs.create_entity()
.with(Position { x, y })
.with(Renderable {
glyph: rltk::to_cp437('@'),
fg: RGB::named(rltk::YELLOW),
bg: RGB::named(rltk::BLACK),
render_order: 0,
})
.with(Player {})
.with(Viewshed {
visible_tiles: Vec::new(),
range: 8,
dirty: true,
})
.with(Name {
name: "Player".to_string(),
})
.with(CombatStats {
max_hp: 30,
hp: 30,
defense: 2,
power: 5,
})
.build()
}
const MAX_MONSTERS: i32 = 4;
const MAX_ITEMS: i32 = 2;
pub fn spawn_room(ecs: &mut World, room: &Rect) {
let mut monster_spawn_points: Vec<usize> = Vec::new();
let mut item_spawn_points: Vec<usize> = Vec::new();
{
let mut rng = ecs.write_resource::<RandomNumberGenerator>();
let num_monsters = rng.roll_dice(1, MAX_MONSTERS + 1) - 1;
let num_items = rng.roll_dice(1, MAX_ITEMS + 1) - 1;
for _ in 0..num_monsters {
let mut added = false;
while !added {
let x = (room.x1 + rng.roll_dice(1, i32::abs(room.x2 - room.x1))) as usize;
let y = (room.y1 + rng.roll_dice(1, i32::abs(room.y2 - room.y1))) as usize;
let idx = (y * MAP_WIDTH) + x;
if !monster_spawn_points.contains(&idx) {
monster_spawn_points.push(idx);
added = true;
}
}
}
for _ in 0..num_items {
let mut added = false;
while !added {
let x = (room.x1 + rng.roll_dice(1, i32::abs(room.x2 - room.x1))) as usize;
let y = (room.y1 + rng.roll_dice(1, i32::abs(room.y2 - room.y1))) as usize;
let idx = (y * MAP_WIDTH) + x;
if !item_spawn_points.contains(&idx) && !monster_spawn_points.contains(&idx) {
item_spawn_points.push(idx);
added = true;
}
}
}
}
for &idx in monster_spawn_points.iter() {
let x = (idx % MAP_WIDTH) as i32;
let y = (idx / MAP_WIDTH) as i32;
random_monster(ecs, x, y);
}
for &idx in item_spawn_points.iter() {
let x = (idx % MAP_WIDTH) as i32;
let y = (idx / MAP_WIDTH) as i32;
health_potion(ecs, x, y);
}
}
pub fn random_monster(ecs: &mut World, x: i32, y: i32) {
let roll: i32;
{
let mut rng = ecs.write_resource::<RandomNumberGenerator>();
roll = rng.roll_dice(1, 2);
}
match roll {
1 => orc(ecs, x, y),
_ => goblin(ecs, x, y),
}
}
pub fn orc(ecs: &mut World, x: i32, y: i32) {
monster(ecs, x, y, rltk::to_cp437('o'), "Orc".to_string());
}
pub fn goblin(ecs: &mut World, x: i32, y: i32) {
monster(ecs, x, y, rltk::to_cp437('g'), "Goblin".to_string());
}
pub fn monster<S: ToString>(ecs: &mut World, x: i32, y: i32, glyph: rltk::FontCharType, name: S) {
ecs.create_entity()
.with(Position { x, y })
.with(Renderable {
glyph,
fg: RGB::named(rltk::RED),
bg: RGB::named(rltk::BLACK),
render_order: 1,
})
.with(Monster {})
.with(Viewshed {
visible_tiles: Vec::new(),
range: 8,
dirty: true,
}) | name: name.to_string(),
})
.with(BlocksTile {})
.with(CombatStats {
max_hp: 16,
hp: 16,
defense: 1,
power: 4,
})
.build();
}
fn health_potion(ecs: &mut World, x: i32, y: i32) {
ecs.create_entity()
.with(Position { x, y })
.with(Renderable {
glyph: rltk::to_cp437('¡'),
fg: RGB::named(rltk::MAGENTA),
bg: RGB::named(rltk::BLACK),
render_order: 2,
})
.with(Name {
name: "Health Potion".to_string(),
})
.with(Item {})
.with(Potion { heal_amount: 8 })
.build();
} | .with(Name { |
sample_1554095940395.js | function main() {
const v4 = [13.37,13.37,13.37,13.37];
const v6 = [1337];
const v7 = [v6];
const v8 = {max:Function,setPrototypeOf:10};
const v9 = {exec:v6};
let v10 = "undefined";
const v15 = Function && 13.37;
let v18 = 0;
while (v18 < 3) {
v18 = 10;
const v19 = v18 + 1;
v18 = v19;
}
const v20 = [13.37,13.37,13.37,13.37];
const v22 = [1337];
const v23 = [v22];
const v24 = {max:Function,setPrototypeOf:10};
const v25 = {exec:v22};
let v26 = "undefined";
v24[5] = 0;
let v29 = 13.37; | const v42 = [1337];
function v43(v44,v45,v46,v47) {
function v48(v49,v50,v51,v52) {
const v53 = [v50,v40];
return v49;
}
return Function;
}
const v54 = [v42];
const v55 = {max:Function,setPrototypeOf:10};
const v56 = {exec:v42};
let v57 = "undefined";
const v62 = [13.37,13.37,13.37,13.37];
const v64 = [1337];
const v65 = [v64];
const v66 = {max:Function,setPrototypeOf:10};
const v67 = {exec:v64};
let v68 = "undefined";
let v75 = 10;
const v76 = [13.37,13.37,13.37,13.37];
const v78 = [1337];
const v79 = [v78];
const v80 = {max:Function,setPrototypeOf:10};
const v81 = {exec:v78};
let v82 = "undefined";
const v84 = Object();
const v86 = Object();
const v90 = [13.37,13.37,13.37,13.37];
const v92 = [13.37];
const v93 = [v92];
const v94 = {max:Function,setPrototypeOf:1337};
let v95 = "undefined";
const v97 = Object();
const v98 = {max:Function,setPrototypeOf:1337};
let v99 = "undefined";
const v101 = {pow:v34,catch:"undefined",b:v35,flat:v20};
const v102 = Object();
}
%NeverOptimizeFunction(main);
main(); | const v32 = [13.37,13.37,13.37,13.37];
const v34 = [13.37];
const v35 = [v34];
const v40 = [13.37,13.37,13.37,13.37]; |
modulefinder.py | """Find modules used by a script, using introspection."""
from __future__ import generators
import dis
import imp
import marshal
import os
import sys
import types
import struct
if hasattr(sys.__stdout__, "newlines"):
READ_MODE = "U" # universal line endings
else:
# Python < 2.3 compatibility, no longer strictly required
READ_MODE = "r"
LOAD_CONST = dis.opmap['LOAD_CONST']
IMPORT_NAME = dis.opmap['IMPORT_NAME']
STORE_NAME = dis.opmap['STORE_NAME']
STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
STORE_OPS = STORE_NAME, STORE_GLOBAL
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
EXTENDED_ARG = dis.EXTENDED_ARG
def _unpack_opargs(code):
# enumerate() is not an option, since we sometimes process
# multiple elements on a single pass through the loop
extended_arg = 0
n = len(code)
i = 0
while i < n:
op = ord(code[i])
offset = i
i = i+1
arg = None
if op >= HAVE_ARGUMENT:
arg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
extended_arg = 0
i = i+2
if op == EXTENDED_ARG:
extended_arg = arg*65536
yield (offset, op, arg)
# Modulefinder does a good job at simulating Python's, but it can not
# handle __path__ modifications packages make at runtime. Therefore there
# is a mechanism whereby you can register extra paths in this map for a
# package, and it will be honored.
# Note this is a mapping is lists of paths.
packagePathMap = {}
# A Public interface
def AddPackagePath(packagename, path):
paths = packagePathMap.get(packagename, [])
paths.append(path)
packagePathMap[packagename] = paths
replacePackageMap = {}
# This ReplacePackage mechanism allows modulefinder to work around the
# way the _xmlplus package injects itself under the name "xml" into
# sys.modules at runtime by calling ReplacePackage("_xmlplus", "xml")
# before running ModuleFinder.
def ReplacePackage(oldname, newname):
    # Record that a package found as `oldname` must be registered under
    # `newname` instead; consulted when packages are loaded (see the
    # _xmlplus/xml explanation in the comment above).
    replacePackageMap[oldname] = newname
class Module:
    """A module discovered by ModuleFinder.

    Mimics the attributes of a real module object (__name__, __file__,
    __path__, __code__) so found modules can be handled uniformly.
    """

    def __init__(self, name, file=None, path=None):
        self.__name__ = name
        self.__file__ = file
        self.__path__ = path
        self.__code__ = None
        # Global names assigned to in the module, including names brought
        # in through starimports of Python modules.
        self.globalnames = {}
        # Starimports that could not be resolved, i.e. a starimport from a
        # non-Python module.
        self.starimports = {}

    def __repr__(self):
        pieces = ["Module(%r" % (self.__name__,)]
        for extra in (self.__file__, self.__path__):
            if extra is not None:
                pieces.append(", %r" % (extra,))
        pieces.append(")")
        return "".join(pieces)
class ModuleFinder:
    def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]):
        # path: directories to search for modules (defaults to sys.path).
        # debug: verbosity for the msg/msgin/msgout tracing helpers.
        # excludes: fully qualified names that must never be imported.
        # replace_paths: (old_prefix, new_prefix) pairs applied to
        #   co_filename by replace_paths_in_code().
        # NOTE(review): the mutable [] defaults are shared between calls;
        # harmless as long as they are only read, never mutated.
        if path is None:
            path = sys.path
        self.path = path
        self.modules = {}       # fqname -> Module for everything found
        self.badmodules = {}    # fqname -> {importer name: 1} for failures
        self.debug = debug
        self.indent = 0         # current tracing indent depth
        self.excludes = excludes
        self.replace_paths = replace_paths
        self.processed_paths = []   # Used in debugging only
    def msg(self, level, str, *args):
        # Emit a debug line when `level` is within the configured verbosity:
        # indentation spaces, the message, then each arg via repr().
        # (`str` shadows the builtin; kept for interface compatibility.)
        if level <= self.debug:
            for i in range(self.indent):
                print " ",
            print str,
            for arg in args:
                print repr(arg),
            print
    def msgin(self, *args):
        # Trace entry into a nested operation: deepen the indent, then log.
        # args[0] is the verbosity level, mirroring msg().
        level = args[0]
        if level <= self.debug:
            self.indent = self.indent + 1
            self.msg(*args)
    def msgout(self, *args):
        # Trace exit from a nested operation: shallow the indent, then log.
        # Symmetric with msgin(), so indentation stays balanced per level.
        level = args[0]
        if level <= self.debug:
            self.indent = self.indent - 1
            self.msg(*args)
    def run_script(self, pathname):
        # Analyze `pathname` as the __main__ module: load it as Python
        # source and scan its imports.
        self.msg(2, "run_script", pathname)
        with open(pathname, READ_MODE) as fp:
            stuff = ("", "r", imp.PY_SOURCE)
            self.load_module('__main__', fp, pathname, stuff)
    def load_file(self, pathname):
        # Analyze a single source file, naming the module after the file's
        # basename (without extension).
        dir, name = os.path.split(pathname)
        name, ext = os.path.splitext(name)
        with open(pathname, READ_MODE) as fp:
            stuff = (ext, "r", imp.PY_SOURCE)
            self.load_module(name, fp, pathname, stuff)
    def import_hook(self, name, caller=None, fromlist=None, level=-1):
        # Simulate __import__: resolve `name` relative to `caller` (honoring
        # the relative-import `level`), load the head package, the dotted
        # tail, and finally any submodules named in `fromlist`.
        # Mirrors __import__'s return contract: the head package when there
        # is no fromlist, None otherwise.
        self.msg(3, "import_hook", name, caller, fromlist, level)
        parent = self.determine_parent(caller, level=level)
        q, tail = self.find_head_package(parent, name)
        m = self.load_tail(q, tail)
        if not fromlist:
            return q
        if m.__path__:
            # Only packages can supply submodules via from-import.
            self.ensure_fromlist(m, fromlist)
        return None
    def determine_parent(self, caller, level=-1):
        # Determine which package an import executed in `caller` is relative
        # to.  `level` follows __import__ semantics: 0 = absolute import,
        # >= 1 = climb that many package levels, -1 = classic Python 2
        # implicit-relative lookup.  Returns a Module or None (top level).
        self.msgin(4, "determine_parent", caller, level)
        if not caller or level == 0:
            self.msgout(4, "determine_parent -> None")
            return None
        pname = caller.__name__
        if level >= 1: # relative import
            if caller.__path__:
                # A package counts as its own first level.
                level -= 1
            if level == 0:
                parent = self.modules[pname]
                assert parent is caller
                self.msgout(4, "determine_parent ->", parent)
                return parent
            if pname.count(".") < level:
                raise ImportError, "relative importpath too deep"
            # Strip `level` trailing components to reach the ancestor package.
            pname = ".".join(pname.split(".")[:-level])
            parent = self.modules[pname]
            self.msgout(4, "determine_parent ->", parent)
            return parent
        if caller.__path__:
            # Implicit relative import from inside a package: the package
            # itself is the parent.
            parent = self.modules[pname]
            assert caller is parent
            self.msgout(4, "determine_parent ->", parent)
            return parent
        if '.' in pname:
            # Plain module inside a package: parent is the enclosing package.
            i = pname.rfind('.')
            pname = pname[:i]
            parent = self.modules[pname]
            assert parent.__name__ == pname
            self.msgout(4, "determine_parent ->", parent)
            return parent
        self.msgout(4, "determine_parent -> None")
        return None
def find_head_package(self, parent, name):
self.msgin(4, "find_head_package", parent, name)
if '.' in name:
i = name.find('.')
head = name[:i]
tail = name[i+1:]
else:
head = name
tail = ""
if parent:
qname = "%s.%s" % (parent.__name__, head)
else:
qname = head
q = self.import_module(head, qname, parent)
if q:
self.msgout(4, "find_head_package ->", (q, tail))
return q, tail
if parent:
qname = head
parent = None
q = self.import_module(head, qname, parent)
if q:
self.msgout(4, "find_head_package ->", (q, tail))
return q, tail
self.msgout(4, "raise ImportError: No module named", qname)
raise ImportError, "No module named " + qname
    def load_tail(self, q, tail):
        # Import each remaining dotted component of `tail` beneath the head
        # package `q`, returning the innermost module reached.
        # Raises ImportError as soon as one component cannot be imported.
        self.msgin(4, "load_tail", q, tail)
        m = q
        while tail:
            i = tail.find('.')
            if i < 0: i = len(tail)
            head, tail = tail[:i], tail[i+1:]
            mname = "%s.%s" % (m.__name__, head)
            m = self.import_module(head, mname, m)
            if not m:
                self.msgout(4, "raise ImportError: No module named", mname)
                raise ImportError, "No module named " + mname
        self.msgout(4, "load_tail ->", m)
        return m
def ensure_fromlist(self, m, fromlist, recursive=0):
self.msg(4, "ensure_fromlist", m, fromlist, recursive)
for sub in fromlist:
if sub == "*":
if not recursive:
all = self.find_all_submodules(m)
if all:
self.ensure_fromlist(m, all, 1)
elif not hasattr(m, sub):
subname = "%s.%s" % (m.__name__, sub)
submod = self.import_module(sub, subname, m)
if not submod:
raise ImportError, "No module named " + subname
def find_all_submodules(self, m):
if not m.__path__:
return
modules = {}
# 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"].
# But we must also collect Python extension modules - although
# we cannot separate normal dlls from Python extensions.
suffixes = []
for triple in imp.get_suffixes():
suffixes.append(triple[0])
for dir in m.__path__:
try:
names = os.listdir(dir)
except os.error:
self.msg(2, "can't list directory", dir)
continue
for name in names:
mod = None
for suff in suffixes:
n = len(suff)
if name[-n:] == suff:
mod = name[:-n]
break
if mod and mod != "__init__":
modules[mod] = mod
return modules.keys()
def import_module(self, partname, fqname, parent):
self.msgin(3, "import_module", partname, fqname, parent)
try:
m = self.modules[fqname]
except KeyError:
pass
else:
self.msgout(3, "import_module ->", m)
return m
if fqname in self.badmodules:
self.msgout(3, "import_module -> None")
return None
if parent and parent.__path__ is None:
self.msgout(3, "import_module -> None")
return None
try:
fp, pathname, stuff = self.find_module(partname,
parent and parent.__path__, parent)
except ImportError:
self.msgout(3, "import_module ->", None)
return None
try:
m = self.load_module(fqname, fp, pathname, stuff)
finally:
if fp: fp.close()
if parent:
setattr(parent, partname, m)
self.msgout(3, "import_module ->", m)
return m
def load_module(self, fqname, fp, pathname, file_info):
suffix, mode, type = file_info
self.msgin(2, "load_module", fqname, fp and "fp", pathname)
if type == imp.PKG_DIRECTORY:
m = self.load_package(fqname, pathname)
self.msgout(2, "load_module ->", m)
return m
if type == imp.PY_SOURCE:
co = compile(fp.read()+'\n', pathname, 'exec')
elif type == imp.PY_COMPILED:
if fp.read(4) != imp.get_magic():
self.msgout(2, "raise ImportError: Bad magic number", pathname)
raise ImportError, "Bad magic number in %s" % pathname
fp.read(4)
co = marshal.load(fp)
else:
co = None
m = self.add_module(fqname)
m.__file__ = pathname
if co:
if self.replace_paths:
co = self.replace_paths_in_code(co)
m.__code__ = co
self.scan_code(co, m)
self.msgout(2, "load_module ->", m)
return m
    def _add_badmodule(self, name, caller):
        # Record that importing `name` failed, remembering which module
        # wanted it ("-" when there was no identifiable caller).
        if name not in self.badmodules:
            self.badmodules[name] = {}
        if caller:
            self.badmodules[name][caller.__name__] = 1
        else:
            self.badmodules[name]["-"] = 1
    def _safe_import_hook(self, name, caller, fromlist, level=-1):
        # wrapper for self.import_hook() that won't raise ImportError
        # Failures are recorded in self.badmodules instead -- both for
        # `name` itself and for each fromlist entry that cannot be imported.
        if name in self.badmodules:
            self._add_badmodule(name, caller)
            return
        try:
            self.import_hook(name, caller, level=level)
        except ImportError, msg:
            self.msg(2, "ImportError:", str(msg))
            self._add_badmodule(name, caller)
        else:
            if fromlist:
                for sub in fromlist:
                    # Skip entries already known to fail.
                    if sub in self.badmodules:
                        self._add_badmodule(sub, caller)
                        continue
                    try:
                        self.import_hook(name, caller, [sub], level=level)
                    except ImportError, msg:
                        self.msg(2, "ImportError:", str(msg))
                        fullname = name + "." + sub
                        self._add_badmodule(fullname, caller)
def scan_opcodes_cli(self, co):
import ast
with open(co.co_filename, 'rU') as f:
nodes = ast.parse(f.read(), co.co_filename)
items = []
class ModuleFinderVisitor(ast.NodeVisitor):
def visit_Assign(self, node):
for x in node.targets:
if isinstance(x, ast.Subscript):
if isinstance(x.value, ast.Name):
items.append(("store", (x.value.id, )))
elif isinstance(x.value, ast.Attribute):
items.append(("store", (x.value.attr, )))
else:
print 'Unknown in store: %s' % type(x.value).__name__
elif isinstance(x, ast.Name):
items.append(("store", (x.id, )))
def visit_Import(self, node):
items.extend([("import", (None, x.name)) for x in node.names])
def visit_ImportFrom(self, node):
if node.level == 1:
items.append(("relative_import", (node.level, [x.name for x in node.names], node.module)))
else:
items.extend([("import", ([x.name for x in node.names], node.module))])
v = ModuleFinderVisitor()
v.visit(nodes)
for what, args in items:
yield what, args
def scan_opcodes(self, co,
unpack = struct.unpack):
# Scan the code, and yield 'interesting' opcode combinations
# Version for Python 2.4 and older
code = co.co_code
names = co.co_names
consts = co.co_consts
opargs = [(op, arg) for _, op, arg in _unpack_opargs(code) | continue
if (op == IMPORT_NAME and i >= 1
and opargs[i-1][0] == LOAD_CONST):
fromlist = consts[opargs[i-1][1]]
yield "import", (fromlist, names[oparg])
continue
    def scan_opcodes_25(self, co):
        # Scan the code, and yield 'interesting' opcode combinations
        # Python 2.5+ bytecode: IMPORT_NAME is preceded by two LOAD_CONSTs
        # pushing the relative-import level and the fromlist tuple.
        code = co.co_code
        names = co.co_names
        consts = co.co_consts
        opargs = [(op, arg) for _, op, arg in _unpack_opargs(code)
                  if op != EXTENDED_ARG]
        for i, (op, oparg) in enumerate(opargs):
            if op in STORE_OPS:
                yield "store", (names[oparg],)
                continue
            if (op == IMPORT_NAME and i >= 2
                and opargs[i-1][0] == opargs[i-2][0] == LOAD_CONST):
                level = consts[opargs[i-2][1]]
                fromlist = consts[opargs[i-1][1]]
                if level == -1: # normal import
                    yield "import", (fromlist, names[oparg])
                elif level == 0: # absolute import
                    yield "absolute_import", (fromlist, names[oparg])
                else: # relative import
                    yield "relative_import", (level, fromlist, names[oparg])
                continue
def scan_code(self, co, m):
code = co.co_code
if sys.platform == 'cli':
scanner = self.scan_opcodes_cli
elif sys.version_info >= (2, 5):
scanner = self.scan_opcodes_25
else:
scanner = self.scan_opcodes
for what, args in scanner(co):
if what == "store":
name, = args
m.globalnames[name] = 1
elif what in ("import", "absolute_import"):
fromlist, name = args
have_star = 0
if fromlist is not None:
if "*" in fromlist:
have_star = 1
fromlist = [f for f in fromlist if f != "*"]
if what == "absolute_import": level = 0
else: level = -1
self._safe_import_hook(name, m, fromlist, level=level)
if have_star:
# We've encountered an "import *". If it is a Python module,
# the code has already been parsed and we can suck out the
# global names.
mm = None
if m.__path__:
# At this point we don't know whether 'name' is a
# submodule of 'm' or a global module. Let's just try
# the full name first.
mm = self.modules.get(m.__name__ + "." + name)
if mm is None:
mm = self.modules.get(name)
if mm is not None:
m.globalnames.update(mm.globalnames)
m.starimports.update(mm.starimports)
if mm.__code__ is None:
m.starimports[name] = 1
else:
m.starimports[name] = 1
elif what == "relative_import":
level, fromlist, name = args
if name:
self._safe_import_hook(name, m, fromlist, level=level)
else:
parent = self.determine_parent(m, level=level)
self._safe_import_hook(parent.__name__, None, fromlist, level=0)
else:
# We don't expect anything else from the generator.
raise RuntimeError(what)
for c in co.co_consts:
if isinstance(c, type(co)):
self.scan_code(c, m)
    def load_package(self, fqname, pathname):
        # Load a package directory: honor ReplacePackage() renames, set up
        # __path__ (plus any AddPackagePath() registrations), then load the
        # package's __init__ module.
        self.msgin(2, "load_package", fqname, pathname)
        newname = replacePackageMap.get(fqname)
        if newname:
            fqname = newname
        m = self.add_module(fqname)
        m.__file__ = pathname
        m.__path__ = [pathname]
        # As per comment at top of file, simulate runtime __path__ additions.
        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
        fp, buf, stuff = self.find_module("__init__", m.__path__)
        self.load_module(fqname, fp, buf, stuff)
        self.msgout(2, "load_package ->", m)
        if fp:
            fp.close()
        return m
    def add_module(self, fqname):
        # Return the Module registered under `fqname`, creating and caching
        # an empty one on first sight.
        if fqname in self.modules:
            return self.modules[fqname]
        self.modules[fqname] = m = Module(fqname)
        return m
    def find_module(self, name, path, parent=None):
        # Locate `name` on `path` via imp.find_module, honoring the
        # excludes list and recognizing builtin modules.
        # Returns (file_or_None, pathname, (suffix, mode, type)) and raises
        # ImportError for excluded or unlocatable modules.
        if parent is not None:
            # assert path is not None
            fullname = parent.__name__+'.'+name
        else:
            fullname = name
        if fullname in self.excludes:
            self.msgout(3, "find_module -> Excluded", fullname)
            raise ImportError, name
        if path is None:
            if name in sys.builtin_module_names:
                return (None, None, ("", "", imp.C_BUILTIN))
            path = self.path
        return imp.find_module(name, path)
def report(self):
"""Print a report to stdout, listing the found modules with their
paths, as well as modules that are missing, or seem to be missing.
"""
print
print " %-25s %s" % ("Name", "File")
print " %-25s %s" % ("----", "----")
# Print modules found
keys = self.modules.keys()
keys.sort()
for key in keys:
m = self.modules[key]
if m.__path__:
print "P",
else:
print "m",
print "%-25s" % key, m.__file__ or ""
# Print missing modules
missing, maybe = self.any_missing_maybe()
if missing:
print
print "Missing modules:"
for name in missing:
mods = self.badmodules[name].keys()
mods.sort()
print "?", name, "imported from", ', '.join(mods)
# Print modules that may be missing, but then again, maybe not...
if maybe:
print
print "Submodules that appear to be missing, but could also be",
print "global names in the parent package:"
for name in maybe:
mods = self.badmodules[name].keys()
mods.sort()
print "?", name, "imported from", ', '.join(mods)
def any_missing(self):
"""Return a list of modules that appear to be missing. Use
any_missing_maybe() if you want to know which modules are
certain to be missing, and which *may* be missing.
"""
missing, maybe = self.any_missing_maybe()
return missing + maybe
def any_missing_maybe(self):
"""Return two lists, one with modules that are certainly missing
and one with modules that *may* be missing. The latter names could
either be submodules *or* just global names in the package.
The reason it can't always be determined is that it's impossible to
tell which names are imported when "from module import *" is done
with an extension module, short of actually importing it.
"""
missing = []
maybe = []
for name in self.badmodules:
if name in self.excludes:
continue
i = name.rfind(".")
if i < 0:
missing.append(name)
continue
subname = name[i+1:]
pkgname = name[:i]
pkg = self.modules.get(pkgname)
if pkg is not None:
if pkgname in self.badmodules[name]:
# The package tried to import this module itself and
# failed. It's definitely missing.
missing.append(name)
elif subname in pkg.globalnames:
# It's a global in the package: definitely not missing.
pass
elif pkg.starimports:
# It could be missing, but the package did an "import *"
# from a non-Python module, so we simply can't be sure.
maybe.append(name)
else:
# It's not a global in the package, the package didn't
# do funny star imports, it's very likely to be missing.
# The symbol could be inserted into the package from the
# outside, but since that's not good style we simply list
# it missing.
missing.append(name)
else:
missing.append(name)
missing.sort()
maybe.sort()
return missing, maybe
def replace_paths_in_code(self, co):
new_filename = original_filename = os.path.normpath(co.co_filename)
for f, r in self.replace_paths:
if original_filename.startswith(f):
new_filename = r + original_filename[len(f):]
break
if self.debug and original_filename not in self.processed_paths:
if new_filename != original_filename:
self.msgout(2, "co_filename %r changed to %r" \
% (original_filename,new_filename,))
else:
self.msgout(2, "co_filename %r remains unchanged" \
% (original_filename,))
self.processed_paths.append(original_filename)
consts = list(co.co_consts)
for i in range(len(consts)):
if isinstance(consts[i], type(co)):
consts[i] = self.replace_paths_in_code(consts[i])
return types.CodeType(co.co_argcount, co.co_nlocals, co.co_stacksize,
co.co_flags, co.co_code, tuple(consts), co.co_names,
co.co_varnames, new_filename, co.co_name,
co.co_firstlineno, co.co_lnotab,
co.co_freevars, co.co_cellvars)
def test():
# Parse command line
import getopt
try:
opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
except getopt.error, msg:
print msg
return
# Process options
debug = 1
domods = 0
addpath = []
exclude = []
for o, a in opts:
if o == '-d':
debug = debug + 1
if o == '-m':
domods = 1
if o == '-p':
addpath = addpath + a.split(os.pathsep)
if o == '-q':
debug = 0
if o == '-x':
exclude.append(a)
# Provide default arguments
if not args:
script = "hello.py"
else:
script = args[0]
# Set the path based on sys.path and the script directory
path = sys.path[:]
path[0] = os.path.dirname(script)
path = addpath + path
if debug > 1:
print "path:"
for item in path:
print " ", repr(item)
# Create the module finder and turn its crank
mf = ModuleFinder(path, debug, exclude)
for arg in args[1:]:
if arg == '-m':
domods = 1
continue
if domods:
if arg[-2:] == '.*':
mf.import_hook(arg[:-2], None, ["*"])
else:
mf.import_hook(arg)
else:
mf.load_file(arg)
mf.run_script(script)
mf.report()
return mf # for -i debugging
if __name__ == '__main__':
try:
mf = test()
except KeyboardInterrupt:
print "\n[interrupt]" | if op != EXTENDED_ARG]
for i, (op, oparg) in enumerate(opargs):
if c in STORE_OPS:
yield "store", (names[oparg],) |
ranges_test.go | package ranges
import (
"fmt"
"math/rand"
"testing"
"github.com/stretchr/testify/assert"
)
func TestRangeEnd(t *testing.T) {
assert.Equal(t, int64(3), Range{Pos: 1, Size: 2}.End())
}
func TestRangeIsEmpty(t *testing.T) |
// TestRangeClip exercises Clip(5) against ranges that lie below, straddle,
// start at, and start past the clip offset.
func TestRangeClip(t *testing.T) {
	for _, tc := range []struct {
		in   Range
		want Range
	}{
		{Range{Pos: 1, Size: 2}, Range{Pos: 1, Size: 2}}, // fully below: unchanged
		{Range{Pos: 1, Size: 6}, Range{Pos: 1, Size: 4}}, // straddles: truncated
		{Range{Pos: 5, Size: 6}, Range{Pos: 5, Size: 0}}, // starts at offset: emptied
		{Range{Pos: 7, Size: 6}, Range{Pos: 0, Size: 0}}, // starts past: zeroed
	} {
		r := tc.in
		r.Clip(5)
		assert.Equal(t, tc.want, r)
	}
}
func TestRangeIntersection(t *testing.T) {
for _, test := range []struct {
r Range
b Range
want Range
}{
{
r: Range{1, 1},
b: Range{3, 1},
want: Range{},
},
{
r: Range{1, 1},
b: Range{1, 1},
want: Range{1, 1},
},
{
r: Range{1, 9},
b: Range{3, 2},
want: Range{3, 2},
},
{
r: Range{1, 5},
b: Range{3, 5},
want: Range{3, 3},
},
} {
what := fmt.Sprintf("test r=%v, b=%v", test.r, test.b)
got := test.r.Intersection(test.b)
assert.Equal(t, test.want, got, what)
got = test.b.Intersection(test.r)
assert.Equal(t, test.want, got, what)
}
}
func TestRangeMerge(t *testing.T) {
for _, test := range []struct {
new Range
dst Range
want Range
wantMerged bool
}{
{
new: Range{Pos: 1, Size: 1}, // .N.......
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 3, Size: 3}, // ...DDD...
wantMerged: false,
},
{
new: Range{Pos: 1, Size: 2}, // .NN......
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 1, Size: 5}, // .XXXXX...
wantMerged: true,
},
{
new: Range{Pos: 1, Size: 3}, // .NNN.....
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 1, Size: 5}, // .XXXXX...
wantMerged: true,
},
{
new: Range{Pos: 1, Size: 5}, // .NNNNN...
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 1, Size: 5}, // .XXXXX...
wantMerged: true,
},
{
new: Range{Pos: 1, Size: 6}, // .NNNNNN..
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 1, Size: 6}, // .XXXXXX..
wantMerged: true,
},
{
new: Range{Pos: 3, Size: 3}, // ...NNN...
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 3, Size: 3}, // ...XXX...
wantMerged: true,
},
{
new: Range{Pos: 3, Size: 2}, // ...NN....
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 3, Size: 3}, // ...XXX...
wantMerged: true,
},
{
new: Range{Pos: 3, Size: 4}, // ...NNNN..
dst: Range{Pos: 3, Size: 3}, // ...DDD...
want: Range{Pos: 3, Size: 4}, // ...XXXX..
wantMerged: true,
},
} {
what := fmt.Sprintf("test new=%v, dst=%v", test.new, test.dst)
gotMerged := merge(&test.new, &test.dst)
assert.Equal(t, test.wantMerged, gotMerged)
assert.Equal(t, test.want, test.dst, what)
}
}
// checkRanges asserts that rs is well formed: strictly ascending by Pos,
// non-overlapping, and fully coalesced (no two adjacent ranges touching).
// Failures are reported through t; it returns false if any check failed.
func checkRanges(t *testing.T, rs Ranges, what string) bool {
	if len(rs) < 2 {
		return true
	}
	ok := true
	for i := 0; i < len(rs)-1; i++ {
		a := rs[i]
		b := rs[i+1]
		// assert.Failf takes (t, failureMessage, msg, args...); previously
		// the format string was passed as failureMessage and the verbs were
		// never expanded with what/i/rs.
		if a.Pos >= b.Pos {
			assert.Failf(t, "ranges out of order", "%s: Ranges in wrong order at %d in: %v", what, i, rs)
			ok = false
		}
		if a.End() > b.Pos {
			assert.Failf(t, "ranges overlap", "%s: Ranges overlap at %d in: %v", what, i, rs)
			ok = false
		}
		if a.End() == b.Pos {
			assert.Failf(t, "ranges not coalesced", "%s: Ranges not coalesced at %d in: %v", what, i, rs)
			ok = false
		}
	}
	return ok
}
func TestRangeCoalesce(t *testing.T) {
for _, test := range []struct {
rs Ranges
i int
want Ranges
}{
{
rs: Ranges{},
want: Ranges{},
},
{
rs: Ranges{
{Pos: 1, Size: 1},
},
want: Ranges{
{Pos: 1, Size: 1},
},
i: 0,
},
{
rs: Ranges{
{Pos: 1, Size: 1},
{Pos: 2, Size: 1},
{Pos: 3, Size: 1},
},
want: Ranges{
{Pos: 1, Size: 3},
},
i: 0,
},
{
rs: Ranges{
{Pos: 1, Size: 1},
{Pos: 3, Size: 1},
{Pos: 4, Size: 1},
{Pos: 5, Size: 1},
},
want: Ranges{
{Pos: 1, Size: 1},
{Pos: 3, Size: 3},
},
i: 2,
},
{
rs: Ranges{{38, 8}, {51, 10}, {60, 3}},
want: Ranges{{38, 8}, {51, 12}},
i: 1,
},
} {
got := append(Ranges{}, test.rs...)
got.coalesce(test.i)
what := fmt.Sprintf("test rs=%v, i=%d", test.rs, test.i)
assert.Equal(t, test.want, got, what)
checkRanges(t, got, what)
}
}
func TestRangeInsert(t *testing.T) {
for _, test := range []struct {
new Range
rs Ranges
want Ranges
}{
{
new: Range{Pos: 1, Size: 0},
rs: Ranges{},
want: Ranges(nil),
},
{
new: Range{Pos: 1, Size: 1}, // .N.......
rs: Ranges{}, // .........
want: Ranges{ // .N.......
{Pos: 1, Size: 1},
},
},
{
new: Range{Pos: 1, Size: 1}, // .N.......
rs: Ranges{{Pos: 5, Size: 1}}, // .....R...
want: Ranges{ // .N...R...
{Pos: 1, Size: 1},
{Pos: 5, Size: 1},
},
},
{
new: Range{Pos: 5, Size: 1}, // .....R...
rs: Ranges{{Pos: 1, Size: 1}}, // .N.......
want: Ranges{ // .N...R...
{Pos: 1, Size: 1},
{Pos: 5, Size: 1},
},
},
{
new: Range{Pos: 1, Size: 1}, // .N.......
rs: Ranges{{Pos: 2, Size: 1}}, // ..R......
want: Ranges{ // .XX......
{Pos: 1, Size: 2},
},
},
{
new: Range{Pos: 2, Size: 1}, // ..N.......
rs: Ranges{{Pos: 1, Size: 1}}, // .R......
want: Ranges{ // .XX......
{Pos: 1, Size: 2},
},
},
{
new: Range{Pos: 51, Size: 10},
rs: Ranges{{38, 8}, {57, 2}, {60, 3}},
want: Ranges{{38, 8}, {51, 12}},
},
} {
got := append(Ranges(nil), test.rs...)
got.Insert(test.new)
what := fmt.Sprintf("test new=%v, rs=%v", test.new, test.rs)
assert.Equal(t, test.want, got, what)
checkRanges(t, test.rs, what)
checkRanges(t, got, what)
}
}
// TestRangeInsertRandom fuzzes Insert with random ranges and verifies the
// invariants (ordered, non-overlapping, coalesced) after every insertion.
func TestRangeInsertRandom(t *testing.T) {
	for i := 0; i < 100; i++ {
		var rs Ranges
		for j := 0; j < 100; j++ {
			var r = Range{
				Pos:  rand.Int63n(100),
				Size: rand.Int63n(10) + 1, // at least 1, so never empty
			}
			what := fmt.Sprintf("inserting %v into %v\n", r, rs)
			rs.Insert(r)
			if !checkRanges(t, rs, what) {
				break
			}
			//fmt.Printf("%d: %d: %v\n", i, j, rs)
		}
	}
}
func TestRangeFind(t *testing.T) {
for _, test := range []struct {
rs Ranges
r Range
wantCurr Range
wantNext Range
wantPresent bool
}{
{
r: Range{Pos: 1, Size: 0},
rs: Ranges{},
wantCurr: Range{Pos: 1, Size: 0},
wantNext: Range{},
wantPresent: false,
},
{
r: Range{Pos: 1, Size: 1},
rs: Ranges{},
wantCurr: Range{Pos: 1, Size: 1},
wantNext: Range{},
wantPresent: false,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 1, Size: 10},
},
wantCurr: Range{Pos: 1, Size: 2},
wantNext: Range{Pos: 3, Size: 0},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 10},
rs: Ranges{
Range{Pos: 1, Size: 2},
},
wantCurr: Range{Pos: 1, Size: 2},
wantNext: Range{Pos: 3, Size: 8},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 5, Size: 2},
},
wantCurr: Range{Pos: 1, Size: 2},
wantNext: Range{Pos: 0, Size: 0},
wantPresent: false,
},
{
r: Range{Pos: 2, Size: 10},
rs: Ranges{
Range{Pos: 1, Size: 2},
},
wantCurr: Range{Pos: 2, Size: 1},
wantNext: Range{Pos: 3, Size: 9},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 9},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantCurr: Range{Pos: 1, Size: 1},
wantNext: Range{Pos: 2, Size: 8},
wantPresent: false,
},
{
r: Range{Pos: 2, Size: 8},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantCurr: Range{Pos: 2, Size: 1},
wantNext: Range{Pos: 3, Size: 7},
wantPresent: true,
},
{
r: Range{Pos: 3, Size: 7},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantCurr: Range{Pos: 3, Size: 1},
wantNext: Range{Pos: 4, Size: 6},
wantPresent: false,
},
{
r: Range{Pos: 4, Size: 6},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantCurr: Range{Pos: 4, Size: 1},
wantNext: Range{Pos: 5, Size: 5},
wantPresent: true,
},
{
r: Range{Pos: 5, Size: 5},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantCurr: Range{Pos: 5, Size: 5},
wantNext: Range{Pos: 0, Size: 0},
wantPresent: false,
},
} {
what := fmt.Sprintf("test r=%v, rs=%v", test.r, test.rs)
checkRanges(t, test.rs, what)
gotCurr, gotNext, gotPresent := test.rs.Find(test.r)
assert.Equal(t, test.r.Pos, gotCurr.Pos, what)
assert.Equal(t, test.wantCurr, gotCurr, what)
assert.Equal(t, test.wantNext, gotNext, what)
assert.Equal(t, test.wantPresent, gotPresent, what)
}
}
func TestRangeFindAll(t *testing.T) {
for _, test := range []struct {
rs Ranges
r Range
want []FoundRange
wantNext Range
wantPresent bool
}{
{
r: Range{Pos: 1, Size: 0},
rs: Ranges{},
want: []FoundRange(nil),
},
{
r: Range{Pos: 1, Size: 1},
rs: Ranges{},
want: []FoundRange{
{
R: Range{Pos: 1, Size: 1},
Present: false,
},
},
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 1, Size: 10},
},
want: []FoundRange{
{
R: Range{Pos: 1, Size: 2},
Present: true,
},
},
},
{
r: Range{Pos: 1, Size: 10},
rs: Ranges{
Range{Pos: 1, Size: 2},
},
want: []FoundRange{
{
R: Range{Pos: 1, Size: 2},
Present: true,
},
{
R: Range{Pos: 3, Size: 8},
Present: false,
},
},
},
{
r: Range{Pos: 5, Size: 5},
rs: Ranges{
Range{Pos: 4, Size: 2},
Range{Pos: 7, Size: 1},
Range{Pos: 9, Size: 2},
},
want: []FoundRange{
{
R: Range{Pos: 5, Size: 1},
Present: true,
},
{
R: Range{Pos: 6, Size: 1},
Present: false,
},
{
R: Range{Pos: 7, Size: 1},
Present: true,
},
{
R: Range{Pos: 8, Size: 1},
Present: false,
},
{
R: Range{Pos: 9, Size: 1},
Present: true,
},
},
},
} {
what := fmt.Sprintf("test r=%v, rs=%v", test.r, test.rs)
checkRanges(t, test.rs, what)
got := test.rs.FindAll(test.r)
assert.Equal(t, test.want, got, what)
}
}
func TestRangePresent(t *testing.T) {
for _, test := range []struct {
rs Ranges
r Range
wantPresent bool
}{
{
r: Range{Pos: 1, Size: 0},
rs: Ranges{},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 0},
rs: Ranges(nil),
wantPresent: true,
},
{
r: Range{Pos: 0, Size: 1},
rs: Ranges{},
wantPresent: false,
},
{
r: Range{Pos: 0, Size: 1},
rs: Ranges(nil),
wantPresent: false,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 1, Size: 1},
},
wantPresent: false,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 1, Size: 2},
},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 1, Size: 10},
},
wantPresent: true,
},
{
r: Range{Pos: 1, Size: 2},
rs: Ranges{
Range{Pos: 5, Size: 2},
},
wantPresent: false,
},
{
r: Range{Pos: 1, Size: 9},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantPresent: false,
},
{
r: Range{Pos: 2, Size: 8},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantPresent: false,
},
{
r: Range{Pos: 3, Size: 7},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantPresent: false,
},
{
r: Range{Pos: 4, Size: 6},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantPresent: false,
},
{
r: Range{Pos: 5, Size: 5},
rs: Ranges{
Range{Pos: 2, Size: 1},
Range{Pos: 4, Size: 1},
},
wantPresent: false,
},
} {
what := fmt.Sprintf("test r=%v, rs=%v", test.r, test.rs)
checkRanges(t, test.rs, what)
gotPresent := test.rs.Present(test.r)
assert.Equal(t, test.wantPresent, gotPresent, what)
checkRanges(t, test.rs, what)
}
}
func TestRangesIntersection(t *testing.T) {
for _, test := range []struct {
rs Ranges
r Range
want Ranges
}{
{
rs: Ranges(nil),
r: Range{},
want: Ranges(nil),
},
{
rs: Ranges{},
r: Range{},
want: Ranges{},
},
{
rs: Ranges{},
r: Range{Pos: 1, Size: 0},
want: Ranges{},
},
{
rs: Ranges{},
r: Range{Pos: 1, Size: 1},
want: Ranges{},
},
{
rs: Ranges{{Pos: 1, Size: 5}},
r: Range{Pos: 1, Size: 3},
want: Ranges{
{Pos: 1, Size: 3},
},
},
{
rs: Ranges{{Pos: 1, Size: 5}},
r: Range{Pos: 1, Size: 10},
want: Ranges{
{Pos: 1, Size: 5},
},
},
{
rs: Ranges{{Pos: 1, Size: 5}},
r: Range{Pos: 3, Size: 10},
want: Ranges{
{Pos: 3, Size: 3},
},
},
{
rs: Ranges{{Pos: 1, Size: 5}},
r: Range{Pos: 6, Size: 10},
want: Ranges(nil),
},
{
rs: Ranges{
{Pos: 1, Size: 2},
{Pos: 11, Size: 2},
{Pos: 21, Size: 2},
{Pos: 31, Size: 2},
{Pos: 41, Size: 2},
},
r: Range{Pos: 12, Size: 20},
want: Ranges{
{Pos: 12, Size: 1},
{Pos: 21, Size: 2},
{Pos: 31, Size: 1},
},
},
} {
got := test.rs.Intersection(test.r)
what := fmt.Sprintf("test ra=%v, r=%v", test.rs, test.r)
assert.Equal(t, test.want, got, what)
checkRanges(t, test.rs, what)
checkRanges(t, got, what)
}
}
func TestRangesEqual(t *testing.T) {
for _, test := range []struct {
rs Ranges
bs Ranges
want bool
}{
{
rs: Ranges(nil),
bs: Ranges(nil),
want: true,
},
{
rs: Ranges{},
bs: Ranges(nil),
want: true,
},
{
rs: Ranges(nil),
bs: Ranges{},
want: true,
},
{
rs: Ranges{},
bs: Ranges{},
want: true,
},
{
rs: Ranges{
{Pos: 0, Size: 1},
},
bs: Ranges{},
want: false,
},
{
rs: Ranges{
{Pos: 0, Size: 1},
},
bs: Ranges{
{Pos: 0, Size: 1},
},
want: true,
},
{
rs: Ranges{
{Pos: 0, Size: 1},
{Pos: 10, Size: 9},
{Pos: 20, Size: 21},
},
bs: Ranges{
{Pos: 0, Size: 1},
{Pos: 10, Size: 9},
{Pos: 20, Size: 22},
},
want: false,
},
{
rs: Ranges{
{Pos: 0, Size: 1},
{Pos: 10, Size: 9},
{Pos: 20, Size: 21},
},
bs: Ranges{
{Pos: 0, Size: 1},
{Pos: 10, Size: 9},
{Pos: 20, Size: 21},
},
want: true,
},
} {
got := test.rs.Equal(test.bs)
what := fmt.Sprintf("test rs=%v, bs=%v", test.rs, test.bs)
assert.Equal(t, test.want, got, what)
checkRanges(t, test.bs, what)
checkRanges(t, test.rs, what)
}
}
func TestRangesSize(t *testing.T) {
for _, test := range []struct {
rs Ranges
want int64
}{
{
rs: Ranges(nil),
want: 0,
},
{
rs: Ranges{},
want: 0,
},
{
rs: Ranges{
{Pos: 7, Size: 11},
},
want: 11,
},
{
rs: Ranges{
{Pos: 0, Size: 1},
{Pos: 10, Size: 9},
{Pos: 20, Size: 21},
},
want: 31,
},
} {
got := test.rs.Size()
what := fmt.Sprintf("test rs=%v", test.rs)
assert.Equal(t, test.want, got, what)
checkRanges(t, test.rs, what)
}
}
func TestFindMissing(t *testing.T) {
for _, test := range []struct {
r Range
rs Ranges
want Range
}{
{
r: Range{},
rs: Ranges(nil),
want: Range{},
},
{
r: Range{},
rs: Ranges{},
want: Range{},
},
{
r: Range{Pos: 3, Size: 5},
rs: Ranges{
{Pos: 10, Size: 5},
{Pos: 20, Size: 5},
},
want: Range{Pos: 3, Size: 5},
},
{
r: Range{Pos: 3, Size: 15},
rs: Ranges{
{Pos: 10, Size: 5},
{Pos: 20, Size: 5},
},
want: Range{Pos: 3, Size: 15},
},
{
r: Range{Pos: 10, Size: 5},
rs: Ranges{
{Pos: 10, Size: 5},
{Pos: 20, Size: 5},
},
want: Range{Pos: 15, Size: 0},
},
{
r: Range{Pos: 10, Size: 7},
rs: Ranges{
{Pos: 10, Size: 5},
{Pos: 20, Size: 5},
},
want: Range{Pos: 15, Size: 2},
},
{
r: Range{Pos: 11, Size: 7},
rs: Ranges{
{Pos: 10, Size: 5},
{Pos: 20, Size: 5},
},
want: Range{Pos: 15, Size: 3},
},
} {
got := test.rs.FindMissing(test.r)
what := fmt.Sprintf("test r=%v, rs=%v", test.r, test.rs)
assert.Equal(t, test.want, got, what)
assert.Equal(t, test.r.End(), got.End())
checkRanges(t, test.rs, what)
}
}
| {
assert.Equal(t, false, Range{Pos: 1, Size: 2}.IsEmpty())
assert.Equal(t, true, Range{Pos: 1, Size: 0}.IsEmpty())
assert.Equal(t, true, Range{Pos: 1, Size: -1}.IsEmpty())
} |
logout.go | package cmd
import (
"fmt"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// logoutCmd implements `logout`: it blanks the persisted auth token in the
// viper config file so subsequent commands must re-authenticate.
var logoutCmd = &cobra.Command{
	Use:   "logout",
	Short: "Log out of the CLI",
	Args:  cobra.NoArgs,
	// allow logout command to be run without token
	PersistentPreRunE: rootPreRunE(false),
	RunE: func(cmd *cobra.Command, args []string) error {
		// The token is overwritten with "" rather than removed.
		viper.Set("token", "")
		err := viper.WriteConfig()
		if err != nil {
			return err
		}
		fmt.Print("Successfully logged out!")
		return nil
	},
}
func | () {
rootCmd.AddCommand(logoutCmd)
}
| init |
qr.js | //二维码相关的内容
//参考自 https://github.com/johansten/stargazer/blob/master/docs/qr-codes.md
// 生成名片信息(用于生成二维码)
// json数据格式
import StellarSdk from 'stellar-sdk'
// Build a JSON "name card" payload (stargazer-compatible, see link above)
// for rendering as a QR code. Contains only public contact data.
export function exportNameCard(account){
    const card = {
        name: account.name,             // account display name
        id: account.address,            // public account id
        fed: account.federationAddress, // federation address
        memo: account.memo              // default memo
    }
    return JSON.stringify({ stellar: { account: card } })
}
// 导出账户,包括公私钥信息
export function exportAccount(account,accountData){
let data = {
"stellar": {
"account": {
"name":account.name, //账户名称
//id: account.address, //地址
//"fed": account.federationAddress, //联绑地址
//"inflate": account.inflationAddress,//通账地址
"memo": account.memo,//备注
},
"key": accountData.seed // 私钥地址
}
}
let firefly = JSON.stringify(data)
console.log(firefly)
return firefly
}
export function exportPayment(account,amount,asset_code,asset_issuer,memo_type,memo){
let data = {
"stellar": {
"payment": {
"destination": account.address,
//"network": NETWORK_CODE, // (*)
"amount": amount,
"asset": { // (**)
"code": asset_code,
"issuer": asset_issuer
},
"memo": {
"type": memo_type,
"value": memo
}
}
}
}
return JSON.stringify(data)
}
export function importContact(data){
if(data === null)return {status: false}
if(typeof data === 'undefined')return {status: false}
if(typeof data === 'object'){
return getContactFromJSON(jsondata)
}
if(typeof data === 'string'){
data = data.trim()
try{
var jsondata = JSON.parse(data)
return getContactFromJSON(jsondata)
}catch(e){
let ok = StellarSdk.StrKey.isValidEd25519PublicKey(data)
if(ok){
return {status: true, ret: {stellar_id:data}}
}
}
}
return { status: false }
}
function getContactFromJSON(data){
console.log(data)
console.log(typeof data)
let account = data.stellar.account
if(account){
// stargaze格式
let name = account.name //contact name
let stellar_id = account.id //contact address
let federationAddress = account.fed //contact federation address default is null
let memo_type = account.memoType // contact memo type ,default is null
let memo = account.memo //contact memo default is null
if(stellar_id){
return {status: true, ret: {name,stellar_id,federationAddress,memo_type,memo}}
}
}
return {status:false}
} | * 导入账户
* 1. 直接导入私钥
* 2. 导入的是stargaze的账户类型
*/
export function importAccountFromData(data){
if(data === null)return {status: false}
if(typeof data === 'undefined')return {status: false}
if(typeof data === 'object'){
return getSeedFromJSON(jsondata)
}
if(typeof data === 'string'){
data = data.trim()
try{
var jsondata = JSON.parse(data)
return getSeedFromJSON(jsondata)
}catch(e){
console.error(e)
let ok = StellarSdk.StrKey.isValidEd25519SecretSeed(data)
if(ok){
return {status: true, seed: data}
}
}
}
return { status: false }
}
function getSeedFromJSON(data){
let stellar = data.stellar
if(stellar){
// stargaze格式
let seed = stellar.key
if(seed){
return {status: true, seed, data}
}
}
return {status:false}
} |
/** |
0016_autozones_path.py | # ----------------------------------------------------------------------
# autozones_path
# ----------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Third-party modules
from django.db import models
# NOC modules
from noc.core.migration.base import BaseMigration
class Migration(BaseMigration):
def migrate(self):
| self.db.add_column(
"dns_dnsserver",
"autozones_path",
models.CharField(
"Autozones path", max_length=256, blank=True, null=True, default="autozones"
),
) |
|
hacker-prank.py | import time
def wait(sec):
|
wait(1)
print("")
print("Granting information")
print("")
wait(1)
print("")
print("Running nmap on target 54.32.43.1")
print("")
wait(1)
print("nmap -sS 54.32.43.1")
print("Nmap Copyright 2022")
print("port/tcp 8080 open. http-proxy")
print("Done with scan")
wait(1)
print("")
print("burpsuite &")
print("")
wait(1)
print("")
print("git clone https://github.com/sqlmapproject/sqlmap")
print("")
wait(1)
print("")
print("Successfully cloned!")
print("")
wait(1)
print("")
print("cd sqlmap")
print("")
wait(1)
print("")
print("python3 sqlmap.py damn-son.gov")
print("")
wait(1)
print("")
print("sqlmap")
print("[CRITICAL] Failed")
print("You must give sqlmap root privileges")
print("")
wait(1)
print("")
print("sudo python3 sqlmap.py damn-sen.gov")
print("")
wait(1)
print("")
print("sqlmap")
print("[CRITICAL]: Failed")
print("The site 'damn-sen.gov' was not found. Please make sure the website is running. Or the website address is correct.")
print("")
wait(1)
print("")
print("sudo python3 sqlmap.py damn-son.gov")
print("")
wait(1)
print("")
print("[INFO]: Success!")
print("[INFO]: Running SQL injection")
print("[INFO]: Found username witch is 'admin'")
print("[INFO]: Found password witch is '123321123'")
print("")
wait(1)
print("")
print("Done..")
print("") | time.sleep(sec) |
ccprovider.go | /*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package ccprovider
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/golang/protobuf/proto"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/core/ledger"
pb "github.com/hyperledger/fabric/protos/peer"
)
var ccproviderLogger = flogging.MustGetLogger("ccprovider")
var chaincodeInstallPath string
// CCPackage encapsulates a chaincode package which can be
// raw ChaincodeDeploymentSpec
// SignedChaincodeDeploymentSpec
// Attempt to keep the interface at a level with minimal
// interface for possible generalization.
type CCPackage interface {
//InitFromBuffer initialize the package from bytes
InitFromBuffer(buf []byte) (*ChaincodeData, error)
// InitFromFS gets the chaincode from the filesystem (includes the raw bytes too)
InitFromFS(ccname string, ccversion string) ([]byte, *pb.ChaincodeDeploymentSpec, error)
// PutChaincodeToFS writes the chaincode to the filesystem
PutChaincodeToFS() error
// GetDepSpec gets the ChaincodeDeploymentSpec from the package
GetDepSpec() *pb.ChaincodeDeploymentSpec
// GetDepSpecBytes gets the serialized ChaincodeDeploymentSpec from the package
GetDepSpecBytes() []byte
// ValidateCC validates and returns the chaincode deployment spec corresponding to
// ChaincodeData. The validation is based on the metadata from ChaincodeData
// One use of this method is to validate the chaincode before launching
ValidateCC(ccdata *ChaincodeData) error
// GetPackageObject gets the object as a proto.Message
GetPackageObject() proto.Message
// GetChaincodeData gets the ChaincodeData
GetChaincodeData() *ChaincodeData
// GetId gets the fingerprint of the chaincode based on package computation
GetId() []byte
}
// SetChaincodesPath sets the chaincode path for this peer
func SetChaincodesPath(path string) {
if s, err := os.Stat(path); err != nil {
if os.IsNotExist(err) {
if err := os.Mkdir(path, 0755); err != nil {
panic(fmt.Sprintf("Could not create chaincodes install path: %s", err))
}
} else {
panic(fmt.Sprintf("Could not stat chaincodes install path: %s", err))
}
} else if !s.IsDir() {
panic(fmt.Errorf("chaincode path exists but not a dir: %s", path))
}
chaincodeInstallPath = path
}
func | (ccname string, ccversion string) ([]byte, error) {
return GetChaincodePackageFromPath(ccname, ccversion, chaincodeInstallPath)
}
// GetChaincodePackage returns the chaincode package from the file system
func GetChaincodePackageFromPath(ccname string, ccversion string, ccInstallPath string) ([]byte, error) {
path := fmt.Sprintf("%s/%s.%s", ccInstallPath, ccname, ccversion)
var ccbytes []byte
var err error
if ccbytes, err = ioutil.ReadFile(path); err != nil {
return nil, err
}
return ccbytes, nil
}
// ChaincodePackageExists returns whether the chaincode package exists in the file system
func ChaincodePackageExists(ccname string, ccversion string) (bool, error) {
path := filepath.Join(chaincodeInstallPath, ccname+"."+ccversion)
_, err := os.Stat(path)
if err == nil {
// chaincodepackage already exists
return true, nil
}
return false, err
}
type CCCacheSupport interface {
// GetChaincode is needed by the cache to get chaincode data
GetChaincode(ccname string, ccversion string) (CCPackage, error)
}
// CCInfoFSImpl provides the implementation for CC on the FS and the access to it
// It implements CCCacheSupport
type CCInfoFSImpl struct{}
// GetChaincodeFromFS this is a wrapper for hiding package implementation.
// It calls GetChaincodeFromPath with the chaincodeInstallPath
func (cifs *CCInfoFSImpl) GetChaincode(ccname string, ccversion string) (CCPackage, error) {
return cifs.GetChaincodeFromPath(ccname, ccversion, chaincodeInstallPath)
}
// GetChaincodeFromPath this is a wrapper for hiding package implementation.
func (*CCInfoFSImpl) GetChaincodeFromPath(ccname string, ccversion string, path string) (CCPackage, error) {
// try raw CDS
cccdspack := &CDSPackage{}
_, _, err := cccdspack.InitFromPath(ccname, ccversion, path)
if err != nil {
// try signed CDS
ccscdspack := &SignedCDSPackage{}
_, _, err = ccscdspack.InitFromPath(ccname, ccversion, path)
if err != nil {
return nil, err
}
return ccscdspack, nil
}
return cccdspack, nil
}
// PutChaincodeIntoFS is a wrapper for putting raw ChaincodeDeploymentSpec
//using CDSPackage. This is only used in UTs
func (*CCInfoFSImpl) PutChaincode(depSpec *pb.ChaincodeDeploymentSpec) (CCPackage, error) {
buf, err := proto.Marshal(depSpec)
if err != nil {
return nil, err
}
cccdspack := &CDSPackage{}
if _, err := cccdspack.InitFromBuffer(buf); err != nil {
return nil, err
}
err = cccdspack.PutChaincodeToFS()
if err != nil {
return nil, err
}
return cccdspack, nil
}
// The following lines create the cache of CCPackage data that sits
// on top of the file system and avoids a trip to the file system
// every time. The cache is disabled by default and only enabled
// if EnableCCInfoCache is called. This is an unfortunate hack
// required by some legacy tests that remove chaincode packages
// from the file system as a means of simulating particular test
// conditions. This way of testing is incompatible with the
// immutable nature of chaincode packages that is assumed by hlf v1
// and implemented by this cache. For this reason, tests are for now
// allowed to run with the cache disabled (unless they enable it)
// until a later time in which they are fixed. The peer process on
// the other hand requires the benefits of this cache and therefore
// enables it.
// TODO: (post v1) enable cache by default as soon as https://jira.hyperledger.org/browse/FAB-3785 is completed
// ccInfoFSStorageMgr is the storage manager used either by the cache or if the
// cache is bypassed
var ccInfoFSProvider = &CCInfoFSImpl{}
// ccInfoCache is the cache instance itself
var ccInfoCache = NewCCInfoCache(ccInfoFSProvider)
// ccInfoCacheEnabled keeps track of whether the cache is enable
// (it is disabled by default)
var ccInfoCacheEnabled bool
// EnableCCInfoCache can be called to enable the cache
func EnableCCInfoCache() {
ccInfoCacheEnabled = true
}
// GetChaincodeFromFS retrieves chaincode information from the file system
func GetChaincodeFromFS(ccname string, ccversion string) (CCPackage, error) {
return ccInfoFSProvider.GetChaincode(ccname, ccversion)
}
// PutChaincodeIntoFS puts chaincode information in the file system (and
// also in the cache to prime it) if the cache is enabled, or directly
// from the file system otherwise
func PutChaincodeIntoFS(depSpec *pb.ChaincodeDeploymentSpec) error {
_, err := ccInfoFSProvider.PutChaincode(depSpec)
return err
}
// GetChaincodeData gets chaincode data from cache if there's one
func GetChaincodeData(ccname string, ccversion string) (*ChaincodeData, error) {
if ccInfoCacheEnabled {
ccproviderLogger.Debugf("Getting chaincode data for <%s, %s> from cache", ccname, ccversion)
return ccInfoCache.GetChaincodeData(ccname, ccversion)
}
if ccpack, err := ccInfoFSProvider.GetChaincode(ccname, ccversion); err != nil {
return nil, err
} else {
ccproviderLogger.Infof("Putting chaincode data for <%s, %s> into cache", ccname, ccversion)
return ccpack.GetChaincodeData(), nil
}
}
func CheckInstantiationPolicy(name, version string, cdLedger *ChaincodeData) error {
ccdata, err := GetChaincodeData(name, version)
if err != nil {
return err
}
// we have the info from the fs, check that the policy
// matches the one on the file system if one was specified;
// this check is required because the admin of this peer
// might have specified instantiation policies for their
// chaincode, for example to make sure that the chaincode
// is only instantiated on certain channels; a malicious
// peer on the other hand might have created a deploy
// transaction that attempts to bypass the instantiation
// policy. This check is there to ensure that this will not
// happen, i.e. that the peer will refuse to invoke the
// chaincode under these conditions. More info on
// https://jira.hyperledger.org/browse/FAB-3156
if ccdata.InstantiationPolicy != nil {
if !bytes.Equal(ccdata.InstantiationPolicy, cdLedger.InstantiationPolicy) {
return fmt.Errorf("Instantiation policy mismatch for cc %s/%s", name, version)
}
}
return nil
}
// GetCCPackage tries each known package implementation one by one
// till the right package is found
func GetCCPackage(buf []byte) (CCPackage, error) {
// try raw CDS
cccdspack := &CDSPackage{}
if _, err := cccdspack.InitFromBuffer(buf); err != nil {
// try signed CDS
ccscdspack := &SignedCDSPackage{}
if _, err := ccscdspack.InitFromBuffer(buf); err != nil {
return nil, err
}
return ccscdspack, nil
}
return cccdspack, nil
}
// GetInstalledChaincodes returns a map whose key is the chaincode id and
// value is the ChaincodeDeploymentSpec struct for that chaincodes that have
// been installed (but not necessarily instantiated) on the peer by searching
// the chaincode install path
func GetInstalledChaincodes() (*pb.ChaincodeQueryResponse, error) {
files, err := ioutil.ReadDir(chaincodeInstallPath)
if err != nil {
return nil, err
}
// array to store info for all chaincode entries from LSCC
var ccInfoArray []*pb.ChaincodeInfo
for _, file := range files {
// split at first period as chaincode versions can contain periods while
// chaincode names cannot
fileNameArray := strings.SplitN(file.Name(), ".", 2)
// check that length is 2 as expected, otherwise skip to next cc file
if len(fileNameArray) == 2 {
ccname := fileNameArray[0]
ccversion := fileNameArray[1]
ccpack, err := GetChaincodeFromFS(ccname, ccversion)
if err != nil {
// either chaincode on filesystem has been tampered with or
// a non-chaincode file has been found in the chaincodes directory
ccproviderLogger.Errorf("Unreadable chaincode file found on filesystem: %s", file.Name())
continue
}
cdsfs := ccpack.GetDepSpec()
name := cdsfs.GetChaincodeSpec().GetChaincodeId().Name
version := cdsfs.GetChaincodeSpec().GetChaincodeId().Version
if name != ccname || version != ccversion {
// chaincode name/version in the chaincode file name has been modified
// by an external entity
ccproviderLogger.Errorf("Chaincode file's name/version has been modified on the filesystem: %s", file.Name())
continue
}
path := cdsfs.GetChaincodeSpec().ChaincodeId.Path
// since this is just an installed chaincode these should be blank
input, escc, vscc := "", "", ""
ccInfo := &pb.ChaincodeInfo{Name: name, Version: version, Path: path, Input: input, Escc: escc, Vscc: vscc, Id: ccpack.GetId()}
// add this specific chaincode's metadata to the array of all chaincodes
ccInfoArray = append(ccInfoArray, ccInfo)
}
}
// add array with info about all instantiated chaincodes to the query
// response proto
cqr := &pb.ChaincodeQueryResponse{Chaincodes: ccInfoArray}
return cqr, nil
}
// CCContext pass this around instead of string of args
type CCContext struct {
// ChainID chain id
ChainID string
// Name chaincode name
Name string
// Version used to construct the chaincode image and register
Version string
// TxID is the transaction id for the proposal (if any)
TxID string
// Syscc is this a system chaincode
Syscc bool
// SignedProposal for this invoke (if any) this is kept here for access
// control and in case we need to pass something from this to the chaincode
SignedProposal *pb.SignedProposal
// Proposal for this invoke (if any) this is kept here just in case we need to
// pass something from this to the chaincode
Proposal *pb.Proposal
// canonicalName is not set but computed
canonicalName string
// this is additional data passed to the chaincode
ProposalDecorations map[string][]byte
}
// NewCCContext just construct a new struct with whatever args
func NewCCContext(cname, name, version, txid string, syscc bool, signedProp *pb.SignedProposal, prop *pb.Proposal) *CCContext {
cccid := &CCContext{
ChainID: cname,
Name: name,
Version: version,
TxID: txid,
Syscc: syscc,
SignedProposal: signedProp,
Proposal: prop,
canonicalName: name + ":" + version,
ProposalDecorations: nil,
}
// The version CANNOT be empty. The chaincode namespace has to use version and chain name.
// Note that neither channel name nor version are stored on the ledger.
if version == "" {
panic(fmt.Sprintf("---empty version---(%s)", cccid))
}
ccproviderLogger.Debugf("NewCCCC(%s)", cccid)
return cccid
}
func (cccid *CCContext) String() string {
return fmt.Sprintf("chain=%s,chaincode=%s,version=%s,txid=%s,syscc=%t,proposal=%p,canname=%s",
cccid.ChainID, cccid.Name, cccid.Version, cccid.TxID, cccid.Syscc, cccid.Proposal, cccid.canonicalName)
}
// GetCanonicalName returns the canonical name associated with the proposal context
func (cccid *CCContext) GetCanonicalName() string {
if cccid.canonicalName == "" {
panic(fmt.Sprintf("missing canonical name: %s", cccid))
}
return cccid.canonicalName
}
//-------- ChaincodeDefinition - interface for ChaincodeData ------
// ChaincodeDefinition describes all of the necessary information for a peer to decide whether to endorse
// a proposal and whether to validate a transaction, for a particular chaincode.
type ChaincodeDefinition interface {
// CCName returns the name of this chaincode (the name it was put in the ChaincodeRegistry with).
CCName() string
// Hash returns the hash of the chaincode.
Hash() []byte
// CCVersion returns the version of the chaincode.
CCVersion() string
// Validation returns how to validate transactions for this chaincode.
// The string returned is the name of the validation method (usually 'vscc')
// and the bytes returned are the argument to the validation (in the case of
// 'vscc', this is a marshaled pb.VSCCArgs message).
Validation() (string, []byte)
// Endorsement returns how to endorse proposals for this chaincode.
// The string returns is the name of the endorsement method (usually 'escc').
Endorsement() string
}
//-------- ChaincodeData is stored on the LSCC -------
// ChaincodeData defines the datastructure for chaincodes to be serialized by proto
// Type provides an additional check by directing to use a specific package after instantiation
// Data is Type specifc (see CDSPackage and SignedCDSPackage)
type ChaincodeData struct {
// Name of the chaincode
Name string `protobuf:"bytes,1,opt,name=name"`
// Version of the chaincode
Version string `protobuf:"bytes,2,opt,name=version"`
// Escc for the chaincode instance
Escc string `protobuf:"bytes,3,opt,name=escc"`
// Vscc for the chaincode instance
Vscc string `protobuf:"bytes,4,opt,name=vscc"`
// Policy endorsement policy for the chaincode instance
Policy []byte `protobuf:"bytes,5,opt,name=policy,proto3"`
// Data data specific to the package
Data []byte `protobuf:"bytes,6,opt,name=data,proto3"`
// Id of the chaincode that's the unique fingerprint for the CC This is not
// currently used anywhere but serves as a good eyecatcher
Id []byte `protobuf:"bytes,7,opt,name=id,proto3"`
// InstantiationPolicy for the chaincode
InstantiationPolicy []byte `protobuf:"bytes,8,opt,name=instantiation_policy,proto3"`
}
// CCName returns the name of this chaincode (the name it was put in the ChaincodeRegistry with).
func (cd *ChaincodeData) CCName() string {
return cd.Name
}
// Hash returns the hash of the chaincode.
func (cd *ChaincodeData) Hash() []byte {
return cd.Id
}
// CCVersion returns the version of the chaincode.
func (cd *ChaincodeData) CCVersion() string {
return cd.Version
}
// Validation returns how to validate transactions for this chaincode.
// The string returned is the name of the validation method (usually 'vscc')
// and the bytes returned are the argument to the validation (in the case of
// 'vscc', this is a marshaled pb.VSCCArgs message).
func (cd *ChaincodeData) Validation() (string, []byte) {
return cd.Vscc, cd.Policy
}
// Endorsement returns how to endorse proposals for this chaincode.
// The string returns is the name of the endorsement method (usually 'escc').
func (cd *ChaincodeData) Endorsement() string {
return cd.Escc
}
// implement functions needed from proto.Message for proto's mar/unmarshal functions
// Reset resets
func (cd *ChaincodeData) Reset() { *cd = ChaincodeData{} }
// String converts to string
func (cd *ChaincodeData) String() string { return proto.CompactTextString(cd) }
// ProtoMessage just exists to make proto happy
func (*ChaincodeData) ProtoMessage() {}
// ChaincodeSpecGetter normalizes getting a chaincode spec from an
// ChaincodeInvocationSpec or a ChaincodeDeploymentSpec.
type ChaincodeSpecGetter interface {
GetChaincodeSpec() *pb.ChaincodeSpec
}
// ChaincodeProvider provides an abstraction layer that is
// used for different packages to interact with code in the
// chaincode package without importing it; more methods
// should be added below if necessary
type ChaincodeProvider interface {
// GetContext returns a ledger context and a tx simulator; it's the
// caller's responsability to release the simulator by calling its
// done method once it is no longer useful
GetContext(ledger ledger.PeerLedger, txid string) (context.Context, ledger.TxSimulator, error)
// ExecuteChaincode executes the chaincode given context and args
ExecuteChaincode(ctxt context.Context, cccid *CCContext, args [][]byte, height uint64) (*pb.Response, *pb.ChaincodeEvent, error)
// Execute executes the chaincode given context and spec (invocation or deploy)
Execute(ctxt context.Context, cccid *CCContext, spec ChaincodeSpecGetter, height uint64) (*pb.Response, *pb.ChaincodeEvent, error)
// Stop stops the chaincode given context and deployment spec
Stop(ctxt context.Context, cccid *CCContext, spec *pb.ChaincodeDeploymentSpec) error
}
| GetChaincodePackage |
client.rs | use tungstenite::{connect, Message};
use url::Url;
fn main() {
env_logger::init();
let (mut socket, response) =
connect(Url::parse("ws://localhost:3012/socket").unwrap()).expect("Can't connect");
println!("Connected to the server");
println!("Response HTTP code: {}", response.status());
println!("Response contains the following headers:");
for (ref header, _value) in response.headers() {
println!("* {}", header); | }
socket
.write_message(Message::Text("Hello WebSocket".into()))
.unwrap();
loop {
let msg = socket.read_message().expect("Error reading message");
println!("Received: {}", msg);
}
// socket.close(None);
} | |
0004_add_modify_staff_status_permission.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('accounts', '0003_create_system_user'),
]
operations = [
migrations.AlterModelOptions(
name='user',
options={'verbose_name': 'user', 'verbose_name_plural': 'users', 'permissions': (('can_view_banned', 'Can view banned users.'), ('can_ban', 'Can ban users.'), ('can_unban', 'Can unban users.'), ('can_view_user_report', 'Can view user report.'), ('can_view_group_report', 'Can view group report.'), ('can_impersonate', 'Can impersonate other users.'), ('can_moderate_all_messages', 'Can moderate all messages.'), ('can_initiate_direct_messages', 'Can initiate direct messages.'), ('can_modify_permissions', 'Can modify user permissions.'), ('can_modify_staff_status', "Can modify a user's staff status"))},
),
] |
|
af_xdp.go | // Copyright (C) 2020 Cisco Systems Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package uplink
import (
"fmt"
"github.com/pkg/errors"
"github.com/projectcalico/vpp-dataplane/vpp-manager/config"
"github.com/projectcalico/vpp-dataplane/vpp-manager/utils"
"github.com/projectcalico/vpp-dataplane/vpplink"
"github.com/projectcalico/vpp-dataplane/vpplink/types"
log "github.com/sirupsen/logrus"
"github.com/vishvananda/netlink"
)
const (
minAfXDPKernelVersion = "5.4.0-0"
maxAfXDPMTU = 3072
)
type AFXDPDriver struct {
UplinkDriverData
}
func (d *AFXDPDriver) IsSupported(warn bool) bool {
minVersion, err := utils.ParseKernelVersion(minAfXDPKernelVersion)
if err != nil {
log.Panicf("Error getting min kernel version %v", err)
}
if d.params.KernelVersion == nil {
if warn {
log.Warnf("Unkown current kernel version")
}
return false
}
if !d.params.KernelVersion.IsAtLeast(minVersion) {
if warn {
log.Warnf("Kernel %s doesn't support AF_XDP", d.params.KernelVersion)
}
return false
}
if d.conf.Mtu > maxAfXDPMTU {
if warn {
log.Warnf("MTU %d too large for AF_XDP (max 3072)", d.conf.Mtu)
}
return false
}
return false
}
func (d *AFXDPDriver) PreconfigureLinux() error {
link, err := netlink.LinkByName(d.params.MainInterface)
if err != nil {
return errors.Wrapf(err, "Error finding link %s", d.params.MainInterface)
}
err = netlink.SetPromiscOn(link)
if err != nil {
return errors.Wrapf(err, "Error setting link %s promisc on", d.params.MainInterface)
}
err = utils.SetInterfaceRxQueues(d.params.MainInterface, d.params.NumRxQueues)
if err != nil {
log.Errorf("Error setting link %s NumQueues to %d, using %d queues: %v", d.params.MainInterface, d.params.NumRxQueues, d.conf.NumRxQueues, err)
/* Try with linux NumRxQueues on error, otherwise af_xdp wont start */
d.params.NumRxQueues = d.conf.NumRxQueues
}
if d.conf.Mtu > maxAfXDPMTU {
log.Infof("Reducing interface MTU to %d for AF_XDP", maxAfXDPMTU)
err = netlink.LinkSetMTU(link, maxAfXDPMTU)
if err != nil {
return errors.Wrapf(err, "Error reducing MTU to %d", maxAfXDPMTU)
}
d.conf.Mtu = maxAfXDPMTU
}
if d.params.UserSpecifiedMtu > maxAfXDPMTU {
log.Infof("Reducing user specified MTU to %d", maxAfXDPMTU)
d.params.UserSpecifiedMtu = maxAfXDPMTU
}
return nil
}
func (d *AFXDPDriver) RestoreLinux() {
if !d.conf.IsUp {
return
}
// Interface should pop back in root ns once vpp exits
link, err := utils.SafeSetInterfaceUpByName(d.params.MainInterface)
if err != nil {
log.Warnf("Error setting %s up: %v", d.params.MainInterface, err)
return
}
/* Restore XDP specific settings */
log.Infof("Removing AF XDP conf")
if !d.conf.PromiscOn {
log.Infof("Setting promisc off")
err = netlink.SetPromiscOff(link)
if err != nil {
log.Errorf("Error setting link %s promisc off %v", d.params.MainInterface, err)
}
}
if d.conf.NumRxQueues != d.params.NumRxQueues {
log.Infof("Setting back %d queues", d.conf.NumRxQueues)
err = utils.SetInterfaceRxQueues(d.params.MainInterface, d.conf.NumRxQueues)
if err != nil {
log.Errorf("Error setting link %s NumQueues to %d %v", d.params.MainInterface, d.conf.NumRxQueues, err)
}
}
// Re-add all adresses and routes
d.restoreLinuxIfConf(link)
}
func (d *AFXDPDriver) CreateMainVppInterface(vpp *vpplink.VppLink, vppPid int) (err error) {
err = d.moveInterfaceToNS(d.params.MainInterface, vppPid)
if err != nil {
return errors.Wrap(err, "Moving uplink in NS failed")
}
intf := types.VppXDPInterface{
GenericVppInterface: d.getGenericVppInterface(),
}
err = vpp.CreateAfXDP(&intf)
if err != nil {
return errors.Wrapf(err, "Error creating AF_XDP interface")
}
log.Infof("Created AF_XDP interface %d", intf.SwIfIndex)
if intf.SwIfIndex != config.DataInterfaceSwIfIndex {
return fmt.Errorf("Created AF_XDP interface has wrong swIfIndex %d!", intf.SwIfIndex)
}
err = vpp.SetInterfaceMacAddress(intf.SwIfIndex, &d.conf.HardwareAddr)
if err != nil {
return errors.Wrap(err, "could not set af_xdp interface mac address in vpp")
}
return nil
}
func | (params *config.VppManagerParams, conf *config.InterfaceConfig) *AFXDPDriver {
d := &AFXDPDriver{}
d.name = NATIVE_DRIVER_AF_XDP
d.conf = conf
d.params = params
return d
}
| NewAFXDPDriver |
cluster_service_pb2_grpc.py | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.container_v1beta1.proto import (
cluster_service_pb2 as google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class ClusterManagerStub(object):
"""Google Kubernetes Engine Cluster Manager v1beta1
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListClusters = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/ListClusters",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListClustersRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListClustersResponse.FromString,
)
self.GetCluster = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/GetCluster",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetClusterRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Cluster.FromString,
)
self.CreateCluster = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/CreateCluster",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CreateClusterRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.UpdateCluster = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/UpdateCluster",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateClusterRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.UpdateNodePool = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/UpdateNodePool",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateNodePoolRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetNodePoolAutoscaling = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetNodePoolAutoscaling",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolAutoscalingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetLoggingService = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetLoggingService",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLoggingServiceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetMonitoringService = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetMonitoringService",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMonitoringServiceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetAddonsConfig = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetAddonsConfig",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetAddonsConfigRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetLocations = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetLocations",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLocationsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.UpdateMaster = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/UpdateMaster",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateMasterRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetMasterAuth = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetMasterAuth",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMasterAuthRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.DeleteCluster = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/DeleteCluster",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.DeleteClusterRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.ListOperations = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/ListOperations",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListOperationsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListOperationsResponse.FromString,
)
self.GetOperation = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/GetOperation",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetOperationRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.CancelOperation = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/CancelOperation",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CancelOperationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetServerConfig = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/GetServerConfig",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetServerConfigRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ServerConfig.FromString,
)
self.ListNodePools = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/ListNodePools",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListNodePoolsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListNodePoolsResponse.FromString,
)
self.GetNodePool = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/GetNodePool",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetNodePoolRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.NodePool.FromString,
)
self.CreateNodePool = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/CreateNodePool",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CreateNodePoolRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.DeleteNodePool = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/DeleteNodePool",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.DeleteNodePoolRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.RollbackNodePoolUpgrade = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/RollbackNodePoolUpgrade",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.RollbackNodePoolUpgradeRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetNodePoolManagement = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetNodePoolManagement",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolManagementRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetLabels = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetLabels",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLabelsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetLegacyAbac = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetLegacyAbac",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLegacyAbacRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.StartIPRotation = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/StartIPRotation",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.StartIPRotationRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.CompleteIPRotation = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/CompleteIPRotation",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CompleteIPRotationRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetNodePoolSize = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetNodePoolSize",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolSizeRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetNetworkPolicy = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetNetworkPolicy",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNetworkPolicyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.SetMaintenancePolicy = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/SetMaintenancePolicy",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMaintenancePolicyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.FromString,
)
self.ListUsableSubnetworks = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/ListUsableSubnetworks",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListUsableSubnetworksRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListUsableSubnetworksResponse.FromString,
)
self.ListLocations = channel.unary_unary(
"/google.container.v1beta1.ClusterManager/ListLocations",
request_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListLocationsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListLocationsResponse.FromString,
)
class ClusterManagerServicer(object):
"""Google Kubernetes Engine Cluster Manager v1beta1
"""
def ListClusters(self, request, context):
"""Lists all clusters owned by a project in either the specified zone or all
zones.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCluster(self, request, context):
"""Gets the details for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateCluster(self, request, context):
"""Creates a cluster, consisting of the specified number and type of Google
Compute Engine instances.
By default, the cluster is created in the project's
[default network](/compute/docs/networks-and-firewalls#networks).
One firewall is added for the cluster. After cluster creation,
the cluster creates routes for each node to allow the containers
on that node to communicate with all other instances in the
cluster.
Finally, an entry is added to the project's global metadata indicating
which CIDR range is being used by the cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCluster(self, request, context):
"""Updates the settings for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateNodePool(self, request, context):
"""Updates the version and/or image type of a specific node pool.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetNodePoolAutoscaling(self, request, context):
"""Sets the autoscaling settings of a specific node pool.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetLoggingService(self, request, context):
"""Sets the logging service for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetMonitoringService(self, request, context):
"""Sets the monitoring service for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetAddonsConfig(self, request, context):
"""Sets the addons for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetLocations(self, request, context):
"""Sets the locations for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateMaster(self, request, context):
|
def SetMasterAuth(self, request, context):
"""Used to set master auth materials. Currently supports :-
Changing the admin password for a specific cluster.
This can be either via password generation or explicitly set.
Modify basic_auth.csv and reset the K8S API server.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteCluster(self, request, context):
"""Deletes the cluster, including the Kubernetes endpoint and all worker
nodes.
Firewalls and routes that were configured during cluster creation
are also deleted.
Other Google Compute Engine resources that might be in use by the cluster
(e.g. load balancer resources) will not be deleted if they weren't present
at the initial create time.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListOperations(self, request, context):
"""Lists all operations in a project in a specific zone or all zones.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetOperation(self, request, context):
"""Gets the specified operation.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CancelOperation(self, request, context):
"""Cancels the specified operation.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetServerConfig(self, request, context):
"""Returns configuration info about the Kubernetes Engine service.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListNodePools(self, request, context):
"""Lists the node pools for a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetNodePool(self, request, context):
"""Retrieves the node pool requested.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateNodePool(self, request, context):
"""Creates a node pool for a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteNodePool(self, request, context):
"""Deletes a node pool from a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def RollbackNodePoolUpgrade(self, request, context):
"""Roll back the previously Aborted or Failed NodePool upgrade.
This will be an no-op if the last upgrade successfully completed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetNodePoolManagement(self, request, context):
"""Sets the NodeManagement options for a node pool.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetLabels(self, request, context):
"""Sets labels on a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetLegacyAbac(self, request, context):
"""Enables or disables the ABAC authorization mechanism on a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def StartIPRotation(self, request, context):
"""Start master IP rotation.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CompleteIPRotation(self, request, context):
"""Completes master IP rotation.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetNodePoolSize(self, request, context):
"""Sets the size for a specific node pool.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetNetworkPolicy(self, request, context):
"""Enables/Disables Network Policy for a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SetMaintenancePolicy(self, request, context):
"""Sets the maintenance policy for a cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListUsableSubnetworks(self, request, context):
"""Lists subnetworks that are usable for creating clusters in a project.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListLocations(self, request, context):
"""Used to fetch locations that offer GKE.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_ClusterManagerServicer_to_server(servicer, server):
rpc_method_handlers = {
"ListClusters": grpc.unary_unary_rpc_method_handler(
servicer.ListClusters,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListClustersRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListClustersResponse.SerializeToString,
),
"GetCluster": grpc.unary_unary_rpc_method_handler(
servicer.GetCluster,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetClusterRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Cluster.SerializeToString,
),
"CreateCluster": grpc.unary_unary_rpc_method_handler(
servicer.CreateCluster,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CreateClusterRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"UpdateCluster": grpc.unary_unary_rpc_method_handler(
servicer.UpdateCluster,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateClusterRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"UpdateNodePool": grpc.unary_unary_rpc_method_handler(
servicer.UpdateNodePool,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateNodePoolRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetNodePoolAutoscaling": grpc.unary_unary_rpc_method_handler(
servicer.SetNodePoolAutoscaling,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolAutoscalingRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetLoggingService": grpc.unary_unary_rpc_method_handler(
servicer.SetLoggingService,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLoggingServiceRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetMonitoringService": grpc.unary_unary_rpc_method_handler(
servicer.SetMonitoringService,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMonitoringServiceRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetAddonsConfig": grpc.unary_unary_rpc_method_handler(
servicer.SetAddonsConfig,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetAddonsConfigRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetLocations": grpc.unary_unary_rpc_method_handler(
servicer.SetLocations,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLocationsRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"UpdateMaster": grpc.unary_unary_rpc_method_handler(
servicer.UpdateMaster,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.UpdateMasterRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetMasterAuth": grpc.unary_unary_rpc_method_handler(
servicer.SetMasterAuth,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMasterAuthRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"DeleteCluster": grpc.unary_unary_rpc_method_handler(
servicer.DeleteCluster,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.DeleteClusterRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"ListOperations": grpc.unary_unary_rpc_method_handler(
servicer.ListOperations,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListOperationsRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListOperationsResponse.SerializeToString,
),
"GetOperation": grpc.unary_unary_rpc_method_handler(
servicer.GetOperation,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetOperationRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"CancelOperation": grpc.unary_unary_rpc_method_handler(
servicer.CancelOperation,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CancelOperationRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"GetServerConfig": grpc.unary_unary_rpc_method_handler(
servicer.GetServerConfig,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetServerConfigRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ServerConfig.SerializeToString,
),
"ListNodePools": grpc.unary_unary_rpc_method_handler(
servicer.ListNodePools,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListNodePoolsRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListNodePoolsResponse.SerializeToString,
),
"GetNodePool": grpc.unary_unary_rpc_method_handler(
servicer.GetNodePool,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.GetNodePoolRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.NodePool.SerializeToString,
),
"CreateNodePool": grpc.unary_unary_rpc_method_handler(
servicer.CreateNodePool,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CreateNodePoolRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"DeleteNodePool": grpc.unary_unary_rpc_method_handler(
servicer.DeleteNodePool,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.DeleteNodePoolRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"RollbackNodePoolUpgrade": grpc.unary_unary_rpc_method_handler(
servicer.RollbackNodePoolUpgrade,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.RollbackNodePoolUpgradeRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetNodePoolManagement": grpc.unary_unary_rpc_method_handler(
servicer.SetNodePoolManagement,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolManagementRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetLabels": grpc.unary_unary_rpc_method_handler(
servicer.SetLabels,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLabelsRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetLegacyAbac": grpc.unary_unary_rpc_method_handler(
servicer.SetLegacyAbac,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetLegacyAbacRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"StartIPRotation": grpc.unary_unary_rpc_method_handler(
servicer.StartIPRotation,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.StartIPRotationRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"CompleteIPRotation": grpc.unary_unary_rpc_method_handler(
servicer.CompleteIPRotation,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.CompleteIPRotationRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetNodePoolSize": grpc.unary_unary_rpc_method_handler(
servicer.SetNodePoolSize,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNodePoolSizeRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetNetworkPolicy": grpc.unary_unary_rpc_method_handler(
servicer.SetNetworkPolicy,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetNetworkPolicyRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"SetMaintenancePolicy": grpc.unary_unary_rpc_method_handler(
servicer.SetMaintenancePolicy,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.SetMaintenancePolicyRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.Operation.SerializeToString,
),
"ListUsableSubnetworks": grpc.unary_unary_rpc_method_handler(
servicer.ListUsableSubnetworks,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListUsableSubnetworksRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListUsableSubnetworksResponse.SerializeToString,
),
"ListLocations": grpc.unary_unary_rpc_method_handler(
servicer.ListLocations,
request_deserializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListLocationsRequest.FromString,
response_serializer=google_dot_cloud_dot_container__v1beta1_dot_proto_dot_cluster__service__pb2.ListLocationsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.container.v1beta1.ClusterManager", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
| """Updates the master for a specific cluster.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!") |
lane_finder.py | import sys | import camera
import image_thresholding
import line_fitting
import matplotlib.image as mpimg
class Line():
def __init__(self):
self.detected = False # lane line detected in previous iteration
self.fit = None # most recent polynomial fit
self.fitx = None # most recent x pixel values for line
def process_image(img):
color_grad_combined = image_thresholding.color_grad(img)
warped, Minv = camera.perspective_transform(color_grad_combined, mtx, dist)
if left_line.detected and right_line.detected:
(left_line.fit, right_line.fit,
left_line.fitx, right_line.fitx,
ploty) = line_fitting.search_around_poly(warped, left_line.fit,
right_line.fit)
else:
(left_line.fit, right_line.fit,
left_line.fitx, right_line.fitx,
ploty) = line_fitting.fit_polynomial(warped, detected=False)
left_line.detected = True
right_line.detected = True
result = line_fitting.draw_lines(img, warped, Minv, left_line.fitx,
right_line.fitx, ploty)
return result
if __name__ == "__main__":
mtx, dist = camera.calibrate()
left_line = Line()
right_line = Line()
if (sys.argv[1].split(".")[-1] == "mp4"):
clip = VideoFileClip(sys.argv[1])
output = clip.fl_image(process_image)
output.write_videofile("output.mp4", audio=False)
else:
img = mpimg.imread(sys.argv[1])
img = process_image(img)
mpimg.imsave("output.jpg", img) | from moviepy.editor import VideoFileClip |
error.py | import math
import os
import sys
import traceback
import discord
from discord.ext import commands
class Errors(commands.Cog):
"""
Error handler
"""
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_ready(self):
print("Error cog loaded successfully")
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
|
def setup(bot):
bot.add_cog(Errors(bot))
| if hasattr(ctx.command, "on_error"):
return
# get the original exception
error = getattr(error, "original", error)
if isinstance(error, commands.BotMissingPermissions):
missing = [
perm.replace("_", " ").replace("guild", "server").title()
for perm in error.missing_perms
]
if len(missing) > 2:
fmt = "{}, and {}".format("**, **".join(missing[:-1]), missing[-1])
else:
fmt = " and ".join(missing)
embed = discord.Embed(
title="Missing Permissions",
description=f"I am missing **{fmt}** permissions to run this command :(",
color=0xFF0000,
)
return
if isinstance(error, commands.DisabledCommand):
await ctx.send("This command has been disabled.")
return
if isinstance(error, commands.CommandOnCooldown):
embed = discord.Embed(
title="Cooldown",
description=f"This command is on cooldown, please retry in {math.ceil(error.retry_after)}s.",
color=0xFF0000,
)
await ctx.send(embed=embed)
return
if isinstance(error, commands.MissingPermissions):
missing = [
perm.replace("_", " ").replace("guild", "server").title()
for perm in error.missing_perms
]
if len(missing) > 2:
fmt = "{}, and {}".format("**, **".join(missing[:-1]), missing[-1])
else:
fmt = " and ".join(missing)
embed = discord.Embed(
title="Insufficient Permission(s)",
description=f"You need the **{fmt}** permission(s) to use this command.",
color=0xFF0000,
)
await ctx.send(embed=embed)
return
if isinstance(error, commands.UserInputError):
embed = discord.Embed(
title="Error",
color=0xFF0000,
)
await ctx.send(embed=embed)
return
if isinstance(error, commands.NoPrivateMessage):
try:
await ctx.author.send("This command cannot be used in direct messages.")
except discord.Forbidden:
raise error
return
if isinstance(error, commands.CheckFailure):
embed = discord.Embed(
title="Permissions Not Satisfied",
color=0xFF0000,
)
await ctx.send(embed=embed)
return
if isinstance(error, commands.CommandNotFound):
return
print("Ignoring exception in command {}:".format(ctx.command), file=sys.stderr)
traceback.print_exception(
type(error), error, error.__traceback__, file=sys.stderr
) |
locales.py | from babel import localedata
from grow.pods import errors
from grow.pods import messages
import pickle
import os
import babel
import re
class Locales(object):
def __init__(self, pod):
self.pod = pod
def list_groups(self):
if 'locales' not in self.pod.yaml:
return []
return self.pod.yaml['locales'].keys()
def get_regions(self, group_name='default'):
if 'regions' not in self.pod.yaml:
return []
try:
return self.pod.yaml['locales'][group_name].get('regions', [])
except errors.PodConfigurationError:
return []
def get_languages(self, group_name='default'):
if 'locales' not in self.pod.yaml:
return []
try:
return self.pod.yaml['locales'][group_name].get('languages', [])
except errors.PodConfigurationError:
return []
def to_message(self): | group_message = messages.LocaleGroupMessage()
group_message.group_name = group_name
group_message.regions = self.get_regions(group_name)
group_message.languages = self.get_languages(group_name)
message.groups.append(group_message)
return message
class Locale(babel.Locale):
RTL_REGEX = re.compile('^(he|ar|fa|ur)(\W|$)')
_alias = None
def __init__(self, language, *args, **kwargs):
# Normalize from "de_de" to "de_DE" for case-sensitive filesystems.
parts = language.rsplit('_', 1)
if len(parts) > 1:
language = '{}_{}'.format(parts[0], parts[1].upper())
super(Locale, self).__init__(language, *args, **kwargs)
@classmethod
def parse(cls, *args, **kwargs):
locale = super(Locale, cls).parse(*args, **kwargs)
# Weak attempt to permit fuzzy locales (locales for which we still have
# language and country information, but not a full localedata file for),
# but disallow completely invalid locales. See note at end of file.
if locale and locale.get_display_name() is None:
raise ValueError('{} is not a valid locale identifier'.format(args[0]))
return locale
def __hash__(self):
return hash(str(self))
def __eq__(self, other):
if isinstance(other, basestring):
return str(self).lower() == other.lower()
return super(Locale, self).__eq__(other)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return '<Locale: "{}">'.format(str(self))
@classmethod
def parse_codes(cls, codes):
return [cls.parse(code) for code in codes]
@property
def is_rtl(self):
return Locale.RTL_REGEX.match(self.language)
@property
def direction(self):
return 'rtl' if self.is_rtl else 'ltr'
@classmethod
def from_alias(cls, pod, alias):
podspec = pod.get_podspec()
config = podspec.get_config()
if 'localization' in config and 'aliases' in config['localization']:
aliases = config['localization']['aliases']
for custom_locale, babel_locale in aliases.iteritems():
if custom_locale == alias:
return cls.parse(babel_locale)
return cls.parse(alias)
def set_alias(self, pod):
podspec = pod.get_podspec()
self._alias = podspec.get_locale_alias(str(self).lower())
@property
def alias(self):
return self._alias
@alias.setter
def alias(self, alias):
self._alias = alias
# NOTE: Babel does not support "fuzzy" locales. A locale is considered "fuzzy"
# when a corresponding "localedata" file that matches a given locale's full
# identifier (e.g. "en_US") does not exist. Here's one example: "en_BD". CLDR
# does not have a localedata file matching "en_BD" (English in Bangladesh), but
# it does have individual files for "en" and also "bn_BD". As it turns
# out, localedata files that correspond to a locale's full identifier (e.g.
# "bn_BD.dat") are actually pretty light on the content (largely containing
# things like start-of-week information) and most of the "meat" of the data is
# contained in the main localedata file, e.g. "en.dat".
#
# Users may need to generate pages corresponding to locales that we don't
# have full localedata for, and until Babel supports fuzzy locales, we'll
# monkeypatch two Babel functions to provide partial support for fuzzy locales.
#
# With this monkeypatch, locales will be valid even if Babel doesn't have a
# localedata file matching a locale's full identifier, but locales will still
# fail with a ValueError if the user specifies a territory that does not exist.
# With this patch, a user can, however, specify an invalid language. Obviously,
# this patch should be removed when/if Babel adds support for fuzzy locales.
# Optionally, we may want to provide users with more control over whether a
# locale is valid or invalid, but we can revisit that later.
# See: https://github.com/grow/grow/issues/93
def fuzzy_load(name, merge_inherited=True):
localedata._cache_lock.acquire()
try:
data = localedata._cache.get(name)
if not data:
# Load inherited data
if name == 'root' or not merge_inherited:
data = {}
else:
parts = name.split('_')
if len(parts) == 1:
parent = 'root'
else:
parent = '_'.join(parts[:-1])
data = fuzzy_load(parent).copy()
filename = os.path.join(localedata._dirname, '%s.dat' % name)
try:
fileobj = open(filename, 'rb')
try:
if name != 'root' and merge_inherited:
localedata.merge(data, pickle.load(fileobj))
else:
data = pickle.load(fileobj)
localedata._cache[name] = data
finally:
fileobj.close()
except IOError:
pass
return data
finally:
localedata._cache_lock.release()
localedata.exists = lambda name: True
localedata.load = fuzzy_load | message = messages.LocalesMessage()
message.groups = []
for group_name in self.list_groups(): |
datatype_uuid_container.go | // Code generated by github.com/actgardner/gogen-avro/v7. DO NOT EDIT.
/*
* SOURCE:
* namespace.avsc
*/
package avro
import (
"io"
"github.com/actgardner/gogen-avro/v7/compiler"
"github.com/actgardner/gogen-avro/v7/container"
"github.com/actgardner/gogen-avro/v7/vm"
)
func NewDatatypeUUIDWriter(writer io.Writer, codec container.Codec, recordsPerBlock int64) (*container.Writer, error) {
str := NewDatatypeUUID()
return container.NewWriter(writer, codec, recordsPerBlock, str.AvroRecordSchema())
}
// container reader
type DatatypeUUIDReader struct {
r io.Reader
p *vm.Program
}
func NewDatatypeUUIDReader(r io.Reader) (*DatatypeUUIDReader, error) |
func (r DatatypeUUIDReader) Read() (*DatatypeUUID, error) {
t := NewDatatypeUUID()
err := vm.Eval(r.r, r.p, t)
return t, err
}
| {
containerReader, err := container.NewReader(r)
if err != nil {
return nil, err
}
t := NewDatatypeUUID()
deser, err := compiler.CompileSchemaBytes([]byte(containerReader.AvroContainerSchema()), []byte(t.AvroRecordSchema()))
if err != nil {
return nil, err
}
return &DatatypeUUIDReader{
r: containerReader,
p: deser,
}, nil
} |
db.ts | import keysToObject from 'keys-to-object';
import {
Args,
Data,
fieldIs,
Obj,
Record,
RecordValue,
Resolver,
ResolveRequest,
Schema,
} from '../typings';
import {
createCompare,
getId,
isEqual,
isNewId,
mapArray,
mapDataAsync,
mergeRecord,
noUndef,
undefOr,
} from '../utils';
import walker from '../walker';
export interface IdRecord {
id: string;
[field: string]: RecordValue | null;
}
export interface Db {
find(
type: string,
args: Args,
fields: string[],
): IdRecord[] | Promise<IdRecord[]>;
insert(type: string, record: Record): string | Promise<string>;
update(type: string, id: string, record: Record): void | Promise<void>;
delete: (type: string, id: string) => void | Promise<void>;
}
const runner = walker<
Promise<void>,
{ db: Db; data: Data<Record>; firstIds: Data<string | null> }
>(
async (
{
root,
field,
args,
fields,
extra = { start: 0, end: 0 },
trace,
path,
key,
},
relations,
{ db, data, firstIds },
rootRecords: IdRecord[],
records: Obj<Obj<Obj<Record>>>,
querying: boolean,
noExtraFields?: boolean,
) => {
const idFields = Array.from(new Set(['id', ...fields]));
const sort =
args.sort && args.sort.filter(s => idFields.includes(s.replace('-', '')));
const slice = {
start: (args.start || 0) - extra.start,
end: undefOr(args.end, args.end! + extra.end) as number | undefined,
};
const relationFields = relations.filter(r => !r.foreign).map(r => r.name);
const allFields = Array.from(
new Set([
...idFields,
...relationFields,
...(!noExtraFields
? (args.sort || []).map(s => s.replace('-', ''))
: []),
]),
);
if (querying) {
records[key] = records[key] || {};
const results = {} as Obj<IdRecord>;
const doSingleQuery = async (
rootId: string,
fields: string[],
filter?: any[],
) => {
records[key][rootId] = records[key][rootId] || {};
(await db.find(
field.type,
{
...args,
sort,
start: 0,
end: slice.end,
filter:
args.filter && filter
? ['AND', args.filter, filter]
: args.filter || filter,
},
fields,
)).forEach(idRecord => {
const { id, ...record } = idRecord;
results[id] = idRecord;
mergeRecord(records[key][rootId], id, record);
});
};
if (!root.type) {
await doSingleQuery('', allFields);
} else {
const rootField = fieldIs.relation(field) ? root.field : 'id';
const relField = fieldIs.relation(field) ? 'id' : field.foreign;
if (!noExtraFields || allFields.includes(relField)) {
await Promise.all(
rootRecords.map(
rootRecord =>
rootRecord[rootField]
? doSingleQuery(rootRecord.id, allFields, [
relField,
'in',
[].concat(rootRecord[rootField] as any),
])
: Promise.resolve(),
),
);
}
}
const resultsArray = Object.keys(results).map(id => results[id]);
const newRecords = {};
await Promise.all(
relations.map(r =>
r.walk(resultsArray, newRecords, true, noExtraFields),
),
);
await Promise.all(
relations.map(r => r.walk(resultsArray, newRecords, false)),
);
} else {
data[field.type] = data[field.type] || {};
const fieldPath = [...path, key].join('_');
firstIds[fieldPath] = firstIds[fieldPath] || {};
Object.keys(records[key]).forEach(rootId => {
const sorted = Object.keys(records[key][rootId]).sort(
createCompare(
(id, k) => (k === 'id' ? id : noUndef(records[key][rootId][id][k])),
args.sort,
),
);
sorted.forEach((id, i) => {
if (i >= slice.start && (slice.end === undefined || i < slice.end)) {
const record = keysToObject(
trace &&
i >= trace.start &&
(trace.end === undefined || i < trace.end)
? relationFields
: allFields,
f => noUndef(records[key][rootId][id][f]),
);
delete record.id;
mergeRecord(data[field.type], id, record);
}
});
if (fieldIs.foreignRelation(field) || (field.isList && args.sort)) {
firstIds[fieldPath][rootId] = sorted[args.start || 0] || null;
}
});
}
},
);
const commit = async (
commits: Data[] = [],
schema: Schema,
db: Db,
newIds: Data<string>,
) => {
for (const records of commits) {
await mapDataAsync(records, async (record, type, id) => {
newIds[type] = newIds[type] || {};
if (!record) await db.delete(type, id);
else if (isNewId(id)) newIds[type][id] = await db.insert(type, record);
else await db.update(type, id, record);
});
await mapDataAsync(records, async (r, type, id) => {
if (r) {
const record = { ...r };
let hasNewIds = false;
for (const f of Object.keys(record)) {
const field = schema[type][f];
if (fieldIs.relation(field) && newIds[field.type]) {
const prev = record[f];
record[f] = mapArray(record[f], id =>
getId(id, newIds[field.type]),
);
if (!isEqual(record[f], prev)) hasNewIds = true;
}
}
if (hasNewIds) await db.update(type, getId(id, newIds[type])!, record);
}
});
}
};
export default function | (schema: Schema, db: Db) {
return (async (request?: ResolveRequest) => {
if (!request) return schema;
const newIds: Data<string> = {};
await commit(request.commits, schema, db, newIds);
const records: Obj<Obj<Obj<Record>>> = {};
const context = { db, data: {}, firstIds: {} };
await Promise.all(
runner(
request.queries || [],
schema,
context,
[{}],
records,
true,
request.context && request.context.noExtraFields,
),
);
await Promise.all(
runner(request.queries || [], schema, context, [{}], records, false),
);
return {
data: context.data,
newIds,
errors: [],
firstIds: context.firstIds,
};
}) as Resolver;
}
| dbResolver |
test.rs | //! Test helpers to use during testing.
use std::{convert::TryFrom, net, str::FromStr, sync::mpsc, thread};
#[cfg(feature = "cookie")]
use coo_kie::{Cookie, CookieJar};
use crate::ws::{error::WsClientError, WsClient, WsConnection};
use crate::{io::Filter, io::Io, rt::System, server::Server, service::ServiceFactory};
use crate::{time::Millis, time::Seconds, util::Bytes};
use super::client::{Client, ClientRequest, ClientResponse, Connector};
use super::error::{HttpError, PayloadError};
use super::header::{self, HeaderMap, HeaderName, HeaderValue};
use super::payload::Payload;
use super::{Method, Request, Uri, Version};
/// Test `Request` builder
///
/// ```rust,no_run
/// use ntex::http::test::TestRequest;
/// use ntex::http::{header, Request, Response, StatusCode, HttpMessage};
///
/// fn index(req: Request) -> Response {
/// if let Some(hdr) = req.headers().get(header::CONTENT_TYPE) {
/// Response::Ok().into()
/// } else {
/// Response::BadRequest().into()
/// }
/// }
///
/// let resp = index(
/// TestRequest::with_header("content-type", "text/plain").finish());
/// assert_eq!(resp.status(), StatusCode::OK);
///
/// let resp = index(
/// TestRequest::default().finish());
/// assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
/// ```
pub struct TestRequest(Option<Inner>);
struct Inner {
version: Version,
method: Method,
uri: Uri,
headers: HeaderMap,
#[cfg(feature = "cookie")]
cookies: CookieJar,
payload: Option<Payload>,
}
impl Default for TestRequest {
fn default() -> TestRequest {
TestRequest(Some(Inner {
method: Method::GET,
uri: Uri::from_str("/").unwrap(),
version: Version::HTTP_11,
headers: HeaderMap::new(),
#[cfg(feature = "cookie")]
cookies: CookieJar::new(),
payload: None,
}))
}
}
impl TestRequest {
/// Create TestRequest and set request uri
pub fn with_uri(path: &str) -> TestRequest {
TestRequest::default().uri(path).take()
}
/// Create TestRequest and set header
pub fn with_header<K, V>(key: K, value: V) -> TestRequest
where
HeaderName: TryFrom<K>,
HeaderValue: TryFrom<V>,
<HeaderName as TryFrom<K>>::Error: Into<HttpError>,
{
TestRequest::default().header(key, value).take()
}
/// Set HTTP version of this request
pub fn version(&mut self, ver: Version) -> &mut Self {
parts(&mut self.0).version = ver;
self
}
/// Set HTTP method of this request
pub fn method(&mut self, meth: Method) -> &mut Self {
parts(&mut self.0).method = meth;
self
}
/// Set HTTP Uri of this request
pub fn uri(&mut self, path: &str) -> &mut Self {
parts(&mut self.0).uri = Uri::from_str(path).unwrap();
self
}
/// Set a header
pub fn header<K, V>(&mut self, key: K, value: V) -> &mut Self
where
HeaderName: TryFrom<K>,
HeaderValue: TryFrom<V>,
<HeaderName as TryFrom<K>>::Error: Into<HttpError>,
{
if let Ok(key) = HeaderName::try_from(key) {
if let Ok(value) = HeaderValue::try_from(value) {
parts(&mut self.0).headers.append(key, value);
return self;
}
}
panic!("Cannot create header");
}
#[cfg(feature = "cookie")]
/// Set cookie for this request
pub fn cookie<'a>(&mut self, cookie: Cookie<'a>) -> &mut Self {
parts(&mut self.0).cookies.add(cookie.into_owned());
self
}
/// Set request payload
pub fn set_payload<B: Into<Bytes>>(&mut self, data: B) -> &mut Self {
let mut payload = crate::http::h1::Payload::empty();
payload.unread_data(data.into());
parts(&mut self.0).payload = Some(payload.into());
self
}
/// Take test request
pub fn take(&mut self) -> TestRequest {
TestRequest(self.0.take())
}
/// Complete request creation and generate `Request` instance
pub fn finish(&mut self) -> Request {
let inner = self.0.take().expect("cannot reuse test request builder");
let mut req = if let Some(pl) = inner.payload {
Request::with_payload(pl)
} else {
Request::with_payload(crate::http::h1::Payload::empty().into())
};
let head = req.head_mut();
head.uri = inner.uri;
head.method = inner.method;
head.version = inner.version;
head.headers = inner.headers;
if let Some(conn) = head.headers.get(header::CONNECTION) {
if let Ok(s) = conn.to_str() {
if s.to_lowercase().contains("upgrade") {
head.set_upgrade()
}
}
}
#[cfg(feature = "cookie")]
{
use percent_encoding::percent_encode;
use std::fmt::Write as FmtWrite;
let mut cookie = String::new();
for c in inner.cookies.delta() {
let name = percent_encode(c.name().as_bytes(), super::helpers::USERINFO);
let value = percent_encode(c.value().as_bytes(), super::helpers::USERINFO);
let _ = write!(cookie, "; {}={}", name, value);
}
if !cookie.is_empty() {
head.headers.insert(
super::header::COOKIE,
HeaderValue::from_str(&cookie.as_str()[2..]).unwrap(),
);
}
}
req
}
}
#[inline]
fn parts(parts: &mut Option<Inner>) -> &mut Inner {
parts.as_mut().expect("cannot reuse test request builder")
}
/// Start test server
///
/// `TestServer` is very simple test server that simplify process of writing
/// integration tests cases for ntex web applications.
///
/// # Examples
///
/// ```rust
/// use ntex::http;
/// use ntex::web::{self, App, HttpResponse};
///
/// async fn my_handler() -> Result<HttpResponse, std::io::Error> {
/// Ok(HttpResponse::Ok().into())
/// }
///
/// #[ntex::test]
/// async fn test_example() {
/// let mut srv = http::test::server(
/// || http::HttpService::new(
/// App::new().service(
/// web::resource("/").to(my_handler))
/// )
/// );
///
/// let req = srv.get("/");
/// let response = req.send().await.unwrap();
/// assert!(response.status().is_success());
/// }
/// ```
pub fn server<F, R>(factory: F) -> TestServer
where
F: Fn() -> R + Send + Clone + 'static,
R: ServiceFactory<Io>,
{
let (tx, rx) = mpsc::channel();
// run server in separate thread
thread::spawn(move || {
let sys = System::new("test-server");
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
let local_addr = tcp.local_addr().unwrap();
tx.send((sys.system(), local_addr)).unwrap();
sys.run(|| {
Server::build()
.listen("test", tcp, move |_| factory())?
.workers(1)
.disable_signals()
.run();
Ok(())
})
});
let (system, addr) = rx.recv().unwrap();
let client = {
let connector = {
#[cfg(feature = "openssl")]
{
use tls_openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
builder.set_verify(SslVerifyMode::NONE);
let _ = builder
.set_alpn_protos(b"\x02h2\x08http/1.1")
.map_err(|e| log::error!("Cannot set alpn protocol: {:?}", e));
Connector::default()
.timeout(Millis(30_000))
.openssl(builder.build())
.finish()
}
#[cfg(not(feature = "openssl"))]
{
Connector::default().timeout(Millis(30_000)).finish()
}
};
Client::build()
.timeout(Seconds(30))
.connector(connector)
.finish()
};
TestServer {
addr,
client,
system,
}
}
/// Test server controller
pub struct TestServer {
addr: net::SocketAddr,
client: Client,
system: System,
}
impl TestServer {
/// Construct test server url
pub fn addr(&self) -> net::SocketAddr {
self.addr
}
/// Construct test server url
pub fn url(&self, uri: &str) -> String {
if uri.starts_with('/') {
format!("http://localhost:{}{}", self.addr.port(), uri)
} else {
format!("http://localhost:{}/{}", self.addr.port(), uri)
}
}
/// Construct test https server url
pub fn surl(&self, uri: &str) -> String {
if uri.starts_with('/') {
format!("https://localhost:{}{}", self.addr.port(), uri)
} else {
format!("https://localhost:{}/{}", self.addr.port(), uri)
}
}
/// Create client request
pub fn request<S: AsRef<str>>(&self, method: Method, path: S) -> ClientRequest {
self.client
.request(method, self.url(path.as_ref()).as_str())
}
/// Create secure client request
pub fn srequest<S: AsRef<str>>(&self, method: Method, path: S) -> ClientRequest {
self.client
.request(method, self.surl(path.as_ref()).as_str())
}
/// Load response's body
pub async fn load_body(
&mut self,
mut response: ClientResponse,
) -> Result<Bytes, PayloadError> |
/// Connect to a websocket server
pub async fn ws(&mut self) -> Result<WsConnection<impl Filter>, WsClientError> {
self.ws_at("/").await
}
/// Connect to websocket server at a given path
pub async fn ws_at(
&mut self,
path: &str,
) -> Result<WsConnection<impl Filter>, WsClientError> {
WsClient::build(self.url(path))
.address(self.addr)
.timeout(Seconds(30))
.finish()
.unwrap()
.connect()
.await
}
#[cfg(feature = "openssl")]
/// Connect to a websocket server
pub async fn wss(
&mut self,
) -> Result<WsConnection<crate::connect::openssl::SslFilter>, WsClientError> {
self.wss_at("/").await
}
#[cfg(feature = "openssl")]
/// Connect to secure websocket server at a given path
pub async fn wss_at(
&mut self,
path: &str,
) -> Result<WsConnection<crate::connect::openssl::SslFilter>, WsClientError> {
use tls_openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
builder.set_verify(SslVerifyMode::NONE);
let _ = builder
.set_alpn_protos(b"\x08http/1.1")
.map_err(|e| log::error!("Cannot set alpn protocol: {:?}", e));
WsClient::build(self.url(path))
.address(self.addr)
.timeout(Seconds(30))
.openssl(builder.build())
.take()
.finish()
.unwrap()
.connect()
.await
}
/// Stop http server
fn stop(&mut self) {
self.system.stop();
}
}
impl Drop for TestServer {
fn drop(&mut self) {
self.stop()
}
}
| {
response.body().limit(10_485_760).await
} |
job_cli.go | /*
* Copyright 2019-2021 VMware, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package cli
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"github.com/FederatedAI/KubeFATE/k8s-deploy/pkg/api"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
"github.com/urfave/cli/v2"
)
func JobCommand() *cli.Command {
return &cli.Command{
Name: "job",
Flags: []cli.Flag{},
Subcommands: []*cli.Command{
JobListCommand(),
JobInfoCommand(),
JobStopCommand(),
JobDeleteCommand(),
},
Usage: "List jobs, describe and delete a job",
}
}
func JobListCommand() *cli.Command {
return &cli.Command{
Name: "list",
Aliases: []string{"ls"},
Flags: []cli.Flag{},
Usage: "Show job list",
Action: func(c *cli.Context) error {
job := new(Job)
return GetItemList(job)
},
}
}
func JobDeleteCommand() *cli.Command {
return &cli.Command{
Name: "delete",
Aliases: []string{"del"},
Flags: []cli.Flag{},
Usage: "Delete a job",
Action: func(c *cli.Context) error {
var uuid string
if c.Args().Len() > 0 {
uuid = c.Args().Get(0)
} else {
return errors.New("not uuid")
}
job := new(Job)
return DeleteItem(job, uuid)
},
}
}
func JobInfoCommand() *cli.Command {
return &cli.Command{
Name: "describe",
Flags: []cli.Flag{
&cli.StringFlag{
Name: "uuid",
Value: "",
Usage: "Describe a job with given UUID",
},
},
Usage: "Show job's details info",
Action: func(c *cli.Context) error {
var uuid string
if c.Args().Len() > 0 | else {
return errors.New("not uuid")
}
job := new(Job)
return GetItem(job, uuid)
},
}
}
func JobStopCommand() *cli.Command {
return &cli.Command{
Name: "stop",
Flags: []cli.Flag{
&cli.StringFlag{
Name: "uuid",
Value: "",
Usage: "Describe a job with given UUID",
},
},
Usage: "Stop job",
Action: func(c *cli.Context) error {
var uuid string
if c.Args().Len() > 0 {
uuid = c.Args().Get(0)
} else {
return errors.New("not uuid")
}
r := &Request{
Type: "PUT",
Path: "job",
Body: nil,
}
serviceURL := viper.GetString("serviceurl")
apiVersion := api.APIVersion + "/"
if serviceURL == "" {
serviceURL = "localhost:8080/"
}
URL := "http://" + serviceURL + "/" + apiVersion + r.Path + "/" + uuid + "?jobStatus=stop"
body := bytes.NewReader(r.Body)
log.Debug().Str("Type", r.Type).Str("url", URL).Msg("Request")
request, err := http.NewRequest(r.Type, URL, body)
if err != nil {
return err
}
token, err := getToken()
if err != nil {
return err
}
Authorization := fmt.Sprintf("Bearer %s", token)
request.Header.Add("Authorization", Authorization)
request.Header.Add("user-agent", "kubefate")
resp, err := http.DefaultClient.Do(request)
if err != nil {
return err
}
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err
}
if resp.StatusCode != 200 {
type JobErrMsg struct {
Error string
}
jobErrMsg := new(JobErrMsg)
err = json.Unmarshal(respBody, &jobErrMsg)
if err != nil {
return err
}
return fmt.Errorf("resp.StatusCode=%d, error: %s", resp.StatusCode, jobErrMsg.Error)
}
type JobResultMsg struct {
Msg string
Data string
}
JobResult := new(JobResultMsg)
err = json.Unmarshal(respBody, &JobResult)
if err != nil {
return err
}
log.Debug().Int("Code", resp.StatusCode).Bytes("Body", respBody).Msg("ok")
fmt.Println(JobResult.Data)
return nil
},
}
}
| {
uuid = c.Args().Get(0)
} |
validation_logic.go | /*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package v12
import (
"bytes"
"fmt"
"regexp"
"github.com/hyperledger/fabric/core/chaincode/platforms/ccmetadata"
"github.com/golang/protobuf/proto"
"github.com/hyperledger/fabric/common/channelconfig"
commonerrors "github.com/hyperledger/fabric/common/errors"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/core/chaincode/platforms"
"github.com/hyperledger/fabric/core/chaincode/platforms/car"
"github.com/hyperledger/fabric/core/chaincode/platforms/golang"
"github.com/hyperledger/fabric/core/chaincode/platforms/java"
"github.com/hyperledger/fabric/core/chaincode/platforms/node"
"github.com/hyperledger/fabric/core/common/ccprovider"
"github.com/hyperledger/fabric/core/common/privdata"
. "github.com/hyperledger/fabric/core/handlers/validation/api/capabilities"
. "github.com/hyperledger/fabric/core/handlers/validation/api/identities"
. "github.com/hyperledger/fabric/core/handlers/validation/api/policies"
. "github.com/hyperledger/fabric/core/handlers/validation/api/state"
"github.com/hyperledger/fabric/core/ledger/kvledger/txmgmt/rwsetutil"
"github.com/hyperledger/fabric/core/scc/lscc"
"github.com/hyperledger/fabric/protos/common"
"github.com/hyperledger/fabric/protos/ledger/rwset/kvrwset"
"github.com/hyperledger/fabric/protos/msp"
pb "github.com/hyperledger/fabric/protos/peer"
"github.com/hyperledger/fabric/protos/utils"
"github.com/pkg/errors"
)
var logger = flogging.MustGetLogger("vscc")
const (
DUPLICATED_IDENTITY_ERROR = "Endorsement policy evaluation failure might be caused by duplicated identities"
)
var validCollectionNameRegex = regexp.MustCompile(ccmetadata.AllowedCharsCollectionName)
//go:generate mockery -dir ../../api/capabilities/ -name Capabilities -case underscore -output mocks/
//go:generate mockery -dir ../../api/state/ -name StateFetcher -case underscore -output mocks/
//go:generate mockery -dir ../../api/identities/ -name IdentityDeserializer -case underscore -output mocks/
//go:generate mockery -dir ../../api/policies/ -name PolicyEvaluator -case underscore -output mocks/
// New creates a new instance of the default VSCC
// Typically this will only be invoked once per peer
func New(c Capabilities, s StateFetcher, d IdentityDeserializer, pe PolicyEvaluator) *Validator {
return &Validator{
capabilities: c,
stateFetcher: s,
deserializer: d,
policyEvaluator: pe,
}
}
// Validator implements the default transaction validation policy,
// which is to check the correctness of the read-write set and the endorsement
// signatures against an endorsement policy that is supplied as argument to
// every invoke
type Validator struct {
deserializer IdentityDeserializer
capabilities Capabilities
stateFetcher StateFetcher
policyEvaluator PolicyEvaluator
}
// Validate validates the given envelope corresponding to a transaction with an endorsement
// policy as given in its serialized form.
//
// The flow: unmarshal block.Data.Data[txPosition] down to the chaincode action
// payload at actionPosition, reject anything that is not an ENDORSER_TRANSACTION,
// deduplicate the endorsers' identities, and evaluate the resulting signature set
// against policyBytes. Transactions in the "lscc" namespace additionally go
// through ValidateLSCCInvocation.
func (vscc *Validator) Validate(
	block *common.Block,
	namespace string,
	txPosition int,
	actionPosition int,
	policyBytes []byte,
) commonerrors.TxValidationError {
	// get the envelope...
	env, err := utils.GetEnvelopeFromBlock(block.Data.Data[txPosition])
	if err != nil {
		logger.Errorf("VSCC error: GetEnvelope failed, err %s", err)
		return policyErr(err)
	}
	// ...and the payload...
	payl, err := utils.GetPayload(env)
	if err != nil {
		logger.Errorf("VSCC error: GetPayload failed, err %s", err)
		return policyErr(err)
	}
	chdr, err := utils.UnmarshalChannelHeader(payl.Header.ChannelHeader)
	if err != nil {
		return policyErr(err)
	}
	// validate the payload type
	if common.HeaderType(chdr.Type) != common.HeaderType_ENDORSER_TRANSACTION {
		logger.Errorf("Only Endorser Transactions are supported, provided type %d", chdr.Type)
		return policyErr(fmt.Errorf("Only Endorser Transactions are supported, provided type %d", chdr.Type))
	}
	// ...and the transaction...
	tx, err := utils.GetTransaction(payl.Data)
	if err != nil {
		logger.Errorf("VSCC error: GetTransaction failed, err %s", err)
		return policyErr(err)
	}
	// note: the local name cap shadows the builtin cap() for the rest of this function
	cap, err := utils.GetChaincodeActionPayload(tx.Actions[actionPosition].Payload)
	if err != nil {
		logger.Errorf("VSCC error: GetChaincodeActionPayload failed, err %s", err)
		return policyErr(err)
	}
	// duplicate endorsements by the same identity must count only once toward the policy
	signatureSet, err := vscc.deduplicateIdentity(cap)
	if err != nil {
		return policyErr(err)
	}
	// evaluate the signature set against the policy
	err = vscc.policyEvaluator.Evaluate(policyBytes, signatureSet)
	if err != nil {
		logger.Warningf("Endorsement policy failure for transaction txid=%s, err: %s", chdr.GetTxId(), err.Error())
		if len(signatureSet) < len(cap.Action.Endorsements) {
			// Warning: duplicated identities exist, endorsement failure might be cause by this reason
			return policyErr(errors.New(DUPLICATED_IDENTITY_ERROR))
		}
		return policyErr(fmt.Errorf("VSCC error: endorsement policy failure, err: %s", err))
	}
	// do some extra validation that is specific to lscc
	if namespace == "lscc" {
		logger.Debugf("VSCC info: doing special validation for LSCC")
		err := vscc.ValidateLSCCInvocation(chdr.ChannelId, env, cap, payl, vscc.capabilities)
		if err != nil {
			logger.Errorf("VSCC error: ValidateLSCCInvocation failed, err %s", err)
			return err
		}
	}
	return nil
}
// checkInstantiationPolicy evaluates an instantiation policy against a signed proposal.
// The envelope's payload and signature, together with the creator taken from the
// payload's signature header, form the signed data that the policy is evaluated over.
func (vscc *Validator) checkInstantiationPolicy(chainName string, env *common.Envelope, instantiationPolicy []byte, payl *common.Payload) commonerrors.TxValidationError {
	// the creator of the proposal is recorded in the signature header
	signatureHeader, err := utils.GetSignatureHeader(payl.Header.SignatureHeader)
	if err != nil {
		return policyErr(err)
	}
	// construct the (single-element) signed data set the policy is evaluated against
	signedData := []*common.SignedData{{
		Data:      env.Payload,
		Identity:  signatureHeader.Creator,
		Signature: env.Signature,
	}}
	if err := vscc.policyEvaluator.Evaluate(instantiationPolicy, signedData); err != nil {
		return policyErr(fmt.Errorf("chaincode instantiation policy violated, error %s", err))
	}
	return nil
}
// validateNewCollectionConfigs validates a set of collection configs in isolation:
// each entry must be a static collection config with a unique, well-formed name,
// consistent gossip peer counts, and a member-org policy that is a pure OR
// concatenation of identities.
func validateNewCollectionConfigs(newCollectionConfigs []*common.CollectionConfig) error {
	newCollectionsMap := make(map[string]bool, len(newCollectionConfigs))
	// Process each collection config from a set of collection configs
	for _, newCollectionConfig := range newCollectionConfigs {
		newCollection := newCollectionConfig.GetStaticCollectionConfig()
		if newCollection == nil {
			return errors.New("unknown collection configuration type")
		}
		// Ensure that there are no duplicate collection names
		collectionName := newCollection.GetName()
		if err := validateCollectionName(collectionName); err != nil {
			return err
		}
		if _, ok := newCollectionsMap[collectionName]; !ok {
			newCollectionsMap[collectionName] = true
		} else {
			return fmt.Errorf("collection-name: %s -- found duplicate collection configuration", collectionName)
		}
		// Validate gossip related parameters present in the collection config
		maximumPeerCount := newCollection.GetMaximumPeerCount()
		requiredPeerCount := newCollection.GetRequiredPeerCount()
		if maximumPeerCount < requiredPeerCount {
			// message fixed: the check rejects a maximum that is *less* than the required count
			return fmt.Errorf("collection-name: %s -- maximum peer count (%d) cannot be less than the required peer count (%d)",
				collectionName, maximumPeerCount, requiredPeerCount)
		}
		if requiredPeerCount < 0 {
			// message fixed: report the offending requiredPeerCount, not maximumPeerCount
			return fmt.Errorf("collection-name: %s -- requiredPeerCount (%d) cannot be less than zero",
				collectionName, requiredPeerCount)
		}
		// make sure that the signature policy is meaningful (only consists of ORs)
		err := validateSpOrConcat(newCollection.MemberOrgsPolicy.GetSignaturePolicy().Rule)
		if err != nil {
			return errors.WithMessage(err, fmt.Sprintf("collection-name: %s -- error in member org policy", collectionName))
		}
	}
	return nil
}
// validateSpOrConcat checks if the supplied signature policy is just an
// OR-concatenation of identities: every NOutOf node must have N == 1, checked
// recursively down the rule tree. Leaf (signed-by) nodes are always accepted.
func validateSpOrConcat(sp *common.SignaturePolicy) error {
	if sp.GetNOutOf() == nil {
		// a leaf rule (signed-by) — nothing more to check
		return nil
	}
	// check if N == 1 (OR concatenation)
	if sp.GetNOutOf().N != 1 {
		// fmt.Errorf instead of errors.New(fmt.Sprintf(...)) — same message, idiomatic form
		return fmt.Errorf("signature policy is not an OR concatenation, NOutOf %d", sp.GetNOutOf().N)
	}
	// recurse into all sub-rules
	for _, rule := range sp.GetNOutOf().Rules {
		err := validateSpOrConcat(rule)
		if err != nil {
			return err
		}
	}
	return nil
}
// checkForMissingCollections verifies that every collection present in the old
// (currently committed) configuration still exists in the new configuration
// package. Adding new collections is allowed; removing existing ones is not.
func checkForMissingCollections(newCollectionsMap map[string]*common.StaticCollectionConfig, oldCollectionConfigs []*common.CollectionConfig,
) error {
	var missing []string
	for _, oldConfig := range oldCollectionConfigs {
		staticConfig := oldConfig.GetStaticCollectionConfig()
		// only static collection configs are understood
		if staticConfig == nil {
			return policyErr(fmt.Errorf("unknown collection configuration type"))
		}
		// every old collection must appear in the new package
		name := staticConfig.GetName()
		if _, present := newCollectionsMap[name]; !present {
			missing = append(missing, name)
		}
	}
	if len(missing) == 0 {
		return nil
	}
	return policyErr(fmt.Errorf("the following existing collections are missing in the new collection configuration package: %v",
		missing))
}
// checkForModifiedCollectionsBTL verifies that the BlockToLive value of every
// existing collection is unchanged in the new collection configuration package.
func checkForModifiedCollectionsBTL(newCollectionsMap map[string]*common.StaticCollectionConfig, oldCollectionConfigs []*common.CollectionConfig,
) error {
	var modifiedCollectionsBTL []string
	// In the new collection config package, ensure that the block to live value is not
	// modified for the existing collections.
	for _, oldCollectionConfig := range oldCollectionConfigs {
		oldCollection := oldCollectionConfig.GetStaticCollectionConfig()
		// It cannot be nil
		if oldCollection == nil {
			return policyErr(fmt.Errorf("unknown collection configuration type"))
		}
		oldCollectionName := oldCollection.GetName()
		// plain map read (the redundant ", _" form is unnecessary); a missing entry
		// yields nil, same as before — callers run checkForMissingCollections first
		newCollection := newCollectionsMap[oldCollectionName]
		// BlockToLive cannot be changed
		if newCollection.GetBlockToLive() != oldCollection.GetBlockToLive() {
			modifiedCollectionsBTL = append(modifiedCollectionsBTL, oldCollectionName)
		}
	}
	if len(modifiedCollectionsBTL) > 0 {
		return policyErr(fmt.Errorf("the BlockToLive in the following existing collections must not be modified: %v",
			modifiedCollectionsBTL))
	}
	return nil
}
func validateNewCollectionConfigsAgainstOld(newCollectionConfigs []*common.CollectionConfig, oldCollectionConfigs []*common.CollectionConfig,
) error |
// validateCollectionName checks that a collection name is non-empty and consists
// entirely of characters permitted by validCollectionNameRegex.
func validateCollectionName(collectionName string) error {
	if len(collectionName) == 0 {
		return fmt.Errorf("empty collection-name is not allowed")
	}
	// the name is valid only if the regex matches it in its entirety
	if matched := validCollectionNameRegex.FindString(collectionName); len(matched) != len(collectionName) {
		return fmt.Errorf("collection-name: %s not allowed. A valid collection name follows the pattern: %s",
			collectionName, ccmetadata.AllowedCharsCollectionName)
	}
	return nil
}
// validateRWSetAndCollection performs validation of the rwset
// of an LSCC deploy operation and then it validates any collection
// configuration.
//
// The lscc writeset may carry at most two writes: the ChaincodeData entry and,
// optionally, the collection configuration under the chaincodeName~collection
// key. That ledger-side configuration must match byte-for-byte the collection
// config passed as the sixth lscc argument; under V1_2Validation the configs
// are additionally validated in isolation and (on upgrade) against the
// configuration already committed on the ledger.
func (vscc *Validator) validateRWSetAndCollection(
	lsccrwset *kvrwset.KVRWSet,
	cdRWSet *ccprovider.ChaincodeData,
	lsccArgs [][]byte,
	lsccFunc string,
	ac channelconfig.ApplicationCapabilities,
	channelName string,
) commonerrors.TxValidationError {
	/********************************************/
	/* security check 0.a - validation of rwset */
	/********************************************/
	// there can only be one or two writes
	if len(lsccrwset.Writes) > 2 {
		return policyErr(fmt.Errorf("LSCC can only issue one or two putState upon deploy"))
	}
	/**********************************************************/
	/* security check 0.b - validation of the collection data */
	/**********************************************************/
	// the collection config, if supplied, is the sixth lscc argument
	var collectionsConfigArg []byte
	if len(lsccArgs) > 5 {
		collectionsConfigArg = lsccArgs[5]
	}
	var collectionsConfigLedger []byte
	if len(lsccrwset.Writes) == 2 {
		// the second write must target the chaincodeName~collection key
		key := privdata.BuildCollectionKVSKey(cdRWSet.Name)
		if lsccrwset.Writes[1].Key != key {
			return policyErr(fmt.Errorf("invalid key for the collection of chaincode %s:%s; expected '%s', received '%s'",
				cdRWSet.Name, cdRWSet.Version, key, lsccrwset.Writes[1].Key))
		}
		collectionsConfigLedger = lsccrwset.Writes[1].Value
	}
	// the argument and the writeset value must agree byte-for-byte (both may be nil)
	if !bytes.Equal(collectionsConfigArg, collectionsConfigLedger) {
		return policyErr(fmt.Errorf("collection configuration arguments supplied for chaincode %s:%s do not match the configuration in the lscc writeset",
			cdRWSet.Name, cdRWSet.Version))
	}
	channelState, err := vscc.stateFetcher.FetchState()
	if err != nil {
		return &commonerrors.VSCCExecutionFailureError{Err: fmt.Errorf("failed obtaining query executor: %v", err)}
	}
	defer channelState.Done()
	state := &state{channelState}
	// The following condition check added in v1.1 may not be needed as it is not possible to have the chaincodeName~collection key in
	// the lscc namespace before a chaincode deploy. To avoid forks in v1.2, the following condition is retained.
	if lsccFunc == lscc.DEPLOY {
		colCriteria := common.CollectionCriteria{Channel: channelName, Namespace: cdRWSet.Name}
		ccp, err := privdata.RetrieveCollectionConfigPackageFromState(colCriteria, state)
		if err != nil {
			// fail if we get any error other than NoSuchCollectionError
			// because it means something went wrong while looking up the
			// older collection
			if _, ok := err.(privdata.NoSuchCollectionError); !ok {
				return &commonerrors.VSCCExecutionFailureError{Err: fmt.Errorf("unable to check whether collection existed earlier for chaincode %s:%s",
					cdRWSet.Name, cdRWSet.Version),
				}
			}
		}
		// a fresh deploy must not find a pre-existing collection config
		if ccp != nil {
			return policyErr(fmt.Errorf("collection data should not exist for chaincode %s:%s", cdRWSet.Name, cdRWSet.Version))
		}
	}
	// TODO: Once the new chaincode lifecycle is available (FAB-8724), the following validation
	// and other validation performed in ValidateLSCCInvocation can be moved to LSCC itself.
	newCollectionConfigPackage := &common.CollectionConfigPackage{}
	if collectionsConfigArg != nil {
		err := proto.Unmarshal(collectionsConfigArg, newCollectionConfigPackage)
		if err != nil {
			return policyErr(fmt.Errorf("invalid collection configuration supplied for chaincode %s:%s",
				cdRWSet.Name, cdRWSet.Version))
		}
	} else {
		// no collection config supplied — nothing further to validate
		return nil
	}
	if ac.V1_2Validation() {
		newCollectionConfigs := newCollectionConfigPackage.GetConfig()
		if err := validateNewCollectionConfigs(newCollectionConfigs); err != nil {
			return policyErr(err)
		}
		if lsccFunc == lscc.UPGRADE {
			collectionCriteria := common.CollectionCriteria{Channel: channelName, Namespace: cdRWSet.Name}
			// oldCollectionConfigPackage denotes the existing collection config package in the ledger
			oldCollectionConfigPackage, err := privdata.RetrieveCollectionConfigPackageFromState(collectionCriteria, state)
			if err != nil {
				// fail if we get any error other than NoSuchCollectionError
				// because it means something went wrong while looking up the
				// older collection
				if _, ok := err.(privdata.NoSuchCollectionError); !ok {
					return &commonerrors.VSCCExecutionFailureError{Err: fmt.Errorf("unable to check whether collection existed earlier for chaincode %s:%s: %v",
						cdRWSet.Name, cdRWSet.Version, err),
					}
				}
			}
			// oldCollectionConfigPackage denotes the existing collection config package in the ledger
			if oldCollectionConfigPackage != nil {
				oldCollectionConfigs := oldCollectionConfigPackage.GetConfig()
				if err := validateNewCollectionConfigsAgainstOld(newCollectionConfigs, oldCollectionConfigs); err != nil {
					return policyErr(err)
				}
			}
		}
	}
	return nil
}
// ValidateLSCCInvocation applies additional validation to transactions that
// invoke LSCC, i.e. chaincode deploy and upgrade operations. It checks the
// invocation arguments, the LSCC read-write set (including any collection
// configuration) and the chaincode instantiation policy; any other LSCC
// function committed in a block is rejected outright.
func (vscc *Validator) ValidateLSCCInvocation(
	chid string,
	env *common.Envelope,
	cap *pb.ChaincodeActionPayload,
	payl *common.Payload,
	ac channelconfig.ApplicationCapabilities,
) commonerrors.TxValidationError {
	// extract the invocation spec (function name + args) from the proposal payload
	cpp, err := utils.GetChaincodeProposalPayload(cap.ChaincodeProposalPayload)
	if err != nil {
		logger.Errorf("VSCC error: GetChaincodeProposalPayload failed, err %s", err)
		return policyErr(err)
	}
	cis := &pb.ChaincodeInvocationSpec{}
	err = proto.Unmarshal(cpp.Input, cis)
	if err != nil {
		logger.Errorf("VSCC error: Unmarshal ChaincodeInvocationSpec failed, err %s", err)
		return policyErr(err)
	}
	if cis.ChaincodeSpec == nil ||
		cis.ChaincodeSpec.Input == nil ||
		cis.ChaincodeSpec.Input.Args == nil {
		logger.Errorf("VSCC error: committing invalid vscc invocation")
		return policyErr(fmt.Errorf("malformed chaincode invocation spec"))
	}
	// args[0] is the lscc function name; the rest are that function's arguments
	lsccFunc := string(cis.ChaincodeSpec.Input.Args[0])
	lsccArgs := cis.ChaincodeSpec.Input.Args[1:]
	logger.Debugf("VSCC info: ValidateLSCCInvocation acting on %s %#v", lsccFunc, lsccArgs)
	switch lsccFunc {
	case lscc.UPGRADE, lscc.DEPLOY:
		logger.Debugf("VSCC info: validating invocation of lscc function %s on arguments %#v", lsccFunc, lsccArgs)
		if len(lsccArgs) < 2 {
			return policyErr(fmt.Errorf("Wrong number of arguments for invocation lscc(%s): expected at least 2, received %d", lsccFunc, len(lsccArgs)))
		}
		// the sixth argument (collection config) is only admissible with private channel data
		if (!ac.PrivateChannelData() && len(lsccArgs) > 5) ||
			(ac.PrivateChannelData() && len(lsccArgs) > 6) {
			return policyErr(fmt.Errorf("Wrong number of arguments for invocation lscc(%s): received %d", lsccFunc, len(lsccArgs)))
		}
		cdsArgs, err := utils.GetChaincodeDeploymentSpec(lsccArgs[1], platforms.NewRegistry(
			// XXX We should definitely _not_ have this external dependency in VSCC
			// as adding a platform could cause non-determinism. This is yet another
			// reason why all of this custom LSCC validation at commit time has no
			// long term hope of staying deterministic and needs to be removed.
			&golang.Platform{},
			&node.Platform{},
			&java.Platform{},
			&car.Platform{},
		))
		if err != nil {
			return policyErr(fmt.Errorf("GetChaincodeDeploymentSpec error %s", err))
		}
		if cdsArgs == nil || cdsArgs.ChaincodeSpec == nil || cdsArgs.ChaincodeSpec.ChaincodeId == nil ||
			cap.Action == nil || cap.Action.ProposalResponsePayload == nil {
			return policyErr(fmt.Errorf("VSCC error: invocation of lscc(%s) does not have appropriate arguments", lsccFunc))
		}
		// get the rwset
		pRespPayload, err := utils.GetProposalResponsePayload(cap.Action.ProposalResponsePayload)
		if err != nil {
			return policyErr(fmt.Errorf("GetProposalResponsePayload error %s", err))
		}
		if pRespPayload.Extension == nil {
			return policyErr(fmt.Errorf("nil pRespPayload.Extension"))
		}
		respPayload, err := utils.GetChaincodeAction(pRespPayload.Extension)
		if err != nil {
			return policyErr(fmt.Errorf("GetChaincodeAction error %s", err))
		}
		txRWSet := &rwsetutil.TxRwSet{}
		if err = txRWSet.FromProtoBytes(respPayload.Results); err != nil {
			return policyErr(fmt.Errorf("txRWSet.FromProtoBytes error %s", err))
		}
		// extract the rwset for lscc
		var lsccrwset *kvrwset.KVRWSet
		for _, ns := range txRWSet.NsRwSets {
			logger.Debugf("Namespace %s", ns.NameSpace)
			if ns.NameSpace == "lscc" {
				lsccrwset = ns.KvRwSet
				break
			}
		}
		// retrieve from the ledger the entry for the chaincode at hand
		cdLedger, ccExistsOnLedger, err := vscc.getInstantiatedCC(chid, cdsArgs.ChaincodeSpec.ChaincodeId.Name)
		if err != nil {
			return &commonerrors.VSCCExecutionFailureError{Err: err}
		}
		/******************************************/
		/* security check 0 - validation of rwset */
		/******************************************/
		// there has to be a write-set
		if lsccrwset == nil {
			return policyErr(fmt.Errorf("No read write set for lscc was found"))
		}
		// there must be at least one write
		if len(lsccrwset.Writes) < 1 {
			return policyErr(fmt.Errorf("LSCC must issue at least one single putState upon deploy/upgrade"))
		}
		// the first key name must be the chaincode id provided in the deployment spec
		if lsccrwset.Writes[0].Key != cdsArgs.ChaincodeSpec.ChaincodeId.Name {
			return policyErr(fmt.Errorf("expected key %s, found %s", cdsArgs.ChaincodeSpec.ChaincodeId.Name, lsccrwset.Writes[0].Key))
		}
		// the value must be a ChaincodeData struct
		cdRWSet := &ccprovider.ChaincodeData{}
		err = proto.Unmarshal(lsccrwset.Writes[0].Value, cdRWSet)
		if err != nil {
			return policyErr(fmt.Errorf("unmarhsalling of ChaincodeData failed, error %s", err))
		}
		// the chaincode name in the lsccwriteset must match the chaincode name in the deployment spec
		if cdRWSet.Name != cdsArgs.ChaincodeSpec.ChaincodeId.Name {
			return policyErr(fmt.Errorf("expected cc name %s, found %s", cdsArgs.ChaincodeSpec.ChaincodeId.Name, cdRWSet.Name))
		}
		// the chaincode version in the lsccwriteset must match the chaincode version in the deployment spec
		if cdRWSet.Version != cdsArgs.ChaincodeSpec.ChaincodeId.Version {
			return policyErr(fmt.Errorf("expected cc version %s, found %s", cdsArgs.ChaincodeSpec.ChaincodeId.Version, cdRWSet.Version))
		}
		// it must only write to 2 namespaces: LSCC's and the cc that we are deploying/upgrading
		for _, ns := range txRWSet.NsRwSets {
			if ns.NameSpace != "lscc" && ns.NameSpace != cdRWSet.Name && len(ns.KvRwSet.Writes) > 0 {
				return policyErr(fmt.Errorf("LSCC invocation is attempting to write to namespace %s", ns.NameSpace))
			}
		}
		logger.Debugf("Validating %s for cc %s version %s", lsccFunc, cdRWSet.Name, cdRWSet.Version)
		// deploy- and upgrade-specific checks
		switch lsccFunc {
		case lscc.DEPLOY:
			/******************************************************************/
			/* security check 1 - cc not in the LCCC table of instantiated cc */
			/******************************************************************/
			if ccExistsOnLedger {
				return policyErr(fmt.Errorf("Chaincode %s is already instantiated", cdsArgs.ChaincodeSpec.ChaincodeId.Name))
			}
			/****************************************************************************/
			/* security check 2 - validation of rwset (and of collections if enabled) */
			/****************************************************************************/
			if ac.PrivateChannelData() {
				// do extra validation for collections
				err := vscc.validateRWSetAndCollection(lsccrwset, cdRWSet, lsccArgs, lsccFunc, ac, chid)
				if err != nil {
					return err
				}
			} else {
				// there can only be a single ledger write
				if len(lsccrwset.Writes) != 1 {
					return policyErr(fmt.Errorf("LSCC can only issue a single putState upon deploy"))
				}
			}
			/*****************************************************/
			/* security check 3 - check the instantiation policy */
			/*****************************************************/
			pol := cdRWSet.InstantiationPolicy
			if pol == nil {
				return policyErr(fmt.Errorf("no instantiation policy was specified"))
			}
			// FIXME: could we actually pull the cds package from the
			// file system to verify whether the policy that is specified
			// here is the same as the one on disk?
			// PROS: we prevent attacks where the policy is replaced
			// CONS: this would be a point of non-determinism
			err := vscc.checkInstantiationPolicy(chid, env, pol, payl)
			if err != nil {
				return err
			}
		case lscc.UPGRADE:
			/**************************************************************/
			/* security check 1 - cc in the LCCC table of instantiated cc */
			/**************************************************************/
			if !ccExistsOnLedger {
				return policyErr(fmt.Errorf("Upgrading non-existent chaincode %s", cdsArgs.ChaincodeSpec.ChaincodeId.Name))
			}
			/**********************************************************/
			/* security check 2 - existing cc's version was different */
			/**********************************************************/
			if cdLedger.Version == cdsArgs.ChaincodeSpec.ChaincodeId.Version {
				return policyErr(fmt.Errorf("Existing version of the cc on the ledger (%s) should be different from the upgraded one", cdsArgs.ChaincodeSpec.ChaincodeId.Version))
			}
			/****************************************************************************/
			/* security check 3 validation of rwset (and of collections if enabled) */
			/****************************************************************************/
			// Only in v1.2, a collection can be updated during a chaincode upgrade
			if ac.V1_2Validation() {
				// do extra validation for collections
				err := vscc.validateRWSetAndCollection(lsccrwset, cdRWSet, lsccArgs, lsccFunc, ac, chid)
				if err != nil {
					return err
				}
			} else {
				// there can only be a single ledger write
				if len(lsccrwset.Writes) != 1 {
					return policyErr(fmt.Errorf("LSCC can only issue a single putState upon upgrade"))
				}
			}
			/*****************************************************/
			/* security check 4 - check the instantiation policy */
			/*****************************************************/
			pol := cdLedger.InstantiationPolicy
			if pol == nil {
				return policyErr(fmt.Errorf("No instantiation policy was specified"))
			}
			// FIXME: could we actually pull the cds package from the
			// file system to verify whether the policy that is specified
			// here is the same as the one on disk?
			// PROS: we prevent attacks where the policy is replaced
			// CONS: this would be a point of non-determinism
			err := vscc.checkInstantiationPolicy(chid, env, pol, payl)
			if err != nil {
				return err
			}
			/******************************************************************/
			/* security check 5 - check the instantiation policy in the rwset */
			/******************************************************************/
			if ac.V1_1Validation() {
				polNew := cdRWSet.InstantiationPolicy
				if polNew == nil {
					return policyErr(fmt.Errorf("No instantiation policy was specified"))
				}
				// no point in checking it again if they are the same policy
				if !bytes.Equal(polNew, pol) {
					err = vscc.checkInstantiationPolicy(chid, env, polNew, payl)
					if err != nil {
						return err
					}
				}
			}
		}
		// all is good!
		return nil
	default:
		return policyErr(fmt.Errorf("VSCC error: committing an invocation of function %s of lscc is invalid", lsccFunc))
	}
}
// getInstantiatedCC fetches the ChaincodeData recorded by LSCC for chaincode
// ccid on channel chid. exists reports whether an entry was found; when it is
// false, cd is nil and err is nil.
func (vscc *Validator) getInstantiatedCC(chid, ccid string) (cd *ccprovider.ChaincodeData, exists bool, err error) {
	qe, err := vscc.stateFetcher.FetchState()
	if err != nil {
		err = fmt.Errorf("could not retrieve QueryExecutor for channel %s, error %s", chid, err)
		return
	}
	defer qe.Done()
	channelState := &state{qe}
	// named ccBytes rather than bytes: the latter would shadow the bytes package
	ccBytes, err := channelState.GetState("lscc", ccid)
	if err != nil {
		err = fmt.Errorf("could not retrieve state for chaincode %s on channel %s, error %s", ccid, chid, err)
		return
	}
	// no entry under the lscc namespace means the chaincode was never instantiated
	if ccBytes == nil {
		return
	}
	cd = &ccprovider.ChaincodeData{}
	err = proto.Unmarshal(ccBytes, cd)
	if err != nil {
		err = fmt.Errorf("unmarshalling ChaincodeQueryResponse failed, error %s", err)
		return
	}
	exists = true
	return
}
// deduplicateIdentity builds the SignedData set for the endorsements of a
// chaincode action, keeping only the first endorsement seen for each distinct
// (MSP id, certificate) pair. A result shorter than cap.Action.Endorsements
// therefore signals that duplicated identities were present (see the
// DUPLICATED_IDENTITY_ERROR handling in Validate).
func (vscc *Validator) deduplicateIdentity(cap *pb.ChaincodeActionPayload) ([]*common.SignedData, error) {
	// this is the first part of the signed message
	prespBytes := cap.Action.ProposalResponsePayload
	// build the signature set for the evaluation
	signatureSet := []*common.SignedData{}
	signatureMap := make(map[string]struct{})
	// loop through each of the endorsements and build the signature set
	for _, endorsement := range cap.Action.Endorsements {
		//unmarshal endorser bytes
		serializedIdentity := &msp.SerializedIdentity{}
		if err := proto.Unmarshal(endorsement.Endorser, serializedIdentity); err != nil {
			logger.Errorf("Unmarshal endorser error: %s", err)
			return nil, policyErr(fmt.Errorf("Unmarshal endorser error: %s", err))
		}
		// dedup key: MSP id concatenated with the identity bytes
		identity := serializedIdentity.Mspid + string(serializedIdentity.IdBytes)
		if _, ok := signatureMap[identity]; ok {
			// Endorsement with the same identity has already been added
			logger.Warningf("Ignoring duplicated identity, Mspid: %s, pem:\n%s", serializedIdentity.Mspid, serializedIdentity.IdBytes)
			continue
		}
		data := make([]byte, len(prespBytes)+len(endorsement.Endorser))
		copy(data, prespBytes)
		copy(data[len(prespBytes):], endorsement.Endorser)
		signatureSet = append(signatureSet, &common.SignedData{
			// set the data that is signed; concatenation of proposal response bytes and endorser ID
			Data: data,
			// set the identity that signs the message: it's the endorser
			Identity: endorsement.Endorser,
			// set the signature
			Signature: endorsement.Signature})
		signatureMap[identity] = struct{}{}
	}
	logger.Debugf("Signature set is of size %d out of %d endorsement(s)", len(signatureSet), len(cap.Action.Endorsements))
	return signatureSet, nil
}
// state wraps a State (the channel state handle obtained from the stateFetcher)
// to add the single-key GetState convenience accessor used by this package.
type state struct {
	State
}
// GetState retrieves the value for the given key in the given namespace.
// A key with no value yields (nil, nil).
func (s *state) GetState(namespace string, key string) ([]byte, error) {
	values, err := s.GetStateMultipleKeys(namespace, []string{key})
	switch {
	case err != nil:
		return nil, err
	case len(values) == 0:
		return nil, nil
	default:
		return values[0], nil
	}
}
// policyErr wraps err in a VSCCEndorsementPolicyError, the error type used to
// signal that a transaction failed endorsement-policy validation.
func policyErr(err error) *commonerrors.VSCCEndorsementPolicyError {
	return &commonerrors.VSCCEndorsementPolicyError{Err: err}
}
| {
newCollectionsMap := make(map[string]*common.StaticCollectionConfig, len(newCollectionConfigs))
for _, newCollectionConfig := range newCollectionConfigs {
newCollection := newCollectionConfig.GetStaticCollectionConfig()
// Collection object itself is stored as value so that we can
// check whether the block to live is changed -- FAB-7810
newCollectionsMap[newCollection.GetName()] = newCollection
}
if err := checkForMissingCollections(newCollectionsMap, oldCollectionConfigs); err != nil {
return err
}
if err := checkForModifiedCollectionsBTL(newCollectionsMap, oldCollectionConfigs); err != nil {
return err
}
return nil
} |
surgeon.go | package operations
import (
"fmt"
"github.com/go-logr/logr"
"github.com/whitesource/spring4shell-detect/records"
"os"
)
// Surgeon resolves dependencies for one project type (records.Organ).
type Surgeon interface {
	// Validate confirms the availability of all requirements
	// for processing the given manifest files.
	Validate(paths []string) error
	// Operate processes the manifest files to generate the []records.OperationResult
	Operate(paths []string) ([]records.OperationResult, error)
}
// Perform performs the dependency resolution for all matching manifest files, with the corresponding Surgeons
// detected is a map from utils.Organ to a list of matching manifest files
// return: a list of scan result per manifest file
// (might be less for multi-module projects)
// A scan result will be returned only in a case a Surgeon matching the utils.Organ is found
// Perform runs each Surgeon over the manifest files detected for its Organ.
// Validation or scan failures for one project type are reported to stderr (and,
// for scan failures, to the logger) and do not stop the remaining project types.
func Perform(logger logr.Logger, detected map[records.Organ][]string, surgeons map[records.Organ]Surgeon) (results []records.OperationResult) {
	for o, s := range surgeons {
		paths := detected[o]
		// nothing detected for this project type
		if len(paths) == 0 {
			continue
		}
		if err := s.Validate(paths); err != nil {
			_, _ = fmt.Fprintf(os.Stderr, "error: %s project detected (manifest: %s), but an error occurred: %v\n", o, paths[0], err)
			continue
		}
		r, err := s.Operate(paths)
		if err != nil {
			_, _ = fmt.Fprintf(os.Stderr, "error: failed to scan %s project: %v\n", o, err)
			logger.Error(err, "failed to scan projects", "projectType", o)
			continue
		}
		results = append(results, r...)
	}
	return results
}
d3js_for_dataviz.dev.js | "use strict";
/* eslint-disable no-unused-vars */ |
/* eslint-disable no-undef */
// Five sample points shared by all six curve examples below.
var dataArray = [{
  x: 5,
  y: 5
}, {
  x: 10,
  y: 15
}, {
  x: 20,
  y: 7
}, {
  x: 30,
  y: 18
}, {
  x: 40,
  y: 10
}];
// One interpolation mode per chart; the loop below renders one chart per entry.
var interpolateTypes = [d3.curveLinear, d3.curveNatural, d3.curveStep, d3.curveBasis, d3.curveBundle, d3.curveCardinal];
var svg1 = d3.select("#example1").append("svg").attr("height", "100%").attr("width", "100%");
// Render one line chart (plus point markers) per curve type, shifted 250px apart.
for (var p = 0; p < 6; p++) {
  // Scale the raw data into pixel space (x * 6, y * 4).
  var line = d3.line().x(function (d, i) {
    return d.x * 6;
  }).y(function (d, i) {
    return d.y * 4;
  }).curve(interpolateTypes[p]);
  var shiftX = p * 250;
  var shiftY = 0; // NOTE(review): unused — the transform below only shifts horizontally
  var chartGroup = svg1.append("g").attr("class", "group" + p).attr("transform", "translate(" + shiftX + ",0)");
  chartGroup.append("path").attr("fill", "none").attr("stroke", "blue").attr("d", line(dataArray));
  // NOTE(review): the selector uses "grp" + p but the class assigned below is
  // "grp" + i (datum index). The selection is empty on this first render so the
  // circles are still created, but re-running this join would not find them —
  // confirm which of p/i was intended.
  chartGroup.selectAll("circle.grp" + p).data(dataArray).enter().append("circle").attr("class", function (d, i) {
    return "grp" + i;
  }).attr("cx", function (d, i) {
    return d.x * 6;
  }).attr("cy", function (d, i) {
    return d.y * 4;
  }).attr("r", "2");
}
// Inject raw HTML into #html and wire hover styling on #event.
d3.select("#html").html("<h1>Hello WOrld!</h1><p>My name is Dataviz!<p>");
d3.select("#event").style("padding", "3rem 2rem").on("mouseover", function () {
  d3.select(this).style("background-color", "black").style("color", "white");
}).on("mouseout", function () {
  d3.select(this).style("background-color", "yellow").style("color", "black");
});
main.rs | const MINIMUM_VALUE: u32 = 206_938;
const MAXIMUM_VALUE: u32 = 679_128;
// I'm not _unhappy_ with this implementation, but it's simplistic.
// You really want to look at AxlLind's:
// https://github.com/AxlLind/AdventOfCode2019/blob/master/src/bin/04.rs
// It's a thing of beauty, and runs in a fraction of the time of this code.
// I'm not going to work on this further as I'd just be copying his.
/// Counts passwords in the puzzle range that satisfy the part 1 and part 2
/// rules and prints both counts along with the elapsed time.
fn main() {
    let start_time = std::time::Instant::now();
    // Tally both parts in a single pass; the previous unzip-into-two-Vecs
    // approach allocated two ~470k-element vectors just to count `true`s.
    let (part_1_count, part_2_count) = (MINIMUM_VALUE..=MAXIMUM_VALUE)
        .map(evaluate_password)
        .fold((0usize, 0usize), |(p1, p2), (v1, v2)| {
            (p1 + usize::from(v1), p2 + usize::from(v2))
        });
    println!(
        "Part 1: {}\nPart 2: {}\nTime: {}ms",
        part_1_count,
        part_2_count,
        start_time.elapsed().as_millis()
    );
}
// Returns two bools - the first is whether the password is valid by part 1 rules,
// and the second by part 2 rules.
/// Evaluates a candidate password against both rule sets.
///
/// Returns `(part_1_valid, part_2_valid)`:
/// * part 1 — digits never decrease and at least one run of 2+ equal digits,
/// * part 2 — digits never decrease and at least one run of *exactly* 2.
fn evaluate_password(password: u32) -> (bool, bool) {
    // Folds a completed run of equal digits into the two validity flags.
    fn close_run(repetition: u32, at_least_double: &mut bool, exactly_double: &mut bool) {
        if repetition >= 2 {
            *at_least_double = true;
        }
        if repetition == 2 {
            *exactly_double = true;
        }
    }

    let pass_str = password.to_string();
    let mut chars = pass_str.chars();
    let mut previous_digit = chars.next().expect("a u32 always has at least one digit");
    let mut at_least_double_digit = false;
    let mut double_digit = false;
    let mut digit_repetition = 1u32; // consecutive occurrences of previous_digit
    for digit in chars {
        if digit < previous_digit {
            return (false, false); // Digits may never decrease
        }
        if digit == previous_digit {
            digit_repetition += 1;
        } else {
            // A run just ended; fold it into the flags and start a new run.
            close_run(digit_repetition, &mut at_least_double_digit, &mut double_digit);
            digit_repetition = 1;
        }
        previous_digit = digit;
    }
    // The final run ends with the string itself.
    close_run(digit_repetition, &mut at_least_double_digit, &mut double_digit);
    (at_least_double_digit, double_digit)
}
domain_api_vulnerability_remediation_facet_v2.rs | /*
* CrowdStrike API Specification | *
* Use this API specification as a reference for the API endpoints you can use to interact with your Falcon environment. These endpoints support authentication via OAuth2 and interact with detections and network containment. For detailed usage guides and more information about API endpoints that don't yet support OAuth2, see our [documentation inside the Falcon console](https://falcon.crowdstrike.com/support/documentation). To use the APIs described below, combine the base URL with the path shown for each API endpoint. For commercial cloud customers, your base URL is `https://api.crowdstrike.com`. Each API endpoint requires authorization via an OAuth2 token. Your first API request should retrieve an OAuth2 token using the `oauth2/token` endpoint, such as `https://api.crowdstrike.com/oauth2/token`. For subsequent requests, include the OAuth2 token in an HTTP authorization header. Tokens expire after 30 minutes, after which you should make a new token request to continue making API requests.
*
* The version of the OpenAPI document: rolling
*
* Generated by: https://openapi-generator.tech
*/
/// Remediation facet of a vulnerability, carrying the remediation records
/// and/or their ids. Both fields are optional and omitted from the serialized
/// JSON when `None`.
#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct DomainApiVulnerabilityRemediationFacetV2 {
    /// Full remediation records (JSON key `entities`; skipped when `None`).
    #[serde(rename = "entities", skip_serializing_if = "Option::is_none")]
    pub entities: Option<Vec<crate::models::DomainApiRemediationV2>>,
    /// Remediation ids (JSON key `ids`; skipped when `None`).
    #[serde(rename = "ids", skip_serializing_if = "Option::is_none")]
    pub ids: Option<Vec<String>>,
}
impl DomainApiVulnerabilityRemediationFacetV2 {
pub fn new() -> DomainApiVulnerabilityRemediationFacetV2 {
DomainApiVulnerabilityRemediationFacetV2 { entities: None, ids: None }
}
} | |
index.js | import 'file-loader?name=[name].[ext]!./src/html/index.html';
import {
Scene,
OrthographicCamera,
WebGLRenderer,
Mesh,
DataTexture,
RGBAFormat,
FloatType,
PlaneBufferGeometry,
ShaderMaterial,
Vector2,
TextureLoader,
Sprite,
MeshBasicMaterial,
RepeatWrapping,
RGBFormat,
} from 'three';
import PingpongRenderTarget from "./src/PingpongRenderTarget"
import RenderTarget from "./src/RenderTarget"
import dat from "dat.gui";
import Controls from "./src/Controls";
// 0 configure scene
//////////////////////////////////////
let w = window.innerWidth
let h = window.innerHeight
const renderer = new WebGLRenderer({
  alpha: true
});
document.body.appendChild(renderer.domElement);
renderer.setSize(w, h);
const scene = new Scene();
// Orthographic camera centered on the origin and spanning the full window.
const camera = new OrthographicCamera(-w / 2, w / 2, h / 2, -h / 2, 0.1, 100);
camera.position.z = 1
// 1 init buffers
//////////////////////////////////////
let size = 1440 // particles amount = ( size ^ 2 )
let count = size * size;
let pos = new Float32Array(count * 3) // point-cloud vertices, all at the origin (real positions live in the texture)
let uvs = new Float32Array(count * 2) // per-particle uv into the size x size simulation texture
let ptexdata = new Float32Array(count * 4) // one RGBA texel per particle: x, y, angle, 1 (all normalized 0..1)
// let food = new Float32Array(count * 3); // value for if food, and concentration
let id = 0, u,v;
for (let i = 0; i < count; i++) {
  //point cloud vertex
  id = i * 3
  pos[id++] = pos[id++] = pos[id++] = 0;
  //computes the uvs
  u = (i % size) / size; // column / size
  v = ~~(i / size) / size; // ~~ truncates toward zero: row / size
  id = i * 2
  uvs[id++] = u
  uvs[id] = v
  //particle texture values (agents)
  id = i * 4
  ptexdata[id++] = Math.random() // normalized pos x
  ptexdata[id++] = Math.random() // normalized pos y
  ptexdata[id++] = Math.random() // normalized angle
  ptexdata[id++] = 1
}
let foodNodes = [
1, 0, 1, 1,
0, 1, 1, 1
] // (0, 0) and (w, h) are the corners of the screen 1 is concentration.
// these values are normalized so its actually 0 0 and 1 1
let foodData = new Float32Array(w*h*4);
// x, y, foodValue
let y = -1;
let debugObject = {
x: 0,
y: 0,
distanceBetween: 0,
foodX: 0,
foodY: 0,
foodValue: 0
}
console.log(uvs.length / 3)
console.log(foodData.length / 4)
for (let i = 0; i < foodData.length; i+=4) {
// foodData[i] = uvs[(i/4)];
// foodData[i+1] = uvs[(i/4) + 1];
if ((i/4) % (w) == 0) {
y++;
}
foodData[i] = ((i/4) - (y*w)) / w;
foodData[i + 1] = y / h;
}
// it seems like uvs only render at the location of the particles
// we need to render food everywhere, so is this not an option?
let debugObjects = [];
for (let i = 0; i < foodNodes.length/4; i++) {
let foodX = foodNodes[i*4];
let foodY = foodNodes[i*4+1];
let foodStrength = foodNodes[i*4+2];
let radius = 1.4;
let steepness = 3;
for (let j = 0; j < foodData.length/4; j++) {
debugObject = {}
let x = foodData[j*4];
let y = foodData[j*4+1]; // these are normalized
let distanceBetween = Math.abs(Math.sqrt(Math.pow(foodX - x, 2) + Math.pow(foodY - y, 2)));
let foodValue;
if (distanceBetween < radius) {
let percentageOfMaximumDistance = (1.4143 - (distanceBetween))/1.4143;
foodValue = Math.sin(Math.PI/2 * Math.pow(percentageOfMaximumDistance, steepness))
}
else {
foodValue = 0;
}
foodData[j*4+2] += (foodStrength) * foodValue; // this could be wrong
debugObject.x = x;
debugObject.y = y;
debugObject.distanceBetween = distanceBetween;
debugObject.foodX = foodX;
debugObject.foodY = foodY;
debugObject.foodValue = foodValue;
debugObject.foodStrength = foodStrength;
let alpha = 1;
foodData[j * 4 + 3] = alpha;
debugObjects.push(debugObject);
}
}
console.log(debugObjects[0])
let foodTex = new DataTexture(foodData, w, h, RGBAFormat, FloatType);
foodTex.needsUpdate = true;
console.log(foodData)
// x, y, foodValue accum, alpha
// 2 data & trails
//////////////////////////////////////
//performs the diffusion and decay
let diffuse_decay = new ShaderMaterial({
uniforms: {
points: {
value: null
},
decay: {
value: .9
},
food_texture: {
value: foodTex
}
},
opacity: 0.5,
vertexShader: require('./src/glsl/quad_vs.glsl'),
fragmentShader: require('./src/glsl/diffuse_decay_fs.glsl')
})
let trails = new PingpongRenderTarget(w, h, diffuse_decay, null, foodData)
// 3 agents
//////////////////////////////////////
//moves agents around
const heightmapTexture = new TextureLoader().load( "heightmaps/georgia.png" );
heightmapTexture.wrapT = RepeatWrapping;
heightmapTexture.repeat.y = - 1;
const heightmapMaterial = new MeshBasicMaterial( {
map: heightmapTexture,
transparent: true,
opacity: 0.6,
} );
//var sprite = new Sprite( heightmapMaterial );
let update_agents = new ShaderMaterial({
uniforms: {
data: { value: null },
sa: { value: 2 }, // sensor angle
ra: { value: 4 }, // rotation angle
so: { value: 12 }, // look ahead distance
ss: { value: 1.1 }, // step size (speed)
heightmap_texture: { value: heightmapTexture },
hl: { value: 0.0005 }, // height level
},
vertexShader: require('./src/glsl/quad_vs.glsl'),
fragmentShader: require('./src/glsl/update_agents_fs.glsl'),
opacity: 0.5
})
let agents = new PingpongRenderTarget(size, size, update_agents, ptexdata)
// 4 point cloud
//////////////////////////////////////
//renders the updated agents as red dots
let render_agents = new ShaderMaterial({
vertexShader: require('./src/glsl/render_agents_vs.glsl'),
fragmentShader: require('./src/glsl/render_agents_fs.glsl')
})
let render = new RenderTarget(w,h,render_agents, pos, uvs) // no data is sent
// 5 post process
//////////////////////////////////////
//post process the result of the trails (render the trails as greyscale)
let postprocess = new ShaderMaterial({
uniforms: {
data: {
value: null
},
heightmap_texture: {
value: heightmapTexture
},
food_texture: {
value: foodTex
}
},
transparent: true,
opacity: 1,
vertexShader: require('./src/glsl/quad_vs.glsl'),
fragmentShader: require('./src/glsl/postprocess_fs.glsl'),
});
let foodMaterial = new ShaderMaterial({
uniforms: {
food_texture: {
value: foodTex
}
},
opacity: 0.1,
transparent: true,
vertexShader: require('./src/glsl/food_vs.glsl'),
fragmentShader: require('./src/glsl/food_fs.glsl'),
})
let diffuse_decay_mesh = new Mesh(new PlaneBufferGeometry(), diffuse_decay);
let postprocess_mesh = new Mesh(new PlaneBufferGeometry(), postprocess)
var heightmapMesh = new Mesh(new PlaneBufferGeometry(), heightmapMaterial);
var foodMesh = new Mesh(new PlaneBufferGeometry(), foodMaterial);
heightmapMesh.scale.set(w, h, 1)
postprocess_mesh.scale.set(w, h, 1)
foodMesh.scale.set(w, h, 1);
foodMesh.renderOrder = 1;
heightmapMesh.renderOrder = 2;
postprocess_mesh.renderOrder = 0;
//scene.add(diffuse_decay_mesh)
scene.add(postprocess_mesh)
heightmapMesh.material.needsUpdate = true;
postprocess_mesh.material.needsUpdate = true;
scene.add(heightmapMesh);
//scene.add(foodMesh);
// 6 interactive controls
//////////////////////////////////////
let controls = new Controls( renderer, agents )
controls.count = ~~(size * size * .05)
// animation loop
//////////////////////////////////////
let triggered = false;
function | (){
if (time > 2 && !triggered) {
//console.log(uvs);
triggered = true;
// let values = {};
// for (let i of render.texture.image.data) {
// values[i] = true
// }
// console.log(render.texture.image.data.length)
}
requestAnimationFrame(raf)
time = (Date.now() - start) * 0.001
trails.material.uniforms.points.value = render.texture
trails.render( renderer, time )
agents.material.uniforms.data.value = trails.texture
agents.render(renderer, time)
render.render( renderer, time )
postprocess_mesh.material.uniforms.data.value = trails.texture
renderer.setSize(w,h)
renderer.clear()
renderer.render(scene, camera)
}
//////////////////////////////////////////////////
let materials = [
diffuse_decay, update_agents, render_agents
]
let resolution = new Vector2(w,h);
materials.forEach( (mat)=>{mat.uniforms.resolution.value = resolution;})
let start = Date.now();
let time = 0;
raf()
// settings
//////////////////////////////////////////////////
let gui = new dat.GUI()
gui.add(diffuse_decay.uniforms.decay, "value", 0.01, .99, .01).name("Decay Factor")
gui.add(update_agents.uniforms.sa, "value", 1, 90, .1).name("Sensor Angle (sa)")
gui.add(update_agents.uniforms.ra, "value", 1, 90, .1).name("Rotation Angle (ra)")
gui.add(update_agents.uniforms.so, "value", 1, 90, .1).name("Scaling Factor (so)")
gui.add(update_agents.uniforms.ss, "value", 0.1, 10, .1).name("Sensor Speed (ss)")
gui.add(update_agents.uniforms.hl, "value", 0.00001, 1, .0001).name("Minimum Height Level for Movement").onChange((value) => {
//update_agents.uniforms.hl = value
})
gui.add(controls, "random").name("Spawn at center")
gui.add(controls, "radius",.001,.25)
gui.add(controls, "count", 1,size*size, 1)
let heightmapOption = {
heightmap: "georgia"
}
let heightmaps = {
georgia: "heightmaps/georgia.png",
florida: "heightmaps/florida.png",
balkans: "heightmaps/balkans.png",
sanfranscisco_bay: "heightmaps/sanfranscisco_bay.png",
germany: "heightmaps/germany.png",
manaus_amazon: "heightmaps/amazon_manaus.png",
japan: "heightmaps/japan.png",
australia_melbourne: "heightmaps/australia_melbourne.png",
brazil_rio: "heightmaps/brazil_rio.png",
france_paris: "heightmaps/france_paris.png",
london: "heightmaps/london.png",
new_york_city: "heightmaps/new_york_city.png",
spain_madrid: "heightmaps/spain_madrid.png",
texas_houston: "heightmaps/texas_houston.png",
northern_ireland: "heightmaps/northern_ireland.png",
brunei: "heightmaps/brunei.png",
singapore: "heightmaps/singapore.png",
}
gui.add(heightmapOption, "heightmap", heightmaps).name("Heightmap").onChange((value) => {
let heightmapTexture = new TextureLoader().load( value );
heightmapTexture.wrapT = RepeatWrapping;
heightmapTexture.repeat.y = - 1;
let heightmapMaterial = new MeshBasicMaterial( {
map: heightmapTexture,
transparent: true,
opacity: 0.6,
} );
heightmapMesh.material = heightmapMaterial;
update_agents.uniforms.heightmap_texture.value = heightmapTexture;
postprocess.uniforms.heightmap_texture.value = heightmapTexture;
}) | raf |
binlog_locations.go | // Copyright 2021 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package syncer
import (
"fmt"
"strings"
"sync"
"github.com/go-mysql-org/go-mysql/replication"
"go.uber.org/zap"
"github.com/pingcap/tiflow/dm/pkg/binlog"
"github.com/pingcap/tiflow/dm/pkg/binlog/event"
"github.com/pingcap/tiflow/dm/pkg/gtid"
"github.com/pingcap/tiflow/dm/pkg/log"
)
type locationRecorder struct {
// +-------------+
// ... |current event| ...
// ^ +-------------+ ^
// | |
// curStartLocation curEndLocation
// there may be more events between curStartLocation and curEndLocation due to the limitation of binlog or
// implementation of DM, but in such scenario, those events should always belong to one transaction.
// When curStartLocation is equal to curEndLocation, it means current event is not a data change.
//
// curStartLocation is used when
// - display a meaningful location
// - match the injected location by handle-error
// - save table checkpoint of DML
// curEndLocation is used when
// - handle end location of DDL, when save table checkpoint or shard-resync
curStartLocation binlog.Location
curEndLocation binlog.Location
// txnEndLocation is the end location of last seen transaction. If current event is the last event of a txn,
// txnEndLocation will be assigned from curEndLocation
// it is used when
// - reset binlog replication for a finer granularity
// - save global checkpoint
txnEndLocation binlog.Location
// DML will also generate a query event if user set session binlog_format='statement', we use this field to
// distinguish DML query event.
inDML bool
// we assign startGTID := endGTID after COMMIT, so at COMMIT we turn on the flag.
needUpdateStartGTID bool
mu sync.Mutex // guard curEndLocation because Syncer.printStatus is reading it from another goroutine.
}
func (l *locationRecorder) reset(loc binlog.Location) {
l.mu.Lock()
defer l.mu.Unlock()
// need to clone location to avoid the modification leaking outside
clone := loc.Clone()
l.curStartLocation = clone
l.curEndLocation = clone
l.txnEndLocation = clone
}
//nolint:unused
func (l *locationRecorder) getCurEndLocation() binlog.Location {
l.mu.Lock()
defer l.mu.Unlock()
return l.curEndLocation
}
//nolint:unused
func (l *locationRecorder) setCurEndLocation(location binlog.Location) {
l.mu.Lock()
defer l.mu.Unlock()
l.curEndLocation = location
}
func (l *locationRecorder) saveTxnEndLocation() {
l.txnEndLocation = l.curEndLocation.Clone()
}
// shouldUpdatePos returns true when the given event is from a real upstream writing, returns false when the event is
// header, heartbeat, etc.
func | (e *replication.BinlogEvent) bool {
switch e.Header.EventType {
case replication.FORMAT_DESCRIPTION_EVENT, replication.HEARTBEAT_EVENT, replication.IGNORABLE_EVENT,
replication.PREVIOUS_GTIDS_EVENT, replication.MARIADB_GTID_LIST_EVENT:
return false
}
//nolint:gosimple
if e.Header.Flags&replication.LOG_EVENT_ARTIFICIAL_F != 0 {
// ignore events with LOG_EVENT_ARTIFICIAL_F flag(0x0020) set
// ref: https://dev.mysql.com/doc/internals/en/binlog-event-flag.html
return false
}
return true
}
func (l *locationRecorder) updateCurStartGTID() {
gsetWrapped := l.curEndLocation.GetGTID()
if gsetWrapped == nil {
return
}
gset := gsetWrapped.Origin()
err := l.curStartLocation.SetGTID(gset)
if err != nil {
log.L().DPanic("failed to set GTID set",
zap.Any("GTID set", gset),
zap.Error(err))
}
}
func (l *locationRecorder) setCurEndGTID(e *replication.BinlogEvent) {
gtidStr, err := event.GetGTIDStr(e)
if err != nil {
log.L().DPanic("failed to get GTID from event",
zap.Any("event", e),
zap.Error(err))
return
}
gset := l.curEndLocation.GetGTID()
if gset == nil {
gset, _ = gtid.ParserGTID("", gtidStr)
_ = l.curEndLocation.SetGTID(gset.Origin())
return
}
clone := gset.Clone()
err = clone.Update(gtidStr)
if err != nil {
log.L().DPanic("failed to update GTID set",
zap.String("GTID", gtidStr),
zap.Error(err))
return
}
err = l.curEndLocation.SetGTID(clone.Origin())
if err != nil {
log.L().DPanic("failed to set GTID set",
zap.String("GTID", gtidStr),
zap.Error(err))
}
}
// update maintains the member of locationRecorder as their definitions.
// - curStartLocation is assigned to curEndLocation
// - curEndLocation is tried to be updated in-place
// - txnEndLocation is assigned to curEndLocation when `e` is the last event of a transaction.
func (l *locationRecorder) update(e *replication.BinlogEvent) {
l.mu.Lock()
defer l.mu.Unlock()
// GTID part is maintained separately
l.curStartLocation.Position = l.curEndLocation.Position
if l.needUpdateStartGTID {
l.updateCurStartGTID()
l.needUpdateStartGTID = false
}
if !shouldUpdatePos(e) {
return
}
if event, ok := e.Event.(*replication.RotateEvent); ok {
nextName := string(event.NextLogName)
if l.curEndLocation.Position.Name != nextName {
l.curEndLocation.Position.Name = nextName
l.curEndLocation.Position.Pos = binlog.FileHeaderLen
l.saveTxnEndLocation()
}
return
}
l.curEndLocation.Position.Pos = e.Header.LogPos
switch ev := e.Event.(type) {
case *replication.GTIDEvent:
l.setCurEndGTID(e)
case *replication.MariadbGTIDEvent:
l.setCurEndGTID(e)
if !ev.IsDDL() {
l.inDML = true
}
case *replication.XIDEvent:
// for transactional engines like InnoDB, COMMIT is xid event
l.saveTxnEndLocation()
l.inDML = false
l.needUpdateStartGTID = true
case *replication.QueryEvent:
query := strings.TrimSpace(string(ev.Query))
switch query {
case "BEGIN":
// MySQL will write a "BEGIN" query event when it starts a DML transaction, we use this event to distinguish
// DML query event which comes from a session binlog_format = STATEMENT.
// But MariaDB will not write "BEGIN" query event, we simply hope user should not do that.
l.inDML = true
case "COMMIT":
// for non-transactional engines like MyISAM, COMMIT is query event
l.inDML = false
}
if l.inDML {
return
}
l.needUpdateStartGTID = true
l.saveTxnEndLocation()
}
}
// String implements fmt.Stringer.
func (l *locationRecorder) String() string {
return fmt.Sprintf("curStartLocation: %s, curEndLocation: %s, txnEndLocation: %s",
l.curStartLocation.String(), l.curEndLocation.String(), l.txnEndLocation.String())
}
| shouldUpdatePos |
compat.py | import sys
import django
PY3 = (sys.version_info >= (3,))
try:
# Django 1.5+
from django.utils.encoding import smart_text, smart_bytes
except ImportError:
# older Django, thus definitely Python 2
from django.utils.encoding import smart_unicode, smart_str
smart_text = smart_unicode
smart_bytes = smart_str
if PY3:
bytes_type = bytes
else:
|
if django.VERSION[:2] >= (1, 6):
from django.core.cache.backends.base import DEFAULT_TIMEOUT as DJANGO_DEFAULT_TIMEOUT
DEFAULT_TIMEOUT = DJANGO_DEFAULT_TIMEOUT
else:
DEFAULT_TIMEOUT = None
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
Backported from Django 1.5+.
"""
if not PY3:
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
| bytes_type = str |
0076_auto_20210624_1015.py | # Generated by Django 3.1.12 on 2021-06-24 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("users", "0075_auto_20210607_1312"),
]
operations = [
migrations.AlterField(
model_name="userprofile",
name="lang",
field=models.CharField(
blank=True,
choices=[
("ar", "العربية"),
("as", "অসমীয়া"),
("bcl", "Bikol Central"),
("br", "brezhoneg"),
("da", "dansk"),
("dag", "dagbanli"),
("de", "Deutsch"),
("diq", "Zazaki"),
("en", "English"),
("en-gb", "British English"),
("eo", "Esperanto"),
("es", "español"),
("fa", "فارسی"),
("fi", "suomi"),
("fr", "français"),
("gu", "ગુજરાતી"),
("guw", "gungbe"),
("he", "עברית"),
("hi", "हिन्दी"),
("hy", "հայերեն"),
("id", "Bahasa Indonesia"),
("io", "Ido"),
("it", "italiano"),
("ja", "日本語"),
("ko", "한국어"),
("lv", "latviešu"),
("mk", "македонски"),
("mnw", "ဘာသာ မန်"),
("mr", "मराठी"),
("ms", "Bahasa Melayu"),
("my", "မြန်မာဘာသာ"),
("pl", "polski"),
("pt", "português"),
("pt-br", "português do Brasil"),
("ro", "română"),
("ru", "русский"),
("scn", "sicilianu"),
("sr-ec", "sr-cyrl"),
("sv", "svenska"),
("ta", "தமிழ்"),
("tr", "Türkçe"),
("uk", "українська"),
("vi", "Tiếng Việt"),
("zh-hans", "中文(简体)"),
("zh-hant", "中文(繁體)"),
],
help_text="Language", | null=True,
),
),
] | max_length=128, |
gen_test.go | package gen
import ( | "database/sql"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/tal-tech/go-zero/core/logx"
"github.com/tal-tech/go-zero/core/stringx"
"github.com/sliveryou/goctl/config"
"github.com/sliveryou/goctl/model/sql/builderx"
)
var source = "CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,\n `mobile` varchar(255) COLLATE utf8mb4_bin NOT NULL,\n `class` bigint NOT NULL,\n `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,\n `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,\n `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`id`),\n UNIQUE KEY `mobile_unique` (`mobile`),\n UNIQUE KEY `class_name_unique` (`class`,`name`),\n KEY `create_index` (`create_time`),\n KEY `name_index` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;"
func TestCacheModel(t *testing.T) {
logx.Disable()
_ = Clean()
sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
err := ioutil.WriteFile(sqlFile, []byte(source), 0o777)
assert.Nil(t, err)
dir := filepath.Join(t.TempDir(), "./testmodel")
cacheDir := filepath.Join(dir, "cache")
noCacheDir := filepath.Join(dir, "nocache")
g, err := NewDefaultGenerator(cacheDir, &config.Config{
NamingFormat: "GoZero",
})
assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err)
assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(cacheDir, "TestUserModel.go"))
return err == nil
}())
g, err = NewDefaultGenerator(noCacheDir, &config.Config{
NamingFormat: "gozero",
})
assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, false, "go_zero")
assert.Nil(t, err)
assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(noCacheDir, "testusermodel.go"))
return err == nil
}())
}
func TestNamingModel(t *testing.T) {
logx.Disable()
_ = Clean()
sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
err := ioutil.WriteFile(sqlFile, []byte(source), 0o777)
assert.Nil(t, err)
dir, _ := filepath.Abs("./testmodel")
camelDir := filepath.Join(dir, "camel")
snakeDir := filepath.Join(dir, "snake")
defer func() {
_ = os.RemoveAll(dir)
}()
g, err := NewDefaultGenerator(camelDir, &config.Config{
NamingFormat: "GoZero",
})
assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err)
assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(camelDir, "TestUserModel.go"))
return err == nil
}())
g, err = NewDefaultGenerator(snakeDir, &config.Config{
NamingFormat: "go_zero",
})
assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err)
assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(snakeDir, "test_user_model.go"))
return err == nil
}())
}
func TestWrapWithRawString(t *testing.T) {
assert.Equal(t, "``", wrapWithRawString("", false))
assert.Equal(t, "``", wrapWithRawString("``", false))
assert.Equal(t, "`a`", wrapWithRawString("a", false))
assert.Equal(t, "a", wrapWithRawString("a", true))
assert.Equal(t, "` `", wrapWithRawString(" ", false))
}
func TestFields(t *testing.T) {
type Student struct {
ID int64 `db:"id"`
Name string `db:"name"`
Age sql.NullInt64 `db:"age"`
Score sql.NullFloat64 `db:"score"`
CreateTime time.Time `db:"create_time"`
UpdateTime sql.NullTime `db:"update_time"`
}
var (
studentFieldNames = builderx.RawFieldNames(&Student{})
studentRows = strings.Join(studentFieldNames, ",")
studentRowsExpectAutoSet = strings.Join(stringx.Remove(studentFieldNames, "`id`", "`create_time`", "`update_time`"), ",")
studentRowsWithPlaceHolder = strings.Join(stringx.Remove(studentFieldNames, "`id`", "`create_time`", "`update_time`"), "=?,") + "=?"
)
assert.Equal(t, []string{"`id`", "`name`", "`age`", "`score`", "`create_time`", "`update_time`"}, studentFieldNames)
assert.Equal(t, "`id`,`name`,`age`,`score`,`create_time`,`update_time`", studentRows)
assert.Equal(t, "`name`,`age`,`score`", studentRowsExpectAutoSet)
assert.Equal(t, "`name`=?,`age`=?,`score`=?", studentRowsWithPlaceHolder)
} | |
collapse-vtune.rs | mod common;
use std::fs::File;
use std::io::{self, BufReader, Cursor};
use std::process::{Command, Stdio};
use assert_cmd::prelude::*;
use inferno::collapse::vtune::{Folder, Options};
use log::Level;
use pretty_assertions::assert_eq;
use testing_logger::CapturedLog;
fn test_collapse_vtune(test_file: &str, expected_file: &str, options: Options) -> io::Result<()> {
common::test_collapse(Folder::from(options), test_file, expected_file, false)
}
fn test_collapse_vtune_error(test_file: &str, options: Options) -> io::Error {
common::test_collapse_error(Folder::from(options), test_file)
}
fn test_collapse_vtune_logs_with_options<F>(input_file: &str, asserter: F, options: Options)
where
F: Fn(&Vec<CapturedLog>),
{
common::test_collapse_logs(Folder::from(options), input_file, asserter);
}
fn test_collapse_vtune_logs<F>(input_file: &str, asserter: F)
where
F: Fn(&Vec<CapturedLog>),
{
test_collapse_vtune_logs_with_options(input_file, asserter, Options::default());
}
#[test] | }
#[test]
fn collapse_vtune_no_modules() {
let test_file = "./tests/data/collapse-vtune/vtune.csv";
let result_file = "./tests/data/collapse-vtune/results/vtune-no-modules.txt";
test_collapse_vtune(
test_file,
result_file,
Options {
no_modules: true,
..Default::default()
},
)
.unwrap()
}
#[test]
fn collapse_vtune_should_log_warning_for_ending_before_header() {
test_collapse_vtune_logs(
"./tests/data/collapse-vtune/end-before-header.csv",
|captured_logs| {
let nwarnings = captured_logs
.into_iter()
.filter(|log| log.body == "File ended before header" && log.level == Level::Warn)
.count();
assert_eq!(
nwarnings, 1,
"warning logged {} times, but should be logged exactly once",
nwarnings
);
},
);
}
#[test]
fn collapse_vtune_should_return_error_for_skipped_indent_level() {
let test_file = "./tests/data/collapse-vtune/skipped-indentation.csv";
let error = test_collapse_vtune_error(test_file, Options::default());
assert_eq!(error.kind(), io::ErrorKind::InvalidData);
assert!(error
.to_string()
.starts_with("Skipped indentation level at line"));
}
#[test]
fn collapse_vtune_should_return_error_for_invalid_time_field() {
let test_file = "./tests/data/collapse-vtune/invalid-time-field.csv";
let error = test_collapse_vtune_error(test_file, Options::default());
assert_eq!(error.kind(), io::ErrorKind::InvalidData);
assert!(error
.to_string()
.starts_with("Invalid `CPU Time:Self` field"));
}
#[test]
fn collapse_vtune_should_return_error_for_bad_stack_line() {
let test_file = "./tests/data/collapse-vtune/bad-stack-line.csv";
let error = test_collapse_vtune_error(test_file, Options::default());
assert_eq!(error.kind(), io::ErrorKind::InvalidData);
assert!(error.to_string().starts_with("Unable to parse stack line"));
}
#[test]
fn collapse_vtune_cli() {
let input_file = "./tests/data/collapse-vtune/vtune.csv";
let expected_file = "./tests/data/collapse-vtune/results/vtune-default.txt";
// Test with file passed in
let output = Command::cargo_bin("inferno-collapse-vtune")
.unwrap()
.arg(input_file)
.output()
.expect("failed to execute process");
let expected = BufReader::new(File::open(expected_file).unwrap());
common::compare_results(Cursor::new(output.stdout), expected, expected_file, false);
// Test with STDIN
let mut child = Command::cargo_bin("inferno-collapse-vtune")
.unwrap()
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.expect("Failed to spawn child process");
let mut input = BufReader::new(File::open(input_file).unwrap());
let stdin = child.stdin.as_mut().expect("Failed to open stdin");
io::copy(&mut input, stdin).unwrap();
let output = child.wait_with_output().expect("Failed to read stdout");
let expected = BufReader::new(File::open(expected_file).unwrap());
common::compare_results(Cursor::new(output.stdout), expected, expected_file, false);
} | fn collapse_vtune_default() {
let test_file = "./tests/data/collapse-vtune/vtune.csv";
let result_file = "./tests/data/collapse-vtune/results/vtune-default.txt";
test_collapse_vtune(test_file, result_file, Options::default()).unwrap() |
main.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by cloud.google.com/go/internal/gapicgen/gensnippets. DO NOT EDIT.
// [START vision_v1_generated_ImageAnnotator_AsyncBatchAnnotateFiles_sync]
package main
import (
"context"
vision "cloud.google.com/go/vision/apiv1"
visionpb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
func | () {
ctx := context.Background()
c, err := vision.NewImageAnnotatorClient(ctx)
if err != nil {
// TODO: Handle error.
}
defer c.Close()
req := &visionpb.AsyncBatchAnnotateFilesRequest{
// TODO: Fill request struct fields.
// See https://pkg.go.dev/google.golang.org/genproto/googleapis/cloud/vision/v1#AsyncBatchAnnotateFilesRequest.
}
op, err := c.AsyncBatchAnnotateFiles(ctx, req)
if err != nil {
// TODO: Handle error.
}
resp, err := op.Wait(ctx)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
// [END vision_v1_generated_ImageAnnotator_AsyncBatchAnnotateFiles_sync]
| main |
test_finance.py | #
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests for the zipline.finance package
"""
from datetime import datetime, timedelta
import os
from nose.tools import timed
from nose.tools import nottest
import numpy as np
import pandas as pd
import pytz
from six import iteritems
from six.moves import range
from testfixtures import TempDirectory
from zipline.finance.blotter.simulation_blotter import SimulationBlotter
from zipline.finance.execution import MarketOrder, LimitOrder
from zipline.finance.metrics import MetricsTracker, load as load_metrics_set
from zipline.finance.trading import SimulationParameters
from zipline.data.us_equity_pricing import BcolzDailyBarReader
from zipline.data.minute_bars import BcolzMinuteBarReader
from zipline.data.data_portal import DataPortal
from zipline.data.us_equity_pricing import BcolzDailyBarWriter
from zipline.finance.slippage import FixedSlippage, FixedBasisPointsSlippage
from zipline.finance.asset_restrictions import NoRestrictions
from zipline.protocol import BarData
from zipline.testing import write_bcolz_minute_data
import zipline.testing.fixtures as zf
import zipline.utils.factory as factory
DEFAULT_TIMEOUT = 15 # seconds
EXTENDED_TIMEOUT = 90
_multiprocess_can_split_ = False
class FinanceTestCase(zf.WithAssetFinder,
zf.WithTradingCalendars,
zf.ZiplineTestCase):
ASSET_FINDER_EQUITY_SIDS = 1, 2, 133
start = START_DATE = pd.Timestamp('2006-01-01', tz='utc')
end = END_DATE = pd.Timestamp('2006-12-31', tz='utc')
def | (self):
super(FinanceTestCase, self).init_instance_fixtures()
self.zipline_test_config = {'sid': 133}
# TODO: write tests for short sales
# TODO: write a test to do massive buying or shorting.
@timed(DEFAULT_TIMEOUT)
@nottest
def test_partially_filled_orders(self):
# create a scenario where order size and trade size are equal
# so that orders must be spread out over several trades.
params = {
'trade_count': 360,
'trade_interval': timedelta(minutes=1),
'order_count': 2,
'order_amount': 100,
'order_interval': timedelta(minutes=1),
# because we placed two orders for 100 shares each, and the volume
# of each trade is 100, and by default you can take up 10% of the
# bar's volume (per FixedBasisPointsSlippage, the default slippage
# model), the simulator should spread the order into 20 trades of
# 10 shares per order.
'expected_txn_count': 20,
'expected_txn_volume': 2 * 100,
'default_slippage': True
}
self.transaction_sim(**params)
# same scenario, but with short sales
params2 = {
'trade_count': 360,
'trade_interval': timedelta(minutes=1),
'order_count': 2,
'order_amount': -100,
'order_interval': timedelta(minutes=1),
'expected_txn_count': 20,
'expected_txn_volume': 2 * -100,
'default_slippage': True
}
self.transaction_sim(**params2)
@timed(DEFAULT_TIMEOUT)
@nottest
def test_collapsing_orders(self):
# create a scenario where order.amount <<< trade.volume
# to test that several orders can be covered properly by one trade,
# but are represented by multiple transactions.
params1 = {
'trade_count': 6,
'trade_interval': timedelta(hours=1),
'order_count': 24,
'order_amount': 1,
'order_interval': timedelta(minutes=1),
# because we placed an orders totaling less than 25% of one trade
# the simulator should produce just one transaction.
'expected_txn_count': 24,
'expected_txn_volume': 24
}
self.transaction_sim(**params1)
# second verse, same as the first. except short!
params2 = {
'trade_count': 6,
'trade_interval': timedelta(hours=1),
'order_count': 24,
'order_amount': -1,
'order_interval': timedelta(minutes=1),
'expected_txn_count': 24,
'expected_txn_volume': -24
}
self.transaction_sim(**params2)
# Runs the collapsed trades over daily trade intervals.
# Ensuring that our delay works for daily intervals as well.
params3 = {
'trade_count': 6,
'trade_interval': timedelta(days=1),
'order_count': 24,
'order_amount': 1,
'order_interval': timedelta(minutes=1),
'expected_txn_count': 24,
'expected_txn_volume': 24
}
self.transaction_sim(**params3)
@timed(DEFAULT_TIMEOUT)
@nottest
def test_alternating_long_short(self):
    # Scenario: alternate buys and sells of equal size; the net traded
    # volume over the whole simulation must come out to zero.
    params1 = {
        'trade_count': int(6.5 * 60 * 4),  # four 6.5-hour minutely sessions
        'trade_interval': timedelta(minutes=1),
        'order_count': 4,
        'order_amount': 10,
        'order_interval': timedelta(hours=24),
        'alternate': True,       # flip direction on every order
        'complete_fill': True,   # each order must fill in one transaction
        'expected_txn_count': 4,
        'expected_txn_volume': 0  # equal buys and sells
    }
    self.transaction_sim(**params1)
def transaction_sim(self, **params):
    """This is a utility method that asserts expected
    results for conversion of orders to transactions given a
    trade history.

    Required keys in ``params``:
        trade_count / trade_interval -- how many synthetic bars to write
            and how far apart they are (minute vs daily data is chosen
            from the interval).
        order_count / order_amount / order_interval -- the stream of
            market orders to place against those bars.
        expected_txn_count / expected_txn_volume -- assertions on the
            resulting transactions.

    Optional keys:
        alternate -- flip order direction on each order.
        complete_fill -- assert each order fills in exactly one txn.
        default_slippage -- use the blotter's default slippage model
            instead of FixedBasisPointsSlippage.
    """
    trade_count = params['trade_count']
    trade_interval = params['trade_interval']
    order_count = params['order_count']
    order_amount = params['order_amount']
    order_interval = params['order_interval']
    expected_txn_count = params['expected_txn_count']
    expected_txn_volume = params['expected_txn_volume']

    # optional parameters
    # ---------------------
    # if present, alternate between long and short sales
    alternate = params.get('alternate')

    # if present, expect transaction amounts to match orders exactly.
    complete_fill = params.get('complete_fill')

    asset1 = self.asset_finder.retrieve_asset(1)
    with TempDirectory() as tempdir:

        # Sub-daily intervals get a minutely bcolz store; otherwise daily.
        if trade_interval < timedelta(days=1):
            sim_params = factory.create_simulation_parameters(
                start=self.start,
                end=self.end,
                data_frequency="minute"
            )

            # +100 minutes of padding past the last trade so the
            # simulation loop can keep processing open orders.
            minutes = self.trading_calendar.minutes_window(
                sim_params.first_open,
                int((trade_interval.total_seconds() / 60) * trade_count)
                + 100)

            # Flat price of 10.1 and constant volume of 100 per bar.
            price_data = np.array([10.1] * len(minutes))
            assets = {
                asset1.sid: pd.DataFrame({
                    "open": price_data,
                    "high": price_data,
                    "low": price_data,
                    "close": price_data,
                    "volume": np.array([100] * len(minutes)),
                    "dt": minutes
                }).set_index("dt")
            }

            write_bcolz_minute_data(
                self.trading_calendar,
                self.trading_calendar.sessions_in_range(
                    self.trading_calendar.minute_to_session_label(
                        minutes[0]
                    ),
                    self.trading_calendar.minute_to_session_label(
                        minutes[-1]
                    )
                ),
                tempdir.path,
                iteritems(assets),
            )

            equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

            data_portal = DataPortal(
                self.asset_finder, self.trading_calendar,
                first_trading_day=equity_minute_reader.first_trading_day,
                equity_minute_reader=equity_minute_reader,
            )
        else:
            sim_params = factory.create_simulation_parameters(
                data_frequency="daily"
            )

            days = sim_params.sessions

            assets = {
                1: pd.DataFrame({
                    "open": [10.1] * len(days),
                    "high": [10.1] * len(days),
                    "low": [10.1] * len(days),
                    "close": [10.1] * len(days),
                    "volume": [100] * len(days),
                    "day": [day.value for day in days]
                }, index=days)
            }

            path = os.path.join(tempdir.path, "testdata.bcolz")
            BcolzDailyBarWriter(path, self.trading_calendar, days[0],
                                days[-1]).write(
                assets.items()
            )

            equity_daily_reader = BcolzDailyBarReader(path)

            data_portal = DataPortal(
                self.asset_finder, self.trading_calendar,
                first_trading_day=equity_daily_reader.first_trading_day,
                equity_daily_reader=equity_daily_reader,
            )

        # None makes SimulationBlotter fall back to its default model.
        if "default_slippage" not in params or \
                not params["default_slippage"]:
            slippage_func = FixedBasisPointsSlippage()
        else:
            slippage_func = None
        blotter = SimulationBlotter(slippage_func)

        start_date = sim_params.first_open

        # alternator flips the sign of every other order when requested.
        if alternate:
            alternator = -1
        else:
            alternator = 1

        tracker = MetricsTracker(
            trading_calendar=self.trading_calendar,
            first_session=sim_params.start_session,
            last_session=sim_params.end_session,
            capital_base=sim_params.capital_base,
            emission_rate=sim_params.emission_rate,
            data_frequency=sim_params.data_frequency,
            asset_finder=self.asset_finder,
            metrics=load_metrics_set('none'),
        )

        # replicate what tradesim does by going through every minute or day
        # of the simulation and processing open orders each time
        if sim_params.data_frequency == "minute":
            ticks = minutes
        else:
            ticks = days

        transactions = []

        order_list = []
        order_date = start_date
        for tick in ticks:
            blotter.current_dt = tick
            if tick >= order_date and len(order_list) < order_count:
                # place an order
                direction = alternator ** len(order_list)
                order_id = blotter.order(
                    asset1,
                    order_amount * direction,
                    MarketOrder(),
                )
                order_list.append(blotter.orders[order_id])
                order_date = order_date + order_interval
                # move after market orders to just after market next
                # market open.
                # NOTE(review): 21:00/14:30 appear to be UTC close/open of
                # the US equity session -- confirm against the calendar.
                if order_date.hour >= 21:
                    if order_date.minute >= 00:
                        order_date = order_date + timedelta(days=1)
                        order_date = order_date.replace(hour=14, minute=30)
            else:
                # No order this tick: let the blotter match open orders
                # against the current bar instead.
                bar_data = BarData(
                    data_portal=data_portal,
                    simulation_dt_func=lambda: tick,
                    data_frequency=sim_params.data_frequency,
                    trading_calendar=self.trading_calendar,
                    restrictions=NoRestrictions(),
                )
                txns, _, closed_orders = blotter.get_transactions(bar_data)
                for txn in txns:
                    tracker.process_transaction(txn)
                    transactions.append(txn)
                blotter.prune_orders(closed_orders)

        # Every order must be for asset1, with alternating sign if asked.
        for i in range(order_count):
            order = order_list[i]
            self.assertEqual(order.asset, asset1)
            self.assertEqual(order.amount, order_amount * alternator ** i)

        if complete_fill:
            self.assertEqual(len(transactions), len(order_list))

        total_volume = 0
        for i in range(len(transactions)):
            txn = transactions[i]
            total_volume += txn.amount
            if complete_fill:
                order = order_list[i]
                self.assertEqual(order.amount, txn.amount)

        self.assertEqual(total_volume, expected_txn_volume)
        self.assertEqual(len(transactions), expected_txn_count)

        # A flat (zero-volume) book must not create a position entry.
        if total_volume == 0:
            self.assertRaises(KeyError, lambda: tracker.positions[asset1])
        else:
            cumulative_pos = tracker.positions[asset1]
            self.assertEqual(total_volume, cumulative_pos.amount)

        # the open orders should not contain the asset.
        oo = blotter.open_orders
        self.assertNotIn(
            asset1,
            oo,
            "Entry is removed when no open orders"
        )
def test_blotter_processes_splits(self):
    """A split must rescale open orders of the split asset and leave
    other assets' open orders untouched."""
    blotter = SimulationBlotter(equity_slippage=FixedSlippage())

    # set up two open limit orders with very low limit prices,
    # one for sid 1 and one for sid 2
    asset1 = self.asset_finder.retrieve_asset(1)
    asset2 = self.asset_finder.retrieve_asset(2)
    asset133 = self.asset_finder.retrieve_asset(133)

    blotter.order(asset1, 100, LimitOrder(10, asset=asset1))
    blotter.order(asset2, 100, LimitOrder(10, asset=asset2))

    # send in splits for assets 133 and 2. We have no open orders for
    # asset 133 so it should be ignored.
    blotter.process_splits([(asset133, 0.5), (asset2, 0.3333)])

    for asset in [asset1, asset2]:
        order_lists = blotter.open_orders[asset]
        self.assertIsNotNone(order_lists)
        self.assertEqual(1, len(order_lists))
    asset1_order = blotter.open_orders[1][0]
    asset2_order = blotter.open_orders[2][0]

    # make sure the asset1 order didn't change
    self.assertEqual(100, asset1_order.amount)
    self.assertEqual(10, asset1_order.limit)
    self.assertEqual(1, asset1_order.asset)

    # make sure the asset2 order did change
    # to 300 shares at 3.33 (1/0.3333 ratio: amount up, limit down)
    self.assertEqual(300, asset2_order.amount)
    self.assertEqual(3.33, asset2_order.limit)
    self.assertEqual(2, asset2_order.asset)
class SimParamsTestCase(zf.WithTradingCalendars, zf.ZiplineTestCase):
    """
    Tests for date management utilities in zipline.finance.trading.
    """

    def test_simulation_parameters(self):
        # A full-year simulation must close on the final trading day of
        # December.
        sp = SimulationParameters(
            start_session=pd.Timestamp("2008-01-01", tz='UTC'),
            end_session=pd.Timestamp("2008-12-31", tz='UTC'),
            capital_base=100000,
            trading_calendar=self.trading_calendar,
        )
        # assertEqual (instead of assertTrue(x == y)) reports both values
        # on failure.
        self.assertEqual(sp.last_close.month, 12)
        self.assertEqual(sp.last_close.day, 31)

    @timed(DEFAULT_TIMEOUT)
    def test_sim_params_days_in_period(self):

        #     January 2008
        #  Su Mo Tu We Th Fr Sa
        #         1  2  3  4  5
        #   6  7  8  9 10 11 12
        #  13 14 15 16 17 18 19
        #  20 21 22 23 24 25 26
        #  27 28 29 30 31

        params = SimulationParameters(
            start_session=pd.Timestamp("2007-12-31", tz='UTC'),
            end_session=pd.Timestamp("2008-01-07", tz='UTC'),
            capital_base=100000,
            trading_calendar=self.trading_calendar,
        )

        expected_trading_days = (
            datetime(2007, 12, 31, tzinfo=pytz.utc),
            # Skip new years
            # holidays taken from: http://www.nyse.com/press/1191407641943.html
            datetime(2008, 1, 2, tzinfo=pytz.utc),
            datetime(2008, 1, 3, tzinfo=pytz.utc),
            datetime(2008, 1, 4, tzinfo=pytz.utc),
            # Skip Saturday
            # Skip Sunday
            datetime(2008, 1, 7, tzinfo=pytz.utc)
        )

        num_expected_trading_days = 5
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual(
            num_expected_trading_days,
            len(params.sessions)
        )
        np.testing.assert_array_equal(expected_trading_days,
                                      params.sessions.tolist())
| init_instance_fixtures |
conf.py | from pathlib import Path
import subprocess
root_path = Path(__file__).parent.parent.resolve()
extensions = [
'sphinx.ext.todo',
'sphinxcontrib.drawio', | ]
version = (root_path / 'VERSION').read_text().strip()
project = 'hat-stc'
copyright = '2020-2021, Hat Open AUTHORS'
master_doc = 'index'
html_theme = 'furo'
html_static_path = ['static']
html_css_files = ['custom.css']
html_use_index = False
html_show_sourcelink = False
html_show_sphinx = False
html_sidebars = {'**': ["sidebar/brand.html",
"sidebar/scroll-start.html",
"sidebar/navigation.html",
"sidebar/scroll-end.html"]}
todo_include_todos = True
p = subprocess.run(['which', 'drawio'], capture_output=True, check=True)
drawio_binary_path = p.stdout.decode('utf-8').strip() | |
mod.rs | pub mod dovi_rpu;
pub mod extension_metadata;
pub mod generate;
pub mod profiles;
pub mod rpu_data_header;
pub mod rpu_data_mapping;
pub mod rpu_data_nlq;
pub mod vdr_dm_data;
use crc::{Crc, CRC_32_MPEG_2};
pub const NUM_COMPONENTS: usize = 3;
#[inline(always)]
fn compute_crc32(data: &[u8]) -> u32 | {
let crc = Crc::<u32>::new(&CRC_32_MPEG_2);
let mut digest = crc.digest();
digest.update(data);
digest.finalize()
} |
|
list.go | // Copyright 2020 The Okteto Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"github.com/okteto/okteto/cmd/namespace"
"github.com/spf13/cobra"
)
//List lists resources
func | (ctx context.Context) *cobra.Command {
cmd := &cobra.Command{
Use: "list",
Short: "List resources",
}
cmd.AddCommand(namespace.List(ctx))
return cmd
}
| List |
preview_test.go | package preview_test
import (
"github.com/jenkins-x/jx/pkg/cmd/preview"
"github.com/jenkins-x/jx/pkg/cmd/testhelpers"
"os"
"testing"
"github.com/jenkins-x/jx/pkg/cmd/opts"
"github.com/jenkins-x/jx/pkg/config"
gits_test "github.com/jenkins-x/jx/pkg/gits/mocks"
helm_test "github.com/jenkins-x/jx/pkg/helm/mocks"
)
// Constants for some test data to be used.
const (
namespace = "jx"
)
func TestGetPreviewValuesConfig(t *testing.T) {
t.Parallel()
tests := []struct {
opts preview.PreviewOptions
env map[string]string
domain string
expectedYAMLConfig string
}{
{
opts: preview.PreviewOptions{
HelmValuesConfig: config.HelmValuesConfig{
ExposeController: &config.ExposeController{},
},
},
env: map[string]string{
preview.DOCKER_REGISTRY: "my.registry",
preview.ORG: "my-org",
preview.APP_NAME: "my-app",
preview.PREVIEW_VERSION: "1.0.0", | image:
repository: my.registry/my-org/my-app
tag: 1.0.0
`,
},
{
opts: preview.PreviewOptions{
HelmValuesConfig: config.HelmValuesConfig{
ExposeController: &config.ExposeController{
Config: config.ExposeControllerConfig{
HTTP: "false",
TLSAcme: "true",
},
},
},
},
env: map[string]string{
preview.DOCKER_REGISTRY: "my.registry",
preview.ORG: "my-org",
preview.APP_NAME: "my-app",
preview.PREVIEW_VERSION: "1.0.0",
},
domain: "jenkinsx.io",
expectedYAMLConfig: `expose:
config:
domain: jenkinsx.io
http: "false"
tlsacme: "true"
preview:
image:
repository: my.registry/my-org/my-app
tag: 1.0.0
`,
},
}
co := &opts.CommonOptions{}
testhelpers.ConfigureTestOptions(co, gits_test.NewMockGitter(), helm_test.NewMockHelmer())
for i, test := range tests {
for k, v := range test.env {
os.Setenv(k, v)
}
test.opts.CommonOptions = co
config, err := test.opts.GetPreviewValuesConfig(nil, test.domain)
if err != nil {
t.Errorf("[%d] got unexpected err: %v", i, err)
continue
}
configYAML, err := config.String()
if err != nil {
t.Errorf("[%d] %v", i, err)
continue
}
if test.expectedYAMLConfig != configYAML {
t.Errorf("[%d] expected %#v but got %#v", i, test.expectedYAMLConfig, configYAML)
}
}
} | },
expectedYAMLConfig: `expose:
config: {}
preview: |
0052_merge_0051_catalog_content_0051_video_resource_group.py | # Generated by Django 3.2.3 on 2021-07-13 14:58
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration reconciling the two parallel 0051 branches."""
    # (Fix: removed a stray '|' extraction artifact before `dependencies`.)

    dependencies = [
        ('api', '0051_catalog_content'),
        ('api', '0051_video_resource_group'),
    ]

    # Pure merge: no schema operations are required.
    operations = [
    ]
|
register.py |
#
# Py-Alpha-AMD Registration Framework
# Author: Johan Ofverstedt
# Reference: Fast and Robust Symmetric Image Registration Based on Distances Combining Intensity and Spatial Information
#
# Copyright 2019 Johan Ofverstedt
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
#
# Registration framework
#
# Import Numpy/Scipy
import numpy as np
import scipy as sp
import scipy.misc
# Import transforms
from transforms import CompositeTransform
from transforms import AffineTransform
from transforms import Rigid2DTransform
from transforms import Rotate2DTransform
from transforms import TranslationTransform
from transforms import ScalingTransform
# Import distances
from distances import QuantizedImage
from distances import alpha_amd
from distances import symmetric_amd_distance
# Import optimizers
from optimizers import GradientDescentOptimizer
# Import generators and filters
import generators
import filters
# Import misc
import math
import sys
import time
import cProfile, pstats
class Register:
def __init__(self, dim):
    """Set up an empty registration pipeline for `dim`-dimensional images.

    All configuration happens through the set_*/add_* methods before
    calling initialize() and then run().
    """
    self.dim = dim
    # Fraction of points sampled per distance evaluation (1.0 = all).
    self.sampling_fraction = 1.0
    # Rows of (start, end) gradient-descent step lengths; one row is
    # shared across pyramid levels, multiple rows are per-level.
    self.step_lengths = np.array([[0.1, 1.0]])
    # Iteration budget: an int (shared) or a per-level sequence.
    self.iterations = 1500
    # Number of alpha (intensity quantization) levels for the AMD distance.
    self.alpha_levels = 7
    # Optimizer stops when the gradient magnitude falls below this.
    self.gradient_magnitude_threshold = 0.00001

    # Reference/floating images, masks and optional per-pixel weights.
    self.ref_im = None
    self.flo_im = None
    self.ref_mask = None
    self.flo_mask = None
    self.ref_weights = None
    self.flo_weights = None

    # Transforms
    self.initial_transforms = []
    self.transforms_param_scaling = []
    self.output_transforms = []
    self.values = []
    self.value_history = []

    # Resolution pyramid levels
    self.pyramid_factors = []
    self.pyramid_sigmas = []

    # One distance measure per pyramid level, built by initialize().
    self.distances = []

    # Reporting/Output
    self.report_func = None
    self.report_freq = 25
def add_initial_transform(self, transform, param_scaling=None):
    """Register a starting transform for the optimization.

    :param transform: transform object exposing get_param_count().
    :param param_scaling: optional per-parameter scaling array; defaults
        to unit scaling for every parameter.
    """
    if param_scaling is None:
        # Bug fix: previously read ``transforms`` (undefined here, the
        # parameter is singular) -- use this transform's parameter count.
        param_scaling = np.ones((transform.get_param_count(),))
    self.initial_transforms.append(transform)
    self.transforms_param_scaling.append(param_scaling)
def add_initial_transforms(self, transforms, param_scaling=None):
    """Register several starting transforms at once.

    :param transforms: iterable of transform objects.
    :param param_scaling: optional sequence of per-transform scaling
        arrays, parallel to ``transforms``; None means unit scaling.
    """
    for i, t in enumerate(transforms):
        if param_scaling is None:
            # Bug fix: ``transforms`` is the list; the parameter count
            # must come from the element ``t``.
            pscaling = np.ones((t.get_param_count(),))
        else:
            pscaling = param_scaling[i]
        self.add_initial_transform(t, pscaling)
def clear_transforms(self):
    """Drop all registered transforms and any accumulated results."""
    self.initial_transforms = []
    self.output_transforms = []
    self.transforms_param_scaling = []
    self.values = []
    self.value_history = []

def get_output(self, index):
    """Return (optimized_transform, final_distance_value) for run `index`."""
    return self.output_transforms[index], self.values[index]

def get_value_history(self, index, level):
    """Return the per-iteration value trace of run `index` at pyramid `level`."""
    return self.value_history[index][level]

def add_pyramid_level(self, factor, sigma):
    """Append a pyramid level: downsample by `factor` after Gaussian
    smoothing with `sigma`."""
    self.pyramid_factors.append(factor)
    self.pyramid_sigmas.append(sigma)

def add_pyramid_levels(self, factors, sigmas):
    """Append several pyramid levels from parallel sequences."""
    for i in range(len(factors)):
        self.add_pyramid_level(factors[i], sigmas[i])
def get_pyramid_level_count(self):
    """Number of registered resolution-pyramid levels.

    (Fix: the method name was elided by a garbled extraction; restored.)
    """
    return len(self.pyramid_factors)
def set_sampling_fraction(self, sampling_fraction):
    # Fraction of points used per distance evaluation (1.0 = all points).
    self.sampling_fraction = sampling_fraction

def set_iterations(self, iterations):
    # int (shared across levels) or per-level sequence; see run().
    self.iterations = iterations

def set_alpha_levels(self, alpha_levels):
    # Number of intensity quantization levels for the AMD distance.
    self.alpha_levels = alpha_levels

def set_step_lengths(self, step_lengths):
    # Accepts [start, end] or one [start, end] row per pyramid level.
    self.step_lengths = np.array(step_lengths)#np.array([start_step_length, end_step_length])

def set_reference_image(self, image, spacing = None):
    """Set the fixed image; `spacing` defaults to isotropic unit voxels."""
    self.ref_im = image
    if spacing is None:
        self.ref_spacing = np.ones(image.ndim)
    else:
        self.ref_spacing = spacing

def set_floating_image(self, image, spacing = None):
    """Set the moving image; `spacing` defaults to isotropic unit voxels."""
    self.flo_im = image
    if spacing is None:
        self.flo_spacing = np.ones(image.ndim)
    else:
        self.flo_spacing = spacing

def set_reference_mask(self, mask):
    # Boolean mask of valid reference-image pixels.
    self.ref_mask = mask

def set_floating_mask(self, mask):
    # Boolean mask of valid floating-image pixels.
    self.flo_mask = mask

def set_reference_weights(self, weights):
    # Optional per-pixel weights; None means 1.0 inside the mask.
    self.ref_weights = weights

def set_floating_weights(self, weights):
    # Optional per-pixel weights; None means 1.0 inside the mask.
    self.flo_weights = weights

def set_gradient_magnitude_threshold(self, t):
    # Optimizer termination threshold; see GradientDescentOptimizer.
    self.gradient_magnitude_threshold = t

def set_report_freq(self, freq):
    # Report progress every `freq` iterations.
    self.report_freq = freq

def set_report_func(self, func):
    # Callback (or one callback per initial transform) invoked on report.
    self.report_func = func
def initialize(self, pyramid_images_output_path=None):
    """Build the per-level distance measures from the configured images.

    Must be called after the images/masks are set and before run().
    If `pyramid_images_output_path` is given and the images are 2-D, the
    resampled pyramid images are written there as PNGs.
    """
    # Sensible defaults: a single full-resolution level and an identity
    # affine transform, when the caller configured none.
    if len(self.pyramid_factors) == 0:
        self.add_pyramid_level(1, 0.0)
    if len(self.initial_transforms) == 0:
        self.add_initial_transform(AffineTransform(self.dim))

    ### Preprocessing
    pyramid_levels = len(self.pyramid_factors)

    for i in range(pyramid_levels):
        factor = self.pyramid_factors[i]

        # Smooth then downsample both images for this pyramid level.
        ref_resampled = filters.downsample(filters.gaussian_filter(self.ref_im, self.pyramid_sigmas[i]), factor)
        flo_resampled = filters.downsample(filters.gaussian_filter(self.flo_im, self.pyramid_sigmas[i]), factor)

        ref_mask_resampled = filters.downsample(self.ref_mask, factor)
        flo_mask_resampled = filters.downsample(self.flo_mask, factor)

        ref_resampled = filters.normalize(ref_resampled, 0.0, ref_mask_resampled)
        flo_resampled = filters.normalize(flo_resampled, 0.0, flo_mask_resampled)

        if pyramid_images_output_path is not None and ref_resampled.ndim == 2:
            # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 --
            # this path only works on old SciPy; consider imageio.imwrite.
            scipy.misc.imsave('%sref_resampled_%d.png' % (pyramid_images_output_path, i+1), ref_resampled)
            scipy.misc.imsave('%sflo_resampled_%d.png' % (pyramid_images_output_path, i+1), flo_resampled)

        # Default weights: 1.0 inside the mask, 0.0 outside.
        if self.ref_weights is None:
            ref_weights = np.zeros(ref_resampled.shape)
            ref_weights[ref_mask_resampled] = 1.0
        else:
            ref_weights = filters.downsample(self.ref_weights, factor)
        if self.flo_weights is None:
            flo_weights = np.zeros(flo_resampled.shape)
            flo_weights[flo_mask_resampled] = 1.0
        else:
            flo_weights = filters.downsample(self.flo_weights, factor)

        # Image diagonal length (in physical units) normalizes distances.
        ref_diag = np.sqrt(np.square(np.array(ref_resampled.shape)*self.ref_spacing).sum())
        flo_diag = np.sqrt(np.square(np.array(flo_resampled.shape)*self.flo_spacing).sum())

        q_ref = QuantizedImage(ref_resampled, self.alpha_levels, ref_weights, self.ref_spacing*factor, remove_zero_weight_pnts = True)
        q_flo = QuantizedImage(flo_resampled, self.alpha_levels, flo_weights, self.flo_spacing*factor, remove_zero_weight_pnts = True)

        tf_ref = alpha_amd.AlphaAMD(q_ref, self.alpha_levels, ref_diag, self.ref_spacing*factor, ref_mask_resampled, ref_mask_resampled, interpolator_mode='linear', dt_fun = None, mask_out_edges = True)
        tf_flo = alpha_amd.AlphaAMD(q_flo, self.alpha_levels, flo_diag, self.flo_spacing*factor, flo_mask_resampled, flo_mask_resampled, interpolator_mode='linear', dt_fun = None, mask_out_edges = True)

        symmetric_measure = True
        squared_measure = False

        # Symmetric AMD distance wires the quantized images (sources) to
        # the AlphaAMD evaluators (targets) in both directions.
        sym_dist = symmetric_amd_distance.SymmetricAMDDistance(symmetric_measure=symmetric_measure, squared_measure=squared_measure)

        sym_dist.set_ref_image_source(q_ref)
        sym_dist.set_ref_image_target(tf_ref)

        sym_dist.set_flo_image_source(q_flo)
        sym_dist.set_flo_image_target(tf_flo)

        sym_dist.set_sampling_fraction(self.sampling_fraction)

        sym_dist.initialize()

        self.distances.append(sym_dist)
def run(self):
    """Optimize every initial transform over all pyramid levels.

    Results are appended to self.output_transforms/self.values; the
    per-iteration traces go to self.value_history. Coarse-level results
    seed the next finer level.
    """
    pyramid_level_count = len(self.pyramid_factors)
    transform_count = len(self.initial_transforms)

    for t_it in range(transform_count):
        init_transform = self.initial_transforms[t_it]
        param_scaling = self.transforms_param_scaling[t_it]

        self.value_history.append([])

        for lvl_it in range(pyramid_level_count):
            # Fresh optimizer per level, seeded with a copy of the
            # current best transform.
            opt = GradientDescentOptimizer(self.distances[lvl_it], init_transform.copy())

            # One shared (start, end) step-length pair, or one per level.
            if self.step_lengths.ndim == 1:
                opt.set_step_length(self.step_lengths[0], self.step_lengths[1])
            else:
                opt.set_step_length(self.step_lengths[lvl_it, 0], self.step_lengths[lvl_it, 1])
            opt.set_scalings(param_scaling)
            opt.set_gradient_magnitude_threshold(self.gradient_magnitude_threshold)
            opt.set_report_freq(self.report_freq)
            # report_func may be a single callback or one per transform.
            if type(self.report_func) is list or type(self.report_func) is tuple:
                opt.set_report_callback(self.report_func[t_it])
            else:
                opt.set_report_callback(self.report_func)

            # Iteration budget: shared int or per-level sequence.
            if isinstance(self.iterations, int):
                itercount = self.iterations
            else:
                assert(len(self.iterations) == pyramid_level_count)
                itercount = self.iterations[lvl_it]

            opt.optimize(itercount)

            if lvl_it + 1 == pyramid_level_count:
                # Finest level: record final transform and value.
                self.output_transforms.append(opt.get_transform())
                self.values.append(opt.get_value())
                self.initial_transforms[t_it] = opt.get_transform()
            else:
                # Seed the next (finer) pyramid level.
                init_transform = opt.get_transform()

            self.value_history[-1].append(opt.get_value_history())
| get_pyramid_level_count |
object.go | package v1 | HeaderKeyObjectMD5 = "baetyl-object-md5"
HeaderKeyObjectDir = "baetyl-object-dir"
) |
const (
HeaderKeyObjectUnpack = "baetyl-object-unpack" |
__init__.py | #!/usr/bin/env python | """This module contains several models for Knowledge Graph Embedding
All model classes must inherit class `BaseModel` (defined in model.py)
"""
# import selected Classes into the package level so they can be convieniently imported from the package.
# use from model import TransE instead of from model.transE import TransE
from kge.transE import TransE
from kge.distmult import DISTMULT
from kge.transH import TransH
from kge.transR import TransR
from kge.transD import TransD
from kge.stransE import STransE
# from model import *
__all__ = ["TransE", "DISTMULT", "TransH", "TransR", "TransE", "TransD", "STransE"] | # coding: utf-8
# @Author: lapis-hong
# @Date : 2018/8/14 |
BiMenu.js | // THIS FILE IS AUTO GENERATED
var GenIcon = require('../lib').GenIcon
// Renders the "menu" (hamburger) Boxicons glyph as a React icon component.
// (Fix: removed a stray '|' extraction artifact between `{` and `return`.)
module.exports.BiMenu = function BiMenu (props) {
  return GenIcon({"tag":"svg","attr":{"viewBox":"0 0 24 24"},"child":[{"tag":"path","attr":{"d":"M4 6H20V8H4zM4 11H20V13H4zM4 16H20V18H4z"}}]})(props);
};
|
AgentCAMovementStatusAdviceV01.go | package seev
import (
"encoding/xml"
"github.com/fairxio/finance-messaging/iso20022"
)
// Document02200101 is the XML document wrapper for the seev.022.001.01
// (AgentCAMovementStatusAdvice) message.
type Document02200101 struct {
	XMLName xml.Name `xml:"urn:iso:std:iso:20022:tech:xsd:seev.022.001.01 Document"`
	Message *AgentCAMovementStatusAdviceV01 `xml:"AgtCAMvmntStsAdvc"`
}

// AddMessage allocates the wrapped message and returns it for population.
func (d *Document02200101) AddMessage() *AgentCAMovementStatusAdviceV01 {
	d.Message = new(AgentCAMovementStatusAdviceV01)
	return d.Message
}
// Scope
// This message is sent by a CSD to an issuer (or its agent) to report the status, or a change in status, of
// - a global distribution status advice released by an issuer (or its agent);
// - a movement instruction released by an issuer (or its agent);
// - a movement cancellation request sent by the issuer (or its agent); and
// - the non-settlement of the movements at the CSD.
// Usage
// This message is used to report the status of:
// - the movements resulting from a movement instruction message, in which case, the Agent Corporate Action Movement Instruction Identification must be present;
// - the movements resulting from a global distribution status advice message (with the status, authorised), in which case, the Agent Corporate Action Global Distribution Status Advice Identification must be present;
// - the movement cancellation request, in which case, the Agent Corporate Action Movement Cancellation Request Identification must be present; and
// - the movements resulting from an election status advice (if the status of the election advice is rejected or if the status of the election cancellation request or amendment request is accepted) in case there is a settlement problem. The Election Status Advice Identification must be present.
// In the case of a failed settlement, the message contains details of the movement, such as account details, securities or cash information and the reason of the failure.
// This message should not be used to provide the confirmation of the settlement; the Agent Corporate Action Movement Confirmation message should be used instead.
type AgentCAMovementStatusAdviceV01 struct {

	// Identification assigned by the Sender to unambiguously identify the status advice.
	Identification *iso20022.DocumentIdentification8 `xml:"Id"`

	// Identification of the Agent Corporate Action Election Status Advice.
	AgentCAElectionStatusAdviceIdentification *iso20022.DocumentIdentification8 `xml:"AgtCAElctnStsAdvcId"`

	// Identification of the Agent Corporate Action Global Distribution Status Advice.
	AgentCAGlobalDistributionStatusAdviceIdentification *iso20022.DocumentIdentification8 `xml:"AgtCAGblDstrbtnStsAdvcId"`

	// Identification of the linked Agent CA Movement Instruction for which a status is given.
	AgentCAMovementInstructionIdentification *iso20022.DocumentIdentification8 `xml:"AgtCAMvmntInstrId"`

	// Identification of the linked Agent CA Movement Cancellation Request for which a status is given.
	AgentCAMovementCancellationRequestIdentification *iso20022.DocumentIdentification8 `xml:"AgtCAMvmntCxlReqId"`

	// General information about the corporate action event.
	CorporateActionGeneralInformation *iso20022.CorporateActionInformation1 `xml:"CorpActnGnlInf"`

	// Status of the movement instruction.
	// (Restored: this field is referenced by AddMovementStatusDetails but was
	// missing from the struct due to a garbled extraction.)
	MovementStatusDetails *iso20022.CorporateActionMovementStatus1Choice `xml:"MvmntStsDtls"`

	// Status of the movement cancellation request.
	MovementCancellationStatusDetails *iso20022.CorporateMovementStatus2 `xml:"MvmntCxlStsDtls"`
}
// AddIdentification allocates and returns the advice identification.
func (a *AgentCAMovementStatusAdviceV01) AddIdentification() *iso20022.DocumentIdentification8 {
	a.Identification = new(iso20022.DocumentIdentification8)
	return a.Identification
}

// AddAgentCAElectionStatusAdviceIdentification allocates and returns the linked election status advice id.
func (a *AgentCAMovementStatusAdviceV01) AddAgentCAElectionStatusAdviceIdentification() *iso20022.DocumentIdentification8 {
	a.AgentCAElectionStatusAdviceIdentification = new(iso20022.DocumentIdentification8)
	return a.AgentCAElectionStatusAdviceIdentification
}

// AddAgentCAGlobalDistributionStatusAdviceIdentification allocates and returns the linked global distribution status advice id.
func (a *AgentCAMovementStatusAdviceV01) AddAgentCAGlobalDistributionStatusAdviceIdentification() *iso20022.DocumentIdentification8 {
	a.AgentCAGlobalDistributionStatusAdviceIdentification = new(iso20022.DocumentIdentification8)
	return a.AgentCAGlobalDistributionStatusAdviceIdentification
}

// AddAgentCAMovementInstructionIdentification allocates and returns the linked movement instruction id.
func (a *AgentCAMovementStatusAdviceV01) AddAgentCAMovementInstructionIdentification() *iso20022.DocumentIdentification8 {
	a.AgentCAMovementInstructionIdentification = new(iso20022.DocumentIdentification8)
	return a.AgentCAMovementInstructionIdentification
}

// AddAgentCAMovementCancellationRequestIdentification allocates and returns the linked cancellation request id.
func (a *AgentCAMovementStatusAdviceV01) AddAgentCAMovementCancellationRequestIdentification() *iso20022.DocumentIdentification8 {
	a.AgentCAMovementCancellationRequestIdentification = new(iso20022.DocumentIdentification8)
	return a.AgentCAMovementCancellationRequestIdentification
}

// AddCorporateActionGeneralInformation allocates and returns the event information block.
func (a *AgentCAMovementStatusAdviceV01) AddCorporateActionGeneralInformation() *iso20022.CorporateActionInformation1 {
	a.CorporateActionGeneralInformation = new(iso20022.CorporateActionInformation1)
	return a.CorporateActionGeneralInformation
}

// AddMovementStatusDetails allocates and returns the movement status choice.
func (a *AgentCAMovementStatusAdviceV01) AddMovementStatusDetails() *iso20022.CorporateActionMovementStatus1Choice {
	a.MovementStatusDetails = new(iso20022.CorporateActionMovementStatus1Choice)
	return a.MovementStatusDetails
}
func (a *AgentCAMovementStatusAdviceV01) AddMovementCancellationStatusDetails() *iso20022.CorporateMovementStatus2 {
a.MovementCancellationStatusDetails = new(iso20022.CorporateMovementStatus2)
return a.MovementCancellationStatusDetails
} | MovementStatusDetails *iso20022.CorporateActionMovementStatus1Choice `xml:"MvmntStsDtls"` |
sandbox.go | // Copyright (c) 2019 Atrio, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package store
import (
"fmt"
"strings"
"sync"
"time"
"github.com/docker/distribution/uuid"
runtimeApi "k8s.io/kubernetes/pkg/kubelet/apis/cri/runtime/v1alpha2"
)
// SandboxStoreInterface abstracts storage of pod-sandbox metadata so callers
// can use either the in-memory or persistence-backed implementation.
type SandboxStoreInterface interface {
	// Add inserts (or overwrites) a sandbox entry.
	Add(sm *SandboxMetadata)
	// Update replaces the stored entry for sm.ID.
	Update(sm *SandboxMetadata)
	// Remove deletes the sandbox with the given ID.
	Remove(ID string)
	// RemoveContainer detaches containerId from the sandbox with the given ID.
	RemoveContainer(ID, containerId string)
	// Get returns the sandbox metadata, or an error when the ID is unknown.
	Get(ID string) (*SandboxMetadata, error)
	// List returns all stored sandboxes keyed by ID.
	List() map[string]*SandboxMetadata
	// ListK8s returns the sandboxes as CRI PodSandbox objects matching filter.
	ListK8s(filter *runtimeApi.PodSandboxFilter, locaCRI string) []*runtimeApi.PodSandbox
	// CreateSandboxMetadata builds, stores and returns metadata for a new sandbox.
	CreateSandboxMetadata(state runtimeApi.PodSandboxState, config runtimeApi.PodSandboxConfig, runtimeHandler string) *SandboxMetadata
}
// SandboxStorage is the default SandboxStoreInterface implementation: an
// in-memory pool guarded by an RWMutex, optionally mirrored to disk.
type SandboxStorage struct {
	// lock guards SandboxPool.
	lock sync.RWMutex
	// persist is nil when persistence is disabled.
	persist *SandboxPersist
	// SandboxPool maps sandbox ID to its metadata.
	SandboxPool map[string]*SandboxMetadata
}
// NewSandboxStorage builds a SandboxStoreInterface. When enablePersistence is
// true, the pool is backed by an on-disk store under resourceCachePath and
// pre-populated from it; otherwise it is purely in-memory.
func NewSandboxStorage(resourceCachePath string, enablePersistence bool) (SandboxStoreInterface, error) {
	ss := new(SandboxStorage)
	if enablePersistence {
		p, err := NewSandboxPersist(resourceCachePath)
		if err != nil {
			// Go convention: error strings are lower-case; wrap the cause
			// with %v instead of interpolating err.Error().
			return nil, fmt.Errorf("error opening sandbox persistence: %v", err)
		}
		ss.persist = p
		ss.SandboxPool = p.LoadAll()
	} else {
		ss.SandboxPool = make(map[string]*SandboxMetadata)
	}
	return ss, nil
}
// List returns the backing sandbox pool.
//
// Uses a read lock: this is a pure read, and taking the write lock here
// needlessly serialized concurrent readers. NOTE(review): the internal map is
// returned directly and the lock is released on return, so callers must treat
// the result as read-only.
func (ss *SandboxStorage) List() map[string]*SandboxMetadata {
	ss.lock.RLock()
	defer ss.lock.RUnlock()
	return ss.SandboxPool
}
// Add inserts sm into the pool (and persists it when persistence is enabled).
// An existing entry with the same ID is overwritten.
//
// Fix: the original built an error with fmt.Errorf on duplicate IDs and then
// discarded it (no return, no log) — dead code. The overwrite behavior is
// unchanged and now documented.
func (ss *SandboxStorage) Add(sm *SandboxMetadata) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	ss.SandboxPool[sm.ID] = sm
	if ss.persist != nil {
		ss.persist.Put(sm.ID, sm)
	}
}
// Update overwrites the stored entry for sm.ID and persists it when
// persistence is enabled.
func (ss *SandboxStorage) Update(sm *SandboxMetadata) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	ss.SandboxPool[sm.ID] = sm
	if ss.persist != nil {
		ss.persist.Put(sm.ID, sm)
	}
}
// Remove deletes the sandbox with the given ID from the pool and from the
// persistent store (when enabled). Unknown IDs are a no-op.
func (ss *SandboxStorage) Remove(ID string) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	delete(ss.SandboxPool, ID)
	if ss.persist != nil {
		ss.persist.Delete(ID)
	}
}
// RemoveContainer detaches containerId from the sandbox identified by ID and
// persists the updated metadata. Unknown sandbox IDs are ignored.
//
// Fix: the original spliced the slice inside a range loop, which skips the
// element following a removal (and misses duplicates), and wrote the pool
// entry twice. Filter in place instead.
func (ss *SandboxStorage) RemoveContainer(ID, containerId string) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	sm := ss.SandboxPool[ID]
	if sm == nil {
		return
	}
	kept := sm.Containers[:0]
	for _, c := range sm.Containers {
		if c != containerId {
			kept = append(kept, c)
		}
	}
	sm.Containers = kept
	ss.SandboxPool[sm.ID] = sm
	if ss.persist != nil {
		ss.persist.Put(sm.ID, sm)
	}
}
// Get returns the sandbox metadata for ID, or an error when absent.
// Uses a read lock since nothing is mutated. The error text is kept
// unchanged in case callers match on it.
func (ss *SandboxStorage) Get(ID string) (*SandboxMetadata, error) {
	ss.lock.RLock()
	defer ss.lock.RUnlock()
	value, ok := ss.SandboxPool[ID]
	if !ok {
		return nil, fmt.Errorf("Pod not found")
	}
	return value, nil
}
// ListK8s returns CRI PodSandbox entries matching filter, restricted to
// sandboxes whose runtime handler equals localCRI. A nil filter matches
// everything. Uses a read lock since the pool is only read.
func (ss *SandboxStorage) ListK8s(filter *runtimeApi.PodSandboxFilter, localCRI string) []*runtimeApi.PodSandbox {
	ss.lock.RLock()
	defer ss.lock.RUnlock()

	var result []*runtimeApi.PodSandbox
	filterPodID := ""
	filterPodUID := ""
	// Sentinel: real PodSandboxState values are small enum constants, so 100
	// means "no state filter requested".
	filterState := runtimeApi.PodSandboxState(100)

	if filter != nil {
		if filter.State != nil {
			filterState = filter.GetState().State
		}
		filterPodID = filter.Id
		if filter.LabelSelector != nil {
			// Only the pod-UID label is honoured; other selectors are ignored.
			for k, v := range filter.LabelSelector {
				if k == "io.kubernetes.pod.uid" {
					filterPodUID = v
				}
			}
		}
	}

	for _, sandbox := range ss.SandboxPool {
		if filterPodID != "" && sandbox.ID != filterPodID {
			continue
		}
		// NOTE(review): the UID filter also compares against sandbox.ID —
		// getPodID uses the pod UID as the sandbox ID when one is set.
		if filterPodUID != "" && sandbox.ID != filterPodUID {
			continue
		}
		if filterState < 100 && filterState != sandbox.State {
			continue
		}
		if sandbox.RuntimeHandler != localCRI {
			continue
		}
		result = append(result, ParseToK8sSandbox(sandbox))
	}
	return result
}
func getPodID(config runtimeApi.PodSandboxConfig) string |
// CheckIfExist looks up the sandbox whose ID is derived from config and
// returns it, or an error when it has not been registered yet.
func (ss *SandboxStorage) CheckIfExist(config runtimeApi.PodSandboxConfig) (*SandboxMetadata, error) {
	return ss.Get(getPodID(config))
}
// CreateSandboxMetadata builds a SandboxMetadata record from a CRI sandbox
// config, registers it in the pool via Add, and returns a pointer to it.
func (ss *SandboxStorage) CreateSandboxMetadata(state runtimeApi.PodSandboxState, config runtimeApi.PodSandboxConfig,
	runtimeHandler string) *SandboxMetadata {
	ID := getPodID(config)
	// Creation timestamp in nanoseconds, the unit the CRI API expects.
	createdAt := int64(time.Now().UnixNano())
	cgroup := ""
	if config.Linux != nil {
		cgroup = config.Linux.CgroupParent
	}
	sm := SandboxMetadata{ID: ID, State: state, Config: config, CreatedAt: createdAt, CgroupsParent: cgroup,
		RuntimeHandler: runtimeHandler}
	ss.Add(&sm)
	return &sm
}
// ParseToK8sSandbox converts sandbox metadata into a CRI pod sandbox.
func ParseToK8sSandbox(sandbox *SandboxMetadata) *runtimeApi.PodSandbox {
	config := sandbox.Config
	result := &runtimeApi.PodSandbox{
		Id:          sandbox.ID,
		Metadata:    config.GetMetadata(),
		State:       sandbox.State,
		CreatedAt:   sandbox.CreatedAt,
		Labels:      config.GetLabels(),
		Annotations: config.GetAnnotations(),
	}
	return result
}
// ParseToK8sSandboxStatus converts sandbox metadata into a CRI pod sandbox
// status, attaching the supplied pod IP and the sandbox's namespace options.
func ParseToK8sSandboxStatus(sandbox *SandboxMetadata, ip string) *runtimeApi.PodSandboxStatus {
	config := sandbox.Config
	// The Get* chain is nil-safe: protobuf getters return zero values on nil.
	nsOpts := config.GetLinux().GetSecurityContext().GetNamespaceOptions()
	return &runtimeApi.PodSandboxStatus{
		Id:        sandbox.ID,
		Metadata:  config.GetMetadata(),
		State:     sandbox.State,
		CreatedAt: sandbox.CreatedAt,
		Network:   &runtimeApi.PodSandboxNetworkStatus{Ip: ip},
		Linux: &runtimeApi.LinuxPodSandboxStatus{
			Namespaces: &runtimeApi.Namespace{
				Options: nsOpts,
			},
		},
		Labels:      config.GetLabels(),
		Annotations: config.GetAnnotations(),
	}
}
| {
var ID string
if len(config.GetMetadata().Uid) > 0 {
ID = config.GetMetadata().Uid
} else {
ID = strings.Replace(uuid.Generate().String(), "-", "", -1)
}
return ID
} |
router.go | package niuchayoufutu
import (
logintypes "fgame/fgame/account/login/types"
"fgame/fgame/charge_server/charge"
"fgame/fgame/charge_server/remote"
"fgame/fgame/sdk"
sdksdk "fgame/fgame/sdk/sdk"
"fmt"
"net/http"
"strconv"
log "github.com/Sirupsen/logrus"
"github.com/gorilla/mux"
)
const (
niuchayoufutuPath = "/niuchayoufutu"
)
// Router mounts the niuchayoufutu charge-callback endpoints under
// /niuchayoufutu, with separate handlers for the iOS and Android callbacks.
func Router(r *mux.Router) {
	sr := r.PathPrefix(niuchayoufutuPath).Subrouter()
	sr.Path("/ios").Handler(http.HandlerFunc(handleNiuChaYouFuTuIOS))
	sr.Path("/android").Handler(http.HandlerFunc(handleNiuChaYouFuTuAndroid))
}
func | (rw http.ResponseWriter, req *http.Request) {
query := req.URL.Query()
userIdStr := query.Get("userid")
serverStr := query.Get("server")
moneyStr := query.Get("money")
pay := query.Get("pay")
order := query.Get("order")
timeStr := query.Get("time")
sign := query.Get("sign")
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"userId": userIdStr,
"server": serverStr,
"money": moneyStr,
"pay": pay,
"order": order,
"timeStr": timeStr,
"sign": sign,
}).Info("charge:牛叉游-浮屠幻境安卓充值请求")
serverIdInt, err := strconv.ParseInt(serverStr, 10, 64)
if err != nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"userId": userIdStr,
"server": serverStr,
"money": moneyStr,
"pay": pay,
"order": order,
"timeStr": timeStr,
"sign": sign,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,解析错误")
rw.WriteHeader(http.StatusBadRequest)
return
}
server := int32(serverIdInt)
moneyFloat, err := strconv.ParseFloat(moneyStr, 64)
if err != nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"userId": userIdStr,
"server": serverStr,
"money": moneyStr,
"pay": pay,
"order": order,
"timeStr": timeStr,
"sign": sign,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,解析错误")
rw.WriteHeader(http.StatusBadRequest)
return
}
money := int32(moneyFloat)
receiveTime, err := strconv.ParseInt(timeStr, 10, 64)
if err != nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"userId": userIdStr,
"server": serverStr,
"money": moneyStr,
"pay": pay,
"order": order,
"timeStr": timeStr,
"sign": sign,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,解析错误")
rw.WriteHeader(http.StatusBadRequest)
return
}
sdkType := logintypes.SDKTypeNiuChaYouFuTu
sdkConfig := sdk.GetSdkService().GetSdkConfig(sdkType)
if sdkConfig == nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,sdk配置为空")
rw.WriteHeader(http.StatusInternalServerError)
return
}
hengGeWanConfig, ok := sdkConfig.(*sdksdk.NiuChaYouFuTuConfig)
if !ok {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,sdk配置强制转换失败")
rw.WriteHeader(http.StatusInternalServerError)
return
}
devicePlatformType := logintypes.DevicePlatformTypeAndroid
chargeKey := hengGeWanConfig.GetChargeKey(devicePlatformType)
//TODO 验证签名
getSign := niuChaYouFuTuSign(chargeKey, userIdStr, server, moneyStr, pay, order, receiveTime)
if sign != getSign {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"orderId": order,
"userId": userIdStr,
"server": server,
"money": money,
"pay": pay,
"receiveTime": receiveTime,
"sign": sign,
"getSign": getSign,
}).Warn("charge:牛叉游-浮屠幻境安卓充值请求,签名错误")
rw.WriteHeader(http.StatusBadRequest)
return
}
ctx := req.Context()
chargeService := charge.ChargeServiceInContext(ctx)
obj, repeat, err := chargeService.OrderPay(order, pay, logintypes.SDKTypeNiuChaYouFuTu, money, userIdStr, receiveTime)
if err != nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"orderId": order,
"userId": userIdStr,
"server": server,
"money": money,
"pay": pay,
"receiveTime": receiveTime,
"sign": sign,
"error": err,
}).Error("charge:牛叉游-浮屠幻境安卓请求,错误")
rw.WriteHeader(http.StatusBadRequest)
return
}
if obj == nil {
log.WithFields(
log.Fields{
"ip": req.RemoteAddr,
"orderId": order,
"userId": userIdStr,
"server": server,
"money": money,
"pay": pay,
"receiveTime": receiveTime,
"sign": sign,
"error": err,
}).Warn("charge:牛叉游-浮屠幻境安卓请求,订单不存在")
rw.WriteHeader(http.StatusBadRequest)
return
}
//放入回调队列中
if !repeat {
//放入回调队列中
remoteService := remote.RemoteServiceInContext(ctx)
flag := remoteService.Charge(obj)
if !flag {
panic(fmt.Errorf("charge:添加到回调队列应该成功"))
}
}
result := "success"
rw.WriteHeader(http.StatusOK)
rw.Write([]byte(result))
log.WithFields(
log.Fields{
"orderId": order,
"userId": userIdStr,
"server": server,
"money": money,
"pay": pay,
"receiveTime": receiveTime,
"sign": sign,
}).Info("charge:牛叉游-浮屠幻境安卓充值请求")
}
| handleNiuChaYouFuTuAndroid |
update_single_poll.go | package requests
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/google/go-querystring/query"
"github.com/atomicjolt/canvasapi"
)
// UpdateSinglePoll Update an existing poll belonging to the current user
// https://canvas.instructure.com/doc/api/polls.html
//
// Path Parameters:
// # Path.ID (Required) ID
//
// Form Parameters:
// # Form.Polls.Question (Required) The title of the poll.
// # Form.Polls.Description (Optional) A brief description or instructions for the poll.
//
// UpdateSinglePoll carries the path and form parameters for the Canvas
// "update single poll" call; see the endpoint comment above for details.
type UpdateSinglePoll struct {
	Path struct {
		ID string `json:"id" url:"id,omitempty"` // (Required)
	} `json:"path"`

	Form struct {
		Polls struct {
			Question    []string `json:"question" url:"question,omitempty"`       // (Required)
			Description []string `json:"description" url:"description,omitempty"` // (Optional)
		} `json:"polls" url:"polls,omitempty"`
	} `json:"form"`
}
// GetMethod returns the HTTP verb for this endpoint; updates are always PUT.
func (t *UpdateSinglePoll) GetMethod() string {
	return "PUT"
}
// GetURLPath renders the endpoint path with the poll id substituted in.
func (t *UpdateSinglePoll) GetURLPath() string {
	return strings.ReplaceAll("polls/{id}", "{id}", fmt.Sprintf("%v", t.Path.ID))
}
// GetQuery returns the query string; this endpoint sends no query parameters.
func (t *UpdateSinglePoll) GetQuery() (string, error) {
	return "", nil
}
// GetBody encodes the form parameters as URL values for the request body.
func (t *UpdateSinglePoll) GetBody() (url.Values, error) {
	return query.Values(t.Form)
}
// GetJSON marshals the form parameters to JSON for requests that submit a
// JSON body. The marshal error, if any, is propagated to the caller.
func (t *UpdateSinglePoll) GetJSON() ([]byte, error) {
	j, err := json.Marshal(t.Form)
	if err != nil {
		// Previously this returned (nil, nil), silently swallowing the
		// failure and handing callers a nil payload with no error.
		return nil, err
	}
	return j, nil
}
func (t *UpdateSinglePoll) HasErrors() error {
errs := []string{}
if t.Path.ID == "" {
errs = append(errs, "'Path.ID' is required")
}
if t.Form.Polls.Question == nil {
errs = append(errs, "'Form.Polls.Question' is required")
}
if len(errs) > 0 |
return nil
}
// Do executes the request against the given Canvas client, discarding the
// response body and returning only the transport/API error, if any.
func (t *UpdateSinglePoll) Do(c *canvasapi.Canvas) error {
	_, err := c.SendRequest(t)
	return err
}
| {
return fmt.Errorf(strings.Join(errs, ", "))
} |
generator.py | from ..apibits import *
from ..endpoints import GeneratorsEndpoint
from ..endpoints import GeneratorRowsEndpoint
class Generator(ApiResource):
@classmethod
def all(cls, params={}, headers={}):
res = cls.default_client().generators().all(params, headers)
return res
    @classmethod
    def retrieve(cls, generator_id, params={}, headers={}):
        # Fetch a single generator by id through the default client.
        res = cls.default_client().generators().retrieve(generator_id, params, headers)
        return res
    @classmethod
    def update(cls, generator_id, params={}, headers={}):
        # Update the generator identified by generator_id with the given params.
        res = cls.default_client().generators().update(generator_id, params, headers)
        return res
    @classmethod
    def create(cls, params={}, headers={}):
        # Create a new generator from params; returns the API response.
        res = cls.default_client().generators().create(params, headers)
        return res
def refresh(self, params={}, headers={}):
res = self.get_client().generators().retrieve(self.id, params, headers)
return self.refresh_from(res.json, res.api_method, res.client)
    def delete(self, params={}, headers={}):
        # Delete this generator by id; returns the raw API response.
        res = self.get_client().generators().delete(self.id, params, headers)
        return res
def | (self):
from ..endpoints import GeneratorRowsEndpoint
return GeneratorRowsEndpoint(self.client, self)
# Everything below here is used behind the scenes.
    def __init__(self, *args, **kwargs):
        # Delegate construction to ApiResource, then register this resource
        # under the "generator" type key.
        # NOTE(review): registration runs on every instantiation, not once per
        # class — confirm register_api_subclass is idempotent.
        super(Generator, self).__init__(*args, **kwargs)
        ApiResource.register_api_subclass(self, "generator")
_api_attributes = {
"columns" : {},
"created_at" : {},
"data" : {},
"description" : {},
"generator_type" : {},
"id" : {},
"name" : {},
"row_count" : {},
}
| rows |
main.py | from os import mkdir
from bottle import route, get, request, static_file, run
from settings import PORT, DIR_CACHE, DIR_GRAPH
from crypkograph import render_graph
@route('/')
@route('/index.html')
def serve_html():
    # Serve the single-page UI from the project root.
    return static_file('index.html', '.')
@route('/static/<filename:path>')
def serve_static(filename):
    # Serve bundled assets (css/js/images) from the static/ directory.
    return static_file(filename, 'static')
@route('/generated/<filename:re:.*\.gv\.(png|pdf)>')
def serve_generated(filename):
    """Serve a rendered graph file: PNGs inline, PDFs as downloads.

    The route regex guarantees the extension is png or pdf.
    """
    extension = filename.rsplit('.', 1)[-1]
    if extension == 'png':
        return static_file(filename, DIR_GRAPH, mimetype='image/png')
    if extension == 'pdf':
        return static_file(filename, DIR_GRAPH, download=filename)
# /api/render?owner_addr={owner_addr}
@get('/api/render')
def render():
    # Render the crypko graph for the given owner address into DIR_GRAPH.
    # NOTE(review): bottle's query dict appears to yield '' for absent
    # params, so the falsy check below would also cover a missing
    # owner_addr — confirm. A bare Exception surfaces as a generic 500;
    # a 400 with a message would be clearer.
    owner_addr = request.query['owner_addr']
    if not owner_addr:
        raise Exception()
    render_graph(owner_addr, subdir=DIR_GRAPH)
if __name__ == '__main__':
    # Ensure the cache directory exists before serving; an existing
    # directory is fine.
    # NOTE(review): DIR_GRAPH is not created here — confirm render_graph
    # (or deployment) creates it.
    try:
        mkdir(DIR_CACHE)
    except FileExistsError:
        pass
    run(host='0.0.0.0', port=PORT)
|
io.py | """
The data-file handling functions
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.utilities.io_handler import \
BaseIOHandler
import numpy as np
from yt.funcs import mylog, defaultdict
from .data_structures import chk23
float_size = {"float":np.dtype(">f4").itemsize,
"double":np.dtype(">f8").itemsize}
axis_list = ["_x","_y","_z"]
class IOHandlerAthena(BaseIOHandler):
_dataset_type = "athena"
_offset_string = 'data:offsets=0'
_data_string = 'data:datatype=0'
_read_table_offset = None
def _field_dict(self,fhandle):
keys = fhandle['field_types'].keys()
val = fhandle['field_types'].keys()
return dict(zip(keys,val))
    def _read_field_names(self, grid):
        # Intentionally a no-op for this handler; implicitly returns None.
        pass
def _read_chunk_data(self,chunk,fields):
|
def _read_data_slice(self, grid, field, axis, coord):
sl = [slice(None), slice(None), slice(None)]
sl[axis] = slice(coord, coord + 1)
if grid.ds.field_ordering == 1:
sl.reverse()
return self._read_data_set(grid, field)[sl]
    def _read_fluid_selection(self, chunks, selector, fields, size):
        """Read `fields` for all grids in `chunks`, filtered by `selector`.

        Returns a dict mapping each (ftype, fname) pair to a flat float64
        array of `size` selected cells. Only the "athena" field type is
        supported; anything else raises NotImplementedError.
        """
        chunks = list(chunks)
        if any((ftype != "athena" for ftype, fname in fields)):
            raise NotImplementedError
        rv = {}
        for field in fields:
            rv[field] = np.empty(size, dtype="float64")
        ng = sum(len(c.objs) for c in chunks)
        mylog.debug("Reading %s cells of %s fields in %s grids",
                    size, [f2 for f1, f2 in fields], ng)
        # ind is the running write offset into each output array.
        ind = 0
        for chunk in chunks:
            data = self._read_chunk_data(chunk, fields)
            for g in chunk.objs:
                for field in fields:
                    ftype, fname = field
                    ds = data[g.id].pop(field)
                    # g.select copies the selected cells into rv[field]
                    # starting at ind and returns how many were written.
                    nd = g.select(selector, ds, rv[field], ind)  # caches
                    ind += nd
                # Drop each grid's buffers once consumed to bound peak memory.
                data.pop(g.id)
        return rv
def get_read_table_offset(f):
    """Advance `f` past the CELL_DATA/POINT_DATA marker line (plus one more
    line) and return the resulting file offset, where the data table begins.

    Raises:
        IOError: if EOF is reached before either marker is found. The
            original looped forever on truncated files, since readline()
            returns '' at EOF and '' never matches.
    """
    line = f.readline()
    while True:
        if not line:
            raise IOError("CELL_DATA/POINT_DATA marker not found before EOF")
        splitup = line.strip().split()
        chkc = chk23('CELL_DATA')
        chkp = chk23('POINT_DATA')
        if chkc in splitup or chkp in splitup:
            # Skip the line following the marker; the table starts after it.
            f.readline()
            read_table_offset = f.tell()
            break
        line = f.readline()
    return read_table_offset
| data = {}
if len(chunk.objs) == 0: return data
for grid in chunk.objs:
if grid.filename is None:
continue
f = open(grid.filename, "rb")
data[grid.id] = {}
grid_dims = grid.ActiveDimensions
read_dims = grid.read_dims.astype("int64")
grid_ncells = np.prod(read_dims)
grid0_ncells = np.prod(grid.index.grids[0].read_dims)
read_table_offset = get_read_table_offset(f)
for field in fields:
ftype, offsetr, dtype = grid.index._field_map[field]
if grid_ncells != grid0_ncells:
offset = offsetr + ((grid_ncells-grid0_ncells) * (offsetr//grid0_ncells))
if grid_ncells == grid0_ncells:
offset = offsetr
offset = int(offset) # Casting to be certain.
file_offset = grid.file_offset[2]*read_dims[0]*read_dims[1]*float_size[dtype]
xread = slice(grid.file_offset[0],grid.file_offset[0]+grid_dims[0])
yread = slice(grid.file_offset[1],grid.file_offset[1]+grid_dims[1])
f.seek(read_table_offset+offset+file_offset)
if dtype == 'float':
dt = '>f4'
elif dtype == 'double':
dt = '>f8'
if ftype == 'scalar':
f.seek(read_table_offset+offset+file_offset)
v = np.fromfile(f, dtype=dt,
count=grid_ncells).reshape(read_dims,order='F')
if ftype == 'vector':
vec_offset = axis_list.index(field[-1][-2:])
f.seek(read_table_offset+offset+3*file_offset)
v = np.fromfile(f, dtype=dt, count=3*grid_ncells)
v = v[vec_offset::3].reshape(read_dims,order='F')
if grid.ds.field_ordering == 1:
data[grid.id][field] = v[xread,yread,:].T.astype("float64")
else:
data[grid.id][field] = v[xread,yread,:].astype("float64")
f.close()
return data |
vm.rs | #[macro_use]
extern crate pretty_assertions;
extern crate gluon_completion as completion;
#[macro_use]
mod support;
use crate::support::*;
use gluon::{
base::{pos::BytePos, source::Source, types::Type},
vm,
vm::{
api::{FunctionRef, Hole, OpaqueValue, ValueRef, IO},
channel::Sender,
thread::Thread,
},
Error, ThreadExt,
};
test_expr! { pass_function_value,
r"
let lazy: () -> Int = \x -> 42 in
let test: (() -> Int) -> Int = \f -> f () #Int+ 10
in test lazy
",
52i32
}
test_expr! { lambda,
r"
let y = 100 in
let f = \x -> y #Int+ x #Int+ 1
in f(22)
",
123i32
}
test_expr! { add_operator,
r"
#[infix(left, 6)]
let (+) = \x y -> x #Int+ y in 1 + 2 + 3
",
6i32
}
test_expr! { divide_int,
r" 120 #Int/ 4
",
30i32
}
test_expr! { divide_float,
r" 120.0 #Float/ 4.0
",
30.0f64
}
test_expr! { infix_propagates,
r"
#[infix(left, 6)]
let (+) = \x y -> x #Int+ y
let { (+) = (++) } = { (+) }
1 ++ 2 ++ 3
",
6i32
}
#[test]
fn record() {
let _ = ::env_logger::try_init();
let text = r"
{ x = 0, y = 1.0, z = {} }
";
let vm = make_vm();
let value = run_expr::<OpaqueValue<&Thread, Hole>>(&vm, text);
match value.get_ref() {
ValueRef::Data(data) => {
assert_eq!(data.get(0).unwrap(), ValueRef::Int(0));
assert_eq!(data.get(1).unwrap(), ValueRef::Float(1.0));
match &data.get(2).unwrap() {
ValueRef::Data(data) if data.len() == 0 => (),
_ => panic!(),
}
}
_ => panic!(),
}
}
#[test]
fn add_record() {
let _ = ::env_logger::try_init();
let text = r"
type T = { x: Int, y: Int } in
let add = \l r -> { x = l.x #Int+ r.x, y = l.y #Int+ r.y } in
add { x = 0, y = 1 } { x = 1, y = 1 }
";
let vm = make_vm();
let value = run_expr::<OpaqueValue<&Thread, Hole>>(&vm, text);
match value.get_ref() {
ValueRef::Data(data) => {
assert_eq!(data.get(0).unwrap(), ValueRef::Int(1));
assert_eq!(data.get(1).unwrap(), ValueRef::Int(2));
}
_ => panic!(),
}
}
#[test]
fn script() {
let _ = ::env_logger::try_init();
let text = r"
type T = { x: Int, y: Int } in
let add l r = { x = l.x #Int+ r.x, y = l.y #Int+ r.y } in
let sub l r = { x = l.x #Int- r.x, y = l.y #Int- r.y } in
{ T, add, sub }
";
let vm = make_vm();
load_script(&vm, "vec", text).unwrap_or_else(|err| panic!("{}", err));
let script = r#"
let { T, add, sub } = import! vec
in add { x = 10, y = 5 } { x = 1, y = 2 }
"#;
let value = run_expr::<OpaqueValue<&Thread, Hole>>(&vm, script);
match value.get_ref() {
ValueRef::Data(data) => {
assert_eq!(data.get(0), Some(ValueRef::Int(11)));
assert_eq!(data.get(1), Some(ValueRef::Int(7)));
}
_ => panic!(),
}
}
#[test]
fn adt() {
let _ = ::env_logger::try_init();
let text = r"
type Option a = | None | Some a
in Some 1
";
let vm = make_vm();
let value = run_expr::<OpaqueValue<&Thread, Hole>>(&vm, text);
match value.get_ref() {
ValueRef::Data(ref data) if data.tag() == 1 && data.get(0) == Some(ValueRef::Int(1)) => (),
_ => panic!("{:?}", value),
}
}
test_expr! { recursive_function,
r"
rec let fib x =
if x #Int< 3
then 1
else fib (x #Int- 1) #Int+ fib (x #Int- 2)
in fib 7
",
13i32
}
test_expr! { mutually_recursive_function,
r"
rec
let f x = if x #Int< 0
then x
else g x
let g x = f (x #Int- 1)
in g 3
",
-1
}
test_expr! { no_capture_self_function,
r"
let x = 2 in
let f y = x
in f 4
",
2i32
}
test_expr! { primitive_char_eq,
r"
'a' #Char== 'a'
",
true
}
test_expr! { primitive_char_lt,
r"
'a' #Char< 'a'
",
false
}
test_expr! { primitive_byte_arithmetic,
r"
let x = 100b #Byte+ 13b
x #Byte* 2b #Byte/ 3b
",
75u8
}
test_expr! { primitive_byte_eq,
r"
100b #Byte== 100b
",
true
}
test_expr! { primitive_byte_lt,
r"
100b #Byte< 100b
",
false
}
test_expr! { prelude overloaded_compare_int,
r"
99 < 100
",
true
}
test_expr! { prelude overloaded_compare_float,
r"
99.0 < 100.0
",
true
}
test_expr! { implicit_call_without_type_in_scope,
r"
let int @ { ? } = import! std.int
let prelude @ { (==) } = import! std.prelude
99 == 100
",
false
}
test_expr! { partial_application1,
r"
let f x y = x #Int+ y in
let g = f 10
in g 2 #Int+ g 3
",
25i32
}
test_expr! { partial_application2,
r"
let f x y z = x #Int+ y #Int+ z in
let g = f 10 in
let h = g 20
in h 2 #Int+ g 10 3
",
55i32
}
test_expr! { to_many_args_application,
r"
let f x = \y -> x #Int+ y in
let g = f 20
in f 10 2 #Int+ g 3
",
35i32
}
test_expr! { to_many_args_partial_application_twice,
r"
let f x = \y z -> x #Int+ y #Int+ z in
let g = f 20 5
in f 10 2 1 #Int+ g 2
",
40i32
}
test_expr! { excess_arguments_larger_than_stack,
r#"
let f a b c = c
(\x -> f) 1 2 3 4
"#,
4i32
}
test_expr! { char,
r#"
'a'
"#,
'a'
}
test_expr! { prelude handle_fields_being_ignored_in_optimize,
r#"
let large_record = { x = 1, y = 2 }
large_record.x
"#,
1
}
test_expr! { any zero_argument_variant_is_int,
r#"
type Test = | A Int | B
B
"#,
ValueRef::tag(1)
}
test_expr! { any marshalled_option_none_is_int,
r#"
let string_prim = import! std.string.prim
string_prim.find "a" "b"
"#,
ValueRef::tag(0)
}
test_expr! { any marshalled_ordering_is_int,
r#"
let { string_compare } = import! std.prim
string_compare "a" "b"
"#,
ValueRef::tag(0)
}
test_expr! { discriminant_value,
r#"
type Variant a = | A | B Int | C String
let prim = import! std.prim
prim.discriminant_value (C "")
"#,
2
}
test_expr! { unit_expr,
r#"
let x = ()
let y = 1
in y
"#,
1i32
}
test_expr! { return_unit,
"()",
()
}
test_expr! { let_not_in_tail_position,
r#"
1 #Int+ (let x = 2 in x)
"#,
3i32
}
test_expr! { field_access_not_in_tail_position,
r#"
let id x = x
in (id { x = 1 }).x
"#,
1i32
}
test_expr! { module_function,
r#"
let string_prim = import! std.string.prim
let x = string_prim.len "test" in x
"#,
4i32
}
test_expr! { prelude true_branch_not_affected_by_false_branch,
r#"
let { Bool } = import! std.bool
if True then
let x = 1
x
else
0
"#,
1i32
}
test_expr! { prelude and_operator_stack,
r#"
let { Bool } = import! std.bool
let b = True && True
let b2 = False
b
"#,
true
}
test_expr! { prelude or_operator_stack,
r#"
let { Bool } = import! std.bool
let b = False || True
let b2 = False
b
"#,
true
}
test_expr! { overload_resolution_with_record_pattern,
r#"
let f =
\x g ->
let { x } = g x
x
f 0 (\r -> { x = r #Int+ 1 })
"#,
1i32
}
test_expr! { record_base_duplicate_fields,
r#"
{ x = "" .. { x = 1 } }.x
"#,
"".to_string()
}
test_expr! { record_base_duplicate_fields2,
r#"
{ x = "" .. { x = 1, y = 2 } }.y
"#,
2
}
test_expr! { record_base_duplicate_fields_different_order,
r#"
{ z = 3.0, y = "y", x = "x" .. { x = 1, y = 2 } }.x
"#,
String::from("x")
}
test_expr! { load_simple,
r#"
let _ = import! std.foldable
()
"#,
()
}
test_expr! { load_option,
r#"
let _ = import! std.option
()
"#,
()
}
test_expr! { load_applicative,
r#"
let _ = import! std.applicative
()
"#,
()
}
test_expr! { prelude do_expression_option_some,
r#"
let { monad = { flat_map } } = import! std.option
do x = Some 1
Some (x + 2)
"#,
Some(3)
}
test_expr! { prelude do_expression_option_none,
r#"
let { monad = { flat_map } } = import! std.option
do x = None
Some 1
"#,
None::<i32>
}
test_expr! { function_with_implicit_argument_from_record,
r#"
let f ?t x: [Int] -> () -> Int = t
let x @ { ? } =
#[implicit]
let test = 1
{ test }
f ()
"#,
1
}
test_expr! { prelude not_equal_operator,
r#"
1 /= 2
"#,
true
}
test_expr! { implicit_argument_selection1,
r#"
#[implicit]
type Test = | Test ()
let f y: [a] -> a -> () = ()
let i = Test ()
f (Test ())
"#,
()
}
test_expr! { prelude implicit_argument_selection2,
r#"
let string = import! std.string
let { append } = string.semigroup
#[infix(left, 6)]
let (++) = append
let equality l r : [Eq a] -> a -> a -> String =
if l == r then " == " else " != "
let cmp l r : [Show a] -> [Eq a] -> a -> a -> String =
(show l) ++ (equality l r) ++ (show r)
cmp 5 6
"#,
String::from("5 != 6")
}
#[test]
fn rename_types_after_binding() {
let _ = ::env_logger::try_init();
let text = r#"
let list = import! std.list
let { List } = list
let eq_list: Eq (List Int) = list.eq
in Cons 1 Nil == Nil
"#;
let vm = make_vm();
let (result, _) = vm
.run_expr::<bool>("<top>", text)
.unwrap_or_else(|err| panic!("{}", err));
let expected = false;
assert_eq!(result, expected);
}
#[test]
fn record_splat_ice() {
let _ = ::env_logger::try_init();
let text = r#"
let large_record = { x = 1 }
{
field = 123,
..
large_record
}
"#;
let vm = make_vm();
vm.get_database_mut().implicit_prelude(false);
let result = vm.run_expr::<OpaqueValue<&Thread, Hole>>("example", text);
assert!(result.is_ok(), "{}", result.unwrap_err());
}
#[test]
fn test_implicit_prelude() {
let _ = ::env_logger::try_init();
let text = r#"1.0 + 3.0 - 2.0"#;
let vm = make_vm();
vm.run_expr::<OpaqueValue<&Thread, Hole>>("<top>", text)
.unwrap_or_else(|err| panic!("{}", err));
}
#[test]
fn access_field_through_vm() {
    let _ = ::env_logger::try_init();
    // Load a record as module "test", then verify that nested fields are
    // reachable from the host via dotted global paths.
    let text = r#" { x = 0, inner = { y = 1.0 } } "#;
    let vm = make_vm();
    load_script(&vm, "test", text).unwrap_or_else(|err| panic!("{}", err));

    let test_x = vm.get_global("test.x");
    assert_eq!(test_x, Ok(0));

    let test_inner_y = vm.get_global("test.inner.y");
    assert_eq!(test_inner_y, Ok(1.0));
}
#[test]
fn access_operator_without_parentheses() {
let _ = ::env_logger::try_init();
let vm = make_vm();
vm.run_expr::<OpaqueValue<&Thread, Hole>>("example", r#" import! std.prelude "#)
.unwrap();
let result: Result<FunctionRef<fn(i32, i32) -> i32>, _> =
vm.get_global("std.prelude.num_Int.+");
assert!(result.is_err());
}
#[test]
fn get_binding_with_alias_type() {
let _ = ::env_logger::try_init();
let text = r#"
type Test = Int
let x: Test = 0
{ Test, x }
"#;
let vm = make_vm();
load_script(&vm, "test", text).unwrap_or_else(|err| panic!("{}", err));
let test_x = vm.get_global("test.x");
assert_eq!(test_x, Ok(0));
}
#[test]
fn get_binding_with_generic_params() {
let _ = ::env_logger::try_init();
let vm = make_vm();
run_expr::<OpaqueValue<&Thread, Hole>>(&vm, r#" import! std.function "#);
let mut id: FunctionRef<fn(String) -> String> = vm
.get_global("std.function.id")
.unwrap_or_else(|err| panic!("{}", err));
assert_eq!(id.call("test".to_string()), Ok("test".to_string()));
}
#[test]
fn test_prelude() {
let _ = ::env_logger::try_init();
let vm = make_vm();
run_expr::<OpaqueValue<&Thread, Hole>>(&vm, r#" import! std.prelude "#);
}
#[test]
fn access_types_by_path() {
let _ = ::env_logger::try_init();
let vm = make_vm();
run_expr::<OpaqueValue<&Thread, Hole>>(&vm, r#" import! std.option "#);
run_expr::<OpaqueValue<&Thread, Hole>>(&vm, r#" import! std.result "#);
assert!(vm.find_type_info("std.option.Option").is_ok());
assert!(vm.find_type_info("std.result.Result").is_ok());
let text = r#" type T a = | T a in { x = 0, inner = { T, y = 1.0 } } "#;
load_script(&vm, "test", text).unwrap_or_else(|err| panic!("{}", err));
let result = vm.find_type_info("test.inner.T");
assert!(result.is_ok(), "{}", result.unwrap_err());
}
#[test]
fn opaque_value_type_mismatch() {
let _ = ::env_logger::try_init();
let vm = make_vm();
vm.get_database_mut().implicit_prelude(false);
vm.run_expr::<()>("<top>", "let _ = import! std.channel in ()")
.unwrap();
let expr = r#"
let { sender, receiver } = channel 0
send sender 1
sender
"#;
let result = vm.run_expr::<OpaqueValue<&Thread, Sender<f64>>>("<top>", expr);
match result {
Err(Error::Typecheck(..)) => (),
Err(err) => panic!("Unexpected error `{}`", err),
Ok(_) => panic!("Expected an error"),
}
}
#[test]
fn invalid_string_slice_dont_panic() {
let _ = ::env_logger::try_init();
let text = r#"
let string = import! std.string
let s = "åäö"
string.slice s 1 (string.len s)
"#;
let vm = make_vm();
let result = vm.run_expr::<String>("<top>", text);
match result {
Err(Error::VM(..)) => (),
Err(err) => panic!("Unexpected error `{}`", err),
Ok(_) => panic!("Expected an error"),
}
}
#[test]
fn arithmetic_over_flow_dont_panic() {
let _ = ::env_logger::try_init();
let text = r#"
let int = import! std.int
int.max_value * 2
"#;
let vm = make_vm();
let result = vm.run_expr::<i32>("<top>", text);
match result {
Err(Error::VM(vm::Error::Message(ref err))) if err.contains("overflow") => (),
Err(err) => panic!("Unexpected error `{}`", err),
Ok(_) => panic!("Expected an error"),
}
}
#[test]
fn partially_applied_constructor_is_lambda() {
let _ = ::env_logger::try_init();
let vm = make_vm();
let result = vm.run_expr::<FunctionRef<fn(i32) -> Option<i32>>>(
"test",
r#"let { Option } = import! std.option in Some"#,
);
assert!(result.is_ok(), "{}", result.err().unwrap());
assert_eq!(result.unwrap().0.call(123), Ok(Some(123)));
}
#[test]
fn stacktrace() {
use gluon::vm::stack::StacktraceFrame;
let _ = ::env_logger::try_init();
let text = r#"
let end _ = 1 + error "test"
rec
let f x =
if x == 0 then
3 + end ()
else
1 + g (x - 1)
let g x = 1 + f (x / 2)
in
g 10
"#;
let vm = make_vm();
vm.get_database_mut().set_optimize(false);
let result = vm.run_expr::<i32>("<top>", text);
match result {
Err(Error::VM(vm::Error::Panic(_, Some(stacktrace)))) => {
let g = stacktrace.frames[0].as_ref().unwrap().name.clone();
assert_eq!(g.declared_name(), "g");
let f = stacktrace.frames[1].as_ref().unwrap().name.clone();
let end = stacktrace.frames[6].as_ref().unwrap().name.clone();
let error = stacktrace.frames[7].as_ref().unwrap().name.clone();
assert_eq!(
stacktrace.frames,
vec![
// Removed due to being a tail call
// Some(StacktraceFrame { name: f.clone(), line: 9 }),
Some(StacktraceFrame {
name: g.clone(),
line: Some(8.into()),
}),
Some(StacktraceFrame {
name: f.clone(),
line: Some(7.into()),
}),
Some(StacktraceFrame {
name: g.clone(),
line: Some(8.into()),
}),
Some(StacktraceFrame {
name: f.clone(),
line: Some(7.into()),
}),
Some(StacktraceFrame {
name: g.clone(),
line: Some(8.into()),
}),
Some(StacktraceFrame {
name: f.clone(),
line: Some(5.into()),
}),
Some(StacktraceFrame {
name: end.clone(),
line: Some(1.into()),
}),
Some(StacktraceFrame {
name: error.clone(),
line: None,
}),
]
);
}
Err(err) => panic!("Unexpected error `{}`", err),
Ok(_) => panic!("Expected an error"),
}
}
#[tokio::test]
async fn completion_with_prelude() {
let _ = ::env_logger::try_init();
let vm = make_vm_async().await;
let source = r#"
let prelude = import! std.prelude
let { Option } = import! std.option
let { Num } = prelude
let { Lazy, lazy } = import! std.lazy
rec
type Stream_ a =
| Value a (Stream a)
| Empty
type Stream a = Lazy (Stream_ a)
in
let from f : (Int -> Option a) -> Stream a =
rec let from_ i =
lazy (\_ ->
match f i with
| Some x -> Value x (from_ (i + 1))
| None -> Empty
)
in from_ 0
{ from }
"#;
let (expr, _) = vm
.typecheck_str_async("example", source, None)
.await
.unwrap_or_else(|err| panic!("{}", err));
let lines = vm.get_database().get_filemap("example").expect("file_map");
let result = completion::find(
&vm.get_env(),
lines.span(),
&expr.expr(),
lines.byte_index(16.into(), 29.into()).unwrap(),
)
.map(|either| either.right().unwrap());
assert_eq!(result, Ok(Type::int()));
}
#[tokio::test]
async fn completion_with_prelude_at_0() {
let _ = ::env_logger::try_init();
let vm = make_vm_async().await;
let expr = "1";
let (expr, _) = vm
.typecheck_str_async("example", expr, None)
.await
.unwrap_or_else(|err| panic!("{}", err));
let file_map = vm.get_database().get_filemap("example").expect("file_map");
let result = completion::find(
&vm.get_env(),
file_map.span(),
&expr.expr(),
BytePos::from(0),
)
.map(|either| either.right().unwrap());
assert_eq!(result, Ok(Type::int()));
}
#[tokio::test]
async fn suggestion_from_implicit_prelude() {
let _ = ::env_logger::try_init();
let vm = make_vm_async().await;
let expr = "1 ";
let (expr, _) = vm
.typecheck_str_async("example", expr, None)
.await
.unwrap_or_else(|err| panic!("{}", err));
let lines = vm.get_database().get_filemap("example").expect("file_map");
let result = completion::suggest(
&vm.get_env(),
lines.span(),
&expr.expr(),
lines.byte_index(0.into(), 2.into()).unwrap(),
);
assert!(!result.is_empty());
}
/// Would cause panics in `Source` as the spans from the implicit prelude were used with the
/// `Source` from the normal expression
#[tokio::test]
async fn dont_use_the_implicit_prelude_span_in_the_top_expr() {
let _ = ::env_logger::try_init();
let vm = make_vm_async().await;
let expr = "1";
vm.typecheck_str_async("example", expr, Some(&Type::float()))
.await
.unwrap_err();
}
#[test]
fn deep_clone_partial_application() {
let _ = ::env_logger::try_init();
let vm = gluon::VmBuilder::new().build();
let child = vm.new_thread().unwrap();
assert_eq!(child.allocated_memory(), 0);
child.get_database_mut().set_implicit_prelude(false);
let result = child.run_expr::<OpaqueValue<&Thread, Hole>>(
"test",
r#"
let f x y = y
f 1
"#,
);
assert!(result.is_ok(), "{}", result.err().unwrap());
let global_memory_without_closures = vm.global_env().gc.lock().unwrap().allocated_memory();
let memory_for_closures = child.allocated_memory();
vm.get_database_mut().set_global(
"test",
Type::hole(),
Default::default(),
&result.unwrap().0.into_inner(),
);
let global_memory_with_closures = vm.global_env().gc.lock().unwrap().allocated_memory();
assert_eq!(
global_memory_without_closures + memory_for_closures,
global_memory_with_closures
);
}
test_expr! { prelude issue_601,
r"
let { wrap } = import! std.applicative
let { flat_map } = import! std.monad
type Id a = a
let id_functor: Functor Id = {
map = \f x -> f x,
}
let id_applicative: Applicative Id = {
functor = id_functor,
apply = \f x -> f x,
wrap = \x -> x,
}
let id_monad: Monad Id = {
applicative = id_applicative,
flat_map = \f x -> f x,
}
let foo: [Functor f] -> Id () = ()
let bar: Id () =
do _ = foo
wrap ()
in ()
",
()
}
test_expr! { recursive_record,
r#"
rec
let x = { y }
let y = { z = 2 }
x.y.z
"#,
2
}
test_expr! { recursive_variant,
r#"
type List a = | Nil | Cons a (List a)
rec let ones = Cons 1 ones
in
match ones with
| Cons x xs -> x
| Nil -> 2
"#,
1
}
test_expr! { recursive_implicit,
r#"
rec
type Test = | Test Test2 | Nil
type Test2 = | Test2 Test
in
#[implicit]
type Size a = { size : a -> Int }
let size ?s : [Size a] -> a -> Int = s.size
rec
let size_test : Size Test =
let size_ x =
match x with
| Test t -> 1 #Int+ size t
| Nil -> 0
{ size = size_ }
let size_test2 : Size Test2 =
let size_ x =
match x with
| Test2 t -> 1 #Int+ size t
{ size = size_ }
in
size (Test (Test2 (Test (Test2 Nil))))
"#,
4
}
test_expr! { prelude thread_join,
r#"
let thread = import! std.thread
let io @ { ? } = import! std.io
let { wrap, (*>) } = import! std.applicative
let { (>>=) } = import! std.monad
let { id } = import! std.function
do t = thread.new_thread ()
thread.join (io.println "test" *> wrap 123) (thread.spawn_on t (\_ -> wrap "abc") >>= id)
"#,
IO::Value((123, "abc".to_string()))
}
test_expr! { category_bug,
r"
let { Category } = import! std.category
let category : Category (->) = {
id = \x -> x,
compose = \f g x -> f (g x),
}
1
",
1i32
}
test_expr! { load_io_skolem_bug,
r"
let io_prim @ { IO } = import! std.io.prim
type Monad (m : Type -> Type) = {
flat_map : forall a b . (a -> m b) -> m a -> m b
}
let monad : Monad IO = {
flat_map = io_prim.flat_map,
}
1
", | }
test_expr! { lift_effect_skolem_bug,
r"
let _ = import! std.effect.lift
1
",
1i32
}
test_expr! { use_bool,
r"
let { Bool } = import! std.types
True
",
true
}
test_expr! {
recursive_eff_arr,
r#"
rec
type Eff (r : Type -> Type) a =
| Pure a
| Impure : forall x . Arr r x a -> Eff r a
type Arr r a b = a -> Eff r b
in
type Writer r a = .. r
let tell : Eff [| writer : Writer | r |] () =
Impure Pure
()
"#,
()
}
test_expr! { issue_863,
r"
let { (<|) } = import! std.function
let g f x = x
let f a =
g a <| f a
{ f }
"
} | 1i32 |
webserver.py | from asyncio import AbstractEventLoop, Task, get_event_loop
from dataclasses import asdict
from datetime import datetime
from functools import wraps | from typing import Callable, Optional, Tuple
from quart import Quart, request
from werkzeug.exceptions import HTTPException
from .config import config
from .logger import get_logger, set_quart_logger_formatter
from .postgres import POSTGRES_HEALTHCHECK_TASK_NAME, postgres_healthcheck
from .state import state
from .streamer import STREAMING_TASK_NAME, stream
logger = get_logger(__name__)
app = Quart(__name__)
set_quart_logger_formatter()
def response(func: Callable) -> Callable:
    """Decorator for success handlers.

    Merges (in override order) the shared application state, the standard
    200 response metadata, and the handler's own payload. A handler that
    returns a falsy value contributes ``{"success": True}``.
    """
    @wraps(func)
    async def inner(*args, **kwargs) -> dict:
        payload = await func(*args, **kwargs) or {"success": True}
        metadata = {
            "server_time": datetime.now(),
            "path": request.path,
            "method": request.method,
            "status": "200 OK",
            "status_code": 200,
        }
        # Later keys win: handler payload overrides metadata overrides state.
        return {**asdict(state), **metadata, **payload}

    return inner
def error(code: int, status: str) -> Callable:
    """Decorator factory for error handlers.

    Produces a wrapper that builds a standard error body (timestamp,
    ``success: False``, request path/method, status text and code),
    merges in the handler's optional payload, and returns the Quart
    ``(body, status_code)`` tuple.
    """
    def wrapper(func: Callable) -> Callable:
        @wraps(func)
        async def inner(*args, **kwargs) -> Tuple[dict, int]:
            payload = await func(*args, **kwargs) or {}
            body = {
                "server_time": datetime.now(),
                "success": False,
                "path": request.path,
                "method": request.method,
                "status": f"{code} {status}",
                "status_code": code,
            }
            # Handler-provided keys override the defaults.
            body.update(payload)
            return body, code

        return inner

    return wrapper
@app.route("/healthz", methods=["GET"])
async def healthz() -> Tuple[str, int]:
    """Liveness probe: always answers HTTP 200 with an empty body."""
    empty_body = ""
    return empty_body, 200
@app.route("/status", methods=["GET"])
@response
async def status() -> None:
    # No handler-specific payload: the @response decorator supplies the full
    # state snapshot plus the standard 200 metadata.
    pass
@app.route("/start", methods=["POST"])
@response
async def start() -> dict:
    """Begin streaming: spawn the streamer as a background task.

    Idempotent: a second call while already streaming is reported as
    success without spawning a second task. Refuses to start when the
    Postgres healthcheck has not marked the database available.
    """
    import asyncio

    logger.info("starting")
    if state.streaming:
        return {"success": True, "message": "Currently streaming"}
    if not state.postgres:
        return {"success": False, "message": "Postgres not available"}
    # get_running_loop() is the supported way to obtain the loop from inside
    # a coroutine; get_event_loop() is deprecated here since Python 3.10.
    loop: AbstractEventLoop = asyncio.get_running_loop()
    loop.create_task(stream(), name=STREAMING_TASK_NAME)
    state.streaming = True
    return {"success": True, "message": "Started streaming"}
@app.route("/stop", methods=["POST"])
@response
async def stop() -> dict:
    """Stop streaming: cancel the named streaming task if one is running.

    Idempotent: calling while not streaming is reported as success.
    """
    import asyncio

    logger.info("stopping")
    if not state.streaming:
        return {"success": True, "message": "Not currently streaming"}
    # Task.all_tasks() was removed in Python 3.9; asyncio.all_tasks() is the
    # supported replacement and only returns tasks of the running loop.
    for task in asyncio.all_tasks():
        if task.get_name() == STREAMING_TASK_NAME:
            task.cancel()
            break
    state.streaming = False
    return {"success": True, "message": "Stopped streaming"}
@app.errorhandler(404)
@error(404, "Not Found")
async def page_not_found(e: HTTPException) -> None:
    # The @error decorator builds the entire 404 JSON body; nothing to add.
    pass
@app.errorhandler(405)
@error(405, "Method Not Allowed")
async def method_not_allowed(e: HTTPException) -> None:
    # The @error decorator builds the entire 405 JSON body; nothing to add.
    pass
@app.before_serving
async def startup() -> None:
    # Launch the Postgres healthcheck as a background task before the server
    # starts accepting requests, so /start can consult state.postgres.
    loop: AbstractEventLoop = get_event_loop()
    loop.create_task(postgres_healthcheck(), name=POSTGRES_HEALTHCHECK_TASK_NAME)
def webserver() -> None:
    # Blocking entry point: serve the Quart app on the configured host/port.
    app.run(host=config["webserver"]["host"], port=config["webserver"]["port"])
test_gross_price_component_dto.py | # coding: utf-8
"""
AVACloud API 1.17.3
AVACloud API specification # noqa: E501
OpenAPI spec version: 1.17.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import avacloud_client_python
from avacloud_client_python.models.gross_price_component_dto import GrossPriceComponentDto # noqa: E501
from avacloud_client_python.rest import ApiException
class TestGrossPriceComponentDto(unittest.TestCase):
    """GrossPriceComponentDto unit test stubs"""

    def setUp(self):
        # No fixtures are required for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up after each test.
        pass

    def testGrossPriceComponentDto(self):
        """Test GrossPriceComponentDto"""
        # FIXME: construct object with mandatory attributes with example values
        # model = avacloud_client_python.models.gross_price_component_dto.GrossPriceComponentDto()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| tearDown |
authorization.go | // Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package utils
import (
"github.com/zgordan-vv/zacmm-server/model"
)
func | (roles map[string]*model.Role, cfg *model.Config, isLicensed bool) map[string]*model.Role {
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPublicChannelCreation {
case model.PERMISSIONS_ALL:
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PUBLIC_CHANNEL.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PUBLIC_CHANNEL.Id,
)
}
} else {
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PUBLIC_CHANNEL.Id,
)
}
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPublicChannelManagement {
case model.PERMISSIONS_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PUBLIC_CHANNEL_PROPERTIES.Id,
)
case model.PERMISSIONS_CHANNEL_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PUBLIC_CHANNEL_PROPERTIES.Id,
)
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions = append(
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PUBLIC_CHANNEL_PROPERTIES.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PUBLIC_CHANNEL_PROPERTIES.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PUBLIC_CHANNEL_PROPERTIES.Id,
)
}
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPublicChannelDeletion {
case model.PERMISSIONS_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PUBLIC_CHANNEL.Id,
)
case model.PERMISSIONS_CHANNEL_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PUBLIC_CHANNEL.Id,
)
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions = append(
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PUBLIC_CHANNEL.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PUBLIC_CHANNEL.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PUBLIC_CHANNEL.Id,
)
}
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPrivateChannelCreation {
case model.PERMISSIONS_ALL:
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PRIVATE_CHANNEL.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PRIVATE_CHANNEL.Id,
)
}
} else {
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_CREATE_PRIVATE_CHANNEL.Id,
)
}
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPrivateChannelManagement {
case model.PERMISSIONS_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_PROPERTIES.Id,
)
case model.PERMISSIONS_CHANNEL_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_PROPERTIES.Id,
)
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions = append(
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_PROPERTIES.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_PROPERTIES.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_PROPERTIES.Id,
)
}
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPrivateChannelDeletion {
case model.PERMISSIONS_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PRIVATE_CHANNEL.Id,
)
case model.PERMISSIONS_CHANNEL_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PRIVATE_CHANNEL.Id,
)
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions = append(
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PRIVATE_CHANNEL.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PRIVATE_CHANNEL.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_PRIVATE_CHANNEL.Id,
)
}
// Restrict permissions for Private Channel Manage Members
if isLicensed {
switch *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictPrivateChannelManageMembers {
case model.PERMISSIONS_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_MEMBERS.Id,
)
case model.PERMISSIONS_CHANNEL_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_MEMBERS.Id,
)
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions = append(
roles[model.CHANNEL_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_MEMBERS.Id,
)
case model.PERMISSIONS_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_MEMBERS.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_PRIVATE_CHANNEL_MEMBERS.Id,
)
}
if !*cfg.ServiceSettings.DEPRECATED_DO_NOT_USE_EnableOnlyAdminIntegrations {
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_INCOMING_WEBHOOKS.Id,
model.PERMISSION_MANAGE_OUTGOING_WEBHOOKS.Id,
model.PERMISSION_MANAGE_SLASH_COMMANDS.Id,
)
roles[model.SYSTEM_USER_ROLE_ID].Permissions = append(
roles[model.SYSTEM_USER_ROLE_ID].Permissions,
model.PERMISSION_MANAGE_OAUTH.Id,
)
}
// Grant permissions for inviting and adding users to a team.
if isLicensed {
if *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictTeamInvite == model.PERMISSIONS_TEAM_ADMIN {
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_INVITE_USER.Id,
model.PERMISSION_ADD_USER_TO_TEAM.Id,
)
} else if *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_RestrictTeamInvite == model.PERMISSIONS_ALL {
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_INVITE_USER.Id,
model.PERMISSION_ADD_USER_TO_TEAM.Id,
)
}
} else {
roles[model.TEAM_USER_ROLE_ID].Permissions = append(
roles[model.TEAM_USER_ROLE_ID].Permissions,
model.PERMISSION_INVITE_USER.Id,
model.PERMISSION_ADD_USER_TO_TEAM.Id,
)
}
if isLicensed {
switch *cfg.ServiceSettings.DEPRECATED_DO_NOT_USE_RestrictPostDelete {
case model.PERMISSIONS_DELETE_POST_ALL:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_POST.Id,
)
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_POST.Id,
model.PERMISSION_DELETE_OTHERS_POSTS.Id,
)
case model.PERMISSIONS_DELETE_POST_TEAM_ADMIN:
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_POST.Id,
model.PERMISSION_DELETE_OTHERS_POSTS.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_DELETE_POST.Id,
)
roles[model.TEAM_ADMIN_ROLE_ID].Permissions = append(
roles[model.TEAM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_DELETE_POST.Id,
model.PERMISSION_DELETE_OTHERS_POSTS.Id,
)
}
if *cfg.TeamSettings.DEPRECATED_DO_NOT_USE_EnableTeamCreation {
roles[model.SYSTEM_USER_ROLE_ID].Permissions = append(
roles[model.SYSTEM_USER_ROLE_ID].Permissions,
model.PERMISSION_CREATE_TEAM.Id,
)
}
if isLicensed {
switch *cfg.ServiceSettings.DEPRECATED_DO_NOT_USE_AllowEditPost {
case model.ALLOW_EDIT_POST_ALWAYS, model.ALLOW_EDIT_POST_TIME_LIMIT:
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_EDIT_POST.Id,
)
roles[model.SYSTEM_ADMIN_ROLE_ID].Permissions = append(
roles[model.SYSTEM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_EDIT_POST.Id,
)
}
} else {
roles[model.CHANNEL_USER_ROLE_ID].Permissions = append(
roles[model.CHANNEL_USER_ROLE_ID].Permissions,
model.PERMISSION_EDIT_POST.Id,
)
roles[model.SYSTEM_ADMIN_ROLE_ID].Permissions = append(
roles[model.SYSTEM_ADMIN_ROLE_ID].Permissions,
model.PERMISSION_EDIT_POST.Id,
)
}
return roles
}
| SetRolePermissionsFromConfig |
crashinterceptor.go | package serverinterceptors
import (
"context"
"runtime/debug"
"gitlab.deepwisdomai.com/infra/go-zero/core/logx"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// StreamCrashInterceptor catches panics in processing stream requests and recovers.
// It converts the recovered panic value into a gRPC Internal error so the
// stream terminates cleanly instead of crashing the server.
func StreamCrashInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo,
	handler grpc.StreamHandler) (err error) {
	// err is a named return value so the deferred recovery callback can
	// overwrite it after handler has panicked.
	defer handleCrash(func(r interface{}) {
		err = toPanicError(r)
	})

	return handler(srv, stream)
}
// UnaryCrashInterceptor catches panics in processing unary requests and recovers.
// The returned interceptor converts any panic raised by the handler into a
// gRPC Internal error via the named return value.
func UnaryCrashInterceptor() grpc.UnaryServerInterceptor {
	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo,
		handler grpc.UnaryHandler) (resp interface{}, err error) {
		// Deferred so recover() observes a panic from handler below.
		defer handleCrash(func(r interface{}) {
			err = toPanicError(r)
		})

		return handler(ctx, req)
	}
}
// handleCrash recovers from an in-flight panic and, if one occurred, passes
// the recovered value to the supplied callback. It must be invoked via defer
// so that recover() can observe the panic; when no panic happened the
// callback is not called.
func handleCrash(handler func(interface{})) {
	if r := recover(); r != nil {
		handler(r)
	}
}
// toPanicError logs the recovered panic value together with the current
// stack trace and converts it into a gRPC status error with code Internal.
func toPanicError(r interface{}) error {
	logx.Errorf("%+v %s", r, debug.Stack())
	return status.Errorf(codes.Internal, "panic: %v", r)
}
| handleCrash |
geom.rs | use crate::region::Region;
use inari::{interval, Interval};
/// A one-dimensional geometric region that represents a line segment.
///
/// Conceptually, it is a pair of two [`Interval`]s `inner` and `outer`
/// that satisfy `inner ⊆ outer`. `inner` can be empty, while `outer` cannot.
#[derive(Clone, Debug)]
pub struct Box1D {
    // Left bound: `outer` starts at `l.inf()`, `inner` at `l.sup()`.
    l: Interval,
    // Right bound: `inner` ends at `r.inf()`, `outer` at `r.sup()`.
    r: Interval,
}
impl Box1D {
    /// Creates a new [`Box1D`] with the given bounds.
    ///
    /// # Panics
    ///
    /// Panics if the bounds are out of order, i.e. `l.inf() > r.sup()`.
    pub fn new(l: Interval, r: Interval) -> Self {
        assert!(l.inf() <= r.sup());
        Self { l, r }
    }

    /// Returns the inner region, `[l.sup(), r.inf()]`, or the empty
    /// interval when those endpoints cross.
    pub fn inner(&self) -> Interval {
        let (lo, hi) = (self.l.sup(), self.r.inf());
        if lo <= hi {
            interval!(lo, hi).unwrap()
        } else {
            Interval::EMPTY
        }
    }

    /// Returns the left bound of the region.
    pub fn left(&self) -> Interval {
        self.l
    }

    /// Returns the outer region, `[l.inf(), r.sup()]`.
    pub fn outer(&self) -> Interval {
        interval!(self.l.inf(), self.r.sup()).unwrap()
    }

    /// Returns the right bound of the region.
    pub fn right(&self) -> Interval {
        self.r
    }
}
/// A two-dimensional geometric region that represents an axis-aligned rectangle.
///
/// Conceptually, it is a pair of two [`Region`]s `inner` and `outer`
/// that satisfy `inner ⊆ outer`. `inner` can be empty, while `outer` cannot.
#[derive(Clone, Debug)]
pub struct Box2D(Box1D, Box1D); // x-axis box, y-axis box
impl Box2D {
/// Creates a new [`Box2D`] with the given bounds.
pub fn new(l: Interval, r: Interval, b: Interval, t: Interval) -> Self {
Self(Box1D::new(l, r), Box1D::new(b, t))
}
/// Returns the bottom bound of the region.
pub fn bottom(&self) -> Interval {
self.1.left()
}
/// Returns the inner region.
pub fn inner(&self) -> Region {
Region::new(self.0.inner(), self.1.inner())
}
/// Returns the left bound of the region.
pub fn left(&self) -> Interval {
self.0.left()
}
/// Returns the outer region.
pub fn outer(&self) -> Region {
Region::new(self.0.outer(), self.1.outer())
}
/// Returns the right bound of the region.
pub fn right(&self) -> Interval {
| /// Returns the top bound of the region.
pub fn top(&self) -> Interval {
self.1.right()
}
/// Swaps the axes of the region.
#[must_use]
pub fn transpose(&self) -> Self {
Self(self.1.clone(), self.0.clone())
}
}
/// The type of the formula that should be used for performing geometric transformations.
///
/// See [`Transformation1D`] for the formulas each mode selects.
#[derive(Clone, Copy, Debug)]
pub enum TransformationMode {
    /// Suitable for transformation from image coordinates to real coordinates,
    /// which usually involves exact divisions (division by image dimensions).
    Fast,
    /// Suitable for transformation from real coordinates to image coordinates,
    /// which usually involves inexact divisions (division by lengths of the plot range).
    Precise,
}
/// A one-dimensional affine geometric transformation that consists of only scaling and translation.
#[derive(Clone, Debug)]
pub enum Transformation1D {
    /// Precomputed form: maps `x` to `x.mul_add(s, t)`.
    Fast {
        s: Interval,
        t: Interval,
    },
    /// Deferred form: maps `x` to `((x - a0) / a01).mul_add(x01, x0)`.
    Precise {
        a0: Interval,
        a01: Interval,
        x0: Interval,
        x01: Interval,
    },
}
impl Transformation1D {
    /// Creates a transformation that maps each source point to the corresponding destination point.
    pub fn new(
        from_points: [Interval; 2],
        to_points: [Interval; 2],
        mode: TransformationMode,
    ) -> Self {
        let [a0, a1] = from_points;
        let [x0, x1] = to_points;
        match mode {
            TransformationMode::Fast => {
                // Precompute scale and offset once.
                let s = (x1 - x0) / (a1 - a0);
                Self::Fast {
                    s,
                    t: (-a0).mul_add(s, x0),
                }
            }
            TransformationMode::Precise => Self::Precise {
                a0,
                a01: a1 - a0,
                x0,
                x01: x1 - x0,
            },
        }
    }
}
/// A two-dimensional affine geometric transformation that consists of only scaling and translation.
///
/// The components transform the x and y coordinates, respectively.
#[derive(Clone, Debug)]
pub struct Transformation2D(Transformation1D, Transformation1D);
impl Transformation2D {
    /// Creates a transformation that maps each source point to the corresponding destination point.
    pub fn new(from_points: [Region; 2], to_points: [Region; 2], mode: TransformationMode) -> Self {
        // Build an independent 1-D transformation per axis.
        let x = Transformation1D::new(
            [from_points[0].x(), from_points[1].x()],
            [to_points[0].x(), to_points[1].x()],
            mode,
        );
        let y = Transformation1D::new(
            [from_points[0].y(), from_points[1].y()],
            [to_points[0].y(), to_points[1].y()],
            mode,
        );
        Self(x, y)
    }
}
/// Geometric objects that can be mapped through a transformation of type `T`.
pub trait Transform<T> {
    /// Returns an enclosure of the geometric object transformed by `t`.
    fn transform(&self, t: &T) -> Self;
}
impl Transform<Transformation1D> for Box1D {
    fn transform(&self, t: &Transformation1D) -> Self {
        // Apply the 1-D map to each bound independently.
        Self {
            l: self.l.transform(t),
            r: self.r.transform(t),
        }
    }
}
impl Transform<Transformation2D> for Box2D {
    fn transform(&self, t: &Transformation2D) -> Self {
        // Apply the x component of `t` to the x box and the y component to the y box.
        Self(self.0.transform(&t.0), self.1.transform(&t.1))
    }
}
impl Transform<Transformation1D> for Interval {
    fn transform(&self, t: &Transformation1D) -> Self {
        match *t {
            // Fast form: single fused multiply-add, `x * s + t`.
            Transformation1D::Fast { s, t } => self.mul_add(s, t),
            // Precise form: normalize relative to the source range first,
            // then scale into the destination range:
            // `((x - a0) / a01) * x01 + x0`.
            Transformation1D::Precise { a0, a01, x0, x01 } => ((*self - a0) / a01).mul_add(x01, x0),
        }
    }
}
/// In-place counterpart of [`Transform`].
pub trait TransformInPlace<T> {
    /// The in-place version of [`Transform`].
    fn transform_in_place(&mut self, t: &T);
}
#[cfg(test)]
mod tests {
    use super::*;
    use inari::const_interval;

    #[test]
    fn box2d() {
        // Bounds that leave a gap: inner and outer regions are both non-empty.
        let u = Box2D::new(
            const_interval!(0.33, 0.34),
            const_interval!(0.66, 0.67),
            const_interval!(1.33, 1.34),
            const_interval!(1.66, 1.67),
        );
        assert_eq!(
            u.inner(),
            Region::new(const_interval!(0.34, 0.66), const_interval!(1.34, 1.66))
        );
        assert_eq!(
            u.outer(),
            Region::new(const_interval!(0.33, 0.67), const_interval!(1.33, 1.67))
        );

        // Overlapping bounds: the inner region collapses to empty while the
        // outer region still spans the full extent.
        let u = Box2D::new(
            const_interval!(0.33, 0.66),
            const_interval!(0.34, 0.67),
            const_interval!(1.33, 1.66),
            const_interval!(1.34, 1.67),
        );
        assert_eq!(u.inner(), Region::EMPTY);
        assert_eq!(
            u.outer(),
            Region::new(const_interval!(0.33, 0.67), const_interval!(1.33, 1.67))
        );
    }
}
| self.0.right()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.