Dataset columns:
  file_name  string (3–137 chars)
  prefix     string (0–918k chars)
  suffix     string (0–962k chars)
  middle     string (0–812k chars)
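Each record below gives a file_name line followed by that row's prefix, suffix, and middle cells, apparently in that order (long cells wrap across several display lines). A minimal Python sketch of how a row presumably reassembles into the original source file; the field names come from the header above, and the row literal is a condensed version of the types.ts record below:

    def reassemble(row: dict) -> str:
        # The middle cell slots between the prefix and the suffix.
        return row["prefix"] + row["middle"] + row["suffix"]

    row = {
        "file_name": "types.ts",
        "prefix": "export interface GetFeaturePayload {\n  feature?: FeatureMetadata;\n  statusCode?: number;\n",
        "middle": "  statusMessage?: string;\n",
        "suffix": "}\n",
    }
    print(reassemble(row))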
tplog7.rs
#[doc = "Reader of register TPLOG7"] pub type R = crate::R<u32, super::TPLOG7>; #[doc = "Reader of field `TRIG0`"] pub type TRIG0_R = crate::R<bool, bool>; #[doc = "Reader of field `TRIG1`"] pub type TRIG1_R = crate::R<bool, bool>; #[doc = "Reader of field `TRIG2`"] pub type TRIG2_R = crate::R<bool, bool>; #[doc = "Reader of field `TRIG3`"] pub type TRIG3_R = crate::R<bool, bool>; #[doc = "Reader of field `XOSC`"] pub type XOSC_R = crate::R<bool, bool>; impl R {
Trigger"] #[inline(always)] pub fn trig0(&self) -> TRIG0_R { TRIG0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Status of TMPR\\[1\\] Trigger"] #[inline(always)] pub fn trig1(&self) -> TRIG1_R { TRIG1_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Status of TMPR\\[2\\] Trigger"] #[inline(always)] pub fn trig2(&self) -> TRIG2_R { TRIG2_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Status of TMPR\\[3\\] Trigger"] #[inline(always)] pub fn trig3(&self) -> TRIG3_R { TRIG3_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 16 - Status of external 32"] #[inline(always)] pub fn xosc(&self) -> XOSC_R { XOSC_R::new(((self.bits >> 16) & 0x01) != 0) } }
#[doc = "Bit 0 - Status of TMPR\\[0\\]
types.ts
import { FeatureCode, FeatureMetadata } from 'interfaces/Feature';

export enum GetFeature {
  REQUEST = 'amundsen/feature/GET_FEATURE_REQUEST',
  SUCCESS = 'amundsen/feature/GET_FEATURE_SUCCESS',
  FAILURE = 'amundsen/feature/GET_FEATURE_FAILURE',
}

export interface GetFeatureRequest {
  type: GetFeature.REQUEST;
  payload: {
    key: string;
    index?: string;
    source?: string;
  };
}

export interface GetFeatureResponse {
  type: GetFeature.SUCCESS | GetFeature.FAILURE;
  payload: GetFeaturePayload;
}

export interface GetFeaturePayload {
  feature?: FeatureMetadata;
  statusCode?: number;
}

export enum GetFeatureCode {
  REQUEST = 'amundsen/feature/GET_FEATURE_CODE_REQUEST',
  SUCCESS = 'amundsen/feature/GET_FEATURE_CODE_SUCCESS',
  FAILURE = 'amundsen/feature/GET_FEATURE_CODE_FAILURE',
}

export interface GetFeatureCodeRequest {
  type: GetFeatureCode.REQUEST;
  payload: {
    key: string;
  };
}

export interface GetFeatureCodeResponse {
  type: GetFeatureCode.SUCCESS | GetFeatureCode.FAILURE;
  payload: GetFeatureCodePayload;
}

export interface GetFeatureCodePayload {
  featureCode?: FeatureCode;
  statusCode?: number;
  statusMessage?: string;
}
statusMessage?: string;
type-test.ts
/** * @license * Copyright (c) 2018 Google Inc. All rights reserved. * This code may only be used under the BSD style license found at * http://polymer.github.io/LICENSE.txt * Code distributed by Google as part of this project is also * subject to an additional IP rights grant found at * http://polymer.github.io/PATENTS.txt */ import {assert} from '../../platform/chai-web.js'; import {Manifest} from '../manifest.js'; import {TypeVariableInfo} from '../type-variable-info.js'; import {ArcType, BigCollectionType, CollectionType, EntityType, HandleType, InterfaceType, ReferenceType, RelationType, SlotType, Type, TypeVariable} from '../type.js'; // For reference, this is a list of all the types and their contained data: // EntityType : Schema // TypeVariable : TypeVariableInfo // CollectionType : Type // BigCollectionType : Type // RelationType : [Type] // InterfaceType : InterfaceInfo // SlotType : SlotInfo // ReferenceType : Type // ArcType : none // HandleType : none describe('types', () => { describe('literals and cloning', () => { // Ignore undefined fields. function deepEqual(a, b) { assert.deepEqual(JSON.parse(JSON.stringify(a)), JSON.parse(JSON.stringify(b))); } it('Entity', async () => { const entity = EntityType.make(['Foo'], {value: 'Text'}); deepEqual(entity.toLiteral(), { tag: 'Entity', data: {names: ['Foo'], fields: {value: {kind: 'schema-primitive', type: 'Text'}}, description: {}} }); deepEqual(entity, Type.fromLiteral(entity.toLiteral())); deepEqual(entity, entity.clone(new Map())); }); it('TypeVariable', async () => { const variable = TypeVariable.make('a', null, null); deepEqual(variable.toLiteral(), { tag: 'TypeVariable', data: {name: 'a', canWriteSuperset: null, canReadSubset: null} }); deepEqual(variable, Type.fromLiteral(variable.toLiteral())); deepEqual(variable, variable.clone(new Map())); }); it('Collection', async () => { // Collection of entities const entity = EntityType.make(['Foo'], {value: 'Text'}); const col1 = new CollectionType(entity); deepEqual(col1.toLiteral(), {tag: 'Collection', data: entity.toLiteral()}); deepEqual(col1, Type.fromLiteral(col1.toLiteral())); deepEqual(col1, col1.clone(new Map())); // Collection of collection of variables const variable = TypeVariable.make('a', null, null); const inner = new CollectionType(variable); const col2 = new CollectionType(inner); deepEqual(col2.toLiteral(), { tag: 'Collection', data: {tag: 'Collection', data: variable.toLiteral()} }); deepEqual(col2, Type.fromLiteral(col2.toLiteral())); deepEqual(col2, col2.clone(new Map())); // Collection of references to slots const slot = SlotType.make('f', 'h'); const reference = new ReferenceType(slot); const col3 = new CollectionType(reference); deepEqual(col3.toLiteral(), {tag: 'Collection', data: reference.toLiteral()}); deepEqual(col3, Type.fromLiteral(col3.toLiteral())); deepEqual(col3, col3.clone(new Map())); }); it('BigCollection', async () => { // BigCollection of entities const entity = EntityType.make(['Foo'], {value: 'Text'}); const big1 = new BigCollectionType(entity); deepEqual(big1.toLiteral(), {tag: 'BigCollection', data: entity.toLiteral()}); deepEqual(big1, Type.fromLiteral(big1.toLiteral())); deepEqual(big1, big1.clone(new Map())); // BigCollection of BigCollection of variables const variable = TypeVariable.make('a', null, null); const inner = new BigCollectionType(variable); const big2 = new BigCollectionType(inner); deepEqual(big2.toLiteral(), { tag: 'BigCollection', data: {tag: 'BigCollection', data: variable.toLiteral()} }); deepEqual(big2, 
Type.fromLiteral(big2.toLiteral())); deepEqual(big2, big2.clone(new Map())); // BigCollection of references to slots const slot = SlotType.make('f', 'h'); const reference = new ReferenceType(slot); const big3 = new BigCollectionType(reference); deepEqual(big3.toLiteral(), {tag: 'BigCollection', data: reference.toLiteral()}); deepEqual(big3, Type.fromLiteral(big3.toLiteral())); deepEqual(big3, big3.clone(new Map())); }); it('Relation', async () => { const entity = EntityType.make(['Foo'], {value: 'Text'}); const variable = TypeVariable.make('a', null, null); const col = new CollectionType(entity); const relation = new RelationType([entity, variable, col]); deepEqual(relation.toLiteral(), { tag: 'Relation', data: [entity.toLiteral(), variable.toLiteral(), col.toLiteral()] }); deepEqual(relation, Type.fromLiteral(relation.toLiteral())); deepEqual(relation, relation.clone(new Map())); }); it('Interface', async () => { const entity = EntityType.make(['Foo'], {value: 'Text'}); const variable = TypeVariable.make('a', null, null); const col = new CollectionType(entity); const iface = InterfaceType.make('i', [{type: entity}, {type: variable}, {type: col}], [{name: 'x'}]); deepEqual(iface.toLiteral(), { tag: 'Interface', data: { name: 'i', handles: [{type: entity.toLiteral()}, {type: variable.toLiteral()}, {type: col.toLiteral()}], slots: [{name: 'x'}] } }); deepEqual(iface, Type.fromLiteral(iface.toLiteral())); deepEqual(iface, iface.clone(new Map())); }); it('Slot', async () => { const slot = SlotType.make('f', 'h'); deepEqual(slot.toLiteral(), {tag: 'Slot', data: {formFactor: 'f', handle: 'h'}}); deepEqual(slot, Type.fromLiteral(slot.toLiteral())); deepEqual(slot, slot.clone(new Map())); }); it('Reference', async () => { // Reference to entity const entity = EntityType.make(['Foo'], {value: 'Text'}); const ref1 = new ReferenceType(entity); deepEqual(ref1.toLiteral(), {tag: 'Reference', data: entity.toLiteral()}); deepEqual(ref1, Type.fromLiteral(ref1.toLiteral())); deepEqual(ref1, ref1.clone(new Map())); // Reference to reference variable const variable = TypeVariable.make('a', null, null); const inner = new ReferenceType(variable); const ref2 = new ReferenceType(inner); deepEqual(ref2.toLiteral(), { tag: 'Reference', data: {tag: 'Reference', data: variable.toLiteral()} }); deepEqual(ref2, Type.fromLiteral(ref2.toLiteral())); deepEqual(ref2, ref2.clone(new Map())); // Reference to collection of slots const slot = SlotType.make('f', 'h'); const col = new CollectionType(slot); const ref3 = new ReferenceType(col); deepEqual(ref3.toLiteral(), {tag: 'Reference', data: col.toLiteral()}); deepEqual(ref3, Type.fromLiteral(ref3.toLiteral())); deepEqual(ref3, ref3.clone(new Map())); }); it('ArcInfo', async () => { const arcInfo = new ArcType(); deepEqual(arcInfo.toLiteral(), {tag: 'Arc'}); deepEqual(arcInfo, Type.fromLiteral(arcInfo.toLiteral())); deepEqual(arcInfo, arcInfo.clone(new Map())); }); it('HandleInfo', async () => { const handleInfo = new HandleType(); deepEqual(handleInfo.toLiteral(), {tag: 'Handle'}); deepEqual(handleInfo, Type.fromLiteral(handleInfo.toLiteral())); deepEqual(handleInfo, handleInfo.clone(new Map())); }); it('combine all the types', async () => { const slot = SlotType.make('f', 'h'); const bigCol = new BigCollectionType(slot); const reference = new ReferenceType(bigCol); const entity = EntityType.make(['Foo'], {value: 'Text'}); const variable = TypeVariable.make('a', null, null); const arcInfo = new ArcType(); const iface = InterfaceType.make('i', [{type: entity}, {type: 
variable}, {type: arcInfo}], []); const handleInfo = new HandleType(); const relation = new RelationType([reference, iface, handleInfo]); const collection = new CollectionType(relation); deepEqual(collection, Type.fromLiteral(collection.toLiteral())); deepEqual(collection, collection.clone(new Map())); }); }); describe('TypeVariable', () => { const resolutionAssertMsg = 'variable cannot resolve to collection of itself'; it(`setting the resolution to itself is a no-op`, () => { const a = TypeVariable.make('x'); a.variable.resolution = a; assert.isNull(a.variable.resolution); }); it(`allows 2 type variables to resolve to each other`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); a.variable.resolution = b; b.variable.resolution = a; assert.strictEqual(a.resolvedType(), b.resolvedType()); }); it(`allows the resolution to be a Collection of other type variable`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); a.variable.resolution = b.collectionOf(); }); it(`allows the resolution to be a BigCollection of other type variable`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); a.variable.resolution = b.bigCollectionOf(); }); it(`disallows the resolution to be a Collection of itself`, () => { const a = TypeVariable.make('x'); assert.throws(() => a.variable.resolution = a.collectionOf(), resolutionAssertMsg); }); it(`disallows the resolution to be a BigCollection of itself`, () => { const a = TypeVariable.make('x'); assert.throws(() => a.variable.resolution = a.bigCollectionOf(), resolutionAssertMsg); }); it(`disallows the resolution of x to be a Collection of type variable that resolve to x`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); b.variable.resolution = a; assert.throws(() => a.variable.resolution = b.collectionOf(), resolutionAssertMsg); }); it(`disallows the resolution of x to be a BigCollection of type variable that resolve to x`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); b.variable.resolution = a; assert.throws(() => a.variable.resolution = b.bigCollectionOf(), resolutionAssertMsg); }); it(`disallows the resolution of x to be a type variable that resolves to Collection of x`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); b.variable.resolution = a.collectionOf(); assert.throws(() => a.variable.resolution = b, resolutionAssertMsg); }); it(`disallows the resolution of x to be a type variable that resolves to BigCollection of x`, () => { const a = TypeVariable.make('x'); const b = TypeVariable.make('x'); b.variable.resolution = a.bigCollectionOf(); assert.throws(() => a.variable.resolution = b, resolutionAssertMsg); }); it(`maybeEnsureResolved clears canReadSubset and canWriteSuperset`, () => { const a = new TypeVariableInfo('x'); const b = EntityType.make(['Thing'], {}); a.maybeMergeCanWriteSuperset(b); assert.equal(a.canWriteSuperset, b); assert.notExists(a.canReadSubset); assert.notExists(a.resolution); a.maybeEnsureResolved(); assert.notExists(a.canWriteSuperset); assert.notExists(a.canReadSubset); assert.equal(a.resolution, b); }); }); describe('serialization', () => { it('serializes interfaces', () => { const entity = EntityType.make(['Foo'], {value: 'Text'}); const variable = TypeVariable.make('a', null, null); const iface = InterfaceType.make('i', [{type: entity, name: 'foo'}, {type: variable}], [{name: 'x', direction: 'consume'}]); assert.equal(iface.interfaceInfo.toString(), `interface i Foo {Text value} 
foo ~a * consume x `); }); // Regression test for https://github.com/PolymerLabs/arcs/issues/2575 it('disregards type variable resolutions in interfaces', () => { const variable = TypeVariable.make('a', null, null); variable.variable.resolution = EntityType.make(['Foo'], {value: 'Text'}); const iface = InterfaceType.make('i', [{type: variable}], []); assert.equal(iface.interfaceInfo.toString(), `interface i ~a * `); }); }); describe('integration', () => { const manifestText = ` schema Product Text name schema Lego extends Product Text setID
particle ReadsProduct in [Product] product recipe MatchBasic create as v0 WritesLego lego -> v0 ReadsProduct product <- v0 recipe MatchExisting use 'test:1' as v0 WritesLego lego -> v0 ReadsProduct product <- v0`; it('a subtype matches to a supertype that wants to be read', async () => { const manifest = await Manifest.parse(manifestText); const recipe = manifest.recipes[0]; assert(recipe.normalize()); assert(recipe.isResolved()); assert.equal(recipe.handles.length, 1); assert.equal((recipe.handles[0].type.getContainedType().canReadSubset as EntityType).entitySchema.name, 'Lego'); assert.equal((recipe.handles[0].type.getContainedType().canWriteSuperset as EntityType).entitySchema.name, 'Product'); }); it('a subtype matches to a supertype that wants to be read when a handle exists', async () => { const manifest = await Manifest.parse(manifestText); const recipe = manifest.recipes[1]; recipe.handles[0].mapToStorage({ id: 'test1', type: manifest.findSchemaByName('Product').entityClass().type.collectionOf() }); assert(recipe.normalize()); assert(recipe.isResolved()); assert.lengthOf(recipe.handles, 1); assert.equal((recipe.handles[0].type.getContainedType() as EntityType).entitySchema.name, 'Product'); }); it('a subtype matches to a supertype that wants to be read when a handle exists', async () => { const manifest = await Manifest.parse(manifestText); const recipe = manifest.recipes[1]; recipe.handles[0].mapToStorage({ id: 'test1', type: manifest.findSchemaByName('Lego').entityClass().type.collectionOf() }); assert(recipe.normalize()); assert(recipe.isResolved()); assert.lengthOf(recipe.handles, 1); assert.equal((recipe.handles[0].type.getContainedType() as EntityType).entitySchema.name, 'Lego'); }); }); });
particle WritesLego out [Lego] lego
server.py
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

""" Object Server for Swift """

import cPickle as pickle
import os
import socket import math from swift import gettext_ as _ from hashlib import md5 from eventlet import sleep, wsgi, Timeout from swift.common.utils import public, get_logger, \ config_true_value, timing_stats, replication, \ normalize_delete_at_timestamp, get_log_line, Timestamp, \ get_expirer_container from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_object_creation, \ valid_timestamp, check_utf8 from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \ DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \ DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout, \ DiskFileXattrNotSupported from swift.obj import ssync_receiver from swift.common.http import is_success from swift.common.base_storage_server import BaseStorageServer from swift.common.request_helpers import get_name_and_placement, \ is_user_meta, is_sys_or_user_meta from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ HTTPInternalServerError, HTTPNoContent, HTTPNotFound, \ HTTPPreconditionFailed, HTTPRequestTimeout, HTTPUnprocessableEntity, \ HTTPClientDisconnect, HTTPMethodNotAllowed, Request, Response, \ HTTPInsufficientStorage, HTTPForbidden, HTTPException, HeaderKeyDict, \ HTTPConflict from swift.obj.diskfile import DATAFILE_SYSTEM_META, DiskFileManager class EventletPlungerString(str): """ Eventlet won't send headers until it's accumulated at least eventlet.wsgi.MINIMUM_CHUNK_SIZE bytes or the app iter is exhausted. If we want to send the response body behind Eventlet's back, perhaps with some zero-copy wizardry, then we have to unclog the plumbing in eventlet.wsgi to force the headers out, so we use an EventletPlungerString to empty out all of Eventlet's buffers. """ def __len__(self): return wsgi.MINIMUM_CHUNK_SIZE + 1 class ObjectController(BaseStorageServer): """Implements the WSGI application for the Swift Object Server.""" server_type = 'object-server' def __init__(self, conf, logger=None): """ Creates a new WSGI application for the Swift Object Server. An example configuration is given at <source-dir>/etc/object-server.conf-sample or /etc/swift/object-server.conf-sample. """ super(ObjectController, self).__init__(conf) self.logger = logger or get_logger(conf, log_route='object-server') self.node_timeout = int(conf.get('node_timeout', 3)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) self.client_timeout = int(conf.get('client_timeout', 60)) self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536)) self.network_chunk_size = int(conf.get('network_chunk_size', 65536)) self.log_requests = config_true_value(conf.get('log_requests', 'true')) self.max_upload_time = int(conf.get('max_upload_time', 86400)) self.slow = int(conf.get('slow', 0)) self.keep_cache_private = \ config_true_value(conf.get('keep_cache_private', 'false')) default_allowed_headers = ''' content-disposition, content-encoding, x-delete-at, x-object-manifest, x-static-large-object, ''' extra_allowed_headers = [ header.strip().lower() for header in conf.get( 'allowed_headers', default_allowed_headers).split(',') if header.strip() ] self.allowed_headers = set() for header in extra_allowed_headers: if header not in DATAFILE_SYSTEM_META: self.allowed_headers.add(header) self.auto_create_account_prefix = \ conf.get('auto_create_account_prefix') or '.' 
self.expiring_objects_account = self.auto_create_account_prefix + \ (conf.get('expiring_objects_account_name') or 'expiring_objects') self.expiring_objects_container_divisor = \ int(conf.get('expiring_objects_container_divisor') or 86400) # Initialization was successful, so now apply the network chunk size # parameter as the default read / write buffer size for the network # sockets. # # NOTE WELL: This is a class setting, so until we get set this on a # per-connection basis, this affects reading and writing on ALL # sockets, those between the proxy servers and external clients, and # those between the proxy servers and the other internal servers. # # ** Because the primary motivation for this is to optimize how data # is written back to the proxy server, we could use the value from the # disk_chunk_size parameter. However, it affects all created sockets # using this class so we have chosen to tie it to the # network_chunk_size parameter value instead. socket._fileobject.default_bufsize = self.network_chunk_size # Provide further setup specific to an object server implementation. self.setup(conf) def setup(self, conf): """ Implementation specific setup. This method is called at the very end by the constructor to allow a specific implementation to modify existing attributes or add its own attributes. :param conf: WSGI configuration parameter """ # Common on-disk hierarchy shared across account, container and object # servers. self._diskfile_mgr = DiskFileManager(conf, self.logger) # This is populated by global_conf_callback way below as the semaphore # is shared by all workers. if 'replication_semaphore' in conf: # The value was put in a list so it could get past paste self.replication_semaphore = conf['replication_semaphore'][0] else: self.replication_semaphore = None self.replication_failure_threshold = int( conf.get('replication_failure_threshold') or 100) self.replication_failure_ratio = float( conf.get('replication_failure_ratio') or 1.0) def get_diskfile(self, device, partition, account, container, obj, policy_idx, **kwargs): """ Utility method for instantiating a DiskFile object supporting a given REST API. An implementation of the object server that wants to use a different DiskFile class would simply over-ride this method to provide that behavior. """ return self._diskfile_mgr.get_diskfile( device, partition, account, container, obj, policy_idx, **kwargs) def async_update(self, op, account, container, obj, host, partition, contdevice, headers_out, objdevice, policy_index): """ Sends or saves an async update. 
:param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param host: host that the container is on :param partition: partition that the container is on :param contdevice: device name that the container is on :param headers_out: dictionary of headers to send in the container request :param objdevice: device name that the object is in :param policy_index: the associated storage policy index """ headers_out['user-agent'] = 'object-server %s' % os.getpid() full_path = '/%s/%s/%s' % (account, container, obj) if all([host, partition, contdevice]): try: with ConnectionTimeout(self.conn_timeout): ip, port = host.rsplit(':', 1) conn = http_connect(ip, port, contdevice, partition, op, full_path, headers_out) with Timeout(self.node_timeout): response = conn.getresponse() response.read() if is_success(response.status): return else: self.logger.error(_( 'ERROR Container update failed ' '(saving for async update later): %(status)d ' 'response from %(ip)s:%(port)s/%(dev)s'), {'status': response.status, 'ip': ip, 'port': port, 'dev': contdevice}) except (Exception, Timeout): self.logger.exception(_( 'ERROR container update failed with ' '%(ip)s:%(port)s/%(dev)s (saving for async update later)'), {'ip': ip, 'port': port, 'dev': contdevice}) data = {'op': op, 'account': account, 'container': container, 'obj': obj, 'headers': headers_out} timestamp = headers_out['x-timestamp'] self._diskfile_mgr.pickle_async_update(objdevice, account, container, obj, data, timestamp, policy_index) def container_update(self, op, account, container, obj, request, headers_out, objdevice, policy_idx): """ Update the container when objects are updated. :param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param request: the original request object driving the update :param headers_out: dictionary of headers to send in the container request(s) :param objdevice: device name that the object is in """ headers_in = request.headers conthosts = [h.strip() for h in headers_in.get('X-Container-Host', '').split(',')] contdevices = [d.strip() for d in headers_in.get('X-Container-Device', '').split(',')] contpartition = headers_in.get('X-Container-Partition', '') if len(conthosts) != len(contdevices): # This shouldn't happen unless there's a bug in the proxy, # but if there is, we want to know about it. self.logger.error(_('ERROR Container update failed: different ' 'numbers of hosts and devices in request: ' '"%s" vs "%s"') % (headers_in.get('X-Container-Host', ''), headers_in.get('X-Container-Device', ''))) return if contpartition: updates = zip(conthosts, contdevices) else: updates = [] headers_out['x-trans-id'] = headers_in.get('x-trans-id', '-') headers_out['referer'] = request.as_referer() headers_out['X-Backend-Storage-Policy-Index'] = policy_idx for conthost, contdevice in updates: self.async_update(op, account, container, obj, conthost, contpartition, contdevice, headers_out, objdevice, policy_idx) def delete_at_update(self, op, delete_at, account, container, obj, request, objdevice, policy_index): """ Update the expiring objects container when objects are updated. 
:param op: operation performed (ex: 'PUT', or 'DELETE') :param delete_at: scheduled delete in UNIX seconds, int :param account: account name for the object :param container: container name for the object :param obj: object name :param request: the original request driving the update :param objdevice: device name that the object is in :param policy_index: the policy index to be used for tmp dir """ if config_true_value( request.headers.get('x-backend-replication', 'f')): return delete_at = normalize_delete_at_timestamp(delete_at) updates = [(None, None)] partition = None hosts = contdevices = [None] headers_in = request.headers headers_out = HeaderKeyDict({ # system accounts are always Policy-0 'X-Backend-Storage-Policy-Index': 0, 'x-timestamp': request.timestamp.internal, 'x-trans-id': headers_in.get('x-trans-id', '-'), 'referer': request.as_referer()}) if op != 'DELETE': delete_at_container = headers_in.get('X-Delete-At-Container', None) if not delete_at_container: self.logger.warning( 'X-Delete-At-Container header must be specified for ' 'expiring objects background %s to work properly. Making ' 'best guess as to the container name for now.' % op) # TODO(gholt): In a future release, change the above warning to # a raised exception and remove the guess code below. delete_at_container = get_expirer_container( delete_at, self.expiring_objects_container_divisor, account, container, obj) partition = headers_in.get('X-Delete-At-Partition', None) hosts = headers_in.get('X-Delete-At-Host', '') contdevices = headers_in.get('X-Delete-At-Device', '') updates = [upd for upd in zip((h.strip() for h in hosts.split(',')), (c.strip() for c in contdevices.split(','))) if all(upd) and partition] if not updates: updates = [(None, None)] headers_out['x-size'] = '0' headers_out['x-content-type'] = 'text/plain' headers_out['x-etag'] = 'd41d8cd98f00b204e9800998ecf8427e' else: # DELETEs of old expiration data have no way of knowing what the # old X-Delete-At-Container was at the time of the initial setting # of the data, so a best guess is made here. # Worst case is a DELETE is issued now for something that doesn't # exist there and the original data is left where it is, where # it will be ignored when the expirer eventually tries to issue the # object DELETE later since the X-Delete-At value won't match up. 
delete_at_container = get_expirer_container( delete_at, self.expiring_objects_container_divisor, account, container, obj) delete_at_container = normalize_delete_at_timestamp( delete_at_container) for host, contdevice in updates: self.async_update( op, self.expiring_objects_account, delete_at_container, '%s-%s/%s/%s' % (delete_at, account, container, obj), host, partition, contdevice, headers_out, objdevice, policy_index) @public @timing_stats() def POST(self, request): """Handle HTTP POST requests for the Swift Object Server.""" device, partition, account, container, obj, policy_idx = \ get_name_and_placement(request, 5, 5, True) print("----------------------------------------------------: In POST") req_timestamp = valid_timestamp(request) new_delete_at = int(request.headers.get('X-Delete-At') or 0) if new_delete_at and new_delete_at < time.time(): return HTTPBadRequest(body='X-Delete-At in past', request=request, content_type='text/plain') try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy_idx=policy_idx) except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: orig_metadata = disk_file.read_metadata() except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except (DiskFileNotExist, DiskFileQuarantined): return HTTPNotFound(request=request) orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0)) if orig_timestamp >= req_timestamp: return HTTPConflict( request=request, headers={'X-Backend-Timestamp': orig_timestamp.internal}) metadata = {'X-Timestamp': req_timestamp.internal} metadata.update(val for val in request.headers.iteritems() if is_user_meta('object', val[0])) for header_key in self.allowed_headers: if header_key in request.headers: header_caps = header_key.title() metadata[header_caps] = request.headers[header_key] orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0) if orig_delete_at != new_delete_at: if new_delete_at: self.delete_at_update('PUT', new_delete_at, account, container, obj, request, device, policy_idx) if orig_delete_at: self.delete_at_update('DELETE', orig_delete_at, account, container, obj, request, device, policy_idx) try: disk_file.write_metadata(metadata) except (DiskFileXattrNotSupported, DiskFileNoSpace): return HTTPInsufficientStorage(drive=device, request=request) return HTTPAccepted(request=request) @public @timing_stats() def PUT(self, request): """Handle HTTP PUT requests for the Swift Object Server.""" device, partition, account, container, obj, policy_idx = \ get_name_and_placement(request, 5, 5, True) req_timestamp = valid_timestamp(request) error_response = check_object_creation(request, obj) if error_response: return error_response new_delete_at = int(request.headers.get('X-Delete-At') or 0) if new_delete_at and new_delete_at < time.time(): return HTTPBadRequest(body='X-Delete-At in past', request=request, content_type='text/plain') try: fsize = request.message_length() except ValueError as e: return HTTPBadRequest(body=str(e), request=request, content_type='text/plain') try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy_idx=policy_idx) except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: orig_metadata = disk_file.read_metadata() except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except (DiskFileNotExist, DiskFileQuarantined): orig_metadata = {} # Checks for If-None-Match if 
request.if_none_match is not None and orig_metadata: if '*' in request.if_none_match: # File exists already so return 412 return HTTPPreconditionFailed(request=request) if orig_metadata.get('ETag') in request.if_none_match: # The current ETag matches, so return 412 return HTTPPreconditionFailed(request=request) orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0)) if orig_timestamp >= req_timestamp: return HTTPConflict( request=request, headers={'X-Backend-Timestamp': orig_timestamp.internal}) orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0) upload_expiration = time.time() + self.max_upload_time etag = md5() elapsed_time = 0 try: with disk_file.create(size=fsize) as writer: upload_size = 0 def timeout_reader(): with ChunkReadTimeout(self.client_timeout): return request.environ['wsgi.input'].read( self.network_chunk_size) try: ### IGNORE the following commented lines # if(os.path.ismount("/mnt/SSD")): # # f = open("/mnt/SSD/"+str(partition)+"_"+str(writer._name.split("/")[-1]),"a") # f = open("/mnt/SSD/"+str(partition),"a") # else: # print("===SSD is unmounted===") # f = open('/home/hduser/errorSSD.txt','w') #################################### SSD FILE TO WRITE TO ############################### f = open("/SSD/"+str(partition),"a") #################################### SSD FILE TO WRITE TO ############################### for chunk in iter(lambda: timeout_reader(), ''): start_time = time.time() f.write(chunk) f.close() if start_time > upload_expiration: self.logger.increment('PUT.timeouts') return HTTPRequestTimeout(request=request) etag.update(chunk) ############################ # os.write(f,chunk) ############################ upload_size = writer.write(chunk) elapsed_time += time.time() - start_time except ChunkReadTimeout: return HTTPRequestTimeout(request=request) if upload_size: self.logger.transfer_rate( 'PUT.' 
+ device + '.timing', elapsed_time, upload_size) if fsize is not None and fsize != upload_size: return HTTPClientDisconnect(request=request) etag = etag.hexdigest() if 'etag' in request.headers and \ request.headers['etag'].lower() != etag: return HTTPUnprocessableEntity(request=request) metadata = { 'X-Timestamp': request.timestamp.internal, 'Content-Type': request.headers['content-type'], 'ETag': etag, 'Content-Length': str(upload_size), } metadata.update(val for val in request.headers.iteritems() if is_sys_or_user_meta('object', val[0])) headers_to_copy = ( request.headers.get( 'X-Backend-Replication-Headers', '').split() + list(self.allowed_headers)) for header_key in headers_to_copy: if header_key in request.headers: header_caps = header_key.title() metadata[header_caps] = request.headers[header_key] writer.put(metadata) except (DiskFileXattrNotSupported, DiskFileNoSpace): return HTTPInsufficientStorage(drive=device, request=request) if orig_delete_at != new_delete_at: if new_delete_at: self.delete_at_update( 'PUT', new_delete_at, account, container, obj, request, device, policy_idx) if orig_delete_at: self.delete_at_update( 'DELETE', orig_delete_at, account, container, obj, request, device, policy_idx) update_headers = HeaderKeyDict({ 'x-size': metadata['Content-Length'], 'x-content-type': metadata['Content-Type'], 'x-timestamp': metadata['X-Timestamp'], 'x-etag': metadata['ETag']}) # apply any container update header overrides sent with request for key, val in request.headers.iteritems(): override_prefix = 'x-backend-container-update-override-' if key.lower().startswith(override_prefix): override = key.lower().replace(override_prefix, 'x-') update_headers[override] = val self.container_update( 'PUT', account, container, obj, request, update_headers, device, policy_idx) return HTTPCreated(request=request, etag=etag) @public @timing_stats() def GET(self, request): """Handle HTTP GET requests for the Swift Object Server.""" device, partition, account, container, obj, policy_idx = \ get_name_and_placement(request, 5, 5, True) keep_cache = self.keep_cache_private or ( 'X-Auth-Token' not in request.headers and 'X-Storage-Token' not in request.headers) try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy_idx=policy_idx) except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: with disk_file.open(): metadata = disk_file.get_metadata() obj_size = int(metadata['Content-Length']) file_x_ts = Timestamp(metadata['X-Timestamp']) keep_cache = (self.keep_cache_private or ('X-Auth-Token' not in request.headers and 'X-Storage-Token' not in request.headers)) response = Response( app_iter=disk_file.reader(keep_cache=keep_cache), request=request, conditional_response=True) response.headers['Content-Type'] = metadata.get( 'Content-Type', 'application/octet-stream') for key, value in metadata.iteritems(): if is_sys_or_user_meta('object', key) or \ key.lower() in self.allowed_headers: response.headers[key] = value response.etag = metadata['ETag'] response.last_modified = math.ceil(float(file_x_ts)) response.content_length = obj_size try: response.content_encoding = metadata[ 'Content-Encoding'] except KeyError: pass response.headers['X-Timestamp'] = file_x_ts.normal response.headers['X-Backend-Timestamp'] = file_x_ts.internal resp = request.get_response(response) except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except (DiskFileNotExist, DiskFileQuarantined) as e: headers = {} if 
hasattr(e, 'timestamp'): headers['X-Backend-Timestamp'] = e.timestamp.internal resp = HTTPNotFound(request=request, headers=headers, conditional_response=True) return resp @public @timing_stats(sample_rate=0.8) def HEAD(self, request): """Handle HTTP HEAD requests for the Swift Object Server.""" device, partition, account, container, obj, policy_idx = \ get_name_and_placement(request, 5, 5, True) try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy_idx=policy_idx) except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: metadata = disk_file.read_metadata() except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except (DiskFileNotExist, DiskFileQuarantined) as e: headers = {} if hasattr(e, 'timestamp'): headers['X-Backend-Timestamp'] = e.timestamp.internal return HTTPNotFound(request=request, headers=headers, conditional_response=True) response = Response(request=request, conditional_response=True) response.headers['Content-Type'] = metadata.get( 'Content-Type', 'application/octet-stream') for key, value in metadata.iteritems(): if is_sys_or_user_meta('object', key) or \ key.lower() in self.allowed_headers: response.headers[key] = value response.etag = metadata['ETag'] ts = Timestamp(metadata['X-Timestamp']) response.last_modified = math.ceil(float(ts)) # Needed for container sync feature response.headers['X-Timestamp'] = ts.normal response.headers['X-Backend-Timestamp'] = ts.internal response.content_length = int(metadata['Content-Length']) try: response.content_encoding = metadata['Content-Encoding'] except KeyError: pass return response @public @timing_stats() def DELETE(self, request): """Handle HTTP DELETE requests for the Swift Object Server.""" device, partition, account, container, obj, policy_idx = \ get_name_and_placement(request, 5, 5, True) req_timestamp = valid_timestamp(request) try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy_idx=policy_idx) except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: orig_metadata = disk_file.read_metadata() except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except DiskFileExpired as e: orig_timestamp = e.timestamp orig_metadata = e.metadata response_class = HTTPNotFound except DiskFileDeleted as e: orig_timestamp = e.timestamp orig_metadata = {} response_class = HTTPNotFound except (DiskFileNotExist, DiskFileQuarantined): orig_timestamp = 0 orig_metadata = {} response_class = HTTPNotFound else: orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0)) if orig_timestamp < req_timestamp: response_class = HTTPNoContent else: response_class = HTTPConflict response_timestamp = max(orig_timestamp, req_timestamp) orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0) try: req_if_delete_at_val = request.headers['x-if-delete-at'] req_if_delete_at = int(req_if_delete_at_val) except KeyError: pass except ValueError: return HTTPBadRequest( request=request, body='Bad X-If-Delete-At header value') else: # request includes x-if-delete-at; we must not place a tombstone # if we can not verify the x-if-delete-at time if not orig_timestamp: # no object found at all return HTTPNotFound() if orig_delete_at != req_if_delete_at: return HTTPPreconditionFailed( request=request, body='X-If-Delete-At and X-Delete-At do not match') else: # differentiate success from no object at all response_class = HTTPNoContent if 
orig_delete_at: self.delete_at_update('DELETE', orig_delete_at, account, container, obj, request, device, policy_idx) if orig_timestamp < req_timestamp: disk_file.delete(req_timestamp) self.container_update( 'DELETE', account, container, obj, request, HeaderKeyDict({'x-timestamp': req_timestamp.internal}), device, policy_idx) return response_class( request=request, headers={'X-Backend-Timestamp': response_timestamp.internal}) @public @replication @timing_stats(sample_rate=0.1) def REPLICATE(self, request): """ Handle REPLICATE requests for the Swift Object Server. This is used by the object replicator to get hashes for directories. """ device, partition, suffix, policy_idx = \ get_name_and_placement(request, 2, 3, True) try: hashes = self._diskfile_mgr.get_hashes(device, partition, suffix, policy_idx) except DiskFileDeviceUnavailable: resp = HTTPInsufficientStorage(drive=device, request=request) else: resp = Response(body=pickle.dumps(hashes)) return resp @public @replication @timing_stats(sample_rate=0.1) def REPLICATION(self, request): return Response(app_iter=ssync_receiver.Receiver(self, request)()) def __call__(self, env, start_response): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() req = Request(env) self.logger.txn_id = req.headers.get('x-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8 or contains NULL') else: try: # disallow methods which have not been marked 'public' try: if req.method not in self.allowed_methods: raise AttributeError('Not allowed method.') except AttributeError: res = HTTPMethodNotAllowed() else: method = getattr(self, req.method) res = method(req) except DiskFileCollision: res = HTTPForbidden(request=req) except HTTPException as error_response: res = error_response except (Exception, Timeout): self.logger.exception(_( 'ERROR __call__ error with %(method)s' ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) trans_time = time.time() - start_time if self.log_requests: log_line = get_log_line(req, res, trans_time, '') if req.method in ('REPLICATE', 'REPLICATION') or \ 'X-Backend-Replication' in req.headers: self.logger.debug(log_line) else: self.logger.info(log_line) if req.method in ('PUT', 'DELETE'): slow = self.slow - trans_time if slow > 0: sleep(slow) # To be able to zero-copy send the object, we need a few things. # First, we have to be responding successfully to a GET, or else we're # not sending the object. Second, we have to be able to extract the # socket file descriptor from the WSGI input object. Third, the # diskfile has to support zero-copy send. # # There's a good chance that this could work for 206 responses too, # but the common case is sending the whole object, so we'll start # there. if req.method == 'GET' and res.status_int == 200 and \ isinstance(env['wsgi.input'], wsgi.Input): app_iter = getattr(res, 'app_iter', None) checker = getattr(app_iter, 'can_zero_copy_send', None) if checker and checker(): # For any kind of zero-copy thing like sendfile or splice, we # need the file descriptor. Eventlet doesn't provide a clean # way of getting that, so we resort to this. wsock = env['wsgi.input'].get_socket() wsockfd = wsock.fileno() # Don't call zero_copy_send() until after we force the HTTP # headers out of Eventlet and into the socket. 
def zero_copy_iter(): # If possible, set TCP_CORK so that headers don't # immediately go on the wire, but instead, wait for some # response body to make the TCP frames as large as # possible (and hence as few packets as possible). # # On non-Linux systems, we might consider TCP_NODELAY, but # since the only known zero-copy-capable diskfile uses # Linux-specific syscalls, we'll defer that work until # someone needs it. if hasattr(socket, 'TCP_CORK'): wsock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1) yield EventletPlungerString() try: app_iter.zero_copy_send(wsockfd) except Exception: self.logger.exception("zero_copy_send() blew up") raise yield '' # Get headers ready to go out res(env, start_response) return zero_copy_iter() else: return res(env, start_response) else: return res(env, start_response) def global_conf_callback(preloaded_app_conf, global_conf): """ Callback for swift.common.wsgi.run_wsgi during the global_conf creation so that we can add our replication_semaphore, used to limit the number of concurrent REPLICATION_REQUESTS across all workers. :param preloaded_app_conf: The preloaded conf for the WSGI app. This conf instance will go away, so just read from it, don't write. :param global_conf: The global conf that will eventually be passed to the app_factory function later. This conf is created before the worker subprocesses are forked, so can be useful to set up semaphores, shared memory, etc. """ replication_concurrency = int( preloaded_app_conf.get('replication_concurrency') or 4) if replication_concurrency: # Have to put the value in a list so it can get past paste global_conf['replication_semaphore'] = [ multiprocessing.BoundedSemaphore(replication_concurrency)] def app_factory(global_conf, **local_conf): """paste.deploy app factory for creating WSGI object server apps""" conf = global_conf.copy() conf.update(local_conf) return ObjectController(conf)
import multiprocessing
import time
import traceback
Html5Fill.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var Html5Fill = { name: 'html5', theme: 'fill', icon: { tag: 'svg', attrs: { viewBox: '64 64 896 896', focusable: false }, children: [ { tag: 'path', attrs: { d: 'M145.2 96l66 746.6L512 928l299.6-85.4L878.9 96H145.2zm595 177.1l-4.8 47.2-1.7 19.5H382.3l8.2 94.2h335.1l-3.3 24.3-21.2 242.2-1.7 16.2-187 51.6v.3h-1.2l-.3.1v-.1h-.1l-188.6-52L310.8 572h91.1l6.5 73.2 102.4 27.7h.4l102-27.6 11.4-118.6H510.9v-.1H306l-22.8-253.5-1.7-24.3h460.3l-1.6 24.3z' } } ] }
};
exports.default = Html5Fill;
4K2Taiko.py
from os.path import splitext
import sys
from msvcrt import getch
import traceback

class
(Exception): def __init__(self, msg): super().__init__(msg) print('4K2Taiko v1.0.1') print('by Jakads\n') # need exactly one .osu file dragged in if len(sys.argv) != 2: print('drag single file into this program.') getch() sys.exit() print('1. ddkk') print('2. kddk') print('3. kkdd') print('4. dkkd') # repeat until 1~4 is pressed while True: try: play_type = int(getch()) if 1 <= play_type <= 4: break except: pass # set IS_KAT for each column if play_type == 1: IS_KAT = [False, False, True, True] elif play_type == 2: IS_KAT = [True, False, False, True] elif play_type == 3: IS_KAT = [True, True, False, False] else: IS_KAT = [False, True, True, False] # file_path = folder\file_name.osu file_path = sys.argv[1] # new_file_path = folder\file_name_taiko[play_type].osu new_file_path = splitext(file_path)[0] + '_taiko' + str(play_type) + '.osu' try: # open file and read all the lines (including \n) with open(file_path, encoding='utf-8') as osu: content = osu.readlines() # metadata_list = all the lines before [TimingPoints] # timing_list = timing points ([TimingPoints] ~ [HitObjects]) # object_list = objects ([HitObjects] ~ EOF) timing_index = content.index('[TimingPoints]\n') object_index = content.index('[HitObjects]\n') metadata_list = content[:timing_index+1] timing_list = content[timing_index+1:object_index] object_list = content[object_index+1:] # save the new converted file content to new_osu_list new_osu_list = [] for line in metadata_list: # change the game mode to taiko if line.startswith('Mode:'): new_line = 'Mode: 1\n' # add (taiko convert_[play_type]) to the difficulty name elif line.startswith('Version:'): original_version = line[8:-1] new_version = f'{original_version} (taiko convert_{play_type})' new_line = f'Version:{new_version}\n' # set BeatmapID to 0 so it's as if a diff is added to the mapset elif line.startswith('BeatmapID:'): new_line = 'BeatmapID:0\n' else: new_line = line new_osu_list.append(new_line) keys = 4 # colrange = [0, 128, 256, 384, 512] colrange = [512*column/keys for column in range(keys+1)] # bpm_dict = {offset: bpm} bpm_dict = {} # note_dict = {offset: dk} note_dict = {} # save bpm to bpm_dict for timing in timing_list: timing_element = timing.split(',') # if it's a valid timing point: # time,beatLength,meter,sampleSet,sampleIndex,volume,uninherited,effects if len(timing_element) == 8: Uninherited = int(timing_element[6]) if Uninherited: offset = float(timing_element[0]) # beatLength = ms per beat # beats/min = (60s/min) * (1000s/ms) / (ms/beat) bpm = 60000 / float(timing_element[1]) bpm_dict[offset] = bpm new_osu_list.append(timing) # save note to note_dict for note in object_list: note_element = note.split(',') # hit object = x,y,time,type,hitSound,objectParams,hitSample # type is written in binary # 1 = hitcircle, 2 = slider, 4 = newcombo, 8 = spinner, # 16~64 = related to combo colors, 128 = mania LN # for circles: no objectParams needed # for sliders: objectParams = curveType|curvePoints,slides,length,edgeSounds,edgeSets # for spinners & LNs: objectParams = endTime x = int(note_element[0]) offset = int(note_element[2]) # LN type = 128 or 128+4 LN = True if note_element[3] == '128' or note_element[3] == '132' else False if LN: offset_end = int(note_element[5].split(':')[0]) # if x is 0~128: it's col 1, and so on for i in range(keys): if colrange[i] <= x <= colrange[i+1]: column = i # add key if the offset key is not present in note_dict if offset not in note_dict: note_dict[offset] = [] # if regular note: append [False, if the col is assigned as kat] if not 
LN: note_dict[offset].append([LN, IS_KAT[column]]) # if LN: append [True, end ms of the LN] else: note_dict[offset].append([LN, offset_end]) new_osu_list.append('\n[HitObjects]\n') # set default value to impossibly small one ln_end_offset = -1 for offset, notes in note_dict.items(): don_count, kat_count, ln_count = 0, 0, 0 for note in notes: # whether it's LN or not LN = note[0] if not LN: # if the col is assigned as kat if note[1]: kat_count += 1 else: don_count += 1 else: ln_count += 1 offset_end = note[1] count = ln_count * 100 + kat_count * 10 + don_count Slider = False LNCheck = False if count == 1: # don extra = '1,0' elif count == 2: # DON extra = '1,4' elif count == 10: # kat extra = '1,8' elif count == 20: # KAT extra = '1,12' elif count == 100: # slider extra = '2,0,' Slider = True elif count == 200: # SLIDER extra = '2,4,' Slider = True LNCheck = True elif count == 400: # spinner extra = '8,0,' + str(offset_end) LNCheck = True else: # invalid raise InvalidPatternException(f'invalid pattern at {offset}ms') # if a note or LN overlap with a LN if offset <= ln_end_offset: raise InvalidPatternException(f'invalid pattern at {offset}ms') if LNCheck: for note in notes: # if LNs at current ms doesn't end at same ms if note[1] != offset_end: raise InvalidPatternException(f'invalid pattern at {offset_end}ms') # if Slider is created if Slider: # find current bpm for bpm_offset in bpm_dict.keys(): if offset >= bpm_offset: bpm_offset_now = bpm_offset else: break bpm = bpm_dict[bpm_offset] length = offset_end - offset # round the beat to 1/16, 1/12 snaps beat = round(length / (60000 / bpm) * 48) / 48 # calculated out of trial and error slider_length = beat * 130 # linear slider from (256,192) to (257,192) extra += 'L|257:192,1,' + str(slider_length) ln_end_offset = offset_end new_osu_list.append(f'256,192,{offset},{extra}\n') # write new osu file according to new_osu_list with open(new_file_path,mode='w',encoding='utf-8') as osu: for line in new_osu_list: osu.write(line) print('done.') getch() sys.exit() except: # if any exceptions occur, print the traceback print(traceback.format_exc()) getch() sys.exit()
InvalidPatternException
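The slider branch in this record turns a hold's duration into an osu! slider length by snapping it to 1/48 of a beat (a grid that covers both the 1/16 and 1/12 subdivisions the comment mentions) and scaling by the script's trial-and-error factor of 130. A worked Python sketch of that arithmetic with hypothetical values:

    bpm = 180.0
    length_ms = 500                    # offset_end - offset, the hold duration
    ms_per_beat = 60000 / bpm          # 333.33... ms per beat
    beat = round(length_ms / ms_per_beat * 48) / 48
    slider_length = beat * 130         # empirical scaling factor from the script
    print(beat, slider_length)         # 1.5 195.0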
0014_auto_20180727_1635.py
# Generated by Django 2.0.5 on 2018-07-27 11:05

import django.core.validators
from django.db import migrations, models
    dependencies = [
        ('hospital', '0013_hospital_verified'),
    ]

    operations = [
        migrations.AlterField(
            model_name='hospital',
            name='phone_number',
            field=models.CharField(blank=True, help_text='Please enter valid email, it will be used for verification.', max_length=17, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format:\\ '+919999999999'.", regex='^\\+?1?\\d{9,15}$')]),
        ),
    ]
class Migration(migrations.Migration):
cast.rs
// Helpers for handling cast expressions, used in both
// typeck and codegen.

use crate::ty::{self, Ty};
use rustc_ast as ast;
use rustc_macros::HashStable;

/// Types that are represented as ints.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum
{
    U(ast::UintTy),
    I,
    CEnum,
    Bool,
    Char,
}

// Valid types for the result of a non-coercion cast
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CastTy<'tcx> {
    /// Various types that are represented as ints and handled mostly
    /// in the same way, merged for easier matching.
    Int(IntTy),
    /// Floating-Point types
    Float,
    /// Function Pointers
    FnPtr,
    /// Raw pointers
    Ptr(ty::TypeAndMut<'tcx>),
}

/// Cast Kind. See RFC 401 (or librustc_typeck/check/cast.rs)
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
pub enum CastKind {
    CoercionCast,
    PtrPtrCast,
    PtrAddrCast,
    AddrPtrCast,
    NumericCast,
    EnumCast,
    PrimIntCast,
    U8CharCast,
    ArrayPtrCast,
    FnPtrPtrCast,
    FnPtrAddrCast,
}

impl<'tcx> CastTy<'tcx> {
    /// Returns `Some` for integral/pointer casts.
    /// casts like unsizing casts will return `None`
    pub fn from_ty(t: Ty<'tcx>) -> Option<CastTy<'tcx>> {
        match t.kind {
            ty::Bool => Some(CastTy::Int(IntTy::Bool)),
            ty::Char => Some(CastTy::Int(IntTy::Char)),
            ty::Int(_) => Some(CastTy::Int(IntTy::I)),
            ty::Infer(ty::InferTy::IntVar(_)) => Some(CastTy::Int(IntTy::I)),
            ty::Infer(ty::InferTy::FloatVar(_)) => Some(CastTy::Float),
            ty::Uint(u) => Some(CastTy::Int(IntTy::U(u))),
            ty::Float(_) => Some(CastTy::Float),
            ty::Adt(d, _) if d.is_enum() && d.is_payloadfree() => Some(CastTy::Int(IntTy::CEnum)),
            ty::RawPtr(mt) => Some(CastTy::Ptr(mt)),
            ty::FnPtr(..) => Some(CastTy::FnPtr),
            _ => None,
        }
    }
}
IntTy
Promises.js
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import React, { Component } from 'react';
import PropTypes from 'prop-types';

function load() {
  return Promise.resolve([
    { id: 1, name: '1' },
    { id: 2, name: '2' },
    { id: 3, name: '3' },
    { id: 4, name: '4' },
  ]);
}

export default class extends Component {
  static propTypes = {
    onReady: PropTypes.func.isRequired,
  };

  constructor(props) {
    super(props);
    this.state = { users: [] };
  }

  componentDidMount() {
    load().then(users => {
      this.setState({ users });
    });
  }
    this.props.onReady();
  }

  render() {
    return (
      <div id="feature-promises">
        {this.state.users.map(user => (
          <div key={user.id}>{user.name}</div>
        ))}
      </div>
    );
  }
}
componentDidUpdate() {
views.py
from django.shortcuts import redirect, render from django.views.generic.base import TemplateView from django.views.generic.edit import CreateView from django.urls import reverse_lazy from django.contrib.auth import login from django.contrib import messages from django.db import transaction from .forms import CustomUserCreateForm from . import models as mainModels from . import forms as mainForms from . import utils from judge import tasks, problem # Create your views here. class SignupView(CreateView): template_name = "registration/signup.html" form_class = CustomUserCreateForm success_url = reverse_lazy("mainApp:index") def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs["cap-token"] = self.request.POST.get("g-recaptcha-response", "") return kwargs def form_valid(self, form): self.object = form.save() login(self.request, self.object) messages.info(self.request, self.object.username + "님, 회원가입을 환영합니다.") return redirect(self.get_success_url()) class IndexView(TemplateView): template_name = "mainApp/index.html" class ProblemListView(TemplateView): template_name = "mainApp/problem-list.html" def dispatch(self, request, *args, **kwargs): problem_per_page = 10 # 한 페이지에 보여줄 문제수 cache = mainModels.ProblemPost.objects.filter(show=True) kwargs["problem_total_count"] = cache.count() # 문제 총 개수 kwargs["last_page"] = kwargs["problem_total_count"] // problem_per_page + 1 # 마지막 페이지 번호 if kwargs["problem_total_count"] % problem_per_page == 0: kwargs["last_page"] -= 1 # 현재 페이지가 유효범위 안에 있어야 함 or 문제가 하나도 없으면 OK if not (1 <= kwargs["current_page"] <= kwargs["last_page"]) \ and not (kwargs["current_page"] == 1 and kwargs["last_page"] == 0): messages.info(request, "문제가 존재하지 않습니다.") return redirect("mainApp:index") kwargs["pages"] = range(1, kwargs["last_page"] + 1) show_start_range = (kwargs["current_page"] - 1) * problem_per_page show_end_range = show_start_range + problem_per_page kwargs["problems"] = cache.order_by("pk")[show_start_range:show_end_range] # 현재 페이지에 보여줄 문제 목록 return super().dispatch(request, *args, **kwargs) class ProblemView(TemplateView): template_name = "mainApp/problem.html" def dispatch(self, request, *args, **kwargs): # 현재 문제가 존재해야 됨 result = mainModels.ProblemPost.objects.filter(pk=kwargs["pk"], show=True) if not result.exists(): messages.info(request, "문제가 존재하지 않습니다.") return redirect("mainApp:index") kwargs["problem"] = result[0] kwargs["full_absolute_url"] = request.build_absolute_uri(result[0].get_absolute_url()) return super().dispatch(request, *args, **kwargs) class ProblemSubmitView(CreateView): template_name = "mainApp/problem-submit.html" form_class = mainForms.SolvePostForm def dispatch(self, request, *args, **kwargs): try: self.kwargs["problem"] = mainModels.ProblemPost.objects.filter(pk=self.kwargs["problem_pk"], show=True).first() if not request.user.is_authenticated: messages.info(request, "로그인을 해주세요.") return redirect(self.kwargs["problem"].get_absolute_url()) except: return redirect("mainApp:problems", current_page=1) return super().dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["problem"] = self.kwargs["problem"] return context def get_success_url(self): return self.kwargs["problem"].get_absolute_status_url() def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs["user"] = self.request.user kwargs["ip"] = utils.get_real_ip(self.request) kwargs["problem"] = self.kwargs["problem"] return kwargs def form_valid(self, form): self.object = form.save(commit=False) 
        self.object.user_pk = self.request.user
        self.object.problem_pk = self.kwargs["problem"]
        self.object.ip = utils.get_real_ip(self.request)
        self.object.save()
        tasks.activate_judge()  # start the judging worker
        return redirect(self.get_success_url())


class ProblemStatusView(TemplateView):
    template_name = "mainApp/problem-status.html"

    def get(self, request, *args, **kwargs):
        submit_per_page = 10  # number of submissions shown per page
        single_mode = False
        if "problem_pk" in kwargs:
            # If a problem matching problem_pk exists,
                self.object.creator = self.request.user
                self.object.save()
                problem.save_testcase(self.object.pk, form.cleaned_data["input_file"], form.cleaned_data["output_file"])
        except Exception:
            messages.warning(self.request, "문제 생성에 실패했습니다.")
            return render(self.request, self.template_name, {"form": form})
        messages.info(self.request, "문제가 생성되었습니다.")
        return redirect("mainApp:problem", pk=self.object.pk)
            # load only the judging status for that problem
            result = mainModels.ProblemPost.objects.filter(pk=kwargs["problem_pk"], show=True)
            if result.exists():
                submits = mainModels.SolvePost.objects.filter(problem_pk=result.first(), show=True).order_by("-pk")
                kwargs["heading"] = str(kwargs["problem_pk"]) + "번 문제 채점 현황"
                single_mode = True
        if not single_mode:
            # No such problem, so load the judging status for every problem
            submits = mainModels.SolvePost.objects.select_related("problem_pk").filter(show=True, problem_pk__show=True).order_by("-pk")
            kwargs["heading"] = "전체 채점 현황"
        kwargs["single_mode"] = single_mode
        kwargs["total_count"] = submits.count()  # total number of submissions
        # Last page number (same ceiling-division idiom as ProblemListView)
        kwargs["last_page"] = kwargs["total_count"] // submit_per_page + 1
        if kwargs["total_count"] % submit_per_page == 0:
            kwargs["last_page"] -= 1
        # The current page must be within the valid range, or there must be no submissions at all
        if not (1 <= kwargs["current_page"] <= kwargs["last_page"]) \
                and not (kwargs["current_page"] == 1 and kwargs["last_page"] == 0):
            return redirect("mainApp:index")
        kwargs["pages"] = range(1, kwargs["last_page"] + 1)
        show_start_range = (kwargs["current_page"] - 1) * submit_per_page
        show_end_range = show_start_range + submit_per_page
        kwargs["submits"] = submits[show_start_range:show_end_range]
        return super().get(request, *args, **kwargs)


class ProblemMakeView(CreateView):
    template_name = "mainApp/problem-make.html"
    form_class = mainForms.CreateProblemForm

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs["cap-token"] = self.request.POST.get("g-recaptcha-response", "")
        return kwargs

    def form_valid(self, form):
        try:
            with transaction.atomic():
                self.object = form.save(commit=False)
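# Illustration (not part of the original file): the last-page arithmetic used by
# ProblemListView and ProblemStatusView above, integer division plus one and
# minus one again when the count divides evenly, is a hand-rolled ceiling
# division. A minimal standalone sketch of the equivalence; the helper name
# `last_page` is hypothetical:

import math


def last_page(total_count: int, per_page: int) -> int:
    """Ceiling division; returns 0 when there are no items at all."""
    return math.ceil(total_count / per_page)


# The hand-rolled idiom matches for every non-negative count:
for total in range(0, 100):
    hand_rolled = total // 10 + 1
    if total % 10 == 0:
        hand_rolled -= 1
    assert hand_rolled == last_page(total, 10)

# Django's built-in django.core.paginator.Paginator covers this pattern
# (num_pages) and would also handle the queryset slicing done manually above.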
main.rs
#![cfg(target_arch = "wasm32")] use wasm_bindgen_test::*; wasm_bindgen_test_configure!(run_in_browser); pub mod anchor_element; pub mod blob; pub mod body_element; pub mod br_element; pub mod button_element; pub mod console;
pub mod event; pub mod head_element; pub mod headers; pub mod heading_element; pub mod history; pub mod hr_element; pub mod html_element; pub mod html_html_element; pub mod image_data; pub mod input_element; //TODO: Both menu-related tests completely break in Chrome, but run fine in Firefox. //pub mod menu_element; //pub mod menu_item_element; pub mod dom_point; pub mod indexeddb; pub mod location; pub mod meta_element; pub mod meter_element; pub mod mod_elements; pub mod olist_element; pub mod optgroup_element; pub mod option_element; pub mod options_collection; pub mod output_element; pub mod paragraph_element; pub mod param_element; pub mod performance; pub mod pre_element; pub mod progress_element; pub mod quote_element; pub mod response; pub mod rtc_rtp_transceiver_direction; pub mod script_element; pub mod select_element; pub mod slot_element; pub mod span_element; pub mod style_element; pub mod table_element; pub mod title_element; pub mod whitelisted_immutable_slices; pub mod xpath_result; #[wasm_bindgen_test] fn deref_works() { fn _check(a: &web_sys::XmlHttpRequestUpload) { let _x: &web_sys::XmlHttpRequestEventTarget = a; let _x: &web_sys::EventTarget = a; let _x: &js_sys::Object = a; let _x: &wasm_bindgen::JsValue = a; } }
pub mod div_element; pub mod element;
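// Illustration (not part of the original file): the `deref_works` check above
// relies on web-sys types forming a Deref chain down to JsValue. The same
// compile-time pattern, sketched for a type exercised by the `div_element`
// module, assuming HtmlDivElement's usual inheritance chain:
#[wasm_bindgen_test]
fn div_deref_works() {
    fn _check(a: &web_sys::HtmlDivElement) {
        let _x: &web_sys::HtmlElement = a;
        let _x: &web_sys::Element = a;
        let _x: &web_sys::Node = a;
        let _x: &web_sys::EventTarget = a;
        let _x: &js_sys::Object = a;
        let _x: &wasm_bindgen::JsValue = a;
    }
}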
label.rs
//! ## Label
//!
//! label component

/**
 * MIT License
 *
 * tui-realm - Copyright (C) 2021 Christian Visintin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
use super::Msg;

use tuirealm::command::{Cmd, CmdResult};
use tuirealm::props::{Alignment, Color, Style, TextModifiers};
use tuirealm::tui::{layout::Rect, widgets::Paragraph};
use tuirealm::{
    AttrValue, Attribute, Component, Event, Frame, MockComponent, NoUserEvent, Props, State,
};

/// ## Label
///
/// Simple label component; it just renders a text.
/// NOTE: since I need just one label, I'm not defining a separate mock type; I implement Component for Label directly.
/// This is not ideal; in a real application you should keep Mock Components separate from Application Components.
pub struct Label {
    props: Props,
}

impl Default for Label {
    fn default() -> Self {
        Self {
            props: Props::default(),
        }
    }
}

impl Label {
    pub fn text<S>(mut self, s: S) -> Self
    where
        S: AsRef<str>,
    {
        self.attr(Attribute::Text, AttrValue::String(s.as_ref().to_string()));
        self
    }

    pub fn alignment(mut self, a: Alignment) -> Self {
        self.attr(Attribute::TextAlign, AttrValue::Alignment(a));
        self
    }

    pub fn foreground(mut self, c: Color) -> Self {
        self.attr(Attribute::Foreground, AttrValue::Color(c));
        self
    }

    pub fn background(mut self, c: Color) -> Self {
        self.attr(Attribute::Background, AttrValue::Color(c));
        self
    }

    pub fn modifiers(mut self, m: TextModifiers) -> Self {
        self.attr(Attribute::TextProps, AttrValue::TextModifiers(m));
        self
    }
}

impl MockComponent for Label {
    fn view(&mut self, frame: &mut Frame, area: Rect) {
        // Render only if the component is visible
        if self.props.get_or(Attribute::Display, AttrValue::Flag(true)) == AttrValue::Flag(true)
} fn query(&self, attr: Attribute) -> Option<AttrValue> { self.props.get(attr) } fn attr(&mut self, attr: Attribute, value: AttrValue) { self.props.set(attr, value); } fn state(&self) -> State { State::None } fn perform(&mut self, _: Cmd) -> CmdResult { CmdResult::None } } impl Component<Msg, NoUserEvent> for Label { fn on(&mut self, _: Event<NoUserEvent>) -> Option<Msg> { // Does nothing None } }
{ // Get properties let text = self .props .get_or(Attribute::Text, AttrValue::String(String::default())) .unwrap_string(); let alignment = self .props .get_or(Attribute::TextAlign, AttrValue::Alignment(Alignment::Left)) .unwrap_alignment(); let foreground = self .props .get_or(Attribute::Foreground, AttrValue::Color(Color::Reset)) .unwrap_color(); let background = self .props .get_or(Attribute::Background, AttrValue::Color(Color::Reset)) .unwrap_color(); let modifiers = self .props .get_or( Attribute::TextProps, AttrValue::TextModifiers(TextModifiers::empty()), ) .unwrap_text_modifiers(); frame.render_widget( Paragraph::new(text) .style( Style::default() .fg(foreground) .bg(background) .add_modifier(modifiers), ) .alignment(alignment), area, ); }
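// Illustration (not part of the original file): a sketch of how the builder
// methods above compose when constructing the label; the text and styling
// values here are arbitrary:
//
//     let label = Label::default()
//         .text("Waiting for a message...")
//         .alignment(Alignment::Center)
//         .foreground(Color::Yellow)
//         .modifiers(TextModifiers::BOLD);
//
// Each call simply writes an Attribute into the component's Props, which
// `view` reads back with `get_or` at render time.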
SmartTable.controller.js
sap.ui.define([ 'sap/ui/core/mvc/Controller', 'sap/ui/model/odata/v2/ODataModel', 'sap/ui/core/util/MockServer' ], function (Controller, ODataModel, MockServer) { "use strict"; return Controller.extend("sap.ui.comp.sample.smarttable.mtableHighlight.SmartTable", { onInit: function () { var oModel, oView; var oMockServer = new MockServer({ rootUri: "sapuicompsmarttablehlighlight/" }); this._oMockServer = oMockServer; oMockServer.simulate("test-resources/sap/ui/comp/demokit/sample/smarttable/mockserver/metadata.xml", "test-resources/sap/ui/comp/demokit/sample/smarttable/mockserver/"); oMockServer.start(); oModel = new ODataModel("sapuicompsmarttablehlighlight", { defaultCountMode: "Inline" }); oView = this.getView(); oView.setModel(oModel); }, formatRowHighlight: function (oValue) {
return "Warning"; } else if (oValue < 5) { return "None"; } return "Success"; }, onBeforeExport: function (oEvt) { var mExcelSettings = oEvt.getParameter("exportSettings"); // Disable Worker as Mockserver is used in Demokit sample mExcelSettings.worker = false; }, onExit: function () { this._oMockServer.stop(); } }); });
// Your logic for rowHighlight goes here if (oValue < 2) { return "Error"; } else if (oValue < 3) {
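// Illustration (not part of the original file): assembling prefix + middle +
// suffix above, formatRowHighlight maps a numeric cell value onto a row
// highlight state with three thresholds:
//
//     value < 2  -> "Error"
//     value < 3  -> "Warning"
//     value < 5  -> "None"
//     otherwise  -> "Success"
//
// e.g. [1, 2, 4, 7].map(formatRowHighlight) yields
// ["Error", "Warning", "None", "Success"].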
561. Array Partition I.js
/**
 * Given an array of 2n integers, your task is to group these integers into n pairs of integers,
 * say (a1, b1), (a2, b2), ..., (an, bn), which makes the sum of min(ai, bi) for all i from 1 to n as large as possible.
 *
 * Example 1:
 * Input: [1,4,3,2]
 *
 * Output: 4
 * Explanation: n is 2, and the maximum sum of pairs is 4.
 * Note:
 * n is a positive integer, which is in the range of [1, 10000].
 * All the integers in the array will be in the range of [-10000, 10000].
 *
 * Proof: http://www.voidcn.com/blog/huanghanqian/article/p-6572441.html
 * ①. Assume bi >= ai in every pair (ai, bi) (the order within a pair does not affect min, so swapping is harmless).
 * ②. Let Sm = min(a1, b1) + min(a2, b2) + ... + min(an, bn); the problem asks for the maximum Sm. By ①, Sm = a1 + a2 + ... + an.
 * ③. Let Sa = a1 + b1 + a2 + b2 + ... + an + bn. Sa is a constant.
 * ④. Let di = |ai - bi|. By ①, di = bi - ai. Let Sd = d1 + d2 + ... + dn.
 * ⑤. Then Sa = a1 + a1 + d1 + a2 + a2 + d2 + ... + an + an + dn = 2Sm + Sd, so Sm = (Sa - Sd) / 2.
 * To maximize Sm, note that Sa is a constant, so we want Sd to be as small as possible.
 * ⑥. The problem therefore reduces to: pair up the array elements (ai, bi) so that the sum of
 *    |ai - bi| (the distance between ai and bi) is minimized; sorting the array and pairing
 *    adjacent elements achieves this.
 */

/**
 * @param {number[]} nums
 * @return {number}
 */
var arrayPairSum = function(nums) {
    nums.sort(function (a, b) {
        return a - b;
    });
    var len = nums.length;
    var sum = 0;
    for (var i = 0; i < len; i += 2) {
        sum += nums[i];
    }
    return sum;
};
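// Illustration (not part of the original file): a quick check against the
// example from the problem statement, runnable under Node in the same file:
console.log(arrayPairSum([1, 4, 3, 2])); // 4: pairs (1,2) and (3,4), mins 1 + 3
console.log(arrayPairSum([-1, 0]));      // -1: a single pair, whose min is -1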
intent.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/cloud/dialogflow/v2/intent.proto package dialogflow import ( context "context" fmt "fmt" math "math" proto "github.com/golang/protobuf/proto" _ "github.com/golang/protobuf/ptypes/duration" empty "github.com/golang/protobuf/ptypes/empty" _struct "github.com/golang/protobuf/ptypes/struct" _ "google.golang.org/genproto/googleapis/api/annotations" longrunning "google.golang.org/genproto/googleapis/longrunning" field_mask "google.golang.org/genproto/protobuf/field_mask" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Represents the options for views of an intent. // An intent can be a sizable object. Therefore, we provide a resource view that // does not return training phrases in the response by default. type IntentView int32 const ( // Training phrases field is not populated in the response. IntentView_INTENT_VIEW_UNSPECIFIED IntentView = 0 // All fields are populated. IntentView_INTENT_VIEW_FULL IntentView = 1 ) var IntentView_name = map[int32]string{ 0: "INTENT_VIEW_UNSPECIFIED", 1: "INTENT_VIEW_FULL", } var IntentView_value = map[string]int32{ "INTENT_VIEW_UNSPECIFIED": 0, "INTENT_VIEW_FULL": 1, } func (x IntentView) String() string { return proto.EnumName(IntentView_name, int32(x)) } func (IntentView) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0} } // Represents the different states that webhooks can be in. type Intent_WebhookState int32 const ( // Webhook is disabled in the agent and in the intent. Intent_WEBHOOK_STATE_UNSPECIFIED Intent_WebhookState = 0 // Webhook is enabled in the agent and in the intent. Intent_WEBHOOK_STATE_ENABLED Intent_WebhookState = 1 // Webhook is enabled in the agent and in the intent. Also, each slot // filling prompt is forwarded to the webhook. Intent_WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING Intent_WebhookState = 2 ) var Intent_WebhookState_name = map[int32]string{ 0: "WEBHOOK_STATE_UNSPECIFIED", 1: "WEBHOOK_STATE_ENABLED", 2: "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING", } var Intent_WebhookState_value = map[string]int32{ "WEBHOOK_STATE_UNSPECIFIED": 0, "WEBHOOK_STATE_ENABLED": 1, "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING": 2, } func (x Intent_WebhookState) String() string { return proto.EnumName(Intent_WebhookState_name, int32(x)) } func (Intent_WebhookState) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 0} } // Represents different types of training phrases. type Intent_TrainingPhrase_Type int32 const ( // Not specified. This value should never be used. Intent_TrainingPhrase_TYPE_UNSPECIFIED Intent_TrainingPhrase_Type = 0 // Examples do not contain @-prefixed entity type names, but example parts // can be annotated with entity types. Intent_TrainingPhrase_EXAMPLE Intent_TrainingPhrase_Type = 1 // Templates are not annotated with entity types, but they can contain // @-prefixed entity type names as substrings. // Template mode has been deprecated. Example mode is the only supported // way to create new training phrases. 
If you have existing training // phrases that you've created in template mode, those will continue to // work. Intent_TrainingPhrase_TEMPLATE Intent_TrainingPhrase_Type = 2 // Deprecated: Do not use. ) var Intent_TrainingPhrase_Type_name = map[int32]string{ 0: "TYPE_UNSPECIFIED", 1: "EXAMPLE", 2: "TEMPLATE", } var Intent_TrainingPhrase_Type_value = map[string]int32{ "TYPE_UNSPECIFIED": 0, "EXAMPLE": 1, "TEMPLATE": 2, } func (x Intent_TrainingPhrase_Type) String() string { return proto.EnumName(Intent_TrainingPhrase_Type_name, int32(x)) } func (Intent_TrainingPhrase_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 0, 0} } // Represents different platforms that a rich message can be intended for. type Intent_Message_Platform int32 const ( // Not specified. Intent_Message_PLATFORM_UNSPECIFIED Intent_Message_Platform = 0 // Facebook. Intent_Message_FACEBOOK Intent_Message_Platform = 1 // Slack. Intent_Message_SLACK Intent_Message_Platform = 2 // Telegram. Intent_Message_TELEGRAM Intent_Message_Platform = 3 // Kik. Intent_Message_KIK Intent_Message_Platform = 4 // Skype. Intent_Message_SKYPE Intent_Message_Platform = 5 // Line. Intent_Message_LINE Intent_Message_Platform = 6 // Viber. Intent_Message_VIBER Intent_Message_Platform = 7 // Actions on Google. // When using Actions on Google, you can choose one of the specific // Intent.Message types that mention support for Actions on Google, // or you can use the advanced Intent.Message.payload field. // The payload field provides access to AoG features not available in the // specific message types. // If using the Intent.Message.payload field, it should have a structure // similar to the JSON message shown here. For more information, see // [Actions on Google Webhook // Format](https://developers.google.com/actions/dialogflow/webhook) // <pre>{ // "expectUserResponse": true, // "isSsml": false, // "noInputPrompts": [], // "richResponse": { // "items": [ // { // "simpleResponse": { // "displayText": "hi", // "textToSpeech": "hello" // } // } // ], // "suggestions": [ // { // "title": "Say this" // }, // {
// ] // }, // "systemIntent": { // "data": { // "@type": "type.googleapis.com/google.actions.v2.OptionValueSpec", // "listSelect": { // "items": [ // { // "optionInfo": { // "key": "key1", // "synonyms": [ // "key one" // ] // }, // "title": "must not be empty, but unique" // }, // { // "optionInfo": { // "key": "key2", // "synonyms": [ // "key two" // ] // }, // "title": "must not be empty, but unique" // } // ] // } // }, // "intent": "actions.intent.OPTION" // } // }</pre> Intent_Message_ACTIONS_ON_GOOGLE Intent_Message_Platform = 8 // Google Hangouts. Intent_Message_GOOGLE_HANGOUTS Intent_Message_Platform = 11 ) var Intent_Message_Platform_name = map[int32]string{ 0: "PLATFORM_UNSPECIFIED", 1: "FACEBOOK", 2: "SLACK", 3: "TELEGRAM", 4: "KIK", 5: "SKYPE", 6: "LINE", 7: "VIBER", 8: "ACTIONS_ON_GOOGLE", 11: "GOOGLE_HANGOUTS", } var Intent_Message_Platform_value = map[string]int32{ "PLATFORM_UNSPECIFIED": 0, "FACEBOOK": 1, "SLACK": 2, "TELEGRAM": 3, "KIK": 4, "SKYPE": 5, "LINE": 6, "VIBER": 7, "ACTIONS_ON_GOOGLE": 8, "GOOGLE_HANGOUTS": 11, } func (x Intent_Message_Platform) String() string { return proto.EnumName(Intent_Message_Platform_name, int32(x)) } func (Intent_Message_Platform) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 0} } // Format of response media type. type Intent_Message_MediaContent_ResponseMediaType int32 const ( // Unspecified. Intent_Message_MediaContent_RESPONSE_MEDIA_TYPE_UNSPECIFIED Intent_Message_MediaContent_ResponseMediaType = 0 // Response media type is audio. Intent_Message_MediaContent_AUDIO Intent_Message_MediaContent_ResponseMediaType = 1 ) var Intent_Message_MediaContent_ResponseMediaType_name = map[int32]string{ 0: "RESPONSE_MEDIA_TYPE_UNSPECIFIED", 1: "AUDIO", } var Intent_Message_MediaContent_ResponseMediaType_value = map[string]int32{ "RESPONSE_MEDIA_TYPE_UNSPECIFIED": 0, "AUDIO": 1, } func (x Intent_Message_MediaContent_ResponseMediaType) String() string { return proto.EnumName(Intent_Message_MediaContent_ResponseMediaType_name, int32(x)) } func (Intent_Message_MediaContent_ResponseMediaType) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 13, 0} } // Image display options for Actions on Google. This should be used for // when the image's aspect ratio does not match the image container's // aspect ratio. type Intent_Message_BrowseCarouselCard_ImageDisplayOptions int32 const ( // Fill the gaps between the image and the image container with gray // bars. Intent_Message_BrowseCarouselCard_IMAGE_DISPLAY_OPTIONS_UNSPECIFIED Intent_Message_BrowseCarouselCard_ImageDisplayOptions = 0 // Fill the gaps between the image and the image container with gray // bars. Intent_Message_BrowseCarouselCard_GRAY Intent_Message_BrowseCarouselCard_ImageDisplayOptions = 1 // Fill the gaps between the image and the image container with white // bars. Intent_Message_BrowseCarouselCard_WHITE Intent_Message_BrowseCarouselCard_ImageDisplayOptions = 2 // Image is scaled such that the image width and height match or exceed // the container dimensions. This may crop the top and bottom of the // image if the scaled image height is greater than the container // height, or crop the left and right of the image if the scaled image // width is greater than the container width. This is similar to "Zoom // Mode" on a widescreen TV when playing a 4:3 video. 
Intent_Message_BrowseCarouselCard_CROPPED Intent_Message_BrowseCarouselCard_ImageDisplayOptions = 3 // Pad the gaps between image and image frame with a blurred copy of the // same image. Intent_Message_BrowseCarouselCard_BLURRED_BACKGROUND Intent_Message_BrowseCarouselCard_ImageDisplayOptions = 4 ) var Intent_Message_BrowseCarouselCard_ImageDisplayOptions_name = map[int32]string{ 0: "IMAGE_DISPLAY_OPTIONS_UNSPECIFIED", 1: "GRAY", 2: "WHITE", 3: "CROPPED", 4: "BLURRED_BACKGROUND", } var Intent_Message_BrowseCarouselCard_ImageDisplayOptions_value = map[string]int32{ "IMAGE_DISPLAY_OPTIONS_UNSPECIFIED": 0, "GRAY": 1, "WHITE": 2, "CROPPED": 3, "BLURRED_BACKGROUND": 4, } func (x Intent_Message_BrowseCarouselCard_ImageDisplayOptions) String() string { return proto.EnumName(Intent_Message_BrowseCarouselCard_ImageDisplayOptions_name, int32(x)) } func (Intent_Message_BrowseCarouselCard_ImageDisplayOptions) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 14, 0} } // Type of the URI. type Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint int32 const ( // Unspecified Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_URL_TYPE_HINT_UNSPECIFIED Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint = 0 // Url would be an amp action Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_AMP_ACTION Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint = 1 // URL that points directly to AMP content, or to a canonical URL // which refers to AMP content via <link rel="amphtml">. Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_AMP_CONTENT Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint = 2 ) var Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint_name = map[int32]string{ 0: "URL_TYPE_HINT_UNSPECIFIED", 1: "AMP_ACTION", 2: "AMP_CONTENT", } var Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint_value = map[string]int32{ "URL_TYPE_HINT_UNSPECIFIED": 0, "AMP_ACTION": 1, "AMP_CONTENT": 2, } func (x Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint) String() string { return proto.EnumName(Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint_name, int32(x)) } func (Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 14, 0, 0, 0} } // Text alignments within a cell. type Intent_Message_ColumnProperties_HorizontalAlignment int32 const ( // Text is aligned to the leading edge of the column. Intent_Message_ColumnProperties_HORIZONTAL_ALIGNMENT_UNSPECIFIED Intent_Message_ColumnProperties_HorizontalAlignment = 0 // Text is aligned to the leading edge of the column. Intent_Message_ColumnProperties_LEADING Intent_Message_ColumnProperties_HorizontalAlignment = 1 // Text is centered in the column. Intent_Message_ColumnProperties_CENTER Intent_Message_ColumnProperties_HorizontalAlignment = 2 // Text is aligned to the trailing edge of the column. 
Intent_Message_ColumnProperties_TRAILING Intent_Message_ColumnProperties_HorizontalAlignment = 3 ) var Intent_Message_ColumnProperties_HorizontalAlignment_name = map[int32]string{ 0: "HORIZONTAL_ALIGNMENT_UNSPECIFIED", 1: "LEADING", 2: "CENTER", 3: "TRAILING", } var Intent_Message_ColumnProperties_HorizontalAlignment_value = map[string]int32{ "HORIZONTAL_ALIGNMENT_UNSPECIFIED": 0, "LEADING": 1, "CENTER": 2, "TRAILING": 3, } func (x Intent_Message_ColumnProperties_HorizontalAlignment) String() string { return proto.EnumName(Intent_Message_ColumnProperties_HorizontalAlignment_name, int32(x)) } func (Intent_Message_ColumnProperties_HorizontalAlignment) EnumDescriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 16, 0} } // Represents an intent. // Intents convert a number of user expressions or patterns into an action. An // action is an extraction of a user command or sentence semantics. type Intent struct { // The unique identifier of this intent. // Required for [Intents.UpdateIntent][google.cloud.dialogflow.v2.Intents.UpdateIntent] and [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents] // methods. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Required. The name of this intent. DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` // Optional. Indicates whether webhooks are enabled for the intent. WebhookState Intent_WebhookState `protobuf:"varint,6,opt,name=webhook_state,json=webhookState,proto3,enum=google.cloud.dialogflow.v2.Intent_WebhookState" json:"webhook_state,omitempty"` // Optional. The priority of this intent. Higher numbers represent higher // priorities. // // - If the supplied value is unspecified or 0, the service // translates the value to 500,000, which corresponds to the // `Normal` priority in the console. // - If the supplied value is negative, the intent is ignored // in runtime detect intent requests. Priority int32 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"` // Optional. Indicates whether this is a fallback intent. IsFallback bool `protobuf:"varint,4,opt,name=is_fallback,json=isFallback,proto3" json:"is_fallback,omitempty"` // Optional. Indicates whether Machine Learning is disabled for the intent. // Note: If `ml_disabled` setting is set to true, then this intent is not // taken into account during inference in `ML ONLY` match mode. Also, // auto-markup in the UI is turned off. MlDisabled bool `protobuf:"varint,19,opt,name=ml_disabled,json=mlDisabled,proto3" json:"ml_disabled,omitempty"` // Optional. The list of context names required for this intent to be // triggered. // Format: `projects/<Project ID>/agent/sessions/-/contexts/<Context ID>`. InputContextNames []string `protobuf:"bytes,7,rep,name=input_context_names,json=inputContextNames,proto3" json:"input_context_names,omitempty"` // Optional. The collection of event names that trigger the intent. // If the collection of input contexts is not empty, all of the contexts must // be present in the active user session for an event to trigger this intent. Events []string `protobuf:"bytes,8,rep,name=events,proto3" json:"events,omitempty"` // Optional. The collection of examples that the agent is // trained on. TrainingPhrases []*Intent_TrainingPhrase `protobuf:"bytes,9,rep,name=training_phrases,json=trainingPhrases,proto3" json:"training_phrases,omitempty"` // Optional. 
The name of the action associated with the intent. // Note: The action name must not contain whitespaces. Action string `protobuf:"bytes,10,opt,name=action,proto3" json:"action,omitempty"` // Optional. The collection of contexts that are activated when the intent // is matched. Context messages in this collection should not set the // parameters field. Setting the `lifespan_count` to 0 will reset the context // when the intent is matched. // Format: `projects/<Project ID>/agent/sessions/-/contexts/<Context ID>`. OutputContexts []*Context `protobuf:"bytes,11,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` // Optional. Indicates whether to delete all contexts in the current // session when this intent is matched. ResetContexts bool `protobuf:"varint,12,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"` // Optional. The collection of parameters associated with the intent. Parameters []*Intent_Parameter `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"` // Optional. The collection of rich messages corresponding to the // `Response` field in the Dialogflow console. Messages []*Intent_Message `protobuf:"bytes,14,rep,name=messages,proto3" json:"messages,omitempty"` // Optional. The list of platforms for which the first responses will be // copied from the messages in PLATFORM_UNSPECIFIED (i.e. default platform). DefaultResponsePlatforms []Intent_Message_Platform `protobuf:"varint,15,rep,packed,name=default_response_platforms,json=defaultResponsePlatforms,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_Platform" json:"default_response_platforms,omitempty"` // Read-only. The unique identifier of the root intent in the chain of // followup intents. It identifies the correct followup intents chain for // this intent. We populate this field only in the output. // // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. RootFollowupIntentName string `protobuf:"bytes,16,opt,name=root_followup_intent_name,json=rootFollowupIntentName,proto3" json:"root_followup_intent_name,omitempty"` // Read-only after creation. The unique identifier of the parent intent in the // chain of followup intents. You can set this field when creating an intent, // for example with [CreateIntent][google.cloud.dialogflow.v2.Intents.CreateIntent] or // [BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents], in order to make this // intent a followup intent. // // It identifies the parent followup intent. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. ParentFollowupIntentName string `protobuf:"bytes,17,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` // Read-only. Information about all followup intents that have this intent as // a direct or indirect parent. We populate this field only in the output. 
FollowupIntentInfo []*Intent_FollowupIntentInfo `protobuf:"bytes,18,rep,name=followup_intent_info,json=followupIntentInfo,proto3" json:"followup_intent_info,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent) Reset() { *m = Intent{} } func (m *Intent) String() string { return proto.CompactTextString(m) } func (*Intent) ProtoMessage() {} func (*Intent) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0} } func (m *Intent) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent.Unmarshal(m, b) } func (m *Intent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent.Marshal(b, m, deterministic) } func (m *Intent) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent.Merge(m, src) } func (m *Intent) XXX_Size() int { return xxx_messageInfo_Intent.Size(m) } func (m *Intent) XXX_DiscardUnknown() { xxx_messageInfo_Intent.DiscardUnknown(m) } var xxx_messageInfo_Intent proto.InternalMessageInfo func (m *Intent) GetName() string { if m != nil { return m.Name } return "" } func (m *Intent) GetDisplayName() string { if m != nil { return m.DisplayName } return "" } func (m *Intent) GetWebhookState() Intent_WebhookState { if m != nil { return m.WebhookState } return Intent_WEBHOOK_STATE_UNSPECIFIED } func (m *Intent) GetPriority() int32 { if m != nil { return m.Priority } return 0 } func (m *Intent) GetIsFallback() bool { if m != nil { return m.IsFallback } return false } func (m *Intent) GetMlDisabled() bool { if m != nil { return m.MlDisabled } return false } func (m *Intent) GetInputContextNames() []string { if m != nil { return m.InputContextNames } return nil } func (m *Intent) GetEvents() []string { if m != nil { return m.Events } return nil } func (m *Intent) GetTrainingPhrases() []*Intent_TrainingPhrase { if m != nil { return m.TrainingPhrases } return nil } func (m *Intent) GetAction() string { if m != nil { return m.Action } return "" } func (m *Intent) GetOutputContexts() []*Context { if m != nil { return m.OutputContexts } return nil } func (m *Intent) GetResetContexts() bool { if m != nil { return m.ResetContexts } return false } func (m *Intent) GetParameters() []*Intent_Parameter { if m != nil { return m.Parameters } return nil } func (m *Intent) GetMessages() []*Intent_Message { if m != nil { return m.Messages } return nil } func (m *Intent) GetDefaultResponsePlatforms() []Intent_Message_Platform { if m != nil { return m.DefaultResponsePlatforms } return nil } func (m *Intent) GetRootFollowupIntentName() string { if m != nil { return m.RootFollowupIntentName } return "" } func (m *Intent) GetParentFollowupIntentName() string { if m != nil { return m.ParentFollowupIntentName } return "" } func (m *Intent) GetFollowupIntentInfo() []*Intent_FollowupIntentInfo { if m != nil { return m.FollowupIntentInfo } return nil } // Represents an example that the agent is trained on. type Intent_TrainingPhrase struct { // Output only. The unique identifier of this training phrase. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Required. The type of the training phrase. Type Intent_TrainingPhrase_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.dialogflow.v2.Intent_TrainingPhrase_Type" json:"type,omitempty"` // Required. The ordered list of training phrase parts. // The parts are concatenated in order to form the training phrase. 
// // Note: The API does not automatically annotate training phrases like the // Dialogflow Console does. // // Note: Do not forget to include whitespace at part boundaries, // so the training phrase is well formatted when the parts are concatenated. // // If the training phrase does not need to be annotated with parameters, // you just need a single part with only the [Part.text][google.cloud.dialogflow.v2.Intent.TrainingPhrase.Part.text] field set. // // If you want to annotate the training phrase, you must create multiple // parts, where the fields of each part are populated in one of two ways: // // - `Part.text` is set to a part of the phrase that has no parameters. // - `Part.text` is set to a part of the phrase that you want to annotate, // and the `entity_type`, `alias`, and `user_defined` fields are all // set. Parts []*Intent_TrainingPhrase_Part `protobuf:"bytes,3,rep,name=parts,proto3" json:"parts,omitempty"` // Optional. Indicates how many times this example was added to // the intent. Each time a developer adds an existing sample by editing an // intent or training, this counter is increased. TimesAddedCount int32 `protobuf:"varint,4,opt,name=times_added_count,json=timesAddedCount,proto3" json:"times_added_count,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_TrainingPhrase) Reset() { *m = Intent_TrainingPhrase{} } func (m *Intent_TrainingPhrase) String() string { return proto.CompactTextString(m) } func (*Intent_TrainingPhrase) ProtoMessage() {} func (*Intent_TrainingPhrase) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 0} } func (m *Intent_TrainingPhrase) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_TrainingPhrase.Unmarshal(m, b) } func (m *Intent_TrainingPhrase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_TrainingPhrase.Marshal(b, m, deterministic) } func (m *Intent_TrainingPhrase) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_TrainingPhrase.Merge(m, src) } func (m *Intent_TrainingPhrase) XXX_Size() int { return xxx_messageInfo_Intent_TrainingPhrase.Size(m) } func (m *Intent_TrainingPhrase) XXX_DiscardUnknown() { xxx_messageInfo_Intent_TrainingPhrase.DiscardUnknown(m) } var xxx_messageInfo_Intent_TrainingPhrase proto.InternalMessageInfo func (m *Intent_TrainingPhrase) GetName() string { if m != nil { return m.Name } return "" } func (m *Intent_TrainingPhrase) GetType() Intent_TrainingPhrase_Type { if m != nil { return m.Type } return Intent_TrainingPhrase_TYPE_UNSPECIFIED } func (m *Intent_TrainingPhrase) GetParts() []*Intent_TrainingPhrase_Part { if m != nil { return m.Parts } return nil } func (m *Intent_TrainingPhrase) GetTimesAddedCount() int32 { if m != nil { return m.TimesAddedCount } return 0 } // Represents a part of a training phrase. type Intent_TrainingPhrase_Part struct { // Required. The text for this part. Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` // Optional. The entity type name prefixed with `@`. // This field is required for annotated parts of the training phrase. EntityType string `protobuf:"bytes,2,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` // Optional. The parameter name for the value extracted from the // annotated part of the example. // This field is required for annotated parts of the training phrase. Alias string `protobuf:"bytes,3,opt,name=alias,proto3" json:"alias,omitempty"` // Optional. 
Indicates whether the text was manually annotated. // This field is set to true when the Dialogflow Console is used to // manually annotate the part. When creating an annotated part with the // API, you must set this to true. UserDefined bool `protobuf:"varint,4,opt,name=user_defined,json=userDefined,proto3" json:"user_defined,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_TrainingPhrase_Part) Reset() { *m = Intent_TrainingPhrase_Part{} } func (m *Intent_TrainingPhrase_Part) String() string { return proto.CompactTextString(m) } func (*Intent_TrainingPhrase_Part) ProtoMessage() {} func (*Intent_TrainingPhrase_Part) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 0, 0} } func (m *Intent_TrainingPhrase_Part) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_TrainingPhrase_Part.Unmarshal(m, b) } func (m *Intent_TrainingPhrase_Part) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_TrainingPhrase_Part.Marshal(b, m, deterministic) } func (m *Intent_TrainingPhrase_Part) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_TrainingPhrase_Part.Merge(m, src) } func (m *Intent_TrainingPhrase_Part) XXX_Size() int { return xxx_messageInfo_Intent_TrainingPhrase_Part.Size(m) } func (m *Intent_TrainingPhrase_Part) XXX_DiscardUnknown() { xxx_messageInfo_Intent_TrainingPhrase_Part.DiscardUnknown(m) } var xxx_messageInfo_Intent_TrainingPhrase_Part proto.InternalMessageInfo func (m *Intent_TrainingPhrase_Part) GetText() string { if m != nil { return m.Text } return "" } func (m *Intent_TrainingPhrase_Part) GetEntityType() string { if m != nil { return m.EntityType } return "" } func (m *Intent_TrainingPhrase_Part) GetAlias() string { if m != nil { return m.Alias } return "" } func (m *Intent_TrainingPhrase_Part) GetUserDefined() bool { if m != nil { return m.UserDefined } return false } // Represents intent parameters. type Intent_Parameter struct { // The unique identifier of this parameter. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Required. The name of the parameter. DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` // Optional. The definition of the parameter value. It can be: // - a constant string, // - a parameter value defined as `$parameter_name`, // - an original parameter value defined as `$parameter_name.original`, // - a parameter value from some context defined as // `#context_name.parameter_name`. Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` // Optional. The default value to use when the `value` yields an empty // result. // Default values can be extracted from contexts by using the following // syntax: `#context_name.parameter_name`. DefaultValue string `protobuf:"bytes,4,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` // Optional. The name of the entity type, prefixed with `@`, that // describes values of the parameter. If the parameter is // required, this must be provided. EntityTypeDisplayName string `protobuf:"bytes,5,opt,name=entity_type_display_name,json=entityTypeDisplayName,proto3" json:"entity_type_display_name,omitempty"` // Optional. Indicates whether the parameter is required. That is, // whether the intent cannot be completed without collecting the parameter // value. 
Mandatory bool `protobuf:"varint,6,opt,name=mandatory,proto3" json:"mandatory,omitempty"` // Optional. The collection of prompts that the agent can present to the // user in order to collect a value for the parameter. Prompts []string `protobuf:"bytes,7,rep,name=prompts,proto3" json:"prompts,omitempty"` // Optional. Indicates whether the parameter represents a list of values. IsList bool `protobuf:"varint,8,opt,name=is_list,json=isList,proto3" json:"is_list,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Parameter) Reset() { *m = Intent_Parameter{} } func (m *Intent_Parameter) String() string { return proto.CompactTextString(m) } func (*Intent_Parameter) ProtoMessage() {} func (*Intent_Parameter) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 1} } func (m *Intent_Parameter) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Parameter.Unmarshal(m, b) } func (m *Intent_Parameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Parameter.Marshal(b, m, deterministic) } func (m *Intent_Parameter) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Parameter.Merge(m, src) } func (m *Intent_Parameter) XXX_Size() int { return xxx_messageInfo_Intent_Parameter.Size(m) } func (m *Intent_Parameter) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Parameter.DiscardUnknown(m) } var xxx_messageInfo_Intent_Parameter proto.InternalMessageInfo func (m *Intent_Parameter) GetName() string { if m != nil { return m.Name } return "" } func (m *Intent_Parameter) GetDisplayName() string { if m != nil { return m.DisplayName } return "" } func (m *Intent_Parameter) GetValue() string { if m != nil { return m.Value } return "" } func (m *Intent_Parameter) GetDefaultValue() string { if m != nil { return m.DefaultValue } return "" } func (m *Intent_Parameter) GetEntityTypeDisplayName() string { if m != nil { return m.EntityTypeDisplayName } return "" } func (m *Intent_Parameter) GetMandatory() bool { if m != nil { return m.Mandatory } return false } func (m *Intent_Parameter) GetPrompts() []string { if m != nil { return m.Prompts } return nil } func (m *Intent_Parameter) GetIsList() bool { if m != nil { return m.IsList } return false } // Corresponds to the `Response` field in the Dialogflow console. type Intent_Message struct { // Required. The rich response message. // // Types that are valid to be assigned to Message: // *Intent_Message_Text_ // *Intent_Message_Image_ // *Intent_Message_QuickReplies_ // *Intent_Message_Card_ // *Intent_Message_Payload // *Intent_Message_SimpleResponses_ // *Intent_Message_BasicCard_ // *Intent_Message_Suggestions_ // *Intent_Message_LinkOutSuggestion_ // *Intent_Message_ListSelect_ // *Intent_Message_CarouselSelect_ // *Intent_Message_BrowseCarouselCard_ // *Intent_Message_TableCard_ // *Intent_Message_MediaContent_ Message isIntent_Message_Message `protobuf_oneof:"message"` // Optional. The platform that this message is intended for. 
Platform Intent_Message_Platform `protobuf:"varint,6,opt,name=platform,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_Platform" json:"platform,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message) Reset() { *m = Intent_Message{} } func (m *Intent_Message) String() string { return proto.CompactTextString(m) } func (*Intent_Message) ProtoMessage() {} func (*Intent_Message) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2} } func (m *Intent_Message) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message.Unmarshal(m, b) } func (m *Intent_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message.Marshal(b, m, deterministic) } func (m *Intent_Message) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message.Merge(m, src) } func (m *Intent_Message) XXX_Size() int { return xxx_messageInfo_Intent_Message.Size(m) } func (m *Intent_Message) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message proto.InternalMessageInfo type isIntent_Message_Message interface { isIntent_Message_Message() } type Intent_Message_Text_ struct { Text *Intent_Message_Text `protobuf:"bytes,1,opt,name=text,proto3,oneof"` } type Intent_Message_Image_ struct { Image *Intent_Message_Image `protobuf:"bytes,2,opt,name=image,proto3,oneof"` } type Intent_Message_QuickReplies_ struct { QuickReplies *Intent_Message_QuickReplies `protobuf:"bytes,3,opt,name=quick_replies,json=quickReplies,proto3,oneof"` } type Intent_Message_Card_ struct { Card *Intent_Message_Card `protobuf:"bytes,4,opt,name=card,proto3,oneof"` } type Intent_Message_Payload struct { Payload *_struct.Struct `protobuf:"bytes,5,opt,name=payload,proto3,oneof"` } type Intent_Message_SimpleResponses_ struct { SimpleResponses *Intent_Message_SimpleResponses `protobuf:"bytes,7,opt,name=simple_responses,json=simpleResponses,proto3,oneof"` } type Intent_Message_BasicCard_ struct { BasicCard *Intent_Message_BasicCard `protobuf:"bytes,8,opt,name=basic_card,json=basicCard,proto3,oneof"` } type Intent_Message_Suggestions_ struct { Suggestions *Intent_Message_Suggestions `protobuf:"bytes,9,opt,name=suggestions,proto3,oneof"` } type Intent_Message_LinkOutSuggestion_ struct { LinkOutSuggestion *Intent_Message_LinkOutSuggestion `protobuf:"bytes,10,opt,name=link_out_suggestion,json=linkOutSuggestion,proto3,oneof"` } type Intent_Message_ListSelect_ struct { ListSelect *Intent_Message_ListSelect `protobuf:"bytes,11,opt,name=list_select,json=listSelect,proto3,oneof"` } type Intent_Message_CarouselSelect_ struct { CarouselSelect *Intent_Message_CarouselSelect `protobuf:"bytes,12,opt,name=carousel_select,json=carouselSelect,proto3,oneof"` } type Intent_Message_BrowseCarouselCard_ struct { BrowseCarouselCard *Intent_Message_BrowseCarouselCard `protobuf:"bytes,22,opt,name=browse_carousel_card,json=browseCarouselCard,proto3,oneof"` } type Intent_Message_TableCard_ struct { TableCard *Intent_Message_TableCard `protobuf:"bytes,23,opt,name=table_card,json=tableCard,proto3,oneof"` } type Intent_Message_MediaContent_ struct { MediaContent *Intent_Message_MediaContent `protobuf:"bytes,24,opt,name=media_content,json=mediaContent,proto3,oneof"` } func (*Intent_Message_Text_) isIntent_Message_Message() {} func (*Intent_Message_Image_) isIntent_Message_Message() {} func (*Intent_Message_QuickReplies_) isIntent_Message_Message() {} func 
(*Intent_Message_Card_) isIntent_Message_Message() {} func (*Intent_Message_Payload) isIntent_Message_Message() {} func (*Intent_Message_SimpleResponses_) isIntent_Message_Message() {} func (*Intent_Message_BasicCard_) isIntent_Message_Message() {} func (*Intent_Message_Suggestions_) isIntent_Message_Message() {} func (*Intent_Message_LinkOutSuggestion_) isIntent_Message_Message() {} func (*Intent_Message_ListSelect_) isIntent_Message_Message() {} func (*Intent_Message_CarouselSelect_) isIntent_Message_Message() {} func (*Intent_Message_BrowseCarouselCard_) isIntent_Message_Message() {} func (*Intent_Message_TableCard_) isIntent_Message_Message() {} func (*Intent_Message_MediaContent_) isIntent_Message_Message() {} func (m *Intent_Message) GetMessage() isIntent_Message_Message { if m != nil { return m.Message } return nil } func (m *Intent_Message) GetText() *Intent_Message_Text { if x, ok := m.GetMessage().(*Intent_Message_Text_); ok { return x.Text } return nil } func (m *Intent_Message) GetImage() *Intent_Message_Image { if x, ok := m.GetMessage().(*Intent_Message_Image_); ok { return x.Image } return nil } func (m *Intent_Message) GetQuickReplies() *Intent_Message_QuickReplies { if x, ok := m.GetMessage().(*Intent_Message_QuickReplies_); ok { return x.QuickReplies } return nil } func (m *Intent_Message) GetCard() *Intent_Message_Card { if x, ok := m.GetMessage().(*Intent_Message_Card_); ok { return x.Card } return nil } func (m *Intent_Message) GetPayload() *_struct.Struct { if x, ok := m.GetMessage().(*Intent_Message_Payload); ok { return x.Payload } return nil } func (m *Intent_Message) GetSimpleResponses() *Intent_Message_SimpleResponses { if x, ok := m.GetMessage().(*Intent_Message_SimpleResponses_); ok { return x.SimpleResponses } return nil } func (m *Intent_Message) GetBasicCard() *Intent_Message_BasicCard { if x, ok := m.GetMessage().(*Intent_Message_BasicCard_); ok { return x.BasicCard } return nil } func (m *Intent_Message) GetSuggestions() *Intent_Message_Suggestions { if x, ok := m.GetMessage().(*Intent_Message_Suggestions_); ok { return x.Suggestions } return nil } func (m *Intent_Message) GetLinkOutSuggestion() *Intent_Message_LinkOutSuggestion { if x, ok := m.GetMessage().(*Intent_Message_LinkOutSuggestion_); ok { return x.LinkOutSuggestion } return nil } func (m *Intent_Message) GetListSelect() *Intent_Message_ListSelect { if x, ok := m.GetMessage().(*Intent_Message_ListSelect_); ok { return x.ListSelect } return nil } func (m *Intent_Message) GetCarouselSelect() *Intent_Message_CarouselSelect { if x, ok := m.GetMessage().(*Intent_Message_CarouselSelect_); ok { return x.CarouselSelect } return nil } func (m *Intent_Message) GetBrowseCarouselCard() *Intent_Message_BrowseCarouselCard { if x, ok := m.GetMessage().(*Intent_Message_BrowseCarouselCard_); ok { return x.BrowseCarouselCard } return nil } func (m *Intent_Message) GetTableCard() *Intent_Message_TableCard { if x, ok := m.GetMessage().(*Intent_Message_TableCard_); ok { return x.TableCard } return nil } func (m *Intent_Message) GetMediaContent() *Intent_Message_MediaContent { if x, ok := m.GetMessage().(*Intent_Message_MediaContent_); ok { return x.MediaContent } return nil } func (m *Intent_Message) GetPlatform() Intent_Message_Platform { if m != nil { return m.Platform } return Intent_Message_PLATFORM_UNSPECIFIED } // XXX_OneofWrappers is for the internal use of the proto package. 
func (*Intent_Message) XXX_OneofWrappers() []interface{} { return []interface{}{ (*Intent_Message_Text_)(nil), (*Intent_Message_Image_)(nil), (*Intent_Message_QuickReplies_)(nil), (*Intent_Message_Card_)(nil), (*Intent_Message_Payload)(nil), (*Intent_Message_SimpleResponses_)(nil), (*Intent_Message_BasicCard_)(nil), (*Intent_Message_Suggestions_)(nil), (*Intent_Message_LinkOutSuggestion_)(nil), (*Intent_Message_ListSelect_)(nil), (*Intent_Message_CarouselSelect_)(nil), (*Intent_Message_BrowseCarouselCard_)(nil), (*Intent_Message_TableCard_)(nil), (*Intent_Message_MediaContent_)(nil), } } // The text response message. type Intent_Message_Text struct { // Optional. The collection of the agent's responses. Text []string `protobuf:"bytes,1,rep,name=text,proto3" json:"text,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_Text) Reset() { *m = Intent_Message_Text{} } func (m *Intent_Message_Text) String() string { return proto.CompactTextString(m) } func (*Intent_Message_Text) ProtoMessage() {} func (*Intent_Message_Text) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 0} } func (m *Intent_Message_Text) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_Text.Unmarshal(m, b) } func (m *Intent_Message_Text) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_Text.Marshal(b, m, deterministic) } func (m *Intent_Message_Text) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_Text.Merge(m, src) } func (m *Intent_Message_Text) XXX_Size() int { return xxx_messageInfo_Intent_Message_Text.Size(m) } func (m *Intent_Message_Text) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_Text.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_Text proto.InternalMessageInfo func (m *Intent_Message_Text) GetText() []string { if m != nil { return m.Text } return nil } // The image response message. type Intent_Message_Image struct { // Optional. The public URI to an image file. ImageUri string `protobuf:"bytes,1,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` // Optional. A text description of the image to be used for accessibility, // e.g., screen readers. 
AccessibilityText string `protobuf:"bytes,2,opt,name=accessibility_text,json=accessibilityText,proto3" json:"accessibility_text,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_Image) Reset() { *m = Intent_Message_Image{} } func (m *Intent_Message_Image) String() string { return proto.CompactTextString(m) } func (*Intent_Message_Image) ProtoMessage() {} func (*Intent_Message_Image) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 1} } func (m *Intent_Message_Image) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_Image.Unmarshal(m, b) } func (m *Intent_Message_Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_Image.Marshal(b, m, deterministic) } func (m *Intent_Message_Image) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_Image.Merge(m, src) } func (m *Intent_Message_Image) XXX_Size() int { return xxx_messageInfo_Intent_Message_Image.Size(m) } func (m *Intent_Message_Image) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_Image.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_Image proto.InternalMessageInfo func (m *Intent_Message_Image) GetImageUri() string { if m != nil { return m.ImageUri } return "" } func (m *Intent_Message_Image) GetAccessibilityText() string { if m != nil { return m.AccessibilityText } return "" } // The quick replies response message. type Intent_Message_QuickReplies struct { // Optional. The title of the collection of quick replies. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Optional. The collection of quick replies. QuickReplies []string `protobuf:"bytes,2,rep,name=quick_replies,json=quickReplies,proto3" json:"quick_replies,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_QuickReplies) Reset() { *m = Intent_Message_QuickReplies{} } func (m *Intent_Message_QuickReplies) String() string { return proto.CompactTextString(m) } func (*Intent_Message_QuickReplies) ProtoMessage() {} func (*Intent_Message_QuickReplies) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 2} } func (m *Intent_Message_QuickReplies) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_QuickReplies.Unmarshal(m, b) } func (m *Intent_Message_QuickReplies) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_QuickReplies.Marshal(b, m, deterministic) } func (m *Intent_Message_QuickReplies) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_QuickReplies.Merge(m, src) } func (m *Intent_Message_QuickReplies) XXX_Size() int { return xxx_messageInfo_Intent_Message_QuickReplies.Size(m) } func (m *Intent_Message_QuickReplies) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_QuickReplies.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_QuickReplies proto.InternalMessageInfo func (m *Intent_Message_QuickReplies) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_QuickReplies) GetQuickReplies() []string { if m != nil { return m.QuickReplies } return nil } // The card response message. type Intent_Message_Card struct { // Optional. The title of the card. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Optional. The subtitle of the card. 
Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` // Optional. The public URI to an image file for the card. ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` // Optional. The collection of card buttons. Buttons []*Intent_Message_Card_Button `protobuf:"bytes,4,rep,name=buttons,proto3" json:"buttons,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_Card) Reset() { *m = Intent_Message_Card{} } func (m *Intent_Message_Card) String() string { return proto.CompactTextString(m) } func (*Intent_Message_Card) ProtoMessage() {} func (*Intent_Message_Card) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 3} } func (m *Intent_Message_Card) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_Card.Unmarshal(m, b) } func (m *Intent_Message_Card) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_Card.Marshal(b, m, deterministic) } func (m *Intent_Message_Card) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_Card.Merge(m, src) } func (m *Intent_Message_Card) XXX_Size() int { return xxx_messageInfo_Intent_Message_Card.Size(m) } func (m *Intent_Message_Card) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_Card.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_Card proto.InternalMessageInfo func (m *Intent_Message_Card) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_Card) GetSubtitle() string { if m != nil { return m.Subtitle } return "" } func (m *Intent_Message_Card) GetImageUri() string { if m != nil { return m.ImageUri } return "" } func (m *Intent_Message_Card) GetButtons() []*Intent_Message_Card_Button { if m != nil { return m.Buttons } return nil } // Contains information about a button. type Intent_Message_Card_Button struct { // Optional. The text to show on the button. Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` // Optional. The text to send back to the Dialogflow API or a URI to // open. 
Postback string `protobuf:"bytes,2,opt,name=postback,proto3" json:"postback,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_Card_Button) Reset() { *m = Intent_Message_Card_Button{} } func (m *Intent_Message_Card_Button) String() string { return proto.CompactTextString(m) } func (*Intent_Message_Card_Button) ProtoMessage() {} func (*Intent_Message_Card_Button) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 3, 0} } func (m *Intent_Message_Card_Button) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_Card_Button.Unmarshal(m, b) } func (m *Intent_Message_Card_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_Card_Button.Marshal(b, m, deterministic) } func (m *Intent_Message_Card_Button) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_Card_Button.Merge(m, src) } func (m *Intent_Message_Card_Button) XXX_Size() int { return xxx_messageInfo_Intent_Message_Card_Button.Size(m) } func (m *Intent_Message_Card_Button) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_Card_Button.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_Card_Button proto.InternalMessageInfo func (m *Intent_Message_Card_Button) GetText() string { if m != nil { return m.Text } return "" } func (m *Intent_Message_Card_Button) GetPostback() string { if m != nil { return m.Postback } return "" } // The simple response message containing speech or text. type Intent_Message_SimpleResponse struct { // One of text_to_speech or ssml must be provided. The plain text of the // speech output. Mutually exclusive with ssml. TextToSpeech string `protobuf:"bytes,1,opt,name=text_to_speech,json=textToSpeech,proto3" json:"text_to_speech,omitempty"` // One of text_to_speech or ssml must be provided. Structured spoken // response to the user in the SSML format. Mutually exclusive with // text_to_speech. Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3" json:"ssml,omitempty"` // Optional. The text to display. 
DisplayText string `protobuf:"bytes,3,opt,name=display_text,json=displayText,proto3" json:"display_text,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_SimpleResponse) Reset() { *m = Intent_Message_SimpleResponse{} } func (m *Intent_Message_SimpleResponse) String() string { return proto.CompactTextString(m) } func (*Intent_Message_SimpleResponse) ProtoMessage() {} func (*Intent_Message_SimpleResponse) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 4} } func (m *Intent_Message_SimpleResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_SimpleResponse.Unmarshal(m, b) } func (m *Intent_Message_SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_SimpleResponse.Marshal(b, m, deterministic) } func (m *Intent_Message_SimpleResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_SimpleResponse.Merge(m, src) } func (m *Intent_Message_SimpleResponse) XXX_Size() int { return xxx_messageInfo_Intent_Message_SimpleResponse.Size(m) } func (m *Intent_Message_SimpleResponse) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_SimpleResponse.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_SimpleResponse proto.InternalMessageInfo func (m *Intent_Message_SimpleResponse) GetTextToSpeech() string { if m != nil { return m.TextToSpeech } return "" } func (m *Intent_Message_SimpleResponse) GetSsml() string { if m != nil { return m.Ssml } return "" } func (m *Intent_Message_SimpleResponse) GetDisplayText() string { if m != nil { return m.DisplayText } return "" } // The collection of simple response candidates. // This message in `QueryResult.fulfillment_messages` and // `WebhookResponse.fulfillment_messages` should contain only one // `SimpleResponse`. type Intent_Message_SimpleResponses struct { // Required. The list of simple responses. 
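	//
	// A minimal sketch of one entry (hypothetical values, not part of the
	// generated code); recall that TextToSpeech and Ssml are mutually
	// exclusive:
	//
	//	&Intent_Message_SimpleResponse{
	//		TextToSpeech: "Hi there!",
	//		DisplayText:  "Hi there!",
	//	}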
SimpleResponses []*Intent_Message_SimpleResponse `protobuf:"bytes,1,rep,name=simple_responses,json=simpleResponses,proto3" json:"simple_responses,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_SimpleResponses) Reset() { *m = Intent_Message_SimpleResponses{} } func (m *Intent_Message_SimpleResponses) String() string { return proto.CompactTextString(m) } func (*Intent_Message_SimpleResponses) ProtoMessage() {} func (*Intent_Message_SimpleResponses) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 5} } func (m *Intent_Message_SimpleResponses) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_SimpleResponses.Unmarshal(m, b) } func (m *Intent_Message_SimpleResponses) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_SimpleResponses.Marshal(b, m, deterministic) } func (m *Intent_Message_SimpleResponses) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_SimpleResponses.Merge(m, src) } func (m *Intent_Message_SimpleResponses) XXX_Size() int { return xxx_messageInfo_Intent_Message_SimpleResponses.Size(m) } func (m *Intent_Message_SimpleResponses) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_SimpleResponses.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_SimpleResponses proto.InternalMessageInfo func (m *Intent_Message_SimpleResponses) GetSimpleResponses() []*Intent_Message_SimpleResponse { if m != nil { return m.SimpleResponses } return nil } // The basic card message. Useful for displaying information. type Intent_Message_BasicCard struct { // Optional. The title of the card. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Optional. The subtitle of the card. Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` // Required, unless image is present. The body text of the card. FormattedText string `protobuf:"bytes,3,opt,name=formatted_text,json=formattedText,proto3" json:"formatted_text,omitempty"` // Optional. The image for the card. Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` // Optional. The collection of card buttons. 
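	//
	// For illustration only (values hypothetical), a single button could be
	// built as:
	//
	//	&Intent_Message_BasicCard_Button{
	//		Title:         "Open site",
	//		OpenUriAction: &Intent_Message_BasicCard_Button_OpenUriAction{Uri: "https://example.com"},
	//	}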
Buttons []*Intent_Message_BasicCard_Button `protobuf:"bytes,5,rep,name=buttons,proto3" json:"buttons,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_BasicCard) Reset() { *m = Intent_Message_BasicCard{} } func (m *Intent_Message_BasicCard) String() string { return proto.CompactTextString(m) } func (*Intent_Message_BasicCard) ProtoMessage() {} func (*Intent_Message_BasicCard) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 6} } func (m *Intent_Message_BasicCard) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_BasicCard.Unmarshal(m, b) } func (m *Intent_Message_BasicCard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_BasicCard.Marshal(b, m, deterministic) } func (m *Intent_Message_BasicCard) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_BasicCard.Merge(m, src) } func (m *Intent_Message_BasicCard) XXX_Size() int { return xxx_messageInfo_Intent_Message_BasicCard.Size(m) } func (m *Intent_Message_BasicCard) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_BasicCard.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_BasicCard proto.InternalMessageInfo func (m *Intent_Message_BasicCard) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_BasicCard) GetSubtitle() string { if m != nil { return m.Subtitle } return "" } func (m *Intent_Message_BasicCard) GetFormattedText() string { if m != nil { return m.FormattedText } return "" } func (m *Intent_Message_BasicCard) GetImage() *Intent_Message_Image { if m != nil { return m.Image } return nil } func (m *Intent_Message_BasicCard) GetButtons() []*Intent_Message_BasicCard_Button { if m != nil { return m.Buttons } return nil } // The button object that appears at the bottom of a card. type Intent_Message_BasicCard_Button struct { // Required. The title of the button. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Required. Action to take when a user taps on the button. 
OpenUriAction *Intent_Message_BasicCard_Button_OpenUriAction `protobuf:"bytes,2,opt,name=open_uri_action,json=openUriAction,proto3" json:"open_uri_action,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_BasicCard_Button) Reset() { *m = Intent_Message_BasicCard_Button{} } func (m *Intent_Message_BasicCard_Button) String() string { return proto.CompactTextString(m) } func (*Intent_Message_BasicCard_Button) ProtoMessage() {} func (*Intent_Message_BasicCard_Button) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 6, 0} } func (m *Intent_Message_BasicCard_Button) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_BasicCard_Button.Unmarshal(m, b) } func (m *Intent_Message_BasicCard_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_BasicCard_Button.Marshal(b, m, deterministic) } func (m *Intent_Message_BasicCard_Button) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_BasicCard_Button.Merge(m, src) } func (m *Intent_Message_BasicCard_Button) XXX_Size() int { return xxx_messageInfo_Intent_Message_BasicCard_Button.Size(m) } func (m *Intent_Message_BasicCard_Button) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_BasicCard_Button.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_BasicCard_Button proto.InternalMessageInfo func (m *Intent_Message_BasicCard_Button) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_BasicCard_Button) GetOpenUriAction() *Intent_Message_BasicCard_Button_OpenUriAction { if m != nil { return m.OpenUriAction } return nil } // Opens the given URI. type Intent_Message_BasicCard_Button_OpenUriAction struct { // Required. The HTTP or HTTPS scheme URI. 
	Uri                  string   `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *Intent_Message_BasicCard_Button_OpenUriAction) Reset() {
	*m = Intent_Message_BasicCard_Button_OpenUriAction{}
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) String() string {
	return proto.CompactTextString(m)
}
func (*Intent_Message_BasicCard_Button_OpenUriAction) ProtoMessage() {}
func (*Intent_Message_BasicCard_Button_OpenUriAction) Descriptor() ([]byte, []int) {
	return fileDescriptor_6c20e31044d743bd, []int{0, 2, 6, 0, 0}
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Unmarshal(m, b)
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Marshal(b, m, deterministic)
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Merge(m, src)
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Size() int {
	return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Size(m)
}
func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_DiscardUnknown() {
	xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.DiscardUnknown(m)
}

var xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction proto.InternalMessageInfo

func (m *Intent_Message_BasicCard_Button_OpenUriAction) GetUri() string {
	if m != nil {
		return m.Uri
	}
	return ""
}

// The suggestion chip message that the user can tap to quickly post a reply
// to the conversation.
type Intent_Message_Suggestion struct {
	// Required. The text shown in the suggestion chip.
	Title                string   `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *Intent_Message_Suggestion) Reset()         { *m = Intent_Message_Suggestion{} }
func (m *Intent_Message_Suggestion) String() string { return proto.CompactTextString(m) }
func (*Intent_Message_Suggestion) ProtoMessage()    {}
func (*Intent_Message_Suggestion) Descriptor() ([]byte, []int) {
	return fileDescriptor_6c20e31044d743bd, []int{0, 2, 7}
}
func (m *Intent_Message_Suggestion) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Intent_Message_Suggestion.Unmarshal(m, b)
}
func (m *Intent_Message_Suggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Intent_Message_Suggestion.Marshal(b, m, deterministic)
}
func (m *Intent_Message_Suggestion) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Intent_Message_Suggestion.Merge(m, src)
}
func (m *Intent_Message_Suggestion) XXX_Size() int {
	return xxx_messageInfo_Intent_Message_Suggestion.Size(m)
}
func (m *Intent_Message_Suggestion) XXX_DiscardUnknown() {
	xxx_messageInfo_Intent_Message_Suggestion.DiscardUnknown(m)
}

var xxx_messageInfo_Intent_Message_Suggestion proto.InternalMessageInfo

func (m *Intent_Message_Suggestion) GetTitle() string {
	if m != nil {
		return m.Title
	}
	return ""
}

// The collection of suggestions.
type Intent_Message_Suggestions struct {
	// Required. The list of suggested replies.
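	//
	// For illustration (hypothetical titles), two chips could be expressed
	// as:
	//
	//	[]*Intent_Message_Suggestion{{Title: "Yes"}, {Title: "No"}}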
Suggestions []*Intent_Message_Suggestion `protobuf:"bytes,1,rep,name=suggestions,proto3" json:"suggestions,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_Suggestions) Reset() { *m = Intent_Message_Suggestions{} } func (m *Intent_Message_Suggestions) String() string { return proto.CompactTextString(m) } func (*Intent_Message_Suggestions) ProtoMessage() {} func (*Intent_Message_Suggestions) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 8} } func (m *Intent_Message_Suggestions) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_Suggestions.Unmarshal(m, b) } func (m *Intent_Message_Suggestions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_Suggestions.Marshal(b, m, deterministic) } func (m *Intent_Message_Suggestions) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_Suggestions.Merge(m, src) } func (m *Intent_Message_Suggestions) XXX_Size() int { return xxx_messageInfo_Intent_Message_Suggestions.Size(m) } func (m *Intent_Message_Suggestions) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_Suggestions.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_Suggestions proto.InternalMessageInfo func (m *Intent_Message_Suggestions) GetSuggestions() []*Intent_Message_Suggestion { if m != nil { return m.Suggestions } return nil } // The suggestion chip message that allows the user to jump out to the app // or website associated with this agent. type Intent_Message_LinkOutSuggestion struct { // Required. The name of the app or site this chip is linking to. DestinationName string `protobuf:"bytes,1,opt,name=destination_name,json=destinationName,proto3" json:"destination_name,omitempty"` // Required. The URI of the app or site to open when the user taps the // suggestion chip. 
Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_LinkOutSuggestion) Reset() { *m = Intent_Message_LinkOutSuggestion{} } func (m *Intent_Message_LinkOutSuggestion) String() string { return proto.CompactTextString(m) } func (*Intent_Message_LinkOutSuggestion) ProtoMessage() {} func (*Intent_Message_LinkOutSuggestion) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 9} } func (m *Intent_Message_LinkOutSuggestion) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Unmarshal(m, b) } func (m *Intent_Message_LinkOutSuggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Marshal(b, m, deterministic) } func (m *Intent_Message_LinkOutSuggestion) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_LinkOutSuggestion.Merge(m, src) } func (m *Intent_Message_LinkOutSuggestion) XXX_Size() int { return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Size(m) } func (m *Intent_Message_LinkOutSuggestion) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_LinkOutSuggestion.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_LinkOutSuggestion proto.InternalMessageInfo func (m *Intent_Message_LinkOutSuggestion) GetDestinationName() string { if m != nil { return m.DestinationName } return "" } func (m *Intent_Message_LinkOutSuggestion) GetUri() string { if m != nil { return m.Uri } return "" } // The card for presenting a list of options to select from. type Intent_Message_ListSelect struct { // Optional. The overall title of the list. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Required. List items. Items []*Intent_Message_ListSelect_Item `protobuf:"bytes,2,rep,name=items,proto3" json:"items,omitempty"` // Optional. Subtitle of the list. 
Subtitle string `protobuf:"bytes,3,opt,name=subtitle,proto3" json:"subtitle,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_ListSelect) Reset() { *m = Intent_Message_ListSelect{} } func (m *Intent_Message_ListSelect) String() string { return proto.CompactTextString(m) } func (*Intent_Message_ListSelect) ProtoMessage() {} func (*Intent_Message_ListSelect) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 10} } func (m *Intent_Message_ListSelect) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_ListSelect.Unmarshal(m, b) } func (m *Intent_Message_ListSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_ListSelect.Marshal(b, m, deterministic) } func (m *Intent_Message_ListSelect) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_ListSelect.Merge(m, src) } func (m *Intent_Message_ListSelect) XXX_Size() int { return xxx_messageInfo_Intent_Message_ListSelect.Size(m) } func (m *Intent_Message_ListSelect) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_ListSelect.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_ListSelect proto.InternalMessageInfo func (m *Intent_Message_ListSelect) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_ListSelect) GetItems() []*Intent_Message_ListSelect_Item { if m != nil { return m.Items } return nil } func (m *Intent_Message_ListSelect) GetSubtitle() string { if m != nil { return m.Subtitle } return "" } // An item in the list. type Intent_Message_ListSelect_Item struct { // Required. Additional information about this option. Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` // Required. The title of the list item. Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` // Optional. The main text describing the item. Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` // Optional. The image to display. 
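	//
	// A sketch with hypothetical values:
	//
	//	&Intent_Message_Image{
	//		ImageUri:          "https://example.com/item.png",
	//		AccessibilityText: "Product photo",
	//	}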
Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_ListSelect_Item) Reset() { *m = Intent_Message_ListSelect_Item{} } func (m *Intent_Message_ListSelect_Item) String() string { return proto.CompactTextString(m) } func (*Intent_Message_ListSelect_Item) ProtoMessage() {} func (*Intent_Message_ListSelect_Item) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 10, 0} } func (m *Intent_Message_ListSelect_Item) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_ListSelect_Item.Unmarshal(m, b) } func (m *Intent_Message_ListSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_ListSelect_Item.Marshal(b, m, deterministic) } func (m *Intent_Message_ListSelect_Item) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_ListSelect_Item.Merge(m, src) } func (m *Intent_Message_ListSelect_Item) XXX_Size() int { return xxx_messageInfo_Intent_Message_ListSelect_Item.Size(m) } func (m *Intent_Message_ListSelect_Item) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_ListSelect_Item.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_ListSelect_Item proto.InternalMessageInfo func (m *Intent_Message_ListSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { if m != nil { return m.Info } return nil } func (m *Intent_Message_ListSelect_Item) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_ListSelect_Item) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Intent_Message_ListSelect_Item) GetImage() *Intent_Message_Image { if m != nil { return m.Image } return nil } // The card for presenting a carousel of options to select from. type Intent_Message_CarouselSelect struct { // Required. Carousel items. Items []*Intent_Message_CarouselSelect_Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_CarouselSelect) Reset() { *m = Intent_Message_CarouselSelect{} } func (m *Intent_Message_CarouselSelect) String() string { return proto.CompactTextString(m) } func (*Intent_Message_CarouselSelect) ProtoMessage() {} func (*Intent_Message_CarouselSelect) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 11} } func (m *Intent_Message_CarouselSelect) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_CarouselSelect.Unmarshal(m, b) } func (m *Intent_Message_CarouselSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_CarouselSelect.Marshal(b, m, deterministic) } func (m *Intent_Message_CarouselSelect) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_CarouselSelect.Merge(m, src) } func (m *Intent_Message_CarouselSelect) XXX_Size() int { return xxx_messageInfo_Intent_Message_CarouselSelect.Size(m) } func (m *Intent_Message_CarouselSelect) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_CarouselSelect.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_CarouselSelect proto.InternalMessageInfo func (m *Intent_Message_CarouselSelect) GetItems() []*Intent_Message_CarouselSelect_Item { if m != nil { return m.Items } return nil } // An item in the carousel. 
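//
// An illustrative way client code might construct one (hypothetical
// values, not part of the generated code):
//
//	item := &Intent_Message_CarouselSelect_Item{
//		Info:  &Intent_Message_SelectItemInfo{Key: "sku-1"},
//		Title: "First option",
//	}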
type Intent_Message_CarouselSelect_Item struct { // Required. Additional info about the option item. Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` // Required. Title of the carousel item. Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` // Optional. The body text of the card. Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` // Optional. The image to display. Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_CarouselSelect_Item) Reset() { *m = Intent_Message_CarouselSelect_Item{} } func (m *Intent_Message_CarouselSelect_Item) String() string { return proto.CompactTextString(m) } func (*Intent_Message_CarouselSelect_Item) ProtoMessage() {} func (*Intent_Message_CarouselSelect_Item) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 11, 0} } func (m *Intent_Message_CarouselSelect_Item) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Unmarshal(m, b) } func (m *Intent_Message_CarouselSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Marshal(b, m, deterministic) } func (m *Intent_Message_CarouselSelect_Item) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_CarouselSelect_Item.Merge(m, src) } func (m *Intent_Message_CarouselSelect_Item) XXX_Size() int { return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Size(m) } func (m *Intent_Message_CarouselSelect_Item) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_CarouselSelect_Item.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_CarouselSelect_Item proto.InternalMessageInfo func (m *Intent_Message_CarouselSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { if m != nil { return m.Info } return nil } func (m *Intent_Message_CarouselSelect_Item) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_CarouselSelect_Item) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Intent_Message_CarouselSelect_Item) GetImage() *Intent_Message_Image { if m != nil { return m.Image } return nil } // Additional info about the select item for when it is triggered in a // dialog. type Intent_Message_SelectItemInfo struct { // Required. A unique key that will be sent back to the agent if this // response is given. Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` // Optional. A list of synonyms that can also be used to trigger this // item in dialog. 
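	//
	// For example (hypothetical values):
	//
	//	Synonyms: []string{"first", "option one"}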
	Synonyms             []string `protobuf:"bytes,2,rep,name=synonyms,proto3" json:"synonyms,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *Intent_Message_SelectItemInfo) Reset()         { *m = Intent_Message_SelectItemInfo{} }
func (m *Intent_Message_SelectItemInfo) String() string { return proto.CompactTextString(m) }
func (*Intent_Message_SelectItemInfo) ProtoMessage()    {}
func (*Intent_Message_SelectItemInfo) Descriptor() ([]byte, []int) {
	return fileDescriptor_6c20e31044d743bd, []int{0, 2, 12}
}
func (m *Intent_Message_SelectItemInfo) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Intent_Message_SelectItemInfo.Unmarshal(m, b)
}
func (m *Intent_Message_SelectItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Intent_Message_SelectItemInfo.Marshal(b, m, deterministic)
}
func (m *Intent_Message_SelectItemInfo) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Intent_Message_SelectItemInfo.Merge(m, src)
}
func (m *Intent_Message_SelectItemInfo) XXX_Size() int {
	return xxx_messageInfo_Intent_Message_SelectItemInfo.Size(m)
}
func (m *Intent_Message_SelectItemInfo) XXX_DiscardUnknown() {
	xxx_messageInfo_Intent_Message_SelectItemInfo.DiscardUnknown(m)
}

var xxx_messageInfo_Intent_Message_SelectItemInfo proto.InternalMessageInfo

func (m *Intent_Message_SelectItemInfo) GetKey() string {
	if m != nil {
		return m.Key
	}
	return ""
}

func (m *Intent_Message_SelectItemInfo) GetSynonyms() []string {
	if m != nil {
		return m.Synonyms
	}
	return nil
}

// The media content card for Actions on Google.
type Intent_Message_MediaContent struct {
	// Optional. What type of media is the content (i.e. "audio").
	MediaType Intent_Message_MediaContent_ResponseMediaType `protobuf:"varint,1,opt,name=media_type,json=mediaType,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_MediaContent_ResponseMediaType" json:"media_type,omitempty"`
	// Required. List of media objects.
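	//
	// A sketch of one media object (hypothetical values), showing the
	// LargeImage oneof wrapper:
	//
	//	&Intent_Message_MediaContent_ResponseMediaObject{
	//		Name:       "Track 1",
	//		ContentUrl: "https://example.com/track1.mp3",
	//		Image: &Intent_Message_MediaContent_ResponseMediaObject_LargeImage{
	//			LargeImage: &Intent_Message_Image{ImageUri: "https://example.com/cover.png"},
	//		},
	//	}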
MediaObjects []*Intent_Message_MediaContent_ResponseMediaObject `protobuf:"bytes,2,rep,name=media_objects,json=mediaObjects,proto3" json:"media_objects,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_MediaContent) Reset() { *m = Intent_Message_MediaContent{} } func (m *Intent_Message_MediaContent) String() string { return proto.CompactTextString(m) } func (*Intent_Message_MediaContent) ProtoMessage() {} func (*Intent_Message_MediaContent) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 13} } func (m *Intent_Message_MediaContent) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_MediaContent.Unmarshal(m, b) } func (m *Intent_Message_MediaContent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_MediaContent.Marshal(b, m, deterministic) } func (m *Intent_Message_MediaContent) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_MediaContent.Merge(m, src) } func (m *Intent_Message_MediaContent) XXX_Size() int { return xxx_messageInfo_Intent_Message_MediaContent.Size(m) } func (m *Intent_Message_MediaContent) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_MediaContent.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_MediaContent proto.InternalMessageInfo func (m *Intent_Message_MediaContent) GetMediaType() Intent_Message_MediaContent_ResponseMediaType { if m != nil { return m.MediaType } return Intent_Message_MediaContent_RESPONSE_MEDIA_TYPE_UNSPECIFIED } func (m *Intent_Message_MediaContent) GetMediaObjects() []*Intent_Message_MediaContent_ResponseMediaObject { if m != nil { return m.MediaObjects } return nil } // Response media object for media content card. type Intent_Message_MediaContent_ResponseMediaObject struct { // Required. Name of media card. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Optional. Description of media card. Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` // Image to show with the media card. // // Types that are valid to be assigned to Image: // *Intent_Message_MediaContent_ResponseMediaObject_LargeImage // *Intent_Message_MediaContent_ResponseMediaObject_Icon Image isIntent_Message_MediaContent_ResponseMediaObject_Image `protobuf_oneof:"image"` // Required. Url where the media is stored. 
ContentUrl string `protobuf:"bytes,5,opt,name=content_url,json=contentUrl,proto3" json:"content_url,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_MediaContent_ResponseMediaObject) Reset() { *m = Intent_Message_MediaContent_ResponseMediaObject{} } func (m *Intent_Message_MediaContent_ResponseMediaObject) String() string { return proto.CompactTextString(m) } func (*Intent_Message_MediaContent_ResponseMediaObject) ProtoMessage() {} func (*Intent_Message_MediaContent_ResponseMediaObject) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 13, 0} } func (m *Intent_Message_MediaContent_ResponseMediaObject) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject.Unmarshal(m, b) } func (m *Intent_Message_MediaContent_ResponseMediaObject) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject.Marshal(b, m, deterministic) } func (m *Intent_Message_MediaContent_ResponseMediaObject) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject.Merge(m, src) } func (m *Intent_Message_MediaContent_ResponseMediaObject) XXX_Size() int { return xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject.Size(m) } func (m *Intent_Message_MediaContent_ResponseMediaObject) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_MediaContent_ResponseMediaObject proto.InternalMessageInfo func (m *Intent_Message_MediaContent_ResponseMediaObject) GetName() string { if m != nil { return m.Name } return "" } func (m *Intent_Message_MediaContent_ResponseMediaObject) GetDescription() string { if m != nil { return m.Description } return "" } type isIntent_Message_MediaContent_ResponseMediaObject_Image interface { isIntent_Message_MediaContent_ResponseMediaObject_Image() } type Intent_Message_MediaContent_ResponseMediaObject_LargeImage struct { LargeImage *Intent_Message_Image `protobuf:"bytes,3,opt,name=large_image,json=largeImage,proto3,oneof"` } type Intent_Message_MediaContent_ResponseMediaObject_Icon struct { Icon *Intent_Message_Image `protobuf:"bytes,4,opt,name=icon,proto3,oneof"` } func (*Intent_Message_MediaContent_ResponseMediaObject_LargeImage) isIntent_Message_MediaContent_ResponseMediaObject_Image() { } func (*Intent_Message_MediaContent_ResponseMediaObject_Icon) isIntent_Message_MediaContent_ResponseMediaObject_Image() { } func (m *Intent_Message_MediaContent_ResponseMediaObject) GetImage() isIntent_Message_MediaContent_ResponseMediaObject_Image { if m != nil { return m.Image } return nil } func (m *Intent_Message_MediaContent_ResponseMediaObject) GetLargeImage() *Intent_Message_Image { if x, ok := m.GetImage().(*Intent_Message_MediaContent_ResponseMediaObject_LargeImage); ok { return x.LargeImage } return nil } func (m *Intent_Message_MediaContent_ResponseMediaObject) GetIcon() *Intent_Message_Image { if x, ok := m.GetImage().(*Intent_Message_MediaContent_ResponseMediaObject_Icon); ok { return x.Icon } return nil } func (m *Intent_Message_MediaContent_ResponseMediaObject) GetContentUrl() string { if m != nil { return m.ContentUrl } return "" } // XXX_OneofWrappers is for the internal use of the proto package. 
func (*Intent_Message_MediaContent_ResponseMediaObject) XXX_OneofWrappers() []interface{} { return []interface{}{ (*Intent_Message_MediaContent_ResponseMediaObject_LargeImage)(nil), (*Intent_Message_MediaContent_ResponseMediaObject_Icon)(nil), } } // Browse Carousel Card for Actions on Google. // https://developers.google.com/actions/assistant/responses#browsing_carousel type Intent_Message_BrowseCarouselCard struct { // Required. List of items in the Browse Carousel Card. Minimum of two // items, maximum of ten. Items []*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` // Optional. Settings for displaying the image. Applies to every image in // [items][google.cloud.dialogflow.v2.Intent.Message.BrowseCarouselCard.items]. ImageDisplayOptions Intent_Message_BrowseCarouselCard_ImageDisplayOptions `protobuf:"varint,2,opt,name=image_display_options,json=imageDisplayOptions,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_BrowseCarouselCard_ImageDisplayOptions" json:"image_display_options,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_BrowseCarouselCard) Reset() { *m = Intent_Message_BrowseCarouselCard{} } func (m *Intent_Message_BrowseCarouselCard) String() string { return proto.CompactTextString(m) } func (*Intent_Message_BrowseCarouselCard) ProtoMessage() {} func (*Intent_Message_BrowseCarouselCard) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 14} } func (m *Intent_Message_BrowseCarouselCard) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_BrowseCarouselCard.Unmarshal(m, b) } func (m *Intent_Message_BrowseCarouselCard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_BrowseCarouselCard.Marshal(b, m, deterministic) } func (m *Intent_Message_BrowseCarouselCard) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_BrowseCarouselCard.Merge(m, src) } func (m *Intent_Message_BrowseCarouselCard) XXX_Size() int { return xxx_messageInfo_Intent_Message_BrowseCarouselCard.Size(m) } func (m *Intent_Message_BrowseCarouselCard) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_BrowseCarouselCard.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_BrowseCarouselCard proto.InternalMessageInfo func (m *Intent_Message_BrowseCarouselCard) GetItems() []*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem { if m != nil { return m.Items } return nil } func (m *Intent_Message_BrowseCarouselCard) GetImageDisplayOptions() Intent_Message_BrowseCarouselCard_ImageDisplayOptions { if m != nil { return m.ImageDisplayOptions } return Intent_Message_BrowseCarouselCard_IMAGE_DISPLAY_OPTIONS_UNSPECIFIED } // Browsing carousel tile type Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem struct { // Required. Action to present to the user. OpenUriAction *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction `protobuf:"bytes,1,opt,name=open_uri_action,json=openUriAction,proto3" json:"open_uri_action,omitempty"` // Required. Title of the carousel item. Maximum of two lines of text. Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` // Optional. Description of the carousel item. Maximum of four lines of // text. Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` // Optional. Hero image for the carousel item. 
Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` // Optional. Text that appears at the bottom of the Browse Carousel // Card. Maximum of one line of text. Footer string `protobuf:"bytes,5,opt,name=footer,proto3" json:"footer,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) Reset() { *m = Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem{} } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) String() string { return proto.CompactTextString(m) } func (*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) ProtoMessage() {} func (*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 14, 0} } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem.Unmarshal(m, b) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem.Marshal(b, m, deterministic) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem.Merge(m, src) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) XXX_Size() int { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem.Size(m) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem proto.InternalMessageInfo func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) GetOpenUriAction() *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction { if m != nil { return m.OpenUriAction } return nil } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) GetImage() *Intent_Message_Image { if m != nil { return m.Image } return nil } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem) GetFooter() string { if m != nil { return m.Footer } return "" } // Actions on Google action to open a given url. type Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction struct { // Required. URL Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` // Optional. Specifies the type of viewer that is used when opening // the URL. Defaults to opening via web browser. 
UrlTypeHint Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint `protobuf:"varint,3,opt,name=url_type_hint,json=urlTypeHint,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint" json:"url_type_hint,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) Reset() { *m = Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction{} } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) String() string { return proto.CompactTextString(m) } func (*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) ProtoMessage() {} func (*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 14, 0, 0} } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction.Unmarshal(m, b) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction.Marshal(b, m, deterministic) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction.Merge(m, src) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) XXX_Size() int { return xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction.Size(m) } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction proto.InternalMessageInfo func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) GetUrl() string { if m != nil { return m.Url } return "" } func (m *Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction) GetUrlTypeHint() Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint { if m != nil { return m.UrlTypeHint } return Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_URL_TYPE_HINT_UNSPECIFIED } // Table card for Actions on Google. type Intent_Message_TableCard struct { // Required. Title of the card. Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // Optional. Subtitle to the title. Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` // Optional. Image which should be displayed on the card. Image *Intent_Message_Image `protobuf:"bytes,3,opt,name=image,proto3" json:"image,omitempty"` // Optional. Display properties for the columns in this table. ColumnProperties []*Intent_Message_ColumnProperties `protobuf:"bytes,4,rep,name=column_properties,json=columnProperties,proto3" json:"column_properties,omitempty"` // Optional. Rows in this table of data. Rows []*Intent_Message_TableCardRow `protobuf:"bytes,5,rep,name=rows,proto3" json:"rows,omitempty"` // Optional. 
List of buttons for the card. Buttons []*Intent_Message_BasicCard_Button `protobuf:"bytes,6,rep,name=buttons,proto3" json:"buttons,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_TableCard) Reset() { *m = Intent_Message_TableCard{} } func (m *Intent_Message_TableCard) String() string { return proto.CompactTextString(m) } func (*Intent_Message_TableCard) ProtoMessage() {} func (*Intent_Message_TableCard) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 15} } func (m *Intent_Message_TableCard) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_TableCard.Unmarshal(m, b) } func (m *Intent_Message_TableCard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_TableCard.Marshal(b, m, deterministic) } func (m *Intent_Message_TableCard) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_TableCard.Merge(m, src) } func (m *Intent_Message_TableCard) XXX_Size() int { return xxx_messageInfo_Intent_Message_TableCard.Size(m) } func (m *Intent_Message_TableCard) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_TableCard.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_TableCard proto.InternalMessageInfo func (m *Intent_Message_TableCard) GetTitle() string { if m != nil { return m.Title } return "" } func (m *Intent_Message_TableCard) GetSubtitle() string { if m != nil { return m.Subtitle } return "" } func (m *Intent_Message_TableCard) GetImage() *Intent_Message_Image { if m != nil { return m.Image } return nil } func (m *Intent_Message_TableCard) GetColumnProperties() []*Intent_Message_ColumnProperties { if m != nil { return m.ColumnProperties } return nil } func (m *Intent_Message_TableCard) GetRows() []*Intent_Message_TableCardRow { if m != nil { return m.Rows } return nil } func (m *Intent_Message_TableCard) GetButtons() []*Intent_Message_BasicCard_Button { if m != nil { return m.Buttons } return nil } // Column properties for [TableCard][google.cloud.dialogflow.v2.Intent.Message.TableCard]. type Intent_Message_ColumnProperties struct { // Required. Column heading. Header string `protobuf:"bytes,1,opt,name=header,proto3" json:"header,omitempty"` // Optional. Defines text alignment for all cells in this column. 
HorizontalAlignment Intent_Message_ColumnProperties_HorizontalAlignment `protobuf:"varint,2,opt,name=horizontal_alignment,json=horizontalAlignment,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_ColumnProperties_HorizontalAlignment" json:"horizontal_alignment,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_ColumnProperties) Reset() { *m = Intent_Message_ColumnProperties{} } func (m *Intent_Message_ColumnProperties) String() string { return proto.CompactTextString(m) } func (*Intent_Message_ColumnProperties) ProtoMessage() {} func (*Intent_Message_ColumnProperties) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 16} } func (m *Intent_Message_ColumnProperties) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_ColumnProperties.Unmarshal(m, b) } func (m *Intent_Message_ColumnProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_ColumnProperties.Marshal(b, m, deterministic) } func (m *Intent_Message_ColumnProperties) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_ColumnProperties.Merge(m, src) } func (m *Intent_Message_ColumnProperties) XXX_Size() int { return xxx_messageInfo_Intent_Message_ColumnProperties.Size(m) } func (m *Intent_Message_ColumnProperties) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_ColumnProperties.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_ColumnProperties proto.InternalMessageInfo func (m *Intent_Message_ColumnProperties) GetHeader() string { if m != nil { return m.Header } return "" } func (m *Intent_Message_ColumnProperties) GetHorizontalAlignment() Intent_Message_ColumnProperties_HorizontalAlignment { if m != nil { return m.HorizontalAlignment } return Intent_Message_ColumnProperties_HORIZONTAL_ALIGNMENT_UNSPECIFIED } // Row of [TableCard][google.cloud.dialogflow.v2.Intent.Message.TableCard]. type Intent_Message_TableCardRow struct { // Optional. List of cells that make up this row. Cells []*Intent_Message_TableCardCell `protobuf:"bytes,1,rep,name=cells,proto3" json:"cells,omitempty"` // Optional. Whether to add a visual divider after this row. 
DividerAfter bool `protobuf:"varint,2,opt,name=divider_after,json=dividerAfter,proto3" json:"divider_after,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_TableCardRow) Reset() { *m = Intent_Message_TableCardRow{} } func (m *Intent_Message_TableCardRow) String() string { return proto.CompactTextString(m) } func (*Intent_Message_TableCardRow) ProtoMessage() {} func (*Intent_Message_TableCardRow) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 17} } func (m *Intent_Message_TableCardRow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_TableCardRow.Unmarshal(m, b) } func (m *Intent_Message_TableCardRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_TableCardRow.Marshal(b, m, deterministic) } func (m *Intent_Message_TableCardRow) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_TableCardRow.Merge(m, src) } func (m *Intent_Message_TableCardRow) XXX_Size() int { return xxx_messageInfo_Intent_Message_TableCardRow.Size(m) } func (m *Intent_Message_TableCardRow) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_TableCardRow.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_TableCardRow proto.InternalMessageInfo func (m *Intent_Message_TableCardRow) GetCells() []*Intent_Message_TableCardCell { if m != nil { return m.Cells } return nil } func (m *Intent_Message_TableCardRow) GetDividerAfter() bool { if m != nil { return m.DividerAfter } return false } // Cell of [TableCardRow][google.cloud.dialogflow.v2.Intent.Message.TableCardRow]. type Intent_Message_TableCardCell struct { // Required. Text in this cell. Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_Message_TableCardCell) Reset() { *m = Intent_Message_TableCardCell{} } func (m *Intent_Message_TableCardCell) String() string { return proto.CompactTextString(m) } func (*Intent_Message_TableCardCell) ProtoMessage() {} func (*Intent_Message_TableCardCell) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 2, 18} } func (m *Intent_Message_TableCardCell) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_Message_TableCardCell.Unmarshal(m, b) } func (m *Intent_Message_TableCardCell) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_Message_TableCardCell.Marshal(b, m, deterministic) } func (m *Intent_Message_TableCardCell) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_Message_TableCardCell.Merge(m, src) } func (m *Intent_Message_TableCardCell) XXX_Size() int { return xxx_messageInfo_Intent_Message_TableCardCell.Size(m) } func (m *Intent_Message_TableCardCell) XXX_DiscardUnknown() { xxx_messageInfo_Intent_Message_TableCardCell.DiscardUnknown(m) } var xxx_messageInfo_Intent_Message_TableCardCell proto.InternalMessageInfo func (m *Intent_Message_TableCardCell) GetText() string { if m != nil { return m.Text } return "" } // Represents a single followup intent in the chain. type Intent_FollowupIntentInfo struct { // The unique identifier of the followup intent. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. 
FollowupIntentName string `protobuf:"bytes,1,opt,name=followup_intent_name,json=followupIntentName,proto3" json:"followup_intent_name,omitempty"` // The unique identifier of the followup intent's parent. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. ParentFollowupIntentName string `protobuf:"bytes,2,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Intent_FollowupIntentInfo) Reset() { *m = Intent_FollowupIntentInfo{} } func (m *Intent_FollowupIntentInfo) String() string { return proto.CompactTextString(m) } func (*Intent_FollowupIntentInfo) ProtoMessage() {} func (*Intent_FollowupIntentInfo) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{0, 3} } func (m *Intent_FollowupIntentInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Intent_FollowupIntentInfo.Unmarshal(m, b) } func (m *Intent_FollowupIntentInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Intent_FollowupIntentInfo.Marshal(b, m, deterministic) } func (m *Intent_FollowupIntentInfo) XXX_Merge(src proto.Message) { xxx_messageInfo_Intent_FollowupIntentInfo.Merge(m, src) } func (m *Intent_FollowupIntentInfo) XXX_Size() int { return xxx_messageInfo_Intent_FollowupIntentInfo.Size(m) } func (m *Intent_FollowupIntentInfo) XXX_DiscardUnknown() { xxx_messageInfo_Intent_FollowupIntentInfo.DiscardUnknown(m) } var xxx_messageInfo_Intent_FollowupIntentInfo proto.InternalMessageInfo func (m *Intent_FollowupIntentInfo) GetFollowupIntentName() string { if m != nil { return m.FollowupIntentName } return "" } func (m *Intent_FollowupIntentInfo) GetParentFollowupIntentName() string { if m != nil { return m.ParentFollowupIntentName } return "" } // The request message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents]. type ListIntentsRequest struct { // Required. The agent to list all intents from. // Format: `projects/<Project ID>/agent`. Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // Optional. The language to list training phrases, parameters and rich // messages for. If not specified, the agent's default language is used. // [Many // languages](https://cloud.google.com/dialogflow/docs/reference/language) // are supported. Note: languages must be enabled in the agent before they can // be used. LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` // Optional. The resource view to apply to the returned intent. IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` // Optional. The maximum number of items to return in a single page. By // default 100 and at most 1000. PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` // Optional. The next_page_token value returned from a previous list request. 
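	//
	// For illustration, callers page through results by feeding each
	// response's next_page_token back in (sketch; the client variable is
	// hypothetical):
	//
	//	req := &ListIntentsRequest{Parent: "projects/my-project/agent"}
	//	for {
	//		resp, err := client.ListIntents(ctx, req)
	//		if err != nil {
	//			break
	//		}
	//		// ... use resp.Intents ...
	//		if resp.NextPageToken == "" {
	//			break
	//		}
	//		req.PageToken = resp.NextPageToken
	//	}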
PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ListIntentsRequest) Reset() { *m = ListIntentsRequest{} } func (m *ListIntentsRequest) String() string { return proto.CompactTextString(m) } func (*ListIntentsRequest) ProtoMessage() {} func (*ListIntentsRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{1} } func (m *ListIntentsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListIntentsRequest.Unmarshal(m, b) } func (m *ListIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ListIntentsRequest.Marshal(b, m, deterministic) } func (m *ListIntentsRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ListIntentsRequest.Merge(m, src) } func (m *ListIntentsRequest) XXX_Size() int { return xxx_messageInfo_ListIntentsRequest.Size(m) } func (m *ListIntentsRequest) XXX_DiscardUnknown() { xxx_messageInfo_ListIntentsRequest.DiscardUnknown(m) } var xxx_messageInfo_ListIntentsRequest proto.InternalMessageInfo func (m *ListIntentsRequest) GetParent() string { if m != nil { return m.Parent } return "" } func (m *ListIntentsRequest) GetLanguageCode() string { if m != nil { return m.LanguageCode } return "" } func (m *ListIntentsRequest) GetIntentView() IntentView { if m != nil { return m.IntentView } return IntentView_INTENT_VIEW_UNSPECIFIED } func (m *ListIntentsRequest) GetPageSize() int32 { if m != nil { return m.PageSize } return 0 } func (m *ListIntentsRequest) GetPageToken() string { if m != nil { return m.PageToken } return "" } // The response message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents]. type ListIntentsResponse struct { // The list of agent intents. There will be a maximum number of items // returned based on the page_size field in the request. Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` // Token to retrieve the next page of results, or empty if there are no // more results in the list. 
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ListIntentsResponse) Reset() { *m = ListIntentsResponse{} } func (m *ListIntentsResponse) String() string { return proto.CompactTextString(m) } func (*ListIntentsResponse) ProtoMessage() {} func (*ListIntentsResponse) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{2} } func (m *ListIntentsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListIntentsResponse.Unmarshal(m, b) } func (m *ListIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ListIntentsResponse.Marshal(b, m, deterministic) } func (m *ListIntentsResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ListIntentsResponse.Merge(m, src) } func (m *ListIntentsResponse) XXX_Size() int { return xxx_messageInfo_ListIntentsResponse.Size(m) } func (m *ListIntentsResponse) XXX_DiscardUnknown() { xxx_messageInfo_ListIntentsResponse.DiscardUnknown(m) } var xxx_messageInfo_ListIntentsResponse proto.InternalMessageInfo func (m *ListIntentsResponse) GetIntents() []*Intent { if m != nil { return m.Intents } return nil } func (m *ListIntentsResponse) GetNextPageToken() string { if m != nil { return m.NextPageToken } return "" } // The request message for [Intents.GetIntent][google.cloud.dialogflow.v2.Intents.GetIntent]. type GetIntentRequest struct { // Required. The name of the intent. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Optional. The language to retrieve training phrases, parameters and rich // messages for. If not specified, the agent's default language is used. // [Many // languages](https://cloud.google.com/dialogflow/docs/reference/language) // are supported. Note: languages must be enabled in the agent before they can // be used. LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` // Optional. The resource view to apply to the returned intent. 
IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *GetIntentRequest) Reset() { *m = GetIntentRequest{} } func (m *GetIntentRequest) String() string { return proto.CompactTextString(m) } func (*GetIntentRequest) ProtoMessage() {} func (*GetIntentRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{3} } func (m *GetIntentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetIntentRequest.Unmarshal(m, b) } func (m *GetIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_GetIntentRequest.Marshal(b, m, deterministic) } func (m *GetIntentRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_GetIntentRequest.Merge(m, src) } func (m *GetIntentRequest) XXX_Size() int { return xxx_messageInfo_GetIntentRequest.Size(m) } func (m *GetIntentRequest) XXX_DiscardUnknown() { xxx_messageInfo_GetIntentRequest.DiscardUnknown(m) } var xxx_messageInfo_GetIntentRequest proto.InternalMessageInfo func (m *GetIntentRequest) GetName() string { if m != nil { return m.Name } return "" } func (m *GetIntentRequest) GetLanguageCode() string { if m != nil { return m.LanguageCode } return "" } func (m *GetIntentRequest) GetIntentView() IntentView { if m != nil { return m.IntentView } return IntentView_INTENT_VIEW_UNSPECIFIED } // The request message for [Intents.CreateIntent][google.cloud.dialogflow.v2.Intents.CreateIntent]. type CreateIntentRequest struct { // Required. The agent to create a intent for. // Format: `projects/<Project ID>/agent`. Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // Required. The intent to create. Intent *Intent `protobuf:"bytes,2,opt,name=intent,proto3" json:"intent,omitempty"` // Optional. The language of training phrases, parameters and rich messages // defined in `intent`. If not specified, the agent's default language is // used. [Many // languages](https://cloud.google.com/dialogflow/docs/reference/language) // are supported. Note: languages must be enabled in the agent before they can // be used. LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` // Optional. The resource view to apply to the returned intent. 
IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *CreateIntentRequest) Reset() { *m = CreateIntentRequest{} } func (m *CreateIntentRequest) String() string { return proto.CompactTextString(m) } func (*CreateIntentRequest) ProtoMessage() {} func (*CreateIntentRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{4} } func (m *CreateIntentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateIntentRequest.Unmarshal(m, b) } func (m *CreateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CreateIntentRequest.Marshal(b, m, deterministic) } func (m *CreateIntentRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_CreateIntentRequest.Merge(m, src) } func (m *CreateIntentRequest) XXX_Size() int { return xxx_messageInfo_CreateIntentRequest.Size(m) } func (m *CreateIntentRequest) XXX_DiscardUnknown() { xxx_messageInfo_CreateIntentRequest.DiscardUnknown(m) } var xxx_messageInfo_CreateIntentRequest proto.InternalMessageInfo func (m *CreateIntentRequest) GetParent() string { if m != nil { return m.Parent } return "" } func (m *CreateIntentRequest) GetIntent() *Intent { if m != nil { return m.Intent } return nil } func (m *CreateIntentRequest) GetLanguageCode() string { if m != nil { return m.LanguageCode } return "" } func (m *CreateIntentRequest) GetIntentView() IntentView { if m != nil { return m.IntentView } return IntentView_INTENT_VIEW_UNSPECIFIED } // The request message for [Intents.UpdateIntent][google.cloud.dialogflow.v2.Intents.UpdateIntent]. type UpdateIntentRequest struct { // Required. The intent to update. Intent *Intent `protobuf:"bytes,1,opt,name=intent,proto3" json:"intent,omitempty"` // Optional. The language of training phrases, parameters and rich messages // defined in `intent`. If not specified, the agent's default language is // used. [Many // languages](https://cloud.google.com/dialogflow/docs/reference/language) // are supported. Note: languages must be enabled in the agent before they can // be used. LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` // Optional. The mask to control which fields get updated. UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` // Optional. The resource view to apply to the returned intent. 
IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *UpdateIntentRequest) Reset() { *m = UpdateIntentRequest{} } func (m *UpdateIntentRequest) String() string { return proto.CompactTextString(m) } func (*UpdateIntentRequest) ProtoMessage() {} func (*UpdateIntentRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{5} } func (m *UpdateIntentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateIntentRequest.Unmarshal(m, b) } func (m *UpdateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UpdateIntentRequest.Marshal(b, m, deterministic) } func (m *UpdateIntentRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_UpdateIntentRequest.Merge(m, src) } func (m *UpdateIntentRequest) XXX_Size() int { return xxx_messageInfo_UpdateIntentRequest.Size(m) } func (m *UpdateIntentRequest) XXX_DiscardUnknown() { xxx_messageInfo_UpdateIntentRequest.DiscardUnknown(m) } var xxx_messageInfo_UpdateIntentRequest proto.InternalMessageInfo func (m *UpdateIntentRequest) GetIntent() *Intent { if m != nil { return m.Intent } return nil } func (m *UpdateIntentRequest) GetLanguageCode() string { if m != nil { return m.LanguageCode } return "" } func (m *UpdateIntentRequest) GetUpdateMask() *field_mask.FieldMask { if m != nil { return m.UpdateMask } return nil } func (m *UpdateIntentRequest) GetIntentView() IntentView { if m != nil { return m.IntentView } return IntentView_INTENT_VIEW_UNSPECIFIED } // The request message for [Intents.DeleteIntent][google.cloud.dialogflow.v2.Intents.DeleteIntent]. type DeleteIntentRequest struct { // Required. The name of the intent to delete. If this intent has direct or // indirect followup intents, we also delete them. // Format: `projects/<Project ID>/agent/intents/<Intent ID>`. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *DeleteIntentRequest) Reset() { *m = DeleteIntentRequest{} } func (m *DeleteIntentRequest) String() string { return proto.CompactTextString(m) } func (*DeleteIntentRequest) ProtoMessage() {} func (*DeleteIntentRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{6} } func (m *DeleteIntentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeleteIntentRequest.Unmarshal(m, b) } func (m *DeleteIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DeleteIntentRequest.Marshal(b, m, deterministic) } func (m *DeleteIntentRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_DeleteIntentRequest.Merge(m, src) } func (m *DeleteIntentRequest) XXX_Size() int { return xxx_messageInfo_DeleteIntentRequest.Size(m) } func (m *DeleteIntentRequest) XXX_DiscardUnknown() { xxx_messageInfo_DeleteIntentRequest.DiscardUnknown(m) } var xxx_messageInfo_DeleteIntentRequest proto.InternalMessageInfo func (m *DeleteIntentRequest) GetName() string { if m != nil { return m.Name } return "" } // The request message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents]. type BatchUpdateIntentsRequest struct { // Required. The name of the agent to update or create intents in. 
// Format: `projects/<Project ID>/agent`. Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // The source of the intent batch. // // Types that are valid to be assigned to IntentBatch: // *BatchUpdateIntentsRequest_IntentBatchUri // *BatchUpdateIntentsRequest_IntentBatchInline IntentBatch isBatchUpdateIntentsRequest_IntentBatch `protobuf_oneof:"intent_batch"` // Optional. The language of training phrases, parameters and rich messages // defined in `intents`. If not specified, the agent's default language is // used. [Many // languages](https://cloud.google.com/dialogflow/docs/reference/language) // are supported. Note: languages must be enabled in the agent before they can // be used. LanguageCode string `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` // Optional. The mask to control which fields get updated. UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` // Optional. The resource view to apply to the returned intent. IntentView IntentView `protobuf:"varint,6,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *BatchUpdateIntentsRequest) Reset() { *m = BatchUpdateIntentsRequest{} } func (m *BatchUpdateIntentsRequest) String() string { return proto.CompactTextString(m) } func (*BatchUpdateIntentsRequest) ProtoMessage() {} func (*BatchUpdateIntentsRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{7} } func (m *BatchUpdateIntentsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BatchUpdateIntentsRequest.Unmarshal(m, b) } func (m *BatchUpdateIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BatchUpdateIntentsRequest.Marshal(b, m, deterministic) } func (m *BatchUpdateIntentsRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_BatchUpdateIntentsRequest.Merge(m, src) } func (m *BatchUpdateIntentsRequest) XXX_Size() int { return xxx_messageInfo_BatchUpdateIntentsRequest.Size(m) } func (m *BatchUpdateIntentsRequest) XXX_DiscardUnknown() { xxx_messageInfo_BatchUpdateIntentsRequest.DiscardUnknown(m) } var xxx_messageInfo_BatchUpdateIntentsRequest proto.InternalMessageInfo func (m *BatchUpdateIntentsRequest) GetParent() string { if m != nil { return m.Parent } return "" } type isBatchUpdateIntentsRequest_IntentBatch interface { isBatchUpdateIntentsRequest_IntentBatch() } type BatchUpdateIntentsRequest_IntentBatchUri struct { IntentBatchUri string `protobuf:"bytes,2,opt,name=intent_batch_uri,json=intentBatchUri,proto3,oneof"` } type BatchUpdateIntentsRequest_IntentBatchInline struct { IntentBatchInline *IntentBatch `protobuf:"bytes,3,opt,name=intent_batch_inline,json=intentBatchInline,proto3,oneof"` } func (*BatchUpdateIntentsRequest_IntentBatchUri) isBatchUpdateIntentsRequest_IntentBatch() {} func (*BatchUpdateIntentsRequest_IntentBatchInline) isBatchUpdateIntentsRequest_IntentBatch() {} func (m *BatchUpdateIntentsRequest) GetIntentBatch() isBatchUpdateIntentsRequest_IntentBatch { if m != nil { return m.IntentBatch } return nil } func (m *BatchUpdateIntentsRequest) GetIntentBatchUri() string { if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchUri); ok { return x.IntentBatchUri } return "" } func (m *BatchUpdateIntentsRequest) 
GetIntentBatchInline() *IntentBatch { if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchInline); ok { return x.IntentBatchInline } return nil } func (m *BatchUpdateIntentsRequest) GetLanguageCode() string { if m != nil { return m.LanguageCode } return "" } func (m *BatchUpdateIntentsRequest) GetUpdateMask() *field_mask.FieldMask { if m != nil { return m.UpdateMask } return nil } func (m *BatchUpdateIntentsRequest) GetIntentView() IntentView { if m != nil { return m.IntentView } return IntentView_INTENT_VIEW_UNSPECIFIED } // XXX_OneofWrappers is for the internal use of the proto package. func (*BatchUpdateIntentsRequest) XXX_OneofWrappers() []interface{} { return []interface{}{ (*BatchUpdateIntentsRequest_IntentBatchUri)(nil), (*BatchUpdateIntentsRequest_IntentBatchInline)(nil), } } // The response message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents]. type BatchUpdateIntentsResponse struct { // The collection of updated or created intents. Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *BatchUpdateIntentsResponse) Reset() { *m = BatchUpdateIntentsResponse{} } func (m *BatchUpdateIntentsResponse) String() string { return proto.CompactTextString(m) } func (*BatchUpdateIntentsResponse) ProtoMessage() {} func (*BatchUpdateIntentsResponse) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{8} } func (m *BatchUpdateIntentsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BatchUpdateIntentsResponse.Unmarshal(m, b) } func (m *BatchUpdateIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BatchUpdateIntentsResponse.Marshal(b, m, deterministic) } func (m *BatchUpdateIntentsResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_BatchUpdateIntentsResponse.Merge(m, src) } func (m *BatchUpdateIntentsResponse) XXX_Size() int { return xxx_messageInfo_BatchUpdateIntentsResponse.Size(m) } func (m *BatchUpdateIntentsResponse) XXX_DiscardUnknown() { xxx_messageInfo_BatchUpdateIntentsResponse.DiscardUnknown(m) } var xxx_messageInfo_BatchUpdateIntentsResponse proto.InternalMessageInfo func (m *BatchUpdateIntentsResponse) GetIntents() []*Intent { if m != nil { return m.Intents } return nil } // The request message for [Intents.BatchDeleteIntents][google.cloud.dialogflow.v2.Intents.BatchDeleteIntents]. type BatchDeleteIntentsRequest struct { // Required. The name of the agent to delete all entities types for. Format: // `projects/<Project ID>/agent`. Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // Required. The collection of intents to delete. Only intent `name` must be // filled in. 
Intents []*Intent `protobuf:"bytes,2,rep,name=intents,proto3" json:"intents,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *BatchDeleteIntentsRequest) Reset() { *m = BatchDeleteIntentsRequest{} } func (m *BatchDeleteIntentsRequest) String() string { return proto.CompactTextString(m) } func (*BatchDeleteIntentsRequest) ProtoMessage() {} func (*BatchDeleteIntentsRequest) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{9} } func (m *BatchDeleteIntentsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BatchDeleteIntentsRequest.Unmarshal(m, b) } func (m *BatchDeleteIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BatchDeleteIntentsRequest.Marshal(b, m, deterministic) } func (m *BatchDeleteIntentsRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_BatchDeleteIntentsRequest.Merge(m, src) } func (m *BatchDeleteIntentsRequest) XXX_Size() int { return xxx_messageInfo_BatchDeleteIntentsRequest.Size(m) } func (m *BatchDeleteIntentsRequest) XXX_DiscardUnknown() { xxx_messageInfo_BatchDeleteIntentsRequest.DiscardUnknown(m) } var xxx_messageInfo_BatchDeleteIntentsRequest proto.InternalMessageInfo func (m *BatchDeleteIntentsRequest) GetParent() string { if m != nil { return m.Parent } return "" } func (m *BatchDeleteIntentsRequest) GetIntents() []*Intent { if m != nil { return m.Intents } return nil } // This message is a wrapper around a collection of intents. type IntentBatch struct { // A collection of intents. Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *IntentBatch) Reset() { *m = IntentBatch{} } func (m *IntentBatch) String() string { return proto.CompactTextString(m) } func (*IntentBatch) ProtoMessage() {} func (*IntentBatch) Descriptor() ([]byte, []int) { return fileDescriptor_6c20e31044d743bd, []int{10} } func (m *IntentBatch) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_IntentBatch.Unmarshal(m, b) } func (m *IntentBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_IntentBatch.Marshal(b, m, deterministic) } func (m *IntentBatch) XXX_Merge(src proto.Message) { xxx_messageInfo_IntentBatch.Merge(m, src) } func (m *IntentBatch) XXX_Size() int { return xxx_messageInfo_IntentBatch.Size(m) } func (m *IntentBatch) XXX_DiscardUnknown() { xxx_messageInfo_IntentBatch.DiscardUnknown(m) } var xxx_messageInfo_IntentBatch proto.InternalMessageInfo func (m *IntentBatch) GetIntents() []*Intent { if m != nil { return m.Intents } return nil } func init() { proto.RegisterEnum("google.cloud.dialogflow.v2.IntentView", IntentView_name, IntentView_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_WebhookState", Intent_WebhookState_name, Intent_WebhookState_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_TrainingPhrase_Type", Intent_TrainingPhrase_Type_name, Intent_TrainingPhrase_Type_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_Platform", Intent_Message_Platform_name, Intent_Message_Platform_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_MediaContent_ResponseMediaType", Intent_Message_MediaContent_ResponseMediaType_name, Intent_Message_MediaContent_ResponseMediaType_value) 
proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_BrowseCarouselCard_ImageDisplayOptions", Intent_Message_BrowseCarouselCard_ImageDisplayOptions_name, Intent_Message_BrowseCarouselCard_ImageDisplayOptions_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint", Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint_name, Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction_UrlTypeHint_value) proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_ColumnProperties_HorizontalAlignment", Intent_Message_ColumnProperties_HorizontalAlignment_name, Intent_Message_ColumnProperties_HorizontalAlignment_value) proto.RegisterType((*Intent)(nil), "google.cloud.dialogflow.v2.Intent") proto.RegisterType((*Intent_TrainingPhrase)(nil), "google.cloud.dialogflow.v2.Intent.TrainingPhrase") proto.RegisterType((*Intent_TrainingPhrase_Part)(nil), "google.cloud.dialogflow.v2.Intent.TrainingPhrase.Part") proto.RegisterType((*Intent_Parameter)(nil), "google.cloud.dialogflow.v2.Intent.Parameter") proto.RegisterType((*Intent_Message)(nil), "google.cloud.dialogflow.v2.Intent.Message") proto.RegisterType((*Intent_Message_Text)(nil), "google.cloud.dialogflow.v2.Intent.Message.Text") proto.RegisterType((*Intent_Message_Image)(nil), "google.cloud.dialogflow.v2.Intent.Message.Image") proto.RegisterType((*Intent_Message_QuickReplies)(nil), "google.cloud.dialogflow.v2.Intent.Message.QuickReplies") proto.RegisterType((*Intent_Message_Card)(nil), "google.cloud.dialogflow.v2.Intent.Message.Card") proto.RegisterType((*Intent_Message_Card_Button)(nil), "google.cloud.dialogflow.v2.Intent.Message.Card.Button") proto.RegisterType((*Intent_Message_SimpleResponse)(nil), "google.cloud.dialogflow.v2.Intent.Message.SimpleResponse") proto.RegisterType((*Intent_Message_SimpleResponses)(nil), "google.cloud.dialogflow.v2.Intent.Message.SimpleResponses") proto.RegisterType((*Intent_Message_BasicCard)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard") proto.RegisterType((*Intent_Message_BasicCard_Button)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard.Button") proto.RegisterType((*Intent_Message_BasicCard_Button_OpenUriAction)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard.Button.OpenUriAction") proto.RegisterType((*Intent_Message_Suggestion)(nil), "google.cloud.dialogflow.v2.Intent.Message.Suggestion") proto.RegisterType((*Intent_Message_Suggestions)(nil), "google.cloud.dialogflow.v2.Intent.Message.Suggestions") proto.RegisterType((*Intent_Message_LinkOutSuggestion)(nil), "google.cloud.dialogflow.v2.Intent.Message.LinkOutSuggestion") proto.RegisterType((*Intent_Message_ListSelect)(nil), "google.cloud.dialogflow.v2.Intent.Message.ListSelect") proto.RegisterType((*Intent_Message_ListSelect_Item)(nil), "google.cloud.dialogflow.v2.Intent.Message.ListSelect.Item") proto.RegisterType((*Intent_Message_CarouselSelect)(nil), "google.cloud.dialogflow.v2.Intent.Message.CarouselSelect") proto.RegisterType((*Intent_Message_CarouselSelect_Item)(nil), "google.cloud.dialogflow.v2.Intent.Message.CarouselSelect.Item") proto.RegisterType((*Intent_Message_SelectItemInfo)(nil), "google.cloud.dialogflow.v2.Intent.Message.SelectItemInfo") proto.RegisterType((*Intent_Message_MediaContent)(nil), "google.cloud.dialogflow.v2.Intent.Message.MediaContent") proto.RegisterType((*Intent_Message_MediaContent_ResponseMediaObject)(nil), 
"google.cloud.dialogflow.v2.Intent.Message.MediaContent.ResponseMediaObject") proto.RegisterType((*Intent_Message_BrowseCarouselCard)(nil), "google.cloud.dialogflow.v2.Intent.Message.BrowseCarouselCard") proto.RegisterType((*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem)(nil), "google.cloud.dialogflow.v2.Intent.Message.BrowseCarouselCard.BrowseCarouselCardItem") proto.RegisterType((*Intent_Message_BrowseCarouselCard_BrowseCarouselCardItem_OpenUrlAction)(nil), "google.cloud.dialogflow.v2.Intent.Message.BrowseCarouselCard.BrowseCarouselCardItem.OpenUrlAction") proto.RegisterType((*Intent_Message_TableCard)(nil), "google.cloud.dialogflow.v2.Intent.Message.TableCard") proto.RegisterType((*Intent_Message_ColumnProperties)(nil), "google.cloud.dialogflow.v2.Intent.Message.ColumnProperties") proto.RegisterType((*Intent_Message_TableCardRow)(nil), "google.cloud.dialogflow.v2.Intent.Message.TableCardRow") proto.RegisterType((*Intent_Message_TableCardCell)(nil), "google.cloud.dialogflow.v2.Intent.Message.TableCardCell") proto.RegisterType((*Intent_FollowupIntentInfo)(nil), "google.cloud.dialogflow.v2.Intent.FollowupIntentInfo") proto.RegisterType((*ListIntentsRequest)(nil), "google.cloud.dialogflow.v2.ListIntentsRequest") proto.RegisterType((*ListIntentsResponse)(nil), "google.cloud.dialogflow.v2.ListIntentsResponse") proto.RegisterType((*GetIntentRequest)(nil), "google.cloud.dialogflow.v2.GetIntentRequest") proto.RegisterType((*CreateIntentRequest)(nil), "google.cloud.dialogflow.v2.CreateIntentRequest") proto.RegisterType((*UpdateIntentRequest)(nil), "google.cloud.dialogflow.v2.UpdateIntentRequest") proto.RegisterType((*DeleteIntentRequest)(nil), "google.cloud.dialogflow.v2.DeleteIntentRequest") proto.RegisterType((*BatchUpdateIntentsRequest)(nil), "google.cloud.dialogflow.v2.BatchUpdateIntentsRequest") proto.RegisterType((*BatchUpdateIntentsResponse)(nil), "google.cloud.dialogflow.v2.BatchUpdateIntentsResponse") proto.RegisterType((*BatchDeleteIntentsRequest)(nil), "google.cloud.dialogflow.v2.BatchDeleteIntentsRequest") proto.RegisterType((*IntentBatch)(nil), "google.cloud.dialogflow.v2.IntentBatch") } func init() { proto.RegisterFile("google/cloud/dialogflow/v2/intent.proto", fileDescriptor_6c20e31044d743bd) } var fileDescriptor_6c20e31044d743bd = []byte{ // 3677 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x3b, 0x5b, 0x6f, 0x23, 0x59, 0x5a, 0x29, 0xdb, 0xb9, 0x7d, 0x76, 0x1c, 0xe7, 0x24, 0xdd, 0xed, 0xae, 0x9e, 0xd9, 0x4e, 0x7b, 0x2e, 0x1d, 0xa2, 0x5e, 0x7b, 0x36, 0xb3, 0x33, 0xcb, 0xf4, 0xcc, 0x30, 0x94, 0x1d, 0x27, 0xb6, 0xe2, 0xc4, 0xee, 0xb2, 0xd3, 0xb3, 0xdd, 0x02, 0x4a, 0x65, 0xfb, 0xc4, 0xa9, 0x4d, 0xb9, 0xaa, 0xa6, 0xea, 0x38, 0x99, 0xcc, 0xd0, 0x42, 0x62, 0x85, 0x90, 0x16, 0x71, 0x91, 0xe6, 0x01, 0xc4, 0x0b, 0x17, 0x09, 0x09, 0x21, 0xf8, 0x01, 0x48, 0xf0, 0xbe, 0x9a, 0x47, 0xf6, 0x01, 0x08, 0x2f, 0x8b, 0xc4, 0x0b, 0xf0, 0xc4, 0x1b, 0x82, 0x07, 0xd0, 0xb9, 0x94, 0x5d, 0x65, 0xbb, 0xd3, 0x76, 0x92, 0xd5, 0x3c, 0xec, 0x53, 0x5c, 0xdf, 0xf9, 0x2e, 0xe7, 0xbb, 0x7f, 0xe7, 0x54, 0x05, 0x1e, 0x76, 0x6c, 0xbb, 0x63, 0xe2, 0x5c, 0xcb, 0xb4, 0x7b, 0xed, 0x5c, 0xdb, 0xd0, 0x4d, 0xbb, 0x73, 0x64, 0xda, 0x67, 0xb9, 0xd3, 0xad, 0x9c, 0x61, 0x11, 0x6c, 0x91, 0xac, 0xe3, 0xda, 0xc4, 0x46, 0x32, 0x47, 0xcc, 0x32, 0xc4, 0xec, 0x00, 0x31, 0x7b, 0xba, 0x25, 0xbf, 0x26, 0x98, 0xe8, 0x8e, 0x91, 0xd3, 0x2d, 0xcb, 0x26, 0x3a, 0x31, 0x6c, 0xcb, 0xe3, 0x94, 0xf2, 0x9d, 0xc0, 0x6a, 0xcb, 0x34, 0xfa, 0x2c, 0xe5, 0xfb, 0x81, 0x85, 0x23, 0x03, 
0x9b, 0x6d, 0xad, 0x89, 0x8f, 0xf5, 0x53, 0xc3, 0x76, 0x05, 0xc2, 0xdd, 0x00, 0x82, 0x8b, 0x3d, 0xbb, 0xe7, 0xb6, 0xb0, 0x58, 0xfa, 0xf6, 0x25, 0xfb, 0xd6, 0x7b, 0x6d, 0xc3, 0xd6, 0x5a, 0xb6, 0x75, 0x64, 0x74, 0x04, 0xfa, 0xc6, 0x25, 0xe8, 0x2d, 0xdb, 0x22, 0xf8, 0x73, 0x7f, 0x53, 0x6f, 0x08, 0x4c, 0xd3, 0xb6, 0x3a, 0x6e, 0xcf, 0xb2, 0x0c, 0xab, 0x93, 0xb3, 0x1d, 0xec, 0x86, 0x54, 0xfa, 0x96, 0x40, 0x62, 0x4f, 0xcd, 0xde, 0x51, 0xae, 0xdd, 0xe3, 0x08, 0x62, 0xfd, 0xde, 0xf0, 0x3a, 0xee, 0x3a, 0xe4, 0x5c, 0x2c, 0xae, 0x0f, 0x2f, 0x72, 0xdd, 0xbb, 0xba, 0x77, 0x22, 0x30, 0x5e, 0x1b, 0xc6, 0xf0, 0x88, 0xdb, 0x6b, 0x89, 0x1d, 0x66, 0x7e, 0x98, 0x87, 0xb9, 0x32, 0x73, 0x0d, 0x42, 0x10, 0xb3, 0xf4, 0x2e, 0x4e, 0x4b, 0xeb, 0xd2, 0xc6, 0xa2, 0xca, 0x7e, 0xa3, 0xb7, 0x21, 0xd1, 0x36, 0x3c, 0xc7, 0xd4, 0xcf, 0x35, 0xb6, 0x16, 0xa1, 0x6b, 0xf9, 0xe8, 0x4f, 0x95, 0x88, 0x1a, 0x17, 0x0b, 0x07, 0x14, 0xef, 0x19, 0x2c, 0x9d, 0xe1, 0xe6, 0xb1, 0x6d, 0x9f, 0x68, 0x1e, 0xd1, 0x09, 0x4e, 0xcf, 0xad, 0x4b, 0x1b, 0xc9, 0xad, 0x5c, 0xf6, 0xe5, 0x8e, 0xce, 0x72, 0xb1, 0xd9, 0x4f, 0x39, 0x5d, 0x9d, 0x92, 0x51, 0xce, 0x92, 0x9a, 0x38, 0x0b, 0x80, 0xd0, 0x7d, 0x58, 0x70, 0x5c, 0xc3, 0x76, 0x0d, 0x72, 0x9e, 0x8e, 0xae, 0x4b, 0x1b, 0xb3, 0x1c, 0xa9, 0x0f, 0x44, 0x6f, 0x42, 0xdc, 0xf0, 0xb4, 0x23, 0xdd, 0x34, 0x9b, 0x7a, 0xeb, 0x24, 0x1d, 0x5b, 0x97, 0x36, 0x16, 0x38, 0x0e, 0x18, 0xde, 0x8e, 0x00, 0x53, 0xac, 0xae, 0xa9, 0xb5, 0x0d, 0x4f, 0x6f, 0x9a, 0xb8, 0x9d, 0x5e, 0x0d, 0x60, 0x75, 0xcd, 0x6d, 0x01, 0x46, 0xef, 0xc2, 0xaa, 0x61, 0x39, 0x3d, 0xa2, 0x09, 0x3f, 0x32, 0xad, 0xbd, 0xf4, 0xfc, 0x7a, 0x94, 0xab, 0x2d, 0xa9, 0x2b, 0x6c, 0xbd, 0xc0, 0x97, 0xa9, 0xee, 0x1e, 0xba, 0x07, 0x73, 0xf8, 0x14, 0x5b, 0xc4, 0x4b, 0x2f, 0x0c, 0xf0, 0x04, 0x08, 0xe9, 0x90, 0x22, 0xae, 0x6e, 0x50, 0xd7, 0x6b, 0xce, 0xb1, 0xab, 0x7b, 0xd8, 0x4b, 0x2f, 0xae, 0x47, 0x37, 0xe2, 0x5b, 0xdf, 0x99, 0xc0, 0x38, 0x0d, 0x41, 0x5a, 0x63, 0x94, 0x9c, 0xf3, 0x32, 0x09, 0x01, 0x99, 0x7c, 0xbd, 0x45, 0x03, 0x26, 0x0d, 0xbe, 0x7b, 0x24, 0x55, 0x80, 0xd0, 0x13, 0x58, 0xb6, 0x7b, 0x24, 0xa0, 0x92, 0x97, 0x8e, 0x33, 0xf1, 0x6f, 0x5c, 0x26, 0x5e, 0xe8, 0xc7, 0x59, 0x25, 0x39, 0x03, 0x01, 0xf3, 0xd0, 0x26, 0x24, 0x5d, 0xec, 0xe1, 0x00, 0xc7, 0xc4, 0xc0, 0x9a, 0x4b, 0x6c, 0xa9, 0x8f, 0xfb, 0x04, 0xc0, 0xd1, 0x5d, 0xbd, 0x8b, 0x09, 0x76, 0xbd, 0xf4, 0x12, 0x93, 0xfc, 0x68, 0x02, 0xc5, 0x6b, 0x3e, 0x91, 0xf0, 0xd1, 0x80, 0x09, 0xda, 0x83, 0x85, 0x2e, 0xf6, 0x3c, 0xbd, 0x83, 0xbd, 0x74, 0x92, 0x31, 0xdc, 0x9c, 0x80, 0xe1, 0x3e, 0x27, 0x11, 0xc1, 0xe3, 0x33, 0x40, 0xa7, 0x20, 0xb7, 0xf1, 0x91, 0xde, 0x33, 0x89, 0xe6, 0x62, 0xcf, 0xb1, 0x2d, 0x0f, 0x6b, 0x8e, 0xa9, 0x93, 0x23, 0xdb, 0xed, 0x7a, 0xe9, 0xe5, 0xf5, 0xe8, 0x46, 0x72, 0xeb, 0xdd, 0xc9, 0xd9, 0x67, 0x6b, 0x82, 0x96, 0xcb, 0x49, 0x0b, 0xde, 0xaa, 0x60, 0xed, 0xaf, 0x7a, 0xe8, 0x03, 0xb8, 0xeb, 0xda, 0x36, 0xd1, 0x8e, 0x6c, 0xd3, 0xb4, 0xcf, 0x7a, 0x8e, 0xc6, 0xeb, 0x23, 0xcf, 0xb2, 0x14, 0xcb, 0xc0, 0xdb, 0x14, 0x61, 0x47, 0xac, 0x73, 0x31, 0x2c, 0xd7, 0x3e, 0x86, 0x7b, 0x8e, 0xee, 0x52, 0xe4, 0xb1, 0xc4, 0x2b, 0x8c, 0x38, 0xcd, 0x51, 0xc6, 0x90, 0x77, 0x60, 0x6d, 0x98, 0xce, 0xb0, 0x8e, 0xec, 0x34, 0x62, 0xa6, 0x7c, 0x6f, 0x02, 0x5d, 0xc3, 0x4c, 0xcb, 0xd6, 0x91, 0xad, 0xa2, 0xa3, 0x11, 0x98, 0xfc, 0x77, 0x51, 0x48, 0x86, 0xe3, 0x77, 0x6c, 0x89, 0xa9, 0x42, 0x8c, 0x9c, 0x3b, 0xbc, 0xb4, 0x24, 0xb7, 0xde, 0x9f, 0x3a, 0x29, 0xb2, 0x8d, 0x73, 0x07, 0xf3, 0x92, 0xc4, 0x18, 0xa1, 0x27, 0x30, 0xeb, 0xe8, 0x2e, 0xf1, 0xd2, 0x51, 0xa6, 0xd1, 0x15, 0x38, 0xd6, 0x74, 0x97, 0x70, 0x8e, 0x9c, 0x13, 0xca, 0xc1, 
0x0a, 0x31, 0xba, 0xd8, 0xd3, 0xf4, 0x76, 0x1b, 0xb7, 0xb5, 0x96, 0xdd, 0xb3, 0x08, 0x2b, 0x34, 0xb3, 0x7e, 0x4a, 0xd2, 0x55, 0x85, 0x2e, 0x16, 0xe8, 0x9a, 0xfc, 0x43, 0x09, 0x62, 0x94, 0x0b, 0xd5, 0x98, 0x26, 0x82, 0xaf, 0x31, 0xfd, 0x4d, 0x4b, 0x11, 0xb6, 0x88, 0x41, 0xce, 0xb5, 0xbe, 0xe2, 0x22, 0x69, 0x81, 0xc3, 0xa9, 0x46, 0xe8, 0x2e, 0xcc, 0xea, 0xa6, 0xa1, 0x7b, 0xac, 0xe8, 0x89, 0x75, 0x0e, 0xa1, 0x55, 0xb9, 0xe7, 0x61, 0x57, 0x6b, 0xe3, 0x23, 0xc3, 0xc2, 0xed, 0x60, 0xc9, 0x8b, 0xd3, 0x85, 0x6d, 0x0e, 0xcf, 0x7c, 0x08, 0x31, 0xc6, 0x6a, 0x0d, 0x52, 0x8d, 0x67, 0xb5, 0xa2, 0x76, 0x78, 0x50, 0xaf, 0x15, 0x0b, 0xe5, 0x9d, 0x72, 0x71, 0x3b, 0x35, 0x83, 0xe2, 0x30, 0x5f, 0xfc, 0xbe, 0xb2, 0x5f, 0xab, 0x14, 0x53, 0x12, 0x4a, 0xc1, 0x42, 0xa3, 0xb8, 0x5f, 0xab, 0x28, 0x8d, 0x62, 0x2a, 0x22, 0x47, 0x16, 0x24, 0xf9, 0x2f, 0x22, 0xb0, 0xd8, 0xcf, 0xc2, 0xb1, 0x9e, 0x7b, 0x30, 0xae, 0x39, 0x84, 0xfb, 0xc2, 0x5d, 0x98, 0x3d, 0xd5, 0xcd, 0x1e, 0x0e, 0x29, 0xc1, 0x20, 0x68, 0x03, 0x96, 0xfc, 0xcc, 0xe3, 0x28, 0xb1, 0x01, 0x4a, 0x42, 0xac, 0x3c, 0x65, 0x98, 0x1f, 0x41, 0x3a, 0x60, 0x2f, 0x2d, 0x24, 0x73, 0x76, 0x40, 0x74, 0x6b, 0x60, 0xbc, 0xed, 0xc0, 0x16, 0x1e, 0xc0, 0x62, 0x57, 0xb7, 0xda, 0x3a, 0xb1, 0xdd, 0x73, 0xd6, 0x96, 0x84, 0xa5, 0x06, 0x50, 0xf4, 0x3a, 0xcc, 0x3b, 0xae, 0xdd, 0x75, 0x48, 0xa8, 0xd2, 0xfb, 0x30, 0xf4, 0x1a, 0xcc, 0x1b, 0x9e, 0x66, 0x1a, 0x1e, 0x49, 0x2f, 0x0c, 0xe8, 0xe7, 0x0c, 0xaf, 0x62, 0x78, 0x44, 0xfe, 0xaf, 0x77, 0x60, 0x5e, 0x64, 0x3f, 0x2a, 0x06, 0xbc, 0x1d, 0x9f, 0xa8, 0xfb, 0xf9, 0x75, 0xa3, 0x81, 0x3f, 0x27, 0xa5, 0x19, 0x11, 0x20, 0x25, 0x98, 0x35, 0xba, 0x7a, 0x87, 0x5b, 0x34, 0xbe, 0xf5, 0xce, 0x14, 0x7c, 0xca, 0x94, 0xae, 0x34, 0xa3, 0x72, 0x06, 0xe8, 0xd7, 0x60, 0xe9, 0xb3, 0x9e, 0xd1, 0x3a, 0xd1, 0x5c, 0xec, 0x98, 0x06, 0xe6, 0xc1, 0x14, 0xdf, 0xfa, 0xde, 0x14, 0x1c, 0x9f, 0x50, 0x7a, 0x95, 0x93, 0x97, 0x66, 0xd4, 0xc4, 0x67, 0x81, 0x67, 0xaa, 0x70, 0x4b, 0x77, 0x79, 0x04, 0x4e, 0xa7, 0x70, 0x41, 0x77, 0xdb, 0x54, 0x61, 0x4a, 0x8e, 0xde, 0x85, 0x79, 0x47, 0x3f, 0x37, 0x6d, 0xbd, 0xcd, 0x1c, 0x1a, 0xdf, 0xba, 0xe3, 0x73, 0xf2, 0xa7, 0x96, 0x6c, 0x9d, 0x4d, 0x2d, 0xa5, 0x19, 0xd5, 0xc7, 0x44, 0x1d, 0x48, 0x79, 0x46, 0xd7, 0x31, 0x71, 0xbf, 0x72, 0x53, 0xf7, 0x51, 0xea, 0xc7, 0x53, 0xec, 0xa3, 0xce, 0x58, 0xf8, 0x05, 0x9a, 0x6a, 0xb8, 0xec, 0x85, 0x41, 0xe8, 0x10, 0xa0, 0xa9, 0x7b, 0x46, 0x4b, 0x63, 0xaa, 0x2e, 0x30, 0x11, 0xdf, 0x9d, 0x42, 0x44, 0x9e, 0x12, 0x0b, 0x7d, 0x17, 0x9b, 0xfe, 0x03, 0x7a, 0x0e, 0x71, 0xaf, 0xd7, 0xe9, 0x60, 0x8f, 0x0d, 0x83, 0xe9, 0x45, 0xc6, 0xf7, 0xfd, 0x69, 0xb6, 0x3e, 0xa0, 0x2e, 0xcd, 0xa8, 0x41, 0x66, 0xc8, 0x82, 0x55, 0xd3, 0xb0, 0x4e, 0x34, 0xbb, 0x47, 0xb4, 0x01, 0x9c, 0xcd, 0x07, 0xf1, 0xad, 0x8f, 0xa6, 0x90, 0x51, 0x31, 0xac, 0x93, 0x6a, 0x8f, 0x0c, 0x44, 0x95, 0x66, 0xd4, 0x15, 0x73, 0x18, 0x88, 0xbe, 0x0f, 0x71, 0x9a, 0x1f, 0x9a, 0x87, 0x4d, 0xdc, 0x22, 0xe9, 0x38, 0x93, 0xf3, 0xde, 0x54, 0x72, 0x3c, 0x52, 0x67, 0xc4, 0xa5, 0x19, 0x15, 0xcc, 0xfe, 0x13, 0x6a, 0xc3, 0x72, 0x4b, 0x77, 0xed, 0x9e, 0x87, 0x4d, 0x9f, 0x7b, 0x82, 0x71, 0xff, 0x60, 0xba, 0x60, 0x63, 0x1c, 0xfa, 0x12, 0x92, 0xad, 0x10, 0x04, 0x7d, 0x06, 0x6b, 0x4d, 0xd7, 0x3e, 0xf3, 0xb0, 0xd6, 0x17, 0xc6, 0x9c, 0x7d, 0x9b, 0x89, 0xfa, 0x78, 0x1a, 0x67, 0x33, 0x36, 0xbe, 0x40, 0xe1, 0x75, 0xd4, 0x1c, 0x81, 0xd2, 0xa8, 0x22, 0x74, 0xe8, 0xe4, 0x82, 0xee, 0x4c, 0x1d, 0x55, 0x0d, 0x4a, 0xec, 0x47, 0x15, 0xf1, 0x1f, 0x68, 0xc6, 0x77, 0x71, 0xdb, 0xd0, 0xf9, 0x70, 0x66, 0x91, 0x74, 0x7a, 0xea, 0x8c, 0xdf, 0xa7, 0xf4, 0x05, 0x4e, 0x4e, 0x33, 0xbe, 0x1b, 0x78, 
0x46, 0x0d, 0x58, 0xf0, 0xe7, 0x23, 0x31, 0xe4, 0x5f, 0x7d, 0x3c, 0xea, 0x73, 0x92, 0xef, 0x43, 0x8c, 0x56, 0x40, 0x74, 0xa7, 0x5f, 0x40, 0xfb, 0x65, 0x98, 0x01, 0xe4, 0x5f, 0x85, 0x59, 0x56, 0xda, 0xd0, 0x3a, 0x2c, 0xb2, 0xd2, 0xa6, 0xf5, 0x5c, 0x83, 0x77, 0x23, 0xc1, 0x8b, 0x41, 0x0f, 0x5d, 0x03, 0x6d, 0x01, 0xd2, 0x5b, 0x2d, 0xec, 0x79, 0x46, 0xd3, 0x30, 0x59, 0xd7, 0xa0, 0x1c, 0x03, 0x5d, 0x76, 0x25, 0xb4, 0x4c, 0xe5, 0xca, 0x75, 0x48, 0x04, 0xeb, 0x1c, 0xed, 0x5b, 0xc4, 0x20, 0x26, 0x0e, 0x4a, 0xe0, 0x10, 0xda, 0xb7, 0xc2, 0x25, 0x35, 0x32, 0xd8, 0x6b, 0xa8, 0x38, 0xca, 0xff, 0x27, 0x41, 0x8c, 0xf9, 0xe4, 0x12, 0x6e, 0xf7, 0x61, 0xc1, 0xeb, 0x35, 0xf9, 0x6a, 0x60, 0x8b, 0x7d, 0x60, 0x58, 0xdf, 0xe8, 0x38, 0x7d, 0x0f, 0x61, 0xbe, 0xd9, 0x23, 0x84, 0xd6, 0x90, 0xd8, 0xc4, 0x13, 0x4f, 0xb0, 0x0c, 0x67, 0xf3, 0x8c, 0x5c, 0x74, 0x3d, 0xc1, 0x4b, 0xce, 0xc3, 0x1c, 0x87, 0x07, 0x9c, 0x22, 0x85, 0x9c, 0xc2, 0x8e, 0x66, 0xb6, 0x47, 0xd8, 0xb1, 0x2b, 0xb8, 0x79, 0x1f, 0x28, 0xbb, 0x90, 0x0c, 0xd7, 0x57, 0xf4, 0x26, 0x24, 0xd9, 0xb9, 0x8a, 0xd8, 0x9a, 0xe7, 0x60, 0xdc, 0x3a, 0x16, 0x13, 0x45, 0x82, 0x42, 0x1b, 0x76, 0x9d, 0xc1, 0xe8, 0xb4, 0xe1, 0x79, 0x5d, 0x53, 0x4c, 0x14, 0xec, 0x77, 0xf0, 0x28, 0xca, 0x76, 0x13, 0xb0, 0x85, 0x3f, 0x72, 0x30, 0x57, 0xfe, 0x3a, 0x2c, 0x0f, 0xd5, 0x74, 0x64, 0x8c, 0xe9, 0x14, 0x12, 0x33, 0xd5, 0x07, 0x57, 0xee, 0x14, 0x7c, 0x3e, 0x1c, 0xee, 0x15, 0xf2, 0x4f, 0xa2, 0xb0, 0xd8, 0xaf, 0xf7, 0xd7, 0x72, 0xfc, 0x5b, 0x90, 0xa4, 0xa9, 0xa1, 0x13, 0x82, 0xdb, 0x01, 0x8d, 0xd5, 0xa5, 0x3e, 0x94, 0x65, 0xcc, 0x9e, 0x3f, 0x2b, 0xc4, 0xae, 0x36, 0x2b, 0x88, 0x4d, 0xf1, 0x71, 0xe1, 0xf9, 0x20, 0x94, 0x66, 0x99, 0x7d, 0x3e, 0xbc, 0x4a, 0x9b, 0x1b, 0x1f, 0x4f, 0x7f, 0x2b, 0xf5, 0x03, 0x6a, 0x2d, 0x64, 0x16, 0xdf, 0x22, 0xa7, 0xb0, 0x6c, 0x3b, 0xd8, 0xa2, 0x81, 0xae, 0x89, 0xf3, 0x2c, 0x9f, 0x7f, 0xca, 0xd7, 0xd8, 0x44, 0xb6, 0xea, 0x60, 0xeb, 0xd0, 0x35, 0x14, 0xc6, 0x90, 0x3b, 0x6d, 0xc9, 0x0e, 0xc2, 0xe4, 0x07, 0xb0, 0x14, 0x42, 0x42, 0x29, 0x88, 0xf6, 0x8b, 0x8b, 0x4a, 0x7f, 0xca, 0x0f, 0x01, 0x02, 0xcd, 0x6e, 0xd4, 0xab, 0x11, 0xa1, 0x83, 0x7c, 0x02, 0xf1, 0x40, 0x57, 0x46, 0xbf, 0x12, 0x6e, 0xf1, 0xd2, 0xc4, 0x47, 0xac, 0xd1, 0x16, 0x2f, 0x2e, 0x5d, 0x02, 0xec, 0xe4, 0xe7, 0xb0, 0x32, 0xd2, 0x9e, 0x51, 0x16, 0x52, 0x6d, 0xfa, 0xd3, 0x62, 0x57, 0x48, 0xda, 0x60, 0x68, 0x17, 0x01, 0x1b, 0x58, 0x64, 0xe3, 0xf1, 0x2d, 0xae, 0x6c, 0xe0, 0x62, 0x87, 0x69, 0xfc, 0x3b, 0x51, 0x80, 0x41, 0x4f, 0xbe, 0x2c, 0x90, 0x0f, 0x61, 0xd6, 0x20, 0xb8, 0xcb, 0xeb, 0xe0, 0x74, 0xb3, 0xd7, 0x40, 0x40, 0xb6, 0x4c, 0x70, 0x57, 0x58, 0x92, 0x71, 0x0b, 0xe5, 0x47, 0x74, 0x4c, 0x7e, 0xc8, 0xff, 0x2e, 0x41, 0x8c, 0x52, 0x21, 0x15, 0x62, 0xec, 0x00, 0x2b, 0x4d, 0x3d, 0x16, 0x70, 0xd9, 0x94, 0x09, 0x3d, 0xb0, 0x8a, 0x33, 0x24, 0xe5, 0x35, 0xd0, 0x37, 0x32, 0xec, 0x62, 0xf4, 0x16, 0xc4, 0xdb, 0xd8, 0x6b, 0xb9, 0x86, 0xc3, 0x42, 0x34, 0x54, 0x86, 0x06, 0xf0, 0x1b, 0xcd, 0x4b, 0xf9, 0x1f, 0x23, 0x90, 0x0c, 0xcf, 0x30, 0xe8, 0x99, 0x6f, 0x76, 0x1e, 0x54, 0xbf, 0x74, 0xe5, 0x69, 0x68, 0xc4, 0xf4, 0x3f, 0x57, 0x96, 0x2d, 0x41, 0x32, 0xbc, 0x57, 0x9a, 0x10, 0x27, 0xf8, 0x3c, 0x98, 0x33, 0xf4, 0x99, 0xc5, 0xe3, 0xb9, 0x65, 0x5b, 0xe7, 0xdd, 0x50, 0xc7, 0xef, 0x03, 0xe5, 0x7f, 0x8a, 0x41, 0x22, 0x38, 0x39, 0xa1, 0x2e, 0x00, 0x9f, 0xc4, 0xd8, 0x29, 0x5f, 0x62, 0xb3, 0x52, 0xf9, 0x8a, 0x63, 0x58, 0xd6, 0xef, 0x30, 0x0c, 0xe8, 0xdf, 0x78, 0xd0, 0x43, 0xac, 0xff, 0x8c, 0x1c, 0x7f, 0xf0, 0xb3, 0x9b, 0x3f, 0xc0, 0x2d, 0xe2, 0xe7, 0xe3, 0xde, 0x8d, 0x48, 0xac, 0x32, 0x9e, 0x62, 0x14, 0xe4, 0x0f, 0x9e, 0xfc, 0x67, 
0x11, 0x58, 0x1d, 0x83, 0x35, 0xf6, 0xae, 0x60, 0xc8, 0xb7, 0x91, 0x97, 0xf8, 0x96, 0x9e, 0x23, 0x74, 0xb7, 0x83, 0x35, 0xee, 0xe1, 0xe8, 0x35, 0x3c, 0xcc, 0xce, 0x11, 0x94, 0x17, 0x9f, 0x1b, 0xf7, 0x20, 0x66, 0xb4, 0x6c, 0xeb, 0x5a, 0x41, 0x43, 0xcf, 0xab, 0x94, 0x09, 0xba, 0x0f, 0x71, 0x31, 0x5e, 0x6b, 0x3d, 0xd7, 0xe4, 0x97, 0x10, 0x2a, 0x08, 0xd0, 0xa1, 0x6b, 0xe6, 0xe7, 0x45, 0x8c, 0x66, 0x0a, 0xb0, 0x32, 0xe2, 0x3a, 0xf4, 0x06, 0xdc, 0x57, 0x8b, 0xf5, 0x5a, 0xf5, 0xa0, 0x5e, 0xd4, 0xf6, 0x8b, 0xdb, 0x65, 0x45, 0x1b, 0x73, 0x3d, 0xb3, 0x08, 0xb3, 0xca, 0xe1, 0x76, 0xb9, 0x9a, 0x92, 0xe4, 0x7f, 0x9d, 0x07, 0x34, 0x7a, 0xae, 0x40, 0x3f, 0x08, 0x97, 0x80, 0xc6, 0xb5, 0x4e, 0x29, 0x63, 0x40, 0x34, 0x25, 0xfc, 0x72, 0xfc, 0x23, 0x09, 0x6e, 0xf1, 0x39, 0xd4, 0x1f, 0xc2, 0x6c, 0x87, 0x37, 0x35, 0x7e, 0x6f, 0xf7, 0xe4, 0x7a, 0xc2, 0x99, 0x8d, 0xc5, 0xad, 0x4d, 0x95, 0x33, 0xe6, 0xd1, 0xb1, 0x6a, 0x8c, 0xae, 0xc8, 0xff, 0x1c, 0x83, 0xdb, 0xe3, 0xb7, 0x4b, 0xf7, 0x39, 0x32, 0x45, 0xf0, 0xf2, 0xd5, 0xfc, 0x59, 0x98, 0x47, 0x8c, 0x17, 0x26, 0x9f, 0x1c, 0x86, 0x26, 0x8b, 0xc1, 0x9c, 0x13, 0x09, 0xce, 0x39, 0xdf, 0x40, 0x99, 0x43, 0xf7, 0x60, 0xee, 0xc8, 0xb6, 0x09, 0x76, 0x83, 0x17, 0x66, 0x02, 0x24, 0xff, 0x28, 0xe2, 0x4f, 0x40, 0x66, 0x70, 0x02, 0x32, 0x07, 0x13, 0x90, 0x89, 0xfe, 0x50, 0x82, 0xa5, 0x9e, 0x6b, 0xf2, 0x1b, 0xb8, 0x63, 0xc3, 0xe2, 0xd3, 0x68, 0x72, 0xcb, 0xfa, 0xd9, 0x5b, 0x35, 0x7b, 0xe8, 0x9a, 0x34, 0x6d, 0x4a, 0x86, 0x45, 0xfc, 0x4b, 0xce, 0x01, 0x24, 0xb3, 0x0f, 0xf1, 0x00, 0x02, 0x7a, 0x1d, 0xee, 0x1e, 0xaa, 0x15, 0x9e, 0x50, 0xa5, 0xf2, 0x41, 0x63, 0x28, 0xab, 0x92, 0x00, 0xca, 0x7e, 0x4d, 0x53, 0x0a, 0x8d, 0x72, 0xf5, 0x20, 0x25, 0xa1, 0x65, 0x88, 0xd3, 0xe7, 0x42, 0xf5, 0xa0, 0x51, 0x3c, 0x68, 0xa4, 0x22, 0x99, 0x53, 0x58, 0x1d, 0x13, 0x8c, 0xe8, 0x2d, 0x78, 0x50, 0xde, 0x57, 0x76, 0x8b, 0xda, 0x76, 0xb9, 0x5e, 0xab, 0x28, 0xcf, 0xb4, 0x6a, 0x8d, 0x72, 0xa8, 0x0f, 0xb1, 0x5f, 0x80, 0xd8, 0xae, 0xaa, 0x3c, 0x4b, 0x49, 0x34, 0x7d, 0x3f, 0x2d, 0x95, 0x1b, 0xc5, 0x54, 0x04, 0xc5, 0x61, 0xbe, 0xa0, 0x56, 0x6b, 0xb5, 0xe2, 0x76, 0x2a, 0x8a, 0x6e, 0x03, 0xca, 0x57, 0x0e, 0x55, 0xb5, 0xb8, 0xad, 0xe5, 0x95, 0xc2, 0xde, 0xae, 0x5a, 0x3d, 0x3c, 0xd8, 0x4e, 0xc5, 0xe4, 0xbf, 0x8e, 0xc2, 0x62, 0xff, 0x48, 0xff, 0x92, 0x09, 0xf9, 0x95, 0x67, 0x86, 0x7e, 0xcc, 0x44, 0x6f, 0x20, 0x66, 0xba, 0xb0, 0xd2, 0xb2, 0xcd, 0x5e, 0xd7, 0xd2, 0x1c, 0xd7, 0x76, 0xb0, 0x4b, 0xe8, 0x61, 0x37, 0x36, 0xf5, 0xb1, 0xa0, 0xc0, 0x78, 0xd4, 0xfa, 0x2c, 0xb8, 0x8c, 0x54, 0x6b, 0x08, 0x8c, 0x6a, 0x10, 0xa3, 0xc1, 0x20, 0x0e, 0x1e, 0xdf, 0xbb, 0xca, 0x4d, 0x88, 0x6a, 0x9f, 0x89, 0xe3, 0x29, 0xe5, 0x14, 0x3c, 0xcd, 0xcc, 0xdd, 0xf4, 0x69, 0xe6, 0x8f, 0x22, 0x90, 0x1a, 0xd6, 0x0c, 0xdd, 0x86, 0xb9, 0x63, 0xac, 0xb7, 0xb1, 0x2b, 0xdc, 0x26, 0x9e, 0xd0, 0x6f, 0x4b, 0xb0, 0x76, 0x6c, 0xbb, 0xc6, 0x17, 0xb6, 0x45, 0x74, 0x53, 0xd3, 0x4d, 0xa3, 0x63, 0x75, 0xb1, 0x45, 0x44, 0xed, 0xac, 0x5e, 0xc3, 0x9a, 0xd9, 0x52, 0x9f, 0xaf, 0xe2, 0xb3, 0x15, 0x95, 0xf3, 0x78, 0x74, 0x25, 0xd3, 0x84, 0xd5, 0x31, 0x04, 0xe8, 0x4d, 0x58, 0x2f, 0x55, 0xd5, 0xf2, 0xf3, 0xea, 0x41, 0x43, 0xa9, 0x68, 0x4a, 0xa5, 0xbc, 0x7b, 0xb0, 0x5f, 0x1c, 0xc9, 0x9d, 0x38, 0xcc, 0x57, 0x8a, 0xca, 0x76, 0xf9, 0x60, 0x37, 0x25, 0x21, 0x80, 0xb9, 0x42, 0xf1, 0xa0, 0x51, 0x54, 0x53, 0x11, 0x94, 0x80, 0x85, 0x86, 0xaa, 0x94, 0x2b, 0x74, 0x25, 0x2a, 0xff, 0xae, 0x04, 0x89, 0xa0, 0x4b, 0x50, 0x1d, 0x66, 0x5b, 0xd8, 0x34, 0xfd, 0x3e, 0xf5, 0x8b, 0x57, 0x71, 0x6d, 0x01, 0x9b, 0xa6, 0x88, 0x4e, 0xc6, 0x8b, 0xbd, 0x3e, 0x30, 0x4e, 0x8d, 0x36, 0x76, 0x35, 0xfd, 
0x88, 0x16, 0xb6, 0xc8, 0xe0, 0x6a, 0x3e, 0x21, 0x56, 0x14, 0xba, 0x20, 0xbf, 0x01, 0x4b, 0x21, 0x36, 0xe3, 0xde, 0xc9, 0x64, 0xfe, 0x54, 0x82, 0x05, 0xff, 0x72, 0x0a, 0xa5, 0x61, 0xad, 0x56, 0x51, 0x1a, 0x3b, 0x55, 0x75, 0x7f, 0xc8, 0x04, 0x09, 0x58, 0xd8, 0x51, 0x0a, 0xc5, 0x7c, 0xb5, 0xba, 0xc7, 0x73, 0xbc, 0x5e, 0x51, 0x0a, 0x7b, 0xc2, 0x04, 0xc5, 0x4a, 0x71, 0x57, 0x55, 0xf6, 0x53, 0x51, 0x34, 0x0f, 0xd1, 0xbd, 0xf2, 0x5e, 0x2a, 0xc6, 0x30, 0xf6, 0x9e, 0xd5, 0x8a, 0xa9, 0x59, 0x5a, 0x1a, 0x2a, 0xe5, 0x83, 0x62, 0x6a, 0x8e, 0x02, 0x9f, 0x96, 0xf3, 0x45, 0x35, 0x35, 0x8f, 0x6e, 0xc1, 0x0a, 0x2f, 0x45, 0x75, 0xad, 0x7a, 0xa0, 0xed, 0x56, 0xab, 0xbb, 0x95, 0x62, 0x6a, 0x01, 0xad, 0xc2, 0x32, 0xff, 0xad, 0x95, 0x94, 0x83, 0xdd, 0xea, 0x61, 0xa3, 0x9e, 0x8a, 0xe7, 0x17, 0x61, 0x5e, 0xbc, 0xb6, 0x94, 0x7f, 0x4b, 0x02, 0x34, 0xfa, 0x16, 0x0e, 0xbd, 0x33, 0xfa, 0x6a, 0x2f, 0x30, 0x88, 0x0d, 0xbd, 0xa3, 0x9b, 0xe4, 0x5d, 0x62, 0xe4, 0xf2, 0x77, 0x89, 0x19, 0x02, 0x89, 0xe0, 0xeb, 0x7b, 0x5a, 0x7c, 0x3f, 0x2d, 0xe6, 0x4b, 0xd5, 0xea, 0x9e, 0x56, 0x6f, 0x28, 0x8d, 0xe1, 0x91, 0xe6, 0x2e, 0xdc, 0x0a, 0x2f, 0x17, 0x0f, 0x94, 0x7c, 0xa5, 0xb8, 0x9d, 0x92, 0xd0, 0x26, 0xbc, 0x3d, 0x76, 0x49, 0xdb, 0xa9, 0xaa, 0x5a, 0xbd, 0x52, 0x6d, 0x68, 0x3b, 0xe5, 0x0a, 0x0b, 0xb0, 0xc8, 0xe3, 0xda, 0x7f, 0x28, 0xfb, 0xb0, 0x1e, 0x88, 0x1b, 0x1e, 0x4f, 0xba, 0x63, 0x78, 0xd9, 0x96, 0xdd, 0xcd, 0x89, 0xef, 0x19, 0x7e, 0xc1, 0x71, 0x6d, 0x36, 0xa8, 0xe6, 0xbe, 0x14, 0xbf, 0x5e, 0xe4, 0xf4, 0x0e, 0xb6, 0x88, 0xf8, 0x16, 0xc5, 0xcb, 0x7d, 0xc9, 0x7f, 0xbc, 0xc8, 0x7c, 0x15, 0x01, 0x44, 0x0f, 0xa3, 0x9c, 0xd2, 0x53, 0xf1, 0x67, 0x3d, 0xec, 0x11, 0xf4, 0x09, 0xcc, 0x71, 0xd5, 0xc5, 0x69, 0xe0, 0xe1, 0x4f, 0x95, 0xc8, 0xff, 0x28, 0x0f, 0xe0, 0xfe, 0xcb, 0x65, 0x2b, 0x54, 0x80, 0x2a, 0xc8, 0x68, 0x90, 0x9a, 0xba, 0xd5, 0xe9, 0xd1, 0xb9, 0xa9, 0x65, 0xb7, 0x43, 0x55, 0x3b, 0xe1, 0xaf, 0x14, 0xec, 0x36, 0x46, 0x15, 0x88, 0x0b, 0xc3, 0x9f, 0x1a, 0xf8, 0x4c, 0x34, 0xd7, 0xb7, 0x5f, 0x9d, 0x29, 0x4f, 0x0d, 0x7c, 0xe6, 0x7f, 0xec, 0xd0, 0x07, 0xa0, 0x75, 0x58, 0x74, 0xa8, 0x4c, 0xcf, 0xf8, 0x02, 0x07, 0xdf, 0x53, 0x2e, 0x50, 0x68, 0xdd, 0xf8, 0x02, 0xa3, 0x0c, 0x00, 0xc3, 0x20, 0xf6, 0x09, 0xb6, 0x82, 0x43, 0x01, 0x23, 0x6c, 0x50, 0x68, 0xe6, 0x4b, 0x58, 0x0d, 0x19, 0x45, 0x5c, 0xe1, 0x7d, 0x04, 0xf3, 0xc2, 0x80, 0x22, 0xa1, 0x33, 0xaf, 0xde, 0xa6, 0xea, 0x93, 0xa0, 0xb7, 0x61, 0xd9, 0xc2, 0x9f, 0x13, 0x2d, 0x20, 0x9d, 0x47, 0xd9, 0x12, 0x05, 0xd7, 0xfa, 0xc2, 0x7f, 0x2c, 0x41, 0x6a, 0x17, 0x0b, 0xe1, 0xbe, 0x43, 0x3e, 0x0a, 0x9e, 0x2c, 0xf2, 0x1b, 0xcc, 0x1d, 0x99, 0x57, 0x87, 0x82, 0x38, 0x83, 0x7c, 0x43, 0xde, 0xc8, 0xfc, 0x7e, 0x04, 0x56, 0x0b, 0x2e, 0xd6, 0x09, 0x0e, 0x6b, 0x73, 0xed, 0xf0, 0xfa, 0x04, 0xe6, 0xb8, 0x18, 0x71, 0x51, 0x36, 0x81, 0x23, 0xf8, 0x89, 0x56, 0x90, 0x8d, 0x5a, 0x24, 0x3a, 0xa1, 0x45, 0x62, 0xd7, 0xb3, 0xc8, 0x1f, 0x44, 0x60, 0xf5, 0xd0, 0x69, 0x8f, 0xb3, 0x88, 0x50, 0x48, 0xba, 0x21, 0x85, 0x5e, 0xea, 0xe2, 0x5f, 0x86, 0x78, 0x8f, 0xed, 0x80, 0x7d, 0x2b, 0x25, 0x06, 0x26, 0x79, 0xe4, 0xb5, 0xe3, 0x8e, 0x81, 0xcd, 0xf6, 0xbe, 0xee, 0x9d, 0x08, 0x25, 0x38, 0x0d, 0x05, 0xdc, 0xb0, 0x49, 0xea, 0xb0, 0xba, 0x8d, 0x4d, 0x3c, 0x6c, 0x91, 0x6b, 0x45, 0x7c, 0xe6, 0x6f, 0xa2, 0x70, 0x37, 0xaf, 0x93, 0xd6, 0x71, 0xd0, 0xd8, 0x37, 0x57, 0xde, 0x36, 0x21, 0x25, 0x2c, 0xd0, 0xa4, 0x42, 0xb4, 0xfe, 0x45, 0x62, 0x69, 0x46, 0x4d, 0xf2, 0x15, 0x2e, 0xdd, 0x35, 0xd0, 0x33, 0x58, 0x0d, 0xe1, 0x1a, 0x96, 0x69, 0x58, 0xfe, 0xa0, 0xfa, 0x70, 0x02, 0x3f, 0x53, 0xaa, 0xd2, 0x8c, 0xba, 0x12, 0xe0, 0x5b, 0x66, 0x3c, 0x46, 
0x9d, 0x1e, 0x9b, 0xd0, 0xe9, 0xb3, 0xd7, 0x76, 0xfa, 0xdc, 0xb5, 0x9c, 0x9e, 0x4f, 0x42, 0x22, 0x68, 0x94, 0xcc, 0x73, 0x90, 0xc7, 0xb9, 0xeb, 0x26, 0x0a, 0x6f, 0xe6, 0x4f, 0x24, 0x11, 0x0b, 0xc1, 0x30, 0xbb, 0xb9, 0x58, 0x50, 0x06, 0x9b, 0x8b, 0x4c, 0xba, 0x39, 0x9e, 0xbb, 0xfd, 0x1d, 0xee, 0x41, 0x3c, 0xe0, 0xeb, 0xeb, 0xa9, 0xbb, 0xf9, 0x09, 0xc0, 0xc0, 0xf2, 0xe8, 0x1e, 0xdc, 0x29, 0xb3, 0x13, 0x9e, 0xf6, 0xb4, 0x5c, 0xfc, 0x74, 0x68, 0x2c, 0x59, 0x83, 0x54, 0x70, 0x71, 0xe7, 0xb0, 0x52, 0x49, 0x49, 0x5b, 0x5f, 0x25, 0x60, 0x5e, 0x18, 0x09, 0xfd, 0xbd, 0x04, 0xf1, 0x40, 0x2b, 0x44, 0xd9, 0xcb, 0x76, 0x32, 0x3a, 0x48, 0xc8, 0xb9, 0x89, 0xf1, 0xb9, 0xab, 0x33, 0xfb, 0x17, 0x8a, 0x30, 0xec, 0x85, 0xb2, 0xc6, 0x7f, 0x3c, 0x0a, 0xc5, 0xf8, 0x6f, 0xfe, 0xe4, 0xdf, 0xbe, 0x8a, 0x3c, 0x44, 0x6f, 0xe5, 0x4e, 0xb7, 0x72, 0x5f, 0x72, 0x84, 0x8f, 0xfb, 0xf3, 0xce, 0x26, 0x9f, 0x73, 0x5e, 0xf8, 0x83, 0x0e, 0xfa, 0x4b, 0x09, 0x16, 0xfb, 0xcd, 0x14, 0x5d, 0xfa, 0xfd, 0xdd, 0x70, 0xcf, 0x95, 0x27, 0xb0, 0x7a, 0xa6, 0x7c, 0xa1, 0xb0, 0x82, 0x73, 0xa1, 0x20, 0xfa, 0xe7, 0x92, 0xad, 0xd2, 0xe5, 0x91, 0x8d, 0xf6, 0x07, 0xb2, 0xcd, 0x17, 0xe8, 0xc7, 0x12, 0x24, 0x82, 0xcd, 0x12, 0x5d, 0x6a, 0xbb, 0x31, 0x6d, 0x75, 0xa2, 0x0d, 0xb7, 0x2e, 0x94, 0x25, 0x61, 0x56, 0x2e, 0xfb, 0x42, 0xb9, 0x17, 0x7a, 0x1e, 0xa3, 0x42, 0x2e, 0x33, 0x99, 0xb5, 0x1f, 0xfb, 0xcd, 0xe8, 0x5f, 0x24, 0x48, 0x04, 0x33, 0xf9, 0x72, 0x55, 0xc6, 0xf4, 0xc3, 0x89, 0x54, 0x39, 0xbd, 0x50, 0xd6, 0xc6, 0xed, 0xf9, 0x42, 0x59, 0x1f, 0x07, 0x7e, 0x14, 0x28, 0x80, 0x4c, 0xad, 0xf7, 0xb7, 0x1e, 0x31, 0xb5, 0xc4, 0x07, 0xda, 0xaf, 0x72, 0x50, 0x5f, 0x3b, 0x7a, 0xcc, 0x0b, 0x96, 0x92, 0xcb, 0xb5, 0x1b, 0xd3, 0xdb, 0xe4, 0xdb, 0x23, 0x85, 0xb7, 0xd8, 0x75, 0xc8, 0x79, 0xe6, 0xbb, 0x22, 0x9a, 0x78, 0xe0, 0x6c, 0x4e, 0x18, 0x38, 0xff, 0x2b, 0x01, 0x1a, 0x2d, 0x9e, 0xe8, 0xd2, 0xb7, 0x6d, 0x2f, 0xed, 0x8d, 0xf2, 0xeb, 0x3e, 0x59, 0xe0, 0xd3, 0xed, 0x6c, 0xd5, 0xff, 0x74, 0x3b, 0xf3, 0x7b, 0xd2, 0xd7, 0x4a, 0x15, 0xde, 0x9b, 0x92, 0xb5, 0xa8, 0xe3, 0xb7, 0xc7, 0x7f, 0xdb, 0xc4, 0xbd, 0x91, 0xf9, 0xce, 0x64, 0x41, 0xd6, 0x1c, 0xf0, 0x7f, 0x2c, 0x6d, 0xa2, 0xff, 0xf4, 0xb5, 0x0f, 0x55, 0xf7, 0x09, 0xb4, 0x1f, 0xd7, 0x0d, 0x5e, 0xa5, 0xfd, 0x6f, 0x7c, 0xad, 0xe4, 0xe0, 0xd6, 0x58, 0xe7, 0xbd, 0x4c, 0xb9, 0x0b, 0x25, 0x19, 0xca, 0x2f, 0xef, 0x0a, 0xda, 0xf2, 0xad, 0x3e, 0x96, 0x36, 0xe5, 0xcf, 0xbf, 0x56, 0xee, 0xbe, 0xb4, 0x31, 0xfd, 0x83, 0xf2, 0xec, 0x98, 0x10, 0xc7, 0x7b, 0x9c, 0xcb, 0x9d, 0x9d, 0x8d, 0x74, 0x2d, 0xbd, 0x47, 0x8e, 0xf9, 0x97, 0xfb, 0xdf, 0xf6, 0xbf, 0x3e, 0x79, 0xf4, 0x2a, 0xf4, 0x81, 0xa8, 0xfc, 0x1f, 0x4b, 0xf0, 0xad, 0x96, 0xdd, 0xbd, 0xc4, 0xac, 0x79, 0xd1, 0xc4, 0x6a, 0xd4, 0x00, 0x35, 0xe9, 0xf9, 0xb6, 0x40, 0xed, 0xd8, 0x34, 0x0f, 0xb3, 0xb6, 0xdb, 0xc9, 0x75, 0xb0, 0xc5, 0xcc, 0x93, 0x1b, 0x08, 0x1b, 0xf7, 0xcf, 0x04, 0x1f, 0x0e, 0x9e, 0xfe, 0x5b, 0x92, 0xfe, 0x3c, 0x12, 0xd9, 0xde, 0xf9, 0xab, 0x88, 0xbc, 0xcb, 0xd9, 0x15, 0x98, 0xe4, 0xed, 0x81, 0xe4, 0xa7, 0x5b, 0xcd, 0x39, 0xc6, 0xf5, 0xdd, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x71, 0x22, 0x39, 0x97, 0x88, 0x31, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // IntentsClient is the client API for Intents service. 
// // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type IntentsClient interface { // Returns the list of all intents in the specified agent. ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) // Retrieves the specified intent. GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) // Creates an intent in the specified agent. CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) // Updates the specified intent. UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) // Deletes the specified intent and its direct or indirect followup intents. DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) // Updates/Creates multiple intents in the specified agent. // // Operation <response: [BatchUpdateIntentsResponse][google.cloud.dialogflow.v2.BatchUpdateIntentsResponse]> BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) // Deletes intents in the specified agent. // // Operation <response: [google.protobuf.Empty][google.protobuf.Empty]> BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) } type intentsClient struct { cc *grpc.ClientConn } func NewIntentsClient(cc *grpc.ClientConn) IntentsClient { return &intentsClient{cc} } func (c *intentsClient) ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) { out := new(ListIntentsResponse) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/ListIntents", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *intentsClient) GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) { out := new(Intent) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/GetIntent", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *intentsClient) CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { out := new(Intent) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/CreateIntent", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *intentsClient) UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { out := new(Intent) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/UpdateIntent", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *intentsClient) DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { out := new(empty.Empty) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/DeleteIntent", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *intentsClient) BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { out := new(longrunning.Operation) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/BatchUpdateIntents", in, out, opts...) 
if err != nil { return nil, err } return out, nil } func (c *intentsClient) BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { out := new(longrunning.Operation) err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/BatchDeleteIntents", in, out, opts...) if err != nil { return nil, err } return out, nil } // IntentsServer is the server API for Intents service. type IntentsServer interface { // Returns the list of all intents in the specified agent. ListIntents(context.Context, *ListIntentsRequest) (*ListIntentsResponse, error) // Retrieves the specified intent. GetIntent(context.Context, *GetIntentRequest) (*Intent, error) // Creates an intent in the specified agent. CreateIntent(context.Context, *CreateIntentRequest) (*Intent, error) // Updates the specified intent. UpdateIntent(context.Context, *UpdateIntentRequest) (*Intent, error) // Deletes the specified intent and its direct or indirect followup intents. DeleteIntent(context.Context, *DeleteIntentRequest) (*empty.Empty, error) // Updates/Creates multiple intents in the specified agent. // // Operation <response: [BatchUpdateIntentsResponse][google.cloud.dialogflow.v2.BatchUpdateIntentsResponse]> BatchUpdateIntents(context.Context, *BatchUpdateIntentsRequest) (*longrunning.Operation, error) // Deletes intents in the specified agent. // // Operation <response: [google.protobuf.Empty][google.protobuf.Empty]> BatchDeleteIntents(context.Context, *BatchDeleteIntentsRequest) (*longrunning.Operation, error) } // UnimplementedIntentsServer can be embedded to have forward compatible implementations. type UnimplementedIntentsServer struct { } func (*UnimplementedIntentsServer) ListIntents(ctx context.Context, req *ListIntentsRequest) (*ListIntentsResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method ListIntents not implemented") } func (*UnimplementedIntentsServer) GetIntent(ctx context.Context, req *GetIntentRequest) (*Intent, error) { return nil, status.Errorf(codes.Unimplemented, "method GetIntent not implemented") } func (*UnimplementedIntentsServer) CreateIntent(ctx context.Context, req *CreateIntentRequest) (*Intent, error) { return nil, status.Errorf(codes.Unimplemented, "method CreateIntent not implemented") } func (*UnimplementedIntentsServer) UpdateIntent(ctx context.Context, req *UpdateIntentRequest) (*Intent, error) { return nil, status.Errorf(codes.Unimplemented, "method UpdateIntent not implemented") } func (*UnimplementedIntentsServer) DeleteIntent(ctx context.Context, req *DeleteIntentRequest) (*empty.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method DeleteIntent not implemented") } func (*UnimplementedIntentsServer) BatchUpdateIntents(ctx context.Context, req *BatchUpdateIntentsRequest) (*longrunning.Operation, error) { return nil, status.Errorf(codes.Unimplemented, "method BatchUpdateIntents not implemented") } func (*UnimplementedIntentsServer) BatchDeleteIntents(ctx context.Context, req *BatchDeleteIntentsRequest) (*longrunning.Operation, error) { return nil, status.Errorf(codes.Unimplemented, "method BatchDeleteIntents not implemented") } func RegisterIntentsServer(s *grpc.Server, srv IntentsServer) { s.RegisterService(&_Intents_serviceDesc, srv) } func _Intents_ListIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ListIntentsRequest) if err := dec(in); err != nil { return nil, err } if 
interceptor == nil { return srv.(IntentsServer).ListIntents(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/ListIntents", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).ListIntents(ctx, req.(*ListIntentsRequest)) } return interceptor(ctx, in, info, handler) } func _Intents_GetIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetIntentRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).GetIntent(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/GetIntent", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).GetIntent(ctx, req.(*GetIntentRequest)) } return interceptor(ctx, in, info, handler) } func _Intents_CreateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(CreateIntentRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).CreateIntent(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/CreateIntent", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).CreateIntent(ctx, req.(*CreateIntentRequest)) } return interceptor(ctx, in, info, handler) } func _Intents_UpdateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(UpdateIntentRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).UpdateIntent(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/UpdateIntent", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).UpdateIntent(ctx, req.(*UpdateIntentRequest)) } return interceptor(ctx, in, info, handler) } func _Intents_DeleteIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(DeleteIntentRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).DeleteIntent(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/DeleteIntent", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).DeleteIntent(ctx, req.(*DeleteIntentRequest)) } return interceptor(ctx, in, info, handler) } func _Intents_BatchUpdateIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(BatchUpdateIntentsRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).BatchUpdateIntents(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/BatchUpdateIntents", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).BatchUpdateIntents(ctx, req.(*BatchUpdateIntentsRequest)) } return interceptor(ctx, in, info, handler) } func 
_Intents_BatchDeleteIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(BatchDeleteIntentsRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(IntentsServer).BatchDeleteIntents(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.dialogflow.v2.Intents/BatchDeleteIntents", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(IntentsServer).BatchDeleteIntents(ctx, req.(*BatchDeleteIntentsRequest)) } return interceptor(ctx, in, info, handler) } var _Intents_serviceDesc = grpc.ServiceDesc{ ServiceName: "google.cloud.dialogflow.v2.Intents", HandlerType: (*IntentsServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "ListIntents", Handler: _Intents_ListIntents_Handler, }, { MethodName: "GetIntent", Handler: _Intents_GetIntent_Handler, }, { MethodName: "CreateIntent", Handler: _Intents_CreateIntent_Handler, }, { MethodName: "UpdateIntent", Handler: _Intents_UpdateIntent_Handler, }, { MethodName: "DeleteIntent", Handler: _Intents_DeleteIntent_Handler, }, { MethodName: "BatchUpdateIntents", Handler: _Intents_BatchUpdateIntents_Handler, }, { MethodName: "BatchDeleteIntents", Handler: _Intents_BatchDeleteIntents_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "google/cloud/dialogflow/v2/intent.proto", }
// "title": "or this" // }
contains-duplicate-ii_test.go
package problem0219 import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) type question struct { para ans } // para holds the input parameters type para struct { nums []int k int } // ans holds the expected answer type ans struct { one bool } func Test_Problem0219(t *testing.T) { ast := assert.New(t) qs := []question{ question{ para{ []int{1, 2, 1, 3, 4, 5}, 2, }, ans{ true,
question{ para{ []int{1, 2, 3, 4, 5}, 0, }, ans{ false, }, }, question{ para{ []int{1, 2, 3, 4, 5}, 2, }, ans{ false, }, }, // To add more test cases, copy the element above. } for _, q := range qs { a, p := q.ans, q.para fmt.Printf("~~%v~~\n", p) ast.Equal(a.one, containsNearbyDuplicate(p.nums, p.k), "input: %v", p) } }
}, },
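The solution under test, containsNearbyDuplicate, is not included in this excerpt. A minimal sketch of the standard approach that satisfies the cases above: remember the last index at which each value was seen and report a duplicate whose indices are at most k apart.

package problem0219

// containsNearbyDuplicate returns true if nums contains two equal values
// at indices i and j with i-j <= k (a sliding-window duplicate check).
func containsNearbyDuplicate(nums []int, k int) bool {
	lastIndex := make(map[int]int, len(nums))
	for i, n := range nums {
		if j, seen := lastIndex[n]; seen && i-j <= k {
			return true
		}
		lastIndex[n] = i // remember the most recent index of n
	}
	return false
}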
coverage.py
import os import subprocess def run_test_coverage(): """ Run test coverage: - Runs the tests - Checks test coverage - Generates an HTML coverage report under the "htmlcov" directory.
try: subprocess.run(py_test_command.split()) coverage_dir = os.path.join(CURRENT_DIR, "htmlcov") os.chdir(coverage_dir) except FileNotFoundError: # raised when "coverage" or "htmlcov" is missing print("Please activate your local virtual environment and re-run this script.") def run_http_server(): """Up & run a simple HTTP server on port 8080.""" command = "python -m http.server 8080" subprocess.run(command.split()) if __name__ == "__main__": run_test_coverage() run_http_server()
""" py_test_command = "coverage run -m pytest" CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
shared.ts
import path from 'path'; import resolveFrom from 'resolve-from'; import webpack from 'webpack'; import { CosmosConfig } from '../../../config'; import { getCliArgs } from '../../../shared/cli'; import { moduleExists, requireModule } from '../../../shared/fs'; import { createWebpackCosmosConfig } from '../cosmosConfig/webpack'; import { getDefaultWebpackConfig } from './default'; import { getDefaultExport } from './module'; type WebpackConfigExport = | webpack.Configuration // Mirror webpack API for config functions // https://webpack.js.org/configuration/configuration-types/#exporting-a-function | (( env: unknown, _argv: {} ) => webpack.Configuration | Promise<webpack.Configuration>); // Override arguments are inspired by react-app-rewired // https://github.com/timarney/react-app-rewired/blob/b673379d32fe7b57c71667f4827f3b16e3717363/scripts/start.js#L22 type WebpackOverride = ( baseConfig: webpack.Configuration, env: string ) => webpack.Configuration; export async function getUserWebpackConfig( cosmosConfig: CosmosConfig, userWebpack: typeof webpack ) { const baseWebpackConfig = await getBaseWebpackConfig( cosmosConfig, userWebpack ); const { overridePath } = createWebpackCosmosConfig(cosmosConfig); if (!overridePath || !moduleExists(overridePath)) { return baseWebpackConfig; } const relPath = path.relative(process.cwd(), overridePath); console.log(`[Cosmos] Overriding webpack config at ${relPath}`); const webpackOverride = getDefaultExport( requireModule(overridePath) ) as WebpackOverride; return webpackOverride(baseWebpackConfig, getNodeEnv()); } export async function getBaseWebpackConfig( cosmosConfig: CosmosConfig, userWebpack: typeof webpack ) { const { rootDir } = cosmosConfig; const { configPath } = createWebpackCosmosConfig(cosmosConfig); if (!configPath || !moduleExists(configPath)) { console.log('[Cosmos] Using default webpack config'); return getDefaultWebpackConfig(userWebpack, rootDir); } const relPath = path.relative(process.cwd(), configPath); console.log(`[Cosmos] Using webpack config found at ${relPath}`); const userConfigExport = getDefaultExport( requireModule(configPath) ) as WebpackConfigExport; const cliArgs = getCliArgs(); return typeof userConfigExport === 'function' ? await userConfigExport(cliArgs.env || getNodeEnv(), cliArgs) : userConfigExport; } export function resolveClientPath(relPath: string) { return require.resolve(`../client/${relPath}`); } export function getUserDepsLoaderRule(): webpack.RuleSetRule { return { loader: require.resolve('./userDepsLoader'), include: resolveClientPath('userDeps'), }; } export function resolveLocalReactDeps( cosmosConfig: CosmosConfig, baseWebpackConfig: webpack.Configuration ): webpack.ResolveOptions { const { rootDir } = cosmosConfig; const { resolve = {} } = baseWebpackConfig; let alias = resolve.alias || {}; // Preserve existing React aliases (eg. 
when using Preact) let reactAlias = hasAlias(alias, 'react'); let reactDomAlias = hasAlias(alias, 'react-dom'); if (reactAlias && reactDomAlias) { console.log('[Cosmos] React and React DOM aliases found in webpack config'); return resolve; } if (reactAlias) { console.log('[Cosmos] React alias found in webpack config'); } else { const reactPath = resolveFrom.silent(rootDir, 'react'); if (!reactPath) throw new Error(`[Cosmos] Local dependency not found: react`); alias = addAlias(alias, 'react', path.dirname(reactPath)); } if (reactDomAlias) { console.log('[Cosmos] React DOM alias found in webpack config'); } else { const reactDomPath = resolveFrom.silent(rootDir, 'react-dom'); if (!reactDomPath) throw new Error(`[Cosmos] Local dependency not found: react-dom`); alias = addAlias(alias, 'react-dom', path.dirname(reactDomPath)); } return { ...resolve, alias }; } export function getGlobalsPlugin( { publicUrl }: CosmosConfig, userWebpack: typeof webpack, devServerOn: boolean ) { const cleanPublicUrl = removeTrailingSlash(publicUrl); return new userWebpack.DefinePlugin({ // "if (__DEV__)" blocks get stripped when compiling a static export build __DEV__: JSON.stringify(devServerOn), 'process.env': { NODE_ENV: JSON.stringify(getNodeEnv()), PUBLIC_URL: JSON.stringify(cleanPublicUrl), }, }); } export function hasPlugin( plugins: void | webpack.WebpackPluginInstance[], pluginName: string ) { return ( plugins && plugins.filter(p => isInstanceOfPlugin(p, pluginName)).length > 0 ); } export function isInstanceOfPlugin( plugin: webpack.WebpackPluginInstance, constructorName: string ) { return plugin.constructor && plugin.constructor.name === constructorName; } function removeTrailingSlash(url: string) { return url.replace(/\/$/, ''); } export function getNodeEnv() { // Disallow non dev/prod environments, like "test" inside Jest, because // they are not supported by webpack return process.env.NODE_ENV === 'production' ? 'production' : 'development'; } function hasAlias(alias: webpack.ResolveOptions['alias'], name: string) { if (!alias) return false; const exactName = `${name}$`;
return keys.includes(name) || keys.includes(exactName); } } function addAlias( alias: webpack.ResolveOptions['alias'], name: string, value: string | false | string[] ) { return Array.isArray(alias) ? [...alias, { name, alias: value }] : { ...alias, [name]: value }; }
if (Array.isArray(alias)) { return alias.some(a => a.name === name || a.name === exactName); } else { const keys = Object.keys(alias);
main.go
package main import "github.com/rsteube/carapace-bin/completers/gimp_completer/cmd" func main() { cmd.Execute()
}
thread_utils.py
import threading import subprocess import time import thread import settings from errors import MultiObjectsError logger = settings.logger class ThreadPlayer(threading.Thread): ''' This thread plays the songs in the playlist via the command "mplayer url". The optional parameter mins is the number of minutes to play; -1 means no time limit. The flag _play_next decides whether the next song should be played. ''' _instance = None def __new__(cls, *args, **kwargs): if cls._instance: raise MultiObjectsError("Only one player thread can exist at the same time") cls._instance = super(ThreadPlayer, cls).__new__(cls, *args, **kwargs) return cls._instance def __init__(self, playlist, mins=-1): super(ThreadPlayer, self).__init__() self._play_next = True if not playlist: raise IOError("Playlist is empty") self.playlist = playlist self.length = self.get_length(mins) def __del__(self): # ThreadPlayer._instance = None logger.info("Player exit") def run(self): song_index = 0 while song_index < self.length and self._play_next: song = self.playlist[song_index] logger.info("Begin to play " + song["name"] + " by " + song["artist"]) cmd = "mplayer \"" + song["url"] + "\"" subprocess.call(cmd, shell=True) # call the command mplayer url in the shell song_index += 1 logger.info("Play to end.") ThreadPlayer._instance = None # thread.exit() return None def stop_play(self): self._play_next = False def get_length(self, mins): ''' get the number of songs that will be played within the given minutes ''' length = 0 if mins == -1: length = len(self.playlist) else: ms = mins * 60 * 1000 # convert minutes to milliseconds duration = 0 for song in self.playlist: duration += song["duration"] length += 1 if duration >= ms: break logger.info(str(length) + " songs will be played") return length class ThreadTimer(threading.Thread): ''' This thread acts as a timer while the house stays stationary.
''' _instance = None def __new__(cls, *args, **kwargs): if cls._instance: raise MultiObjectsError("Only one timer thread can exist at the same time") cls._instance = super(ThreadTimer, cls).__new__(cls, *args, **kwargs) return cls._instance def __init__(self, player, bound=15): super(ThreadTimer, self).__init__() logger.info("Timer initialized") self.player = player self.bound = bound * 60 # convert minutes to seconds self.gap = settings.leaps self.elapse = 0 self.over = False def __del__(self): # ThreadTimer._instance = None logger.info("Timer exit") def run(self): while self.elapse < self.bound: time.sleep(self.gap) self.elapse += self.gap logger.info("Time up") self.player.stop_play() self.over = True ThreadTimer._instance = None return None def reset(self): if self.over: raise AttributeError("Time up") else: self.elapse = 0 logger.info("Timer is reset")
Every time an occlusion is detected, the timer is reset.
loader.rs
use std::collections::HashMap; #[cfg(feature = "debugger")] use std::collections::HashSet; use log::{debug, info, trace}; use url::Url; use flowcore::deserializers::deserializer::get_deserializer; use flowcore::flow_manifest::{Cargo, MetaData}; use flowcore::input::InputInitializer; use flowcore::lib_provider::Provider; use crate::errors::*; use crate::model::flow::Flow; use crate::model::name::HasName; use crate::model::name::Name; use crate::model::process::Process; use crate::model::process::Process::FlowProcess; use crate::model::process::Process::FunctionProcess; use crate::model::route::Route; /// `LibType` describes what format the Flow Library is written in #[derive(PartialEq)] pub enum LibType { /// `RustLib` indicates that the library is written in rust with a Cargo.toml to compile it natively RustLib, } /// Many structs in the model implement the `Validate` trait, which is used to check that the /// description deserialized from a file obeys some additional constraints that cannot be expressed /// in the struct definition in `serde` pub trait Validate { /// Validate that a deserialized model data structure is valid for use fn validate(&self) -> Result<()>; } /// Load a `Flow` definition from a `Url`, recursively loading all sub-processes referenced. /// /// The return is a `Result` containing the `Process`, or a `String` describing the error /// found while loading. /// /// # Example /// ``` /// use flowcore::lib_provider::Provider; /// use flowcore::errors::Result; /// use std::env; /// use url::Url; /// use std::collections::HashSet; /// /// // Clients need to provide a Provider of content for the loader as flowclib is independent of /// // file systems and io. /// struct DummyProvider; /// /// // A Provider must implement the `Provider` trait, with the methods to `resolve` a URL and to /// // `get` the contents for parsing. /// impl Provider for DummyProvider { /// fn resolve_url(&self, url: &Url, default_filename: &str, _ext: &[&str]) -> Result<(Url, Option<String>)> { /// // Just fake the url resolution in this example /// Ok((url.clone(), None)) /// } /// /// fn get_contents(&self, url: &Url) -> Result<Vec<u8>> { /// // Return the simplest flow definition possible - ignoring the url passed in /// Ok("flow = \"test\"".as_bytes().to_owned()) /// } /// } /// /// // Create an instance of the `DummyProvider` /// let dummy_provider = DummyProvider{}; /// /// // keep track of the source Urls loaded for this flow /// let mut source_urls = HashSet::<(Url, Url)>::new(); /// /// // load the flow from `url = file:///example.toml` using the `dummy_provider` /// flowclib::compiler::loader::load(&Url::parse("file:///example.toml").unwrap(), &dummy_provider, &mut source_urls).unwrap(); /// ``` pub fn load( url: &Url, provider: &dyn Provider, #[cfg(feature = "debugger")] source_urls: &mut HashSet<(Url, Url)>, ) -> Result<Process> { trace!("load()"); load_process( &Route::default(), &Name::default(), 0, &mut 0, url, provider, &HashMap::new(), #[cfg(feature = "debugger")] source_urls, 0, ) } #[allow(clippy::too_many_arguments)] fn
( parent_route: &Route, alias: &Name, parent_flow_id: usize, flow_count: &mut usize, url: &Url, provider: &dyn Provider, initializations: &HashMap<String, InputInitializer>, #[cfg(feature = "debugger")] source_urls: &mut HashSet<(Url, Url)>, level: usize, ) -> Result<Process> { trace!("load_process()"); let (resolved_url, lib_ref) = provider .resolve_url(url, "context", &["toml"]) .chain_err(|| format!("Could not resolve the url: '{}'", url))?; if &resolved_url != url { debug!("Source URL '{}' resolved to: '{}'", url, resolved_url); } let contents = provider .get_contents(&resolved_url) .chain_err(|| format!("Could not get contents of resolved url: '{}'", resolved_url))?; if !alias.is_empty() { info!("Loading process with alias = '{}'", alias); } let content = String::from_utf8(contents).chain_err(|| "Could not read UTF8 contents")?; let deserializer = get_deserializer::<Process>(&resolved_url)?; debug!( "Loading process from url = '{}' with deserializer: '{}'", resolved_url, deserializer.name() ); let mut process = deserializer .deserialize(&content, Some(url)) .chain_err(|| format!("Could not deserialize process from content in '{}'", url))?; // Track the source file involved and what it resolved to #[cfg(feature = "debugger")] source_urls.insert((url.clone(), resolved_url.clone())); match process { FlowProcess(ref mut flow) => { flow.config( &resolved_url, parent_route, alias, *flow_count, initializations, )?; *flow_count += 1; debug!("Deserialized the Flow, now loading any sub-processes"); load_process_refs( flow, flow_count, provider, #[cfg(feature = "debugger")] source_urls, level, )?; flow.build_connections(level)?; } FunctionProcess(ref mut function) => { function.config( &resolved_url, parent_route, alias, parent_flow_id, lib_ref, initializations, )?; } } Ok(process) } /// load library metadata from the given url using the provider. 
/// Currently it uses the `package` table of Cargo.toml as a source but it could /// easily use another file as long as it has the required fields to satisfy the `MetaData` struct pub fn load_metadata(url: &Url, provider: &dyn Provider) -> Result<(MetaData, LibType)> { trace!("Loading Metadata"); let (resolved_url, _) = provider .resolve_url(url, "Cargo", &["toml"]) .chain_err(|| format!("Could not resolve the url: '{}'", url))?; if &resolved_url != url { debug!("Source URL '{}' resolved to: '{}'", url, resolved_url); } let contents = provider .get_contents(&resolved_url) .chain_err(|| format!("Could not get contents of resolved url: '{}'", resolved_url))?; let content = String::from_utf8(contents).chain_err(|| "Could not read UTF8 contents")?; let deserializer = get_deserializer::<Cargo>(&resolved_url)?; let cargo: Cargo = deserializer.deserialize(&content, Some(&resolved_url))?; Ok((cargo.package, LibType::RustLib)) } /* Load sub-processes from the process_refs in a flow */ fn load_process_refs( flow: &mut Flow, flow_count: &mut usize, provider: &dyn Provider, #[cfg(feature = "debugger")] source_urls: &mut HashSet<(Url, Url)>, level: usize, ) -> Result<()> { for process_ref in &mut flow.process_refs { let subprocess_url = flow .source_url .join(&process_ref.source) .map_err(|e| e.to_string())?; let process = load_process( &flow.route, process_ref.alias(), flow.id, flow_count, &subprocess_url, provider, &process_ref.initializations, #[cfg(feature = "debugger")] source_urls, level + 1, )?; process_ref.set_alias(process.name()); // runtime needs references to library functions to be able to load the implementations at load time // library flow definitions are "compiled down" to just library function references at compile time. if let FunctionProcess(function) = &process { if let Some(lib_ref) = function.get_lib_reference() { flow.lib_references.insert( Url::parse(&format!("lib://{}/{}", lib_ref, function.name())) .map_err(|_| "Could not create Url from library reference")?, ); } } flow.subprocesses .insert(process_ref.alias().to_owned(), process); } Ok(()) } #[cfg(test)] mod test { use url::Url; use flowcore::deserializers::deserializer::get_deserializer; use flowcore::flow_manifest::{Cargo, MetaData}; #[test] fn deserialize_library() { let cargo_toml = r###"[package] name = "Flow Standard Library" version = "0.11.0" authors = ["Andrew Mackenzie <[email protected]>"] description = "The standard library for 'flow' programs compiled with the 'flowc' compiler" exclude = "../..""###; let url = Url::parse("file:///fake.toml").expect("Could not parse URL"); let deserializer = get_deserializer::<Cargo>(&url).expect("Could not get deserializer"); let cargo: Cargo = deserializer .deserialize(cargo_toml, Some(&url)) .expect("Could not deserialize"); let _: MetaData = cargo.package; } }
load_process
enums.rs
// SPDX-License-Identifier: MIT // Copyright (C) 2018-present iced project and contributors use crate::iced_constants::IcedConstants; use crate::iced_error::IcedError; use core::convert::TryFrom; use core::iter::{ExactSizeIterator, FusedIterator, Iterator}; use core::{fmt, mem}; // GENERATOR-BEGIN: CodeSize // ⚠️This was generated by GENERATOR!🦹‍♂️ /// The code size (16/32/64) that was used when an instruction was decoded #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum CodeSize { /// Unknown size Unknown = 0, /// 16-bit code Code16 = 1, /// 32-bit code Code32 = 2, /// 64-bit code Code64 = 3, } #[rustfmt::skip] static GEN_DEBUG_CODE_SIZE: [&str; 4] = [ "Unknown", "Code16", "Code32", "Code64", ]; impl fmt::Debug for CodeSize { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_CODE_SIZE[*self as usize]) } } impl Default for CodeSize { #[must_use] #[inline] fn default() -> Self { CodeSize::Unknown } } #[rustfmt::skip] impl CodeSize { /// Iterates over all `CodeSize` enum values #[inline] pub fn values() -> impl Iterator<Item = CodeSize> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::CODE_SIZE_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, CodeSize>(x as u8) }) } } #[test] #[rustfmt::skip] fn test_codesize_values() { let mut iter = CodeSize::values(); assert_eq!(iter.size_hint(), (IcedConstants::CODE_SIZE_ENUM_COUNT, Some(IcedConstants::CODE_SIZE_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::CODE_SIZE_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::CODE_SIZE_ENUM_COUNT - 1, Some(IcedConstants::CODE_SIZE_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::CODE_SIZE_ENUM_COUNT - 1); let values: Vec<CodeSize> = CodeSize::values().collect(); assert_eq!(values.len(), IcedConstants::CODE_SIZE_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[rustfmt::skip] impl TryFrom<usize> for CodeSize { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::CODE_SIZE_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid CodeSize value")) } } } #[test] #[rustfmt::skip] fn test_codesize_try_from_usize() { for value in CodeSize::values() { let converted = <CodeSize as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<CodeSize as TryFrom<usize>>::try_from(IcedConstants::CODE_SIZE_ENUM_COUNT).is_err()); assert!(<CodeSize as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: CodeSize // GENERATOR-BEGIN: RoundingControl // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Rounding control #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RoundingControl { /// No rounding mode None = 0, /// Round to nearest (even) RoundToNearest = 1, /// Round down (toward -inf) RoundDown = 2, /// Round up (toward +inf) RoundUp = 3, /// Round toward zero (truncate) RoundTowardZero = 4, } #[rustfmt::skip] static GEN_DEBUG_ROUNDING_CONTROL: [&str; 5] = [ "None", "RoundToNearest", "RoundDown", "RoundUp", "RoundTowardZero", ]; impl fmt::Debug for RoundingControl { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_ROUNDING_CONTROL[*self as usize]) } } impl Default for RoundingControl { #[must_use] #[inline] fn default() -> Self { 
RoundingControl::None } } #[rustfmt::skip] impl RoundingControl { /// Iterates over all `RoundingControl` enum values #[inline] pub fn values() -> impl Iterator<Item = RoundingControl> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::ROUNDING_CONTROL_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, RoundingControl>(x as u8) }) } } #[test] #[rustfmt::skip] fn test_roundingcontrol_values() { let mut iter = RoundingControl::values(); assert_eq!(iter.size_hint(), (IcedConstants::ROUNDING_CONTROL_ENUM_COUNT, Some(IcedConstants::ROUNDING_CONTROL_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1, Some(IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT - 1); let values: Vec<RoundingControl> = RoundingControl::values().collect(); assert_eq!(values.len(), IcedConstants::ROUNDING_CONTROL_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[rustfmt::skip] impl TryFrom<usize> for RoundingControl { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::ROUNDING_CONTROL_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid RoundingControl value")) } } } #[test] #[rustfmt::skip] fn test_roundingcontrol_try_from_usize() { for value in RoundingControl::values() { let converted = <RoundingControl as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<RoundingControl as TryFrom<usize>>::try_from(IcedConstants::ROUNDING_CONTROL_ENUM_COUNT).is_err()); assert!(<RoundingControl as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: RoundingControl // GENERATOR-BEGIN: OpKind // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Instruction operand kind #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[allow(non_camel_case_types)] pub enum OpKind { /// A register ([`Register`]). /// /// This operand kind uses [`Instruction::op0_register()`], [`Instruction::op1_register()`], [`Instruction::op2_register()`], [`Instruction::op3_register()`] or [`Instruction::op4_register()`] depending on operand number. See also [`Instruction::op_register()`]. /// /// [`Register`]: enum.Register.html /// [`Instruction::op0_register()`]: struct.Instruction.html#method.op0_register /// [`Instruction::op1_register()`]: struct.Instruction.html#method.op1_register /// [`Instruction::op2_register()`]: struct.Instruction.html#method.op2_register /// [`Instruction::op3_register()`]: struct.Instruction.html#method.op3_register /// [`Instruction::op4_register()`]: struct.Instruction.html#method.op4_register /// [`Instruction::op_register()`]: struct.Instruction.html#method.op_register Register = 0, /// Near 16-bit branch. This operand kind uses [`Instruction::near_branch16()`] /// /// [`Instruction::near_branch16()`]: struct.Instruction.html#method.near_branch16 NearBranch16 = 1, /// Near 32-bit branch. This operand kind uses [`Instruction::near_branch32()`] /// /// [`Instruction::near_branch32()`]: struct.Instruction.html#method.near_branch32 NearBranch32 = 2, /// Near 64-bit branch. 
This operand kind uses [`Instruction::near_branch64()`] /// /// [`Instruction::near_branch64()`]: struct.Instruction.html#method.near_branch64 NearBranch64 = 3, /// Far 16-bit branch. This operand kind uses [`Instruction::far_branch16()`] and [`Instruction::far_branch_selector()`] /// /// [`Instruction::far_branch16()`]: struct.Instruction.html#method.far_branch16 /// [`Instruction::far_branch_selector()`]: struct.Instruction.html#method.far_branch_selector FarBranch16 = 4, /// Far 32-bit branch. This operand kind uses [`Instruction::far_branch32()`] and [`Instruction::far_branch_selector()`] /// /// [`Instruction::far_branch32()`]: struct.Instruction.html#method.far_branch32 /// [`Instruction::far_branch_selector()`]: struct.Instruction.html#method.far_branch_selector FarBranch32 = 5, /// 8-bit constant. This operand kind uses [`Instruction::immediate8()`] /// /// [`Instruction::immediate8()`]: struct.Instruction.html#method.immediate8 Immediate8 = 6, /// 8-bit constant used by the `ENTER`, `EXTRQ`, `INSERTQ` instructions. This operand kind uses [`Instruction::immediate8_2nd()`] /// /// [`Instruction::immediate8_2nd()`]: struct.Instruction.html#method.immediate8_2nd Immediate8_2nd = 7, /// 16-bit constant. This operand kind uses [`Instruction::immediate16()`] /// /// [`Instruction::immediate16()`]: struct.Instruction.html#method.immediate16 Immediate16 = 8, /// 32-bit constant. This operand kind uses [`Instruction::immediate32()`] /// /// [`Instruction::immediate32()`]: struct.Instruction.html#method.immediate32 Immediate32 = 9, /// 64-bit constant. This operand kind uses [`Instruction::immediate64()`] /// /// [`Instruction::immediate64()`]: struct.Instruction.html#method.immediate64 Immediate64 = 10, /// An 8-bit value sign extended to 16 bits. This operand kind uses [`Instruction::immediate8to16()`] /// /// [`Instruction::immediate8to16()`]: struct.Instruction.html#method.immediate8to16 Immediate8to16 = 11, /// An 8-bit value sign extended to 32 bits. This operand kind uses [`Instruction::immediate8to32()`] /// /// [`Instruction::immediate8to32()`]: struct.Instruction.html#method.immediate8to32 Immediate8to32 = 12, /// An 8-bit value sign extended to 64 bits. This operand kind uses [`Instruction::immediate8to64()`] /// /// [`Instruction::immediate8to64()`]: struct.Instruction.html#method.immediate8to64 Immediate8to64 = 13, /// A 32-bit value sign extended to 64 bits. This operand kind uses [`Instruction::immediate32to64()`] /// /// [`Instruction::immediate32to64()`]: struct.Instruction.html#method.immediate32to64 Immediate32to64 = 14, /// `seg:[SI]`. This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegSI = 15, /// `seg:[ESI]`. This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegESI = 16, /// `seg:[RSI]`. 
This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegRSI = 17, /// `seg:[DI]`. This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegDI = 18, /// `seg:[EDI]`. This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegEDI = 19, /// `seg:[RDI]`. This operand kind uses [`Instruction::memory_size()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix MemorySegRDI = 20, /// `ES:[DI]`. This operand kind uses [`Instruction::memory_size()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size MemoryESDI = 21, /// `ES:[EDI]`. This operand kind uses [`Instruction::memory_size()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size MemoryESEDI = 22, /// `ES:[RDI]`. This operand kind uses [`Instruction::memory_size()`] /// /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size MemoryESRDI = 23, /// DEPRECATED. Use [`Memory`] /// /// [`Memory`]: enum.OpKind.html#variant.Memory #[deprecated(since = "1.11.0", note = "Don't use it!")] Memory64 = 24, /// Memory operand. 
/// /// This operand kind uses [`Instruction::memory_displ_size()`], [`Instruction::memory_size()`], [`Instruction::memory_index_scale()`], [`Instruction::memory_displacement64()`], [`Instruction::memory_base()`], [`Instruction::memory_index()`], [`Instruction::memory_segment()`], [`Instruction::segment_prefix()`] /// /// [`Instruction::memory_displ_size()`]: struct.Instruction.html#method.memory_displ_size /// [`Instruction::memory_size()`]: struct.Instruction.html#method.memory_size /// [`Instruction::memory_index_scale()`]: struct.Instruction.html#method.memory_index_scale /// [`Instruction::memory_displacement64()`]: struct.Instruction.html#method.memory_displacement64 /// [`Instruction::memory_base()`]: struct.Instruction.html#method.memory_base /// [`Instruction::memory_index()`]: struct.Instruction.html#method.memory_index /// [`Instruction::memory_segment()`]: struct.Instruction.html#method.memory_segment /// [`Instruction::segment_prefix()`]: struct.Instruction.html#method.segment_prefix Memory = 25, } #[rustfmt::skip] static GEN_DEBUG_OP_KIND: [&str; 26] = [ "Register", "NearBranch16", "NearBranch32", "NearBranch64", "FarBranch16", "FarBranch32", "Immediate8", "Immediate8_2nd", "Immediate16", "Immediate32", "Immediate64", "Immediate8to16", "Immediate8to32", "Immediate8to64", "Immediate32to64", "MemorySegSI", "MemorySegESI", "MemorySegRSI", "MemorySegDI", "MemorySegEDI", "MemorySegRDI", "MemoryESDI", "MemoryESEDI", "MemoryESRDI", "Memory64", "Memory", ]; impl fmt::Debug for OpKind { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_OP_KIND[*self as usize]) } } impl Default for OpKind { #[must_use] #[inline] fn default() -> Self { OpKind::Register } } #[rustfmt::skip] impl OpKind { /// Iterates over all `OpKind` enum values #[inline] pub fn values() -> impl Iterator<Item = OpKind> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::OP_KIND_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, OpKind>(x as u8) }) } } #[test] #[rustfmt::skip] fn test_opkind_values() { let mut iter = OpKind::values(); assert_eq!(iter.size_hint(), (IcedConstants::OP_KIND_ENUM_COUNT, Some(IcedConstants::OP_KIND_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::OP_KIND_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::OP_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_KIND_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::OP_KIND_ENUM_COUNT - 1); let values: Vec<OpKind> = OpKind::values().collect(); assert_eq!(values.len(), IcedConstants::OP_KIND_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[rustfmt::skip] impl TryFrom<usize> for OpKind { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::OP_KIND_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid OpKind value")) } } } #[test] #[rustfmt::skip] fn test_opkind_try_from_usize() { for value in OpKind::values() { let converted = <OpKind as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<OpKind as TryFrom<usize>>::try_from(IcedConstants::OP_KIND_ENUM_COUNT).is_err()); assert!(<OpKind as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: OpKind // GENERATOR-BEGIN: VectorLength // ⚠️This was generated by GENERATOR!🦹‍♂️ #[derive(Copy, Clone, 
Eq, PartialEq)] #[cfg(any(feature = "decoder", feature = "encoder"))] #[allow(dead_code)] pub(crate) enum VectorLength { L128, L256, L512, Unknown, } #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] static GEN_DEBUG_VECTOR_LENGTH: [&str; 4] = [ "L128", "L256", "L512", "Unknown", ]; #[cfg(any(feature = "decoder", feature = "encoder"))] impl fmt::Debug for VectorLength { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_VECTOR_LENGTH[*self as usize]) } } #[cfg(any(feature = "decoder", feature = "encoder"))] impl Default for VectorLength { #[must_use] #[inline] fn default() -> Self { VectorLength::L128 } } // GENERATOR-END: VectorLength // GENERATOR-BEGIN: MandatoryPrefixByte // ⚠️This was generated by GENERATOR!🦹‍♂️ #[derive(Copy, Clone, Eq, PartialEq)] #[cfg(any(feature = "decoder", feature = "encoder"))] #[allow(dead_code)] pub(crate) enum MandatoryPrefixByte { None, P66, PF3, PF2, } #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] static GEN_DEBUG_MANDATORY_PREFIX_BYTE: [&str; 4] = [ "None", "P66", "PF3", "PF2", ]; #[cfg(any(feature = "decoder", feature = "encoder"))] impl fmt::Debug for MandatoryPrefixByte { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_MANDATORY_PREFIX_BYTE[*self as usize]) } } #[cfg(any(feature = "decoder", feature = "encoder"))] impl Default for MandatoryPrefixByte { #[must_use] #[inline] fn default() -> Self { MandatoryPrefixByte::None } } // GENERATOR-END: MandatoryPrefixByte // GENERATOR-BEGIN: EncodingKind // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Instruction encoding #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)] #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] pub enum EncodingKind { /// Legacy encoding Legacy = 0, /// VEX encoding VEX = 1, /// EVEX encoding EVEX = 2, /// XOP encoding XOP = 3, /// 3DNow! 
encoding D3NOW = 4, } #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] #[rustfmt::skip] static GEN_DEBUG_ENCODING_KIND: [&str; 5] = [ "Legacy", "VEX", "EVEX", "XOP", "D3NOW", ]; #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] impl fmt::Debug for EncodingKind { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_ENCODING_KIND[*self as usize]) } } #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] impl Default for EncodingKind { #[must_use] #[inline] fn default() -> Self { EncodingKind::Legacy } } #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] #[rustfmt::skip] impl EncodingKind { /// Iterates over all `EncodingKind` enum values #[inline] pub fn values() -> impl Iterator<Item = EncodingKind> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::ENCODING_KIND_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, EncodingKind>(x as u8) }) } } #[test] #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] #[rustfmt::skip] fn test_encodingkind_values() { let mut iter = EncodingKind::values(); assert_eq!(iter.size_hint(), (IcedConstants::ENCODING_KIND_ENUM_COUNT, Some(IcedConstants::ENCODING_KIND_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::ENCODING_KIND_ENUM_COUNT - 1, Some(IcedConstants::ENCODING_KIND_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT - 1); let values: Vec<EncodingKind> = EncodingKind::values().collect(); assert_eq!(values.len(), IcedConstants::ENCODING_KIND_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] #[rustfmt::skip] impl TryFrom<usize> for EncodingKind { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::ENCODING_KIND_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid EncodingKind value")) } } } #[cfg(any(feature = "decoder", feature = "encoder", feature = "instr_info"))] #[test] #[rustfmt::skip] fn test_encodingkind_try_from_usize() { for value in EncodingKind::values() { let converted = <EncodingKind as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<EncodingKind as TryFrom<usize>>::try_from(IcedConstants::ENCODING_KIND_ENUM_COUNT).is_err()); assert!(<EncodingKind as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: EncodingKind // GENERATOR-BEGIN: TupleType // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Tuple type (EVEX) which can be used to get the disp8 scale factor `N` #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)] #[cfg(any(feature = "decoder", feature = "encoder"))] #[allow(non_camel_case_types)] pub enum TupleType { /// `N = 1` N1 = 0, /// `N = 2` N2 = 1, /// `N = 4` N4 = 2, /// `N = 8` N8 = 3, /// `N = 16` N16 = 4, /// `N = 32` N32 = 5, /// `N = 64` N64 = 6, /// `N = b ? 4 : 8` N8b4 = 7, /// `N = b ? 4 : 16` N16b4 = 8, /// `N = b ? 4 : 32` N32b4 = 9, /// `N = b ? 4 : 64` N64b4 = 10, /// `N = b ? 8 : 16` N16b8 = 11, /// `N = b ? 8 : 32` N32b8 = 12, /// `N = b ? 
8 : 64` N64b8 = 13, } #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] static GEN_DEBUG_TUPLE_TYPE: [&str; 14] = [ "N1", "N2", "N4", "N8", "N16", "N32", "N64", "N8b4", "N16b4", "N32b4", "N64b4", "N16b8", "N32b8", "N64b8", ]; #[cfg(any(feature = "decoder", feature = "encoder"))] impl fmt::Debug for TupleType { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_TUPLE_TYPE[*self as usize]) } } #[cfg(any(feature = "decoder", feature = "encoder"))] impl Default for TupleType { #[must_use] #[inline] fn default() -> Self { TupleType::N1 } } #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] impl TupleType { /// Iterates over all `TupleType` enum values #[inline] pub fn values() -> impl Iterator<Item = TupleType> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::TUPLE_TYPE_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, TupleType>(x as u8) }) } } #[test] #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] fn test_tupletype_values() { let mut iter = TupleType::values(); assert_eq!(iter.size_hint(), (IcedConstants::TUPLE_TYPE_ENUM_COUNT, Some(IcedConstants::TUPLE_TYPE_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1, Some(IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT - 1); let values: Vec<TupleType> = TupleType::values().collect(); assert_eq!(values.len(), IcedConstants::TUPLE_TYPE_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(any(feature = "decoder", feature = "encoder"))] #[rustfmt::skip] impl TryFrom<usize> for TupleType { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::TUPLE_TYPE_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid TupleType value")) } } } #[cfg(any(feature = "decoder", feature = "encoder"))] #[test] #[rustfmt::skip] fn test_tupletype_try_from_usize() { for value in TupleType::values() { let converted = <TupleType as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<TupleType as TryFrom<usize>>::try_from(IcedConstants::TUPLE_TYPE_ENUM_COUNT).is_err()); assert!(<TupleType as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: TupleType // GENERATOR-BEGIN: FlowControl // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Control flow #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg(feature = "instr_info")] pub enum FlowControl { /// The next instruction that will be executed is the next instruction in the instruction stream Next = 0, /// It's an unconditional branch instruction: `JMP NEAR`, `JMP FAR` UnconditionalBranch = 1, /// It's an unconditional indirect branch: `JMP NEAR reg`, `JMP NEAR [mem]`, `JMP FAR [mem]` IndirectBranch = 2, /// It's a conditional branch instruction: `Jcc SHORT`, `Jcc NEAR`, `LOOP`, `LOOPcc`, `JRCXZ` ConditionalBranch = 3, /// It's a return instruction: `RET NEAR`, `RET FAR`, `IRET`, `SYSRET`, `SYSEXIT`, `RSM`, `SKINIT`, `RDM`, `UIRET` Return = 4, /// It's a call instruction: `CALL NEAR`, `CALL FAR`, `SYSCALL`, `SYSENTER`, `VMLAUNCH`, `VMRESUME`, `VMCALL`, `VMMCALL`, `VMGEXIT`, `VMRUN`, `TDCALL`, `SEAMCALL`, 
`SEAMRET` Call = 5, /// It's an indirect call instruction: `CALL NEAR reg`, `CALL NEAR [mem]`, `CALL FAR [mem]` IndirectCall = 6, /// It's an interrupt instruction: `INT n`, `INT3`, `INT1`, `INTO`, `SMINT`, `DMINT` Interrupt = 7, /// It's `XBEGIN` XbeginXabortXend = 8, /// It's an invalid instruction, eg. [`Code::INVALID`], `UD0`, `UD1`, `UD2` /// /// [`Code::INVALID`]: enum.Code.html#variant.INVALID Exception = 9, } #[cfg(feature = "instr_info")] #[rustfmt::skip] static GEN_DEBUG_FLOW_CONTROL: [&str; 10] = [ "Next", "UnconditionalBranch", "IndirectBranch", "ConditionalBranch", "Return", "Call", "IndirectCall", "Interrupt", "XbeginXabortXend", "Exception", ]; #[cfg(feature = "instr_info")] impl fmt::Debug for FlowControl { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_FLOW_CONTROL[*self as usize]) } } #[cfg(feature = "instr_info")] impl Default for FlowControl { #[must_use] #[inline] fn default() -> Self { FlowControl::Next } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl FlowControl { /// Iterates over all `FlowControl` enum values #[inline] pub fn values() -> impl Iterator<Item = FlowControl> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::FLOW_CONTROL_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, FlowControl>(x as u8) }) } } #[test] #[cfg(feature = "instr_info")] #[rustfmt::skip] fn test_flowcontrol_values() { let mut iter = FlowControl::values(); assert_eq!(iter.size_hint(), (IcedConstants::FLOW_CONTROL_ENUM_COUNT, Some(IcedConstants::FLOW_CONTROL_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1, Some(IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT - 1); let values: Vec<FlowControl> = FlowControl::values().collect(); assert_eq!(values.len(), IcedConstants::FLOW_CONTROL_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl TryFrom<usize> for FlowControl { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::FLOW_CONTROL_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid FlowControl value")) } } } #[cfg(feature = "instr_info")] #[test] #[rustfmt::skip] fn test_flowcontrol_try_from_usize() { for value in FlowControl::values() { let converted = <FlowControl as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<FlowControl as TryFrom<usize>>::try_from(IcedConstants::FLOW_CONTROL_ENUM_COUNT).is_err()); assert!(<FlowControl as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: FlowControl // GENERATOR-BEGIN: OpCodeOperandKind // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Operand kind #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)] #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[allow(non_camel_case_types)] pub enum OpCodeOperandKind { /// No operand None = 0, /// Far branch 16-bit offset, 16-bit segment/selector farbr2_2 = 1, /// Far branch 32-bit offset, 16-bit segment/selector farbr4_2 = 2, /// Memory offset without a modrm byte (eg. 
`MOV AL,[offset]`) mem_offs = 3, /// Memory (modrm) mem = 4, /// Memory (modrm), MPX: /// /// 16/32-bit mode: must be 32-bit addressing /// /// 64-bit mode: 64-bit addressing is forced and must not be RIP relative mem_mpx = 5, /// Memory (modrm), MPX: /// /// 16/32-bit mode: must be 32-bit addressing /// /// 64-bit mode: 64-bit addressing is forced and must not be RIP relative mem_mib = 6, /// Memory (modrm), vsib32, `XMM` registers mem_vsib32x = 7, /// Memory (modrm), vsib64, `XMM` registers mem_vsib64x = 8, /// Memory (modrm), vsib32, `YMM` registers mem_vsib32y = 9, /// Memory (modrm), vsib64, `YMM` registers mem_vsib64y = 10, /// Memory (modrm), vsib32, `ZMM` registers mem_vsib32z = 11, /// Memory (modrm), vsib64, `ZMM` registers mem_vsib64z = 12, /// 8-bit GPR or memory r8_or_mem = 13, /// 16-bit GPR or memory r16_or_mem = 14, /// 32-bit GPR or memory r32_or_mem = 15, /// 32-bit GPR or memory, MPX: 16/32-bit mode: must be 32-bit addressing, 64-bit mode: 64-bit addressing is forced r32_or_mem_mpx = 16, /// 64-bit GPR or memory r64_or_mem = 17, /// 64-bit GPR or memory, MPX: 16/32-bit mode: must be 32-bit addressing, 64-bit mode: 64-bit addressing is forced r64_or_mem_mpx = 18, /// `MM` register or memory mm_or_mem = 19, /// `XMM` register or memory xmm_or_mem = 20, /// `YMM` register or memory ymm_or_mem = 21, /// `ZMM` register or memory zmm_or_mem = 22, /// `BND` register or memory, MPX: 16/32-bit mode: must be 32-bit addressing, 64-bit mode: 64-bit addressing is forced bnd_or_mem_mpx = 23, /// `K` register or memory k_or_mem = 24, /// 8-bit GPR encoded in the `reg` field of the modrm byte r8_reg = 25, /// 8-bit GPR encoded in the low 3 bits of the opcode r8_opcode = 26, /// 16-bit GPR encoded in the `reg` field of the modrm byte r16_reg = 27, /// 16-bit GPR encoded in the `reg` field of the modrm byte. This is a memory operand and it uses the address size prefix (`67h`) not the operand size prefix (`66h`). r16_reg_mem = 28, /// 16-bit GPR encoded in the `mod + r/m` fields of the modrm byte r16_rm = 29, /// 16-bit GPR encoded in the low 3 bits of the opcode r16_opcode = 30, /// 32-bit GPR encoded in the `reg` field of the modrm byte r32_reg = 31, /// 32-bit GPR encoded in the `reg` field of the modrm byte. This is a memory operand and it uses the address size prefix (`67h`) not the operand size prefix (`66h`). r32_reg_mem = 32, /// 32-bit GPR encoded in the `mod + r/m` fields of the modrm byte r32_rm = 33, /// 32-bit GPR encoded in the low 3 bits of the opcode r32_opcode = 34, /// 32-bit GPR encoded in the the `V'vvvv` field (VEX/EVEX/XOP) r32_vvvv = 35, /// 64-bit GPR encoded in the `reg` field of the modrm byte r64_reg = 36, /// 64-bit GPR encoded in the `reg` field of the modrm byte. This is a memory operand and it uses the address size prefix (`67h`) not the operand size prefix (`66h`). 
r64_reg_mem = 37, /// 64-bit GPR encoded in the `mod + r/m` fields of the modrm byte r64_rm = 38, /// 64-bit GPR encoded in the low 3 bits of the opcode r64_opcode = 39, /// 64-bit GPR encoded in the the `V'vvvv` field (VEX/EVEX/XOP) r64_vvvv = 40, /// Segment register encoded in the `reg` field of the modrm byte seg_reg = 41, /// `K` register encoded in the `reg` field of the modrm byte k_reg = 42, /// `K` register (+1) encoded in the `reg` field of the modrm byte kp1_reg = 43, /// `K` register encoded in the `mod + r/m` fields of the modrm byte k_rm = 44, /// `K` register encoded in the the `V'vvvv` field (VEX/EVEX/XOP) k_vvvv = 45, /// `MM` register encoded in the `reg` field of the modrm byte mm_reg = 46, /// `MM` register encoded in the `mod + r/m` fields of the modrm byte mm_rm = 47, /// `XMM` register encoded in the `reg` field of the modrm byte xmm_reg = 48, /// `XMM` register encoded in the `mod + r/m` fields of the modrm byte xmm_rm = 49, /// `XMM` register encoded in the the `V'vvvv` field (VEX/EVEX/XOP) xmm_vvvv = 50, /// `XMM` register (+3) encoded in the the `V'vvvv` field (VEX/EVEX/XOP) xmmp3_vvvv = 51, /// `XMM` register encoded in the the high 4 bits of the last 8-bit immediate (VEX/XOP only so only `XMM0`-`XMM15`) xmm_is4 = 52, /// `XMM` register encoded in the the high 4 bits of the last 8-bit immediate (VEX/XOP only so only `XMM0`-`XMM15`) xmm_is5 = 53, /// `YMM` register encoded in the `reg` field of the modrm byte ymm_reg = 54, /// `YMM` register encoded in the `mod + r/m` fields of the modrm byte ymm_rm = 55, /// `YMM` register encoded in the the `V'vvvv` field (VEX/EVEX/XOP) ymm_vvvv = 56, /// `YMM` register encoded in the the high 4 bits of the last 8-bit immediate (VEX/XOP only so only `YMM0`-`YMM15`) ymm_is4 = 57, /// `YMM` register encoded in the the high 4 bits of the last 8-bit immediate (VEX/XOP only so only `YMM0`-`YMM15`) ymm_is5 = 58, /// `ZMM` register encoded in the `reg` field of the modrm byte zmm_reg = 59, /// `ZMM` register encoded in the `mod + r/m` fields of the modrm byte zmm_rm = 60, /// `ZMM` register encoded in the the `V'vvvv` field (VEX/EVEX/XOP) zmm_vvvv = 61, /// `ZMM` register (+3) encoded in the the `V'vvvv` field (VEX/EVEX/XOP) zmmp3_vvvv = 62, /// `CR` register encoded in the `reg` field of the modrm byte cr_reg = 63, /// `DR` register encoded in the `reg` field of the modrm byte dr_reg = 64, /// `TR` register encoded in the `reg` field of the modrm byte tr_reg = 65, /// `BND` register encoded in the `reg` field of the modrm byte bnd_reg = 66, /// `ES` register es = 67, /// `CS` register cs = 68, /// `SS` register ss = 69, /// `DS` register ds = 70, /// `FS` register fs = 71, /// `GS` register gs = 72, /// `AL` register al = 73, /// `CL` register cl = 74, /// `AX` register ax = 75, /// `DX` register dx = 76, /// `EAX` register eax = 77, /// `RAX` register rax = 78, /// `ST(0)` register st0 = 79, /// `ST(i)` register encoded in the low 3 bits of the opcode sti_opcode = 80, /// 4-bit immediate (m2z field, low 4 bits of the /is5 immediate, eg. 
`VPERMIL2PS`) imm4_m2z = 81, /// 8-bit immediate imm8 = 82, /// Constant 1 (8-bit immediate) imm8_const_1 = 83, /// 8-bit immediate sign extended to 16 bits imm8sex16 = 84, /// 8-bit immediate sign extended to 32 bits imm8sex32 = 85, /// 8-bit immediate sign extended to 64 bits imm8sex64 = 86, /// 16-bit immediate imm16 = 87, /// 32-bit immediate imm32 = 88, /// 32-bit immediate sign extended to 64 bits imm32sex64 = 89, /// 64-bit immediate imm64 = 90, /// `seg:[rSI]` memory operand (string instructions) seg_rSI = 91, /// `es:[rDI]` memory operand (string instructions) es_rDI = 92, /// `seg:[rDI]` memory operand (`(V)MASKMOVQ` instructions) seg_rDI = 93, /// `seg:[rBX+al]` memory operand (`XLATB` instruction) seg_rBX_al = 94, /// 16-bit branch, 1-byte signed relative offset br16_1 = 95, /// 32-bit branch, 1-byte signed relative offset br32_1 = 96, /// 64-bit branch, 1-byte signed relative offset br64_1 = 97, /// 16-bit branch, 2-byte signed relative offset br16_2 = 98, /// 32-bit branch, 4-byte signed relative offset br32_4 = 99, /// 64-bit branch, 4-byte signed relative offset br64_4 = 100, /// `XBEGIN`, 2-byte signed relative offset xbegin_2 = 101, /// `XBEGIN`, 4-byte signed relative offset xbegin_4 = 102, /// 2-byte branch offset (`JMPE` instruction) brdisp_2 = 103, /// 4-byte branch offset (`JMPE` instruction) brdisp_4 = 104, /// Memory (modrm) and the sib byte must be present sibmem = 105, /// `TMM` register encoded in the `reg` field of the modrm byte tmm_reg = 106, /// `TMM` register encoded in the `mod + r/m` fields of the modrm byte tmm_rm = 107, /// `TMM` register encoded in the the `V'vvvv` field (VEX/EVEX/XOP) tmm_vvvv = 108, } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] static GEN_DEBUG_OP_CODE_OPERAND_KIND: [&str; 109] = [ "None", "farbr2_2", "farbr4_2", "mem_offs", "mem", "mem_mpx", "mem_mib", "mem_vsib32x", "mem_vsib64x", "mem_vsib32y", "mem_vsib64y", "mem_vsib32z", "mem_vsib64z", "r8_or_mem", "r16_or_mem", "r32_or_mem", "r32_or_mem_mpx", "r64_or_mem", "r64_or_mem_mpx", "mm_or_mem", "xmm_or_mem", "ymm_or_mem", "zmm_or_mem", "bnd_or_mem_mpx", "k_or_mem", "r8_reg", "r8_opcode", "r16_reg", "r16_reg_mem", "r16_rm", "r16_opcode", "r32_reg", "r32_reg_mem", "r32_rm", "r32_opcode", "r32_vvvv", "r64_reg", "r64_reg_mem", "r64_rm", "r64_opcode", "r64_vvvv", "seg_reg", "k_reg", "kp1_reg", "k_rm", "k_vvvv", "mm_reg", "mm_rm", "xmm_reg", "xmm_rm", "xmm_vvvv", "xmmp3_vvvv", "xmm_is4", "xmm_is5", "ymm_reg", "ymm_rm", "ymm_vvvv", "ymm_is4", "ymm_is5", "zmm_reg", "zmm_rm", "zmm_vvvv", "zmmp3_vvvv", "cr_reg", "dr_reg", "tr_reg", "bnd_reg", "es", "cs", "ss", "ds", "fs", "gs", "al", "cl", "ax", "dx", "eax", "rax", "st0", "sti_opcode", "imm4_m2z", "imm8", "imm8_const_1", "imm8sex16", "imm8sex32", "imm8sex64", "imm16", "imm32", "imm32sex64", "imm64", "seg_rSI", "es_rDI", "seg_rDI", "seg_rBX_al", "br16_1", "br32_1", "br64_1", "br16_2", "br32_4", "br64_4", "xbegin_2", "xbegin_4", "brdisp_2", "brdisp_4", "sibmem", "tmm_reg", "tmm_rm", "tmm_vvvv", ]; #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl fmt::Debug for OpCodeOperandKind { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_OP_CODE_OPERAND_KIND[*self as usize]) } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl Default for OpCodeOperandKind { #[must_use] #[inline] fn default() -> Self { OpCodeOperandKind::None } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl OpCodeOperandKind { /// Iterates over all 
`OpCodeOperandKind` enum values #[inline] pub fn values() -> impl Iterator<Item = OpCodeOperandKind> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, OpCodeOperandKind>(x as u8) }) } } #[test] #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] fn test_opcodeoperandkind_values() { let mut iter = OpCodeOperandKind::values(); assert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT, Some(IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT - 1); let values: Vec<OpCodeOperandKind> = OpCodeOperandKind::values().collect(); assert_eq!(values.len(), IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl TryFrom<usize> for OpCodeOperandKind { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid OpCodeOperandKind value")) } } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[test] #[rustfmt::skip] fn test_opcodeoperandkind_try_from_usize() { for value in OpCodeOperandKind::values() { let converted = <OpCodeOperandKind as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<OpCodeOperandKind as TryFrom<usize>>::try_from(IcedConstants::OP_CODE_OPERAND_KIND_ENUM_COUNT).is_err()); assert!(<OpCodeOperandKind as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: OpCodeOperandKind // GENERATOR-BEGIN: CpuidFeature // ⚠️This was generated by GENERATOR!🦹‍♂️ /// `CPUID` feature flags #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)] #[cfg(feature = "instr_info")] #[allow(non_camel_case_types)] pub enum CpuidFeature { /// 8086 or later INTEL8086 = 0, /// 8086 only INTEL8086_ONLY = 1, /// 80186 or later INTEL186 = 2, /// 80286 or later INTEL286 = 3, /// 80286 only INTEL286_ONLY = 4, /// 80386 or later INTEL386 = 5, /// 80386 only INTEL386_ONLY = 6, /// 80386 A0-B0 stepping only (`XBTS`, `IBTS` instructions) INTEL386_A0_ONLY = 7, /// Intel486 or later INTEL486 = 8, /// Intel486 A stepping only (`CMPXCHG`) INTEL486_A_ONLY = 9, /// UMOV (80386 and Intel486) UMOV = 10, /// IA-64 IA64 = 11, /// CPUID.80000001H:EDX.LM\[bit 29\] X64 = 12, /// CPUID.(EAX=07H, ECX=0H):EBX.ADX\[bit 19\] ADX = 13, /// CPUID.01H:ECX.AES\[bit 25\] AES = 14, /// CPUID.01H:ECX.AVX\[bit 28\] AVX = 15, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX2\[bit 5\] AVX2 = 16, /// CPUID.(EAX=07H, ECX=0H):EDX.AVX512_4FMAPS\[bit 3\] AVX512_4FMAPS = 17, /// CPUID.(EAX=07H, ECX=0H):EDX.AVX512_4VNNIW\[bit 2\] AVX512_4VNNIW = 18, /// CPUID.(EAX=07H, ECX=1H):EAX.AVX512_BF16\[bit 5\] AVX512_BF16 = 19, /// CPUID.(EAX=07H, ECX=0H):ECX.AVX512_BITALG\[bit 12\] AVX512_BITALG = 20, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512_IFMA\[bit 21\] AVX512_IFMA = 21, /// 
CPUID.(EAX=07H, ECX=0H):ECX.AVX512_VBMI\[bit 1\] AVX512_VBMI = 22, /// CPUID.(EAX=07H, ECX=0H):ECX.AVX512_VBMI2\[bit 6\] AVX512_VBMI2 = 23, /// CPUID.(EAX=07H, ECX=0H):ECX.AVX512_VNNI\[bit 11\] AVX512_VNNI = 24, /// CPUID.(EAX=07H, ECX=0H):EDX.AVX512_VP2INTERSECT\[bit 08\] AVX512_VP2INTERSECT = 25, /// CPUID.(EAX=07H, ECX=0H):ECX.AVX512_VPOPCNTDQ\[bit 14\] AVX512_VPOPCNTDQ = 26, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512BW\[bit 30\] AVX512BW = 27, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512CD\[bit 28\] AVX512CD = 28, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512DQ\[bit 17\] AVX512DQ = 29, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512ER\[bit 27\] AVX512ER = 30, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512F\[bit 16\] AVX512F = 31, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512PF\[bit 26\] AVX512PF = 32, /// CPUID.(EAX=07H, ECX=0H):EBX.AVX512VL\[bit 31\] AVX512VL = 33, /// CPUID.(EAX=07H, ECX=0H):EBX.BMI1\[bit 3\] BMI1 = 34, /// CPUID.(EAX=07H, ECX=0H):EBX.BMI2\[bit 8\] BMI2 = 35, /// CPUID.(EAX=07H, ECX=0H):EDX.CET_IBT\[bit 20\] CET_IBT = 36, /// CPUID.(EAX=07H, ECX=0H):ECX.CET_SS\[bit 7\] CET_SS = 37, /// `CL1INVMB` instruction (Intel SCC = Single-Chip Computer) CL1INVMB = 38, /// CPUID.(EAX=07H, ECX=0H):ECX.CLDEMOTE\[bit 25\] CLDEMOTE = 39, /// CPUID.(EAX=07H, ECX=0H):EBX.CLFLUSHOPT\[bit 23\] CLFLUSHOPT = 40, /// CPUID.01H:EDX.CLFSH\[bit 19\] CLFSH = 41, /// CPUID.(EAX=07H, ECX=0H):EBX.CLWB\[bit 24\] CLWB = 42, /// CPUID.80000008H:EBX.CLZERO\[bit 0\] CLZERO = 43, /// CPUID.01H:EDX.CMOV\[bit 15\] CMOV = 44, /// CPUID.01H:ECX.CMPXCHG16B\[bit 13\] CMPXCHG16B = 45, /// `RFLAGS.ID` can be toggled CPUID = 46, /// CPUID.01H:EDX.CX8\[bit 8\] CX8 = 47, /// CPUID.80000001H:EDX.3DNOW\[bit 31\] D3NOW = 48, /// CPUID.80000001H:EDX.3DNOWEXT\[bit 30\] D3NOWEXT = 49, /// CPUID.(EAX=12H, ECX=0H):EAX.OSS\[bit 5\] OSS = 50, /// CPUID.(EAX=07H, ECX=0H):ECX.ENQCMD\[bit 29\] ENQCMD = 51, /// CPUID.01H:ECX.F16C\[bit 29\] F16C = 52, /// CPUID.01H:ECX.FMA\[bit 12\] FMA = 53, /// CPUID.80000001H:ECX.FMA4\[bit 16\] FMA4 = 54, /// 8087 or later (CPUID.01H:EDX.FPU\[bit 0\]) FPU = 55, /// 80287 or later FPU287 = 56, /// 80287XL only FPU287XL_ONLY = 57, /// 80387 or later FPU387 = 58, /// 80387SL only FPU387SL_ONLY = 59, /// CPUID.(EAX=07H, ECX=0H):EBX.FSGSBASE\[bit 0\] FSGSBASE = 60, /// CPUID.01H:EDX.FXSR\[bit 24\] FXSR = 61, /// Cyrix (AMD Geode GX/LX) 3DNow! 
instructions CYRIX_D3NOW = 62, /// CPUID.(EAX=07H, ECX=0H):ECX.GFNI\[bit 8\] GFNI = 63, /// CPUID.(EAX=07H, ECX=0H):EBX.HLE\[bit 4\] HLE = 64, /// [`HLE`] or [`RTM`] /// /// [`HLE`]: enum.CpuidFeature.html#variant.HLE /// [`RTM`]: enum.CpuidFeature.html#variant.RTM HLE_or_RTM = 65, /// IA32_VMX_EPT_VPID_CAP\[bit 20\] INVEPT = 66, /// CPUID.(EAX=07H, ECX=0H):EBX.INVPCID\[bit 10\] INVPCID = 67, /// IA32_VMX_EPT_VPID_CAP\[bit 32\] INVVPID = 68, /// CPUID.80000001H:ECX.LWP\[bit 15\] LWP = 69, /// CPUID.80000001H:ECX.LZCNT\[bit 5\] LZCNT = 70, /// CPUID.80000008H:EBX.MCOMMIT\[bit 8\] MCOMMIT = 71, /// CPUID.01H:EDX.MMX\[bit 23\] MMX = 72, /// CPUID.01H:ECX.MONITOR\[bit 3\] MONITOR = 73, /// CPUID.80000001H:ECX.MONITORX\[bit 29\] MONITORX = 74, /// CPUID.01H:ECX.MOVBE\[bit 22\] MOVBE = 75, /// CPUID.(EAX=07H, ECX=0H):ECX.MOVDIR64B\[bit 28\] MOVDIR64B = 76, /// CPUID.(EAX=07H, ECX=0H):ECX.MOVDIRI\[bit 27\] MOVDIRI = 77, /// CPUID.(EAX=07H, ECX=0H):EBX.MPX\[bit 14\] MPX = 78, /// CPUID.01H:EDX.MSR\[bit 5\] MSR = 79, /// Multi-byte nops (`0F1F /0`): CPUID.01H.EAX\[Bits 11:8\] = 0110B or 1111B MULTIBYTENOP = 80, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.ACE\[Bits 7:6\] = 11B (\[6\] = exists, \[7\] = enabled) PADLOCK_ACE = 81, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.PHE\[Bits 11:10\] = 11B (\[10\] = exists, \[11\] = enabled) PADLOCK_PHE = 82, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.PMM\[Bits 13:12\] = 11B (\[12\] = exists, \[13\] = enabled) PADLOCK_PMM = 83, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.RNG\[Bits 3:2\] = 11B (\[2\] = exists, \[3\] = enabled) PADLOCK_RNG = 84, /// `PAUSE` instruction (Pentium 4 or later) PAUSE = 85, /// CPUID.01H:ECX.PCLMULQDQ\[bit 1\] PCLMULQDQ = 86, /// CPUID.(EAX=07H, ECX=0H):EBX.PCOMMIT\[bit 22\] PCOMMIT = 87, /// CPUID.(EAX=07H, ECX=0H):EDX.PCONFIG\[bit 18\] PCONFIG = 88, /// CPUID.(EAX=07H, ECX=0H):ECX.PKU\[bit 3\] PKU = 89, /// CPUID.01H:ECX.POPCNT\[bit 23\] POPCNT = 90, /// CPUID.80000001H:ECX.PREFETCHW\[bit 8\] PREFETCHW = 91, /// CPUID.(EAX=07H, ECX=0H):ECX.PREFETCHWT1\[bit 0\] PREFETCHWT1 = 92, /// CPUID.(EAX=14H, ECX=0H):EBX.PTWRITE\[bit 4\] PTWRITE = 93, /// CPUID.(EAX=07H, ECX=0H):ECX.RDPID\[bit 22\] RDPID = 94, /// `RDPMC` instruction (Pentium MMX or later, or Pentium Pro or later) RDPMC = 95, /// CPUID.80000008H:EBX.RDPRU\[bit 4\] RDPRU = 96, /// CPUID.01H:ECX.RDRAND\[bit 30\] RDRAND = 97, /// CPUID.(EAX=07H, ECX=0H):EBX.RDSEED\[bit 18\] RDSEED = 98, /// CPUID.80000001H:EDX.RDTSCP\[bit 27\] RDTSCP = 99, /// CPUID.(EAX=07H, ECX=0H):EBX.RTM\[bit 11\] RTM = 100, /// CPUID.01H:EDX.SEP\[bit 11\] SEP = 101, /// CPUID.(EAX=12H, ECX=0H):EAX.SGX1\[bit 0\] SGX1 = 102, /// CPUID.(EAX=07H, ECX=0H):EBX.SHA\[bit 29\] SHA = 103, /// CPUID.80000001H:ECX.SKINIT\[bit 12\] SKINIT = 104, /// [`SKINIT`] or [`SVM`] /// /// [`SKINIT`]: enum.CpuidFeature.html#variant.SKINIT /// [`SVM`]: enum.CpuidFeature.html#variant.SVM SKINIT_or_SVM = 105, /// CPUID.(EAX=07H, ECX=0H):EBX.SMAP\[bit 20\] SMAP = 106, /// CPUID.01H:ECX.SMX\[bit 6\] SMX = 107, /// CPUID.01H:EDX.SSE\[bit 25\] SSE = 108, /// CPUID.01H:EDX.SSE2\[bit 26\] SSE2 = 109, /// CPUID.01H:ECX.SSE3\[bit 0\] SSE3 = 110, /// CPUID.01H:ECX.SSE4_1\[bit 19\] SSE4_1 = 111, /// CPUID.01H:ECX.SSE4_2\[bit 20\] SSE4_2 = 112, /// CPUID.80000001H:ECX.SSE4A\[bit 6\] SSE4A = 113, /// CPUID.01H:ECX.SSSE3\[bit 9\] SSSE3 = 114, /// CPUID.80000001H:ECX.SVM\[bit 2\] SVM = 115, /// CPUID.8000001FH:EAX.SEV-ES\[bit 3\] SEV_ES = 116, /// 
CPUID.80000001H:EDX.SYSCALL\[bit 11\] SYSCALL = 117, /// CPUID.80000001H:ECX.TBM\[bit 21\] TBM = 118, /// CPUID.01H:EDX.TSC\[bit 4\] TSC = 119, /// CPUID.(EAX=07H, ECX=0H):ECX.VAES\[bit 9\] VAES = 120, /// CPUID.01H:ECX.VMX\[bit 5\] VMX = 121, /// CPUID.(EAX=07H, ECX=0H):ECX.VPCLMULQDQ\[bit 10\] VPCLMULQDQ = 122, /// CPUID.(EAX=07H, ECX=0H):ECX.WAITPKG\[bit 5\] WAITPKG = 123, /// CPUID.(EAX=80000008H, ECX=0H):EBX.WBNOINVD\[bit 9\] WBNOINVD = 124, /// CPUID.80000001H:ECX.XOP\[bit 11\] XOP = 125, /// CPUID.01H:ECX.XSAVE\[bit 26\] XSAVE = 126, /// CPUID.(EAX=0DH, ECX=1H):EAX.XSAVEC\[bit 1\] XSAVEC = 127, /// CPUID.(EAX=0DH, ECX=1H):EAX.XSAVEOPT\[bit 0\] XSAVEOPT = 128, /// CPUID.(EAX=0DH, ECX=1H):EAX.XSAVES\[bit 3\] XSAVES = 129, /// CPUID.8000001FH:EAX.SEV-SNP\[bit 4\] SEV_SNP = 130, /// CPUID.(EAX=07H, ECX=0H):EDX.SERIALIZE\[bit 14\] SERIALIZE = 131, /// CPUID.(EAX=07H, ECX=0H):EDX.TSXLDTRK\[bit 16\] TSXLDTRK = 132, /// CPUID.80000008H:EBX.INVLPGB\[bit 3\] INVLPGB = 133, /// CPUID.(EAX=07H, ECX=0H):EDX.AMX-BF16\[bit 22\] AMX_BF16 = 134, /// CPUID.(EAX=07H, ECX=0H):EDX.AMX-TILE\[bit 24\] AMX_TILE = 135, /// CPUID.(EAX=07H, ECX=0H):EDX.AMX-INT8\[bit 25\] AMX_INT8 = 136, /// Cyrix FPU instructions (Cyrix, AMD Geode GX/LX) CYRIX_FPU = 137, /// Cyrix SMM instructions: `SVDC`, `RSDC`, `SVLDT`, `RSLDT`, `SVTS`, `RSTS` (Cyrix, AMD Geode GX/LX) CYRIX_SMM = 138, /// Cyrix `SMINT 0F38` (6x86MX and later, AMD Geode GX/LX) CYRIX_SMINT = 139, /// Cyrix `SMINT 0F7E` (6x86 or earlier) CYRIX_SMINT_0F7E = 140, /// Cyrix SMM instructions: `RDSHR`, `WRSHR` (6x86MX, M II, Cyrix III) CYRIX_SHR = 141, /// Cyrix DDI instructions: `BB0_Reset`, `BB1_Reset`, `CPU_READ`, `CPU_WRITE` (MediaGX, GXm, GXLV, GX1) CYRIX_DDI = 142, /// Cyrix AND CPUID.80000001H:EDX.EMMI\[bit 24\] CYRIX_EMMI = 143, /// Cyrix DMI instructions: `DMINT`, `RDM` (AMD Geode GX/LX) CYRIX_DMI = 144, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.AIS\[Bits 1:0\] = 11B (\[0\] = exists, \[1\] = enabled) CENTAUR_AIS = 145, /// MOV to/from TR (80386, Intel486, Cyrix, Geode) MOV_TR = 146, /// `RSM` instruction (some 386s, some 486s, Pentium and later) SMM = 147, /// CPUID.(EAX=??H, ECX=?H):???.????\[bit ??\] TDX = 148, /// CPUID.(EAX=07H, ECX=0H):ECX.KL\[bit 23\] KL = 149, /// CPUID.19H:EBX.AESKLE\[bit 0\] AESKLE = 150, /// CPUID.19H:EBX.WIDE_KL\[bit 2\] WIDE_KL = 151, /// CPUID.(EAX=07H, ECX=0H):EDX.UINTR\[bit 5\] UINTR = 152, /// CPUID.(EAX=07H, ECX=01H):EAX.HRESET\[bit 22\] HRESET = 153, /// CPUID.(EAX=07H, ECX=01H):EAX.AVX-VNNI\[bit 4\] AVX_VNNI = 154, /// CPUID.0C0000000H:EAX >= 0C0000001H AND CPUID.0C0000001H:EDX.GMI\[Bits 5:4\] = 11B (\[4\] = exists, \[5\] = enabled) PADLOCK_GMI = 155, /// CPUID.(EAX=07H, ECX=01H):EAX.FRED\[bit 17\] FRED = 156, /// CPUID.(EAX=07H, ECX=01H):EAX.LKGS\[bit 18\] LKGS = 157, } #[cfg(feature = "instr_info")] #[rustfmt::skip] static GEN_DEBUG_CPUID_FEATURE: [&str; 158] = [ "INTEL8086", "INTEL8086_ONLY", "INTEL186", "INTEL286", "INTEL286_ONLY", "INTEL386", "INTEL386_ONLY", "INTEL386_A0_ONLY", "INTEL486", "INTEL486_A_ONLY", "UMOV", "IA64", "X64", "ADX", "AES", "AVX", "AVX2", "AVX512_4FMAPS", "AVX512_4VNNIW", "AVX512_BF16", "AVX512_BITALG", "AVX512_IFMA", "AVX512_VBMI", "AVX512_VBMI2", "AVX512_VNNI", "AVX512_VP2INTERSECT", "AVX512_VPOPCNTDQ", "AVX512BW", "AVX512CD", "AVX512DQ", "AVX512ER", "AVX512F", "AVX512PF", "AVX512VL", "BMI1", "BMI2", "CET_IBT", "CET_SS", "CL1INVMB", "CLDEMOTE", "CLFLUSHOPT", "CLFSH", "CLWB", "CLZERO", "CMOV", "CMPXCHG16B", "CPUID", "CX8", "D3NOW", "D3NOWEXT", "OSS", "ENQCMD", "F16C", 
"FMA", "FMA4", "FPU", "FPU287", "FPU287XL_ONLY", "FPU387", "FPU387SL_ONLY", "FSGSBASE", "FXSR", "CYRIX_D3NOW", "GFNI", "HLE", "HLE_or_RTM", "INVEPT", "INVPCID", "INVVPID", "LWP", "LZCNT", "MCOMMIT", "MMX", "MONITOR", "MONITORX", "MOVBE", "MOVDIR64B", "MOVDIRI", "MPX", "MSR", "MULTIBYTENOP", "PADLOCK_ACE", "PADLOCK_PHE", "PADLOCK_PMM", "PADLOCK_RNG", "PAUSE", "PCLMULQDQ", "PCOMMIT", "PCONFIG", "PKU", "POPCNT", "PREFETCHW", "PREFETCHWT1", "PTWRITE", "RDPID", "RDPMC", "RDPRU", "RDRAND", "RDSEED", "RDTSCP", "RTM", "SEP", "SGX1", "SHA", "SKINIT", "SKINIT_or_SVM", "SMAP", "SMX", "SSE", "SSE2", "SSE3", "SSE4_1", "SSE4_2", "SSE4A", "SSSE3", "SVM", "SEV_ES", "SYSCALL", "TBM", "TSC", "VAES", "VMX", "VPCLMULQDQ", "WAITPKG", "WBNOINVD", "XOP", "XSAVE", "XSAVEC", "XSAVEOPT", "XSAVES", "SEV_SNP", "SERIALIZE", "TSXLDTRK", "INVLPGB", "AMX_BF16", "AMX_TILE", "AMX_INT8", "CYRIX_FPU", "CYRIX_SMM", "CYRIX_SMINT", "CYRIX_SMINT_0F7E", "CYRIX_SHR", "CYRIX_DDI", "CYRIX_EMMI", "CYRIX_DMI", "CENTAUR_AIS", "MOV_TR", "SMM", "TDX", "KL", "AESKLE", "WIDE_KL", "UINTR", "HRESET", "AVX_VNNI", "PADLOCK_GMI", "FRED", "LKGS", ]; #[cfg(feature = "instr_info")] impl fmt::Debug for CpuidFeature { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_CPUID_FEATURE[*self as usize]) } } #[cfg(feature = "instr_info")] impl Default for CpuidFeature { #[must_use] #[inline] fn default() -> Self { CpuidFeature::INTEL8086 } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl CpuidFeature { /// Iterates over all `CpuidFeature` enum values #[inline] pub fn values() -> impl Iterator<Item = CpuidFeature> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::CPUID_FEATURE_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, CpuidFeature>(x as u8) }) } } #[test] #[cfg(feature = "instr_info")] #[rustfmt::skip] fn test_cpuidfeature_values() { let mut iter = CpuidFeature::values(); assert_eq!(iter.size_hint(), (IcedConstants::CPUID_FEATURE_ENUM_COUNT, Some(IcedConstants::CPUID_FEATURE_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1, Some(IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT - 1); let values: Vec<CpuidFeature> = CpuidFeature::values().collect(); assert_eq!(values.len(), IcedConstants::CPUID_FEATURE_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl TryFrom<usize> for CpuidFeature { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::CPUID_FEATURE_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid CpuidFeature value")) } } } #[cfg(feature = "instr_info")] #[test] #[rustfmt::skip] fn test_cpuidfeature_try_from_usize() { for value in CpuidFeature::values() { let converted = <CpuidFeature as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<CpuidFeature as TryFrom<usize>>::try_from(IcedConstants::CPUID_FEATURE_ENUM_COUNT).is_err()); assert!(<CpuidFeature as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: CpuidFeature // GENERATOR-BEGIN: RflagsBits // ⚠️This was generated by GENERATOR!🦹‍♂️ /// `RFLAGS` bits, FPU condition 
code bits and misc bits (`UIF`) supported by the instruction info code #[allow(missing_copy_implementations)] #[allow(missing_debug_implementations)] #[cfg(feature = "instr_info")] pub struct RflagsBits; #[cfg(feature = "instr_info")] impl RflagsBits { /// No bit is set pub const NONE: u32 = 0x0000_0000; /// `RFLAGS.OF` pub const OF: u32 = 0x0000_0001; /// `RFLAGS.SF` pub const SF: u32 = 0x0000_0002; /// `RFLAGS.ZF` pub const ZF: u32 = 0x0000_0004; /// `RFLAGS.AF` pub const AF: u32 = 0x0000_0008; /// `RFLAGS.CF` pub const CF: u32 = 0x0000_0010; /// `RFLAGS.PF` pub const PF: u32 = 0x0000_0020; /// `RFLAGS.DF` pub const DF: u32 = 0x0000_0040; /// `RFLAGS.IF` pub const IF: u32 = 0x0000_0080; /// `RFLAGS.AC` pub const AC: u32 = 0x0000_0100; /// `UIF` pub const UIF: u32 = 0x0000_0200; /// FPU status word bit `C0` pub const C0: u32 = 0x0000_0400; /// FPU status word bit `C1` pub const C1: u32 = 0x0000_0800; /// FPU status word bit `C2` pub const C2: u32 = 0x0000_1000; /// FPU status word bit `C3` pub const C3: u32 = 0x0000_2000; } // GENERATOR-END: RflagsBits // GENERATOR-BEGIN: OpAccess // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Operand, register and memory access #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg(feature = "instr_info")] pub enum OpAccess { /// Nothing is read and nothing is written None = 0, /// The value is read Read = 1, /// The value is sometimes read and sometimes not CondRead = 2, /// The value is completely overwritten Write = 3, /// Conditional write, sometimes it's written and sometimes it's not modified CondWrite = 4, /// The value is read and written ReadWrite = 5, /// The value is read and sometimes written ReadCondWrite = 6, /// The memory operand doesn't refer to memory (e.g. the `LEA` instruction), or the instruction doesn't read the data into a register or write to the memory location; it just prefetches/invalidates it, e.g. `INVLPG`, `PREFETCHNTA`, `VGATHERPF0DPS`, etc. Some of those instructions still check if the code can access the memory location. NoMemAccess = 7, } #[cfg(feature = "instr_info")] #[rustfmt::skip] static GEN_DEBUG_OP_ACCESS: [&str; 8] = [ "None", "Read", "CondRead", "Write", "CondWrite", "ReadWrite", "ReadCondWrite", "NoMemAccess", ]; #[cfg(feature = "instr_info")] impl fmt::Debug for OpAccess { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_OP_ACCESS[*self as usize]) } } #[cfg(feature = "instr_info")] impl Default for OpAccess { #[must_use] #[inline] fn default() -> Self { OpAccess::None } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl OpAccess { /// Iterates over all `OpAccess` enum values #[i
pub fn values() -> impl Iterator<Item = OpAccess> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::OP_ACCESS_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, OpAccess>(x as u8) }) } } #[test] #[cfg(feature = "instr_info")] #[rustfmt::skip] fn test_opaccess_values() { let mut iter = OpAccess::values(); assert_eq!(iter.size_hint(), (IcedConstants::OP_ACCESS_ENUM_COUNT, Some(IcedConstants::OP_ACCESS_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::OP_ACCESS_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::OP_ACCESS_ENUM_COUNT - 1, Some(IcedConstants::OP_ACCESS_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::OP_ACCESS_ENUM_COUNT - 1); let values: Vec<OpAccess> = OpAccess::values().collect(); assert_eq!(values.len(), IcedConstants::OP_ACCESS_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl TryFrom<usize> for OpAccess { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::OP_ACCESS_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid OpAccess value")) } } } #[cfg(feature = "instr_info")] #[test] #[rustfmt::skip] fn test_opaccess_try_from_usize() { for value in OpAccess::values() { let converted = <OpAccess as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<OpAccess as TryFrom<usize>>::try_from(IcedConstants::OP_ACCESS_ENUM_COUNT).is_err()); assert!(<OpAccess as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: OpAccess // GENERATOR-BEGIN: ConditionCode // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Instruction condition code (used by `Jcc`, `SETcc`, `CMOVcc`, `LOOPcc`) #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg(feature = "instr_info")] #[allow(non_camel_case_types)] pub enum ConditionCode { /// The instruction doesn't have a condition code None = 0, /// Overflow (`OF=1`) o = 1, /// Not overflow (`OF=0`) no = 2, /// Below (unsigned) (`CF=1`) b = 3, /// Above or equal (unsigned) (`CF=0`) ae = 4, /// Equal / zero (`ZF=1`) e = 5, /// Not equal / zero (`ZF=0`) ne = 6, /// Below or equal (unsigned) (`CF=1 or ZF=1`) be = 7, /// Above (unsigned) (`CF=0 and ZF=0`) a = 8, /// Signed (`SF=1`) s = 9, /// Not signed (`SF=0`) ns = 10, /// Parity (`PF=1`) p = 11, /// Not parity (`PF=0`) np = 12, /// Less (signed) (`SF!=OF`) l = 13, /// Greater than or equal (signed) (`SF=OF`) ge = 14, /// Less than or equal (signed) (`ZF=1 or SF!=OF`) le = 15, /// Greater (signed) (`ZF=0 and SF=OF`) g = 16, } #[cfg(feature = "instr_info")] #[rustfmt::skip] static GEN_DEBUG_CONDITION_CODE: [&str; 17] = [ "None", "o", "no", "b", "ae", "e", "ne", "be", "a", "s", "ns", "p", "np", "l", "ge", "le", "g", ]; #[cfg(feature = "instr_info")] impl fmt::Debug for ConditionCode { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_CONDITION_CODE[*self as usize]) } } #[cfg(feature = "instr_info")] impl Default for ConditionCode { #[must_use] #[inline] fn default() -> Self { ConditionCode::None } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl ConditionCode { /// Iterates over all `ConditionCode` enum values #[inline] pub fn values() -> impl Iterator<Item = ConditionCode> + ExactSizeIterator + FusedIterator { // SAFETY: all values 
0-max are valid enum values (0..IcedConstants::CONDITION_CODE_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, ConditionCode>(x as u8) }) } } #[test] #[cfg(feature = "instr_info")] #[rustfmt::skip] fn test_conditioncode_values() { let mut iter = ConditionCode::values(); assert_eq!(iter.size_hint(), (IcedConstants::CONDITION_CODE_ENUM_COUNT, Some(IcedConstants::CONDITION_CODE_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::CONDITION_CODE_ENUM_COUNT - 1, Some(IcedConstants::CONDITION_CODE_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT - 1); let values: Vec<ConditionCode> = ConditionCode::values().collect(); assert_eq!(values.len(), IcedConstants::CONDITION_CODE_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(feature = "instr_info")] #[rustfmt::skip] impl TryFrom<usize> for ConditionCode { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::CONDITION_CODE_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid ConditionCode value")) } } } #[cfg(feature = "instr_info")] #[test] #[rustfmt::skip] fn test_conditioncode_try_from_usize() { for value in ConditionCode::values() { let converted = <ConditionCode as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<ConditionCode as TryFrom<usize>>::try_from(IcedConstants::CONDITION_CODE_ENUM_COUNT).is_err()); assert!(<ConditionCode as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: ConditionCode // GENERATOR-BEGIN: MandatoryPrefix // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Mandatory prefix #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg(all(feature = "encoder", feature = "op_code_info"))] pub enum MandatoryPrefix { /// No mandatory prefix (legacy and 3DNow! 
tables only) None = 0, /// Empty mandatory prefix (no `66`, `F3` or `F2` prefix) PNP = 1, /// `66` prefix P66 = 2, /// `F3` prefix PF3 = 3, /// `F2` prefix PF2 = 4, } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] static GEN_DEBUG_MANDATORY_PREFIX: [&str; 5] = [ "None", "PNP", "P66", "PF3", "PF2", ]; #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl fmt::Debug for MandatoryPrefix { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_MANDATORY_PREFIX[*self as usize]) } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl Default for MandatoryPrefix { #[must_use] #[inline] fn default() -> Self { MandatoryPrefix::None } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl MandatoryPrefix { /// Iterates over all `MandatoryPrefix` enum values #[inline] pub fn values() -> impl Iterator<Item = MandatoryPrefix> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::MANDATORY_PREFIX_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, MandatoryPrefix>(x as u8) }) } } #[test] #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] fn test_mandatoryprefix_values() { let mut iter = MandatoryPrefix::values(); assert_eq!(iter.size_hint(), (IcedConstants::MANDATORY_PREFIX_ENUM_COUNT, Some(IcedConstants::MANDATORY_PREFIX_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1, Some(IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT - 1); let values: Vec<MandatoryPrefix> = MandatoryPrefix::values().collect(); assert_eq!(values.len(), IcedConstants::MANDATORY_PREFIX_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl TryFrom<usize> for MandatoryPrefix { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::MANDATORY_PREFIX_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid MandatoryPrefix value")) } } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[test] #[rustfmt::skip] fn test_mandatoryprefix_try_from_usize() { for value in MandatoryPrefix::values() { let converted = <MandatoryPrefix as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<MandatoryPrefix as TryFrom<usize>>::try_from(IcedConstants::MANDATORY_PREFIX_ENUM_COUNT).is_err()); assert!(<MandatoryPrefix as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: MandatoryPrefix // GENERATOR-BEGIN: OpCodeTableKind // ⚠️This was generated by GENERATOR!🦹‍♂️ /// Opcode table #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[cfg_attr(not(feature = "exhaustive_enums"), non_exhaustive)] pub enum OpCodeTableKind { /// Legacy encoding table Normal = 0, /// `0Fxx` table (legacy, VEX, EVEX) T0F = 1, /// `0F38xx` table (legacy, VEX, EVEX) T0F38 = 2, /// `0F3Axx` table (legacy, VEX, EVEX) T0F3A = 3, /// `XOP8` table (XOP) XOP8 = 4, /// `XOP9` table (XOP) XOP9 = 5, /// `XOPA` table (XOP) XOPA = 6, } #[cfg(all(feature = "encoder", feature = 
"op_code_info"))] #[rustfmt::skip] static GEN_DEBUG_OP_CODE_TABLE_KIND: [&str; 7] = [ "Normal", "T0F", "T0F38", "T0F3A", "XOP8", "XOP9", "XOPA", ]; #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl fmt::Debug for OpCodeTableKind { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", GEN_DEBUG_OP_CODE_TABLE_KIND[*self as usize]) } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] impl Default for OpCodeTableKind { #[must_use] #[inline] fn default() -> Self { OpCodeTableKind::Normal } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl OpCodeTableKind { /// Iterates over all `OpCodeTableKind` enum values #[inline] pub fn values() -> impl Iterator<Item = OpCodeTableKind> + ExactSizeIterator + FusedIterator { // SAFETY: all values 0-max are valid enum values (0..IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT).map(|x| unsafe { core::mem::transmute::<u8, OpCodeTableKind>(x as u8) }) } } #[test] #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] fn test_opcodetablekind_values() { let mut iter = OpCodeTableKind::values(); assert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT, Some(IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT))); assert_eq!(iter.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT); assert!(iter.next().is_some()); assert_eq!(iter.size_hint(), (IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1, Some(IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1))); assert_eq!(iter.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT - 1); let values: Vec<OpCodeTableKind> = OpCodeTableKind::values().collect(); assert_eq!(values.len(), IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT); for (i, value) in values.into_iter().enumerate() { assert_eq!(i, value as usize); } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[rustfmt::skip] impl TryFrom<usize> for OpCodeTableKind { type Error = IcedError; #[inline] fn try_from(value: usize) -> Result<Self, Self::Error> { if value < IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT { // SAFETY: all values 0-max are valid enum values Ok(unsafe { mem::transmute(value as u8) }) } else { Err(IcedError::new("Invalid OpCodeTableKind value")) } } } #[cfg(all(feature = "encoder", feature = "op_code_info"))] #[test] #[rustfmt::skip] fn test_opcodetablekind_try_from_usize() { for value in OpCodeTableKind::values() { let converted = <OpCodeTableKind as TryFrom<usize>>::try_from(value as usize).unwrap(); assert_eq!(converted, value); } assert!(<OpCodeTableKind as TryFrom<usize>>::try_from(IcedConstants::OP_CODE_TABLE_KIND_ENUM_COUNT).is_err()); assert!(<OpCodeTableKind as TryFrom<usize>>::try_from(core::usize::MAX).is_err()); } // GENERATOR-END: OpCodeTableKind
nline]
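// A minimal, self-contained sketch of the `values()` + `TryFrom<usize>` idiom that
// every generated enum in the record above repeats; `Demo` and `DEMO_COUNT` are
// hypothetical stand-ins for an iced-x86 enum and its `IcedConstants` count, not
// real items from that crate.
use core::convert::TryFrom;

#[allow(dead_code)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[repr(u8)] // fixes the layout so the u8 transmutes below are sound
enum Demo { A = 0, B = 1, C = 2 }
const DEMO_COUNT: usize = 3;

impl Demo {
    /// Iterates over all `Demo` enum values
    fn values() -> impl Iterator<Item = Demo> + ExactSizeIterator {
        // SAFETY: every discriminant in 0..DEMO_COUNT is a valid variant
        (0..DEMO_COUNT).map(|x| unsafe { core::mem::transmute::<u8, Demo>(x as u8) })
    }
}

impl TryFrom<usize> for Demo {
    type Error = &'static str;
    fn try_from(value: usize) -> Result<Self, Self::Error> {
        if value < DEMO_COUNT {
            // SAFETY: bounds-checked above, so the transmuted byte is a valid variant
            Ok(unsafe { core::mem::transmute(value as u8) })
        } else {
            Err("Invalid Demo value")
        }
    }
}

fn main() {
    assert_eq!(Demo::values().len(), DEMO_COUNT);
    assert_eq!(Demo::try_from(1), Ok(Demo::B));
    assert!(Demo::try_from(DEMO_COUNT).is_err());
}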
test_instance_usage_audit_log.py
# Copyright (c) 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime from webob import exc from nova.api.openstack.compute.plugins.v3 import \ instance_usage_audit_log as ial from nova import context from nova import db from nova.openstack.common import timeutils from nova import test from nova.tests.api.openstack import fakes from nova.tests.objects import test_service from nova import utils import urllib service_base = test_service.fake_service TEST_COMPUTE_SERVICES = [dict(service_base, host='foo', topic='compute'), dict(service_base, host='bar', topic='compute'), dict(service_base, host='baz', topic='compute'), dict(service_base, host='plonk', topic='compute'), dict(service_base, host='wibble', topic='bogus'), ] begin1 = datetime.datetime(2012, 7, 4, 6, 0, 0) begin2 = end1 = datetime.datetime(2012, 7, 5, 6, 0, 0) begin3 = end2 = datetime.datetime(2012, 7, 6, 6, 0, 0) end3 = datetime.datetime(2012, 7, 7, 6, 0, 0) #test data TEST_LOGS1 = [ #all services done, no errors. dict(host="plonk", period_beginning=begin1, period_ending=end1, state="DONE", errors=0, task_items=23, message="test1"), dict(host="baz", period_beginning=begin1, period_ending=end1, state="DONE", errors=0, task_items=17, message="test2"), dict(host="bar", period_beginning=begin1, period_ending=end1, state="DONE", errors=0, task_items=10, message="test3"), dict(host="foo", period_beginning=begin1, period_ending=end1, state="DONE", errors=0, task_items=7, message="test4"), ] TEST_LOGS2 = [ #some still running... dict(host="plonk", period_beginning=begin2, period_ending=end2, state="DONE", errors=0, task_items=23, message="test5"), dict(host="baz", period_beginning=begin2, period_ending=end2, state="DONE", errors=0, task_items=17, message="test6"), dict(host="bar", period_beginning=begin2, period_ending=end2, state="RUNNING", errors=0, task_items=10, message="test7"), dict(host="foo", period_beginning=begin2, period_ending=end2, state="DONE", errors=0, task_items=7, message="test8"), ] TEST_LOGS3 = [ #some errors.. 
dict(host="plonk", period_beginning=begin3, period_ending=end3, state="DONE", errors=0, task_items=23, message="test9"), dict(host="baz", period_beginning=begin3, period_ending=end3, state="DONE", errors=2, task_items=17, message="test10"), dict(host="bar", period_beginning=begin3, period_ending=end3, state="DONE", errors=0, task_items=10, message="test11"), dict(host="foo", period_beginning=begin3, period_ending=end3, state="DONE", errors=1, task_items=7, message="test12"), ] def fake_task_log_get_all(context, task_name, begin, end, host=None, state=None): assert task_name == "instance_usage_audit" if begin == begin1 and end == end1: return TEST_LOGS1 if begin == begin2 and end == end2: return TEST_LOGS2 if begin == begin3 and end == end3: return TEST_LOGS3 raise AssertionError("Invalid date %s to %s" % (begin, end)) def fake_last_completed_audit_period(unit=None, before=None): audit_periods = [(begin3, end3), (begin2, end2), (begin1, end1)] if before is not None: for begin, end in audit_periods: if before > end: return begin, end raise AssertionError("Invalid before date %s" % (before)) return begin1, end1 class InstanceUsageAuditLogTest(test.TestCase): def setUp(self): super(InstanceUsageAuditLogTest, self).setUp() self.context = context.get_admin_context() timeutils.set_time_override(datetime.datetime(2012, 7, 5, 10, 0, 0)) self.controller = ial.InstanceUsageAuditLogController() self.host_api = self.controller.host_api def fake_service_get_all(context, disabled): self.assertTrue(disabled is None) return TEST_COMPUTE_SERVICES self.stubs.Set(utils, 'last_completed_audit_period', fake_last_completed_audit_period) self.stubs.Set(db, 'service_get_all', fake_service_get_all) self.stubs.Set(db, 'task_log_get_all', fake_task_log_get_all) def tearDown(self): super(InstanceUsageAuditLogTest, self).tearDown() timeutils.clear_time_override() def test_index(self): req = fakes.HTTPRequestV3.blank('/os-instance_usage_audit_log') result = self.controller.index(req) self.assertIn('instance_usage_audit_log', result) logs = result['instance_usage_audit_log'] self.assertEquals(57, logs['total_instances']) self.assertEquals(0, logs['total_errors']) self.assertEquals(4, len(logs['log'])) self.assertEquals(4, logs['num_hosts']) self.assertEquals(4, logs['num_hosts_done']) self.assertEquals(0, logs['num_hosts_running']) self.assertEquals(0, logs['num_hosts_not_run']) self.assertEquals("ALL hosts done. 0 errors.", logs['overall_status']) def test_index_with_format1(self):
def test_index_with_format2(self): before = urllib.quote('2012-07-05 10:00:00.10') req = fakes.HTTPRequestV3.blank( '/os-instance_usage_audit_log?before=' + before) result = self.controller.index(req) self.assertIn('instance_usage_audit_log', result) logs = result['instance_usage_audit_log'] self.assertEquals(57, logs['total_instances']) self.assertEquals(0, logs['total_errors']) self.assertEquals(4, len(logs['log'])) self.assertEquals(4, logs['num_hosts']) self.assertEquals(4, logs['num_hosts_done']) self.assertEquals(0, logs['num_hosts_running']) self.assertEquals(0, logs['num_hosts_not_run']) self.assertEquals("ALL hosts done. 0 errors.", logs['overall_status']) def test_index_with_invalid_format(self): req = fakes.HTTPRequestV3.blank( '/os-instance_usage_audit_log?before=abc') self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) def test_index_with_running(self): before = urllib.quote('2012-07-06 10:00:00') req = fakes.HTTPRequestV3.blank( '/os-instance_usage_audit_log?before=' + before) result = self.controller.index(req) self.assertIn('instance_usage_audit_log', result) logs = result['instance_usage_audit_log'] self.assertEquals(57, logs['total_instances']) self.assertEquals(0, logs['total_errors']) self.assertEquals(4, len(logs['log'])) self.assertEquals(4, logs['num_hosts']) self.assertEquals(3, logs['num_hosts_done']) self.assertEquals(1, logs['num_hosts_running']) self.assertEquals(0, logs['num_hosts_not_run']) self.assertEquals("3 of 4 hosts done. 0 errors.", logs['overall_status']) def test_index_with_errors(self): before = urllib.quote('2012-07-07 10:00:00') req = fakes.HTTPRequestV3.blank( '/os-instance_usage_audit_log?before=' + before) result = self.controller.index(req) self.assertIn('instance_usage_audit_log', result) logs = result['instance_usage_audit_log'] self.assertEquals(57, logs['total_instances']) self.assertEquals(3, logs['total_errors']) self.assertEquals(4, len(logs['log'])) self.assertEquals(4, logs['num_hosts']) self.assertEquals(4, logs['num_hosts_done']) self.assertEquals(0, logs['num_hosts_running']) self.assertEquals(0, logs['num_hosts_not_run']) self.assertEquals("ALL hosts done. 3 errors.", logs['overall_status'])
before = urllib.quote("2012-07-05 10:00:00") req = fakes.HTTPRequestV3.blank( '/os-instance_usage_audit_log?before=' + before) result = self.controller.index(req) self.assertIn('instance_usage_audit_log', result) logs = result['instance_usage_audit_log'] self.assertEquals(57, logs['total_instances']) self.assertEquals(0, logs['total_errors']) self.assertEquals(4, len(logs['log'])) self.assertEquals(4, logs['num_hosts']) self.assertEquals(4, logs['num_hosts_done']) self.assertEquals(0, logs['num_hosts_running']) self.assertEquals(0, logs['num_hosts_not_run']) self.assertEquals("ALL hosts done. 0 errors.", logs['overall_status'])
tuple.rs
//! use crate::{Apply, Core, Delta, DeltaResult, FromDelta, IntoDelta}; impl<T0> Core for (T0,) where T0: Core { type Delta = ( Option<<T0 as Core>::Delta>, ); } impl<T0> Apply for (T0,) where T0: Apply, { fn apply(&self, delta: Self::Delta) -> DeltaResult<Self> { let field0: T0 = match delta.0 { Some(d) => self.0.apply(d)?, None => self.0.clone(), }; Ok((field0,)) } } impl<T0> Delta for (T0,) where T0: Delta, { fn delta(&self, rhs: &Self) -> DeltaResult<Self::Delta> { let delta0: <T0 as Core>::Delta = Delta::delta(&self.0, &rhs.0)?; Ok(( if self.0 == rhs.0 { None } else { Some(delta0) }, )) } } impl<T0> FromDelta for (T0,) where T0: FromDelta, { fn from_delta(delta: Self::Delta) -> DeltaResult<Self> { Ok(( delta.0.map(<T0>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T0 as Core>::Delta>"))??, )) } } impl<T0> IntoDelta for (T0,) where T0: IntoDelta, { fn into_delta(self) -> DeltaResult<Self::Delta> { Ok(( Some(self.0.into_delta()?), )) } } impl<T0, T1> Core for (T0, T1) where T0: Core, T1: Core, { type Delta = ( Option<<T0 as Core>::Delta>, Option<<T1 as Core>::Delta> ); } impl<T0, T1> Apply for (T0, T1) where T0: Apply, T1: Apply, { fn apply(&self, delta: Self::Delta) -> DeltaResult<Self> { let field0: T0 = match delta.0 { Some(d) => self.0.apply(d)?, None => self.0.clone(), }; let field1: T1 = match delta.1 { Some(d) => self.1.apply(d)?, None => self.1.clone(), }; Ok((field0, field1)) } } impl<T0, T1> Delta for (T0, T1) where T0: Delta, T1: Delta, { fn delta(&self, rhs: &Self) -> DeltaResult<Self::Delta> { let delta0: <T0 as Core>::Delta = Delta::delta(&self.0, &rhs.0)?; let delta1: <T1 as Core>::Delta = Delta::delta(&self.1, &rhs.1)?; Ok(( if self.0 == rhs.0 { None } else { Some(delta0) }, if self.1 == rhs.1 { None } else { Some(delta1) }, )) } } impl<T0, T1> FromDelta for (T0, T1) where T0: FromDelta, T1: FromDelta, { fn from_delta(delta: Self::Delta) -> DeltaResult<Self> { Ok(( delta.0.map(<T0>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T0 as Core>::Delta>"))??, delta.1.map(<T1>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T1 as Core>::Delta>"))??, )) } } impl<T0, T1> IntoDelta for (T0, T1) where T0: IntoDelta, T1: IntoDelta, { fn into_delta(self) -> DeltaResult<Self::Delta> { Ok(( Some(self.0.into_delta()?), Some(self.1.into_delta()?), )) } } impl<T0, T1, T2> Core for (T0, T1, T2) where T0: Core, T1: Core, T2: Core, { type Delta = ( Option<<T0 as Core>::Delta>, Option<<T1 as Core>::Delta>, Option<<T2 as Core>::Delta> ); } impl<T0, T1, T2> Apply for (T0, T1, T2) where T0: Apply, T1: Apply, T2: Apply, { fn apply(&self, delta: Self::Delta) -> DeltaResult<Self> { let field0: T0 = match delta.0 { Some(d) => self.0.apply(d)?, None => self.0.clone(), }; let field1: T1 = match delta.1 { Some(d) => self.1.apply(d)?, None => self.1.clone(), }; let field2: T2 = match delta.2 { Some(d) => self.2.apply(d)?, None => self.2.clone(), }; Ok((field0, field1, field2)) } } impl<T0, T1, T2> Delta for (T0, T1, T2) where T0: Delta, T1: Delta, T2: Delta, { fn delta(&self, rhs: &Self) -> DeltaResult<Self::Delta> { let delta0: <T0 as Core>::Delta = Delta::delta(&self.0, &rhs.0)?; let delta1: <T1 as Core>::Delta = Delta::delta(&self.1, &rhs.1)?; let delta2: <T2 as Core>::Delta = Delta::delta(&self.2, &rhs.2)?; Ok(( if self.0 == rhs.0 { None } else { Some(delta0) }, if self.1 == rhs.1 { None } else { Some(delta1) }, if self.2 == rhs.2 { None } else { Some(delta2) }, )) } } impl<T0, T1, T2> FromDelta for (T0, T1, T2) where T0: FromDelta, T1: FromDelta, T2: FromDelta, { fn from_delta(delta: 
Self::Delta) -> DeltaResult<Self> { Ok(( delta.0.map(<T0>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T0 as Core>::Delta>"))??, delta.1.map(<T1>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T1 as Core>::Delta>"))??, delta.2.map(<T2>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T2 as Core>::Delta>"))??, )) } } impl<T0, T1, T2> IntoDelta for (T0, T1, T2) where T0: IntoDelta, T1: IntoDelta, T2: IntoDelta, { fn into_delta(self) -> DeltaResult<Self::Delta> { Ok(( Some(self.0.into_delta()?), Some(self.1.into_delta()?), Some(self.2.into_delta()?), )) } } impl<T0, T1, T2, T3> Core for (T0, T1, T2, T3) where T0: Core, T1: Core, T2: Core, T3: Core, { type Delta = ( Option<<T0 as Core>::Delta>, Option<<T1 as Core>::Delta>, Option<<T2 as Core>::Delta>, Option<<T3 as Core>::Delta> ); } impl<T0, T1, T2, T3> Apply for (T0, T1, T2, T3) where T0: Apply, T1: Apply, T2: Apply, T3: Apply, { fn apply(&self, delta: Self::Delta) -> DeltaResult<Self> { let field0: T0 = match delta.0 { Some(d) => self.0.apply(d)?, None => self.0.clone(), }; let field1: T1 = match delta.1 { Some(d) => self.1.apply(d)?, None => self.1.clone(), }; let field2: T2 = match delta.2 { Some(d) => self.2.apply(d)?, None => self.2.clone(), }; let field3: T3 = match delta.3 { Some(d) => self.3.apply(d)?, None => self.3.clone(), }; Ok((field0, field1, field2, field3)) } } impl<T0, T1, T2, T3> Delta for (T0, T1, T2, T3) where T0: Delta, T1: Delta, T2: Delta, T3: Delta, { fn delta(&self, rhs: &Self) -> DeltaResult<Self::Delta> { let delta0: <T0 as Core>::Delta = Delta::delta(&self.0, &rhs.0)?; let delta1: <T1 as Core>::Delta = Delta::delta(&self.1, &rhs.1)?; let delta2: <T2 as Core>::Delta = Delta::delta(&self.2, &rhs.2)?; let delta3: <T3 as Core>::Delta = Delta::delta(&self.3, &rhs.3)?; Ok(( if self.0 == rhs.0 { None } else { Some(delta0) }, if self.1 == rhs.1 { None } else { Some(delta1) }, if self.2 == rhs.2 { None } else { Some(delta2) }, if self.3 == rhs.3 { None } else { Some(delta3) }, )) } } impl<T0, T1, T2, T3> FromDelta for (T0, T1, T2, T3) where T0: FromDelta, T1: FromDelta, T2: FromDelta, T3: FromDelta, { fn from_delta(delta: Self::Delta) -> DeltaResult<Self> { Ok(( delta.0.map(<T0>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T0 as Core>::Delta>"))??, delta.1.map(<T1>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T1 as Core>::Delta>"))??, delta.2.map(<T2>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T2 as Core>::Delta>"))??, delta.3.map(<T3>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T3 as Core>::Delta>"))??, )) } } impl<T0, T1, T2, T3> IntoDelta for (T0, T1, T2, T3) where T0: IntoDelta, T1: IntoDelta, T2: IntoDelta, T3: IntoDelta, { fn into_delta(self) -> DeltaResult<Self::Delta> { Ok(( Some(self.0.into_delta()?), Some(self.1.into_delta()?), Some(self.2.into_delta()?), Some(self.3.into_delta()?), )) } } impl<T0, T1, T2, T3, T4> Core for (T0, T1, T2, T3, T4) where T0: Core, T1: Core, T2: Core, T3: Core, T4: Core, { type Delta = ( Option<<T0 as Core>::Delta>, Option<<T1 as Core>::Delta>, Option<<T2 as Core>::Delta>, Option<<T3 as Core>::Delta>, Option<<T4 as Core>::Delta> ); } impl<T0, T1, T2, T3, T4> Apply for (T0, T1, T2, T3, T4) where T0: Apply, T1: Apply, T2: Apply, T3: Apply, T4: Apply, { fn apply(&self, delta: Self::Delta) -> DeltaResult<Self> { let field0: T0 = match delta.0 { Some(d) => self.0.apply(d)?, None => self.0.clone(), }; let field1: T1 = match delta.1 { Some(d) => self.1.apply(d)?, None => self.1.clone(), }; let field2: T2 = match delta.2 { Some(d) => 
self.2.apply(d)?, None => self.2.clone(), }; let field3: T3 = match delta.3 { Some(d) => self.3.apply(d)?, None => self.3.clone(), }; let field4: T4 = match delta.4 { Some(d) => self.4.apply(d)?, None => self.4.clone(), }; Ok((field0, field1, field2, field3, field4)) } } impl<T0, T1, T2, T3, T4> Delta for (T0, T1, T2, T3, T4) where T0: Delta, T1: Delta, T2: Delta, T3: Delta, T4: Delta, { fn delta(&self, rhs: &Self) -> DeltaResult<Self::Delta> { let delta0: <T0 as Core>::Delta = Delta::delta(&self.0, &rhs.0)?; let delta1: <T1 as Core>::Delta = Delta::delta(&self.1, &rhs.1)?; let delta2: <T2 as Core>::Delta = Delta::delta(&self.2, &rhs.2)?; let delta3: <T3 as Core>::Delta = Delta::delta(&self.3, &rhs.3)?; let delta4: <T4 as Core>::Delta = Delta::delta(&self.4, &rhs.4)?; Ok(( if self.0 == rhs.0 { None } else { Some(delta0) }, if self.1 == rhs.1 { None } else { Some(delta1) }, if self.2 == rhs.2 { None } else { Some(delta2) }, if self.3 == rhs.3 { None } else { Some(delta3) }, if self.4 == rhs.4 { None } else { Some(delta4) }, )) } } impl<T0, T1, T2, T3, T4> FromDelta for (T0, T1, T2, T3, T4) where T0: FromDelta, T1: FromDelta, T2: FromDelta, T3: FromDelta, T4: FromDelta, { fn from_delta(delta: Self::Delta) -> DeltaResult<Self> { Ok(( delta.0.map(<T0>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T0 as Core>::Delta>"))??, delta.1.map(<T1>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T1 as Core>::Delta>"))??, delta.2.map(<T2>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T2 as Core>::Delta>"))??, delta.3.map(<T3>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T3 as Core>::Delta>"))??, delta.4.map(<T4>::from_delta) .ok_or_else(|| ExpectedValue!("Option<<T4 as Core>::Delta>"))??, )) } } impl<T0, T1, T2, T3, T4> IntoDelta for (T0, T1, T2, T3, T4) where T0: IntoDelta, T1: IntoDelta, T2: IntoDelta, T3: IntoDelta, T4: IntoDelta, { fn into_delta(self) -> DeltaResult<Self::Delta> { Ok(( Some(self.0.into_delta()?), Some(self.1.into_delta()?), Some(self.2.into_delta()?), Some(self.3.into_delta()?), Some(self.4.into_delta()?), )) } } #[allow(non_snake_case)] #[cfg(test)] mod tests { use serde_json; use super::*; #[test] fn Tuple1__delta__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = (string0,); let tuple1 = (string1,); let delta: <(String,) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[null]"); let delta1: <(String,) as Core>::Delta = serde_json::from_str( &json_string ).expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple1__delta__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = (string0,); let tuple1 = (string1,); let delta: <(String,) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[\"bar\"]"); let delta1: <(String,) as Core>::Delta = serde_json::from_str( &json_string ).expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple1__apply__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = (string0,); let tuple1 = (string1,); let delta: <(String,) as Core>::Delta = tuple0.delta(&tuple1)?; 
println!("delta: {:?}", delta); let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple1__apply__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = (string0,); let tuple1 = (string1,); let delta: <(String,) as Core>::Delta = tuple0.delta(&tuple1)?; let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple2__delta__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = (string0.clone(), string0.clone()); let tuple1 = (string1.clone(), string1.clone()); let delta: <(String, String) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[null,null]"); let delta1: <(String, String) as Core>::Delta = serde_json::from_str( &json_string ).expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple2__delta__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = (string0.clone(), string0.clone()); let tuple1 = (string1.clone(), string1.clone()); let delta: <(String, String) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[\"bar\",\"bar\"]"); let delta1: <(String, String) as Core>::Delta = serde_json::from_str( &json_string ).expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple2__apply__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = (string0.clone(), string0.clone()); let tuple1 = (string1.clone(), string1.clone()); let delta: <(String, String) as Core>::Delta = tuple0.delta(&tuple1)?; println!("delta: {:?}", delta); let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple2__apply__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = (string0.clone(), string0.clone()); let tuple1 = (string1.clone(), string1.clone()); let delta: <(String, String) as Core>::Delta = tuple0.delta(&tuple1)?; let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple3__delta__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[null,null,null]"); let delta1: <( String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn
() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[\"bar\",\"bar\",\"bar\"]"); let delta1: <( String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple3__apply__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; println!("delta: {:?}", delta); let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple3__apply__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple4__delta__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[null,null,null,null]"); let delta1: <( String, String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple4__delta__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[\"bar\",\"bar\",\"bar\",\"bar\"]"); let delta1: <( String, String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple4__apply__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; println!("delta: {:?}", delta); let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, 
tuple2); Ok(()) } #[test] fn Tuple4__apply__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple5__delta__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[null,null,null,null,null]"); let delta1: <( String, String, String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple5__delta__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let json_string = serde_json::to_string(&delta) .expect("Could not serialize to json"); println!("json_string: \"{}\"", json_string); assert_eq!(json_string, "[\"bar\",\"bar\",\"bar\",\"bar\",\"bar\"]"); let delta1: <( String, String, String, String, String ) as Core>::Delta = serde_json::from_str(&json_string) .expect("Could not deserialize from json"); assert_eq!(delta, delta1); Ok(()) } #[test] fn Tuple5__apply__same_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("foo"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; println!("delta: {:?}", delta); let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } #[test] fn Tuple5__apply__different_values() -> DeltaResult<()> { let string0 = String::from("foo"); let string1 = String::from("bar"); let tuple0 = ( string0.clone(), string0.clone(), string0.clone(), string0.clone(), string0.clone() ); let tuple1 = ( string1.clone(), string1.clone(), string1.clone(), string1.clone(), string1.clone() ); let delta: <( String, String, String, String, String ) as Core>::Delta = tuple0.delta(&tuple1)?; let tuple2 = tuple0.apply(delta)?; assert_eq!(tuple1, tuple2); Ok(()) } }
Tuple3__delta__different_values
hello.go
package main import ( "fmt" "time" ) func
(greetingCh chan string) { greetings := []string{"Hello", "Hola", "Bonjour"} for _, greeting := range greetings { greetingCh <- greeting time.Sleep(time.Second) } close(greetingCh) } func printGreetings(greetingCh chan string) { for greeting := range greetingCh { fmt.Println(greeting) } fmt.Println("No more greetings.") } func main() { greetingCh := make(chan string) go generateGreetings(greetingCh) go printGreetings(greetingCh) time.Sleep(time.Second * 5) }
generateGreetings
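// A minimal alternative sketch (not part of hello.go above; file and
// variable names here are hypothetical). The main above parks on a fixed
// time.Sleep, which over- or under-waits if the producer changes pace.
// A done channel lets main return as soon as the consumer has drained
// the greetings channel.
package main

import "fmt"

func main() {
	greetingCh := make(chan string)
	done := make(chan struct{})
	go func() {
		for _, greeting := range []string{"Hello", "Hola", "Bonjour"} {
			greetingCh <- greeting
		}
		close(greetingCh) // no more values; the range below will terminate
	}()
	go func() {
		for greeting := range greetingCh {
			fmt.Println(greeting)
		}
		fmt.Println("No more greetings.")
		close(done) // signal completion instead of sleeping in main
	}()
	<-done // block until the consumer goroutine finishes
}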
functions.rs
use neon::prelude::*; use neon::object::This; use neon::result::Throw; fn add1(mut cx: FunctionContext) -> JsResult<JsNumber> { let x = cx.argument::<JsNumber>(0)?.value(); Ok(cx.number(x + 1.0)) } pub fn return_js_function(mut cx: FunctionContext) -> JsResult<JsFunction> { JsFunction::new(&mut cx, add1) } pub fn call_js_function(mut cx: FunctionContext) -> JsResult<JsNumber> { let f = cx.argument::<JsFunction>(0)?; let args: Vec<Handle<JsNumber>> = vec![cx.number(16.0)]; let null = cx.null(); f.call(&mut cx, null, args)?.downcast::<JsNumber>().or_throw(&mut cx)
} pub fn construct_js_function(mut cx: FunctionContext) -> JsResult<JsNumber> { let f = cx.argument::<JsFunction>(0)?; let zero = cx.number(0.0); let o = f.construct(&mut cx, vec![zero])?; let get_utc_full_year_method = o.get(&mut cx, "getUTCFullYear")?.downcast::<JsFunction>().or_throw(&mut cx)?; let args: Vec<Handle<JsValue>> = vec![]; get_utc_full_year_method.call(&mut cx, o.upcast::<JsValue>(), args)?.downcast::<JsNumber>().or_throw(&mut cx) } trait CheckArgument<'a> { fn check_argument<V: Value>(&mut self, i: i32) -> JsResult<'a, V>; } impl<'a, T: This> CheckArgument<'a> for CallContext<'a, T> { fn check_argument<V: Value>(&mut self, i: i32) -> JsResult<'a, V> { self.argument::<V>(i) } } pub fn check_string_and_number(mut cx: FunctionContext) -> JsResult<JsUndefined> { cx.check_argument::<JsString>(0)?; cx.check_argument::<JsNumber>(1)?; Ok(cx.undefined()) } pub fn panic(_: FunctionContext) -> JsResult<JsUndefined> { panic!("zomg") } pub fn panic_after_throw(mut cx: FunctionContext) -> JsResult<JsUndefined> { cx.throw_range_error::<_, ()>("entering throw state with a RangeError").unwrap_err(); panic!("this should override the RangeError") } pub fn num_arguments(mut cx: FunctionContext) -> JsResult<JsNumber> { let n = cx.len(); Ok(cx.number(n)) } pub fn return_this(mut cx: FunctionContext) -> JsResult<JsValue> { Ok(cx.this().upcast()) } pub fn require_object_this(mut cx: FunctionContext) -> JsResult<JsUndefined> { let this = cx.this(); let this = this.downcast::<JsObject>().or_throw(&mut cx)?; let t = cx.boolean(true); this.set(&mut cx, "modified", t)?; Ok(cx.undefined()) } pub fn is_argument_zero_some(mut cx: FunctionContext) -> JsResult<JsBoolean> { let b = cx.argument_opt(0).is_some(); Ok(cx.boolean(b)) } pub fn require_argument_zero_string(mut cx: FunctionContext) -> JsResult<JsString> { let s = cx.argument(0)?; Ok(s) } pub fn execute_scoped(mut cx: FunctionContext) -> JsResult<JsNumber> { let mut i = 0; for _ in 1..100 { cx.execute_scoped(|mut cx| { let n = cx.number(1); i += n.value() as i32; }); } Ok(cx.number(i)) } pub fn compute_scoped(mut cx: FunctionContext) -> JsResult<JsNumber> { let mut i = cx.number(0); for _ in 1..100 { i = cx.compute_scoped(|mut cx| { let n = cx.number(1); Ok(cx.number((i.value() as i32) + (n.value() as i32))) })?; } Ok(i) } pub fn throw_and_catch(mut cx: FunctionContext) -> JsResult<JsValue> { let v = cx.argument_opt(0).unwrap_or_else(|| cx.undefined().upcast()); Ok(cx.try_catch(|cx| { let _ = cx.throw(v)?; Ok(cx.string("unreachable").upcast()) }).unwrap_or_else(|err| err)) } pub fn call_and_catch(mut cx: FunctionContext) -> JsResult<JsValue> { let f: Handle<JsFunction> = cx.argument(0)?; Ok(cx.try_catch(|cx| { let global = cx.global(); let args: Vec<Handle<JsValue>> = vec![]; f.call(cx, global, args) }).unwrap_or_else(|err| err)) } pub fn panic_and_catch(mut cx: FunctionContext) -> JsResult<JsValue> { Ok(cx.try_catch(|_| { panic!("oh no") }) .unwrap_or_else(|err| err)) } pub fn unexpected_throw_and_catch(mut cx: FunctionContext) -> JsResult<JsValue> { Ok(cx.try_catch(|_| { Err(Throw) }) .unwrap_or_else(|err| err)) }
translate_scale.rs
//! A transformation that includes both scale and translation. use std::ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}; use crate::{Affine, Circle, CubicBez, Line, Point, QuadBez, Rect, RoundedRect, Vec2}; /// A transformation including scaling and translation. /// /// If the translation is `(x, y)` and the scale is `s`, then this /// transformation represents this augmented matrix: /// /// ```text /// | s 0 x | /// | 0 s y | /// | 0 0 1 | /// ``` /// /// See [`Affine`](struct.Affine.html) for more details about the /// equivalence with augmented matrices. /// /// Various multiplication ops are defined, and these are all defined /// to be consistent with matrix multiplication. Therefore, /// `TranslateScale * Point` is defined but not the other way around. /// /// Also note that multiplication is not commutative. Thus, /// `TranslateScale::scale(2.0) * TranslateScale::translate(Vec2::new(1.0, 0.0))` /// has a translation of (2, 0), while /// `TranslateScale::translate(Vec2::new(1.0, 0.0)) * TranslateScale::scale(2.0)` /// has a translation of (1, 0). (Both have a scale of 2; also note that /// the first case can be written /// `2.0 * TranslateScale::translate(Vec2::new(1.0, 0.0))` as this case /// has an implicit conversion). /// /// This transformation is less powerful than `Affine`, but can be applied /// to more primitives, especially including [`Rect`](struct.Rect.html). #[derive(Clone, Copy, Debug)] pub struct TranslateScale { translation: Vec2, scale: f64, } impl TranslateScale { /// Create a new transformation from translation and scale. #[inline] pub const fn new(translation: Vec2, scale: f64) -> TranslateScale { TranslateScale { translation, scale } } /// Create a new transformation with scale only. #[inline] pub const fn scale(s: f64) -> TranslateScale { TranslateScale::new(Vec2::ZERO, s) } /// Create a new transformation with translation only. #[inline] pub const fn translate(t: Vec2) -> TranslateScale { TranslateScale::new(t, 1.0) } /// Decompose transformation into translation and scale. pub fn as_tuple(self) -> (Vec2, f64) { (self.translation, self.scale) } /// Compute the inverse transform. /// /// Multiplying a transform with its inverse (either on the /// left or right) results in the identity transform /// (modulo floating point rounding errors). /// /// Produces NaN values when scale is zero. 
pub fn inverse(self) -> TranslateScale { let scale_recip = self.scale.recip(); TranslateScale { translation: self.translation * -scale_recip, scale: scale_recip, } } } impl Default for TranslateScale { #[inline] fn default() -> TranslateScale { TranslateScale::scale(1.0) } } impl From<TranslateScale> for Affine { fn from(ts: TranslateScale) -> Affine { let TranslateScale { translation, scale } = ts; Affine::new([scale, 0.0, 0.0, scale, translation.x, translation.y]) } } impl Mul<Point> for TranslateScale { type Output = Point; #[inline] fn mul(self, other: Point) -> Point { (self.scale * other.to_vec2()).to_point() + self.translation } } impl Mul for TranslateScale { type Output = TranslateScale; #[inline] fn mul(self, other: TranslateScale) -> TranslateScale { TranslateScale { translation: self.translation + self.scale * other.translation, scale: self.scale * other.scale, } } } impl MulAssign for TranslateScale { #[inline] fn mul_assign(&mut self, other: TranslateScale) { *self = self.mul(other); } } impl Mul<TranslateScale> for f64 { type Output = TranslateScale; #[inline] fn mul(self, other: TranslateScale) -> TranslateScale { TranslateScale { translation: other.translation * self, scale: other.scale * self, } } } impl Add<Vec2> for TranslateScale { type Output = TranslateScale; #[inline] fn add(self, other: Vec2) -> TranslateScale { TranslateScale { translation: self.translation + other, scale: self.scale, } } } impl Add<TranslateScale> for Vec2 { type Output = TranslateScale; #[inline] fn add(self, other: TranslateScale) -> TranslateScale { other + self } } impl AddAssign<Vec2> for TranslateScale { #[inline] fn add_assign(&mut self, other: Vec2) { *self = self.add(other); } } impl Sub<Vec2> for TranslateScale { type Output = TranslateScale; #[inline] fn sub(self, other: Vec2) -> TranslateScale { TranslateScale { translation: self.translation - other, scale: self.scale, } } } impl SubAssign<Vec2> for TranslateScale { #[inline] fn sub_assign(&mut self, other: Vec2) { *self = self.sub(other); } } impl Mul<Circle> for TranslateScale { type Output = Circle; #[inline] fn mul(self, other: Circle) -> Circle { Circle::new(self * other.center, self.scale * other.radius) } } impl Mul<Line> for TranslateScale { type Output = Line; #[inline] fn mul(self, other: Line) -> Line { Line::new(self * other.p0, self * other.p1) } } impl Mul<Rect> for TranslateScale { type Output = Rect; #[inline] fn mul(self, other: Rect) -> Rect { let pt0 = self * Point::new(other.x0, other.y0); let pt1 = self * Point::new(other.x1, other.y1); (pt0, pt1).into() } } impl Mul<RoundedRect> for TranslateScale { type Output = RoundedRect; #[inline] fn mul(self, other: RoundedRect) -> RoundedRect
} impl Mul<QuadBez> for TranslateScale { type Output = QuadBez; #[inline] fn mul(self, other: QuadBez) -> QuadBez { QuadBez::new(self * other.p0, self * other.p1, self * other.p2) } } impl Mul<CubicBez> for TranslateScale { type Output = CubicBez; #[inline] fn mul(self, other: CubicBez) -> CubicBez { CubicBez::new( self * other.p0, self * other.p1, self * other.p2, self * other.p3, ) } } #[cfg(test)] mod tests { use crate::{Affine, Point, TranslateScale, Vec2}; fn assert_near(p0: Point, p1: Point) { assert!((p1 - p0).hypot() < 1e-9, "{:?} != {:?}", p0, p1); } #[test] fn translate_scale() { let p = Point::new(3.0, 4.0); let ts = TranslateScale::new(Vec2::new(5.0, 6.0), 2.0); assert_near(ts * p, Point::new(11.0, 14.0)); } #[test] fn conversions() { let p = Point::new(3.0, 4.0); let s = 2.0; let t = Vec2::new(5.0, 6.0); let ts = TranslateScale::new(t, s); // Test that conversion to affine is consistent. let a: Affine = ts.into(); assert_near(ts * p, a * p); assert_near((s * p.to_vec2()).to_point(), TranslateScale::scale(s) * p); assert_near(p + t, TranslateScale::translate(t) * p); } #[test] fn inverse() { let p = Point::new(3.0, 4.0); let ts = TranslateScale::new(Vec2::new(5.0, 6.0), 2.0); assert_near(p, (ts * ts.inverse()) * p); assert_near(p, (ts.inverse() * ts) * p); } }
{ RoundedRect::from_rect(self * other.rect(), self.scale * other.radius()) }
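// A minimal test-style sketch, using only the TranslateScale API shown
// above, that pins down the composition-order example from the type-level
// docs: scale-then-translate and translate-then-scale differ in their
// translation component.
#[cfg(test)]
mod composition_order_sketch {
    use crate::{TranslateScale, Vec2};

    #[test]
    fn multiplication_is_not_commutative() {
        let translate = TranslateScale::translate(Vec2::new(1.0, 0.0));
        let scale = TranslateScale::scale(2.0);
        // scale * translate: the inner translation is scaled to (2, 0).
        assert_eq!((scale * translate).as_tuple().0, Vec2::new(2.0, 0.0));
        // translate * scale: the translation stays at (1, 0).
        assert_eq!((translate * scale).as_tuple().0, Vec2::new(1.0, 0.0));
    }
}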
ANN.py
from sklearn import neural_network import learners class ANNLearner(learners.BaseLearner): def __init__(self, hidden_layer_sizes=(100,), activation="relu", solver='adam', alpha=0.0001, batch_size='auto', learning_rate="constant", learning_rate_init=0.001, power_t=0.5, max_iter=200, shuffle=True, random_state=None, tol=1e-4, verbose=False, warm_start=False, momentum=0.9, nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-8, ): super().__init__(verbose) self._learner = neural_network.MLPClassifier( hidden_layer_sizes=hidden_layer_sizes, activation=activation, solver=solver, alpha=alpha, batch_size=batch_size, learning_rate=learning_rate, learning_rate_init=learning_rate_init, power_t=power_t, max_iter=max_iter, shuffle=shuffle, random_state=random_state, tol=tol, verbose=verbose, warm_start=warm_start, momentum=momentum, nesterovs_momentum=nesterovs_momentum, early_stopping=early_stopping, validation_fraction=validation_fraction, beta_1=beta_1, beta_2=beta_2, epsilon=epsilon ) def
(self): return self._learner
learner
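# A minimal usage sketch (toy XOR-style data; the helper name is
# hypothetical). The wrapper exposes the underlying sklearn estimator
# through the `learner` property above, so fit/predict go through that
# handle.
def _demo_ann_learner():
    ann = ANNLearner(hidden_layer_sizes=(10,), max_iter=500, random_state=0)
    X = [[0, 0], [0, 1], [1, 0], [1, 1]]
    y = [0, 1, 1, 0]
    ann.learner.fit(X, y)
    return ann.learner.predict(X)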
base_test.go
package dao_test import ( "log" "os" "testing" "github.com/inspursoft/wand/src/daemonworker/dao" ) func TestInitDb(t *testing.T) { dao.InitDB() } func TestAddOrUpdateUserAccess(t *testing.T) { err := dao.AddOrUpdateUserAccess("tester1", "123456") if err != nil { log.Printf("Failed to insert data:%+v\n", err) } err = dao.AddOrUpdateUserAccess("tester1", "456789") if err != nil { log.Printf("Failed to insert data:%+v\n", err) } } func
(t *testing.T) { userAccess := dao.GetUserAccess("tester1") log.Printf("User access:%+v\n", userAccess) } func TestCleanUp(t *testing.T) { os.Remove("storage.db") }
TestGetUserAccess
post_characters_character_id_assets_locations_position.rs
/* * EVE Swagger Interface * * An OpenAPI for EVE Online * * OpenAPI spec version: 1.3.8 * * Generated by: https://github.com/swagger-api/swagger-codegen.git */ /// PostCharactersCharacterIdAssetsLocationsPosition : position object #[allow(unused_imports)] use serde_json::Value; #[derive(Debug, Serialize, Deserialize)] pub struct PostCharactersCharacterIdAssetsLocationsPosition { /// x number #[serde(rename = "x")] x: f64, /// y number #[serde(rename = "y")] y: f64, /// z number #[serde(rename = "z")] z: f64 } impl PostCharactersCharacterIdAssetsLocationsPosition { /// position object pub fn new(x: f64, y: f64, z: f64) -> PostCharactersCharacterIdAssetsLocationsPosition { PostCharactersCharacterIdAssetsLocationsPosition { x: x, y: y, z: z } } pub fn set_x(&mut self, x: f64) { self.x = x; } pub fn with_x(mut self, x: f64) -> PostCharactersCharacterIdAssetsLocationsPosition { self.x = x; self } pub fn x(&self) -> &f64 { &self.x } pub fn set_y(&mut self, y: f64) { self.y = y; } pub fn with_y(mut self, y: f64) -> PostCharactersCharacterIdAssetsLocationsPosition { self.y = y; self } pub fn y(&self) -> &f64 { &self.y } pub fn set_z(&mut self, z: f64) { self.z = z; }
self.z = z; self } pub fn z(&self) -> &f64 { &self.z } }
pub fn with_z(mut self, z: f64) -> PostCharactersCharacterIdAssetsLocationsPosition {
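// A minimal usage sketch of the generated builder-style setters above
// (illustrative coordinates only):
//
//     let pos = PostCharactersCharacterIdAssetsLocationsPosition::new(0.0, 0.0, 0.0)
//         .with_x(1.5)
//         .with_y(-2.0)
//         .with_z(3.25);
//     assert_eq!(*pos.x(), 1.5);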
file_util.py
# @author: ww import os import pickle import re import yaml def read_yaml(yaml_file): """ Read yaml file :param yaml_file: :return: >>> read_yaml('test_resource/test_read.yml') {'date': datetime.date(2019, 11, 6), 'pkg': {'python': {'version': '3.6.8', 'date': '{{ date }}'}, 'django': {'version': "{% if pkg.python.version|first == '2' %}1.8{% else %}2.2.6{% endif %}"}}} """ with open(yaml_file) as f: data_str = f.read() data = yaml.safe_load(data_str) return data def write_yaml(yaml_file, data): """ Write yaml file :param yaml_file: :param data: :return: >>> import datetime >>> data = {'date': datetime.date(2019, 11, 6), 'pkg': {'python': {'version': '3.6.8', 'date': '{{ date }}'}, 'django': {'version': "{% if pkg.python.version|first == '2' %}1.8{% else %}2.2.6{% endif %}"}}} >>> write_yaml('test_resource/test_write.yaml', data) """ with open(yaml_file, 'w') as f: yaml.dump(data, f) def
(pkl_path): with open(pkl_path, 'rb') as f: return pickle.load(f) def save_pickle(data, pkl_path): os.system('mkdir -p {}'.format(os.path.dirname(pkl_path))) with open(pkl_path, 'wb') as f: pickle.dump(data, f) def makedirs(path, verbose=True, stdout=print): """ A wrap function for os.makedirs :param path: :param verbose: show message :param stdout: :return: >>> _ = os.system('rm -R testmakedirs') >>> makedirs('testmakedirs', True) make dirs: testmakedirs >>> makedirs('testmakedirs', True) """ if not os.path.exists(path): os.makedirs(path) if verbose: stdout(f'make dirs: {path}') def make_parent_dirs(path, verbose=True, stdout=print): """ :param path: :param verbose: :param stdout: :return: """ p, d = os.path.split(path) makedirs(p, verbose, stdout) def match_files(path, pattern): """ Match pattern str and return match files or dirs :param path: :param pattern: :return: >>> path = './' >>> pattern = 'file_*\.py' >>> match_files(path, pattern) [] >>> path = './' >>> pattern = 'file_.*\.py' >>> match_files(path, pattern) ['./file_util.py'] """ file_pattern = re.compile(pattern) # list file and match specified pattern filename_list = os.listdir(path) match_result = list(map(file_pattern.match, filename_list)) match_result = list(filter(lambda x: x[0] is not None, zip(match_result, filename_list))) match_filenames = list(map(lambda x: x[1], match_result)) match_file_paths = list(map(lambda x: os.path.join(path, x), match_filenames)) return match_file_paths def match_paths(root, path_pattern): """ Match pattern str and return match paths :param root: :param path_pattern: :return: >>> root = '' >>> pattern = 'file_.*' >>> match_paths(root, pattern) ['file_util.py'] """ root = os.getcwd() if root == '' else root if path_pattern == '': # end return [] split_list = path_pattern.split('/', maxsplit=1) # a/b/c -> [a b/c] if len(split_list) == 1: split_list.append('') head, tail_pattern = split_list files = os.listdir(root) pattern = re.compile(head) match_result = list(filter(lambda x: pattern.fullmatch(x) is not None, files)) if len(match_result) == 0: # match fail return None # match all child paths all_match_paths = [] for cur_file in match_result: child_match_paths = match_paths(os.path.join(root, cur_file), tail_pattern) if child_match_paths is None: continue elif len(child_match_paths) == 0: # child match fail, so skip all_match_paths.append(cur_file) else: all_match_paths.extend(map(lambda x: os.path.join(cur_file, x), child_match_paths)) return all_match_paths if __name__ == '__main__': import doctest doctest.testmod() # print(read_yaml('test_resource/test_read.yml')) # makedirs('a')
read_pickle
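# A minimal round-trip sketch for the pickle helpers above (the target
# path is hypothetical; save_pickle creates the parent directory first):
#
#     save_pickle({'a': 1, 'b': [2, 3]}, 'test_resource/tmp/demo.pkl')
#     assert read_pickle('test_resource/tmp/demo.pkl') == {'a': 1, 'b': [2, 3]}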
test_cache_insert_error.py
# # Copyright(c) 2019-2021 Intel Corporation # SPDX-License-Identifier: BSD-3-Clause # import pytest from api.cas import casadm from api.cas.cache_config import ( CacheMode, CacheLineSize,
CleaningPolicy, CacheStatus, ) from core.test_run import TestRun from storage_devices.disk import DiskTypeSet, DiskType, DiskTypeLowerThan from test_tools.device_mapper import ErrorDevice, DmTable from test_tools.fio.fio import Fio from test_tools.fio.fio_param import ReadWrite, IoEngine, ErrorFilter, VerifyMethod from test_utils.os_utils import Udev from test_utils.size import Size, Unit @pytest.mark.parametrizex("cache_line_size", CacheLineSize) @pytest.mark.parametrizex("cache_mode", CacheMode) @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand])) @pytest.mark.require_disk("core", DiskTypeLowerThan("cache")) def test_cache_insert_error(cache_mode, cache_line_size): """ title: Cache insert test with error device description: | Validate CAS ability to handle write errors while it tries to insert cache lines. For lazy writes cache modes (WO, WB) issue only reads. pass_criteria: - No I/O errors returned to the user - Cache write error statistics are counted properly - No cache line gets inserted into cache """ with TestRun.step("Prepare core and cache"): cache, core, core_device = prepare_configuration(cache_mode, cache_line_size) fio_cmd = ( Fio() .create_command() .io_engine(IoEngine.libaio) .size(core.size) .block_size(cache_line_size) .target(core) .direct() ) if cache_mode in [CacheMode.WB, CacheMode.WO]: fio_cmd = fio_cmd.read_write(ReadWrite.randread) else: fio_cmd = fio_cmd.read_write(ReadWrite.randrw).verify_pattern().verify(VerifyMethod.pattern) with TestRun.step("Run fio and verify no errors present"): fio_errors = fio_cmd.run()[0].total_errors() if fio_errors != 0: TestRun.fail(f"Some I/O ended with errors {fio_errors}") with TestRun.step("Check error statistics on cache"): stats = cache.get_statistics() occupancy = cache.get_occupancy().get_value() if occupancy != 0: TestRun.fail(f"Occupancy is not zero, but {occupancy}") cache_writes = stats.block_stats.cache.writes / cache_line_size.value cache_errors = stats.error_stats.cache.total if cache_writes != cache_errors: TestRun.fail( f"Cache errors ({cache_errors}) should equal to number of" f" requests to cache ({cache_writes})" ) if cache_mode not in [CacheMode.WB, CacheMode.WO]: with TestRun.step("Verify core device contents for non-lazy-writes cache modes"): cache.stop() fio_cmd.target(core_device).verify_only().run() @pytest.mark.parametrizex("cache_line_size", CacheLineSize) @pytest.mark.parametrizex("cache_mode", [CacheMode.WB, CacheMode.WO]) @pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand])) @pytest.mark.require_disk("core", DiskTypeLowerThan("cache")) def test_cache_write_lazy_insert_error(cache_mode, cache_line_size): """ title: Cache insert test with error device for writes on lazy writes cache mode description: | Validate CAS ability to handle write errors while it tries to insert cache lines. This test is exclusively for lazy writes cache modes. 
pass_criteria: - I/O errors returned to user - Cache automatically stops after encountering errors - No cache line gets inserted into cache """ with TestRun.step("Prepare core and cache"): cache, core, _ = prepare_configuration(cache_mode, cache_line_size) with TestRun.step("Run fio and verify errors are present"): fio_errors = ( Fio() .create_command() .io_engine(IoEngine.libaio) .size(core.size) .block_size(cache_line_size) .read_write(ReadWrite.randwrite) .target(core) .continue_on_error(ErrorFilter.io) .direct() .run()[0] .total_errors() ) if fio_errors == 0: TestRun.fail(f"No I/O ended with error") with TestRun.step("Check error statistics and state on cache"): stats = cache.get_statistics() occupancy = cache.get_occupancy().get_value() if occupancy != 0: TestRun.fail(f"Occupancy is not zero, but {occupancy}") cache_writes = stats.block_stats.cache.writes / cache_line_size.value cache_errors = stats.error_stats.cache.total if cache_writes != 1: TestRun.fail(f"There only should be one cache write attempt before cache stop") if cache_writes != cache_errors: TestRun.fail( f"Cache errors ({cache_errors}) should equal to number of requests to" f" cache ({cache_writes})" ) state = cache.get_status() if state != CacheStatus.not_running: TestRun.fail(f"Cache should be in 'Not running' state, and it's {state}") def prepare_configuration(cache_mode, cache_line_size): cache_device = TestRun.disks["cache"] core_device = TestRun.disks["core"] with TestRun.step("Creating cache partition"): cache_device.create_partitions([Size(50, Unit.MebiByte)]) with TestRun.step("Creating cache error device"): error_device = ErrorDevice("error", cache_device.partitions[0]) with TestRun.step("Starting cache to check metadata offset"): cache = casadm.start_cache(error_device, cache_line_size=cache_line_size, force=True) cache_size = cache.size cache.stop() with TestRun.step("Setting errors on non-metadata area"): error_device.change_table( DmTable.error_table( offset=(cache_device.partitions[0].size - cache_size).get_value(Unit.Blocks512), size=cache_size, ).fill_gaps(cache_device.partitions[0]) ) with TestRun.step("Create core partition with size of usable cache space"): core_device.create_partitions([cache_size]) with TestRun.step("Starting and configuring cache"): cache = casadm.start_cache( error_device, cache_mode=cache_mode, cache_line_size=cache_line_size, force=True ) result = cache.set_seq_cutoff_policy(SeqCutOffPolicy.never) if result.exit_code: TestRun.LOGGER.exception("Couldn't set seq cutoff policy") result = cache.set_cleaning_policy(CleaningPolicy.nop) if result.exit_code: TestRun.LOGGER.exception("Couldn't set cleaning policy") with TestRun.step("Stopping udev"): Udev.disable() with TestRun.step("Adding core device"): core = cache.add_core(core_dev=core_device.partitions[0]) return cache, core, core_device.partitions[0]
SeqCutOffPolicy,
sem_utils.py
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. # -*- coding: utf-8 -*- """ # @Time : 2019/5/27 # @Author : Jiaqi&Zecheng # @File : sem_utils.py # @Software: PyCharm """ import os import json import re as regex import spacy from nltk.stem import WordNetLemmatizer wordnet_lemmatizer = WordNetLemmatizer() nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner']) def partial_match(query, table_name): query = [token.lemma_ for token in nlp(query)] table_name = [nlp(token)[0].lemma_ for token in table_name] if query in table_name: return True return False def is_partial_match(query, table_names): query = nlp(query)[0].lemma_ table_names = [[token.lemma_ for token in nlp(names)] for names in table_names] same_count = 0 result = None for names in table_names: if query in names: same_count += 1 result = names return result if same_count == 1 else False def multi_option(question, q_ind, names, N): for i in range(q_ind + 1, q_ind + N + 1): if i < len(question): re = is_partial_match(question[i][0], names) if re is not False: return re return False def multi_equal(question, q_ind, names, N): for i in range(q_ind + 1, q_ind + N + 1): if i < len(question): if question[i] == names: return i return False def random_choice(question_arg, question_arg_type, names, ground_col_labels, q_ind, N, origin_name): # first try if there are other table for t_ind, t_val in enumerate(question_arg_type): if t_val == ['table']: return names[origin_name.index(question_arg[t_ind])] for i in range(q_ind + 1, q_ind + N + 1): if i < len(question_arg): if len(ground_col_labels) == 0: for n in names: if partial_match(question_arg[i][0], n) is True: return n else: for n_id, n in enumerate(names): if n_id in ground_col_labels and partial_match(question_arg[i][0], n) is True: return n if len(ground_col_labels) > 0: return names[ground_col_labels[0]] else:
def alter_column0(datas): """ Attach column * table :return: model_result_replace """ zero_count = 0 count = 0 result = [] for d in datas: if 'C(0)' in d['model_result']: pattern = regex.compile('C\(.*?\) T\(.*?\)') result_pattern = list(set(pattern.findall(d['model_result']))) ground_col_labels = [] for pa in result_pattern: pa = pa.split(' ') if pa[0] != 'C(0)': index = int(pa[1][2:-1]) ground_col_labels.append(index) ground_col_labels = list(set(ground_col_labels)) question_arg_type = d['question_arg_type'] question_arg = d['question_arg'] table_names = [[token.lemma_ for token in nlp(names)] for names in d['table_names']] origin_table_names = [[wordnet_lemmatizer.lemmatize(x.lower()) for x in names.split(' ')] for names in d['table_names']] count += 1 easy_flag = False for q_ind, q in enumerate(d['question_arg']): q_str = " ".join(" ".join(x) for x in d['question_arg']) if 'how many' in q_str or 'number of' in q_str or 'count of' in q_str: easy_flag = True if easy_flag: # check for the last one is a table word for q_ind, q in enumerate(d['question_arg']): if (q_ind > 0 and q == ['many'] and d['question_arg'][q_ind - 1] == ['how']) or ( q_ind > 0 and q == ['of'] and d['question_arg'][q_ind - 1] == ['number']) or ( q_ind > 0 and q == ['of'] and d['question_arg'][q_ind - 1] == ['count']): re = multi_equal(question_arg_type, q_ind, ['table'], 2) if re is not False: # This step work for the number of [table] example table_result = table_names[origin_table_names.index(question_arg[re])] result.append((d['query'], d['question'], table_result, d)) break else: re = multi_option(question_arg, q_ind, d['table_names'], 2) if re is not False: table_result = re result.append((d['query'], d['question'], table_result, d)) pass else: re = multi_equal(question_arg_type, q_ind, ['table'], len(question_arg_type)) if re is not False: # This step work for the number of [table] example table_result = table_names[origin_table_names.index(question_arg[re])] result.append((d['query'], d['question'], table_result, d)) break pass table_result = random_choice(question_arg=question_arg, question_arg_type=question_arg_type, names=table_names, ground_col_labels=ground_col_labels, q_ind=q_ind, N=2, origin_name=origin_table_names) result.append((d['query'], d['question'], table_result, d)) zero_count += 1 break else: M_OP = False for q_ind, q in enumerate(d['question_arg']): if M_OP is False and q in [['than'], ['least'], ['most'], ['msot'], ['fewest']] or \ question_arg_type[q_ind] == ['M_OP']: M_OP = True re = multi_equal(question_arg_type, q_ind, ['table'], 3) if re is not False: # This step work for the number of [table] example table_result = table_names[origin_table_names.index(question_arg[re])] result.append((d['query'], d['question'], table_result, d)) break else: re = multi_option(question_arg, q_ind, d['table_names'], 3) if re is not False: table_result = re # print(table_result) result.append((d['query'], d['question'], table_result, d)) pass else: # zero_count += 1 re = multi_equal(question_arg_type, q_ind, ['table'], len(question_arg_type)) if re is not False: # This step work for the number of [table] example table_result = table_names[origin_table_names.index(question_arg[re])] result.append((d['query'], d['question'], table_result, d)) break table_result = random_choice(question_arg=question_arg, question_arg_type=question_arg_type, names=table_names, ground_col_labels=ground_col_labels, q_ind=q_ind, N=2, origin_name=origin_table_names) result.append((d['query'], d['question'], table_result, d)) pass 
if M_OP is False: table_result = random_choice(question_arg=question_arg, question_arg_type=question_arg_type, names=table_names, ground_col_labels=ground_col_labels, q_ind=q_ind, N=2, origin_name=origin_table_names) result.append((d['query'], d['question'], table_result, d)) for re in result: table_names = [[token.lemma_ for token in nlp(names)] for names in re[3]['table_names']] origin_table_names = [[x for x in names.split(' ')] for names in re[3]['table_names']] if re[2] in table_names: re[3]['rule_count'] = table_names.index(re[2]) else: re[3]['rule_count'] = origin_table_names.index(re[2]) for data in datas: if 'rule_count' in data: str_replace = 'C(0) T(' + str(data['rule_count']) + ')' replace_result = regex.sub('C\(0\) T\(.\)', str_replace, data['model_result']) data['model_result_replace'] = replace_result else: data['model_result_replace'] = data['model_result']
return names[0]
sqlstate.rs
use linked_hash_map::LinkedHashMap; use phf_codegen; use std::fs::File; use std::io::{BufWriter, Write}; const ERRCODES_TXT: &str = include_str!("errcodes.txt"); pub fn build() { let mut file = BufWriter::new(File::create("../tokio-postgres/src/error/sqlstate.rs").unwrap()); let codes = parse_codes(); make_type(&mut file); make_consts(&codes, &mut file); make_map(&codes, &mut file); } fn parse_codes() -> LinkedHashMap<String, Vec<String>> { let mut codes = LinkedHashMap::new(); for line in ERRCODES_TXT.lines() { if line.starts_with('#') || line.starts_with("Section") || line.trim().is_empty() {
let mut it = line.split_whitespace(); let code = it.next().unwrap().to_owned(); it.next(); let name = it.next().unwrap().replace("ERRCODE_", ""); codes.entry(code).or_insert_with(Vec::new).push(name); } codes } fn make_type(file: &mut BufWriter<File>) { write!( file, "// Autogenerated file - DO NOT EDIT use std::borrow::Cow; /// A SQLSTATE error code #[derive(PartialEq, Eq, Clone, Debug)] pub struct SqlState(Cow<'static, str>); impl SqlState {{ /// Creates a `SqlState` from its error code. pub fn from_code(s: &str) -> SqlState {{ match SQLSTATE_MAP.get(s) {{ Some(state) => state.clone(), None => SqlState(Cow::Owned(s.to_string())), }} }} /// Returns the error code corresponding to the `SqlState`. pub fn code(&self) -> &str {{ &self.0 }} " ) .unwrap(); } fn make_consts(codes: &LinkedHashMap<String, Vec<String>>, file: &mut BufWriter<File>) { for (code, names) in codes { for name in names { write!( file, r#" /// {code} pub const {name}: SqlState = SqlState(Cow::Borrowed("{code}")); "#, name = name, code = code, ) .unwrap(); } } write!(file, "}}").unwrap(); } fn make_map(codes: &LinkedHashMap<String, Vec<String>>, file: &mut BufWriter<File>) { let mut builder = phf_codegen::Map::new(); for (code, names) in codes { builder.entry(&**code, &format!("SqlState::{}", &names[0])); } write!( file, " #[rustfmt::skip] static SQLSTATE_MAP: phf::Map<&'static str, SqlState> = \n{};\n", builder.build() ).unwrap(); }
continue; }
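// A sketch of the errcodes.txt line shape that parse_codes above relies on
// (illustrative rows in PostgreSQL's published format): comment and
// "Section" lines are skipped, and each data row carries the SQLSTATE
// code, a class letter, the ERRCODE_ macro name (stripped down to the
// constant name), and a spec-condition name.
//
//     00000    E    ERRCODE_SUCCESSFUL_COMPLETION    successful_completion
//     23505    E    ERRCODE_UNIQUE_VIOLATION         unique_violation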
generate_cert.go
// adopted from https://golang.org/src/crypto/tls/generate_cert.go?m=text // Generate a self-signed X.509 certificate for a TLS server. package testcert import ( "crypto/ecdsa" "crypto/elliptic" "crypto/rand" "crypto/rsa" "crypto/x509" "crypto/x509/pkix" "encoding/pem" "errors" "fmt" "log" "math/big" "net" "os" "strings" "time" ) /* var ( host = flag.String("host", "", "Comma-separated hostnames and IPs to generate a certificate for") validFrom = flag.String("start-date", "", "Creation date formatted as Jan 1 15:04:05 2011") validFor = flag.Duration("duration", 365*24*time.Hour, "Duration that certificate is valid for") isCA = flag.Bool("ca", false, "whether this cert should be its own Certificate Authority") rsaBits = flag.Int("rsa-bits", 2048, "Size of RSA key to generate. Ignored if --ecdsa-curve is set") ecdsaCurve = flag.String("ecdsa-curve", "", "ECDSA curve to use to generate a key. Valid values are P224, P256, P384, P521") ) */ func publicKey(priv interface{}) interface{} { switch k := priv.(type) { case *rsa.PrivateKey: return &k.PublicKey case *ecdsa.PrivateKey: return &k.PublicKey default: return nil } }
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}, nil case *ecdsa.PrivateKey: b, err := x509.MarshalECPrivateKey(k) if err != nil { err = fmt.Errorf("Unable to marshal ECDSA private key: %v", err) } return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}, err default: return nil, errors.New("not a private key") } } // validFrom - Creation date formatted as Jan 1 15:04:05 2011 or "" func GenerateCert(host string, validFrom string, validFor time.Duration, isCA bool, rsaBits int, ecdsaCurve string, dirPrefix string) (err error) { if len(host) == 0 { log.Fatalf("Missing required --host parameter") } var priv interface{} switch ecdsaCurve { case "": priv, err = rsa.GenerateKey(rand.Reader, rsaBits) case "P224": priv, err = ecdsa.GenerateKey(elliptic.P224(), rand.Reader) case "P256": priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) case "P384": priv, err = ecdsa.GenerateKey(elliptic.P384(), rand.Reader) case "P521": priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader) default: err = fmt.Errorf("Unrecognized elliptic curve: %q", ecdsaCurve) } if err != nil { log.Fatalf("failed to generate private key: %s", err) return } var notBefore time.Time if len(validFrom) == 0 { notBefore = time.Now() } else { notBefore, err = time.Parse("Jan 2 15:04:05 2006", validFrom) if err != nil { err = fmt.Errorf("Failed to parse creation date: %s\n", err) return } } notAfter := notBefore.Add(validFor) serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) if err != nil { log.Fatalf("failed to generate serial number: %s", err) } template := x509.Certificate{ SerialNumber: serialNumber, Subject: pkix.Name{ Organization: []string{"Acme Co"}, }, NotBefore: notBefore, NotAfter: notAfter, KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, BasicConstraintsValid: true, } hosts := strings.Split(host, ",") for _, h := range hosts { if ip := net.ParseIP(h); ip != nil { template.IPAddresses = append(template.IPAddresses, ip) } else { template.DNSNames = append(template.DNSNames, h) } } if isCA { template.IsCA = true template.KeyUsage |= x509.KeyUsageCertSign } derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv) if err != nil { log.Fatalf("Failed to create certificate: %s", err) } certOut, err := os.Create(dirPrefix + host + ".cert.pem") if err != nil { log.Fatalf("failed to open cert.pem for writing: %s", err) } err = pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}) if err != nil { return } if err = certOut.Sync(); err != nil { return } if err = certOut.Close(); err != nil { return } keyOut, err := os.OpenFile(dirPrefix+host+".key.pem", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600) if err != nil { log.Print("failed to open key.pem for writing:", err) return } var block *pem.Block if block, err = pemBlockForKey(priv); err != nil { return err } if err = pem.Encode(keyOut, block); err != nil { return err } if err = keyOut.Sync(); err != nil { return err } if err = keyOut.Close(); err != nil { return err } return }
func pemBlockForKey(priv interface{}) (*pem.Block, error) { switch k := priv.(type) { case *rsa.PrivateKey:
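// A minimal usage sketch (the import path for this testcert package is
// an assumption): generate a self-signed RSA certificate for localhost,
// valid for one year, writing localhost.cert.pem and localhost.key.pem
// into the current directory via the GenerateCert helper above.
//
//     err := testcert.GenerateCert("localhost", "", 365*24*time.Hour,
//         false, 2048, "", "./")
//     if err != nil {
//         log.Fatal(err)
//     }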
app.module.ts
import { BrowserModule } from '@angular/platform-browser'; import { NgModule } from '@angular/core'; import { AppRoutingModule } from './app-routing.module'; import { AppComponent } from './app.component';
@NgModule({ declarations: [ AppComponent, ApiErrorMessagePipe ], imports: [ BrowserModule, AppRoutingModule, ReactiveFormsModule, HttpClientModule ], providers: [], bootstrap: [AppComponent] }) export class AppModule { }
import { ReactiveFormsModule } from '@angular/forms'; import { HttpClientModule } from '@angular/common/http'; import { ApiErrorMessagePipe } from './pipes/api-error-message.pipe';
test_tags.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.template import Context, Template, TemplateSyntaxError from django.utils.translation import override import pytest from djmoney.models.fields import MoneyPatched from djmoney.templatetags.djmoney import MoneyLocalizeNode from moneyed import Money def render(template, context): return Template(template).render(Context(context)) class TestMoneyLocalizeNode: def test_repr(self): assert repr(MoneyLocalizeNode(Money(5, 'EUR'))) == '<MoneyLocalizeNode 5 EUR>' def test_invalid_instance(self): with pytest.raises(Exception) as exc: MoneyLocalizeNode(Money(5, 'EUR'), amount=15) assert str(exc.value) == 'You can define either "money" or the "amount" and "currency".' @pytest.mark.parametrize('template, context, error_text', ( ( '{% load djmoney %}{% money_localize "2.5" "PLN" as NEW_M and blabla %}{{NEW_M}}', {}, 'Wrong number of input data to the tag.' ), ( '{% load djmoney %}{% money_localize money %}{{NEW_M}}', {'money': 'Something else'}, 'The variable "money" must be an instance of Money.' ), ( '{% load djmoney %}{% money_localize amount currency %}', {'amount': None, 'currency': 'PLN'}, 'You must define both variables: amount and currency.' ) )) def test_invalid_input(template, context, error_text): with pytest.raises(TemplateSyntaxError) as exc: render(template, context) assert str(exc.value) == error_text
with override('pl'): assert render(string, context) == result @pytest.mark.parametrize( 'string, result, context', ( ( '{% load djmoney %}{% money_localize "2.5" "PLN" as NEW_M %}{{NEW_M}}', '2,50 zł', {} ), ( '{% load djmoney %}{% money_localize "2.5" "PLN" %}', '2,50 zł', {} ), ( '{% load djmoney %}{% money_localize amount currency %}', '2,60 zł', {'amount': 2.6, 'currency': 'PLN'} ), ( '{% load djmoney %}{% money_localize money as NEW_M %}{{NEW_M}}', '2,30 zł', {'money': Money(2.3, 'PLN')} ), ( '{% load djmoney %}{% money_localize money off as NEW_M %}{{NEW_M}}', '2.30 zł', {'money': Money(2.3, 'PLN')} ), ( '{% load djmoney %}{% money_localize money off as NEW_M %}{{NEW_M}}', '0.00 zł', {'money': Money(0, 'PLN')} ), ( # with a tag template "money_localize" '{% load djmoney %}{% money_localize money %}', '2,30 zł', {'money': Money(2.3, 'PLN')} ), ( # without a tag template "money_localize" '{{ money }}', '2,30 zł', {'money': MoneyPatched(2.3, 'PLN')} ), ( '{% load djmoney %}{% money_localize money off %}', '2.30 zł', {'money': Money(2.3, 'PLN')} ), ( '{% load djmoney %}{% money_localize money on %}', '2,30 zł', {'money': Money(2.3, 'PLN')} ) ) ) def test_tag(string, result, context): assert_template(string, result, context) @pytest.mark.parametrize( 'string, result, context', ( ( # money_localize has a default setting USE_L10N = True '{% load djmoney %}{% money_localize money %}', '2,30 zł', {'money': Money(2.3, 'PLN')} ), ( # without a tag template "money_localize" '{{ money }}', '2.30 zł', {'money': Money(2.3, 'PLN')} ), ( '{% load djmoney %}{% money_localize money on %}', '2,30 zł', {'money': Money(2.3, 'PLN')} ), ) ) def test_l10n_off(settings, string, result, context): settings.USE_L10N = False assert_template(string, result, context) def test_forced_l10n(): mp = MoneyPatched(2.3, 'PLN') mp.use_l10n = True assert_template('{{ money }}', '2,30 zł', {'money': mp})
def assert_template(string, result, context=None): context = context or {}
Text.js
// @flow
import type { BoxProps } from './Box';
import type { Color, Theme } from '../themes/types';
import Box from './Box';
import React from 'react';
import colorLib from 'color';
import isReactNative from '../../common/app/isReactNative';

// Universal styled Text component. The same API for browsers and React Native.
// Some props are omitted, limited, or set to match React Native behaviour.
// Use the style prop for platform-specific styling.

export type TextProps = BoxProps & {
  fontFamily?: string,
  size?: number,
  align?: 'left' | 'right' | 'center' | 'justify',
  bold?: boolean,
  color?: Color,
  decoration?: 'none' | 'underline' | 'line-through',
  italic?: boolean,
  lineHeight?: number,
  // TODO: shadowColor, shadowOffset, shadowRadius.
  // Custom
  fixWebFontSmoothing?: boolean,
};

type TextContext = {
  Text: () => React.Element<*>,
  theme: Theme,
};

// inlehmansterms.net/2014/06/09/groove-to-a-vertical-rhythm
const fontSizeWithComputedLineHeight = (typography, size) => {
  const fontSize = typography.fontSize(size);
  const lines = Math.ceil(fontSize / typography.lineHeight);
  const lineHeight = lines * typography.lineHeight;
  return { fontSize, lineHeight };
};

export const computeTextStyle = (theme: Theme, {
  fontFamily = theme.text.fontFamily,
  size = 0,
  align,
  bold,
  color = 'black',
  decoration,
  italic,
  lineHeight,
  ...props
}: TextProps) => {
  let style = {
    ...fontSizeWithComputedLineHeight(theme.typography, size),
    color: theme.colors[color],
    fontFamily,
  };

  if (align) {
    style = { ...style, textAlign: align };
  }

  if (bold) {
    const bold = theme.text.bold;
    style = { ...style, fontWeight: bold };
  }

  if (decoration) {
    style = { ...style, textDecoration: decoration };
  }

  if (italic) {
    style = { ...style, fontStyle: 'italic' };
  }

  if (lineHeight) {
    style = { ...style, lineHeight };
  }

  return [style, props];
};

// usabilitypost.com/2012/11/05/stop-fixing-font-smoothing
// tldr; Fix font smoothing only for light text on dark background.
const maybeFixFontSmoothing = (color, backgroundColor) => {
  const hasColorAndBackgroundColor =
    color && color !== 'transparent' &&
    backgroundColor && backgroundColor !== 'transparent';
  // console.log(hasColorAndBackgroundColor);
  if (!hasColorAndBackgroundColor) return null;
  const colorIsLighterThanBackgroundColor =
    colorLib(color).luminosity() >
    colorLib(backgroundColor).luminosity();
  if (!colorIsLighterThanBackgroundColor) return null;
  return {
    MozOsxFontSmoothing: 'grayscale',
    WebkitFontSmoothing: 'antialiased',
  };
};

const computePlatformTextStyle = (boxStyle, textStyle, fixWebFontSmoothing) => {
  if (isReactNative) {
    if (textStyle.fontWeight) {
      textStyle = { ...textStyle, fontWeight: String(textStyle.fontWeight) };
} if (textStyle.textDecoration) { textStyle = { ...textStyle, textDecorationLine: textStyle.textDecoration }; delete textStyle.textDecoration; } } else { textStyle = { ...textStyle, ...(fixWebFontSmoothing ? maybeFixFontSmoothing(textStyle.color, boxStyle.backgroundColor) : null), lineHeight: `${textStyle.lineHeight}px`, // browsers need px }; } return textStyle; }; const Text = ({ as, style, fixWebFontSmoothing = true, ...props }: TextProps, { Text: PlatformText, theme, }: TextContext) => { const [textStyle, restProps] = computeTextStyle(theme, props); return ( <Box as={as || PlatformText} {...restProps} style={(theme, boxStyle) => computePlatformTextStyle(boxStyle, { ...textStyle, ...(style && style(theme, { ...boxStyle, ...textStyle })), }, fixWebFontSmoothing)} /> ); }; Text.contextTypes = { Text: React.PropTypes.func, theme: React.PropTypes.object, }; export default Text;
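// A minimal usage sketch (hypothetical component; assumes the app
// provides the Text/theme context this component reads). All props shown
// come from the TextProps type above:
//
//   <Text bold italic size={1} align="center" color="white">
//     Styled once, rendered on web and React Native
//   </Text>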
utils.py
import bz2 from six.moves.cPickle import load from string import punctuation def offsets_to_token(left, right, offset_array, lemmas, punc=set(punctuation)): token_start, token_end = None, None for i, c in enumerate(offset_array): if left >= c: token_start = i if c > right and token_end is None: token_end = i break token_end = len(offset_array) - 1 if token_end is None else token_end token_end = token_end - 1 if lemmas[token_end - 1] in punc else token_end return range(token_start, token_end) class CDRTagger(object): def __init__(self, fname='data/unary_tags.pkl.bz2'): with bz2.BZ2File(fname, 'rb') as f: self.tag_dict = load(f) def tag(self, parts): pubmed_id, _, _, sent_start, sent_end = parts['stable_id'].split(':') sent_start, sent_end = int(sent_start), int(sent_end) tags = self.tag_dict.get(pubmed_id, {}) for tag in tags: if not (sent_start <= tag[1] <= sent_end): continue
parts['entity_types'][tok] = ts[0] parts['entity_cids'][tok] = ts[1] return parts class TaggerOneTagger(CDRTagger): def __init__(self, fname_tags='data/taggerone_unary_tags_cdr.pkl.bz2', fname_mesh='data/chem_dis_mesh_dicts.pkl.bz2'): with bz2.BZ2File(fname_tags, 'rb') as f: self.tag_dict = load(f) with bz2.BZ2File(fname_mesh, 'rb') as f: self.chem_mesh_dict, self.dis_mesh_dict = load(f) def tag(self, parts): parts = super(TaggerOneTagger, self).tag(parts) for i, word in enumerate(parts['words']): tag = parts['entity_types'][i] if len(word) > 4 and tag is None: wl = word.lower() if wl in self.dis_mesh_dict: parts['entity_types'][i] = 'Disease' parts['entity_cids'][i] = self.dis_mesh_dict[wl] elif wl in self.chem_mesh_dict: parts['entity_types'][i] = 'Chemical' parts['entity_cids'][i] = self.chem_mesh_dict[wl] return parts
offsets = [offset + sent_start for offset in parts['char_offsets']] toks = offsets_to_token(tag[1], tag[2], offsets, parts['lemmas']) for tok in toks: ts = tag[0].split('|')
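# A worked micro-example for offsets_to_token above (toy inputs, not real
# CDR data): with tokens starting at character offsets 0, 6 and 12, a tag
# spanning characters 6-11 resolves to just token index 1.
#
#     list(offsets_to_token(6, 11, [0, 6, 12], ['hello', 'world', '!']))
#     # -> [1]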
memory_integration_test.go
// +build integration package bot_test // memory_integration_test.go - tests that stress the robot's memory functions. import ( "testing" . "github.com/lnxjedi/gopherbot/bot" testc "github.com/lnxjedi/gopherbot/connectors/test" ) func TestMemory(t *testing.T)
{ done, conn := setup("test/membrain", "/tmp/bottest.log", t) /* Note on ordering: Be careful with the plugins you're testing, and be sure that the robot completes all actions before replying. Consider for instance: Say "I'll remember \"$1\" is \"$2\" - but eventually I'll forget!" Remember "$1" "$2" This order of events means the test may well complete (because it got the reply) before actually remembering the fact. The next test, recalling the fact, could then fail because it tries to recall the fact before it's actually been stored in the previous test. I know this because it took me a couple of hours to figure out why my test was failing. */ tests := []testItem{ {carolID, random, ";remember slowly The Alamo", []testc.TestMessage{{null, random, "Ok, .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {aliceID, random, ";remember Ferris Bueller", []testc.TestMessage{{null, random, "Ok, .*"}, {null, random, "committed to memory"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {bobID, random, "recall 1, Bender", []testc.TestMessage{{null, random, "Ferris Bueller"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {carolID, random, ";remember Ferris Bueller", []testc.TestMessage{{null, random, "That's already one of my fondest memories"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {davidID, random, "forget 1, Bender", []testc.TestMessage{{null, random, "Ok, .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, // Short-term memories are contextual to a user in a channel {davidID, general, "Bender, what is Ferris Bueller?", []testc.TestMessage{{david, general, "Gosh, I have no idea .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {davidID, general, ";store Ferris Bueller is a Righteous Dude", []testc.TestMessage{{null, general, "I'll remember .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {davidID, general, "Bender, what is Ferris Bueller?", []testc.TestMessage{{null, general, "Ferris Bueller is a Righteous Dude"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {carolID, general, "Bender, what is Ferris Bueller?", []testc.TestMessage{{carol, general, "Gosh, I have no idea .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {davidID, random, "Bender, what is Ferris Bueller?", []testc.TestMessage{{david, random, "Gosh, I have no idea .*"}}, []Event{CommandTaskRan, ExternalTaskRan}, 0}, {bobID, general, "Bender, link news for nerds to https://slashdot.org", []testc.TestMessage{{null, general, "Link added"}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {bobID, general, ";save https://slashdot.org", []testc.TestMessage{{null, general, "I already have that link"}, {bob, general, "Do you want .*"}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {bobID, general, "yes", []testc.TestMessage{{null, general, "Ok, I'll replace the old one"}, {bob, general, "What keywords or phrase .*"}}, []Event{}, 0}, {bobID, general, "News for Nerds, Stuff that Matters!", []testc.TestMessage{{null, general, "Link added"}}, []Event{}, 0}, {carolID, general, "Bender, look up nerds", []testc.TestMessage{{null, general, `(?s:Here's what I have .*Nerds.*)`}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {aliceID, general, ";link tuna casserole to https://www.allrecipes.com/recipe/17219/best-tuna-casserole/", []testc.TestMessage{{null, general, `Link added`}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {aliceID, general, ";add it to the dinner meals list", []testc.TestMessage{{alice, general, `I don't have a .*`}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {aliceID, general, "yes", []testc.TestMessage{{null, general, 
`Ok, .*`}}, []Event{}, 0}, {aliceID, general, "Bender, look it up", []testc.TestMessage{{null, general, `(?s:Here's what I have .*best.*)`}}, []Event{CommandTaskRan, GoPluginRan}, 0}, {aliceID, general, "add hamburgers to the list, bender", []testc.TestMessage{{null, general, `Ok, I added hamburgers to the dinner meals list`}}, []Event{CommandTaskRan, GoPluginRan}, 0}, } testcases(t, conn, tests) teardown(t, done, conn) }
index.py
''' script reduces the price of a product based on a discount (%) '''
novo_preço = produto - ((produto / 100) * desconto)

print(f'the product costs {produto:.2f}')
print(f'the discount percentage is {desconto:.2f}')
print(f'the product will cost {novo_preço:.2f}')
produto = float(input('product price: '))
desconto = float(input('discount percentage: '))
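# Worked example: for produto = 100.00 and desconto = 10,
# novo_preço = 100.00 - ((100.00 / 100) * 10) = 90.00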
_loadbalancer.py
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from distutils.version import StrictVersion  # pylint: disable=no-name-in-module,import-error
from knack.log import get_logger
from .vendored_sdks.azure_mgmt_preview_aks.v2020_12_01.models import ManagedClusterLoadBalancerProfile
from .vendored_sdks.azure_mgmt_preview_aks.v2020_12_01.models import ManagedClusterLoadBalancerProfileManagedOutboundIPs
from .vendored_sdks.azure_mgmt_preview_aks.v2020_12_01.models import ManagedClusterLoadBalancerProfileOutboundIPPrefixes
from .vendored_sdks.azure_mgmt_preview_aks.v2020_12_01.models import ManagedClusterLoadBalancerProfileOutboundIPs
from .vendored_sdks.azure_mgmt_preview_aks.v2020_12_01.models import ResourceReference

logger = get_logger(__name__)


def set_load_balancer_sku(sku, kubernetes_version):
    if sku:
        return sku
    if kubernetes_version and StrictVersion(kubernetes_version) < StrictVersion("1.13.0"):
        logger.warning('Setting load_balancer_sku to basic as it is not specified and kubernetes '
                       'version (%s) less than 1.13.0 only supports basic load balancer SKU\n',
                       kubernetes_version)
        return "basic"
    return "standard"


def update_load_balancer_profile(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                 outbound_ports, idle_timeout, profile):
    """parse and update an existing load balancer profile"""
    if not is_load_balancer_profile_provided(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                             outbound_ports, idle_timeout):
        return profile
    return configure_load_balancer_profile(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                           outbound_ports, idle_timeout, profile)


def create_load_balancer_profile(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                 outbound_ports, idle_timeout):
    """parse and build load balancer profile"""
    if not is_load_balancer_profile_provided(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                             outbound_ports, idle_timeout):
        return None

    profile = ManagedClusterLoadBalancerProfile()
    return configure_load_balancer_profile(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                           outbound_ports, idle_timeout, profile)


def configure_load_balancer_profile(managed_outbound_ip_count, outbound_ips, outbound_ip_prefixes,
                                    outbound_ports, idle_timeout, profile):
    """configure a load balancer with customer supplied values"""
    if not profile:
        return profile

    outbound_ip_resources = _get_load_balancer_outbound_ips(outbound_ips)
    outbound_ip_prefix_resources = _get_load_balancer_outbound_ip_prefixes(outbound_ip_prefixes)

    if managed_outbound_ip_count or outbound_ip_resources or outbound_ip_prefix_resources:
        profile.managed_outbound_ips = None
        profile.outbound_ips = None
        profile.outbound_ip_prefixes = None
        if managed_outbound_ip_count:
            profile.managed_outbound_ips = ManagedClusterLoadBalancerProfileManagedOutboundIPs(
                count=managed_outbound_ip_count
            )
        if outbound_ip_resources:
            profile.outbound_ips = ManagedClusterLoadBalancerProfileOutboundIPs(
                public_ips=outbound_ip_resources
            )
        if outbound_ip_prefix_resources:
            profile.outbound_ip_prefixes = ManagedClusterLoadBalancerProfileOutboundIPPrefixes(
                public_ip_prefixes=outbound_ip_prefix_resources
            )
    if outbound_ports:
        profile.allocated_outbound_ports = outbound_ports
    if idle_timeout:
        profile.idle_timeout_in_minutes
= idle_timeout return profile def is_load_balancer_profile_provided(managed_outbound_ip_count, outbound_ips, ip_prefixes, outbound_ports, idle_timeout): return any([managed_outbound_ip_count, outbound_ips, ip_prefixes, outbound_ports, idle_timeout]) def
(load_balancer_outbound_ips): """parse load balancer profile outbound IP ids and return an array of references to the outbound IP resources""" load_balancer_outbound_ip_resources = None if load_balancer_outbound_ips: load_balancer_outbound_ip_resources = \ [ResourceReference(id=x.strip()) for x in load_balancer_outbound_ips.split(',')] return load_balancer_outbound_ip_resources def _get_load_balancer_outbound_ip_prefixes(load_balancer_outbound_ip_prefixes): """parse load balancer profile outbound IP prefix ids and return an array \ of references to the outbound IP prefix resources""" load_balancer_outbound_ip_prefix_resources = None if load_balancer_outbound_ip_prefixes: load_balancer_outbound_ip_prefix_resources = \ [ResourceReference(id=x.strip()) for x in load_balancer_outbound_ip_prefixes.split(',')] return load_balancer_outbound_ip_prefix_resources
_get_load_balancer_outbound_ips
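# A minimal usage sketch (illustrative values only): build a fresh profile
# with two managed outbound IPs and a 30 minute idle timeout, leaving the
# remaining knobs unset.
#
#     profile = create_load_balancer_profile(
#         managed_outbound_ip_count=2,
#         outbound_ips=None,
#         outbound_ip_prefixes=None,
#         outbound_ports=None,
#         idle_timeout=30,
#     )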
jira.py
"""Generic functions related to Jira.""" from typing import Any, Dict import httpx async def post_jira_issue(url: str, jira_user: str, jira_token: str, payload: Dict[str, Any]) -> Dict[str, Any]: """Post payload to create jira issue. Args: url (str): url jira_user (str): jira username jira_token (str): jira token payload (Dict[str, Any]): jira payload describing ticket info Returns: Dict[str, Any]: response of operation """ headers = {'Content-Type': 'application/json'} auth = (jira_user, jira_token) async with httpx.AsyncClient() as client: return await client.post(url=url, json=payload, auth=auth, headers=headers) async def get_jira_issue(url: str, jira_user: str, jira_token: str) -> Dict[str, Any]: """Get Jira issue using jira_link built with the expected jira_id, an example: https://nrccua.atlassian.net/rest/api/2/issue/<jira_id>. Args: url (str): url jira_user (str): jira username jira_token (str): jira token Returns: Dict[str, Any]: response of operation """ headers = {'Content-Type': 'application/json'} auth = (jira_user, jira_token) async with httpx.AsyncClient() as client: return await client.get(url=url, auth=auth, headers=headers) async def add_comment_to_jira(url: str, jira_user: str, jira_token: str, comment: str) -> Dict[str, Any]:
"""Add Jira comment to an existing issue. Args: url (str): url jira_user (str): jira username jira_token (str): jira token comment (str): comment to add to jira ticket Raises: ValueError: problem with url Returns: Dict[str, Any]: response of operation """ if not url.endswith('comment'): msg = 'Check url value! Good example is https://nrccua.atlassian.net/rest/api/2/issue/<jira_id>/comment' raise ValueError(msg) return await post_jira_issue( url=url, payload={'body': comment}, jira_user=jira_user, jira_token=jira_token)
rpm_test.go
package rpm import ( "os" "sort" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/aquasecurity/fanal/types" ) func TestParseRpmInfo(t *testing.T)
func Test_splitFileName(t *testing.T) {
	tests := []struct {
		name     string
		filename string
		wantName string
		wantVer  string
		wantRel  string
		wantErr  bool
	}{
		{
			name:     "valid name",
			filename: "glibc-2.17-307.el7.1.src.rpm",
			wantName: "glibc",
			wantVer:  "2.17",
			wantRel:  "307.el7.1",
			wantErr:  false,
		},
		{
			name:     "invalid name",
			filename: "elasticsearch-5.6.16-1-src.rpm",
			wantName: "",
			wantVer:  "",
			wantRel:  "",
			wantErr:  true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotName, gotVer, gotRel, err := splitFileName(tt.filename)
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
			assert.Equal(t, tt.wantName, gotName)
			assert.Equal(t, tt.wantVer, gotVer)
			assert.Equal(t, tt.wantRel, gotRel)
		})
	}
}
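Before the fixture data, an illustrative sketch of the name-version-release split that Test_splitFileName exercises, written in Python to mirror the dnf splitFilename helper quoted in the fixture-generation comments below; the Go splitFileName under test lives elsewhere in this package and may differ in detail:

def split_file_name(filename):
    # Illustrative only: '<name>-<version>-<release>.src.rpm' -> three parts.
    suffix = '.src.rpm'
    if not filename.endswith(suffix):
        raise ValueError(f'unexpected source rpm name: {filename}')
    name, version, release = filename[:-len(suffix)].rsplit('-', 2)
    return name, version, release

# Matches the table-driven cases above:
assert split_file_name('glibc-2.17-307.el7.1.src.rpm') == ('glibc', '2.17', '307.el7.1')
# 'elasticsearch-5.6.16-1-src.rpm' lacks the '.src.rpm' suffix, so it raises ValueError.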
{ var tests = map[string]struct { path string pkgs []types.Package }{ "Valid": { path: "./testdata/valid", // cp ./testdata/valid /path/to/testdir/Packages // rpm --dbpath /path/to/testdir -qa --qf "{Name: \"%{NAME}\", Epoch: %{EPOCHNUM}, Version: \"%{VERSION}\", Release: \"%{RELEASE}\", Arch: \"%{ARCH}\"\},\n" pkgs: []types.Package{ {Name: "centos-release", Epoch: 0, Version: "7", Release: "1.1503.el7.centos.2.8", Arch: "x86_64", SrcName: "centos-release", SrcEpoch: 0, SrcVersion: "7", SrcRelease: "1.1503.el7.centos.2.8", License: "GPLv2"}, {Name: "filesystem", Epoch: 0, Version: "3.2", Release: "18.el7", Arch: "x86_64", SrcName: "filesystem", SrcEpoch: 0, SrcVersion: "3.2", SrcRelease: "18.el7", License: "Public Domain"}, }, }, "ValidBig": { path: "./testdata/valid_big", // $ cat rpmqa.py // import rpm // from rpmUtils.miscutils import splitFilename // // // rpm.addMacro('_dbpath', '/tmp/') // ts = rpm.TransactionSet() // mi = ts.dbMatch() // for h in mi: // sname = sversion = srelease = "" // if h[rpm.RPMTAG_SOURCERPM] != "(none)": // sname, sversion, srelease, _, _ = splitFilename(h[rpm.RPMTAG_SOURCERPM]) // print "{Name: \"%s\", Epoch: %d, Version: \"%s\", Release: \"%s\", Arch: \"%s\", SrcName: \"%s\", SrcEpoch: %d, SrcVersion: \"%s\", SrcRelease: \"%s\"}," % ( // h[rpm.RPMTAG_NAME], h[rpm.RPMTAG_EPOCHNUM], h[rpm.RPMTAG_VERSION], h[rpm.RPMTAG_RELEASE], h[rpm.RPMTAG_ARCH], // sname, h[rpm.RPMTAG_EPOCHNUM], sversion, srelease) pkgs: []types.Package{ {Name: "publicsuffix-list-dafsa", Epoch: 0, Version: "20180514", Release: "1.fc28", Arch: "noarch", SrcName: "publicsuffix-list", SrcEpoch: 0, SrcVersion: "20180514", SrcRelease: "1.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libreport-filesystem", Epoch: 0, Version: "2.9.5", Release: "1.fc28", Arch: "x86_64", SrcName: "libreport", SrcEpoch: 0, SrcVersion: "2.9.5", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "fedora-gpg-keys", Epoch: 0, Version: "28", Release: "5", Arch: "noarch", SrcName: "fedora-repos", SrcEpoch: 0, SrcVersion: "28", SrcRelease: "5", Modularitylabel: "", License: "MIT"}, {Name: "fedora-release", Epoch: 0, Version: "28", Release: "2", Arch: "noarch", SrcName: "fedora-release", SrcEpoch: 0, SrcVersion: "28", SrcRelease: "2", Modularitylabel: "", License: "MIT"}, {Name: "filesystem", Epoch: 0, Version: "3.8", Release: "2.fc28", Arch: "x86_64", SrcName: "filesystem", SrcEpoch: 0, SrcVersion: "3.8", SrcRelease: "2.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "tzdata", Epoch: 0, Version: "2018e", Release: "1.fc28", Arch: "noarch", SrcName: "tzdata", SrcEpoch: 0, SrcVersion: "2018e", SrcRelease: "1.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "pcre2", Epoch: 0, Version: "10.31", Release: "10.fc28", Arch: "x86_64", SrcName: "pcre2", SrcEpoch: 0, SrcVersion: "10.31", SrcRelease: "10.fc28", Modularitylabel: "", License: "BSD"}, {Name: "glibc-minimal-langpack", Epoch: 0, Version: "2.27", Release: "32.fc28", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.27", SrcRelease: "32.fc28", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "glibc-common", Epoch: 0, Version: "2.27", Release: "32.fc28", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.27", SrcRelease: "32.fc28", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public 
Domain and GFDL"}, {Name: "bash", Epoch: 0, Version: "4.4.23", Release: "1.fc28", Arch: "x86_64", SrcName: "bash", SrcEpoch: 0, SrcVersion: "4.4.23", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "zlib", Epoch: 0, Version: "1.2.11", Release: "8.fc28", Arch: "x86_64", SrcName: "zlib", SrcEpoch: 0, SrcVersion: "1.2.11", SrcRelease: "8.fc28", Modularitylabel: "", License: "zlib and Boost"}, {Name: "bzip2-libs", Epoch: 0, Version: "1.0.6", Release: "26.fc28", Arch: "x86_64", SrcName: "bzip2", SrcEpoch: 0, SrcVersion: "1.0.6", SrcRelease: "26.fc28", Modularitylabel: "", License: "BSD"}, {Name: "libcap", Epoch: 0, Version: "2.25", Release: "9.fc28", Arch: "x86_64", SrcName: "libcap", SrcEpoch: 0, SrcVersion: "2.25", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2"}, {Name: "libgpg-error", Epoch: 0, Version: "1.31", Release: "1.fc28", Arch: "x86_64", SrcName: "libgpg-error", SrcEpoch: 0, SrcVersion: "1.31", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libzstd", Epoch: 0, Version: "1.3.5", Release: "1.fc28", Arch: "x86_64", SrcName: "zstd", SrcEpoch: 0, SrcVersion: "1.3.5", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD and GPLv2"}, {Name: "expat", Epoch: 0, Version: "2.2.5", Release: "3.fc28", Arch: "x86_64", SrcName: "expat", SrcEpoch: 0, SrcVersion: "2.2.5", SrcRelease: "3.fc28", Modularitylabel: "", License: "MIT"}, {Name: "nss-util", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss-util", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libcom_err", Epoch: 0, Version: "1.44.2", Release: "0.fc28", Arch: "x86_64", SrcName: "e2fsprogs", SrcEpoch: 0, SrcVersion: "1.44.2", SrcRelease: "0.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libffi", Epoch: 0, Version: "3.1", Release: "16.fc28", Arch: "x86_64", SrcName: "libffi", SrcEpoch: 0, SrcVersion: "3.1", SrcRelease: "16.fc28", Modularitylabel: "", License: "BSD"}, {Name: "libgcrypt", Epoch: 0, Version: "1.8.3", Release: "1.fc28", Arch: "x86_64", SrcName: "libgcrypt", SrcEpoch: 0, SrcVersion: "1.8.3", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libxml2", Epoch: 0, Version: "2.9.8", Release: "4.fc28", Arch: "x86_64", SrcName: "libxml2", SrcEpoch: 0, SrcVersion: "2.9.8", SrcRelease: "4.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libacl", Epoch: 0, Version: "2.2.53", Release: "1.fc28", Arch: "x86_64", SrcName: "acl", SrcEpoch: 0, SrcVersion: "2.2.53", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "sed", Epoch: 0, Version: "4.5", Release: "1.fc28", Arch: "x86_64", SrcName: "sed", SrcEpoch: 0, SrcVersion: "4.5", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "libmount", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "p11-kit", Epoch: 0, Version: "0.23.12", Release: "1.fc28", Arch: "x86_64", SrcName: "p11-kit", SrcEpoch: 0, SrcVersion: "0.23.12", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD"}, {Name: "libidn2", Epoch: 0, Version: "2.0.5", Release: "1.fc28", Arch: "x86_64", SrcName: "libidn2", SrcEpoch: 0, SrcVersion: "2.0.5", SrcRelease: "1.fc28", Modularitylabel: "", License: "(GPLv2+ or LGPLv3+) and GPLv3+"}, {Name: "libcap-ng", Epoch: 0, Version: "0.7.9", Release: "4.fc28", Arch: "x86_64", SrcName: "libcap-ng", SrcEpoch: 0, SrcVersion: "0.7.9", 
SrcRelease: "4.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "lz4-libs", Epoch: 0, Version: "1.8.1.2", Release: "4.fc28", Arch: "x86_64", SrcName: "lz4", SrcEpoch: 0, SrcVersion: "1.8.1.2", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv2+ and BSD"}, {Name: "libassuan", Epoch: 0, Version: "2.5.1", Release: "3.fc28", Arch: "x86_64", SrcName: "libassuan", SrcEpoch: 0, SrcVersion: "2.5.1", SrcRelease: "3.fc28", Modularitylabel: "", License: "LGPLv2+ and GPLv3+"}, {Name: "keyutils-libs", Epoch: 0, Version: "1.5.10", Release: "6.fc28", Arch: "x86_64", SrcName: "keyutils", SrcEpoch: 0, SrcVersion: "1.5.10", SrcRelease: "6.fc28", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "glib2", Epoch: 0, Version: "2.56.1", Release: "4.fc28", Arch: "x86_64", SrcName: "glib2", SrcEpoch: 0, SrcVersion: "2.56.1", SrcRelease: "4.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "systemd-libs", Epoch: 0, Version: "238", Release: "9.git0e0aa59.fc28", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "238", SrcRelease: "9.git0e0aa59.fc28", Modularitylabel: "", License: "LGPLv2+ and MIT"}, {Name: "dbus-libs", Epoch: 1, Version: "1.12.10", Release: "1.fc28", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.10", SrcRelease: "1.fc28", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "libtasn1", Epoch: 0, Version: "4.13", Release: "2.fc28", Arch: "x86_64", SrcName: "libtasn1", SrcEpoch: 0, SrcVersion: "4.13", SrcRelease: "2.fc28", Modularitylabel: "", License: "GPLv3+ and LGPLv2+"}, {Name: "ca-certificates", Epoch: 0, Version: "2018.2.24", Release: "1.0.fc28", Arch: "noarch", SrcName: "ca-certificates", SrcEpoch: 0, SrcVersion: "2018.2.24", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "libarchive", Epoch: 0, Version: "3.3.1", Release: "4.fc28", Arch: "x86_64", SrcName: "libarchive", SrcEpoch: 0, SrcVersion: "3.3.1", SrcRelease: "4.fc28", Modularitylabel: "", License: "BSD"}, {Name: "openssl", Epoch: 1, Version: "1.1.0h", Release: "3.fc28", Arch: "x86_64", SrcName: "openssl", SrcEpoch: 1, SrcVersion: "1.1.0h", SrcRelease: "3.fc28", Modularitylabel: "", License: "OpenSSL"}, {Name: "libusbx", Epoch: 0, Version: "1.0.22", Release: "1.fc28", Arch: "x86_64", SrcName: "libusbx", SrcEpoch: 0, SrcVersion: "1.0.22", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libsemanage", Epoch: 0, Version: "2.8", Release: "2.fc28", Arch: "x86_64", SrcName: "libsemanage", SrcEpoch: 0, SrcVersion: "2.8", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libutempter", Epoch: 0, Version: "1.1.6", Release: "14.fc28", Arch: "x86_64", SrcName: "libutempter", SrcEpoch: 0, SrcVersion: "1.1.6", SrcRelease: "14.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "mpfr", Epoch: 0, Version: "3.1.6", Release: "1.fc28", Arch: "x86_64", SrcName: "mpfr", SrcEpoch: 0, SrcVersion: "3.1.6", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv3+ and GPLv3+ and GFDL"}, {Name: "gnutls", Epoch: 0, Version: "3.6.3", Release: "4.fc28", Arch: "x86_64", SrcName: "gnutls", SrcEpoch: 0, SrcVersion: "3.6.3", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv3+ and LGPLv2+"}, {Name: "gzip", Epoch: 0, Version: "1.9", Release: "3.fc28", Arch: "x86_64", SrcName: "gzip", SrcEpoch: 0, SrcVersion: "1.9", SrcRelease: "3.fc28", Modularitylabel: "", License: "GPLv3+ and GFDL"}, {Name: "acl", Epoch: 0, Version: "2.2.53", Release: "1.fc28", Arch: "x86_64", SrcName: "acl", SrcEpoch: 0, SrcVersion: 
"2.2.53", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "nss-softokn-freebl", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss-softokn", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libmetalink", Epoch: 0, Version: "0.1.3", Release: "6.fc28", Arch: "x86_64", SrcName: "libmetalink", SrcEpoch: 0, SrcVersion: "0.1.3", SrcRelease: "6.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libdb-utils", Epoch: 0, Version: "5.3.28", Release: "30.fc28", Arch: "x86_64", SrcName: "libdb", SrcEpoch: 0, SrcVersion: "5.3.28", SrcRelease: "30.fc28", Modularitylabel: "", License: "BSD and LGPLv2 and Sleepycat"}, {Name: "file-libs", Epoch: 0, Version: "5.33", Release: "7.fc28", Arch: "x86_64", SrcName: "file", SrcEpoch: 0, SrcVersion: "5.33", SrcRelease: "7.fc28", Modularitylabel: "", License: "BSD"}, {Name: "libsss_idmap", Epoch: 0, Version: "1.16.3", Release: "2.fc28", Arch: "x86_64", SrcName: "sssd", SrcEpoch: 0, SrcVersion: "1.16.3", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv3+"}, {Name: "libsigsegv", Epoch: 0, Version: "2.11", Release: "5.fc28", Arch: "x86_64", SrcName: "libsigsegv", SrcEpoch: 0, SrcVersion: "2.11", SrcRelease: "5.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "krb5-libs", Epoch: 0, Version: "1.16.1", Release: "13.fc28", Arch: "x86_64", SrcName: "krb5", SrcEpoch: 0, SrcVersion: "1.16.1", SrcRelease: "13.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libnsl2", Epoch: 0, Version: "1.2.0", Release: "2.20180605git4a062cf.fc28", Arch: "x86_64", SrcName: "libnsl2", SrcEpoch: 0, SrcVersion: "1.2.0", SrcRelease: "2.20180605git4a062cf.fc28", Modularitylabel: "", License: "BSD and LGPLv2+"}, {Name: "python3-pip", Epoch: 0, Version: "9.0.3", Release: "2.fc28", Arch: "noarch", SrcName: "python-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "2.fc28", Modularitylabel: "", License: "MIT"}, {Name: "python3", Epoch: 0, Version: "3.6.6", Release: "1.fc28", Arch: "x86_64", SrcName: "python3", SrcEpoch: 0, SrcVersion: "3.6.6", SrcRelease: "1.fc28", Modularitylabel: "", License: "Python"}, {Name: "pam", Epoch: 0, Version: "1.3.1", Release: "1.fc28", Arch: "x86_64", SrcName: "pam", SrcEpoch: 0, SrcVersion: "1.3.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD and GPLv2+"}, {Name: "python3-gobject-base", Epoch: 0, Version: "3.28.3", Release: "1.fc28", Arch: "x86_64", SrcName: "pygobject3", SrcEpoch: 0, SrcVersion: "3.28.3", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+ and MIT"}, {Name: "python3-smartcols", Epoch: 0, Version: "0.3.0", Release: "2.fc28", Arch: "x86_64", SrcName: "python-smartcols", SrcEpoch: 0, SrcVersion: "0.3.0", SrcRelease: "2.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "python3-iniparse", Epoch: 0, Version: "0.4", Release: "30.fc28", Arch: "noarch", SrcName: "python-iniparse", SrcEpoch: 0, SrcVersion: "0.4", SrcRelease: "30.fc28", Modularitylabel: "", License: "MIT and Python"}, {Name: "openldap", Epoch: 0, Version: "2.4.46", Release: "3.fc28", Arch: "x86_64", SrcName: "openldap", SrcEpoch: 0, SrcVersion: "2.4.46", SrcRelease: "3.fc28", Modularitylabel: "", License: "OpenLDAP"}, {Name: "libseccomp", Epoch: 0, Version: "2.3.3", Release: "2.fc28", Arch: "x86_64", SrcName: "libseccomp", SrcEpoch: 0, 
SrcVersion: "2.3.3", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv2"}, {Name: "npth", Epoch: 0, Version: "1.5", Release: "4.fc28", Arch: "x86_64", SrcName: "npth", SrcEpoch: 0, SrcVersion: "1.5", SrcRelease: "4.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gpgme", Epoch: 0, Version: "1.10.0", Release: "4.fc28", Arch: "x86_64", SrcName: "gpgme", SrcEpoch: 0, SrcVersion: "1.10.0", SrcRelease: "4.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "json-c", Epoch: 0, Version: "0.13.1", Release: "2.fc28", Arch: "x86_64", SrcName: "json-c", SrcEpoch: 0, SrcVersion: "0.13.1", SrcRelease: "2.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libyaml", Epoch: 0, Version: "0.1.7", Release: "5.fc28", Arch: "x86_64", SrcName: "libyaml", SrcEpoch: 0, SrcVersion: "0.1.7", SrcRelease: "5.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libpkgconf", Epoch: 0, Version: "1.4.2", Release: "1.fc28", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "ISC"}, {Name: "pkgconf-pkg-config", Epoch: 0, Version: "1.4.2", Release: "1.fc28", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "ISC"}, {Name: "iptables-libs", Epoch: 0, Version: "1.6.2", Release: "3.fc28", Arch: "x86_64", SrcName: "iptables", SrcEpoch: 0, SrcVersion: "1.6.2", SrcRelease: "3.fc28", Modularitylabel: "", License: "GPLv2 and Artistic Licence 2.0 and ISC"}, {Name: "device-mapper-libs", Epoch: 0, Version: "1.02.146", Release: "5.fc28", Arch: "x86_64", SrcName: "lvm2", SrcEpoch: 0, SrcVersion: "2.02.177", SrcRelease: "5.fc28", Modularitylabel: "", License: "LGPLv2"}, {Name: "systemd-pam", Epoch: 0, Version: "238", Release: "9.git0e0aa59.fc28", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "238", SrcRelease: "9.git0e0aa59.fc28", Modularitylabel: "", License: "LGPLv2+ and MIT and GPLv2+"}, {Name: "systemd", Epoch: 0, Version: "238", Release: "9.git0e0aa59.fc28", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "238", SrcRelease: "9.git0e0aa59.fc28", Modularitylabel: "", License: "LGPLv2+ and MIT and GPLv2+"}, {Name: "elfutils-default-yama-scope", Epoch: 0, Version: "0.173", Release: "1.fc28", Arch: "noarch", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.173", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "libcurl", Epoch: 0, Version: "7.59.0", Release: "6.fc28", Arch: "x86_64", SrcName: "curl", SrcEpoch: 0, SrcVersion: "7.59.0", SrcRelease: "6.fc28", Modularitylabel: "", License: "MIT"}, {Name: "python3-librepo", Epoch: 0, Version: "1.8.1", Release: "7.fc28", Arch: "x86_64", SrcName: "librepo", SrcEpoch: 0, SrcVersion: "1.8.1", SrcRelease: "7.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "rpm-plugin-selinux", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "rpm", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "libdnf", Epoch: 0, Version: "0.11.1", Release: "3.fc28", Arch: "x86_64", SrcName: "libdnf", SrcEpoch: 0, SrcVersion: "0.11.1", SrcRelease: "3.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "rpm-build-libs", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: 
"4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ with exceptions"}, {Name: "python3-rpm", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "dnf", Epoch: 0, Version: "2.7.5", Release: "12.fc28", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "2.7.5", SrcRelease: "12.fc28", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "deltarpm", Epoch: 0, Version: "3.6", Release: "25.fc28", Arch: "x86_64", SrcName: "deltarpm", SrcEpoch: 0, SrcVersion: "3.6", SrcRelease: "25.fc28", Modularitylabel: "", License: "BSD"}, {Name: "sssd-client", Epoch: 0, Version: "1.16.3", Release: "2.fc28", Arch: "x86_64", SrcName: "sssd", SrcEpoch: 0, SrcVersion: "1.16.3", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv3+"}, {Name: "cracklib-dicts", Epoch: 0, Version: "2.9.6", Release: "13.fc28", Arch: "x86_64", SrcName: "cracklib", SrcEpoch: 0, SrcVersion: "2.9.6", SrcRelease: "13.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "tar", Epoch: 2, Version: "1.30", Release: "3.fc28", Arch: "x86_64", SrcName: "tar", SrcEpoch: 2, SrcVersion: "1.30", SrcRelease: "3.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "diffutils", Epoch: 0, Version: "3.6", Release: "4.fc28", Arch: "x86_64", SrcName: "diffutils", SrcEpoch: 0, SrcVersion: "3.6", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "langpacks-en", Epoch: 0, Version: "1.0", Release: "12.fc28", Arch: "noarch", SrcName: "langpacks", SrcEpoch: 0, SrcVersion: "1.0", SrcRelease: "12.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "gpg-pubkey", Epoch: 0, Version: "9db62fb1", Release: "59920156", Arch: "None", SrcName: "", SrcEpoch: 0, SrcVersion: "", SrcRelease: "", Modularitylabel: "", License: "pubkey"}, {Name: "libgcc", Epoch: 0, Version: "8.1.1", Release: "5.fc28", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: "8.1.1", SrcRelease: "5.fc28", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "pkgconf-m4", Epoch: 0, Version: "1.4.2", Release: "1.fc28", Arch: "noarch", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+ with exceptions"}, {Name: "dnf-conf", Epoch: 0, Version: "2.7.5", Release: "12.fc28", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "2.7.5", SrcRelease: "12.fc28", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "fedora-repos", Epoch: 0, Version: "28", Release: "5", Arch: "noarch", SrcName: "fedora-repos", SrcEpoch: 0, SrcVersion: "28", SrcRelease: "5", Modularitylabel: "", License: "MIT"}, {Name: "setup", Epoch: 0, Version: "2.11.4", Release: "1.fc28", Arch: "noarch", SrcName: "setup", SrcEpoch: 0, SrcVersion: "2.11.4", SrcRelease: "1.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "basesystem", Epoch: 0, Version: "11", Release: "5.fc28", Arch: "noarch", SrcName: "basesystem", SrcEpoch: 0, SrcVersion: "11", SrcRelease: "5.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "ncurses-base", Epoch: 0, Version: "6.1", Release: "5.20180224.fc28", Arch: "noarch", SrcName: "ncurses", SrcEpoch: 0, SrcVersion: "6.1", SrcRelease: "5.20180224.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libselinux", Epoch: 0, Version: "2.8", Release: "1.fc28", Arch: "x86_64", SrcName: "libselinux", SrcEpoch: 0, SrcVersion: "2.8", SrcRelease: 
"1.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "ncurses-libs", Epoch: 0, Version: "6.1", Release: "5.20180224.fc28", Arch: "x86_64", SrcName: "ncurses", SrcEpoch: 0, SrcVersion: "6.1", SrcRelease: "5.20180224.fc28", Modularitylabel: "", License: "MIT"}, {Name: "glibc", Epoch: 0, Version: "2.27", Release: "32.fc28", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.27", SrcRelease: "32.fc28", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "libsepol", Epoch: 0, Version: "2.8", Release: "1.fc28", Arch: "x86_64", SrcName: "libsepol", SrcEpoch: 0, SrcVersion: "2.8", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "xz-libs", Epoch: 0, Version: "5.2.4", Release: "2.fc28", Arch: "x86_64", SrcName: "xz", SrcEpoch: 0, SrcVersion: "5.2.4", SrcRelease: "2.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "info", Epoch: 0, Version: "6.5", Release: "4.fc28", Arch: "x86_64", SrcName: "texinfo", SrcEpoch: 0, SrcVersion: "6.5", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "libdb", Epoch: 0, Version: "5.3.28", Release: "30.fc28", Arch: "x86_64", SrcName: "libdb", SrcEpoch: 0, SrcVersion: "5.3.28", SrcRelease: "30.fc28", Modularitylabel: "", License: "BSD and LGPLv2 and Sleepycat"}, {Name: "elfutils-libelf", Epoch: 0, Version: "0.173", Release: "1.fc28", Arch: "x86_64", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.173", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "popt", Epoch: 0, Version: "1.16", Release: "14.fc28", Arch: "x86_64", SrcName: "popt", SrcEpoch: 0, SrcVersion: "1.16", SrcRelease: "14.fc28", Modularitylabel: "", License: "MIT"}, {Name: "nspr", Epoch: 0, Version: "4.19.0", Release: "1.fc28", Arch: "x86_64", SrcName: "nspr", SrcEpoch: 0, SrcVersion: "4.19.0", SrcRelease: "1.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libxcrypt", Epoch: 0, Version: "4.1.2", Release: "1.fc28", Arch: "x86_64", SrcName: "libxcrypt", SrcEpoch: 0, SrcVersion: "4.1.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+ and BSD and Public Domain"}, {Name: "lua-libs", Epoch: 0, Version: "5.3.4", Release: "10.fc28", Arch: "x86_64", SrcName: "lua", SrcEpoch: 0, SrcVersion: "5.3.4", SrcRelease: "10.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libuuid", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD"}, {Name: "readline", Epoch: 0, Version: "7.0", Release: "11.fc28", Arch: "x86_64", SrcName: "readline", SrcEpoch: 0, SrcVersion: "7.0", SrcRelease: "11.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "libattr", Epoch: 0, Version: "2.4.48", Release: "3.fc28", Arch: "x86_64", SrcName: "attr", SrcEpoch: 0, SrcVersion: "2.4.48", SrcRelease: "3.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "coreutils-single", Epoch: 0, Version: "8.29", Release: "7.fc28", Arch: "x86_64", SrcName: "coreutils", SrcEpoch: 0, SrcVersion: "8.29", SrcRelease: "7.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "libblkid", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gmp", Epoch: 1, Version: "6.1.2", Release: "7.fc28", Arch: "x86_64", SrcName: "gmp", SrcEpoch: 1, SrcVersion: 
"6.1.2", SrcRelease: "7.fc28", Modularitylabel: "", License: "LGPLv3+ or GPLv2+"}, {Name: "libunistring", Epoch: 0, Version: "0.9.10", Release: "1.fc28", Arch: "x86_64", SrcName: "libunistring", SrcEpoch: 0, SrcVersion: "0.9.10", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLV2+ or LGPLv3+"}, {Name: "sqlite-libs", Epoch: 0, Version: "3.22.0", Release: "4.fc28", Arch: "x86_64", SrcName: "sqlite", SrcEpoch: 0, SrcVersion: "3.22.0", SrcRelease: "4.fc28", Modularitylabel: "", License: "Public Domain"}, {Name: "audit-libs", Epoch: 0, Version: "2.8.4", Release: "2.fc28", Arch: "x86_64", SrcName: "audit", SrcEpoch: 0, SrcVersion: "2.8.4", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "chkconfig", Epoch: 0, Version: "1.10", Release: "4.fc28", Arch: "x86_64", SrcName: "chkconfig", SrcEpoch: 0, SrcVersion: "1.10", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv2"}, {Name: "libsmartcols", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "pcre", Epoch: 0, Version: "8.42", Release: "3.fc28", Arch: "x86_64", SrcName: "pcre", SrcEpoch: 0, SrcVersion: "8.42", SrcRelease: "3.fc28", Modularitylabel: "", License: "BSD"}, {Name: "grep", Epoch: 0, Version: "3.1", Release: "5.fc28", Arch: "x86_64", SrcName: "grep", SrcEpoch: 0, SrcVersion: "3.1", SrcRelease: "5.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "crypto-policies", Epoch: 0, Version: "20180425", Release: "5.git6ad4018.fc28", Arch: "noarch", SrcName: "crypto-policies", SrcEpoch: 0, SrcVersion: "20180425", SrcRelease: "5.git6ad4018.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gdbm-libs", Epoch: 1, Version: "1.14.1", Release: "4.fc28", Arch: "x86_64", SrcName: "gdbm", SrcEpoch: 1, SrcVersion: "1.14.1", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "p11-kit-trust", Epoch: 0, Version: "0.23.12", Release: "1.fc28", Arch: "x86_64", SrcName: "p11-kit", SrcEpoch: 0, SrcVersion: "0.23.12", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD"}, {Name: "openssl-libs", Epoch: 1, Version: "1.1.0h", Release: "3.fc28", Arch: "x86_64", SrcName: "openssl", SrcEpoch: 1, SrcVersion: "1.1.0h", SrcRelease: "3.fc28", Modularitylabel: "", License: "OpenSSL"}, {Name: "ima-evm-utils", Epoch: 0, Version: "1.1", Release: "2.fc28", Arch: "x86_64", SrcName: "ima-evm-utils", SrcEpoch: 0, SrcVersion: "1.1", SrcRelease: "2.fc28", Modularitylabel: "", License: "GPLv2"}, {Name: "gdbm", Epoch: 1, Version: "1.14.1", Release: "4.fc28", Arch: "x86_64", SrcName: "gdbm", SrcEpoch: 1, SrcVersion: "1.14.1", SrcRelease: "4.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "gobject-introspection", Epoch: 0, Version: "1.56.1", Release: "1.fc28", Arch: "x86_64", SrcName: "gobject-introspection", SrcEpoch: 0, SrcVersion: "1.56.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+, LGPLv2+, MIT"}, {Name: "shadow-utils", Epoch: 2, Version: "4.6", Release: "1.fc28", Arch: "x86_64", SrcName: "shadow-utils", SrcEpoch: 2, SrcVersion: "4.6", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD and GPLv2+"}, {Name: "libpsl", Epoch: 0, Version: "0.20.2", Release: "2.fc28", Arch: "x86_64", SrcName: "libpsl", SrcEpoch: 0, SrcVersion: "0.20.2", SrcRelease: "2.fc28", Modularitylabel: "", License: "MIT"}, {Name: "nettle", Epoch: 0, Version: "3.4", Release: "2.fc28", Arch: "x86_64", SrcName: "nettle", SrcEpoch: 0, SrcVersion: "3.4", SrcRelease: "2.fc28", 
Modularitylabel: "", License: "LGPLv3+ or GPLv2+"}, {Name: "libfdisk", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "cracklib", Epoch: 0, Version: "2.9.6", Release: "13.fc28", Arch: "x86_64", SrcName: "cracklib", SrcEpoch: 0, SrcVersion: "2.9.6", SrcRelease: "13.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libcomps", Epoch: 0, Version: "0.1.8", Release: "11.fc28", Arch: "x86_64", SrcName: "libcomps", SrcEpoch: 0, SrcVersion: "0.1.8", SrcRelease: "11.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "nss-softokn", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss-softokn", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-sysinit", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libksba", Epoch: 0, Version: "1.3.5", Release: "7.fc28", Arch: "x86_64", SrcName: "libksba", SrcEpoch: 0, SrcVersion: "1.3.5", SrcRelease: "7.fc28", Modularitylabel: "", License: "(LGPLv3+ or GPLv2+) and GPLv3+"}, {Name: "kmod-libs", Epoch: 0, Version: "25", Release: "2.fc28", Arch: "x86_64", SrcName: "kmod", SrcEpoch: 0, SrcVersion: "25", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libsss_nss_idmap", Epoch: 0, Version: "1.16.3", Release: "2.fc28", Arch: "x86_64", SrcName: "sssd", SrcEpoch: 0, SrcVersion: "1.16.3", SrcRelease: "2.fc28", Modularitylabel: "", License: "LGPLv3+"}, {Name: "libverto", Epoch: 0, Version: "0.3.0", Release: "5.fc28", Arch: "x86_64", SrcName: "libverto", SrcEpoch: 0, SrcVersion: "0.3.0", SrcRelease: "5.fc28", Modularitylabel: "", License: "MIT"}, {Name: "gawk", Epoch: 0, Version: "4.2.1", Release: "1.fc28", Arch: "x86_64", SrcName: "gawk", SrcEpoch: 0, SrcVersion: "4.2.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv3+ and GPLv2+ and LGPLv2+ and BSD"}, {Name: "libtirpc", Epoch: 0, Version: "1.0.3", Release: "3.rc2.fc28", Arch: "x86_64", SrcName: "libtirpc", SrcEpoch: 0, SrcVersion: "1.0.3", SrcRelease: "3.rc2.fc28", Modularitylabel: "", License: "SISSL and BSD"}, {Name: "python3-libs", Epoch: 0, Version: "3.6.6", Release: "1.fc28", Arch: "x86_64", SrcName: "python3", SrcEpoch: 0, SrcVersion: "3.6.6", SrcRelease: "1.fc28", Modularitylabel: "", License: "Python"}, {Name: "python3-setuptools", Epoch: 0, Version: "39.2.0", Release: "6.fc28", Arch: "noarch", SrcName: "python-setuptools", SrcEpoch: 0, SrcVersion: "39.2.0", SrcRelease: "6.fc28", Modularitylabel: "", License: "MIT"}, {Name: "libpwquality", Epoch: 0, Version: "1.4.0", Release: "7.fc28", Arch: "x86_64", SrcName: "libpwquality", SrcEpoch: 0, SrcVersion: "1.4.0", SrcRelease: "7.fc28", Modularitylabel: "", License: "BSD or GPLv2+"}, {Name: "util-linux", Epoch: 0, Version: "2.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2 and GPLv2+ and LGPLv2+ and BSD with advertising and Public Domain"}, {Name: "python3-libcomps", Epoch: 0, Version: "0.1.8", Release: "11.fc28", Arch: "x86_64", SrcName: "libcomps", SrcEpoch: 0, SrcVersion: "0.1.8", SrcRelease: "11.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "python3-six", Epoch: 0, Version: "1.11.0", Release: "3.fc28", Arch: "noarch", SrcName: 
"python-six", SrcEpoch: 0, SrcVersion: "1.11.0", SrcRelease: "3.fc28", Modularitylabel: "", License: "MIT"}, {Name: "cyrus-sasl-lib", Epoch: 0, Version: "2.1.27", Release: "0.2rc7.fc28", Arch: "x86_64", SrcName: "cyrus-sasl", SrcEpoch: 0, SrcVersion: "2.1.27", SrcRelease: "0.2rc7.fc28", Modularitylabel: "", License: "BSD with advertising"}, {Name: "libssh", Epoch: 0, Version: "0.8.2", Release: "1.fc28", Arch: "x86_64", SrcName: "libssh", SrcEpoch: 0, SrcVersion: "0.8.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "qrencode-libs", Epoch: 0, Version: "3.4.4", Release: "5.fc28", Arch: "x86_64", SrcName: "qrencode", SrcEpoch: 0, SrcVersion: "3.4.4", SrcRelease: "5.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gnupg2", Epoch: 0, Version: "2.2.8", Release: "1.fc28", Arch: "x86_64", SrcName: "gnupg2", SrcEpoch: 0, SrcVersion: "2.2.8", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv3+"}, {Name: "python3-gpg", Epoch: 0, Version: "1.10.0", Release: "4.fc28", Arch: "x86_64", SrcName: "gpgme", SrcEpoch: 0, SrcVersion: "1.10.0", SrcRelease: "4.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libargon2", Epoch: 0, Version: "20161029", Release: "5.fc28", Arch: "x86_64", SrcName: "argon2", SrcEpoch: 0, SrcVersion: "20161029", SrcRelease: "5.fc28", Modularitylabel: "", License: "Public Domain or ASL 2.0"}, {Name: "libmodulemd", Epoch: 0, Version: "1.6.2", Release: "2.fc28", Arch: "x86_64", SrcName: "libmodulemd", SrcEpoch: 0, SrcVersion: "1.6.2", SrcRelease: "2.fc28", Modularitylabel: "", License: "MIT"}, {Name: "pkgconf", Epoch: 0, Version: "1.4.2", Release: "1.fc28", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.fc28", Modularitylabel: "", License: "ISC"}, {Name: "libpcap", Epoch: 14, Version: "1.9.0", Release: "1.fc28", Arch: "x86_64", SrcName: "libpcap", SrcEpoch: 14, SrcVersion: "1.9.0", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD with advertising"}, {Name: "device-mapper", Epoch: 0, Version: "1.02.146", Release: "5.fc28", Arch: "x86_64", SrcName: "lvm2", SrcEpoch: 0, SrcVersion: "2.02.177", SrcRelease: "5.fc28", Modularitylabel: "", License: "GPLv2"}, {Name: "cryptsetup-libs", Epoch: 0, Version: "2.0.4", Release: "1.fc28", Arch: "x86_64", SrcName: "cryptsetup", SrcEpoch: 0, SrcVersion: "2.0.4", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "elfutils-libs", Epoch: 0, Version: "0.173", Release: "1.fc28", Arch: "x86_64", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.173", SrcRelease: "1.fc28", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "dbus", Epoch: 1, Version: "1.12.10", Release: "1.fc28", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.10", SrcRelease: "1.fc28", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "libnghttp2", Epoch: 0, Version: "1.32.1", Release: "1.fc28", Arch: "x86_64", SrcName: "nghttp2", SrcEpoch: 0, SrcVersion: "1.32.1", SrcRelease: "1.fc28", Modularitylabel: "", License: "MIT"}, {Name: "librepo", Epoch: 0, Version: "1.8.1", Release: "7.fc28", Arch: "x86_64", SrcName: "librepo", SrcEpoch: 0, SrcVersion: "1.8.1", SrcRelease: "7.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "curl", Epoch: 0, Version: "7.59.0", Release: "6.fc28", Arch: "x86_64", SrcName: "curl", SrcEpoch: 0, SrcVersion: "7.59.0", SrcRelease: "6.fc28", Modularitylabel: "", License: "MIT"}, {Name: "rpm-libs", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: 
"4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ with exceptions"}, {Name: "libsolv", Epoch: 0, Version: "0.6.35", Release: "1.fc28", Arch: "x86_64", SrcName: "libsolv", SrcEpoch: 0, SrcVersion: "0.6.35", SrcRelease: "1.fc28", Modularitylabel: "", License: "BSD"}, {Name: "python3-hawkey", Epoch: 0, Version: "0.11.1", Release: "3.fc28", Arch: "x86_64", SrcName: "libdnf", SrcEpoch: 0, SrcVersion: "0.11.1", SrcRelease: "3.fc28", Modularitylabel: "", License: "LGPLv2+"}, {Name: "rpm-sign-libs", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ with exceptions"}, {Name: "python3-dnf", Epoch: 0, Version: "2.7.5", Release: "12.fc28", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "2.7.5", SrcRelease: "12.fc28", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "dnf-yum", Epoch: 0, Version: "2.7.5", Release: "12.fc28", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "2.7.5", SrcRelease: "12.fc28", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "rpm-plugin-systemd-inhibit", Epoch: 0, Version: "4.14.1", Release: "9.fc28", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.1", SrcRelease: "9.fc28", Modularitylabel: "", License: "GPLv2+"}, {Name: "nss-tools", Epoch: 0, Version: "3.38.0", Release: "1.0.fc28", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.38.0", SrcRelease: "1.0.fc28", Modularitylabel: "", License: "MPLv2.0"}, {Name: "openssl-pkcs11", Epoch: 0, Version: "0.4.8", Release: "1.fc28", Arch: "x86_64", SrcName: "openssl-pkcs11", SrcEpoch: 0, SrcVersion: "0.4.8", SrcRelease: "1.fc28", Modularitylabel: "", License: "LGPLv2+ and BSD"}, {Name: "vim-minimal", Epoch: 2, Version: "8.1.328", Release: "1.fc28", Arch: "x86_64", SrcName: "vim", SrcEpoch: 2, SrcVersion: "8.1.328", SrcRelease: "1.fc28", Modularitylabel: "", License: "Vim and MIT"}, {Name: "glibc-langpack-en", Epoch: 0, Version: "2.27", Release: "32.fc28", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.27", SrcRelease: "32.fc28", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "rootfiles", Epoch: 0, Version: "8.1", Release: "22.fc28", Arch: "noarch", SrcName: "rootfiles", SrcEpoch: 0, SrcVersion: "8.1", SrcRelease: "22.fc28", Modularitylabel: "", License: "Public Domain"}, }, }, "ValidWithModularitylabel": { path: "./testdata/valid_with_modularitylabel", // docker run --name centos -it --rm centos:8 /bin/bash // docker cp ./testdata/valid_with_modularitylabel centos:/tmp/Packages // // $ cat rpmqa.py // #!/bin/python3 // // import rpm // // def splitFilename(filename): // # sourcerpm spec: https://github.com/rpm-software-management/dnf/blob/4.2.23/dnf/package.py#L116-L120 // srcname = rtrim(filename, ".src.rpm") // sname, sversion, srelease = srcname.rsplit('-', 2) // return sname, sversion, srelease // // # ref. 
https://github.com/rpm-software-management/dnf/blob/4.2.23/dnf/util.py#L122 // def rtrim(s, r): // if s.endswith(r): // s = s[:-len(r)] // return s // // def license_format(s): // return s.replace(" and ",",").replace(" or ",",") // // rpm.addMacro('_dbpath', '/tmp/') // ts = rpm.TransactionSet() // mi = ts.dbMatch() // for h in mi: // sname = sversion = srelease = "" // if h[rpm.RPMTAG_SOURCERPM] != "(none)": // sname, sversion, srelease = splitFilename(h[rpm.RPMTAG_SOURCERPM]) // // mlabel = h[rpm.RPMTAG_MODULARITYLABEL] if h[rpm.RPMTAG_MODULARITYLABEL] is not None else "" // print("{{Name: \"{0}\", Epoch: {1}, Version: \"{2}\", Release: \"{3}\", Arch: \"{4}\", SrcName: \"{5}\", SrcEpoch: {6}, SrcVersion: \"{7}\", SrcRelease: \"{8}\", Modularitylabel: \"{9}\",License: \"{10}\"}},".format(h[rpm.RPMTAG_NAME], h[rpm.RPMTAG_EPOCHNUM], h[rpm.RPMTAG_VERSION], h[rpm.RPMTAG_RELEASE], h[rpm.RPMTAG_ARCH], sname, h[rpm.RPMTAG_EPOCHNUM], sversion, srelease, mlabel,license_format(h[rpm.RPMTAG_LICENSE]))) pkgs: []types.Package{ {Name: "perl-podlators", Epoch: 0, Version: "4.11", Release: "1.el8", Arch: "noarch", SrcName: "perl-podlators", SrcEpoch: 0, SrcVersion: "4.11", SrcRelease: "1.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and FSFAP"}, {Name: "python3-setuptools-wheel", Epoch: 0, Version: "39.2.0", Release: "5.el8", Arch: "noarch", SrcName: "python-setuptools", SrcEpoch: 0, SrcVersion: "39.2.0", SrcRelease: "5.el8", Modularitylabel: "", License: "MIT"}, {Name: "perl-Pod-Perldoc", Epoch: 0, Version: "3.28", Release: "396.el8", Arch: "noarch", SrcName: "perl-Pod-Perldoc", SrcEpoch: 0, SrcVersion: "3.28", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-IO-Socket-SSL", Epoch: 0, Version: "2.066", Release: "4.el8", Arch: "noarch", SrcName: "perl-IO-Socket-SSL", SrcEpoch: 0, SrcVersion: "2.066", SrcRelease: "4.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and MPLv2.0"}, {Name: "perl-URI", Epoch: 0, Version: "1.73", Release: "3.el8", Arch: "noarch", SrcName: "perl-URI", SrcEpoch: 0, SrcVersion: "1.73", SrcRelease: "3.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "filesystem", Epoch: 0, Version: "3.8", Release: "2.el8", Arch: "x86_64", SrcName: "filesystem", SrcEpoch: 0, SrcVersion: "3.8", SrcRelease: "2.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "emacs-filesystem", Epoch: 1, Version: "26.1", Release: "5.el8", Arch: "noarch", SrcName: "emacs", SrcEpoch: 1, SrcVersion: "26.1", SrcRelease: "5.el8", Modularitylabel: "", License: "GPLv3+ and CC0-1.0"}, {Name: "git", Epoch: 0, Version: "2.18.4", Release: "2.el8_2", Arch: "x86_64", SrcName: "git", SrcEpoch: 0, SrcVersion: "2.18.4", SrcRelease: "2.el8_2", Modularitylabel: "", License: "GPLv2"}, {Name: "pcre2", Epoch: 0, Version: "10.32", Release: "1.el8", Arch: "x86_64", SrcName: "pcre2", SrcEpoch: 0, SrcVersion: "10.32", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD"}, {Name: "vim-common", Epoch: 2, Version: "8.0.1763", Release: "13.el8", Arch: "x86_64", SrcName: "vim", SrcEpoch: 2, SrcVersion: "8.0.1763", SrcRelease: "13.el8", Modularitylabel: "", License: "Vim and MIT"}, {Name: "ncurses-libs", Epoch: 0, Version: "6.1", Release: "7.20180224.el8", Arch: "x86_64", SrcName: "ncurses", SrcEpoch: 0, SrcVersion: "6.1", SrcRelease: "7.20180224.el8", Modularitylabel: "", License: "MIT"}, {Name: "vim-enhanced", Epoch: 2, Version: "8.0.1763", Release: "13.el8", Arch: "x86_64", SrcName: "vim", SrcEpoch: 2, SrcVersion: "8.0.1763", SrcRelease: "13.el8", 
Modularitylabel: "", License: "Vim and MIT"}, {Name: "glibc-common", Epoch: 0, Version: "2.28", Release: "101.el8", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.28", SrcRelease: "101.el8", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "openssl-devel", Epoch: 1, Version: "1.1.1c", Release: "15.el8", Arch: "x86_64", SrcName: "openssl", SrcEpoch: 1, SrcVersion: "1.1.1c", SrcRelease: "15.el8", Modularitylabel: "", License: "OpenSSL"}, {Name: "bash", Epoch: 0, Version: "4.4.19", Release: "10.el8", Arch: "x86_64", SrcName: "bash", SrcEpoch: 0, SrcVersion: "4.4.19", SrcRelease: "10.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "popt-devel", Epoch: 0, Version: "1.16", Release: "14.el8", Arch: "x86_64", SrcName: "popt", SrcEpoch: 0, SrcVersion: "1.16", SrcRelease: "14.el8", Modularitylabel: "", License: "MIT"}, {Name: "libarchive-devel", Epoch: 0, Version: "3.3.2", Release: "8.el8_1", Arch: "x86_64", SrcName: "libarchive", SrcEpoch: 0, SrcVersion: "3.3.2", SrcRelease: "8.el8_1", Modularitylabel: "", License: "BSD"}, {Name: "bzip2-libs", Epoch: 0, Version: "1.0.6", Release: "26.el8", Arch: "x86_64", SrcName: "bzip2", SrcEpoch: 0, SrcVersion: "1.0.6", SrcRelease: "26.el8", Modularitylabel: "", License: "BSD"}, {Name: "xz-lzma-compat", Epoch: 0, Version: "5.2.4", Release: "3.el8", Arch: "x86_64", SrcName: "xz", SrcEpoch: 0, SrcVersion: "5.2.4", SrcRelease: "3.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "libgpg-error", Epoch: 0, Version: "1.31", Release: "1.el8", Arch: "x86_64", SrcName: "libgpg-error", SrcEpoch: 0, SrcVersion: "1.31", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libdb-devel", Epoch: 0, Version: "5.3.28", Release: "37.el8", Arch: "x86_64", SrcName: "libdb", SrcEpoch: 0, SrcVersion: "5.3.28", SrcRelease: "37.el8", Modularitylabel: "", License: "BSD and LGPLv2 and Sleepycat"}, {Name: "elfutils-libelf", Epoch: 0, Version: "0.178", Release: "7.el8", Arch: "x86_64", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.178", SrcRelease: "7.el8", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "libgomp", Epoch: 0, Version: "8.3.1", Release: "5.el8.0.2", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: "8.3.1", SrcRelease: "5.el8.0.2", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "libxcrypt", Epoch: 0, Version: "4.1.1", Release: "4.el8", Arch: "x86_64", SrcName: "libxcrypt", SrcEpoch: 0, SrcVersion: "4.1.1", SrcRelease: "4.el8", Modularitylabel: "", License: "LGPLv2+ and BSD and Public Domain"}, {Name: "gettext-libs", Epoch: 0, Version: "0.19.8.1", Release: "17.el8", Arch: "x86_64", SrcName: "gettext", SrcEpoch: 0, SrcVersion: "0.19.8.1", SrcRelease: "17.el8", Modularitylabel: "", License: "LGPLv2+ and GPLv3+"}, {Name: "sqlite-libs", Epoch: 0, Version: "3.26.0", Release: "6.el8", Arch: "x86_64", SrcName: "sqlite", SrcEpoch: 0, SrcVersion: "3.26.0", SrcRelease: "6.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "cpp", Epoch: 0, Version: "8.3.1", Release: "5.el8.0.2", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: "8.3.1", SrcRelease: "5.el8.0.2", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "libstdc++", Epoch: 0, Version: "8.3.1", Release: "5.el8.0.2", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: 
"8.3.1", SrcRelease: "5.el8.0.2", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "m4", Epoch: 0, Version: "1.4.18", Release: "7.el8", Arch: "x86_64", SrcName: "m4", SrcEpoch: 0, SrcVersion: "1.4.18", SrcRelease: "7.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "popt", Epoch: 0, Version: "1.16", Release: "14.el8", Arch: "x86_64", SrcName: "popt", SrcEpoch: 0, SrcVersion: "1.16", SrcRelease: "14.el8", Modularitylabel: "", License: "MIT"}, {Name: "libgpg-error-devel", Epoch: 0, Version: "1.31", Release: "1.el8", Arch: "x86_64", SrcName: "libgpg-error", SrcEpoch: 0, SrcVersion: "1.31", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "readline", Epoch: 0, Version: "7.0", Release: "10.el8", Arch: "x86_64", SrcName: "readline", SrcEpoch: 0, SrcVersion: "7.0", SrcRelease: "10.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "glibc-headers", Epoch: 0, Version: "2.28", Release: "101.el8", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.28", SrcRelease: "101.el8", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "json-c", Epoch: 0, Version: "0.13.1", Release: "0.2.el8", Arch: "x86_64", SrcName: "json-c", SrcEpoch: 0, SrcVersion: "0.13.1", SrcRelease: "0.2.el8", Modularitylabel: "", License: "MIT"}, {Name: "glibc-devel", Epoch: 0, Version: "2.28", Release: "101.el8", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.28", SrcRelease: "101.el8", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "libacl", Epoch: 0, Version: "2.2.53", Release: "1.el8", Arch: "x86_64", SrcName: "acl", SrcEpoch: 0, SrcVersion: "2.2.53", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "perl-Thread-Queue", Epoch: 0, Version: "3.13", Release: "1.el8", Arch: "noarch", SrcName: "perl-Thread-Queue", SrcEpoch: 0, SrcVersion: "3.13", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "sed", Epoch: 0, Version: "4.5", Release: "1.el8", Arch: "x86_64", SrcName: "sed", SrcEpoch: 0, SrcVersion: "4.5", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "isl", Epoch: 0, Version: "0.16.1", Release: "6.el8", Arch: "x86_64", SrcName: "isl", SrcEpoch: 0, SrcVersion: "0.16.1", SrcRelease: "6.el8", Modularitylabel: "", License: "MIT"}, {Name: "libmount", Epoch: 0, Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libtool", Epoch: 0, Version: "2.4.6", Release: "25.el8", Arch: "x86_64", SrcName: "libtool", SrcEpoch: 0, SrcVersion: "2.4.6", SrcRelease: "25.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ and GFDL"}, {Name: "audit-libs", Epoch: 0, Version: "3.0", Release: "0.17.20191104git1c2f876.el8", Arch: "x86_64", SrcName: "audit", SrcEpoch: 0, SrcVersion: "3.0", SrcRelease: "0.17.20191104git1c2f876.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libgcrypt-devel", Epoch: 0, Version: "1.8.3", Release: "4.el8", Arch: "x86_64", SrcName: "libgcrypt", SrcEpoch: 0, SrcVersion: "1.8.3", SrcRelease: "4.el8", Modularitylabel: "", License: "LGPLv2+ and GPLv2+"}, {Name: "libsmartcols", Epoch: 0, Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: 
"util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "nodejs-full-i18n", Epoch: 1, Version: "10.21.0", Release: "3.module_el8.2.0+391+8da3adc6", Arch: "x86_64", SrcName: "nodejs", SrcEpoch: 1, SrcVersion: "10.21.0", SrcRelease: "3.module_el8.2.0+391+8da3adc6", Modularitylabel: "nodejs:10:8020020200707141642:6a468ee4", License: "MIT and ASL 2.0 and ISC and BSD"}, {Name: "lua-libs", Epoch: 0, Version: "5.3.4", Release: "11.el8", Arch: "x86_64", SrcName: "lua", SrcEpoch: 0, SrcVersion: "5.3.4", SrcRelease: "11.el8", Modularitylabel: "", License: "MIT"}, {Name: "nodejs", Epoch: 1, Version: "10.21.0", Release: "3.module_el8.2.0+391+8da3adc6", Arch: "x86_64", SrcName: "nodejs", SrcEpoch: 1, SrcVersion: "10.21.0", SrcRelease: "3.module_el8.2.0+391+8da3adc6", Modularitylabel: "nodejs:10:8020020200707141642:6a468ee4", License: "MIT and ASL 2.0 and ISC and BSD"}, {Name: "p11-kit", Epoch: 0, Version: "0.23.14", Release: "5.el8_0", Arch: "x86_64", SrcName: "p11-kit", SrcEpoch: 0, SrcVersion: "0.23.14", SrcRelease: "5.el8_0", Modularitylabel: "", License: "BSD"}, {Name: "libbabeltrace", Epoch: 0, Version: "1.5.4", Release: "3.el8", Arch: "x86_64", SrcName: "babeltrace", SrcEpoch: 0, SrcVersion: "1.5.4", SrcRelease: "3.el8", Modularitylabel: "", License: "MIT and GPLv2"}, {Name: "gzip", Epoch: 0, Version: "1.9", Release: "9.el8", Arch: "x86_64", SrcName: "gzip", SrcEpoch: 0, SrcVersion: "1.9", SrcRelease: "9.el8", Modularitylabel: "", License: "GPLv3+ and GFDL"}, {Name: "libatomic_ops", Epoch: 0, Version: "7.6.2", Release: "3.el8", Arch: "x86_64", SrcName: "libatomic_ops", SrcEpoch: 0, SrcVersion: "7.6.2", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv2 and MIT"}, {Name: "libunistring", Epoch: 0, Version: "0.9.9", Release: "3.el8", Arch: "x86_64", SrcName: "libunistring", SrcEpoch: 0, SrcVersion: "0.9.9", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "guile", Epoch: 5, Version: "2.0.14", Release: "7.el8", Arch: "x86_64", SrcName: "guile", SrcEpoch: 5, SrcVersion: "2.0.14", SrcRelease: "7.el8", Modularitylabel: "", License: "LGPLv3+"}, {Name: "libassuan", Epoch: 0, Version: "2.5.1", Release: "3.el8", Arch: "x86_64", SrcName: "libassuan", SrcEpoch: 0, SrcVersion: "2.5.1", SrcRelease: "3.el8", Modularitylabel: "", License: "LGPLv2+ and GPLv3+"}, {Name: "gdb", Epoch: 0, Version: "8.2", Release: "12.el8", Arch: "x86_64", SrcName: "gdb", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "12.el8", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ and GPLv2+ with exceptions and GPL+ and LGPLv2+ and LGPLv3+ and BSD and Public Domain and GFDL"}, {Name: "gdbm-libs", Epoch: 1, Version: "1.18", Release: "1.el8", Arch: "x86_64", SrcName: "gdbm", SrcEpoch: 1, SrcVersion: "1.18", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "platform-python-setuptools", Epoch: 0, Version: "39.2.0", Release: "6.el8", Arch: "noarch", SrcName: "python-setuptools", SrcEpoch: 0, SrcVersion: "39.2.0", SrcRelease: "6.el8", Modularitylabel: "", License: "MIT"}, {Name: "libtasn1", Epoch: 0, Version: "4.13", Release: "3.el8", Arch: "x86_64", SrcName: "libtasn1", SrcEpoch: 0, SrcVersion: "4.13", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv3+ and LGPLv2+"}, {Name: "python3-setuptools", Epoch: 0, Version: "39.2.0", Release: "6.el8", Arch: "noarch", SrcName: "python-setuptools", SrcEpoch: 0, SrcVersion: "39.2.0", SrcRelease: "6.el8", Modularitylabel: "", License: "MIT"}, {Name: "lzo", 
Epoch: 0, Version: "2.08", Release: "14.el8", Arch: "x86_64", SrcName: "lzo", SrcEpoch: 0, SrcVersion: "2.08", SrcRelease: "14.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "python3-pip", Epoch: 0, Version: "9.0.3", Release: "18.el8", Arch: "noarch", SrcName: "python-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "18.el8", Modularitylabel: "", License: "MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD)"}, {Name: "grep", Epoch: 0, Version: "3.1", Release: "6.el8", Arch: "x86_64", SrcName: "grep", SrcEpoch: 0, SrcVersion: "3.1", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "python2-pip-wheel", Epoch: 0, Version: "9.0.3", Release: "18.module_el8.3.0+478+7570e00c", Arch: "noarch", SrcName: "python2-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "18.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD)"}, {Name: "dbus-libs", Epoch: 1, Version: "1.12.8", Release: "10.el8_2", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.8", SrcRelease: "10.el8_2", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "python2-pip", Epoch: 0, Version: "9.0.3", Release: "18.module_el8.3.0+478+7570e00c", Arch: "noarch", SrcName: "python2-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "18.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD)"}, {Name: "dhcp-libs", Epoch: 12, Version: "4.3.6", Release: "40.el8", Arch: "x86_64", SrcName: "dhcp", SrcEpoch: 12, SrcVersion: "4.3.6", SrcRelease: "40.el8", Modularitylabel: "", License: "ISC"}, {Name: "python2", Epoch: 0, Version: "2.7.17", Release: "2.module_el8.3.0+478+7570e00c", Arch: "x86_64", SrcName: "python2", SrcEpoch: 0, SrcVersion: "2.7.17", SrcRelease: "2.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "Python"}, {Name: "procps-ng", Epoch: 0, Version: "3.3.15", Release: "1.el8", Arch: "x86_64", SrcName: "procps-ng", SrcEpoch: 0, SrcVersion: "3.3.15", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ and GPLv2 and GPLv2+ and GPLv3+ and LGPLv2+"}, {Name: "python2-rpmUtils", Epoch: 0, Version: "0.1", Release: "1.el8", Arch: "noarch", SrcName: "python-rpmUtils", SrcEpoch: 0, SrcVersion: "0.1", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "xz", Epoch: 0, Version: "5.2.4", Release: "3.el8", Arch: "x86_64", SrcName: "xz", SrcEpoch: 0, SrcVersion: "5.2.4", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv2+ and Public Domain"}, {Name: "rpm", Epoch: 0, Version: "4.14.3", Release: "4.el8", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.3", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "gdbm", Epoch: 1, Version: "1.18", Release: "1.el8", Arch: "x86_64", SrcName: "gdbm", SrcEpoch: 1, SrcVersion: "1.18", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "python3-rpm", Epoch: 0, Version: "4.14.3", Release: "4.el8", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.3", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "shadow-utils", Epoch: 2, Version: "4.6", Release: "8.el8", Arch: "x86_64", SrcName: "shadow-utils", SrcEpoch: 2, SrcVersion: "4.6", SrcRelease: "8.el8", Modularitylabel: "", License: "BSD and GPLv2+"}, 
{Name: "libfdisk", Epoch: 0, Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "mpfr", Epoch: 0, Version: "3.1.6", Release: "1.el8", Arch: "x86_64", SrcName: "mpfr", SrcEpoch: 0, SrcVersion: "3.1.6", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv3+ and GPLv3+ and GFDL"}, {Name: "snappy", Epoch: 0, Version: "1.1.7", Release: "5.el8", Arch: "x86_64", SrcName: "snappy", SrcEpoch: 0, SrcVersion: "1.1.7", SrcRelease: "5.el8", Modularitylabel: "", License: "BSD"}, {Name: "libmetalink", Epoch: 0, Version: "0.1.3", Release: "7.el8", Arch: "x86_64", SrcName: "libmetalink", SrcEpoch: 0, SrcVersion: "0.1.3", SrcRelease: "7.el8", Modularitylabel: "", License: "MIT"}, {Name: "libksba", Epoch: 0, Version: "1.3.5", Release: "7.el8", Arch: "x86_64", SrcName: "libksba", SrcEpoch: 0, SrcVersion: "1.3.5", SrcRelease: "7.el8", Modularitylabel: "", License: "(LGPLv3+ or GPLv2+) and GPLv3+"}, {Name: "ethtool", Epoch: 2, Version: "5.0", Release: "2.el8", Arch: "x86_64", SrcName: "ethtool", SrcEpoch: 2, SrcVersion: "5.0", SrcRelease: "2.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "libmnl", Epoch: 0, Version: "1.0.4", Release: "6.el8", Arch: "x86_64", SrcName: "libmnl", SrcEpoch: 0, SrcVersion: "1.0.4", SrcRelease: "6.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libpcap", Epoch: 14, Version: "1.9.0", Release: "3.el8", Arch: "x86_64", SrcName: "libpcap", SrcEpoch: 14, SrcVersion: "1.9.0", SrcRelease: "3.el8", Modularitylabel: "", License: "BSD with advertising"}, {Name: "libseccomp", Epoch: 0, Version: "2.4.1", Release: "1.el8", Arch: "x86_64", SrcName: "libseccomp", SrcEpoch: 0, SrcVersion: "2.4.1", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2"}, {Name: "gawk", Epoch: 0, Version: "4.2.1", Release: "1.el8", Arch: "x86_64", SrcName: "gawk", SrcEpoch: 0, SrcVersion: "4.2.1", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+ and GPLv2+ and LGPLv2+ and BSD"}, {Name: "libnsl2", Epoch: 0, Version: "1.2.0", Release: "2.20180605git4a062cf.el8", Arch: "x86_64", SrcName: "libnsl2", SrcEpoch: 0, SrcVersion: "1.2.0", SrcRelease: "2.20180605git4a062cf.el8", Modularitylabel: "", License: "BSD and LGPLv2+"}, {Name: "krb5-libs", Epoch: 0, Version: "1.17", Release: "18.el8", Arch: "x86_64", SrcName: "krb5", SrcEpoch: 0, SrcVersion: "1.17", SrcRelease: "18.el8", Modularitylabel: "", License: "MIT"}, {Name: "crypto-policies", Epoch: 0, Version: "20191128", Release: "2.git23e1bf1.el8", Arch: "noarch", SrcName: "crypto-policies", SrcEpoch: 0, SrcVersion: "20191128", SrcRelease: "2.git23e1bf1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "platform-python", Epoch: 0, Version: "3.6.8", Release: "23.el8", Arch: "x86_64", SrcName: "python3", SrcEpoch: 0, SrcVersion: "3.6.8", SrcRelease: "23.el8", Modularitylabel: "", License: "Python"}, {Name: "libdb", Epoch: 0, Version: "5.3.28", Release: "37.el8", Arch: "x86_64", SrcName: "libdb", SrcEpoch: 0, SrcVersion: "5.3.28", SrcRelease: "37.el8", Modularitylabel: "", License: "BSD and LGPLv2 and Sleepycat"}, {Name: "pam", Epoch: 0, Version: "1.3.1", Release: "8.el8", Arch: "x86_64", SrcName: "pam", SrcEpoch: 0, SrcVersion: "1.3.1", SrcRelease: "8.el8", Modularitylabel: "", License: "BSD and GPLv2+"}, {Name: "gnutls", Epoch: 0, Version: "3.6.8", Release: "11.el8_2", Arch: "x86_64", SrcName: "gnutls", SrcEpoch: 0, SrcVersion: "3.6.8", SrcRelease: "11.el8_2", Modularitylabel: "", License: "GPLv3+ and 
LGPLv2+"}, {Name: "kmod-libs", Epoch: 0, Version: "25", Release: "16.el8", Arch: "x86_64", SrcName: "kmod", SrcEpoch: 0, SrcVersion: "25", SrcRelease: "16.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "ima-evm-utils", Epoch: 0, Version: "1.1", Release: "5.el8", Arch: "x86_64", SrcName: "ima-evm-utils", SrcEpoch: 0, SrcVersion: "1.1", SrcRelease: "5.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "libcurl-minimal", Epoch: 0, Version: "7.61.1", Release: "12.el8", Arch: "x86_64", SrcName: "curl", SrcEpoch: 0, SrcVersion: "7.61.1", SrcRelease: "12.el8", Modularitylabel: "", License: "MIT"}, {Name: "cyrus-sasl-lib", Epoch: 0, Version: "2.1.27", Release: "1.el8", Arch: "x86_64", SrcName: "cyrus-sasl", SrcEpoch: 0, SrcVersion: "2.1.27", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD with advertising"}, {Name: "libdb-utils", Epoch: 0, Version: "5.3.28", Release: "37.el8", Arch: "x86_64", SrcName: "libdb", SrcEpoch: 0, SrcVersion: "5.3.28", SrcRelease: "37.el8", Modularitylabel: "", License: "BSD and LGPLv2 and Sleepycat"}, {Name: "libsolv", Epoch: 0, Version: "0.7.7", Release: "1.el8", Arch: "x86_64", SrcName: "libsolv", SrcEpoch: 0, SrcVersion: "0.7.7", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD"}, {Name: "libmodulemd1", Epoch: 0, Version: "1.8.16", Release: "0.2.8.2.1", Arch: "x86_64", SrcName: "libmodulemd", SrcEpoch: 0, SrcVersion: "2.8.2", SrcRelease: "1.el8", Modularitylabel: "", License: "MIT"}, {Name: "gnupg2", Epoch: 0, Version: "2.2.9", Release: "1.el8", Arch: "x86_64", SrcName: "gnupg2", SrcEpoch: 0, SrcVersion: "2.2.9", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "python3-libdnf", Epoch: 0, Version: "0.39.1", Release: "6.el8_2", Arch: "x86_64", SrcName: "libdnf", SrcEpoch: 0, SrcVersion: "0.39.1", SrcRelease: "6.el8_2", Modularitylabel: "", License: "LGPLv2+"}, {Name: "python3-gpg", Epoch: 0, Version: "1.10.0", Release: "6.el8.0.1", Arch: "x86_64", SrcName: "gpgme", SrcEpoch: 0, SrcVersion: "1.10.0", SrcRelease: "6.el8.0.1", Modularitylabel: "", License: "LGPLv2+"}, {Name: "dnf-data", Epoch: 0, Version: "4.2.17", Release: "7.el8_2", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "4.2.17", SrcRelease: "7.el8_2", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "dbus-common", Epoch: 1, Version: "1.12.8", Release: "10.el8_2", Arch: "noarch", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.8", SrcRelease: "10.el8_2", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "device-mapper", Epoch: 8, Version: "1.02.169", Release: "3.el8", Arch: "x86_64", SrcName: "lvm2", SrcEpoch: 8, SrcVersion: "2.03.08", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "cryptsetup-libs", Epoch: 0, Version: "2.2.2", Release: "1.el8", Arch: "x86_64", SrcName: "cryptsetup", SrcEpoch: 0, SrcVersion: "2.2.2", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "elfutils-libs", Epoch: 0, Version: "0.178", Release: "7.el8", Arch: "x86_64", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.178", SrcRelease: "7.el8", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "systemd", Epoch: 0, Version: "239", Release: "31.el8_2.2", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "239", SrcRelease: "31.el8_2.2", Modularitylabel: "", License: "LGPLv2+ and MIT and GPLv2+"}, {Name: "iputils", Epoch: 0, Version: "20180629", Release: "2.el8", Arch: "x86_64", SrcName: "iputils", SrcEpoch: 0, SrcVersion: "20180629", SrcRelease: "2.el8", Modularitylabel: "", 
License: "BSD and GPLv2+"}, {Name: "libkcapi", Epoch: 0, Version: "1.1.1", Release: "16_1.el8", Arch: "x86_64", SrcName: "libkcapi", SrcEpoch: 0, SrcVersion: "1.1.1", SrcRelease: "16_1.el8", Modularitylabel: "", License: "BSD or GPLv2"}, {Name: "systemd-udev", Epoch: 0, Version: "239", Release: "31.el8_2.2", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "239", SrcRelease: "31.el8_2.2", Modularitylabel: "", License: "LGPLv2+"}, {Name: "dracut-network", Epoch: 0, Version: "049", Release: "70.git20200228.el8", Arch: "x86_64", SrcName: "dracut", SrcEpoch: 0, SrcVersion: "049", SrcRelease: "70.git20200228.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "python3-dnf", Epoch: 0, Version: "4.2.17", Release: "7.el8_2", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "4.2.17", SrcRelease: "7.el8_2", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "yum", Epoch: 0, Version: "4.2.17", Release: "7.el8_2", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "4.2.17", SrcRelease: "7.el8_2", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "binutils", Epoch: 0, Version: "2.30", Release: "73.el8", Arch: "x86_64", SrcName: "binutils", SrcEpoch: 0, SrcVersion: "2.30", SrcRelease: "73.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "vim-minimal", Epoch: 2, Version: "8.0.1763", Release: "13.el8", Arch: "x86_64", SrcName: "vim", SrcEpoch: 2, SrcVersion: "8.0.1763", SrcRelease: "13.el8", Modularitylabel: "", License: "Vim and MIT"}, {Name: "less", Epoch: 0, Version: "530", Release: "1.el8", Arch: "x86_64", SrcName: "less", SrcEpoch: 0, SrcVersion: "530", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv3+ or BSD"}, {Name: "rootfiles", Epoch: 0, Version: "8.1", Release: "22.el8", Arch: "noarch", SrcName: "rootfiles", SrcEpoch: 0, SrcVersion: "8.1", SrcRelease: "22.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "centos-gpg-keys", Epoch: 0, Version: "8.2", Release: "2.2004.0.2.el8", Arch: "noarch", SrcName: "centos-release", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "2.2004.0.2.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "centos-repos", Epoch: 0, Version: "8.2", Release: "2.2004.0.2.el8", Arch: "x86_64", SrcName: "centos-release", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "2.2004.0.2.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "tzdata", Epoch: 0, Version: "2020d", Release: "1.el8", Arch: "noarch", SrcName: "tzdata", SrcEpoch: 0, SrcVersion: "2020d", SrcRelease: "1.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "ca-certificates", Epoch: 0, Version: "2020.2.41", Release: "80.0.el8_2", Arch: "noarch", SrcName: "ca-certificates", SrcEpoch: 0, SrcVersion: "2020.2.41", SrcRelease: "80.0.el8_2", Modularitylabel: "", License: "Public Domain"}, {Name: "perl-Exporter", Epoch: 0, Version: "5.72", Release: "396.el8", Arch: "noarch", SrcName: "perl-Exporter", SrcEpoch: 0, SrcVersion: "5.72", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Carp", Epoch: 0, Version: "1.42", Release: "396.el8", Arch: "noarch", SrcName: "perl-Carp", SrcEpoch: 0, SrcVersion: "1.42", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-parent", Epoch: 1, Version: "0.237", Release: "1.el8", Arch: "noarch", SrcName: "perl-parent", SrcEpoch: 1, SrcVersion: "0.237", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "nss-util", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", 
SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-softokn", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-sysinit", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-softokn-freebl-devel", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "perl-macros", Epoch: 4, Version: "5.26.3", Release: "416.el8", Arch: "x86_64", SrcName: "perl", SrcEpoch: 4, SrcVersion: "5.26.3", SrcRelease: "416.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Socket", Epoch: 4, Version: "2.027", Release: "3.el8", Arch: "x86_64", SrcName: "perl-Socket", SrcEpoch: 4, SrcVersion: "2.027", SrcRelease: "3.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Unicode-Normalize", Epoch: 0, Version: "1.25", Release: "396.el8", Arch: "x86_64", SrcName: "perl-Unicode-Normalize", SrcEpoch: 0, SrcVersion: "1.25", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-IO", Epoch: 0, Version: "1.38", Release: "416.el8", Arch: "x86_64", SrcName: "perl", SrcEpoch: 0, SrcVersion: "5.26.3", SrcRelease: "416.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-constant", Epoch: 0, Version: "1.33", Release: "396.el8", Arch: "noarch", SrcName: "perl-constant", SrcEpoch: 0, SrcVersion: "1.33", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-threads-shared", Epoch: 0, Version: "1.58", Release: "2.el8", Arch: "x86_64", SrcName: "perl-threads-shared", SrcEpoch: 0, SrcVersion: "1.58", SrcRelease: "2.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-MIME-Base64", Epoch: 0, Version: "3.15", Release: "396.el8", Arch: "x86_64", SrcName: "perl-MIME-Base64", SrcEpoch: 0, SrcVersion: "3.15", SrcRelease: "396.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and MIT"}, {Name: "perl-Time-Local", Epoch: 1, Version: "1.280", Release: "1.el8", Arch: "noarch", SrcName: "perl-Time-Local", SrcEpoch: 1, SrcVersion: "1.280", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Digest", Epoch: 0, Version: "1.17", Release: "395.el8", Arch: "noarch", SrcName: "perl-Digest", SrcEpoch: 0, SrcVersion: "1.17", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Net-SSLeay", Epoch: 0, Version: "1.88", Release: "1.el8", Arch: "x86_64", SrcName: "perl-Net-SSLeay", SrcEpoch: 0, SrcVersion: "1.88", SrcRelease: "1.el8", Modularitylabel: "", License: "Artistic 2.0"}, {Name: "perl-TermReadKey", Epoch: 0, Version: "2.37", Release: "7.el8", Arch: "x86_64", SrcName: "perl-TermReadKey", SrcEpoch: 0, SrcVersion: "2.37", SrcRelease: "7.el8", Modularitylabel: "", License: "(Copyright only) and (Artistic or GPL+)"}, {Name: "perl-Pod-Escapes", Epoch: 1, Version: "1.07", Release: "395.el8", Arch: "noarch", SrcName: "perl-Pod-Escapes", SrcEpoch: 1, SrcVersion: "1.07", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Mozilla-CA", Epoch: 0, Version: "20160104", Release: "7.el8", Arch: "noarch", SrcName: "perl-Mozilla-CA", SrcEpoch: 0, SrcVersion: 
"20160104", SrcRelease: "7.el8", Modularitylabel: "", License: "MPLv2.0"}, {Name: "fipscheck", Epoch: 0, Version: "1.5.0", Release: "4.el8", Arch: "x86_64", SrcName: "fipscheck", SrcEpoch: 0, SrcVersion: "1.5.0", SrcRelease: "4.el8", Modularitylabel: "", License: "BSD"}, {Name: "which", Epoch: 0, Version: "2.21", Release: "12.el8", Arch: "x86_64", SrcName: "which", SrcEpoch: 0, SrcVersion: "2.21", SrcRelease: "12.el8", Modularitylabel: "", License: "GPLv3"}, {Name: "libpsl", Epoch: 0, Version: "0.20.2", Release: "5.el8", Arch: "x86_64", SrcName: "libpsl", SrcEpoch: 0, SrcVersion: "0.20.2", SrcRelease: "5.el8", Modularitylabel: "", License: "MIT"}, {Name: "pcre2-utf32", Epoch: 0, Version: "10.32", Release: "1.el8", Arch: "x86_64", SrcName: "pcre2", SrcEpoch: 0, SrcVersion: "10.32", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD"}, {Name: "openssl", Epoch: 1, Version: "1.1.1c", Release: "15.el8", Arch: "x86_64", SrcName: "openssl", SrcEpoch: 1, SrcVersion: "1.1.1c", SrcRelease: "15.el8", Modularitylabel: "", License: "OpenSSL"}, {Name: "perl-Term-Cap", Epoch: 0, Version: "1.17", Release: "395.el8", Arch: "noarch", SrcName: "perl-Term-Cap", SrcEpoch: 0, SrcVersion: "1.17", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "libpkgconf", Epoch: 0, Version: "1.4.2", Release: "1.el8", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.el8", Modularitylabel: "", License: "ISC"}, {Name: "pkgconf-pkg-config", Epoch: 0, Version: "1.4.2", Release: "1.el8", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.el8", Modularitylabel: "", License: "ISC"}, {Name: "nss-util-devel", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libcom_err-devel", Epoch: 0, Version: "1.45.4", Release: "3.el8", Arch: "x86_64", SrcName: "e2fsprogs", SrcEpoch: 0, SrcVersion: "1.45.4", SrcRelease: "3.el8", Modularitylabel: "", License: "MIT"}, {Name: "libverto-devel", Epoch: 0, Version: "0.3.0", Release: "5.el8", Arch: "x86_64", SrcName: "libverto", SrcEpoch: 0, SrcVersion: "0.3.0", SrcRelease: "5.el8", Modularitylabel: "", License: "MIT"}, {Name: "libselinux-devel", Epoch: 0, Version: "2.9", Release: "3.el8", Arch: "x86_64", SrcName: "libselinux", SrcEpoch: 0, SrcVersion: "2.9", SrcRelease: "3.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "libkadm5", Epoch: 0, Version: "1.17", Release: "18.el8", Arch: "x86_64", SrcName: "krb5", SrcEpoch: 0, SrcVersion: "1.17", SrcRelease: "18.el8", Modularitylabel: "", License: "MIT"}, {Name: "openssh-clients", Epoch: 0, Version: "8.0p1", Release: "4.el8_1", Arch: "x86_64", SrcName: "openssh", SrcEpoch: 0, SrcVersion: "8.0p1", SrcRelease: "4.el8_1", Modularitylabel: "", License: "BSD"}, {Name: "git-core-doc", Epoch: 0, Version: "2.18.4", Release: "2.el8_2", Arch: "noarch", SrcName: "git", SrcEpoch: 0, SrcVersion: "2.18.4", SrcRelease: "2.el8_2", Modularitylabel: "", License: "GPLv2"}, {Name: "krb5-devel", Epoch: 0, Version: "1.17", Release: "18.el8", Arch: "x86_64", SrcName: "krb5", SrcEpoch: 0, SrcVersion: "1.17", SrcRelease: "18.el8", Modularitylabel: "", License: "MIT"}, {Name: "perl-Encode", Epoch: 4, Version: "2.97", Release: "3.el8", Arch: "x86_64", SrcName: "perl-Encode", SrcEpoch: 4, SrcVersion: "2.97", SrcRelease: "3.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and Artistic 2.0 and UCD"}, {Name: "perl-Getopt-Long", 
Epoch: 1, Version: "2.50", Release: "4.el8", Arch: "noarch", SrcName: "perl-Getopt-Long", SrcEpoch: 1, SrcVersion: "2.50", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+ or Artistic"}, {Name: "libgcc", Epoch: 0, Version: "8.3.1", Release: "5.el8.0.2", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: "8.3.1", SrcRelease: "5.el8.0.2", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "perl-Pod-Usage", Epoch: 4, Version: "1.69", Release: "395.el8", Arch: "noarch", SrcName: "perl-Pod-Usage", SrcEpoch: 4, SrcVersion: "1.69", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "python3-pip-wheel", Epoch: 0, Version: "9.0.3", Release: "16.el8", Arch: "noarch", SrcName: "python-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "16.el8", Modularitylabel: "", License: "MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD)"}, {Name: "perl-HTTP-Tiny", Epoch: 0, Version: "0.074", Release: "1.el8", Arch: "noarch", SrcName: "perl-HTTP-Tiny", SrcEpoch: 0, SrcVersion: "0.074", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-libnet", Epoch: 0, Version: "3.11", Release: "3.el8", Arch: "noarch", SrcName: "perl-libnet", SrcEpoch: 0, SrcVersion: "3.11", SrcRelease: "3.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "setup", Epoch: 0, Version: "2.12.2", Release: "5.el8", Arch: "noarch", SrcName: "setup", SrcEpoch: 0, SrcVersion: "2.12.2", SrcRelease: "5.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "file", Epoch: 0, Version: "5.33", Release: "13.el8", Arch: "x86_64", SrcName: "file", SrcEpoch: 0, SrcVersion: "5.33", SrcRelease: "13.el8", Modularitylabel: "", License: "BSD"}, {Name: "basesystem", Epoch: 0, Version: "11", Release: "5.el8", Arch: "noarch", SrcName: "basesystem", SrcEpoch: 0, SrcVersion: "11", SrcRelease: "5.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "perl-Git", Epoch: 0, Version: "2.18.4", Release: "2.el8_2", Arch: "noarch", SrcName: "git", SrcEpoch: 0, SrcVersion: "2.18.4", SrcRelease: "2.el8_2", Modularitylabel: "", License: "GPLv2"}, {Name: "ncurses-base", Epoch: 0, Version: "6.1", Release: "7.20180224.el8", Arch: "noarch", SrcName: "ncurses", SrcEpoch: 0, SrcVersion: "6.1", SrcRelease: "7.20180224.el8", Modularitylabel: "", License: "MIT"}, {Name: "vim-filesystem", Epoch: 2, Version: "8.0.1763", Release: "13.el8", Arch: "noarch", SrcName: "vim", SrcEpoch: 2, SrcVersion: "8.0.1763", SrcRelease: "13.el8", Modularitylabel: "", License: "Vim and MIT"}, {Name: "libselinux", Epoch: 0, Version: "2.9", Release: "3.el8", Arch: "x86_64", SrcName: "libselinux", SrcEpoch: 0, SrcVersion: "2.9", SrcRelease: "3.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "gpm-libs", Epoch: 0, Version: "1.20.7", Release: "15.el8", Arch: "x86_64", SrcName: "gpm", SrcEpoch: 0, SrcVersion: "1.20.7", SrcRelease: "15.el8", Modularitylabel: "", License: "GPLv2 and GPLv2+ with exceptions and GPLv3+ and Verbatim and Copyright only"}, {Name: "glibc-minimal-langpack", Epoch: 0, Version: "2.28", Release: "101.el8", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.28", SrcRelease: "101.el8", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "file-devel", Epoch: 0, Version: "5.33", Release: "13.el8", Arch: "x86_64", SrcName: "file", SrcEpoch: 0, 
SrcVersion: "5.33", SrcRelease: "13.el8", Modularitylabel: "", License: "BSD"}, {Name: "glibc", Epoch: 0, Version: "2.28", Release: "101.el8", Arch: "x86_64", SrcName: "glibc", SrcEpoch: 0, SrcVersion: "2.28", SrcRelease: "101.el8", Modularitylabel: "", License: "LGPLv2+ and LGPLv2+ with exceptions and GPLv2+ and GPLv2+ with exceptions and BSD and Inner-Net and ISC and Public Domain and GFDL"}, {Name: "nss-devel", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libsepol", Epoch: 0, Version: "2.9", Release: "1.el8", Arch: "x86_64", SrcName: "libsepol", SrcEpoch: 0, SrcVersion: "2.9", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "xz-devel", Epoch: 0, Version: "5.2.4", Release: "3.el8", Arch: "x86_64", SrcName: "xz", SrcEpoch: 0, SrcVersion: "5.2.4", SrcRelease: "3.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "xz-libs", Epoch: 0, Version: "5.2.4", Release: "3.el8", Arch: "x86_64", SrcName: "xz", SrcEpoch: 0, SrcVersion: "5.2.4", SrcRelease: "3.el8", Modularitylabel: "", License: "Public Domain"}, {Name: "wget", Epoch: 0, Version: "1.19.5", Release: "8.el8_1.1", Arch: "x86_64", SrcName: "wget", SrcEpoch: 0, SrcVersion: "1.19.5", SrcRelease: "8.el8_1.1", Modularitylabel: "", License: "GPLv3+"}, {Name: "libcap", Epoch: 0, Version: "2.26", Release: "3.el8", Arch: "x86_64", SrcName: "libcap", SrcEpoch: 0, SrcVersion: "2.26", SrcRelease: "3.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "strace", Epoch: 0, Version: "4.24", Release: "9.el8", Arch: "x86_64", SrcName: "strace", SrcEpoch: 0, SrcVersion: "4.24", SrcRelease: "9.el8", Modularitylabel: "", License: "LGPL-2.1+ and GPL-2.0+"}, {Name: "info", Epoch: 0, Version: "6.5", Release: "6.el8", Arch: "x86_64", SrcName: "texinfo", SrcEpoch: 0, SrcVersion: "6.5", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "gdb-gdbserver", Epoch: 0, Version: "8.2", Release: "11.el8", Arch: "x86_64", SrcName: "gdb", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "11.el8", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ and GPLv2+ with exceptions and GPL+ and LGPLv2+ and LGPLv3+ and BSD and Public Domain and GFDL"}, {Name: "libcom_err", Epoch: 0, Version: "1.45.4", Release: "3.el8", Arch: "x86_64", SrcName: "e2fsprogs", SrcEpoch: 0, SrcVersion: "1.45.4", SrcRelease: "3.el8", Modularitylabel: "", License: "MIT"}, {Name: "libcroco", Epoch: 0, Version: "0.6.12", Release: "4.el8_2.1", Arch: "x86_64", SrcName: "libcroco", SrcEpoch: 0, SrcVersion: "0.6.12", SrcRelease: "4.el8_2.1", Modularitylabel: "", License: "LGPLv2"}, {Name: "libxml2", Epoch: 0, Version: "2.9.7", Release: "7.el8", Arch: "x86_64", SrcName: "libxml2", SrcEpoch: 0, SrcVersion: "2.9.7", SrcRelease: "7.el8", Modularitylabel: "", License: "MIT"}, {Name: "libmpc", Epoch: 0, Version: "1.0.2", Release: "9.el8", Arch: "x86_64", SrcName: "libmpc", SrcEpoch: 0, SrcVersion: "1.0.2", SrcRelease: "9.el8", Modularitylabel: "", License: "LGPLv3+ and GFDL"}, {Name: "expat", Epoch: 0, Version: "2.2.5", Release: "3.el8", Arch: "x86_64", SrcName: "expat", SrcEpoch: 0, SrcVersion: "2.2.5", SrcRelease: "3.el8", Modularitylabel: "", License: "MIT"}, {Name: "gettext", Epoch: 0, Version: "0.19.8.1", Release: "17.el8", Arch: "x86_64", SrcName: "gettext", SrcEpoch: 0, SrcVersion: "0.19.8.1", SrcRelease: "17.el8", Modularitylabel: "", License: "GPLv3+ and LGPLv2+"}, {Name: "libuuid", Epoch: 0, 
Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "BSD"}, {Name: "autoconf", Epoch: 0, Version: "2.69", Release: "27.el8", Arch: "noarch", SrcName: "autoconf", SrcEpoch: 0, SrcVersion: "2.69", SrcRelease: "27.el8", Modularitylabel: "", License: "GPLv2+ and GFDL"}, {Name: "chkconfig", Epoch: 0, Version: "1.11", Release: "1.el8", Arch: "x86_64", SrcName: "chkconfig", SrcEpoch: 0, SrcVersion: "1.11", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "kernel-headers", Epoch: 0, Version: "4.18.0", Release: "193.28.1.el8_2", Arch: "x86_64", SrcName: "kernel", SrcEpoch: 0, SrcVersion: "4.18.0", SrcRelease: "193.28.1.el8_2", Modularitylabel: "", License: "GPLv2 and Redistributable, no modification permitted"}, {Name: "gmp", Epoch: 1, Version: "6.1.2", Release: "10.el8", Arch: "x86_64", SrcName: "gmp", SrcEpoch: 1, SrcVersion: "6.1.2", SrcRelease: "10.el8", Modularitylabel: "", License: "LGPLv3+ or GPLv2+"}, {Name: "libxcrypt-devel", Epoch: 0, Version: "4.1.1", Release: "4.el8", Arch: "x86_64", SrcName: "libxcrypt", SrcEpoch: 0, SrcVersion: "4.1.1", SrcRelease: "4.el8", Modularitylabel: "", License: "LGPLv2+ and BSD and Public Domain"}, {Name: "libattr", Epoch: 0, Version: "2.4.48", Release: "3.el8", Arch: "x86_64", SrcName: "attr", SrcEpoch: 0, SrcVersion: "2.4.48", SrcRelease: "3.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gettext-common-devel", Epoch: 0, Version: "0.19.8.1", Release: "17.el8", Arch: "noarch", SrcName: "gettext", SrcEpoch: 0, SrcVersion: "0.19.8.1", SrcRelease: "17.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "coreutils-single", Epoch: 0, Version: "8.30", Release: "7.el8_2.1", Arch: "x86_64", SrcName: "coreutils", SrcEpoch: 0, SrcVersion: "8.30", SrcRelease: "7.el8_2.1", Modularitylabel: "", License: "GPLv3+"}, {Name: "automake", Epoch: 0, Version: "1.16.1", Release: "6.el8", Arch: "noarch", SrcName: "automake", SrcEpoch: 0, SrcVersion: "1.16.1", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv2+ and GFDL and Public Domain and MIT"}, {Name: "libblkid", Epoch: 0, Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gcc", Epoch: 0, Version: "8.3.1", Release: "5.el8.0.2", Arch: "x86_64", SrcName: "gcc", SrcEpoch: 0, SrcVersion: "8.3.1", SrcRelease: "5.el8.0.2", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ with exceptions and LGPLv2+ and BSD"}, {Name: "libcap-ng", Epoch: 0, Version: "0.7.9", Release: "5.el8", Arch: "x86_64", SrcName: "libcap-ng", SrcEpoch: 0, SrcVersion: "0.7.9", SrcRelease: "5.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gettext-devel", Epoch: 0, Version: "0.19.8.1", Release: "17.el8", Arch: "x86_64", SrcName: "gettext", SrcEpoch: 0, SrcVersion: "0.19.8.1", SrcRelease: "17.el8", Modularitylabel: "", License: "LGPLv2+ and GPLv3+"}, {Name: "libffi", Epoch: 0, Version: "3.1", Release: "21.el8", Arch: "x86_64", SrcName: "libffi", SrcEpoch: 0, SrcVersion: "3.1", SrcRelease: "21.el8", Modularitylabel: "", License: "MIT"}, {Name: "make", Epoch: 1, Version: "4.2.1", Release: "10.el8", Arch: "x86_64", SrcName: "make", SrcEpoch: 1, SrcVersion: "4.2.1", SrcRelease: "10.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "libzstd", Epoch: 0, Version: "1.4.2", Release: "2.el8", Arch: "x86_64", SrcName: "zstd", SrcEpoch: 0, SrcVersion: "1.4.2", 
SrcRelease: "2.el8", Modularitylabel: "", License: "BSD and GPLv2"}, {Name: "npm", Epoch: 1, Version: "6.14.4", Release: "1.10.21.0.3.module_el8.2.0+391+8da3adc6", Arch: "x86_64", SrcName: "nodejs", SrcEpoch: 1, SrcVersion: "10.21.0", SrcRelease: "3.module_el8.2.0+391+8da3adc6", Modularitylabel: "nodejs:10:8020020200707141642:6a468ee4", License: "MIT and ASL 2.0 and ISC and BSD"}, {Name: "lz4-libs", Epoch: 0, Version: "1.8.1.2", Release: "4.el8", Arch: "x86_64", SrcName: "lz4", SrcEpoch: 0, SrcVersion: "1.8.1.2", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+ and BSD"}, {Name: "libtool-ltdl", Epoch: 0, Version: "2.4.6", Release: "25.el8", Arch: "x86_64", SrcName: "libtool", SrcEpoch: 0, SrcVersion: "2.4.6", SrcRelease: "25.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libgcrypt", Epoch: 0, Version: "1.8.3", Release: "4.el8", Arch: "x86_64", SrcName: "libgcrypt", SrcEpoch: 0, SrcVersion: "1.8.3", SrcRelease: "4.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libipt", Epoch: 0, Version: "1.6.1", Release: "8.el8", Arch: "x86_64", SrcName: "libipt", SrcEpoch: 0, SrcVersion: "1.6.1", SrcRelease: "8.el8", Modularitylabel: "", License: "BSD"}, {Name: "cracklib", Epoch: 0, Version: "2.9.6", Release: "15.el8", Arch: "x86_64", SrcName: "cracklib", SrcEpoch: 0, SrcVersion: "2.9.6", SrcRelease: "15.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gc", Epoch: 0, Version: "7.6.4", Release: "3.el8", Arch: "x86_64", SrcName: "gc", SrcEpoch: 0, SrcVersion: "7.6.4", SrcRelease: "3.el8", Modularitylabel: "", License: "BSD"}, {Name: "libidn2", Epoch: 0, Version: "2.2.0", Release: "1.el8", Arch: "x86_64", SrcName: "libidn2", SrcEpoch: 0, SrcVersion: "2.2.0", SrcRelease: "1.el8", Modularitylabel: "", License: "(GPLv2+ or LGPLv3+) and GPLv3+"}, {Name: "gdb-headless", Epoch: 0, Version: "8.2", Release: "12.el8", Arch: "x86_64", SrcName: "gdb", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "12.el8", Modularitylabel: "", License: "GPLv3+ and GPLv3+ with exceptions and GPLv2+ and GPLv2+ with exceptions and GPL+ and LGPLv2+ and LGPLv3+ and BSD and Public Domain and GFDL"}, {Name: "file-libs", Epoch: 0, Version: "5.33", Release: "13.el8", Arch: "x86_64", SrcName: "file", SrcEpoch: 0, SrcVersion: "5.33", SrcRelease: "13.el8", Modularitylabel: "", License: "BSD"}, {Name: "epel-release", Epoch: 0, Version: "8", Release: "8.el8", Arch: "noarch", SrcName: "epel-release", SrcEpoch: 0, SrcVersion: "8", SrcRelease: "8.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "keyutils-libs", Epoch: 0, Version: "1.5.10", Release: "6.el8", Arch: "x86_64", SrcName: "keyutils", SrcEpoch: 0, SrcVersion: "1.5.10", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "platform-python-pip", Epoch: 0, Version: "9.0.3", Release: "18.el8", Arch: "noarch", SrcName: "python-pip", SrcEpoch: 0, SrcVersion: "9.0.3", SrcRelease: "18.el8", Modularitylabel: "", License: "MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD)"}, {Name: "p11-kit-trust", Epoch: 0, Version: "0.23.14", Release: "5.el8_0", Arch: "x86_64", SrcName: "p11-kit", SrcEpoch: 0, SrcVersion: "0.23.14", SrcRelease: "5.el8_0", Modularitylabel: "", License: "BSD"}, {Name: "python36", Epoch: 0, Version: "3.6.8", Release: "2.module_el8.3.0+562+e162826a", Arch: "x86_64", SrcName: "python36", SrcEpoch: 0, SrcVersion: "3.6.8", SrcRelease: "2.module_el8.3.0+562+e162826a", Modularitylabel: "python36:3.6:8030020201104034153:24f1489c", License: "Python"}, {Name: "pcre", Epoch: 0, Version: "8.42", 
Release: "4.el8", Arch: "x86_64", SrcName: "pcre", SrcEpoch: 0, SrcVersion: "8.42", SrcRelease: "4.el8", Modularitylabel: "", License: "BSD"}, {Name: "python2-setuptools-wheel", Epoch: 0, Version: "39.0.1", Release: "12.module_el8.3.0+478+7570e00c", Arch: "noarch", SrcName: "python2-setuptools", SrcEpoch: 0, SrcVersion: "39.0.1", SrcRelease: "12.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "MIT"}, {Name: "systemd-libs", Epoch: 0, Version: "239", Release: "31.el8_2.2", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "239", SrcRelease: "31.el8_2.2", Modularitylabel: "", License: "LGPLv2+ and MIT"}, {Name: "python2-libs", Epoch: 0, Version: "2.7.17", Release: "2.module_el8.3.0+478+7570e00c", Arch: "x86_64", SrcName: "python2", SrcEpoch: 0, SrcVersion: "2.7.17", SrcRelease: "2.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "Python"}, {Name: "dbus-tools", Epoch: 1, Version: "1.12.8", Release: "10.el8_2", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.8", SrcRelease: "10.el8_2", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "python2-setuptools", Epoch: 0, Version: "39.0.1", Release: "12.module_el8.3.0+478+7570e00c", Arch: "noarch", SrcName: "python2-setuptools", SrcEpoch: 0, SrcVersion: "39.0.1", SrcRelease: "12.module_el8.3.0+478+7570e00c", Modularitylabel: "python27:2.7:8030020200831201838:851f4228", License: "MIT"}, {Name: "libusbx", Epoch: 0, Version: "1.0.22", Release: "1.el8", Arch: "x86_64", SrcName: "libusbx", SrcEpoch: 0, SrcVersion: "1.0.22", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gpg-pubkey", Epoch: 0, Version: "ce977fe0", Release: "5db1f171", Arch: "None", SrcName: "", SrcEpoch: 0, SrcVersion: "", SrcRelease: "", Modularitylabel: "", License: "pubkey"}, {Name: "rpm-libs", Epoch: 0, Version: "4.14.3", Release: "4.el8", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.3", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ with exceptions"}, {Name: "squashfs-tools", Epoch: 0, Version: "4.3", Release: "19.el8", Arch: "x86_64", SrcName: "squashfs-tools", SrcEpoch: 0, SrcVersion: "4.3", SrcRelease: "19.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "rpm-build-libs", Epoch: 0, Version: "4.14.3", Release: "4.el8", Arch: "x86_64", SrcName: "rpm", SrcEpoch: 0, SrcVersion: "4.14.3", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+ with exceptions"}, {Name: "libsemanage", Epoch: 0, Version: "2.9", Release: "2.el8", Arch: "x86_64", SrcName: "libsemanage", SrcEpoch: 0, SrcVersion: "2.9", SrcRelease: "2.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libutempter", Epoch: 0, Version: "1.1.6", Release: "14.el8", Arch: "x86_64", SrcName: "libutempter", SrcEpoch: 0, SrcVersion: "1.1.6", SrcRelease: "14.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "acl", Epoch: 0, Version: "2.2.53", Release: "1.el8", Arch: "x86_64", SrcName: "acl", SrcEpoch: 0, SrcVersion: "2.2.53", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "nettle", Epoch: 0, Version: "3.4.1", Release: "1.el8", Arch: "x86_64", SrcName: "nettle", SrcEpoch: 0, SrcVersion: "3.4.1", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv3+ or GPLv2+"}, {Name: "libcomps", Epoch: 0, Version: "0.1.11", Release: "4.el8", Arch: "x86_64", SrcName: "libcomps", SrcEpoch: 0, SrcVersion: "0.1.11", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: 
"findutils", Epoch: 1, Version: "4.6.0", Release: "20.el8", Arch: "x86_64", SrcName: "findutils", SrcEpoch: 1, SrcVersion: "4.6.0", SrcRelease: "20.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "cpio", Epoch: 0, Version: "2.12", Release: "8.el8", Arch: "x86_64", SrcName: "cpio", SrcEpoch: 0, SrcVersion: "2.12", SrcRelease: "8.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "ipcalc", Epoch: 0, Version: "0.2.4", Release: "4.el8", Arch: "x86_64", SrcName: "ipcalc", SrcEpoch: 0, SrcVersion: "0.2.4", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "libnghttp2", Epoch: 0, Version: "1.33.0", Release: "3.el8_2.1", Arch: "x86_64", SrcName: "nghttp2", SrcEpoch: 0, SrcVersion: "1.33.0", SrcRelease: "3.el8_2.1", Modularitylabel: "", License: "MIT"}, {Name: "iptables-libs", Epoch: 0, Version: "1.8.4", Release: "10.el8_2.1", Arch: "x86_64", SrcName: "iptables", SrcEpoch: 0, SrcVersion: "1.8.4", SrcRelease: "10.el8_2.1", Modularitylabel: "", License: "GPLv2 and Artistic 2.0 and ISC"}, {Name: "libsigsegv", Epoch: 0, Version: "2.11", Release: "5.el8", Arch: "x86_64", SrcName: "libsigsegv", SrcEpoch: 0, SrcVersion: "2.11", SrcRelease: "5.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "libverto", Epoch: 0, Version: "0.3.0", Release: "5.el8", Arch: "x86_64", SrcName: "libverto", SrcEpoch: 0, SrcVersion: "0.3.0", SrcRelease: "5.el8", Modularitylabel: "", License: "MIT"}, {Name: "libtirpc", Epoch: 0, Version: "1.1.4", Release: "4.el8", Arch: "x86_64", SrcName: "libtirpc", SrcEpoch: 0, SrcVersion: "1.1.4", SrcRelease: "4.el8", Modularitylabel: "", License: "SISSL and BSD"}, {Name: "openssl-libs", Epoch: 1, Version: "1.1.1c", Release: "15.el8", Arch: "x86_64", SrcName: "openssl", SrcEpoch: 1, SrcVersion: "1.1.1c", SrcRelease: "15.el8", Modularitylabel: "", License: "OpenSSL"}, {Name: "python3-libs", Epoch: 0, Version: "3.6.8", Release: "23.el8", Arch: "x86_64", SrcName: "python3", SrcEpoch: 0, SrcVersion: "3.6.8", SrcRelease: "23.el8", Modularitylabel: "", License: "Python"}, {Name: "libpwquality", Epoch: 0, Version: "1.4.0", Release: "9.el8", Arch: "x86_64", SrcName: "libpwquality", SrcEpoch: 0, SrcVersion: "1.4.0", SrcRelease: "9.el8", Modularitylabel: "", License: "BSD or GPLv2+"}, {Name: "util-linux", Epoch: 0, Version: "2.32.1", Release: "22.el8", Arch: "x86_64", SrcName: "util-linux", SrcEpoch: 0, SrcVersion: "2.32.1", SrcRelease: "22.el8", Modularitylabel: "", License: "GPLv2 and GPLv2+ and LGPLv2+ and BSD with advertising and Public Domain"}, {Name: "glib2", Epoch: 0, Version: "2.56.4", Release: "8.el8", Arch: "x86_64", SrcName: "glib2", SrcEpoch: 0, SrcVersion: "2.56.4", SrcRelease: "8.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "iproute", Epoch: 0, Version: "5.3.0", Release: "1.el8", Arch: "x86_64", SrcName: "iproute", SrcEpoch: 0, SrcVersion: "5.3.0", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2+ and Public Domain"}, {Name: "kmod", Epoch: 0, Version: "25", Release: "16.el8", Arch: "x86_64", SrcName: "kmod", SrcEpoch: 0, SrcVersion: "25", SrcRelease: "16.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "curl", Epoch: 0, Version: "7.61.1", Release: "12.el8", Arch: "x86_64", SrcName: "curl", SrcEpoch: 0, SrcVersion: "7.61.1", SrcRelease: "12.el8", Modularitylabel: "", License: "MIT"}, {Name: "openldap", Epoch: 0, Version: "2.4.46", Release: "11.el8_1", Arch: "x86_64", SrcName: "openldap", SrcEpoch: 0, SrcVersion: "2.4.46", SrcRelease: "11.el8_1", Modularitylabel: "", License: "OpenLDAP"}, {Name: "python3-libcomps", Epoch: 0, Version: 
"0.1.11", Release: "4.el8", Arch: "x86_64", SrcName: "libcomps", SrcEpoch: 0, SrcVersion: "0.1.11", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "libarchive", Epoch: 0, Version: "3.3.2", Release: "8.el8_1", Arch: "x86_64", SrcName: "libarchive", SrcEpoch: 0, SrcVersion: "3.3.2", SrcRelease: "8.el8_1", Modularitylabel: "", License: "BSD"}, {Name: "libyaml", Epoch: 0, Version: "0.1.7", Release: "5.el8", Arch: "x86_64", SrcName: "libyaml", SrcEpoch: 0, SrcVersion: "0.1.7", SrcRelease: "5.el8", Modularitylabel: "", License: "MIT"}, {Name: "npth", Epoch: 0, Version: "1.5", Release: "4.el8", Arch: "x86_64", SrcName: "npth", SrcEpoch: 0, SrcVersion: "1.5", SrcRelease: "4.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "gpgme", Epoch: 0, Version: "1.10.0", Release: "6.el8.0.1", Arch: "x86_64", SrcName: "gpgme", SrcEpoch: 0, SrcVersion: "1.10.0", SrcRelease: "6.el8.0.1", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libdnf", Epoch: 0, Version: "0.39.1", Release: "6.el8_2", Arch: "x86_64", SrcName: "libdnf", SrcEpoch: 0, SrcVersion: "0.39.1", SrcRelease: "6.el8_2", Modularitylabel: "", License: "LGPLv2+"}, {Name: "python3-hawkey", Epoch: 0, Version: "0.39.1", Release: "6.el8_2", Arch: "x86_64", SrcName: "libdnf", SrcEpoch: 0, SrcVersion: "0.39.1", SrcRelease: "6.el8_2", Modularitylabel: "", License: "LGPLv2+"}, {Name: "libreport-filesystem", Epoch: 0, Version: "2.9.5", Release: "10.el8", Arch: "x86_64", SrcName: "libreport", SrcEpoch: 0, SrcVersion: "2.9.5", SrcRelease: "10.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "dhcp-common", Epoch: 12, Version: "4.3.6", Release: "40.el8", Arch: "noarch", SrcName: "dhcp", SrcEpoch: 12, SrcVersion: "4.3.6", SrcRelease: "40.el8", Modularitylabel: "", License: "ISC"}, {Name: "dbus-daemon", Epoch: 1, Version: "1.12.8", Release: "10.el8_2", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.8", SrcRelease: "10.el8_2", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "device-mapper-libs", Epoch: 8, Version: "1.02.169", Release: "3.el8", Arch: "x86_64", SrcName: "lvm2", SrcEpoch: 8, SrcVersion: "2.03.08", SrcRelease: "3.el8", Modularitylabel: "", License: "LGPLv2"}, {Name: "elfutils-default-yama-scope", Epoch: 0, Version: "0.178", Release: "7.el8", Arch: "noarch", SrcName: "elfutils", SrcEpoch: 0, SrcVersion: "0.178", SrcRelease: "7.el8", Modularitylabel: "", License: "GPLv2+ or LGPLv3+"}, {Name: "systemd-pam", Epoch: 0, Version: "239", Release: "31.el8_2.2", Arch: "x86_64", SrcName: "systemd", SrcEpoch: 0, SrcVersion: "239", SrcRelease: "31.el8_2.2", Modularitylabel: "", License: "LGPLv2+ and MIT and GPLv2+"}, {Name: "dbus", Epoch: 1, Version: "1.12.8", Release: "10.el8_2", Arch: "x86_64", SrcName: "dbus", SrcEpoch: 1, SrcVersion: "1.12.8", SrcRelease: "10.el8_2", Modularitylabel: "", License: "(GPLv2+ or AFL) and GPLv2+"}, {Name: "dhcp-client", Epoch: 12, Version: "4.3.6", Release: "40.el8", Arch: "x86_64", SrcName: "dhcp", SrcEpoch: 12, SrcVersion: "4.3.6", SrcRelease: "40.el8", Modularitylabel: "", License: "ISC"}, {Name: "libkcapi-hmaccalc", Epoch: 0, Version: "1.1.1", Release: "16_1.el8", Arch: "x86_64", SrcName: "libkcapi", SrcEpoch: 0, SrcVersion: "1.1.1", SrcRelease: "16_1.el8", Modularitylabel: "", License: "BSD or GPLv2"}, {Name: "dracut", Epoch: 0, Version: "049", Release: "70.git20200228.el8", Arch: "x86_64", SrcName: "dracut", SrcEpoch: 0, SrcVersion: "049", SrcRelease: "70.git20200228.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "dracut-squash", Epoch: 0, 
Version: "049", Release: "70.git20200228.el8", Arch: "x86_64", SrcName: "dracut", SrcEpoch: 0, SrcVersion: "049", SrcRelease: "70.git20200228.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "dnf", Epoch: 0, Version: "4.2.17", Release: "7.el8_2", Arch: "noarch", SrcName: "dnf", SrcEpoch: 0, SrcVersion: "4.2.17", SrcRelease: "7.el8_2", Modularitylabel: "", License: "GPLv2+ and GPLv2 and GPL"}, {Name: "kexec-tools", Epoch: 0, Version: "2.0.20", Release: "14.el8", Arch: "x86_64", SrcName: "kexec-tools", SrcEpoch: 0, SrcVersion: "2.0.20", SrcRelease: "14.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "tar", Epoch: 2, Version: "1.30", Release: "4.el8", Arch: "x86_64", SrcName: "tar", SrcEpoch: 2, SrcVersion: "1.30", SrcRelease: "4.el8", Modularitylabel: "", License: "GPLv3+"}, {Name: "hostname", Epoch: 0, Version: "3.20", Release: "6.el8", Arch: "x86_64", SrcName: "hostname", SrcEpoch: 0, SrcVersion: "3.20", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "langpacks-en", Epoch: 0, Version: "1.0", Release: "12.el8", Arch: "noarch", SrcName: "langpacks", SrcEpoch: 0, SrcVersion: "1.0", SrcRelease: "12.el8", Modularitylabel: "", License: "GPLv2+"}, {Name: "gpg-pubkey", Epoch: 0, Version: "8483c65d", Release: "5ccc5b19", Arch: "None", SrcName: "", SrcEpoch: 0, SrcVersion: "", SrcRelease: "", Modularitylabel: "", License: "pubkey"}, {Name: "centos-release", Epoch: 0, Version: "8.2", Release: "2.2004.0.2.el8", Arch: "x86_64", SrcName: "centos-release", SrcEpoch: 0, SrcVersion: "8.2", SrcRelease: "2.2004.0.2.el8", Modularitylabel: "", License: "GPLv2"}, {Name: "zlib", Epoch: 0, Version: "1.2.11", Release: "16.el8_2", Arch: "x86_64", SrcName: "zlib", SrcEpoch: 0, SrcVersion: "1.2.11", SrcRelease: "16.el8_2", Modularitylabel: "", License: "zlib and Boost"}, {Name: "librepo", Epoch: 0, Version: "1.11.0", Release: "3.el8_2", Arch: "x86_64", SrcName: "librepo", SrcEpoch: 0, SrcVersion: "1.11.0", SrcRelease: "3.el8_2", Modularitylabel: "", License: "LGPLv2+"}, {Name: "bind-export-libs", Epoch: 32, Version: "9.11.13", Release: "6.el8_2.1", Arch: "x86_64", SrcName: "bind", SrcEpoch: 32, SrcVersion: "9.11.13", SrcRelease: "6.el8_2.1", Modularitylabel: "", License: "MPLv2.0"}, {Name: "perl-libs", Epoch: 4, Version: "5.26.3", Release: "416.el8", Arch: "x86_64", SrcName: "perl", SrcEpoch: 4, SrcVersion: "5.26.3", SrcRelease: "416.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and HSRL and MIT and UCD"}, {Name: "perl-Scalar-List-Utils", Epoch: 3, Version: "1.49", Release: "2.el8", Arch: "x86_64", SrcName: "perl-Scalar-List-Utils", SrcEpoch: 3, SrcVersion: "1.49", SrcRelease: "2.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "nspr", Epoch: 0, Version: "4.25.0", Release: "2.el8_2", Arch: "x86_64", SrcName: "nspr", SrcEpoch: 0, SrcVersion: "4.25.0", SrcRelease: "2.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-softokn-freebl", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "perl-Text-ParseWords", Epoch: 0, Version: "3.30", Release: "395.el8", Arch: "noarch", SrcName: "perl-Text-ParseWords", SrcEpoch: 0, SrcVersion: "3.30", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, 
{Name: "perl-Term-ANSIColor", Epoch: 0, Version: "4.06", Release: "396.el8", Arch: "noarch", SrcName: "perl-Term-ANSIColor", SrcEpoch: 0, SrcVersion: "4.06", SrcRelease: "396.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Errno", Epoch: 0, Version: "1.28", Release: "416.el8", Arch: "x86_64", SrcName: "perl", SrcEpoch: 0, SrcVersion: "5.26.3", SrcRelease: "416.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Text-Tabs+Wrap", Epoch: 0, Version: "2013.0523", Release: "395.el8", Arch: "noarch", SrcName: "perl-Text-Tabs+Wrap", SrcEpoch: 0, SrcVersion: "2013.0523", SrcRelease: "395.el8", Modularitylabel: "", License: "TTWL"}, {Name: "perl-File-Path", Epoch: 0, Version: "2.15", Release: "2.el8", Arch: "noarch", SrcName: "perl-File-Path", SrcEpoch: 0, SrcVersion: "2.15", SrcRelease: "2.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-PathTools", Epoch: 0, Version: "3.74", Release: "1.el8", Arch: "x86_64", SrcName: "perl-PathTools", SrcEpoch: 0, SrcVersion: "3.74", SrcRelease: "1.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and BSD"}, {Name: "perl-threads", Epoch: 1, Version: "2.21", Release: "2.el8", Arch: "x86_64", SrcName: "perl-threads", SrcEpoch: 1, SrcVersion: "2.21", SrcRelease: "2.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-interpreter", Epoch: 4, Version: "5.26.3", Release: "416.el8", Arch: "x86_64", SrcName: "perl", SrcEpoch: 4, SrcVersion: "5.26.3", SrcRelease: "416.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and (GPLv2+ or Artistic) and BSD and Public Domain and UCD"}, {Name: "perl-IO-Socket-IP", Epoch: 0, Version: "0.39", Release: "5.el8", Arch: "noarch", SrcName: "perl-IO-Socket-IP", SrcEpoch: 0, SrcVersion: "0.39", SrcRelease: "5.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-File-Temp", Epoch: 0, Version: "0.230.600", Release: "1.el8", Arch: "noarch", SrcName: "perl-File-Temp", SrcEpoch: 0, SrcVersion: "0.230.600", SrcRelease: "1.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Digest-MD5", Epoch: 0, Version: "2.55", Release: "396.el8", Arch: "x86_64", SrcName: "perl-Digest-MD5", SrcEpoch: 0, SrcVersion: "2.55", SrcRelease: "396.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and BSD"}, {Name: "perl-Error", Epoch: 1, Version: "0.17025", Release: "2.el8", Arch: "noarch", SrcName: "perl-Error", SrcEpoch: 1, SrcVersion: "0.17025", SrcRelease: "2.el8", Modularitylabel: "", License: "(GPL+ or Artistic) and MIT"}, {Name: "perl-Data-Dumper", Epoch: 0, Version: "2.167", Release: "399.el8", Arch: "x86_64", SrcName: "perl-Data-Dumper", SrcEpoch: 0, SrcVersion: "2.167", SrcRelease: "399.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "perl-Storable", Epoch: 1, Version: "3.11", Release: "3.el8", Arch: "x86_64", SrcName: "perl-Storable", SrcEpoch: 1, SrcVersion: "3.11", SrcRelease: "3.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, {Name: "fipscheck-lib", Epoch: 0, Version: "1.5.0", Release: "4.el8", Arch: "x86_64", SrcName: "fipscheck", SrcEpoch: 0, SrcVersion: "1.5.0", SrcRelease: "4.el8", Modularitylabel: "", License: "BSD"}, {Name: "openssh", Epoch: 0, Version: "8.0p1", Release: "4.el8_1", Arch: "x86_64", SrcName: "openssh", SrcEpoch: 0, SrcVersion: "8.0p1", SrcRelease: "4.el8_1", Modularitylabel: "", License: "BSD"}, {Name: "publicsuffix-list-dafsa", Epoch: 0, Version: "20180723", Release: "1.el8", Arch: "noarch", SrcName: "publicsuffix-list", SrcEpoch: 0, SrcVersion: "20180723", SrcRelease: "1.el8", 
Modularitylabel: "", License: "MPLv2.0"}, {Name: "pkgconf-m4", Epoch: 0, Version: "1.4.2", Release: "1.el8", Arch: "noarch", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.el8", Modularitylabel: "", License: "GPLv2+ with exceptions"}, {Name: "pcre2-utf16", Epoch: 0, Version: "10.32", Release: "1.el8", Arch: "x86_64", SrcName: "pcre2", SrcEpoch: 0, SrcVersion: "10.32", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD"}, {Name: "ncurses", Epoch: 0, Version: "6.1", Release: "7.20180224.el8", Arch: "x86_64", SrcName: "ncurses", SrcEpoch: 0, SrcVersion: "6.1", SrcRelease: "7.20180224.el8", Modularitylabel: "", License: "MIT"}, {Name: "libsecret", Epoch: 0, Version: "0.18.6", Release: "1.el8", Arch: "x86_64", SrcName: "libsecret", SrcEpoch: 0, SrcVersion: "0.18.6", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "pkgconf", Epoch: 0, Version: "1.4.2", Release: "1.el8", Arch: "x86_64", SrcName: "pkgconf", SrcEpoch: 0, SrcVersion: "1.4.2", SrcRelease: "1.el8", Modularitylabel: "", License: "ISC"}, {Name: "nspr-devel", Epoch: 0, Version: "4.25.0", Release: "2.el8_2", Arch: "x86_64", SrcName: "nspr", SrcEpoch: 0, SrcVersion: "4.25.0", SrcRelease: "2.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "nss-softokn-devel", Epoch: 0, Version: "3.53.1", Release: "11.el8_2", Arch: "x86_64", SrcName: "nss", SrcEpoch: 0, SrcVersion: "3.53.1", SrcRelease: "11.el8_2", Modularitylabel: "", License: "MPLv2.0"}, {Name: "libsepol-devel", Epoch: 0, Version: "2.9", Release: "1.el8", Arch: "x86_64", SrcName: "libsepol", SrcEpoch: 0, SrcVersion: "2.9", SrcRelease: "1.el8", Modularitylabel: "", License: "LGPLv2+"}, {Name: "pcre2-devel", Epoch: 0, Version: "10.32", Release: "1.el8", Arch: "x86_64", SrcName: "pcre2", SrcEpoch: 0, SrcVersion: "10.32", SrcRelease: "1.el8", Modularitylabel: "", License: "BSD"}, {Name: "zlib-devel", Epoch: 0, Version: "1.2.11", Release: "16.el8_2", Arch: "x86_64", SrcName: "zlib", SrcEpoch: 0, SrcVersion: "1.2.11", SrcRelease: "16.el8_2", Modularitylabel: "", License: "zlib and Boost"}, {Name: "libedit", Epoch: 0, Version: "3.1", Release: "23.20170329cvs.el8", Arch: "x86_64", SrcName: "libedit", SrcEpoch: 0, SrcVersion: "3.1", SrcRelease: "23.20170329cvs.el8", Modularitylabel: "", License: "BSD"}, {Name: "git-core", Epoch: 0, Version: "2.18.4", Release: "2.el8_2", Arch: "x86_64", SrcName: "git", SrcEpoch: 0, SrcVersion: "2.18.4", SrcRelease: "2.el8_2", Modularitylabel: "", License: "GPLv2"}, {Name: "keyutils-libs-devel", Epoch: 0, Version: "1.5.10", Release: "6.el8", Arch: "x86_64", SrcName: "keyutils", SrcEpoch: 0, SrcVersion: "1.5.10", SrcRelease: "6.el8", Modularitylabel: "", License: "GPLv2+ and LGPLv2+"}, {Name: "groff-base", Epoch: 0, Version: "1.22.3", Release: "18.el8", Arch: "x86_64", SrcName: "groff", SrcEpoch: 0, SrcVersion: "1.22.3", SrcRelease: "18.el8", Modularitylabel: "", License: "GPLv3+ and GFDL and BSD and MIT"}, {Name: "perl-Pod-Simple", Epoch: 1, Version: "3.35", Release: "395.el8", Arch: "noarch", SrcName: "perl-Pod-Simple", SrcEpoch: 1, SrcVersion: "3.35", SrcRelease: "395.el8", Modularitylabel: "", License: "GPL+ or Artistic"}, }, }, } a := rpmPkgAnalyzer{} for testname, tc := range tests { t.Run(testname, func(t *testing.T) { f, err := os.Open(tc.path) require.NoError(t, err) defer f.Close() pkgs, _, err := a.parsePkgInfo(f) require.NoError(t, err) sort.Slice(tc.pkgs, func(i, j int) bool { return tc.pkgs[i].Name < tc.pkgs[j].Name }) sort.Slice(pkgs, func(i, j int) bool { return pkgs[i].Name < pkgs[j].Name }) 
assert.Equal(t, tc.pkgs, pkgs) }) } }
symbol.rs
use clingo::*;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn main() {
    // create a number, identifier (function without arguments), and a function symbol
    let number_symbol = Symbol::create_number(42);
    let identifier_symbol = Symbol::create_id("x", true).unwrap();
    let mut symbols = vec![number_symbol, identifier_symbol];
    let function_symbol = Symbol::create_function("x", &symbols, true).unwrap();
    symbols.push(function_symbol);

    // print the symbols along with their hash values
    let mut hasher = DefaultHasher::new();
    for symbol in &symbols {
        symbol.hash(&mut hasher);
        println!("the hash of {} is {}", symbol, hasher.finish());
    }

    // retrieve argument symbols of a symbol
    let symbols2 = function_symbol.arguments().unwrap();

    // equal to comparison
    for symbol in symbols2 {
        print!("{} is ", symbols[0]);
        if symbols[0] == symbol {
            print!("equal");
        } else {
            print!("not equal");
        }
        println!(" to {}", symbol);
    }

    // less than comparison
    print!("{} is ", symbols[0]);
    if symbols[0] < symbols[1] {
        print!("less");
    } else {
        print!("not less");
    }
    println!(" than {}", symbols[1]);
}
styleToHtmlStyle.js
'use strict';

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _styleToHexColor = require('./styleToHexColor');

var _styleToHexColor2 = _interopRequireDefault(_styleToHexColor);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

exports.default = {
  get reset() {
    throw new Error();
  },

  // text style
  bold: { open: 'font-weight: bold', close: 'font-weight: normal' },
  italic: { open: 'font-style: italic', close: 'font-style: normal' },
  underline: { open: 'text-decoration: underline', close: 'text-decoration: none' },
  inverse: { open: 'unicode-bidi: bidi-override; direction: rtl', close: 'unicode-bidi: normal; direction: ltr' },
  strikethrough: { open: 'text-decoration: line-through', close: 'text-decoration: none' },

  black: { open: 'color: black', close: 'color: initial' },
  red: { open: 'color: #ff0020', close: 'color: initial' },
  green: { open: 'color: #00b317', close: 'color: initial' },
  yellow: { open: 'color: #ffcc00', close: 'color: initial' },
  blue: { open: 'color: #00a0ff', close: 'color: initial' },
  magenta: { open: 'color: #ff00a0', close: 'color: initial' },
  cyan: { open: 'color: #00cfd8', close: 'color: initial' },
  white: { open: 'color: white', close: 'color: initial' },
  gray: { open: 'color: gray', close: 'color: initial' },

  bgBlack: { open: 'background: black', close: 'background: initial' },
  bgRed: { open: 'background: #ff0020', close: 'background: initial' },
  bgGreen: { open: 'background: #00b317', close: 'background: initial' },
  bgYellow: { open: 'background: #ffcc00', close: 'background: initial' },
  bgBlue: { open: 'background: #00a0ff', close: 'background: initial' },
  bgMagenta: { open: 'background: #ff00a0', close: 'background: initial' },
  bgCyan: { open: 'background: #00cfd8', close: 'background: initial' },
  bgWhite: { open: 'background: white', close: 'background: initial' },

  orange: { open: `color: #${_styleToHexColor2.default.orange}`, close: 'color: initial' },
  grayLight: { open: `color: #${_styleToHexColor2.default.grayLight}`, close: 'color: initial' },
  'gray-light': { open: `color: #${_styleToHexColor2.default.grayLight}`, close: 'color: initial' }
};
//# sourceMappingURL=styleToHtmlStyle.js.map
test_area.py
# -*- coding: utf-8 -*-

from unittest import TestCase

import six

from popolo_data.importer import Popolo


EXAMPLE_AREA = {
    "id": "area/tartu_linn",
    "identifiers": [
        {
            "identifier": "Q3032626",
            "scheme": "wikidata"
        }
    ],
    "name": "Tartu linn",
    "other_names": [
        {
            "lang": "fr",
            "name": "Dixième circonscription législative d'Estonie",
            "note": "multilingual"
        },
        {
            "lang": "et",
            "name": "Valimisringkond nr 10",
            "note": "multilingual"
        },
        {
            "lang": "en",
            "name": "Electoral District 10 (Tartu)",
            "note": "multilingual"
        }
    ],
    "type": "constituency"
}


class TestAreas(TestCase):

    def test_empty_file_gives_no_areas(self):
        popolo = Popolo({})
        assert len(popolo.areas) == 0

    def test_single_area_with_name(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        assert len(popolo.areas) == 1
        area = popolo.areas[0]
        assert area.name == 'Tartu linn'

    def test_area_id(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        assert area.id == 'area/tartu_linn'

    def test_area_type(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        assert area.type == 'constituency'

    def test_area_identifiers(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        assert area.identifiers == [
            {
                "identifier": "Q3032626",
                "scheme": "wikidata"
            }
        ]

    def test_area_other_names(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        assert area.other_names == [
            {
                "lang": "fr",
                "name": "Dixième circonscription législative d'Estonie",
                "note": "multilingual"
            },
            {
                "lang": "et",
                "name": "Valimisringkond nr 10",
                "note": "multilingual"
            },
            {
                "lang": "en",
                "name": "Electoral District 10 (Tartu)",
                "note": "multilingual"
            }
        ]

    def test_area_wikidata(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        assert area.wikidata == 'Q3032626'

    def test_area_repr(self):
        popolo = Popolo({"areas": [EXAMPLE_AREA]})
        area = popolo.areas[0]
        if six.PY2:
            assert repr(area) == b"<Area: Tartu linn>"
        else:
            assert repr(area) == u"<Area: Tartu linn>"

    def test_area_identity_equality_and_inequality(self):
        popolo_a = Popolo({"areas": [EXAMPLE_AREA]})
        area_a = popolo_a.areas[0]
        popolo_b = Popolo({"areas": [EXAMPLE_AREA]})
        area_b = popolo_b.areas[0]
        assert area_a == area_b
        assert not (area_a != area_b)
gauges.go
package main import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" )
routerStatusGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "router_status", Help: "MySQL Router information", }, []string{"process_id", "product_edition", "time_started", "version", "hostname"}) metadataGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "metadata", Help: "metadata list", }, []string{"name"}) metadataConfigGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "metadata_config", Help: "metadata config", }, []string{"name", "cluster_name", "time_refresh_in_ms", "group_replication_id"}) metadataConfigNodeGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "metadata_config_node", Help: "metadata config node", }, []string{"name", "router_host", "cluster_name", "hostname", "port"}) metadataStatusGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "metadata_status", Help: "metadata status", }, []string{"name", "refresh_failed", "time_last_refresh_succeeded", "last_refresh_hostname", "last_refresh_port"}) routeGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route", Help: "route name", }, []string{"name"}) routeActiveConnectionsGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route_active_connections", Help: "route active connections", }, []string{"name", "router_hostname"}) routeTotalConnectionsGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route_total_connections", Help: "route total connections", }, []string{"name", "router_hostname"}) routeBlockedHostsGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route_blocked_hosts", Help: "route blocked_hosts", }, []string{"name", "router_hostname"}) routeHealthGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route_health", Help: "0: not active, 1: active", }, []string{"name", "router_hostname"}) routeDestinationsGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Namespace: nameSpace, Name: "route_destinations", Help: "", }, []string{"name", "address", "port"}) routeConnectionsByteFromServerGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "route_connections_byte_from_server", Help: "Route connections byte from server", }, []string{"name", "router_hostname", "source_address", "destination_address"}) routeConnectionsByteToServerGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "route_connections_byte_to_server", Help: "Route connections byte to server", }, []string{"name", "router_hostname", "source_address", "destination_address"}) routeConnectionsTimeStartedGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "route_connections_time_started", Help: "Route connections time started", }, []string{"name", "router_hostname", "source_address", "destination_address"}) routeConnectionsTimeConnectedToServerGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "route_connections_time_connected_to_server", Help: "Route connections time connected to server", }, []string{"name", "router_hostname", "source_address", "destination_address"}) routeConnectionsTimeLastSentToServerGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "route_connections_time_last_sent_to_server", Help: "Route connections time last sent to server", }, []string{"name", "router_hostname", "source_address", "destination_address"}) routeConnectionsTimeLastReceivedFromServerGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: 
"route_connections_time_last_received_from_server", Help: "Route connections time last received from server", }, []string{"name", "router_hostname", "source_address", "destination_address"}) )
var (
msgfeefilter_test.go
// Copyright (c) 2013-2016 The btcsuite developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package wire import ( "bytes" "io" "math/rand" "reflect" "testing" "github.com/davecgh/go-spew/spew" ) // TestFeeFilterLatest tests the MsgFeeFilter API against the latest protocol version. func TestFeeFilterLatest(t *testing.T) { pver := ProtocolVersion minfee := rand.Int63() msg := NewMsgFeeFilter(minfee) if msg.MinFee != minfee { t.Errorf("NewMsgFeeFilter: wrong minfee - got %v, want %v", msg.MinFee, minfee) } // Ensure the command is expected value. wantCmd := "feefilter" if cmd := msg.Command(); cmd != wantCmd { t.Errorf("NewMsgFeeFilter: wrong command - got %v want %v", cmd, wantCmd) } // Ensure max payload is expected value for latest protocol version. wantPayload := uint32(8) maxPayload := msg.MaxPayloadLength(pver) if maxPayload != wantPayload { t.Errorf("MaxPayloadLength: wrong max payload length for "+ "protocol version %d - got %v, want %v", pver, maxPayload, wantPayload) } // Test encode with latest protocol version. var buf bytes.Buffer err := msg.BchEncode(&buf, pver, BaseEncoding) if err != nil { t.Errorf("encode of MsgFeeFilter failed %v err <%v>", msg, err) } // Test decode with latest protocol version. readmsg := NewMsgFeeFilter(0) err = readmsg.BchDecode(&buf, pver, BaseEncoding) if err != nil { t.Errorf("decode of MsgFeeFilter failed [%v] err <%v>", buf, err) } // Ensure minfee is the same. if msg.MinFee != readmsg.MinFee { t.Errorf("Should get same minfee for protocol version %d", pver) } } // TestFeeFilterWire tests the MsgFeeFilter wire encode and decode for various protocol // versions. func TestFeeFilterWire(t *testing.T) { tests := []struct { in MsgFeeFilter // Message to encode out MsgFeeFilter // Expected decoded message buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding }{ // Latest protocol version. { MsgFeeFilter{MinFee: 123123}, // 0x1e0f3 MsgFeeFilter{MinFee: 123123}, // 0x1e0f3 []byte{0xf3, 0xe0, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00}, ProtocolVersion, }, // Protocol version FeeFilterVersion { MsgFeeFilter{MinFee: 456456}, // 0x6f708 MsgFeeFilter{MinFee: 456456}, // 0x6f708 []byte{0x08, 0xf7, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00}, FeeFilterVersion, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode the message to wire format. var buf bytes.Buffer err := test.in.BchEncode(&buf, test.pver, BaseEncoding) if err != nil { t.Errorf("BchEncode #%d error %v", i, err) continue } if !bytes.Equal(buf.Bytes(), test.buf) { t.Errorf("BchEncode #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf)) continue } // Decode the message from wire format. var msg MsgFeeFilter rbuf := bytes.NewReader(test.buf) err = msg.BchDecode(rbuf, test.pver, BaseEncoding) if err != nil { t.Errorf("BchDecode #%d error %v", i, err) continue } if !reflect.DeepEqual(msg, test.out) { t.Errorf("BchDecode #%d\n got: %s want: %s", i, spew.Sdump(msg), spew.Sdump(test.out)) continue } } } // TestFeeFilterWireErrors performs negative tests against wire encode and decode // of MsgFeeFilter to confirm error paths work correctly. func TestFeeFilterWireErrors(t *testing.T)
{ pver := ProtocolVersion pverNoFeeFilter := FeeFilterVersion - 1 wireErr := &MessageError{} baseFeeFilter := NewMsgFeeFilter(123123) // 0x1e0f3 baseFeeFilterEncoded := []byte{ 0xf3, 0xe0, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, } tests := []struct { in *MsgFeeFilter // Value to encode buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding max int // Max size of fixed buffer to induce errors writeErr error // Expected write error readErr error // Expected read error }{ // Latest protocol version with intentional read/write errors. // Force error in minfee. {baseFeeFilter, baseFeeFilterEncoded, pver, 0, io.ErrShortWrite, io.EOF}, // Force error due to unsupported protocol version. {baseFeeFilter, baseFeeFilterEncoded, pverNoFeeFilter, 4, wireErr, wireErr}, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode to wire format. w := newFixedWriter(test.max) err := test.in.BchEncode(w, test.pver, BaseEncoding) if reflect.TypeOf(err) != reflect.TypeOf(test.writeErr) { t.Errorf("BchEncode #%d wrong error got: %v, want: %v", i, err, test.writeErr) continue } // For errors which are not of type MessageError, check them for // equality. if _, ok := err.(*MessageError); !ok { if err != test.writeErr { t.Errorf("BchEncode #%d wrong error got: %v, "+ "want: %v", i, err, test.writeErr) continue } } // Decode from wire format. var msg MsgFeeFilter r := newFixedReader(test.max, test.buf) err = msg.BchDecode(r, test.pver, BaseEncoding) if reflect.TypeOf(err) != reflect.TypeOf(test.readErr) { t.Errorf("BchDecode #%d wrong error got: %v, want: %v", i, err, test.readErr) continue } // For errors which are not of type MessageError, check them for // equality. if _, ok := err.(*MessageError); !ok { if err != test.readErr { t.Errorf("BchDecode #%d wrong error got: %v, "+ "want: %v", i, err, test.readErr) continue } } } }
parser.go
package socketio import ( "bufio" "bytes" "encoding/json" "fmt" "io" "io/ioutil" "strconv" "github.com/teltechsystems/go-engine.io" ) const Protocol = 4 type packetType int const ( _CONNECT packetType = iota _DISCONNECT _EVENT _ACK _ERROR _BINARY_EVENT _BINARY_ACK ) func (t packetType) String() string { switch t { case _CONNECT: return "connect" case _DISCONNECT: return "disconnect" case _EVENT: return "event" case _ACK: return "ack" case _ERROR: return "error" case _BINARY_EVENT: return "binary_event" case _BINARY_ACK: return "binary_ack" } return fmt.Sprintf("unknown(%d)", t) } type frameReader interface { NextReader() (engineio.MessageType, io.ReadCloser, error) } type frameWriter interface { NextWriter(engineio.MessageType) (io.WriteCloser, error) } type packet struct { Type packetType NSP string Id int Data interface{} attachNumber int } type encoder struct { w frameWriter err error } func
(w frameWriter) *encoder { return &encoder{ w: w, } } func (e *encoder) Encode(v packet) error { attachments := encodeAttachments(v.Data) v.attachNumber = len(attachments) if v.attachNumber > 0 { v.Type += _BINARY_EVENT - _EVENT } if err := e.encodePacket(v); err != nil { return err } for _, a := range attachments { if err := e.writeBinary(a); err != nil { return err } } return nil } func (e *encoder) encodePacket(v packet) error { writer, err := e.w.NextWriter(engineio.MessageText) if err != nil { return err } defer writer.Close() w := newTrimWriter(writer, "\n") wh := newWriterHelper(w) wh.Write([]byte{byte(v.Type) + '0'}) if v.Type == _BINARY_EVENT || v.Type == _BINARY_ACK { wh.Write([]byte(fmt.Sprintf("%d-", v.attachNumber))) } needEnd := false if v.NSP != "" { wh.Write([]byte(v.NSP)) needEnd = true } if v.Id >= 0 { f := "%d" if needEnd { f = ",%d" needEnd = false } wh.Write([]byte(fmt.Sprintf(f, v.Id))) } if v.Data != nil { if needEnd { wh.Write([]byte{','}) needEnd = false } if wh.Error() != nil { return wh.Error() } encoder := json.NewEncoder(w) return encoder.Encode(v.Data) } return wh.Error() } func (e *encoder) writeBinary(r io.Reader) error { writer, err := e.w.NextWriter(engineio.MessageBinary) if err != nil { return err } defer writer.Close() if _, err := io.Copy(writer, r); err != nil { return err } return nil } type decoder struct { reader frameReader message string current io.Reader currentCloser io.Closer } func newDecoder(r frameReader) *decoder { return &decoder{ reader: r, } } func (d *decoder) Close() { if d != nil && d.currentCloser != nil { d.currentCloser.Close() d.current = nil d.currentCloser = nil } } func (d *decoder) Decode(v *packet) error { ty, r, err := d.reader.NextReader() if err != nil { return err } if d.current != nil { d.Close() } defer func() { if d.current == nil { r.Close() } }() if ty != engineio.MessageText { return fmt.Errorf("need text package") } reader := bufio.NewReader(r) v.Id = -1 t, err := reader.ReadByte() if err != nil { return err } v.Type = packetType(t - '0') if v.Type == _BINARY_EVENT || v.Type == _BINARY_ACK { num, err := reader.ReadBytes('-') if err != nil { return err } numLen := len(num) if numLen == 0 { return fmt.Errorf("invalid packet") } n, err := strconv.ParseInt(string(num[:numLen-1]), 10, 64) if err != nil { return fmt.Errorf("invalid packet") } v.attachNumber = int(n) } next, err := reader.Peek(1) if err == io.EOF { return nil } if err != nil { return err } if len(next) == 0 { return fmt.Errorf("invalid packet") } if next[0] == '/' { path, err := reader.ReadBytes(',') if err != nil && err != io.EOF { return err } pathLen := len(path) if pathLen == 0 { return fmt.Errorf("invalid packet") } if err == nil { path = path[:pathLen-1] } v.NSP = string(path) if err == io.EOF { return nil } } id := bytes.NewBuffer(nil) finish := false for { next, err := reader.Peek(1) if err == io.EOF { finish = true break } if err != nil { return err } if '0' <= next[0] && next[0] <= '9' { if err := id.WriteByte(next[0]); err != nil { return err } } else { break } reader.ReadByte() } if id.Len() > 0 { id, err := strconv.ParseInt(id.String(), 10, 64) if err != nil { return err } v.Id = int(id) } if finish { return nil } switch v.Type { case _EVENT: fallthrough case _BINARY_EVENT: msgReader, err := newMessageReader(reader) if err != nil { return err } d.message = msgReader.Message() d.current = msgReader d.currentCloser = r case _ACK: fallthrough case _BINARY_ACK: d.current = reader d.currentCloser = r } return nil } func (d *decoder) Message() 
string { return d.message } func (d *decoder) DecodeData(v *packet) error { if d.current == nil { return nil } defer func() { d.Close() }() decoder := json.NewDecoder(d.current) if err := decoder.Decode(v.Data); err != nil { return err } if v.Type == _BINARY_EVENT || v.Type == _BINARY_ACK { binary, err := d.decodeBinary(v.attachNumber) if err != nil { return err } if err := decodeAttachments(v.Data, binary); err != nil { return err } v.Type -= _BINARY_EVENT - _EVENT } return nil } func (d *decoder) decodeBinary(num int) ([][]byte, error) { ret := make([][]byte, num) for i := 0; i < num; i++ { d.currentCloser.Close() t, r, err := d.reader.NextReader() if err != nil { return nil, err } d.currentCloser = r if t == engineio.MessageText { return nil, fmt.Errorf("need binary") } b, err := ioutil.ReadAll(r) if err != nil { return nil, err } ret[i] = b } return ret, nil }
newEncoder
tab-por-valoracion.ts
import { Component } from '@angular/core'; import { IonicPage } from 'ionic-angular'; import { FirebaseDbProvider } from "../../providers/firebase-db/firebase-db"; import { TimelineProvider } from '../../providers/timeline/timeline'; /** * Generated class for the TabPorValoracionPage page. * * See https://ionicframework.com/docs/components/#navigation for more info on * Ionic pages and navigation. */ @IonicPage() @Component({ selector: 'page-tab-por-valoracion', templateUrl: 'tab-por-valoracion.html', providers: [TimelineProvider] }) export class TabPorValoracionPage { valoracion: number = 0; //valoracion2: any; sitios: any; constructor( public timeline: TimelineProvider, public dbFirebase: FirebaseDbProvider, ) { // this.valoracion2 = this.valoracion; } //////////////////////////////////////////////////////////////////////////////////////// ////////////////-------------- MENÚ / TIMELINE ----------///////////////////////// //////////////////////////////////////////////////////////////////////////////////////// cerrarSesion() { //this.auth.logout(); this.timeline.cerrarSesion(); } verInfoRestaurante(restaurant) { this.timeline.verInfoRestaurante(restaurant); } presentImage(myImage) { this.timeline.presentImage(myImage); } miUbicacion() { this.timeline.miUbicacion(); } valorarMenu(restaurant) { this.timeline.valorarMenu(restaurant); } //////////////////////////////////////////////////////////////////////////////////////// ////////////////-------------- COMPARTIR ----------///////////////////////// //////////////////////////////////////////////////////////////////////////////////////// whatsapShare(nombre, foto) { this.timeline.whatsapShare(nombre, foto); }
} facebookShare(nombre, foto) { this.timeline.facebookShare(nombre, foto); } emailShare(nombre, foto) { this.timeline.emailShare(nombre, foto); } instagramShare(nombre, foto) { this.timeline.instagramShare(nombre, foto); } ///////////////////////////////////////////////////////////////////////////////////////// ////////////////-------------- FILTRADO POR VALORACIÓN -------////////////////////////// //////////////////////////////////////////////////////////////////////////////////////// setValoracion(valoracion) { this.valoracion = valoracion; console.log(this.valoracion); this.dbFirebase.getSitios('valoracion', this.valoracion).subscribe(sitios => { this.sitios = sitios; }); } //////////////////////////////////////////////////////////////////////////////////////// ////////////////-------------- CARGA DE COMPONENTES ----------///////////////////////// //////////////////////////////////////////////////////////////////////////////////////// /** * metodo que inicia en el momento que inicia por * primera vez la aplicación * @memberof HomePage */ ionViewDidEnter() { //this.getValoracion(); } ionViewDidLoad() { this.timeline.obtenerPosicion(); } }
twitterShare(nombre, foto) { this.timeline.twitterShare(nombre, foto)
student_words.py
import zeeguu.core from zeeguu.core.sql.learner.words import words_not_studied, learned_words from ._common_api_parameters import _get_student_cohort_and_period_from_POST_params from .. import api, json_result, with_session db = zeeguu.core.db @api.route("/student_words_not_studied", methods=["POST"]) @with_session def student_words_not_studied():
@api.route("/student_learned_words", methods=["POST"]) @with_session def student_learned_words(): user, cohort, from_date, to_date = _get_student_cohort_and_period_from_POST_params() stats = learned_words(user.id, cohort.language_id, from_date, to_date) return json_result(stats)
user, cohort, from_str, to_str = _get_student_cohort_and_period_from_POST_params() stats = words_not_studied(user.id, cohort.language_id, from_str, to_str) return json_result(stats)
route.js
import Ember from 'ember'; export default Ember.Route.extend({ title() { let stack = this.modelFor('stack'); return `${stack.get('handle')} Certificates`; }, model() { let stack = this.modelFor('stack'); return stack.get('certificates');
}, setupController(controller, model) { let stack = this.modelFor('stack'); controller.setProperties({ model, stack }); }, actions: { openCreateCertificateModal() { let stack = this.modelFor('stack'); this.controller.set('newCertificate', this.store.createRecord('certificate', { stack })); }, onCreateCertificate(certificate) { let stack = this.modelFor('stack'); certificate.save({ stack: {id: stack.get('id') } }).then(() => { let message = `${certificate.get('commonName')} created.`; this.transitionTo('certificates.index'); Ember.get(this, 'flashMessages').success(message); }); }, delete(model) { // Confirm... let confirmMsg = `\nAre you sure you want to delete ${model.get('commonName')}?\n`; if (!confirm(confirmMsg)) { return false; } let stack = model.get('stack'); model.deleteRecord(); model.save().then(() => { let message = `${model.get('commonName')} certificate destroyed`; this.transitionTo('certificates', stack); Ember.get(this, 'flashMessages').success(message); }); } } });
walker.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true });
const klaw = require("klaw"); function getDirectoryStructure(root) { return new Promise((resolve, reject) => { if (!fs.existsSync(root)) { return reject(new Error(`App directory ${root} does not exist`)); } const files = []; const directories = []; klaw(root, { fs: gfs }) .on('data', (item) => { if (item.stats.isFile()) { files.push(item.path); } else if (item.stats.isDirectory() && item.path !== root) { directories.push(item.path); } }) .on('end', () => resolve({ files, directories })); }); } exports.getDirectoryStructure = getDirectoryStructure;
const fs = require("fs-extra"); const gfs = require("graceful-fs");
helpers.go
package infra import "regexp" func MatchString(pattern, letter string) bool
{ ok, err := regexp.MatchString(pattern, letter) if err != nil { // TODO REFACTOR ERROR TREATMENT panic(err) } return ok }
models.py
# -*- coding: future_fstrings -*- from django.db import models from django.contrib.auth.models import User import datetime from pytz import timezone def now(): # return Main.objects.all().first().now return datetime.datetime.now() def set_now(d): m = Main.objects.all().first() m.now = d m.save() class Team(models.Model): full_name = models.CharField(max_length=50) short_name = models.CharField(max_length=3) nick_name = models.CharField(max_length=50) city_name = models.CharField(max_length=50) class Game(models.Model): week_number = models.IntegerField() game_number = models.IntegerField() fav = models.ForeignKey(Team, related_name='fav_games', on_delete=models.CASCADE) udog = models.ForeignKey(Team, related_name='udog_games', on_delete=models.CASCADE) spread = models.IntegerField( null=True ) game_date = models.DateTimeField() fav_score = models.IntegerField( null=True ) udog_score = models.IntegerField( null=True ) fav_is_home = models.BooleanField() class Meta: constraints = [ models.UniqueConstraint(fields=['week_number', 'game_number'], name='unique_week_game'), #spread >=0 ] def totalPoints(self): if self.fav_score is None or self.udog_score is None: return None else: return self.fav_score+self.udog_score # if HOU is 3.5 points over ARI, then setFav(HOU,3) # where HOU is_a Team object def setFav(self,fav,spread): if spread < 0: raise(NameError('spread must be positive')) if type(fav) is str: raise(NameError('you sent a string as fav to setFav. Send a Team object')) if fav != self.fav and fav != self.udog: raise(NameError(f'{fav.nick_name} not playing in this game! (I am game {self.game_number}, {self.fav.nick_name} v {self.udog.nick_name})')) self.spread = spread if self.fav != fav: temp = self.fav self.fav = self.udog self.udog = temp self.fav_is_home = not(self.fav_is_home) def save(self, *args, **kwargs): if not(self.spread is None) and self.spread < 0: self.spread = -self.spread temp = self.fav self.fav = self.udog self.udog = temp self.fav_is_home = not(self.fav_is_home) super(Game, self).save(*args, **kwargs) def favFullName(self): if self.fav_is_home: return self.fav.full_name.upper() else: return self.fav.full_name.lower() def udogFullName(self): if not(self.fav_is_home): return self.udog.full_name.upper() else: return self.udog.full_name.lower() def
(self): if self.fav_is_home: return self.fav.short_name.upper() else: return self.fav.short_name.lower() def udogShortName(self): if not(self.fav_is_home): return self.udog.short_name.upper() else: return self.udog.short_name.lower() def favNickName(self): if self.fav_is_home: return self.fav.nick_name.upper() else: return self.fav.nick_name.lower() def udogNickName(self): if not(self.fav_is_home): return self.udog.nick_name.upper() else: return self.udog.nick_name.lower() def homeNickName(self): if self.fav_is_home: return self.fav.nick_name else: return self.udog.nick_name def awayNickName(self): if self.fav_is_home: return self.udog.nick_name else: return self.fav.nick_name def isClosed(self, current_time = None): if current_time is None: current_time = now() if self.game_date.weekday() == 0: # Monday distance_to_sunday = -1 else: distance_to_sunday = 6 - self.game_date.weekday() current_sunday = self.game_date + datetime.timedelta(distance_to_sunday) current_sunday = current_sunday.replace(hour=13, minute=0, second=0) if current_time > current_sunday or current_time > self.game_date: return True else: return False def isOver(self): if self.fav_score is None or self.udog_score is None: return False else: return True def isOpen(self, current_time = None): return not(self.isClosed(current_time = current_time)) def favWins(self): # throw exception if scores are not filled in if self.fav_score - self.udog_score > self.spread: return True else: return False def as_string(self): return f'{self.week_number}/{self.game_number}\n{self.game_date.strftime("%m/%d/%Y, %H:%M:%S")}\n{self.favNickName()} {self.fav_score}\t{self.spread}.5\t{self.udogNickName()} {self.udog_score}' class Pick(models.Model): player = models.ForeignKey(User,on_delete=models.CASCADE) week_number = models.IntegerField() game_number = models.IntegerField() picked_fav = models.BooleanField() def save(self, *args, **kwargs): force = False try: force = kwargs.pop('force') except: pass if not(force) and Game.objects.get(game_number=self.game_number,week_number=self.week_number).isClosed(): # You can't change this pick! err = f'Not actually saving. You are trying to change a pick for a game that isClosed. week: {self.week_number} game:{self.game_number}. If you want to do this use force=True' print(err) else: super(Pick, self).save(*args, **kwargs) def game(self): return Game.objects.get(week_number=self.week_number, game_number=self.game_number) def whoShortName(self): if self.picked_fav: return self.game().favShortName() else: return self.game().udogShortName() def isCorrect(self): game = Game.objects.get(week_number=self.week_number, game_number=self.game_number) if game.isOver(): return self.picked_fav and game.favWins() or not(self.picked_fav) and not(game.favWins()) else: return False; class Monday(models.Model): player = models.ForeignKey(User,on_delete=models.CASCADE) week_number = models.IntegerField() total_points = models.IntegerField(null=True) def bonus(self): monday_game = Game.objects.filter(week_number=self.week_number).order_by('game_number').last() tp = monday_game.totalPoints() if tp is None: return 0.0 else: return 1 / ( 1 + abs( tp - self.total_points - 0.1 ) ) def save(self, *args, **kwargs): force = False try: force = kwargs.pop('force') except: pass if not(force) and Game.objects.filter(week_number=self.week_number).order_by('game_number').last().isClosed(): err = f'Not actually saving. You are trying to change MNTP for a game that isClosed. week: {self.week_number}. 
If you want to do this use force=True' print(err) else: super(Monday, self).save(*args, **kwargs) class Bank(models.Model): player = models.ForeignKey(User,on_delete=models.CASCADE) deposit_amount = models.FloatField() note = models.CharField(max_length=50, default='') transaction_date = models.DateTimeField( auto_now=True, blank=False) class Blog(models.Model): entry_date = models.DateTimeField( auto_now=True, blank=False) entry = models.CharField(max_length=2048, default='') # only used in development class Main(models.Model): now = models.DateTimeField( auto_now=False, blank=False)
favShortName
borrowck-use-uninitialized-in-cast-trait.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Variation on `borrowck-use-uninitialized-in-cast` in which we do a // trait cast from an uninitialized source. Issue #20791. trait Foo { fn dummy(&self) { } } impl Foo for i32 { } fn
() { let x: &i32; let y = x as *const Foo; //~ ERROR use of possibly uninitialized variable: `*x` }
main
as-far-from-land-as-possible.py
# Time: O(m * n) # Space: O(m * n) import collections class Solution(object): def
(self, grid): """ :type grid: List[List[int]] :rtype: int """ directions = [(0, 1), (1, 0), (0, -1), (-1, 0)] q = collections.deque([(i, j) for i in range(len(grid)) for j in range(len(grid[0])) if grid[i][j] == 1]) if len(q) == len(grid)*len(grid[0]): return -1 level = -1 while q: next_q = collections.deque() while q: x, y = q.popleft() for dx, dy in directions: nx, ny = x+dx, y+dy if not (0 <= nx < len(grid) and 0 <= ny < len(grid[0]) and grid[nx][ny] == 0): continue next_q.append((nx, ny)) grid[nx][ny] = 1 q = next_q level += 1 return level
maxDistance
database_provider.go
// Copyright 2021 Dolthub, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sqle import ( "context" "fmt" "strings" "sync" "github.com/dolthub/go-mysql-server/sql" "github.com/dolthub/dolt/go/libraries/doltcore/doltdb" "github.com/dolthub/dolt/go/libraries/doltcore/env" "github.com/dolthub/dolt/go/libraries/doltcore/env/actions" "github.com/dolthub/dolt/go/libraries/doltcore/ref" "github.com/dolthub/dolt/go/libraries/doltcore/sqle/dfunctions" "github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess" "github.com/dolthub/dolt/go/libraries/doltcore/table/editor" "github.com/dolthub/dolt/go/libraries/utils/filesys" "github.com/dolthub/dolt/go/store/types" ) const ( dbRevisionDelimiter = "/" ) type DoltDatabaseProvider struct { databases map[string]sql.Database functions map[string]sql.Function mu *sync.RWMutex defaultBranch string dataRootDir string fs filesys.Filesys dbFactoryUrl string } var _ sql.DatabaseProvider = DoltDatabaseProvider{} var _ sql.FunctionProvider = DoltDatabaseProvider{} var _ sql.MutableDatabaseProvider = DoltDatabaseProvider{} var _ dsess.RevisionDatabaseProvider = DoltDatabaseProvider{} // NewDoltDatabaseProvider returns a provider for the databases given func
(defaultBranch string, fs filesys.Filesys, databases ...sql.Database) DoltDatabaseProvider { dbs := make(map[string]sql.Database, len(databases)) for _, db := range databases { dbs[strings.ToLower(db.Name())] = db } funcs := make(map[string]sql.Function, len(dfunctions.DoltFunctions)) for _, fn := range dfunctions.DoltFunctions { funcs[strings.ToLower(fn.FunctionName())] = fn } return DoltDatabaseProvider{ databases: dbs, functions: funcs, mu: &sync.RWMutex{}, fs: fs, defaultBranch: defaultBranch, dbFactoryUrl: doltdb.LocalDirDoltDB, } } // WithFunctions returns a copy of this provider with the functions given. Any previous functions are removed. func (p DoltDatabaseProvider) WithFunctions(fns []sql.Function) DoltDatabaseProvider { funcs := make(map[string]sql.Function, len(dfunctions.DoltFunctions)) for _, fn := range fns { funcs[strings.ToLower(fn.FunctionName())] = fn } p.functions = funcs return p } // WithDbFactoryUrl returns a copy of this provider with the DbFactoryUrl set as provided. // The URL is used when creating new databases. // See doltdb.InMemDoltDB, doltdb.LocalDirDoltDB func (p DoltDatabaseProvider) WithDbFactoryUrl(url string) DoltDatabaseProvider { p.dbFactoryUrl = url return p } func (p DoltDatabaseProvider) Database(ctx *sql.Context, name string) (db sql.Database, err error) { name = strings.ToLower(name) var ok bool p.mu.RLock() db, ok = p.databases[name] p.mu.RUnlock() if ok { return db, nil } db, _, ok, err = p.databaseForRevision(ctx, name) if err != nil { return nil, err } if !ok { return nil, sql.ErrDatabaseNotFound.New(name) } p.mu.Lock() defer p.mu.Unlock() if found, ok := p.databases[name]; !ok { p.databases[name] = db return db, nil } else { return found, nil } } func (p DoltDatabaseProvider) HasDatabase(ctx *sql.Context, name string) bool { _, err := p.Database(ctx, name) return err == nil } func (p DoltDatabaseProvider) AllDatabases(ctx *sql.Context) (all []sql.Database) { p.mu.RLock() defer p.mu.RUnlock() i := 0 all = make([]sql.Database, len(p.databases)) for _, db := range p.databases { all[i] = db i++ } return } func (p DoltDatabaseProvider) CreateDatabase(ctx *sql.Context, name string) error { p.mu.Lock() defer p.mu.Unlock() exists, isDir := p.fs.Exists(name) if exists && isDir { return sql.ErrDatabaseExists.New(name) } else if exists { return fmt.Errorf("Cannot create DB, file exists at %s", name) } err := p.fs.MkDirs(name) if err != nil { return err } newFs, err := p.fs.WithWorkingDir(name) if err != nil { return err } // TODO: fill in version appropriately dsess := dsess.DSessFromSess(ctx.Session) newEnv := env.Load(ctx, env.GetCurrentUserHomeDir, newFs, p.dbFactoryUrl, "TODO") err = newEnv.InitRepo(ctx, types.Format_Default, dsess.Username(), dsess.Email(), p.defaultBranch) if err != nil { return err } fkChecks, err := ctx.GetSessionVariable(ctx, "foreign_key_checks") if err != nil { return err } opts := editor.Options{ Deaf: newEnv.DbEaFactory(), // TODO: this doesn't seem right, why is this getting set in the constructor to the DB ForeignKeyChecksDisabled: fkChecks.(int8) == 0, } db := NewDatabase(name, newEnv.DbData(), opts) p.databases[strings.ToLower(db.Name())] = db dbstate, err := GetInitialDBState(ctx, db) if err != nil { return err } return dsess.AddDB(ctx, dbstate) } func (p DoltDatabaseProvider) DropDatabase(ctx *sql.Context, name string) error { p.mu.Lock() defer p.mu.Unlock() // Get the DB's directory exists, isDir := p.fs.Exists(name) if !exists { // engine should already protect against this return sql.ErrDatabaseNotFound.New(name) 
} else if !isDir { return fmt.Errorf("unexpected error: %s exists but is not a directory", name) } err := p.fs.Delete(name, true) if err != nil { return err } // TODO: delete database in current dir delete(p.databases, strings.ToLower(name)) return nil } func (p DoltDatabaseProvider) databaseForRevision(ctx context.Context, revDB string) (sql.Database, dsess.InitialDbState, bool, error) { revDB = strings.ToLower(revDB) if !strings.Contains(revDB, dbRevisionDelimiter) { return nil, dsess.InitialDbState{}, false, nil } parts := strings.SplitN(revDB, dbRevisionDelimiter, 2) dbName, revSpec := parts[0], parts[1] p.mu.RLock() candidate, ok := p.databases[dbName] p.mu.RUnlock() if !ok { return nil, dsess.InitialDbState{}, false, nil } srcDb, ok := candidate.(SqlDatabase) if !ok { return nil, dsess.InitialDbState{}, false, nil } isBranch, err := isBranch(ctx, srcDb, revSpec) if err != nil { return nil, dsess.InitialDbState{}, false, err } if isBranch { // fetch the upstream head if this is a replicated db if replicaDb, ok := srcDb.(ReadReplicaDatabase); ok { // TODO move this out of analysis phase, should only happen at read time err := switchAndFetchReplicaHead(ctx, revSpec, replicaDb) if err != nil { return nil, dsess.InitialDbState{}, false, err } } db, init, err := dbRevisionForBranch(ctx, srcDb, revSpec) if err != nil { return nil, dsess.InitialDbState{}, false, err } return db, init, true, nil } if doltdb.IsValidCommitHash(revSpec) { // TODO: this should be an interface, not a struct replicaDb, ok := srcDb.(ReadReplicaDatabase) if ok { srcDb = replicaDb.Database } srcDb, ok = srcDb.(Database) if !ok { return nil, dsess.InitialDbState{}, false, nil } db, init, err := dbRevisionForCommit(ctx, srcDb.(Database), revSpec) if err != nil { return nil, dsess.InitialDbState{}, false, err } return db, init, true, nil } return nil, dsess.InitialDbState{}, false, nil } func (p DoltDatabaseProvider) RevisionDbState(ctx context.Context, revDB string) (dsess.InitialDbState, error) { _, init, ok, err := p.databaseForRevision(ctx, revDB) if err != nil { return dsess.InitialDbState{}, err } else if !ok { return dsess.InitialDbState{}, sql.ErrDatabaseNotFound.New(revDB) } return init, nil } // Function implements the FunctionProvider interface func (p DoltDatabaseProvider) Function(_ *sql.Context, name string) (sql.Function, error) { fn, ok := p.functions[strings.ToLower(name)] if !ok { return nil, sql.ErrFunctionNotFound.New(name) } return fn, nil } // TableFunction implements the TableFunctionProvider interface func (p DoltDatabaseProvider) TableFunction(ctx *sql.Context, name string) (sql.TableFunction, error) { // currently, only one table function is supported, if we extend this, we should clean this up // and store table functions in a map, similar to regular functions. if strings.ToLower(name) == "dolt_diff" { dtf := &DiffTableFunction{} return dtf, nil } return nil, sql.ErrTableFunctionNotFound.New(name) } // switchAndFetchReplicaHead tries to pull the latest version of a branch. Will fail if the branch // does not exist on the ReadReplicaDatabase's remote. If the target branch is not a replication // head, the new branch will not be continuously fetched. 
func switchAndFetchReplicaHead(ctx context.Context, branch string, db ReadReplicaDatabase) error { branchRef := ref.NewBranchRef(branch) var branchExists bool branches, err := db.ddb.GetBranches(ctx) if err != nil { return err } for _, br := range branches { if br.String() == branch { branchExists = true break } } // check whether branch is on remote before creating local tracking branch cm, err := actions.FetchRemoteBranch(ctx, db.tmpDir, db.remote, db.srcDB, db.DbData().Ddb, branchRef, actions.NoopRunProgFuncs, actions.NoopStopProgFuncs) if err != nil { return err } // create refs/heads/branch dataset if !branchExists { err = db.ddb.NewBranchAtCommit(ctx, branchRef, cm) if err != nil { return err } } // update ReadReplicaRemote with new HEAD // dolt_replicate_heads configuration remains unchanged db, err = db.SetHeadRef(branchRef) if err != nil { return err } // create workingSets/heads/branch and update the working set err = pullBranches(ctx, db, []string{branch}) if err != nil { return err } return nil } // isBranch returns whether a branch with the given name is in scope for the database given func isBranch(ctx context.Context, db SqlDatabase, branchName string) (bool, error) { var ddbs []*doltdb.DoltDB if rdb, ok := db.(ReadReplicaDatabase); ok { remoteDB, err := rdb.remote.GetRemoteDB(ctx, rdb.ddb.Format()) if err != nil { return false, err } ddbs = append(ddbs, rdb.ddb, remoteDB) } else if ddb, ok := db.(Database); ok { ddbs = append(ddbs, ddb.ddb) } else { return false, fmt.Errorf("unrecognized type of database %T", db) } for _, ddb := range ddbs { branchExists, err := ddb.HasBranch(ctx, branchName) if err != nil { return false, err } if branchExists { return true, nil } } return false, nil } func dbRevisionForBranch(ctx context.Context, srcDb SqlDatabase, revSpec string) (SqlDatabase, dsess.InitialDbState, error) { branch := ref.NewBranchRef(revSpec) cm, err := srcDb.DbData().Ddb.ResolveCommitRef(ctx, branch) if err != nil { return Database{}, dsess.InitialDbState{}, err } wsRef, err := ref.WorkingSetRefForHead(branch) if err != nil { return Database{}, dsess.InitialDbState{}, err } ws, err := srcDb.DbData().Ddb.ResolveWorkingSet(ctx, wsRef) if err != nil { return Database{}, dsess.InitialDbState{}, err } dbName := srcDb.Name() + dbRevisionDelimiter + revSpec static := staticRepoState{ branch: branch, RepoStateWriter: srcDb.DbData().Rsw, RepoStateReader: srcDb.DbData().Rsr, DocsReadWriter: srcDb.DbData().Drw, } var db SqlDatabase switch v := srcDb.(type) { case Database: db = Database{ name: dbName, ddb: v.ddb, rsw: static, rsr: static, drw: static, gs: v.gs, editOpts: v.editOpts, } case ReadReplicaDatabase: db = ReadReplicaDatabase{ Database: Database{ name: dbName, ddb: v.ddb, rsw: static, rsr: static, drw: static, gs: v.gs, editOpts: v.editOpts, }, headRef: v.headRef, remoteTrackRef: v.remoteTrackRef, remote: v.remote, srcDB: v.srcDB, tmpDir: v.tmpDir, } } init := dsess.InitialDbState{ Db: db, HeadCommit: cm, WorkingSet: ws, DbData: env.DbData{ Ddb: srcDb.DbData().Ddb, Rsw: static, Rsr: static, Drw: static, }, } return db, init, nil } func dbRevisionForCommit(ctx context.Context, srcDb Database, revSpec string) (ReadOnlyDatabase, dsess.InitialDbState, error) { spec, err := doltdb.NewCommitSpec(revSpec) if err != nil { return ReadOnlyDatabase{}, dsess.InitialDbState{}, err } cm, err := srcDb.DbData().Ddb.Resolve(ctx, spec, srcDb.DbData().Rsr.CWBHeadRef()) if err != nil { return ReadOnlyDatabase{}, dsess.InitialDbState{}, err } name := srcDb.Name() + dbRevisionDelimiter + 
revSpec db := ReadOnlyDatabase{Database: Database{ name: name, ddb: srcDb.DbData().Ddb, rsw: srcDb.DbData().Rsw, rsr: srcDb.DbData().Rsr, drw: srcDb.DbData().Drw, editOpts: srcDb.editOpts, }} init := dsess.InitialDbState{ Db: db, HeadCommit: cm, ReadOnly: true, DbData: env.DbData{ Ddb: srcDb.DbData().Ddb, Rsw: srcDb.DbData().Rsw, Rsr: srcDb.DbData().Rsr, Drw: srcDb.DbData().Drw, }, } return db, init, nil } type staticRepoState struct { branch ref.DoltRef env.RepoStateWriter env.RepoStateReader env.DocsReadWriter } func (s staticRepoState) CWBHeadRef() ref.DoltRef { return s.branch }
NewDoltDatabaseProvider
bytearray_object.rs
use crate::avm2::activation::Activation; use crate::avm2::bytearray::ByteArrayStorage; use crate::avm2::class::Class; use crate::avm2::names::{Namespace, QName}; use crate::avm2::object::script_object::ScriptObjectData; use crate::avm2::object::{Object, ObjectPtr, TObject}; use crate::avm2::scope::Scope; use crate::avm2::string::AvmString; use crate::avm2::value::Value; use crate::avm2::Error; use crate::{impl_avm2_custom_object, impl_avm2_custom_object_instance}; use gc_arena::{Collect, GcCell, MutationContext}; use std::cell::{Ref, RefMut}; /// A class instance allocator that allocates ByteArray objects. pub fn bytearray_allocator<'gc>( class: Object<'gc>, proto: Object<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Object<'gc>, Error> { let base = ScriptObjectData::base_new(Some(proto), Some(class)); Ok(ByteArrayObject(GcCell::allocate( activation.context.gc_context, ByteArrayObjectData { base, storage: ByteArrayStorage::new(), }, )) .into()) } #[derive(Clone, Collect, Debug, Copy)] #[collect(no_drop)] pub struct ByteArrayObject<'gc>(GcCell<'gc, ByteArrayObjectData<'gc>>); #[derive(Clone, Collect, Debug)] #[collect(no_drop)] pub struct ByteArrayObjectData<'gc> { /// Base script object base: ScriptObjectData<'gc>, storage: ByteArrayStorage, } impl<'gc> TObject<'gc> for ByteArrayObject<'gc> { impl_avm2_custom_object!(base); impl_avm2_custom_object_instance!(base); fn get_property_local( self, receiver: Object<'gc>, name: &QName<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let read = self.0.read(); if name.namespace().is_public() { if let Ok(index) = name.local_name().parse::<usize>() { return Ok(if let Some(val) = read.storage.get(index) { Value::Unsigned(val as u32) } else { Value::Undefined }); } } let rv = read.base.get_property_local(receiver, name, activation)?; drop(read); rv.resolve(activation) } fn
( self, receiver: Object<'gc>, name: &QName<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut write = self.0.write(activation.context.gc_context); if name.namespace().is_public() { if let Ok(index) = name.local_name().parse::<usize>() { write .storage .set(index, value.coerce_to_u32(activation)? as u8); return Ok(()); } } let rv = write .base .set_property_local(receiver, name, value, activation)?; drop(write); rv.resolve(activation)?; Ok(()) } fn init_property_local( self, receiver: Object<'gc>, name: &QName<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut write = self.0.write(activation.context.gc_context); if name.namespace().is_public() { if let Ok(index) = name.local_name().parse::<usize>() { write .storage .set(index, value.coerce_to_u32(activation)? as u8); return Ok(()); } } let rv = write .base .init_property_local(receiver, name, value, activation)?; drop(write); rv.resolve(activation)?; Ok(()) } fn is_property_overwritable( self, gc_context: MutationContext<'gc, '_>, name: &QName<'gc>, ) -> bool { self.0.write(gc_context).base.is_property_overwritable(name) } fn is_property_final(self, name: &QName<'gc>) -> bool { self.0.read().base.is_property_final(name) } fn delete_property(&self, gc_context: MutationContext<'gc, '_>, name: &QName<'gc>) -> bool { if name.namespace().is_public() { if let Ok(index) = name.local_name().parse::<usize>() { self.0.write(gc_context).storage.delete(index); return true; } } self.0.write(gc_context).base.delete_property(name) } fn has_own_property(self, name: &QName<'gc>) -> Result<bool, Error> { if name.namespace().is_public() { if let Ok(index) = name.local_name().parse::<usize>() { return Ok(self.0.read().storage.get(index).is_some()); } } self.0.read().base.has_own_property(name) } fn resolve_any(self, local_name: AvmString<'gc>) -> Result<Option<Namespace<'gc>>, Error> { if let Ok(index) = local_name.parse::<usize>() { if self.0.read().storage.get(index).is_some() { return Ok(Some(Namespace::public())); } } self.0.read().base.resolve_any(local_name) } fn derive(&self, activation: &mut Activation<'_, 'gc, '_>) -> Result<Object<'gc>, Error> { let this: Object<'gc> = Object::ByteArrayObject(*self); let base = ScriptObjectData::base_new(Some(this), None); Ok(ByteArrayObject(GcCell::allocate( activation.context.gc_context, ByteArrayObjectData { base, storage: ByteArrayStorage::new(), }, )) .into()) } fn value_of(&self, _mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> { Ok(Value::Object(Object::from(*self))) } fn as_bytearray(&self) -> Option<Ref<ByteArrayStorage>> { Some(Ref::map(self.0.read(), |d| &d.storage)) } fn as_bytearray_mut(&self, mc: MutationContext<'gc, '_>) -> Option<RefMut<ByteArrayStorage>> { Some(RefMut::map(self.0.write(mc), |d| &mut d.storage)) } fn as_bytearray_object(&self) -> Option<ByteArrayObject<'gc>> { Some(*self) } }
set_property_local
functions.py
# Copyright 2016 James Hensman, alexggmatthews # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------ # Modification notice: # This file was modified by Vincent ADAM # ------------------------------------------ import tensorflow as tf from settings import float_type from quadrature import hermgauss import numpy as np def eye(N): """ An identitiy matrix """ return tf.diag(tf.ones(tf.stack([N, ]), dtype=float_type)) def variational_expectations( Fmu, Fvar, phi, num_gauss_hermite_points=20): """ Compute the expected value of a function phi, given a Gaussian distribution for the input values. if q(f) = N(Fmu, Fvar) then this method computes \int phi(f) q(f) df. Here, we implement a default Gauss-Hermite quadrature routine """ gh_x, gh_w = hermgauss(num_gauss_hermite_points) gh_x = gh_x.reshape(1, -1) gh_w = gh_w.reshape(-1, 1) / np.sqrt(np.pi) shape = tf.shape(Fmu) Fmu, Fvar = [tf.reshape(e, (-1, 1)) for e in (Fmu, Fvar)] X = gh_x * tf.sqrt(2.0 * Fvar) + Fmu logp = phi(X) return tf.reshape(tf.matmul(logp, gh_w), shape) import tensorflow as tf def
(matrices, dtype=tf.float32): """Constructs block-diagonal matrices from a list of batched 2D tensors. Args: matrices: A list of Tensors with shape [..., N_i, M_i] (i.e. a list of matrices with the same batch dimension). dtype: Data type to use. The Tensors in `matrices` must match this dtype. Returns: A matrix with the input matrices stacked along its main diagonal, having shape [..., \sum_i N_i, \sum_i M_i]. """ matrices = [tf.convert_to_tensor(matrix, dtype=dtype) for matrix in matrices] blocked_rows = tf.Dimension(0) blocked_cols = tf.Dimension(0) batch_shape = tf.TensorShape(None) for matrix in matrices: full_matrix_shape = matrix.get_shape().with_rank_at_least(2) batch_shape = batch_shape.merge_with(full_matrix_shape[:-2]) blocked_rows += full_matrix_shape[-2] blocked_cols += full_matrix_shape[-1] ret_columns_list = [] for matrix in matrices: matrix_shape = tf.shape(matrix) ret_columns_list.append(matrix_shape[-1]) ret_columns = tf.add_n(ret_columns_list) row_blocks = [] current_column = 0 for matrix in matrices: matrix_shape = tf.shape(matrix) row_before_length = current_column current_column += matrix_shape[-1] row_after_length = ret_columns - current_column row_blocks.append(tf.pad( tensor=matrix, paddings=tf.concat( [tf.zeros([tf.rank(matrix) - 1, 2], dtype=tf.int32), [(row_before_length, row_after_length)]], axis=0))) blocked = tf.concat(row_blocks, -2) blocked.set_shape(batch_shape.concatenate((blocked_rows, blocked_cols))) return blocked
block_diagonal
convolutional.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Keras convolution layers and image transformation layers. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.eager import context from tensorflow.python.framework import tensor_shape from tensorflow.python.keras import activations from tensorflow.python.keras import backend from tensorflow.python.keras import constraints from tensorflow.python.keras import initializers from tensorflow.python.keras import regularizers from tensorflow.python.keras.engine.base_layer import Layer from tensorflow.python.keras.engine.input_spec import InputSpec # imports for backwards namespace compatibility # pylint: disable=unused-import from tensorflow.python.keras.layers.pooling import AveragePooling1D from tensorflow.python.keras.layers.pooling import AveragePooling2D from tensorflow.python.keras.layers.pooling import AveragePooling3D from tensorflow.python.keras.layers.pooling import MaxPooling1D from tensorflow.python.keras.layers.pooling import MaxPooling2D from tensorflow.python.keras.layers.pooling import MaxPooling3D # pylint: enable=unused-import from tensorflow.python.keras.utils import conv_utils from tensorflow.python.keras.utils import tf_utils from tensorflow.python.ops import array_ops from tensorflow.python.ops import nn from tensorflow.python.ops import nn_ops from tensorflow.python.util.tf_export import keras_export # pylint: disable=g-classes-have-attributes class Conv(Layer): """Abstract N-D convolution layer (private, used as implementation base). This layer creates a convolution kernel that is convolved (actually cross-correlated) with the layer input to produce a tensor of outputs. If `use_bias` is True (and a `bias_initializer` is provided), a bias vector is created and added to the outputs. Finally, if `activation` is not `None`, it is applied to the outputs as well. Note: layer attributes cannot be modified after the layer has been called once (except the `trainable` attribute). Arguments: rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution. filters: Integer, the dimensionality of the output space (i.e. the number of filters in the convolution). kernel_size: An integer or tuple/list of n integers, specifying the length of the convolution window. strides: An integer or tuple/list of n integers, specifying the stride length of the convolution. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: One of `"valid"`, `"same"`, or `"causal"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, ..., channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, ...)`. 
dilation_rate: An integer or tuple/list of n integers, specifying the dilation rate to use for dilated convolution. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any `strides` value != 1. activation: Activation function to use. If you don't specify anything, no activation is applied. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: An initializer for the convolution kernel. bias_initializer: An initializer for the bias vector. If None, the default initializer will be used. kernel_regularizer: Optional regularizer for the convolution kernel. bias_regularizer: Optional regularizer for the bias vector. activity_regularizer: Optional regularizer function for the output. kernel_constraint: Optional projection function to be applied to the kernel after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. bias_constraint: Optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` the weights of this layer will be marked as trainable (and listed in `layer.trainable_weights`). name: A string, the name of the layer. """ def __init__(self, rank, filters, kernel_size, strides=1, padding='valid', data_format=None, dilation_rate=1, activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, **kwargs): super(Conv, self).__init__( trainable=trainable, name=name, activity_regularizer=regularizers.get(activity_regularizer), **kwargs) self.rank = rank if filters is not None and not isinstance(filters, int): filters = int(filters) self.filters = filters self.kernel_size = conv_utils.normalize_tuple( kernel_size, rank, 'kernel_size') if not all(self.kernel_size): raise ValueError('The argument `kernel_size` cannot contain 0(s). 
' 'Received: %s' % (kernel_size,)) self.strides = conv_utils.normalize_tuple(strides, rank, 'strides') self.padding = conv_utils.normalize_padding(padding) if (self.padding == 'causal' and not isinstance(self, (Conv1D, SeparableConv1D))): raise ValueError('Causal padding is only supported for `Conv1D`' 'and ``SeparableConv1D`.') self.data_format = conv_utils.normalize_data_format(data_format) self.dilation_rate = conv_utils.normalize_tuple( dilation_rate, rank, 'dilation_rate') self.activation = activations.get(activation) self.use_bias = use_bias self.kernel_initializer = initializers.get(kernel_initializer) self.bias_initializer = initializers.get(bias_initializer) self.kernel_regularizer = regularizers.get(kernel_regularizer) self.bias_regularizer = regularizers.get(bias_regularizer) self.kernel_constraint = constraints.get(kernel_constraint) self.bias_constraint = constraints.get(bias_constraint) self.input_spec = InputSpec(ndim=self.rank + 2) def build(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape) input_channel = self._get_input_channel(input_shape) kernel_shape = self.kernel_size + (input_channel, self.filters) self.kernel = self.add_weight( name='kernel', shape=kernel_shape, initializer=self.kernel_initializer, regularizer=self.kernel_regularizer, constraint=self.kernel_constraint, trainable=True, dtype=self.dtype) if self.use_bias: self.bias = self.add_weight( name='bias', shape=(self.filters,), initializer=self.bias_initializer, regularizer=self.bias_regularizer, constraint=self.bias_constraint, trainable=True, dtype=self.dtype) else: self.bias = None channel_axis = self._get_channel_axis() self.input_spec = InputSpec(ndim=self.rank + 2, axes={channel_axis: input_channel}) self._build_conv_op_input_shape = input_shape self._build_input_channel = input_channel self._padding_op = self._get_padding_op() self._conv_op_data_format = conv_utils.convert_data_format( self.data_format, self.rank + 2) self._convolution_op = nn_ops.Convolution( input_shape, filter_shape=self.kernel.shape, dilation_rate=self.dilation_rate, strides=self.strides, padding=self._padding_op, data_format=self._conv_op_data_format) self.built = True def call(self, inputs): if self._recreate_conv_op(inputs): self._convolution_op = nn_ops.Convolution( inputs.get_shape(), filter_shape=self.kernel.shape, dilation_rate=self.dilation_rate, strides=self.strides, padding=self._padding_op, data_format=self._conv_op_data_format) self._build_conv_op_input_shape = inputs.get_shape() # Apply causal padding to inputs for Conv1D. if self.padding == 'causal' and self.__class__.__name__ == 'Conv1D': inputs = array_ops.pad(inputs, self._compute_causal_padding()) outputs = self._convolution_op(inputs, self.kernel) if self.use_bias: if self.data_format == 'channels_first': if self.rank == 1: # nn.bias_add does not accept a 1D input tensor. 
bias = array_ops.reshape(self.bias, (1, self.filters, 1)) outputs += bias else: outputs = nn.bias_add(outputs, self.bias, data_format='NCHW') else: outputs = nn.bias_add(outputs, self.bias, data_format='NHWC') if self.activation is not None: return self.activation(outputs) return outputs def _spatial_output_shape(self, spatial_input_shape): return [ conv_utils.conv_output_length( length, self.kernel_size[i], padding=self.padding, stride=self.strides[i], dilation=self.dilation_rate[i]) for i, length in enumerate(spatial_input_shape) ] def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() if self.data_format == 'channels_last': return tensor_shape.TensorShape( [input_shape[0]] + self._spatial_output_shape(input_shape[1:-1]) + [self.filters]) else: return tensor_shape.TensorShape( [input_shape[0], self.filters] + self._spatial_output_shape(input_shape[2:])) def get_config(self): config = { 'filters': self.filters, 'kernel_size': self.kernel_size, 'strides': self.strides, 'padding': self.padding, 'data_format': self.data_format, 'dilation_rate': self.dilation_rate, 'activation': activations.serialize(self.activation), 'use_bias': self.use_bias, 'kernel_initializer': initializers.serialize(self.kernel_initializer), 'bias_initializer': initializers.serialize(self.bias_initializer), 'kernel_regularizer': regularizers.serialize(self.kernel_regularizer), 'bias_regularizer': regularizers.serialize(self.bias_regularizer), 'activity_regularizer': regularizers.serialize(self.activity_regularizer), 'kernel_constraint': constraints.serialize(self.kernel_constraint), 'bias_constraint': constraints.serialize(self.bias_constraint) } base_config = super(Conv, self).get_config() return dict(list(base_config.items()) + list(config.items())) def _compute_causal_padding(self): """Calculates padding for 'causal' option for 1-d conv layers.""" left_pad = self.dilation_rate[0] * (self.kernel_size[0] - 1) if self.data_format == 'channels_last': causal_padding = [[0, 0], [left_pad, 0], [0, 0]] else: causal_padding = [[0, 0], [0, 0], [left_pad, 0]] return causal_padding def _get_channel_axis(self): if self.data_format == 'channels_first': return 1 else: return -1 def _get_input_channel(self, input_shape): channel_axis = self._get_channel_axis() if input_shape.dims[channel_axis].value is None: raise ValueError('The channel dimension of the inputs ' 'should be defined. Found `None`.') return int(input_shape[channel_axis]) def _get_padding_op(self): if self.padding == 'causal': op_padding = 'valid' else: op_padding = self.padding if not isinstance(op_padding, (list, tuple)): op_padding = op_padding.upper() return op_padding def _recreate_conv_op(self, inputs): """Recreate conv_op if necessary. Check if the input_shape in call() is different from that in build(). If the most-specific input shape describing the build and call shapes is not equal to the shape we currently built with, then we need to rebuild the _convolution_op to avoid incorrect behavior. Args: inputs: The input data to call() method. Returns: `True` or `False` to indicate whether to recreate the conv_op. """ call_input_shape = inputs.get_shape() # If the most specific compatible shape between _build_input_shape and # call_input_shape is not _build_input_shape then we must re-build. 
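    # For example (illustrative values): if the op was built for
    # (5, 224, 224, 3) and is now called with (6, 224, 224, 3), the most
    # specific compatible shape is (None, 224, 224, 3), which differs from
    # the build shape, so the convolution op is recreated.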
    return self._build_conv_op_input_shape.most_specific_compatible_shape(
        call_input_shape) != self._build_conv_op_input_shape


@keras_export('keras.layers.Conv1D', 'keras.layers.Convolution1D')
class Conv1D(Conv):
  """1D convolution layer (e.g. temporal convolution).

  This layer creates a convolution kernel that is convolved
  with the layer input over a single spatial (or temporal) dimension
  to produce a tensor of outputs.
  If `use_bias` is True, a bias vector is created and added to the outputs.
  Finally, if `activation` is not `None`,
  it is applied to the outputs as well.

  When using this layer as the first layer in a model,
  provide an `input_shape` argument
  (tuple of integers or `None`, e.g.
  `(10, 128)` for sequences of 10 128-dimensional vectors,
  or `(None, 128)` for variable-length sequences of 128-dimensional vectors).

  Examples:

  >>> # The inputs are 128-length vectors with 10 timesteps, and the batch size
  >>> # is 4.
  >>> input_shape = (4, 10, 128)
  >>> x = tf.random.normal(input_shape)
  >>> y = tf.keras.layers.Conv1D(
  ...     32, 3, activation='relu', input_shape=input_shape)(x)
  >>> print(y.shape)
  (4, 8, 32)

  Arguments:
    filters: Integer, the dimensionality of the output space
      (i.e. the number of output filters in the convolution).
    kernel_size: An integer or tuple/list of a single integer,
      specifying the length of the 1D convolution window.
    strides: An integer or tuple/list of a single integer,
      specifying the stride length of the convolution.
      Specifying any stride value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: One of `"valid"`, `"causal"` or `"same"` (case-insensitive).
      `"causal"` results in causal (dilated) convolutions, e.g. `output[t]`
      does not depend on `input[t+1:]`. Useful when modeling temporal data
      where the model should not violate the temporal order.
      See [WaveNet: A Generative Model for Raw Audio, section
      2.1](https://arxiv.org/abs/1609.03499).
    data_format: A string,
      one of `channels_last` (default) or `channels_first`.
    dilation_rate: an integer or tuple/list of a single integer, specifying
      the dilation rate to use for dilated convolution.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any `strides` value != 1.
    activation: Activation function to use.
      If you don't specify anything, no activation is applied (
      see `keras.activations`).
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix (
      see `keras.initializers`).
    bias_initializer: Initializer for the bias vector (
      see `keras.initializers`).
    kernel_regularizer: Regularizer function applied to
      the `kernel` weights matrix (see `keras.regularizers`).
    bias_regularizer: Regularizer function applied to the bias vector (
      see `keras.regularizers`).
    activity_regularizer: Regularizer function applied to
      the output of the layer (its "activation") (
      see `keras.regularizers`).
    kernel_constraint: Constraint function applied to the kernel matrix (
      see `keras.constraints`).
    bias_constraint: Constraint function applied to the bias vector (
      see `keras.constraints`).

  Input shape:
    3D tensor with shape: `(batch_size, steps, input_dim)`

  Output shape:
    3D tensor with shape: `(batch_size, new_steps, filters)`
      `steps` value might have changed due to padding or strides.

  Returns:
    A tensor of rank 3 representing
    `activation(conv1d(inputs, kernel) + bias)`.

  Raises:
    ValueError: when both `strides` > 1 and `dilation_rate` > 1.
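
  A further shape sketch (assuming eager TensorFlow 2.x): with
  `padding='causal'` the layer left-pads the input, so the number of
  timesteps is preserved:

  >>> x = tf.random.normal((4, 10, 128))
  >>> y = tf.keras.layers.Conv1D(32, 3, padding='causal')(x)
  >>> print(y.shape)
  (4, 10, 32)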
""" def __init__(self, filters, kernel_size, strides=1, padding='valid', data_format='channels_last', dilation_rate=1, activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, **kwargs): super(Conv1D, self).__init__( rank=1, filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activations.get(activation), use_bias=use_bias, kernel_initializer=initializers.get(kernel_initializer), bias_initializer=initializers.get(bias_initializer), kernel_regularizer=regularizers.get(kernel_regularizer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), kernel_constraint=constraints.get(kernel_constraint), bias_constraint=constraints.get(bias_constraint), **kwargs) @keras_export('keras.layers.Conv2D', 'keras.layers.Convolution2D') class Conv2D(Conv): """2D convolution layer (e.g. spatial convolution over images). This layer creates a convolution kernel that is convolved with the layer input to produce a tensor of outputs. If `use_bias` is True, a bias vector is created and added to the outputs. Finally, if `activation` is not `None`, it is applied to the outputs as well. When using this layer as the first layer in a model, provide the keyword argument `input_shape` (tuple of integers, does not include the sample axis), e.g. `input_shape=(128, 128, 3)` for 128x128 RGB pictures in `data_format="channels_last"`. Examples: >>> # The inputs are 28x28 RGB images with `channels_last` and the batch >>> # size is 4. >>> input_shape = (4, 28, 28, 3) >>> x = tf.random.normal(input_shape) >>> y = tf.keras.layers.Conv2D( ... 2, 3, activation='relu', input_shape=input_shape)(x) >>> print(y.shape) (4, 26, 26, 2) >>> # With `dilation_rate` as 2. >>> input_shape = (4, 28, 28, 3) >>> x = tf.random.normal(input_shape) >>> y = tf.keras.layers.Conv2D( ... 2, 3, activation='relu', dilation_rate=2, input_shape=input_shape)(x) >>> print(y.shape) (4, 24, 24, 2) >>> # With `padding` as "same". >>> input_shape = (4, 28, 28, 3) >>> x = tf.random.normal(input_shape) >>> y = tf.keras.layers.Conv2D( ... 2, 3, activation='relu', padding="same", input_shape=input_shape)(x) >>> print(y.shape) (4, 28, 28, 2) Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution). kernel_size: An integer or tuple/list of 2 integers, specifying the height and width of the 2D convolution window. Can be a single integer to specify the same value for all spatial dimensions. strides: An integer or tuple/list of 2 integers, specifying the strides of the convolution along the height and width. Can be a single integer to specify the same value for all spatial dimensions. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: one of `"valid"` or `"same"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". 
dilation_rate: an integer or tuple/list of 2 integers, specifying the dilation rate to use for dilated convolution. Can be a single integer to specify the same value for all spatial dimensions. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any stride value != 1. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias vector. kernel_initializer: Initializer for the `kernel` weights matrix ( see `keras.initializers`). bias_initializer: Initializer for the bias vector ( see `keras.initializers`). kernel_regularizer: Regularizer function applied to the `kernel` weights matrix (see `keras.regularizers`). bias_regularizer: Regularizer function applied to the bias vector ( see `keras.regularizers`). activity_regularizer: Regularizer function applied to the output of the layer (its "activation") ( see `keras.regularizers`). kernel_constraint: Constraint function applied to the kernel matrix ( see `keras.constraints`). bias_constraint: Constraint function applied to the bias vector ( see `keras.constraints`). Input shape: 4D tensor with shape: `(batch_size, channels, rows, cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, rows, cols, channels)` if data_format='channels_last'. Output shape: 4D tensor with shape: `(batch_size, filters, new_rows, new_cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, new_rows, new_cols, filters)` if data_format='channels_last'. `rows` and `cols` values might have changed due to padding. Returns: A tensor of rank 4 representing `activation(conv2d(inputs, kernel) + bias)`. Raises: ValueError: if `padding` is "causal". ValueError: when both `strides` > 1 and `dilation_rate` > 1. """ def __init__(self, filters, kernel_size, strides=(1, 1), padding='valid', data_format=None, dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, **kwargs): super(Conv2D, self).__init__( rank=2, filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activations.get(activation), use_bias=use_bias, kernel_initializer=initializers.get(kernel_initializer), bias_initializer=initializers.get(bias_initializer), kernel_regularizer=regularizers.get(kernel_regularizer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), kernel_constraint=constraints.get(kernel_constraint), bias_constraint=constraints.get(bias_constraint), **kwargs) @keras_export('keras.layers.Conv3D', 'keras.layers.Convolution3D') class Conv3D(Conv): """3D convolution layer (e.g. spatial convolution over volumes). This layer creates a convolution kernel that is convolved with the layer input to produce a tensor of outputs. If `use_bias` is True, a bias vector is created and added to the outputs. Finally, if `activation` is not `None`, it is applied to the outputs as well. When using this layer as the first layer in a model, provide the keyword argument `input_shape` (tuple of integers, does not include the sample axis), e.g. `input_shape=(128, 128, 128, 1)` for 128x128x128 volumes with a single channel, in `data_format="channels_last"`. 
Examples: >>> # The inputs are 28x28x28 volumes with a single channel, and the >>> # batch size is 4 >>> input_shape =(4, 28, 28, 28, 1) >>> x = tf.random.normal(input_shape) >>> y = tf.keras.layers.Conv3D( ... 2, 3, activation='relu', input_shape=input_shape)(x) >>> print(y.shape) (4, 26, 26, 26, 2) Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution). kernel_size: An integer or tuple/list of 3 integers, specifying the depth, height and width of the 3D convolution window. Can be a single integer to specify the same value for all spatial dimensions. strides: An integer or tuple/list of 3 integers, specifying the strides of the convolution along each spatial dimension. Can be a single integer to specify the same value for all spatial dimensions. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: one of `"valid"` or `"same"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". dilation_rate: an integer or tuple/list of 3 integers, specifying the dilation rate to use for dilated convolution. Can be a single integer to specify the same value for all spatial dimensions. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any stride value != 1. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias vector. kernel_initializer: Initializer for the `kernel` weights matrix ( see `keras.initializers`). bias_initializer: Initializer for the bias vector ( see `keras.initializers`). kernel_regularizer: Regularizer function applied to the `kernel` weights matrix ( see `keras.regularizers`). bias_regularizer: Regularizer function applied to the bias vector ( see `keras.regularizers`). activity_regularizer: Regularizer function applied to the output of the layer (its "activation") ( see `keras.regularizers`). kernel_constraint: Constraint function applied to the kernel matrix ( see `keras.constraints`). bias_constraint: Constraint function applied to the bias vector ( see `keras.constraints`). Input shape: 5D tensor with shape: `(batch_size, channels, conv_dim1, conv_dim2, conv_dim3)` if data_format='channels_first' or 5D tensor with shape: `(batch_size, conv_dim1, conv_dim2, conv_dim3, channels)` if data_format='channels_last'. Output shape: 5D tensor with shape: `(batch_size, filters, new_conv_dim1, new_conv_dim2, new_conv_dim3)` if data_format='channels_first' or 5D tensor with shape: `(batch_size, new_conv_dim1, new_conv_dim2, new_conv_dim3, filters)` if data_format='channels_last'. `new_conv_dim1`, `new_conv_dim2` and `new_conv_dim3` values might have changed due to padding. Returns: A tensor of rank 5 representing `activation(conv3d(inputs, kernel) + bias)`. Raises: ValueError: if `padding` is "causal". ValueError: when both `strides` > 1 and `dilation_rate` > 1. 
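
  A further shape sketch (assuming eager TensorFlow 2.x): with `strides=2`
  and the default `"valid"` padding, each spatial dimension shrinks to
  `floor((28 - 3) / 2) + 1 = 13`:

  >>> x = tf.random.normal((4, 28, 28, 28, 1))
  >>> y = tf.keras.layers.Conv3D(2, 3, strides=2, activation='relu')(x)
  >>> print(y.shape)
  (4, 13, 13, 13, 2)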
""" def __init__(self, filters, kernel_size, strides=(1, 1, 1), padding='valid', data_format=None, dilation_rate=(1, 1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, **kwargs): super(Conv3D, self).__init__( rank=3, filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activations.get(activation), use_bias=use_bias, kernel_initializer=initializers.get(kernel_initializer), bias_initializer=initializers.get(bias_initializer), kernel_regularizer=regularizers.get(kernel_regularizer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), kernel_constraint=constraints.get(kernel_constraint), bias_constraint=constraints.get(bias_constraint), **kwargs) @keras_export('keras.layers.Conv1DTranspose', 'keras.layers.Convolution1DTranspose') class Conv1DTranspose(Conv1D): """Transposed convolution layer (sometimes called Deconvolution). The need for transposed convolutions generally arises from the desire to use a transformation going in the opposite direction of a normal convolution, i.e., from something that has the shape of the output of some convolution to something that has the shape of its input while maintaining a connectivity pattern that is compatible with said convolution. When using this layer as the first layer in a model, provide the keyword argument `input_shape` (tuple of integers, does not include the sample axis), e.g. `input_shape=(128, 3)` for data with 128 time steps and 3 channels. Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution). kernel_size: An integer length of the 1D convolution window. strides: An integer specifying the stride of the convolution along the time dimension. Specifying a stride value != 1 is incompatible with specifying a `dilation_rate` value != 1. Defaults to 1. padding: one of `"valid"` or `"same"` (case-insensitive). output_padding: An integer specifying the amount of padding along the time dimension of the output tensor. The amount of output padding must be lower than the stride. If set to `None` (default), the output shape is inferred. data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, length, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, length)`. dilation_rate: an integer, specifying the dilation rate to use for dilated convolution. Currently, specifying a `dilation_rate` value != 1 is incompatible with specifying a stride value != 1. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias vector. kernel_initializer: Initializer for the `kernel` weights matrix ( see `keras.initializers`). bias_initializer: Initializer for the bias vector ( see `keras.initializers`). kernel_regularizer: Regularizer function applied to the `kernel` weights matrix (see `keras.regularizers`). bias_regularizer: Regularizer function applied to the bias vector ( see `keras.regularizers`). 
activity_regularizer: Regularizer function applied to the output of the layer (its "activation") (see `keras.regularizers`). kernel_constraint: Constraint function applied to the kernel matrix ( see `keras.constraints`). bias_constraint: Constraint function applied to the bias vector ( see `keras.constraints`). Input shape: 3D tensor with shape: `(batch_size, steps, channels)` Output shape: 3D tensor with shape: `(batch_size, new_steps, filters)` If `output_padding` is specified: ``` new_timesteps = ((timesteps - 1) * strides + kernel_size - 2 * padding + output_padding) ``` Returns: A tensor of rank 3 representing `activation(conv1dtranspose(inputs, kernel) + bias)`. Raises: ValueError: if `padding` is "causal". ValueError: when both `strides` > 1 and `dilation_rate` > 1. References: - [A guide to convolution arithmetic for deep learning]( https://arxiv.org/abs/1603.07285v1) - [Deconvolutional Networks]( https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf) """ def __init__(self, filters, kernel_size, strides=1, padding='valid', output_padding=None, data_format=None, dilation_rate=1, activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, **kwargs): super(Conv1DTranspose, self).__init__( filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activations.get(activation), use_bias=use_bias, kernel_initializer=initializers.get(kernel_initializer), bias_initializer=initializers.get(bias_initializer), kernel_regularizer=regularizers.get(kernel_regularizer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), kernel_constraint=constraints.get(kernel_constraint), bias_constraint=constraints.get(bias_constraint), **kwargs) self.output_padding = output_padding if self.output_padding is not None: self.output_padding = conv_utils.normalize_tuple( self.output_padding, 1, 'output_padding') for stride, out_pad in zip(self.strides, self.output_padding): if out_pad >= stride: raise ValueError('Stride ' + str(self.strides) + ' must be ' 'greater than output padding ' + str(self.output_padding)) def build(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape) if len(input_shape) != 3: raise ValueError('Inputs should have rank 3. Received input shape: ' + str(input_shape)) channel_axis = self._get_channel_axis() if input_shape.dims[channel_axis].value is None: raise ValueError('The channel dimension of the inputs ' 'should be defined. 
Found `None`.') input_dim = int(input_shape[channel_axis]) self.input_spec = InputSpec(ndim=3, axes={channel_axis: input_dim}) kernel_shape = self.kernel_size + (self.filters, input_dim) self.kernel = self.add_weight( name='kernel', shape=kernel_shape, initializer=self.kernel_initializer, regularizer=self.kernel_regularizer, constraint=self.kernel_constraint, trainable=True, dtype=self.dtype) if self.use_bias: self.bias = self.add_weight( name='bias', shape=(self.filters,), initializer=self.bias_initializer, regularizer=self.bias_regularizer, constraint=self.bias_constraint, trainable=True, dtype=self.dtype) else: self.bias = None self.built = True def call(self, inputs): inputs_shape = array_ops.shape(inputs) batch_size = inputs_shape[0] if self.data_format == 'channels_first': t_axis = 2 else: t_axis = 1 length = inputs_shape[t_axis] if self.output_padding is None: output_padding = None else: output_padding = self.output_padding[0] # Infer the dynamic output shape: out_length = conv_utils.deconv_output_length( length, self.kernel_size[0], padding=self.padding, output_padding=output_padding, stride=self.strides[0], dilation=self.dilation_rate[0]) if self.data_format == 'channels_first': output_shape = (batch_size, self.filters, out_length) else: output_shape = (batch_size, out_length, self.filters) data_format = conv_utils.convert_data_format(self.data_format, ndim=3) output_shape_tensor = array_ops.stack(output_shape) outputs = nn_ops.conv1d_transpose( inputs, self.kernel, output_shape_tensor, strides=self.strides, padding=self.padding.upper(), data_format=data_format, dilations=self.dilation_rate) if not context.executing_eagerly(): # Infer the static output shape: out_shape = self.compute_output_shape(inputs.shape) outputs.set_shape(out_shape) if self.use_bias: outputs = nn.bias_add( outputs, self.bias, data_format=data_format) if self.activation is not None: return self.activation(outputs) return outputs def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() output_shape = list(input_shape) if self.data_format == 'channels_first': c_axis, t_axis = 1, 2 else: c_axis, t_axis = 2, 1 if self.output_padding is None: output_padding = None else: output_padding = self.output_padding[0] output_shape[c_axis] = self.filters output_shape[t_axis] = conv_utils.deconv_output_length( output_shape[t_axis], self.kernel_size[0], padding=self.padding, output_padding=output_padding, stride=self.strides[0], dilation=self.dilation_rate[0]) return tensor_shape.TensorShape(output_shape) def get_config(self): config = super(Conv1DTranspose, self).get_config() config['output_padding'] = self.output_padding return config @keras_export('keras.layers.Conv2DTranspose', 'keras.layers.Convolution2DTranspose') class Conv2DTranspose(Conv2D): """Transposed convolution layer (sometimes called Deconvolution). The need for transposed convolutions generally arises from the desire to use a transformation going in the opposite direction of a normal convolution, i.e., from something that has the shape of the output of some convolution to something that has the shape of its input while maintaining a connectivity pattern that is compatible with said convolution. When using this layer as the first layer in a model, provide the keyword argument `input_shape` (tuple of integers, does not include the sample axis), e.g. `input_shape=(128, 128, 3)` for 128x128 RGB pictures in `data_format="channels_last"`. Arguments: filters: Integer, the dimensionality of the output space (i.e. 
the number of output filters in the convolution). kernel_size: An integer or tuple/list of 2 integers, specifying the height and width of the 2D convolution window. Can be a single integer to specify the same value for all spatial dimensions. strides: An integer or tuple/list of 2 integers, specifying the strides of the convolution along the height and width. Can be a single integer to specify the same value for all spatial dimensions. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: one of `"valid"` or `"same"` (case-insensitive). output_padding: An integer or tuple/list of 2 integers, specifying the amount of padding along the height and width of the output tensor. Can be a single integer to specify the same value for all spatial dimensions. The amount of output padding along a given dimension must be lower than the stride along that same dimension. If set to `None` (default), the output shape is inferred. data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". dilation_rate: an integer or tuple/list of 2 integers, specifying the dilation rate to use for dilated convolution. Can be a single integer to specify the same value for all spatial dimensions. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any stride value != 1. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias vector. kernel_initializer: Initializer for the `kernel` weights matrix ( see `keras.initializers`). bias_initializer: Initializer for the bias vector ( see `keras.initializers`). kernel_regularizer: Regularizer function applied to the `kernel` weights matrix (see `keras.regularizers`). bias_regularizer: Regularizer function applied to the bias vector ( see `keras.regularizers`). activity_regularizer: Regularizer function applied to the output of the layer (its "activation") (see `keras.regularizers`). kernel_constraint: Constraint function applied to the kernel matrix ( see `keras.constraints`). bias_constraint: Constraint function applied to the bias vector ( see `keras.constraints`). Input shape: 4D tensor with shape: `(batch_size, channels, rows, cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, rows, cols, channels)` if data_format='channels_last'. Output shape: 4D tensor with shape: `(batch_size, filters, new_rows, new_cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, new_rows, new_cols, filters)` if data_format='channels_last'. `rows` and `cols` values might have changed due to padding. If `output_padding` is specified: ``` new_rows = ((rows - 1) * strides[0] + kernel_size[0] - 2 * padding[0] + output_padding[0]) new_cols = ((cols - 1) * strides[1] + kernel_size[1] - 2 * padding[1] + output_padding[1]) ``` Returns: A tensor of rank 4 representing `activation(conv2dtranspose(inputs, kernel) + bias)`. Raises: ValueError: if `padding` is "causal". ValueError: when both `strides` > 1 and `dilation_rate` > 1. 
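
  A quick shape sketch (assuming eager TensorFlow 2.x): with `strides=2` and
  `padding='same'`, the layer upsamples each spatial dimension by the stride
  factor, i.e. `new_rows = rows * strides[0]`:

  >>> x = tf.random.normal((4, 10, 10, 3))
  >>> y = tf.keras.layers.Conv2DTranspose(2, 3, strides=2, padding='same')(x)
  >>> print(y.shape)
  (4, 20, 20, 2)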
  References:
    - [A guide to convolution arithmetic for deep
      learning](https://arxiv.org/abs/1603.07285v1)
    - [Deconvolutional
      Networks](https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf)
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               output_padding=None,
               data_format=None,
               dilation_rate=(1, 1),
               activation=None,
               use_bias=True,
               kernel_initializer='glorot_uniform',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               **kwargs):
    super(Conv2DTranspose, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activations.get(activation),
        use_bias=use_bias,
        kernel_initializer=initializers.get(kernel_initializer),
        bias_initializer=initializers.get(bias_initializer),
        kernel_regularizer=regularizers.get(kernel_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        activity_regularizer=regularizers.get(activity_regularizer),
        kernel_constraint=constraints.get(kernel_constraint),
        bias_constraint=constraints.get(bias_constraint),
        **kwargs)

    self.output_padding = output_padding
    if self.output_padding is not None:
      self.output_padding = conv_utils.normalize_tuple(
          self.output_padding, 2, 'output_padding')
      for stride, out_pad in zip(self.strides, self.output_padding):
        if out_pad >= stride:
          raise ValueError('Stride ' + str(self.strides) + ' must be '
                           'greater than output padding ' +
                           str(self.output_padding))

  def build(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape)
    if len(input_shape) != 4:
      raise ValueError('Inputs should have rank 4. Received input shape: ' +
                       str(input_shape))
    channel_axis = self._get_channel_axis()
    if input_shape.dims[channel_axis].value is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = int(input_shape[channel_axis])
    self.input_spec = InputSpec(ndim=4, axes={channel_axis: input_dim})
    kernel_shape = self.kernel_size + (self.filters, input_dim)

    self.kernel = self.add_weight(
        name='kernel',
        shape=kernel_shape,
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint,
        trainable=True,
        dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_weight(
          name='bias',
          shape=(self.filters,),
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          constraint=self.bias_constraint,
          trainable=True,
          dtype=self.dtype)
    else:
      self.bias = None
    self.built = True

  def call(self, inputs):
    inputs_shape = array_ops.shape(inputs)
    batch_size = inputs_shape[0]
    if self.data_format == 'channels_first':
      h_axis, w_axis = 2, 3
    else:
      h_axis, w_axis = 1, 2

    # Use the constant height and width when possible.
    # TODO(scottzhu): Extract this into a utility function that can be applied
    # to all convolutional layers, which currently lost the static shape
    # information due to tf.shape().
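    # Prefer the static dimensions when they are known; fall back to the
    # dynamic values from tf.shape() for any dimension that is None.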
height, width = None, None if inputs.shape.rank is not None: dims = inputs.shape.as_list() height = dims[h_axis] width = dims[w_axis] height = height if height is not None else inputs_shape[h_axis] width = width if width is not None else inputs_shape[w_axis] kernel_h, kernel_w = self.kernel_size stride_h, stride_w = self.strides if self.output_padding is None: out_pad_h = out_pad_w = None else: out_pad_h, out_pad_w = self.output_padding # Infer the dynamic output shape: out_height = conv_utils.deconv_output_length(height, kernel_h, padding=self.padding, output_padding=out_pad_h, stride=stride_h, dilation=self.dilation_rate[0]) out_width = conv_utils.deconv_output_length(width, kernel_w, padding=self.padding, output_padding=out_pad_w, stride=stride_w, dilation=self.dilation_rate[1]) if self.data_format == 'channels_first': output_shape = (batch_size, self.filters, out_height, out_width) else: output_shape = (batch_size, out_height, out_width, self.filters) output_shape_tensor = array_ops.stack(output_shape) outputs = backend.conv2d_transpose( inputs, self.kernel, output_shape_tensor, strides=self.strides, padding=self.padding, data_format=self.data_format, dilation_rate=self.dilation_rate) if not context.executing_eagerly(): # Infer the static output shape: out_shape = self.compute_output_shape(inputs.shape) outputs.set_shape(out_shape) if self.use_bias: outputs = nn.bias_add( outputs, self.bias, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) if self.activation is not None: return self.activation(outputs) return outputs def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() output_shape = list(input_shape) if self.data_format == 'channels_first': c_axis, h_axis, w_axis = 1, 2, 3 else: c_axis, h_axis, w_axis = 3, 1, 2 kernel_h, kernel_w = self.kernel_size stride_h, stride_w = self.strides if self.output_padding is None: out_pad_h = out_pad_w = None else: out_pad_h, out_pad_w = self.output_padding output_shape[c_axis] = self.filters output_shape[h_axis] = conv_utils.deconv_output_length( output_shape[h_axis], kernel_h, padding=self.padding, output_padding=out_pad_h, stride=stride_h, dilation=self.dilation_rate[0]) output_shape[w_axis] = conv_utils.deconv_output_length( output_shape[w_axis], kernel_w, padding=self.padding, output_padding=out_pad_w, stride=stride_w, dilation=self.dilation_rate[1]) return tensor_shape.TensorShape(output_shape) def get_config(self): config = super(Conv2DTranspose, self).get_config() config['output_padding'] = self.output_padding return config @keras_export('keras.layers.Conv3DTranspose', 'keras.layers.Convolution3DTranspose') class Conv3DTranspose(Conv3D): """Transposed convolution layer (sometimes called Deconvolution). The need for transposed convolutions generally arises from the desire to use a transformation going in the opposite direction of a normal convolution, i.e., from something that has the shape of the output of some convolution to something that has the shape of its input while maintaining a connectivity pattern that is compatible with said convolution. When using this layer as the first layer in a model, provide the keyword argument `input_shape` (tuple of integers, does not include the sample axis), e.g. `input_shape=(128, 128, 128, 3)` for a 128x128x128 volume with 3 channels if `data_format="channels_last"`. Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution). 
    kernel_size: An integer or tuple/list of 3 integers, specifying the
      depth, height and width of the 3D convolution window.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    strides: An integer or tuple/list of 3 integers,
      specifying the strides of the convolution along the depth, height
      and width.
      Can be a single integer to specify the same value for
      all spatial dimensions.
      Specifying any stride value != 1 is incompatible with specifying
      any `dilation_rate` value != 1.
    padding: one of `"valid"` or `"same"` (case-insensitive).
    output_padding: An integer or tuple/list of 3 integers,
      specifying the amount of padding along the depth, height, and width.
      Can be a single integer to specify the same value for all
      spatial dimensions.
      The amount of output padding along a given dimension must be
      lower than the stride along that same dimension.
      If set to `None` (default), the output shape is inferred.
    data_format: A string, one of `channels_last` (default) or
      `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch_size, depth, height, width, channels)` while `channels_first`
      corresponds to inputs with shape
      `(batch_size, channels, depth, height, width)`.
      It defaults to the `image_data_format` value found in your
      Keras config file at `~/.keras/keras.json`.
      If you never set it, then it will be "channels_last".
    dilation_rate: an integer or tuple/list of 3 integers, specifying
      the dilation rate to use for dilated convolution.
      Can be a single integer to specify the same value for
      all spatial dimensions.
      Currently, specifying any `dilation_rate` value != 1 is
      incompatible with specifying any stride value != 1.
    activation: Activation function to use.
      If you don't specify anything, no activation is applied (
      see `keras.activations`).
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix.
    bias_initializer: Initializer for the bias vector.
    kernel_regularizer: Regularizer function applied to
      the `kernel` weights matrix (
      see `keras.regularizers`).
    bias_regularizer: Regularizer function applied to the bias vector (
      see `keras.regularizers`).
    activity_regularizer: Regularizer function applied to
      the output of the layer (its "activation") (
      see `keras.regularizers`).
    kernel_constraint: Constraint function applied to the kernel matrix (
      see `keras.constraints`).
    bias_constraint: Constraint function applied to the bias vector (
      see `keras.constraints`).

  Input shape:
    5D tensor with shape:
    `(batch_size, channels, depth, rows, cols)` if data_format='channels_first'
    or 5D tensor with shape:
    `(batch_size, depth, rows, cols, channels)` if data_format='channels_last'.

  Output shape:
    5D tensor with shape:
    `(batch_size, filters, new_depth, new_rows, new_cols)` if
      data_format='channels_first'
    or 5D tensor with shape:
    `(batch_size, new_depth, new_rows, new_cols, filters)` if
      data_format='channels_last'.
    `depth`, `rows` and `cols` values might have changed due to padding.
    If `output_padding` is specified:

    ```
    new_depth = ((depth - 1) * strides[0] + kernel_size[0] - 2 * padding[0] +
    output_padding[0])
    new_rows = ((rows - 1) * strides[1] + kernel_size[1] - 2 * padding[1] +
    output_padding[1])
    new_cols = ((cols - 1) * strides[2] + kernel_size[2] - 2 * padding[2] +
    output_padding[2])
    ```

  Returns:
    A tensor of rank 5 representing
    `activation(conv3dtranspose(inputs, kernel) + bias)`.

  Raises:
    ValueError: if `padding` is "causal".
    ValueError: when both `strides` > 1 and `dilation_rate` > 1.

  References:
    - [A guide to convolution arithmetic for deep
      learning](https://arxiv.org/abs/1603.07285v1)
    - [Deconvolutional
      Networks](https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf)
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1, 1),
               padding='valid',
               output_padding=None,
               data_format=None,
               dilation_rate=(1, 1, 1),
               activation=None,
               use_bias=True,
               kernel_initializer='glorot_uniform',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               **kwargs):
    super(Conv3DTranspose, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activations.get(activation),
        use_bias=use_bias,
        kernel_initializer=initializers.get(kernel_initializer),
        bias_initializer=initializers.get(bias_initializer),
        kernel_regularizer=regularizers.get(kernel_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        activity_regularizer=regularizers.get(activity_regularizer),
        kernel_constraint=constraints.get(kernel_constraint),
        bias_constraint=constraints.get(bias_constraint),
        **kwargs)

    self.output_padding = output_padding
    if self.output_padding is not None:
      self.output_padding = conv_utils.normalize_tuple(
          self.output_padding, 3, 'output_padding')
      for stride, out_pad in zip(self.strides, self.output_padding):
        if out_pad >= stride:
          raise ValueError('Stride ' + str(self.strides) + ' must be '
                           'greater than output padding ' +
                           str(self.output_padding))

  def build(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape)
    if len(input_shape) != 5:
      raise ValueError('Inputs should have rank 5. Received input shape: ' +
                       str(input_shape))
    channel_axis = self._get_channel_axis()
    if input_shape.dims[channel_axis].value is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined, found None: ' + str(input_shape))
    input_dim = int(input_shape[channel_axis])
    kernel_shape = self.kernel_size + (self.filters, input_dim)
    self.input_spec = InputSpec(ndim=5, axes={channel_axis: input_dim})

    self.kernel = self.add_weight(
        'kernel',
        shape=kernel_shape,
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint,
        trainable=True,
        dtype=self.dtype)
    if self.use_bias:
      self.bias = self.add_weight(
          'bias',
          shape=(self.filters,),
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          constraint=self.bias_constraint,
          trainable=True,
          dtype=self.dtype)
    else:
      self.bias = None
    self.built = True

  def call(self, inputs):
    inputs_shape = array_ops.shape(inputs)
    batch_size = inputs_shape[0]
    if self.data_format == 'channels_first':
      d_axis, h_axis, w_axis = 2, 3, 4
    else:
      d_axis, h_axis, w_axis = 1, 2, 3

    depth = inputs_shape[d_axis]
    height = inputs_shape[h_axis]
    width = inputs_shape[w_axis]

    kernel_d, kernel_h, kernel_w = self.kernel_size
    stride_d, stride_h, stride_w = self.strides

    if self.output_padding is None:
      out_pad_d = out_pad_h = out_pad_w = None
    else:
      out_pad_d, out_pad_h, out_pad_w = self.output_padding

    # Infer the dynamic output shape:
    out_depth = conv_utils.deconv_output_length(depth,
                                                kernel_d,
                                                padding=self.padding,
                                                output_padding=out_pad_d,
                                                stride=stride_d)
    out_height = conv_utils.deconv_output_length(height,
                                                 kernel_h,
                                                 padding=self.padding,
                                                 output_padding=out_pad_h,
                                                 stride=stride_h)
    out_width = conv_utils.deconv_output_length(width,
                                                kernel_w,
                                                padding=self.padding,
output_padding=out_pad_w, stride=stride_w) if self.data_format == 'channels_first': output_shape = (batch_size, self.filters, out_depth, out_height, out_width) strides = (1, 1, stride_d, stride_h, stride_w) else: output_shape = (batch_size, out_depth, out_height, out_width, self.filters) strides = (1, stride_d, stride_h, stride_w, 1) output_shape_tensor = array_ops.stack(output_shape) outputs = nn.conv3d_transpose( inputs, self.kernel, output_shape_tensor, strides, data_format=conv_utils.convert_data_format(self.data_format, ndim=5), padding=self.padding.upper()) if not context.executing_eagerly(): # Infer the static output shape: out_shape = self.compute_output_shape(inputs.shape) outputs.set_shape(out_shape) if self.use_bias: outputs = nn.bias_add( outputs, self.bias, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) if self.activation is not None: return self.activation(outputs) return outputs def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() output_shape = list(input_shape) if self.data_format == 'channels_first': c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4 else: c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3 kernel_d, kernel_h, kernel_w = self.kernel_size stride_d, stride_h, stride_w = self.strides if self.output_padding is None: out_pad_d = out_pad_h = out_pad_w = None else: out_pad_d, out_pad_h, out_pad_w = self.output_padding output_shape[c_axis] = self.filters output_shape[d_axis] = conv_utils.deconv_output_length( output_shape[d_axis], kernel_d, padding=self.padding, output_padding=out_pad_d, stride=stride_d) output_shape[h_axis] = conv_utils.deconv_output_length( output_shape[h_axis], kernel_h, padding=self.padding, output_padding=out_pad_h, stride=stride_h) output_shape[w_axis] = conv_utils.deconv_output_length( output_shape[w_axis], kernel_w, padding=self.padding, output_padding=out_pad_w, stride=stride_w) return tensor_shape.TensorShape(output_shape) def get_config(self): config = super(Conv3DTranspose, self).get_config() config.pop('dilation_rate') config['output_padding'] = self.output_padding return config class SeparableConv(Conv): """Abstract base layer for separable nD convolution. This layer performs a depthwise convolution that acts separately on channels, followed by a pointwise convolution that mixes channels. If `use_bias` is True and a bias initializer is provided, it adds a bias vector to the output. It then optionally applies an activation function to produce the final output. Arguments: rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution. filters: Integer, the dimensionality of the output space (i.e. the number of filters in the convolution). kernel_size: A tuple or list of integers specifying the spatial dimensions of the filters. Can be a single integer to specify the same value for all spatial dimensions. strides: A tuple or list of integers specifying the strides of the convolution. Can be a single integer to specify the same value for all spatial dimensions. Specifying any `stride` value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: One of `"valid"` or `"same"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, ..., channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, ...)`. 
dilation_rate: An integer or tuple/list of 2 integers, specifying the dilation rate to use for dilated convolution. Can be a single integer to specify the same value for all spatial dimensions. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any stride value != 1. depth_multiplier: The number of depthwise convolution output channels for each input channel. The total number of depthwise convolution output channels will be equal to `num_filters_in * depth_multiplier`. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias. depthwise_initializer: An initializer for the depthwise convolution kernel. pointwise_initializer: An initializer for the pointwise convolution kernel. bias_initializer: An initializer for the bias vector. If None, the default initializer will be used. depthwise_regularizer: Optional regularizer for the depthwise convolution kernel. pointwise_regularizer: Optional regularizer for the pointwise convolution kernel. bias_regularizer: Optional regularizer for the bias vector. activity_regularizer: Optional regularizer function for the output. depthwise_constraint: Optional projection function to be applied to the depthwise kernel after being updated by an `Optimizer` (e.g. used for norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. pointwise_constraint: Optional projection function to be applied to the pointwise kernel after being updated by an `Optimizer`. bias_constraint: Optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` the weights of this layer will be marked as trainable (and listed in `layer.trainable_weights`). name: A string, the name of the layer. 
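
  For a rough sense of the savings (illustrative sizes): a 3x3 kernel mapping
  64 input channels to 128 output channels uses 3 * 3 * 64 * 128 = 73,728
  kernel weights as a standard convolution, but only
  3 * 3 * 64 * 1 + 1 * 1 * 64 * 128 = 8,768 with `depth_multiplier=1` in the
  separable factorization.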
""" def __init__(self, rank, filters, kernel_size, strides=1, padding='valid', data_format=None, dilation_rate=1, depth_multiplier=1, activation=None, use_bias=True, depthwise_initializer='glorot_uniform', pointwise_initializer='glorot_uniform', bias_initializer='zeros', depthwise_regularizer=None, pointwise_regularizer=None, bias_regularizer=None, activity_regularizer=None, depthwise_constraint=None, pointwise_constraint=None, bias_constraint=None, trainable=True, name=None, **kwargs): super(SeparableConv, self).__init__( rank=rank, filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activations.get(activation), use_bias=use_bias, bias_initializer=initializers.get(bias_initializer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), bias_constraint=bias_constraint, trainable=trainable, name=name, **kwargs) self.depth_multiplier = depth_multiplier self.depthwise_initializer = initializers.get(depthwise_initializer) self.pointwise_initializer = initializers.get(pointwise_initializer) self.depthwise_regularizer = regularizers.get(depthwise_regularizer) self.pointwise_regularizer = regularizers.get(pointwise_regularizer) self.depthwise_constraint = constraints.get(depthwise_constraint) self.pointwise_constraint = constraints.get(pointwise_constraint) def build(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape) channel_axis = self._get_channel_axis() if input_shape.dims[channel_axis].value is None: raise ValueError('The channel dimension of the inputs ' 'should be defined. Found `None`.') input_dim = int(input_shape[channel_axis]) self.input_spec = InputSpec(ndim=self.rank + 2, axes={channel_axis: input_dim}) depthwise_kernel_shape = self.kernel_size + (input_dim, self.depth_multiplier) pointwise_kernel_shape = ( 1,) * self.rank + (self.depth_multiplier * input_dim, self.filters) self.depthwise_kernel = self.add_weight( name='depthwise_kernel', shape=depthwise_kernel_shape, initializer=self.depthwise_initializer, regularizer=self.depthwise_regularizer, constraint=self.depthwise_constraint, trainable=True, dtype=self.dtype) self.pointwise_kernel = self.add_weight( name='pointwise_kernel', shape=pointwise_kernel_shape, initializer=self.pointwise_initializer, regularizer=self.pointwise_regularizer, constraint=self.pointwise_constraint, trainable=True, dtype=self.dtype) if self.use_bias: self.bias = self.add_weight( name='bias', shape=(self.filters,), initializer=self.bias_initializer, regularizer=self.bias_regularizer, constraint=self.bias_constraint, trainable=True, dtype=self.dtype) else: self.bias = None self.built = True def call(self, inputs): raise NotImplementedError def get_config(self): config = { 'filters': self.filters, 'kernel_size': self.kernel_size, 'strides': self.strides, 'padding': self.padding, 'data_format': self.data_format, 'depth_multiplier': self.depth_multiplier, 'dilation_rate': self.dilation_rate, 'activation': activations.serialize(self.activation), 'use_bias': self.use_bias, 'depthwise_initializer': initializers.serialize(self.depthwise_initializer), 'pointwise_initializer': initializers.serialize(self.pointwise_initializer), 'bias_initializer': initializers.serialize(self.bias_initializer), 'depthwise_regularizer': regularizers.serialize(self.depthwise_regularizer), 'pointwise_regularizer': regularizers.serialize(self.pointwise_regularizer), 'bias_regularizer': 
regularizers.serialize(self.bias_regularizer), 'activity_regularizer': regularizers.serialize(self.activity_regularizer), 'depthwise_constraint': constraints.serialize(self.depthwise_constraint), 'pointwise_constraint': constraints.serialize(self.pointwise_constraint), 'bias_constraint': constraints.serialize(self.bias_constraint) } base_config = super(SeparableConv, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.SeparableConv1D', 'keras.layers.SeparableConvolution1D') class SeparableConv1D(SeparableConv): """Depthwise separable 1D convolution. This layer performs a depthwise convolution that acts separately on channels, followed by a pointwise convolution that mixes channels. If `use_bias` is True and a bias initializer is provided, it adds a bias vector to the output. It then optionally applies an activation function to produce the final output. Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of filters in the convolution). kernel_size: A single integer specifying the spatial dimensions of the filters. strides: A single integer specifying the strides of the convolution. Specifying any `stride` value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: One of `"valid"`, `"same"`, or `"causal"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, length, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, length)`. dilation_rate: A single integer, specifying the dilation rate to use for dilated convolution. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any stride value != 1. depth_multiplier: The number of depthwise convolution output channels for each input channel. The total number of depthwise convolution output channels will be equal to `num_filters_in * depth_multiplier`. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias. depthwise_initializer: An initializer for the depthwise convolution kernel ( see `keras.initializers`). pointwise_initializer: An initializer for the pointwise convolution kernel ( see `keras.initializers`). bias_initializer: An initializer for the bias vector. If None, the default initializer will be used (see `keras.initializers`). depthwise_regularizer: Optional regularizer for the depthwise convolution kernel (see `keras.regularizers`). pointwise_regularizer: Optional regularizer for the pointwise convolution kernel (see `keras.regularizers`). bias_regularizer: Optional regularizer for the bias vector ( see `keras.regularizers`). activity_regularizer: Optional regularizer function for the output ( see `keras.regularizers`). depthwise_constraint: Optional projection function to be applied to the depthwise kernel after being updated by an `Optimizer` (e.g. used for norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training ( see `keras.constraints`). 
    pointwise_constraint: Optional projection function to be applied to the
      pointwise kernel after being updated by an `Optimizer` (
      see `keras.constraints`).
    bias_constraint: Optional projection function to be applied to the
      bias after being updated by an `Optimizer` (
      see `keras.constraints`).
    trainable: Boolean, if `True` the weights of this layer will be marked as
      trainable (and listed in `layer.trainable_weights`).
    name: A string, the name of the layer.

  Input shape:
    3D tensor with shape:
    `(batch_size, channels, steps)` if data_format='channels_first'
    or 3D tensor with shape:
    `(batch_size, steps, channels)` if data_format='channels_last'.

  Output shape:
    3D tensor with shape:
    `(batch_size, filters, new_steps)` if data_format='channels_first'
    or 3D tensor with shape:
    `(batch_size, new_steps, filters)` if data_format='channels_last'.
    `new_steps` value might have changed due to padding or strides.

  Returns:
    A tensor of rank 3 representing
    `activation(separableconv1d(inputs, kernel) + bias)`.

  Raises:
    ValueError: when both `strides` > 1 and `dilation_rate` > 1.
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=1,
               padding='valid',
               data_format=None,
               dilation_rate=1,
               depth_multiplier=1,
               activation=None,
               use_bias=True,
               depthwise_initializer='glorot_uniform',
               pointwise_initializer='glorot_uniform',
               bias_initializer='zeros',
               depthwise_regularizer=None,
               pointwise_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               depthwise_constraint=None,
               pointwise_constraint=None,
               bias_constraint=None,
               **kwargs):
    super(SeparableConv1D, self).__init__(
        rank=1,
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        depth_multiplier=depth_multiplier,
        activation=activations.get(activation),
        use_bias=use_bias,
        depthwise_initializer=initializers.get(depthwise_initializer),
        pointwise_initializer=initializers.get(pointwise_initializer),
        bias_initializer=initializers.get(bias_initializer),
        depthwise_regularizer=regularizers.get(depthwise_regularizer),
        pointwise_regularizer=regularizers.get(pointwise_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        activity_regularizer=regularizers.get(activity_regularizer),
        depthwise_constraint=constraints.get(depthwise_constraint),
        pointwise_constraint=constraints.get(pointwise_constraint),
        bias_constraint=constraints.get(bias_constraint),
        **kwargs)

  def call(self, inputs):
    if self.padding == 'causal':
      inputs = array_ops.pad(inputs, self._compute_causal_padding())
    if self.data_format == 'channels_last':
      strides = (1,) + self.strides * 2 + (1,)
      spatial_start_dim = 1
    else:
      strides = (1, 1) + self.strides * 2
      spatial_start_dim = 2

    # Explicitly broadcast inputs and kernels to 4D.
    # TODO(fchollet): refactor when a native separable_conv1d op is available.
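    # Insert a dummy spatial dimension of size 1 (and matching kernel dims) so
    # the 1D separable convolution can be computed with nn.separable_conv2d.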
inputs = array_ops.expand_dims(inputs, spatial_start_dim) depthwise_kernel = array_ops.expand_dims(self.depthwise_kernel, 0) pointwise_kernel = array_ops.expand_dims(self.pointwise_kernel, 0) dilation_rate = (1,) + self.dilation_rate if self.padding == 'causal': op_padding = 'valid' else: op_padding = self.padding outputs = nn.separable_conv2d( inputs, depthwise_kernel, pointwise_kernel, strides=strides, padding=op_padding.upper(), rate=dilation_rate, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) if self.use_bias: outputs = nn.bias_add( outputs, self.bias, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) outputs = array_ops.squeeze(outputs, [spatial_start_dim]) if self.activation is not None: return self.activation(outputs) return outputs @keras_export('keras.layers.SeparableConv2D', 'keras.layers.SeparableConvolution2D') class SeparableConv2D(SeparableConv): """Depthwise separable 2D convolution. Separable convolutions consist of first performing a depthwise spatial convolution (which acts on each input channel separately) followed by a pointwise convolution which mixes the resulting output channels. The `depth_multiplier` argument controls how many output channels are generated per input channel in the depthwise step. Intuitively, separable convolutions can be understood as a way to factorize a convolution kernel into two smaller kernels, or as an extreme version of an Inception block. Arguments: filters: Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution). kernel_size: An integer or tuple/list of 2 integers, specifying the height and width of the 2D convolution window. Can be a single integer to specify the same value for all spatial dimensions. strides: An integer or tuple/list of 2 integers, specifying the strides of the convolution along the height and width. Can be a single integer to specify the same value for all spatial dimensions. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1. padding: one of `"valid"` or `"same"` (case-insensitive). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". dilation_rate: An integer or tuple/list of 2 integers, specifying the dilation rate to use for dilated convolution. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any `strides` value != 1. depth_multiplier: The number of depthwise convolution output channels for each input channel. The total number of depthwise convolution output channels will be equal to `filters_in * depth_multiplier`. activation: Activation function to use. If you don't specify anything, no activation is applied ( see `keras.activations`). use_bias: Boolean, whether the layer uses a bias vector. depthwise_initializer: Initializer for the depthwise kernel matrix ( see `keras.initializers`). pointwise_initializer: Initializer for the pointwise kernel matrix ( see `keras.initializers`). bias_initializer: Initializer for the bias vector ( see `keras.initializers`). 
depthwise_regularizer: Regularizer function applied to the depthwise kernel matrix (see `keras.regularizers`). pointwise_regularizer: Regularizer function applied to the pointwise kernel matrix (see `keras.regularizers`). bias_regularizer: Regularizer function applied to the bias vector ( see `keras.regularizers`). activity_regularizer: Regularizer function applied to the output of the layer (its "activation") ( see `keras.regularizers`). depthwise_constraint: Constraint function applied to the depthwise kernel matrix ( see `keras.constraints`). pointwise_constraint: Constraint function applied to the pointwise kernel matrix ( see `keras.constraints`). bias_constraint: Constraint function applied to the bias vector ( see `keras.constraints`). Input shape: 4D tensor with shape: `(batch_size, channels, rows, cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, rows, cols, channels)` if data_format='channels_last'. Output shape: 4D tensor with shape: `(batch_size, filters, new_rows, new_cols)` if data_format='channels_first' or 4D tensor with shape: `(batch_size, new_rows, new_cols, filters)` if data_format='channels_last'. `rows` and `cols` values might have changed due to padding. Returns: A tensor of rank 4 representing `activation(separableconv2d(inputs, kernel) + bias)`. Raises: ValueError: if `padding` is "causal". ValueError: when both `strides` > 1 and `dilation_rate` > 1. """ def __init__(self, filters, kernel_size, strides=(1, 1), padding='valid', data_format=None, dilation_rate=(1, 1), depth_multiplier=1, activation=None, use_bias=True, depthwise_initializer='glorot_uniform', pointwise_initializer='glorot_uniform', bias_initializer='zeros', depthwise_regularizer=None, pointwise_regularizer=None, bias_regularizer=None, activity_regularizer=None, depthwise_constraint=None, pointwise_constraint=None, bias_constraint=None, **kwargs): super(SeparableConv2D, self).__init__( rank=2, filters=filters, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, depth_multiplier=depth_multiplier, activation=activations.get(activation), use_bias=use_bias, depthwise_initializer=initializers.get(depthwise_initializer), pointwise_initializer=initializers.get(pointwise_initializer), bias_initializer=initializers.get(bias_initializer), depthwise_regularizer=regularizers.get(depthwise_regularizer), pointwise_regularizer=regularizers.get(pointwise_regularizer), bias_regularizer=regularizers.get(bias_regularizer), activity_regularizer=regularizers.get(activity_regularizer), depthwise_constraint=constraints.get(depthwise_constraint), pointwise_constraint=constraints.get(pointwise_constraint), bias_constraint=constraints.get(bias_constraint), **kwargs) def call(self, inputs): # Apply the actual ops. if self.data_format == 'channels_last': strides = (1,) + self.strides + (1,) else: strides = (1, 1) + self.strides outputs = nn.separable_conv2d( inputs, self.depthwise_kernel, self.pointwise_kernel, strides=strides, padding=self.padding.upper(), rate=self.dilation_rate, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) if self.use_bias: outputs = nn.bias_add( outputs, self.bias, data_format=conv_utils.convert_data_format(self.data_format, ndim=4)) if self.activation is not None: return self.activation(outputs) return outputs @keras_export('keras.layers.DepthwiseConv2D') class DepthwiseConv2D(Conv2D): """Depthwise separable 2D convolution. 

  Depthwise separable convolutions consist of performing just the first step
  of a depthwise spatial convolution (which acts on each input channel
  separately). The `depth_multiplier` argument controls how many output
  channels are generated per input channel in the depthwise step.

  Arguments:
    kernel_size: An integer or tuple/list of 2 integers, specifying the height
      and width of the 2D convolution window. Can be a single integer to
      specify the same value for all spatial dimensions.
    strides: An integer or tuple/list of 2 integers, specifying the strides of
      the convolution along the height and width. Can be a single integer to
      specify the same value for all spatial dimensions. Specifying any stride
      value != 1 is incompatible with specifying any `dilation_rate` value !=
      1.
    padding: one of `'valid'` or `'same'` (case-insensitive).
    depth_multiplier: The number of depthwise convolution output channels for
      each input channel. The total number of depthwise convolution output
      channels will be equal to `filters_in * depth_multiplier`.
    data_format: A string, one of `channels_last` (default) or
      `channels_first`. The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape `(batch_size, height,
      width, channels)` while `channels_first` corresponds to inputs with
      shape `(batch_size, channels, height, width)`. It defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`. If you never set it, then it will be
      'channels_last'.
    dilation_rate: An integer or tuple/list of 2 integers, specifying the
      dilation rate to use for dilated convolution. Currently, specifying any
      `dilation_rate` value != 1 is incompatible with specifying any `strides`
      value != 1.
    activation: Activation function to use. If you don't specify anything, no
      activation is applied (
      see `keras.activations`).
    use_bias: Boolean, whether the layer uses a bias vector.
    depthwise_initializer: Initializer for the depthwise kernel matrix (
      see `keras.initializers`).
    bias_initializer: Initializer for the bias vector (
      see `keras.initializers`).
    depthwise_regularizer: Regularizer function applied to the depthwise kernel
      matrix (see `keras.regularizers`).
    bias_regularizer: Regularizer function applied to the bias vector (
      see `keras.regularizers`).
    activity_regularizer: Regularizer function applied to the output of the
      layer (its 'activation') (
      see `keras.regularizers`).
    depthwise_constraint: Constraint function applied to the depthwise kernel
      matrix (
      see `keras.constraints`).
    bias_constraint: Constraint function applied to the bias vector (
      see `keras.constraints`).

  Input shape:
    4D tensor with shape:
    `[batch_size, channels, rows, cols]` if data_format='channels_first'
    or 4D tensor with shape:
    `[batch_size, rows, cols, channels]` if data_format='channels_last'.

  Output shape:
    4D tensor with shape:
    `[batch_size, channels * depth_multiplier, new_rows, new_cols]` if
    data_format='channels_first'
    or 4D tensor with shape:
    `[batch_size, new_rows, new_cols, channels * depth_multiplier]` if
    data_format='channels_last'. `rows` and `cols` values might have changed
    due to padding.

  Returns:
    A tensor of rank 4 representing
    `activation(depthwiseconv2d(inputs, kernel) + bias)`.

  Raises:
    ValueError: if `padding` is "causal".
    ValueError: when both `strides` > 1 and `dilation_rate` > 1.
""" def __init__(self, kernel_size, strides=(1, 1), padding='valid', depth_multiplier=1, data_format=None, dilation_rate=(1, 1), activation=None, use_bias=True, depthwise_initializer='glorot_uniform', bias_initializer='zeros', depthwise_regularizer=None, bias_regularizer=None, activity_regularizer=None, depthwise_constraint=None, bias_constraint=None, **kwargs): super(DepthwiseConv2D, self).__init__( filters=None, kernel_size=kernel_size, strides=strides, padding=padding, data_format=data_format, dilation_rate=dilation_rate, activation=activation, use_bias=use_bias, bias_regularizer=bias_regularizer, activity_regularizer=activity_regularizer, bias_constraint=bias_constraint, **kwargs) self.depth_multiplier = depth_multiplier self.depthwise_initializer = initializers.get(depthwise_initializer) self.depthwise_regularizer = regularizers.get(depthwise_regularizer) self.depthwise_constraint = constraints.get(depthwise_constraint) self.bias_initializer = initializers.get(bias_initializer) def build(self, input_shape): if len(input_shape) < 4: raise ValueError('Inputs to `DepthwiseConv2D` should have rank 4. ' 'Received input shape:', str(input_shape)) input_shape = tensor_shape.TensorShape(input_shape) channel_axis = self._get_channel_axis() if input_shape.dims[channel_axis].value is None: raise ValueError('The channel dimension of the inputs to ' '`DepthwiseConv2D` ' 'should be defined. Found `None`.') input_dim = int(input_shape[channel_axis]) depthwise_kernel_shape = (self.kernel_size[0], self.kernel_size[1], input_dim, self.depth_multiplier) self.depthwise_kernel = self.add_weight( shape=depthwise_kernel_shape, initializer=self.depthwise_initializer, name='depthwise_kernel', regularizer=self.depthwise_regularizer, constraint=self.depthwise_constraint) if self.use_bias: self.bias = self.add_weight(shape=(input_dim * self.depth_multiplier,), initializer=self.bias_initializer, name='bias', regularizer=self.bias_regularizer, constraint=self.bias_constraint) else: self.bias = None # Set input spec. 
self.input_spec = InputSpec(ndim=4, axes={channel_axis: input_dim}) self.built = True def call(self, inputs): outputs = backend.depthwise_conv2d( inputs, self.depthwise_kernel, strides=self.strides, padding=self.padding, dilation_rate=self.dilation_rate, data_format=self.data_format) if self.use_bias: outputs = backend.bias_add( outputs, self.bias, data_format=self.data_format) if self.activation is not None: return self.activation(outputs) return outputs @tf_utils.shape_type_conversion def compute_output_shape(self, input_shape): if self.data_format == 'channels_first': rows = input_shape[2] cols = input_shape[3] out_filters = input_shape[1] * self.depth_multiplier elif self.data_format == 'channels_last': rows = input_shape[1] cols = input_shape[2] out_filters = input_shape[3] * self.depth_multiplier rows = conv_utils.conv_output_length(rows, self.kernel_size[0], self.padding, self.strides[0], self.dilation_rate[0]) cols = conv_utils.conv_output_length(cols, self.kernel_size[1], self.padding, self.strides[1], self.dilation_rate[1]) if self.data_format == 'channels_first': return (input_shape[0], out_filters, rows, cols) elif self.data_format == 'channels_last': return (input_shape[0], rows, cols, out_filters) def get_config(self): config = super(DepthwiseConv2D, self).get_config() config.pop('filters') config.pop('kernel_initializer') config.pop('kernel_regularizer') config.pop('kernel_constraint') config['depth_multiplier'] = self.depth_multiplier config['depthwise_initializer'] = initializers.serialize( self.depthwise_initializer) config['depthwise_regularizer'] = regularizers.serialize( self.depthwise_regularizer) config['depthwise_constraint'] = constraints.serialize( self.depthwise_constraint) return config @keras_export('keras.layers.UpSampling1D') class UpSampling1D(Layer): """Upsampling layer for 1D inputs. Repeats each temporal step `size` times along the time axis. Examples: >>> input_shape = (2, 2, 3) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> print(x) [[[ 0 1 2] [ 3 4 5]] [[ 6 7 8] [ 9 10 11]]] >>> y = tf.keras.layers.UpSampling1D(size=2)(x) >>> print(y) tf.Tensor( [[[ 0 1 2] [ 0 1 2] [ 3 4 5] [ 3 4 5]] [[ 6 7 8] [ 6 7 8] [ 9 10 11] [ 9 10 11]]], shape=(2, 4, 3), dtype=int64) Arguments: size: Integer. Upsampling factor. Input shape: 3D tensor with shape: `(batch_size, steps, features)`. Output shape: 3D tensor with shape: `(batch_size, upsampled_steps, features)`. """ def __init__(self, size=2, **kwargs): super(UpSampling1D, self).__init__(**kwargs) self.size = int(size) self.input_spec = InputSpec(ndim=3) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() size = self.size * input_shape[1] if input_shape[1] is not None else None return tensor_shape.TensorShape([input_shape[0], size, input_shape[2]]) def call(self, inputs): output = backend.repeat_elements(inputs, self.size, axis=1) return output def get_config(self): config = {'size': self.size} base_config = super(UpSampling1D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.UpSampling2D') class UpSampling2D(Layer): """Upsampling layer for 2D inputs. Repeats the rows and columns of the data by `size[0]` and `size[1]` respectively. 
Examples: >>> input_shape = (2, 2, 1, 3) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> print(x) [[[[ 0 1 2]] [[ 3 4 5]]] [[[ 6 7 8]] [[ 9 10 11]]]] >>> y = tf.keras.layers.UpSampling2D(size=(1, 2))(x) >>> print(y) tf.Tensor( [[[[ 0 1 2] [ 0 1 2]] [[ 3 4 5] [ 3 4 5]]] [[[ 6 7 8] [ 6 7 8]] [[ 9 10 11] [ 9 10 11]]]], shape=(2, 2, 2, 3), dtype=int64) Arguments: size: Int, or tuple of 2 integers. The upsampling factors for rows and columns. data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". interpolation: A string, one of `nearest` or `bilinear`. Input shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, rows, cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, rows, cols)` Output shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, upsampled_rows, upsampled_cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, upsampled_rows, upsampled_cols)` """ def __init__(self, size=(2, 2), data_format=None, interpolation='nearest', **kwargs): super(UpSampling2D, self).__init__(**kwargs) self.data_format = conv_utils.normalize_data_format(data_format) self.size = conv_utils.normalize_tuple(size, 2, 'size') if interpolation not in {'nearest', 'bilinear'}: raise ValueError('`interpolation` argument should be one of `"nearest"` ' 'or `"bilinear"`.') self.interpolation = interpolation self.input_spec = InputSpec(ndim=4) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() if self.data_format == 'channels_first': height = self.size[0] * input_shape[ 2] if input_shape[2] is not None else None width = self.size[1] * input_shape[ 3] if input_shape[3] is not None else None return tensor_shape.TensorShape( [input_shape[0], input_shape[1], height, width]) else: height = self.size[0] * input_shape[ 1] if input_shape[1] is not None else None width = self.size[1] * input_shape[ 2] if input_shape[2] is not None else None return tensor_shape.TensorShape( [input_shape[0], height, width, input_shape[3]]) def call(self, inputs): return backend.resize_images( inputs, self.size[0], self.size[1], self.data_format, interpolation=self.interpolation) def get_config(self): config = { 'size': self.size, 'data_format': self.data_format, 'interpolation': self.interpolation } base_config = super(UpSampling2D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.UpSampling3D') class UpSampling3D(Layer): """Upsampling layer for 3D inputs. Repeats the 1st, 2nd and 3rd dimensions of the data by `size[0]`, `size[1]` and `size[2]` respectively. Examples: >>> input_shape = (2, 1, 2, 1, 3) >>> x = tf.constant(1, shape=input_shape) >>> y = tf.keras.layers.UpSampling3D(size=2)(x) >>> print(y.shape) (2, 2, 4, 2, 3) Arguments: size: Int, or tuple of 3 integers. The upsampling factors for dim1, dim2 and dim3. data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. 
`channels_last` corresponds to inputs with shape `(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". Input shape: 5D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, dim1, dim2, dim3, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, dim1, dim2, dim3)` Output shape: 5D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, upsampled_dim1, upsampled_dim2, upsampled_dim3, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, upsampled_dim1, upsampled_dim2, upsampled_dim3)` """ def __init__(self, size=(2, 2, 2), data_format=None, **kwargs): self.data_format = conv_utils.normalize_data_format(data_format) self.size = conv_utils.normalize_tuple(size, 3, 'size') self.input_spec = InputSpec(ndim=5) super(UpSampling3D, self).__init__(**kwargs) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() if self.data_format == 'channels_first': dim1 = self.size[0] * input_shape[ 2] if input_shape[2] is not None else None dim2 = self.size[1] * input_shape[ 3] if input_shape[3] is not None else None dim3 = self.size[2] * input_shape[ 4] if input_shape[4] is not None else None return tensor_shape.TensorShape( [input_shape[0], input_shape[1], dim1, dim2, dim3]) else: dim1 = self.size[0] * input_shape[ 1] if input_shape[1] is not None else None dim2 = self.size[1] * input_shape[ 2] if input_shape[2] is not None else None dim3 = self.size[2] * input_shape[ 3] if input_shape[3] is not None else None return tensor_shape.TensorShape( [input_shape[0], dim1, dim2, dim3, input_shape[4]]) def call(self, inputs): return backend.resize_volumes( inputs, self.size[0], self.size[1], self.size[2], self.data_format) def get_config(self): config = {'size': self.size, 'data_format': self.data_format} base_config = super(UpSampling3D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.ZeroPadding1D') class ZeroPadding1D(Layer): """Zero-padding layer for 1D input (e.g. temporal sequence). Examples: >>> input_shape = (2, 2, 3) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> print(x) [[[ 0 1 2] [ 3 4 5]] [[ 6 7 8] [ 9 10 11]]] >>> y = tf.keras.layers.ZeroPadding1D(padding=2)(x) >>> print(y) tf.Tensor( [[[ 0 0 0] [ 0 0 0] [ 0 1 2] [ 3 4 5] [ 0 0 0] [ 0 0 0]] [[ 0 0 0] [ 0 0 0] [ 6 7 8] [ 9 10 11] [ 0 0 0] [ 0 0 0]]], shape=(2, 6, 3), dtype=int64) Arguments: padding: Int, or tuple of int (length 2), or dictionary. - If int: How many zeros to add at the beginning and end of the padding dimension (axis 1). - If tuple of int (length 2): How many zeros to add at the beginning and the end of the padding dimension (`(left_pad, right_pad)`). 
Input shape: 3D tensor with shape `(batch_size, axis_to_pad, features)` Output shape: 3D tensor with shape `(batch_size, padded_axis, features)` """ def __init__(self, padding=1, **kwargs): super(ZeroPadding1D, self).__init__(**kwargs) self.padding = conv_utils.normalize_tuple(padding, 2, 'padding') self.input_spec = InputSpec(ndim=3) def compute_output_shape(self, input_shape): if input_shape[1] is not None: length = input_shape[1] + self.padding[0] + self.padding[1] else: length = None return tensor_shape.TensorShape([input_shape[0], length, input_shape[2]]) def call(self, inputs): return backend.temporal_padding(inputs, padding=self.padding) def get_config(self): config = {'padding': self.padding} base_config = super(ZeroPadding1D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.ZeroPadding2D') class ZeroPadding2D(Layer): """Zero-padding layer for 2D input (e.g. picture). This layer can add rows and columns of zeros at the top, bottom, left and right side of an image tensor. Examples: >>> input_shape = (1, 1, 2, 2) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> print(x) [[[[0 1] [2 3]]]] >>> y = tf.keras.layers.ZeroPadding2D(padding=1)(x) >>> print(y) tf.Tensor( [[[[0 0] [0 0] [0 0] [0 0]] [[0 0] [0 1] [2 3] [0 0]] [[0 0] [0 0] [0 0] [0 0]]]], shape=(1, 3, 4, 2), dtype=int64) Arguments: padding: Int, or tuple of 2 ints, or tuple of 2 tuples of 2 ints. - If int: the same symmetric padding is applied to height and width. - If tuple of 2 ints: interpreted as two different symmetric padding values for height and width: `(symmetric_height_pad, symmetric_width_pad)`. - If tuple of 2 tuples of 2 ints: interpreted as `((top_pad, bottom_pad), (left_pad, right_pad))` data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". Input shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, rows, cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, rows, cols)` Output shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, padded_rows, padded_cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, padded_rows, padded_cols)` """ def __init__(self, padding=(1, 1), data_format=None, **kwargs): super(ZeroPadding2D, self).__init__(**kwargs) self.data_format = conv_utils.normalize_data_format(data_format) if isinstance(padding, int): self.padding = ((padding, padding), (padding, padding)) elif hasattr(padding, '__len__'): if len(padding) != 2: raise ValueError('`padding` should have two elements. ' 'Found: ' + str(padding)) height_padding = conv_utils.normalize_tuple(padding[0], 2, '1st entry of padding') width_padding = conv_utils.normalize_tuple(padding[1], 2, '2nd entry of padding') self.padding = (height_padding, width_padding) else: raise ValueError('`padding` should be either an int, ' 'a tuple of 2 ints ' '(symmetric_height_pad, symmetric_width_pad), ' 'or a tuple of 2 tuples of 2 ints ' '((top_pad, bottom_pad), (left_pad, right_pad)). 
' 'Found: ' + str(padding))
    self.input_spec = InputSpec(ndim=4)

  def compute_output_shape(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if self.data_format == 'channels_first':
      if input_shape[2] is not None:
        rows = input_shape[2] + self.padding[0][0] + self.padding[0][1]
      else:
        rows = None
      if input_shape[3] is not None:
        cols = input_shape[3] + self.padding[1][0] + self.padding[1][1]
      else:
        cols = None
      return tensor_shape.TensorShape(
          [input_shape[0], input_shape[1], rows, cols])
    elif self.data_format == 'channels_last':
      if input_shape[1] is not None:
        rows = input_shape[1] + self.padding[0][0] + self.padding[0][1]
      else:
        rows = None
      if input_shape[2] is not None:
        cols = input_shape[2] + self.padding[1][0] + self.padding[1][1]
      else:
        cols = None
      return tensor_shape.TensorShape(
          [input_shape[0], rows, cols, input_shape[3]])

  def call(self, inputs):
    return backend.spatial_2d_padding(
        inputs, padding=self.padding, data_format=self.data_format)

  def get_config(self):
    config = {'padding': self.padding, 'data_format': self.data_format}
    base_config = super(ZeroPadding2D, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))


@keras_export('keras.layers.ZeroPadding3D')
class ZeroPadding3D(Layer):
  """Zero-padding layer for 3D data (spatial or spatio-temporal).

  Examples:

  >>> input_shape = (1, 1, 2, 2, 3)
  >>> x = np.arange(np.prod(input_shape)).reshape(input_shape)
  >>> y = tf.keras.layers.ZeroPadding3D(padding=2)(x)
  >>> print(y.shape)
  (1, 5, 6, 6, 3)

  Arguments:
    padding: Int, or tuple of 3 ints, or tuple of 3 tuples of 2 ints.
      - If int: the same symmetric padding is applied to all three spatial
        dimensions (dim1, dim2, and dim3).
      - If tuple of 3 ints: interpreted as three different symmetric padding
        values for dim1, dim2, and dim3:
        `(symmetric_dim1_pad, symmetric_dim2_pad, symmetric_dim3_pad)`.
      - If tuple of 3 tuples of 2 ints: interpreted as
        `((left_dim1_pad, right_dim1_pad), (left_dim2_pad, right_dim2_pad),
        (left_dim3_pad, right_dim3_pad))`
    data_format: A string, one of `channels_last` (default) or
      `channels_first`. The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape `(batch_size,
      spatial_dim1, spatial_dim2, spatial_dim3, channels)` while
      `channels_first` corresponds to inputs with shape `(batch_size, channels,
      spatial_dim1, spatial_dim2, spatial_dim3)`. It defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`. If you never set it, then it will be
      "channels_last".

  Input shape:
    5D tensor with shape:
    - If `data_format` is `"channels_last"`:
        `(batch_size, first_axis_to_pad, second_axis_to_pad,
        third_axis_to_pad, depth)`
    - If `data_format` is `"channels_first"`:
        `(batch_size, depth, first_axis_to_pad, second_axis_to_pad,
        third_axis_to_pad)`

  Output shape:
    5D tensor with shape:
    - If `data_format` is `"channels_last"`:
        `(batch_size, first_padded_axis, second_padded_axis,
        third_padded_axis, depth)`
    - If `data_format` is `"channels_first"`:
        `(batch_size, depth, first_padded_axis, second_padded_axis,
        third_padded_axis)`
  """

  def __init__(self, padding=(1, 1, 1), data_format=None, **kwargs):
    super(ZeroPadding3D, self).__init__(**kwargs)
    self.data_format = conv_utils.normalize_data_format(data_format)
    if isinstance(padding, int):
      self.padding = ((padding, padding), (padding, padding),
                      (padding, padding))
    elif hasattr(padding, '__len__'):
      if len(padding) != 3:
        raise ValueError('`padding` should have 3 elements. '
                         'Found: ' + str(padding))
      dim1_padding = conv_utils.normalize_tuple(padding[0], 2,
                                                '1st entry of padding')
      dim2_padding = conv_utils.normalize_tuple(padding[1], 2,
                                                '2nd entry of padding')
      dim3_padding = conv_utils.normalize_tuple(padding[2], 2,
                                                '3rd entry of padding')
      self.padding = (dim1_padding, dim2_padding, dim3_padding)
    else:
      raise ValueError(
          '`padding` should be either an int, '
          'a tuple of 3 ints '
          '(symmetric_dim1_pad, symmetric_dim2_pad, symmetric_dim3_pad), '
          'or a tuple of 3 tuples of 2 ints '
          '((left_dim1_pad, right_dim1_pad),'
          ' (left_dim2_pad, right_dim2_pad),'
          ' (left_dim3_pad, right_dim3_pad)). '
          'Found: ' + str(padding))
    self.input_spec = InputSpec(ndim=5)

  def compute_output_shape(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if self.data_format == 'channels_first':
      if input_shape[2] is not None:
        dim1 = input_shape[2] + self.padding[0][0] + self.padding[0][1]
      else:
        dim1 = None
      if input_shape[3] is not None:
        dim2 = input_shape[3] + self.padding[1][0] + self.padding[1][1]
      else:
        dim2 = None
      if input_shape[4] is not None:
        dim3 = input_shape[4] + self.padding[2][0] + self.padding[2][1]
      else:
        dim3 = None
      return tensor_shape.TensorShape(
          [input_shape[0], input_shape[1], dim1, dim2, dim3])
    elif self.data_format == 'channels_last':
      if input_shape[1] is not None:
        dim1 = input_shape[1] + self.padding[0][0] + self.padding[0][1]
      else:
        dim1 = None
      if input_shape[2] is not None:
        dim2 = input_shape[2] + self.padding[1][0] + self.padding[1][1]
      else:
        dim2 = None
      if input_shape[3] is not None:
        dim3 = input_shape[3] + self.padding[2][0] + self.padding[2][1]
      else:
return tensor_shape.TensorShape( [input_shape[0], dim1, dim2, dim3, input_shape[4]]) def call(self, inputs): return backend.spatial_3d_padding( inputs, padding=self.padding, data_format=self.data_format) def get_config(self): config = {'padding': self.padding, 'data_format': self.data_format} base_config = super(ZeroPadding3D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.Cropping1D') class Cropping1D(Layer): """Cropping layer for 1D input (e.g. temporal sequence). It crops along the time dimension (axis 1). Examples: >>> input_shape = (2, 3, 2) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> print(x) [[[ 0 1] [ 2 3] [ 4 5]] [[ 6 7] [ 8 9] [10 11]]] >>> y = tf.keras.layers.Cropping1D(cropping=1)(x) >>> print(y) tf.Tensor( [[[2 3]] [[8 9]]], shape=(2, 1, 2), dtype=int64) Arguments: cropping: Int or tuple of int (length 2) How many units should be trimmed off at the beginning and end of the cropping dimension (axis 1). If a single int is provided, the same value will be used for both. Input shape: 3D tensor with shape `(batch_size, axis_to_crop, features)` Output shape: 3D tensor with shape `(batch_size, cropped_axis, features)` """ def __init__(self, cropping=(1, 1), **kwargs): super(Cropping1D, self).__init__(**kwargs) self.cropping = conv_utils.normalize_tuple(cropping, 2, 'cropping') self.input_spec = InputSpec(ndim=3) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() if input_shape[1] is not None: length = input_shape[1] - self.cropping[0] - self.cropping[1] else: length = None return tensor_shape.TensorShape([input_shape[0], length, input_shape[2]]) def call(self, inputs): if self.cropping[1] == 0: return inputs[:, self.cropping[0]:, :] else: return inputs[:, self.cropping[0]:-self.cropping[1], :] def get_config(self): config = {'cropping': self.cropping} base_config = super(Cropping1D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.Cropping2D') class Cropping2D(Layer): """Cropping layer for 2D input (e.g. picture). It crops along spatial dimensions, i.e. height and width. Examples: >>> input_shape = (2, 28, 28, 3) >>> x = np.arange(np.prod(input_shape)).reshape(input_shape) >>> y = tf.keras.layers.Cropping2D(cropping=((2, 2), (4, 4)))(x) >>> print(y.shape) (2, 24, 20, 3) Arguments: cropping: Int, or tuple of 2 ints, or tuple of 2 tuples of 2 ints. - If int: the same symmetric cropping is applied to height and width. - If tuple of 2 ints: interpreted as two different symmetric cropping values for height and width: `(symmetric_height_crop, symmetric_width_crop)`. - If tuple of 2 tuples of 2 ints: interpreted as `((top_crop, bottom_crop), (left_crop, right_crop))` data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch_size, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch_size, channels, height, width)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "channels_last". 
Input shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, rows, cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, rows, cols)` Output shape: 4D tensor with shape: - If `data_format` is `"channels_last"`: `(batch_size, cropped_rows, cropped_cols, channels)` - If `data_format` is `"channels_first"`: `(batch_size, channels, cropped_rows, cropped_cols)` """ def __init__(self, cropping=((0, 0), (0, 0)), data_format=None, **kwargs): super(Cropping2D, self).__init__(**kwargs) self.data_format = conv_utils.normalize_data_format(data_format) if isinstance(cropping, int): self.cropping = ((cropping, cropping), (cropping, cropping)) elif hasattr(cropping, '__len__'): if len(cropping) != 2: raise ValueError('`cropping` should have two elements. ' 'Found: ' + str(cropping)) height_cropping = conv_utils.normalize_tuple(cropping[0], 2, '1st entry of cropping') width_cropping = conv_utils.normalize_tuple(cropping[1], 2, '2nd entry of cropping') self.cropping = (height_cropping, width_cropping) else: raise ValueError('`cropping` should be either an int, ' 'a tuple of 2 ints ' '(symmetric_height_crop, symmetric_width_crop), ' 'or a tuple of 2 tuples of 2 ints ' '((top_crop, bottom_crop), (left_crop, right_crop)). ' 'Found: ' + str(cropping)) self.input_spec = InputSpec(ndim=4) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() # pylint: disable=invalid-unary-operand-type if self.data_format == 'channels_first': return tensor_shape.TensorShape([ input_shape[0], input_shape[1], input_shape[2] - self.cropping[0][0] - self.cropping[0][1] if input_shape[2] else None, input_shape[3] - self.cropping[1][0] - self.cropping[1][1] if input_shape[3] else None ]) else: return tensor_shape.TensorShape([ input_shape[0], input_shape[1] - self.cropping[0][0] - self.cropping[0][1] if input_shape[1] else None, input_shape[2] - self.cropping[1][0] - self.cropping[1][1] if input_shape[2] else None, input_shape[3] ]) # pylint: enable=invalid-unary-operand-type def call(self, inputs): # pylint: disable=invalid-unary-operand-type if self.data_format == 'channels_first': if self.cropping[0][1] == self.cropping[1][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][0]:] elif self.cropping[0][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][0]: -self.cropping[1][1]] elif self.cropping[1][1] == 0: return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:] return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:-self.cropping[1][1]] else: if self.cropping[0][1] == self.cropping[1][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][0]:, :] elif self.cropping[0][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][0]: -self.cropping[1][1], :] elif self.cropping[1][1] == 0: return inputs[:, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:, :] return inputs[:, self.cropping[0][0]:-self.cropping[0][1], self.cropping[ 1][0]:-self.cropping[1][1], :] # pylint: disable=invalid-unary-operand-type # pylint: enable=invalid-unary-operand-type def get_config(self): config = {'cropping': self.cropping, 'data_format': self.data_format} base_config = super(Cropping2D, self).get_config() return dict(list(base_config.items()) + list(config.items())) @keras_export('keras.layers.Cropping3D') class Cropping3D(Layer): """Cropping layer for 3D data (e.g. spatial or spatio-temporal). 

  Examples:

  >>> input_shape = (2, 28, 28, 10, 3)
  >>> x = np.arange(np.prod(input_shape)).reshape(input_shape)
  >>> y = tf.keras.layers.Cropping3D(cropping=(2, 4, 2))(x)
  >>> print(y.shape)
  (2, 24, 20, 6, 3)

  Arguments:
    cropping: Int, or tuple of 3 ints, or tuple of 3 tuples of 2 ints.
      - If int: the same symmetric cropping is applied to depth, height, and
        width.
      - If tuple of 3 ints: interpreted as three different symmetric cropping
        values for depth, height, and width:
        `(symmetric_dim1_crop, symmetric_dim2_crop, symmetric_dim3_crop)`.
      - If tuple of 3 tuples of 2 ints: interpreted as
        `((left_dim1_crop, right_dim1_crop), (left_dim2_crop,
        right_dim2_crop), (left_dim3_crop, right_dim3_crop))`
    data_format: A string, one of `channels_last` (default) or
      `channels_first`. The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape `(batch_size,
      spatial_dim1, spatial_dim2, spatial_dim3, channels)` while
      `channels_first` corresponds to inputs with shape `(batch_size, channels,
      spatial_dim1, spatial_dim2, spatial_dim3)`. It defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`. If you never set it, then it will be
      "channels_last".

  Input shape:
    5D tensor with shape:
    - If `data_format` is `"channels_last"`:
        `(batch_size, first_axis_to_crop, second_axis_to_crop,
        third_axis_to_crop, depth)`
    - If `data_format` is `"channels_first"`:
        `(batch_size, depth, first_axis_to_crop, second_axis_to_crop,
        third_axis_to_crop)`

  Output shape:
    5D tensor with shape:
    - If `data_format` is `"channels_last"`:
        `(batch_size, first_cropped_axis, second_cropped_axis,
        third_cropped_axis, depth)`
    - If `data_format` is `"channels_first"`:
        `(batch_size, depth, first_cropped_axis, second_cropped_axis,
        third_cropped_axis)`
  """

  def __init__(self,
               cropping=((1, 1), (1, 1), (1, 1)),
               data_format=None,
               **kwargs):
    super(Cropping3D, self).__init__(**kwargs)
    self.data_format = conv_utils.normalize_data_format(data_format)
    if isinstance(cropping, int):
      self.cropping = ((cropping, cropping), (cropping, cropping),
                       (cropping, cropping))
    elif hasattr(cropping, '__len__'):
      if len(cropping) != 3:
        raise ValueError('`cropping` should have 3 elements. '
                         'Found: ' + str(cropping))
      dim1_cropping = conv_utils.normalize_tuple(cropping[0], 2,
                                                 '1st entry of cropping')
      dim2_cropping = conv_utils.normalize_tuple(cropping[1], 2,
                                                 '2nd entry of cropping')
      dim3_cropping = conv_utils.normalize_tuple(cropping[2], 2,
                                                 '3rd entry of cropping')
      self.cropping = (dim1_cropping, dim2_cropping, dim3_cropping)
    else:
      raise ValueError(
          '`cropping` should be either an int, '
          'a tuple of 3 ints '
          '(symmetric_dim1_crop, symmetric_dim2_crop, symmetric_dim3_crop), '
          'or a tuple of 3 tuples of 2 ints '
          '((left_dim1_crop, right_dim1_crop),'
          ' (left_dim2_crop, right_dim2_crop),'
          ' (left_dim3_crop, right_dim3_crop)). 
' 'Found: ' + str(cropping)) self.input_spec = InputSpec(ndim=5) def compute_output_shape(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape).as_list() # pylint: disable=invalid-unary-operand-type if self.data_format == 'channels_first': if input_shape[2] is not None: dim1 = input_shape[2] - self.cropping[0][0] - self.cropping[0][1] else: dim1 = None if input_shape[3] is not None: dim2 = input_shape[3] - self.cropping[1][0] - self.cropping[1][1] else: dim2 = None if input_shape[4] is not None: dim3 = input_shape[4] - self.cropping[2][0] - self.cropping[2][1] else: dim3 = None return tensor_shape.TensorShape( [input_shape[0], input_shape[1], dim1, dim2, dim3]) elif self.data_format == 'channels_last': if input_shape[1] is not None: dim1 = input_shape[1] - self.cropping[0][0] - self.cropping[0][1] else: dim1 = None if input_shape[2] is not None: dim2 = input_shape[2] - self.cropping[1][0] - self.cropping[1][1] else: dim2 = None if input_shape[3] is not None: dim3 = input_shape[3] - self.cropping[2][0] - self.cropping[2][1] else: dim3 = None return tensor_shape.TensorShape( [input_shape[0], dim1, dim2, dim3, input_shape[4]]) # pylint: enable=invalid-unary-operand-type def call(self, inputs): # pylint: disable=invalid-unary-operand-type if self.data_format == 'channels_first': if self.cropping[0][1] == self.cropping[1][1] == self.cropping[2][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][0]:, self.cropping[2][0]:] elif self.cropping[0][1] == self.cropping[1][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][0]:, self.cropping[2][0]:-self.cropping[2][1]] elif self.cropping[1][1] == self.cropping[2][1] == 0: return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:, self.cropping[2][0]:] elif self.cropping[0][1] == self.cropping[2][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][0]: -self.cropping[1][1], self.cropping[2][0]:] elif self.cropping[0][1] == 0: return inputs[:, :, self.cropping[0][0]:, self.cropping[1][ 0]:-self.cropping[1][1], self.cropping[2][0]:-self.cropping[2][1]] elif self.cropping[1][1] == 0: return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self. cropping[1][0]:, self.cropping[2][0]:-self.cropping[2][1]] elif self.cropping[2][1] == 0: return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self. 
cropping[1][0]:-self.cropping[1][1], self.cropping[2][0]:] return inputs[:, :, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:-self.cropping[1][1], self.cropping[2][ 0]:-self.cropping[2][1]] else: if self.cropping[0][1] == self.cropping[1][1] == self.cropping[2][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][0]:, self.cropping[2][0]:, :] elif self.cropping[0][1] == self.cropping[1][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][0]:, self.cropping[2][0]:-self.cropping[2][1], :] elif self.cropping[1][1] == self.cropping[2][1] == 0: return inputs[:, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:, self.cropping[2][0]:, :] elif self.cropping[0][1] == self.cropping[2][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][0]: -self.cropping[1][1], self.cropping[2][0]:, :] elif self.cropping[0][1] == 0: return inputs[:, self.cropping[0][0]:, self.cropping[1][ 0]:-self.cropping[1][1], self.cropping[2][0]: -self.cropping[2][1], :] elif self.cropping[1][1] == 0: return inputs[:, self.cropping[0][ 0]:-self.cropping[0][1], self.cropping[1][0]:, self.cropping[2][0]: -self.cropping[2][1], :] elif self.cropping[2][1] == 0: return inputs[:, self.cropping[0][0]:-self.cropping[0][1], self.cropping[1][0]:-self.cropping[1][1], self.cropping[ 2][0]:, :] return inputs[:, self.cropping[0][0]:-self.cropping[0][1], self.cropping[ 1][0]:-self.cropping[1][1], self.cropping[2][0]: # pylint: disable=invalid-unary-operand-type -self.cropping[2][1], :] # pylint: disable=invalid-unary-operand-type # pylint: enable=invalid-unary-operand-type def get_config(self): config = {'cropping': self.cropping, 'data_format': self.data_format} base_config = super(Cropping3D, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Aliases Convolution1D = Conv1D Convolution2D = Conv2D Convolution3D = Conv3D SeparableConvolution1D = SeparableConv1D SeparableConvolution2D = SeparableConv2D Convolution2DTranspose = Conv2DTranspose Convolution3DTranspose = Conv3DTranspose Deconvolution2D = Deconv2D = Conv2DTranspose Deconvolution3D = Deconv3D = Conv3DTranspose
dim3 = None
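
# A hedged usage sketch (not part of the Keras source above): it assumes
# TensorFlow 2.x is installed and simply chains the layers defined above
# to show how their output shapes compose.
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.Input(shape=(28, 28, 3)),
    # Depthwise step only: channels go from 3 to 3 * depth_multiplier = 6.
    tf.keras.layers.DepthwiseConv2D(kernel_size=3, depth_multiplier=2,
                                    padding='same'),
    # Depthwise followed by pointwise: channels become `filters` = 16.
    tf.keras.layers.SeparableConv2D(filters=16, kernel_size=3, padding='same'),
    # Rows and columns are repeated twice: (28, 28) -> (56, 56).
    tf.keras.layers.UpSampling2D(size=(2, 2)),
    # One zero row/column added on every side: (56, 56) -> (58, 58).
    tf.keras.layers.ZeroPadding2D(padding=1),
    # Two rows/columns trimmed from every side: (58, 58) -> (54, 54).
    tf.keras.layers.Cropping2D(cropping=2),
])
print(model.output_shape)  # (None, 54, 54, 16)
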
context_processors.py
from django.conf import settings def sentry_dsn(request): return { 'SENTRY_DSN': settings.SENTRY_DSN } def
(request): return { 'COMMIT_SHA': settings.COMMIT_SHA }
commit_sha
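
# A hedged sketch (not part of the module above): context processors such as
# sentry_dsn and commit_sha are enabled via the TEMPLATES setting; the dotted
# path 'myproject.context_processors' is a placeholder for wherever this
# module actually lives.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.request',
                'myproject.context_processors.sentry_dsn',
                'myproject.context_processors.commit_sha',
            ],
        },
    },
]
# Any template rendered with a request context can then reference
# {{ SENTRY_DSN }} and {{ COMMIT_SHA }} directly.
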
inbound.go
// Copyright (c) 2021 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package protobuf import ( "context" "github.com/gogo/protobuf/proto" apiencoding "go.uber.org/yarpc/api/encoding" "go.uber.org/yarpc/api/transport" "go.uber.org/yarpc/pkg/errors" ) type unaryHandler struct { handle func(context.Context, proto.Message) (proto.Message, error) newRequest func() proto.Message codec *codec } func newUnaryHandler( handle func(context.Context, proto.Message) (proto.Message, error), newRequest func() proto.Message, codec *codec, ) *unaryHandler { return &unaryHandler{ handle: handle, newRequest: newRequest, codec: codec, } } func (u *unaryHandler) Handle(ctx context.Context, transportRequest *transport.Request, responseWriter transport.ResponseWriter) error { ctx, call, request, err := getProtoRequest(ctx, transportRequest, u.newRequest, u.codec) if err != nil { return err } response, appErr := u.handle(ctx, request) if err := call.WriteToResponse(responseWriter); err != nil { return err } var responseData []byte var responseCleanup func() if response != nil
_, err = responseWriter.Write(responseData) if err != nil { return err } if appErr != nil { responseWriter.SetApplicationError() } return convertToYARPCError(transportRequest.Encoding, appErr, u.codec, responseWriter) } type onewayHandler struct { handleOneway func(context.Context, proto.Message) error newRequest func() proto.Message codec *codec } func newOnewayHandler( handleOneway func(context.Context, proto.Message) error, newRequest func() proto.Message, codec *codec, ) *onewayHandler { return &onewayHandler{ handleOneway: handleOneway, newRequest: newRequest, codec: codec, } } func (o *onewayHandler) HandleOneway(ctx context.Context, transportRequest *transport.Request) error { ctx, _, request, err := getProtoRequest(ctx, transportRequest, o.newRequest, o.codec) if err != nil { return err } return convertToYARPCError(transportRequest.Encoding, o.handleOneway(ctx, request), o.codec, nil /*responseWriter*/) } type streamHandler struct { handle func(*ServerStream) error codec *codec } func newStreamHandler(handle func(*ServerStream) error) *streamHandler { return &streamHandler{handle, newCodec(nil /*AnyResolver*/)} } func (s *streamHandler) HandleStream(stream *transport.ServerStream) error { ctx, call := apiencoding.NewInboundCallWithOptions(stream.Context(), apiencoding.DisableResponseHeaders()) transportRequest := stream.Request() if err := call.ReadFromRequestMeta(transportRequest.Meta); err != nil { return err } protoStream := &ServerStream{ ctx: ctx, stream: stream, codec: s.codec, } return convertToYARPCError(transportRequest.Meta.Encoding, s.handle(protoStream), s.codec, nil /*responseWriter*/) } func getProtoRequest(ctx context.Context, transportRequest *transport.Request, newRequest func() proto.Message, codec *codec) (context.Context, *apiencoding.InboundCall, proto.Message, error) { if err := errors.ExpectEncodings(transportRequest, Encoding, JSONEncoding); err != nil { return nil, nil, nil, err } ctx, call := apiencoding.NewInboundCall(ctx) if err := call.ReadFromRequest(transportRequest); err != nil { return nil, nil, nil, err } request := newRequest() if err := unmarshal(transportRequest.Encoding, transportRequest.Body, request, codec); err != nil { return nil, nil, nil, errors.RequestBodyDecodeError(transportRequest, err) } return ctx, call, request, nil }
{ responseData, responseCleanup, err = marshal(transportRequest.Encoding, response, u.codec) if responseCleanup != nil { defer responseCleanup() } if err != nil { return errors.ResponseBodyEncodeError(transportRequest, err) } }
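
// A hedged usage sketch (not part of the YARPC source above): a function with
// the exact shape that newUnaryHandler wraps, using gogo's well-known
// StringValue type so the example is self-contained. The package name is a
// placeholder; in practice the handler and request factory come from
// generated code.
package handlers

import (
	"context"
	"errors"
	"strings"

	"github.com/gogo/protobuf/proto"
	"github.com/gogo/protobuf/types"
)

// EchoUpper matches func(context.Context, proto.Message) (proto.Message, error),
// the signature newUnaryHandler expects for its handle argument.
func EchoUpper(ctx context.Context, msg proto.Message) (proto.Message, error) {
	req, ok := msg.(*types.StringValue)
	if !ok {
		return nil, errors.New("unexpected request type")
	}
	return &types.StringValue{Value: strings.ToUpper(req.Value)}, nil
}

// NewRequest is the kind of factory newUnaryHandler uses to allocate an empty
// request before unmarshaling the transport body into it.
func NewRequest() proto.Message { return &types.StringValue{} }
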
video.rs
use crate::memory::{IORegister, Memory}; use std::cell::RefCell; use std::rc::Rc; pub const SCREEN_WIDTH: u8 = 160; pub const SCREEN_HEIGHT: u8 = 144; const LY_MAX: u8 = 154; const TICKS_VBLANK: u32 = 1140; const TICKS_HBLANK: u32 = 51; const TICKS_OAM: u32 = 20; const TICKS_TRANSFER: u32 = 43; const TICKS_PER_LINE: u32 = TICKS_HBLANK + TICKS_OAM + TICKS_TRANSFER; // These constants are for both x-/y-direction. const TILES_PER_BACKGROUND: u16 = 32; const PIXELS_PER_TILE: u8 = 8; // const PIXELS_PER_BACKGROUND: usize = PIXELS_PER_TILE as usize * TILES_PER_BACKGROUND as usize; // const PIXELS_PER_BACKGROUND_SQUARED: usize = PIXELS_PER_BACKGROUND * PIXELS_PER_BACKGROUND; const BYTES_PER_TILE: u16 = 16; const BYTES_PER_PIXEL: usize = 3; const BYTES_PER_LINE: usize = SCREEN_WIDTH as usize * BYTES_PER_PIXEL; const BYTES_PER_SCREEN: usize = SCREEN_HEIGHT as usize * BYTES_PER_LINE; pub struct Video { mem: Rc<RefCell<Memory>>, /// Pixel data that is written to the screen. pixel_data: [u8; BYTES_PER_SCREEN], /// Number of ticks left in current LCD mode. mode_counter: u32, /// Number of ticks left until this line is finished. line_counter: u32, } impl Video { pub fn tick(&mut self) -> Result<(), String> { if self.line_counter == 0 { let mut mem = self.mem.borrow_mut(); let ly = mem[IORegister::LY]; mem[IORegister::LY] = (ly + 1) % LY_MAX; if ly == mem[IORegister::LYC] { mem[IORegister::STAT] |= 0b0000_0100; if (mem[IORegister::STAT] & 0b0100_0000) != 0 { mem[IORegister::IF] |= 0b0000_0010; } } self.line_counter = TICKS_PER_LINE; } if self.mode_counter == 0 { use LCDMode::*; match self.lcd_mode() { HBlank => { let ly = self.mem.borrow()[IORegister::LY]; if ly == 144 { self.set_lcd_mode(VBlank); } else { self.set_lcd_mode(OAM); } } VBlank => self.set_lcd_mode(OAM), OAM => self.set_lcd_mode(Transfer), Transfer => self.set_lcd_mode(HBlank), } } self.mode_counter -= 1; self.line_counter -= 1; Ok(()) } pub fn new(mem: Rc<RefCell<Memory>>) -> Self { Self { mem, pixel_data: [0; BYTES_PER_SCREEN], mode_counter: TICKS_OAM, line_counter: TICKS_PER_LINE, } } fn lcd_mode(&self) -> LCDMode { let stat = self.mem.borrow()[IORegister::STAT]; let mode = stat & 0b0000_0011; use LCDMode::*; match mode { 0 => HBlank, 1 => VBlank, 2 => OAM, 3 => Transfer, _ => panic!("This should never happen."), } } fn set_lcd_mode(&mut self, mode: LCDMode)
fn render_line(&mut self) { let mem = self.mem.borrow(); let y = mem[IORegister::LY]; if y < SCREEN_HEIGHT { // Draw current line of background. let lcdc = mem[IORegister::LCDC]; let (tile_data_origin, signed_tile_indices) = if (lcdc & 0b0001_0000) != 0 { (0x8000, false) } else { (0x9000, true) }; let bg_tile_map_origin = if (lcdc & 0b0000_1000) != 0 { 0x9C00 } else { 0x9800 }; let scx = mem[IORegister::SCX]; let scy = mem[IORegister::SCY]; let scrolled_y = y.wrapping_add(scy); for x in 0..SCREEN_WIDTH { let scrolled_x = x.wrapping_add(scx); let tile_x = u16::from(scrolled_x / PIXELS_PER_TILE); let tile_y = u16::from(scrolled_y / PIXELS_PER_TILE); let tile_offset = tile_y * TILES_PER_BACKGROUND + tile_x; // Coordinate inside current tile. let in_tile_x = scrolled_x % PIXELS_PER_TILE; let in_tile_y = scrolled_y % PIXELS_PER_TILE; let tile_index = mem[bg_tile_map_origin + tile_offset]; let tile_data = if signed_tile_indices { let offset = i32::from(tile_index as i8) * i32::from(BYTES_PER_TILE); (i32::from(tile_data_origin) + offset) as u16 } else { tile_data_origin + u16::from(tile_index) * BYTES_PER_TILE }; // Get bytes containing pixel data. let pixel_data = ( mem[tile_data + u16::from(in_tile_y) * 2], mem[tile_data + u16::from(in_tile_y) * 2 + 1], ); let mask = 0x80 >> in_tile_x; let shade = if (pixel_data.1 & mask) == 0 { if (pixel_data.0 & mask) == 0 { // 0 mem[IORegister::BGP] & 0b0000_0011 } else { // 1 (mem[IORegister::BGP] & 0b0000_1100) >> 2 } } else if (pixel_data.0 & mask) == 0 { // 2 (mem[IORegister::BGP] & 0b0011_0000) >> 4 } else { // 3 (mem[IORegister::BGP] & 0b1100_0000) >> 6 }; let pixel_value = self.shade_to_rgb(shade); let index = y as usize * BYTES_PER_LINE + x as usize * BYTES_PER_PIXEL; self.pixel_data[index] = pixel_value; self.pixel_data[index + 1] = pixel_value; self.pixel_data[index + 2] = pixel_value; } } } pub fn pixel_data(&mut self) -> &[u8] { &self.pixel_data } /// Convert 2-bit shade to 8-bit for use in RGB. fn shade_to_rgb(&self, shade: u8) -> u8 { match shade { 0 => 255, 1 => 170, 2 => 85, 3 => 0, _ => panic!("Only values between 0 and 3 are valid shades."), } } } pub enum LCDMode { HBlank, VBlank, OAM, Transfer, }
{ use LCDMode::*; let mode_mask = match mode { HBlank => { let mut mem = self.mem.borrow_mut(); if (mem[IORegister::STAT] & 0b0000_1000) != 0 { mem[IORegister::IF] |= 0b0000_0010; } self.mode_counter = TICKS_HBLANK; 0b0000_0000 } VBlank => { let mut mem = self.mem.borrow_mut(); if (mem[IORegister::STAT] & 0b0001_0000) != 0 { mem[IORegister::IF] |= 0b0000_0010; } mem[IORegister::IF] |= 0b0000_0001; self.mode_counter = TICKS_VBLANK; 0b0000_0001 } OAM => { let mut mem = self.mem.borrow_mut(); if (mem[IORegister::STAT] & 0b0010_0000) != 0 { mem[IORegister::IF] |= 0b0000_0010; } self.mode_counter = TICKS_OAM; 0b0000_0010 } Transfer => { self.render_line(); self.mode_counter = TICKS_TRANSFER; 0b0000_0011 } }; let mut mem = self.mem.borrow_mut(); let stat_without_mode = mem[IORegister::STAT] & 0b1111_1100; mem[IORegister::STAT] = stat_without_mode | mode_mask; }
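
// A hedged standalone sketch (not part of video.rs above) of the BGP palette
// lookup that render_line performs inline: a 2-bit color number selects one
// of four 2-bit shade fields packed into the BGP register.
fn bgp_shade(bgp: u8, color: u8) -> u8 {
    assert!(color < 4, "color numbers are 2 bits");
    (bgp >> (color * 2)) & 0b0000_0011
}

fn main() {
    // With the common palette 0b1110_0100, color N maps to shade N.
    let bgp: u8 = 0b1110_0100;
    for color in 0u8..4 {
        let shade = bgp_shade(bgp, color);
        // Mirrors Video::shade_to_rgb: shade 0 is lightest (255), 3 darkest (0).
        let rgb = match shade {
            0 => 255,
            1 => 170,
            2 => 85,
            _ => 0,
        };
        println!("color {} -> shade {} -> rgb {}", color, shade, rgb);
    }
}
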
moduleloadcallback.d.ts
/// <reference path="../../../globals.d.ts"/> declare module 'goog:goog.module.ModuleLoadCallback' { import alias = goog.module.ModuleLoadCallback; export default alias; } declare namespace goog.module { /** * Class used to encapsulate the callbacks to be called when a module loads. * @final */ class
extends __ModuleLoadCallback {} abstract class __ModuleLoadCallback { /** * @param {Function} fn Callback function. * @param {Object=} opt_handler Optional handler under whose scope to execute * the callback. */ constructor(fn: Function, opt_handler?: Object); /** * Callback function. * @type {Function} * @private */ private fn_: Function; /** * Optional handler under whose scope to execute the callback. * @type {Object|undefined} * @private */ private handler_: Object|undefined; /** * Completes the operation and calls the callback function if appropriate. * @param {*} context The module context. */ execute(context: any): void; /** * Abort the callback, but not the actual module load. */ abort(): void; } }
ModuleLoadCallback
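
// A hedged usage sketch (not part of the declaration file above), assuming a
// Closure-style build in which the aliased module id declared above is
// resolvable.
import ModuleLoadCallback from 'goog:goog.module.ModuleLoadCallback';

const handler = {name: 'demo'};

// Wrap a callback so it runs with `handler` as its scope once the module
// context is available.
const cb = new ModuleLoadCallback(function(this: typeof handler, context: unknown) {
  console.log(this.name, 'received module context:', context);
}, handler);

cb.execute({loaded: true});  // invokes the callback with the module context
// cb.abort();               // would cancel the callback, not the module load
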
cbctl1.rs
#[doc = "Register `CBCTL1` reader"] pub struct R(crate::R<CBCTL1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<CBCTL1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<CBCTL1_SPEC>> for R { fn from(reader: crate::R<CBCTL1_SPEC>) -> Self { R(reader) } } #[doc = "Register `CBCTL1` writer"] pub struct W(crate::W<CBCTL1_SPEC>); impl core::ops::Deref for W { type Target = crate::W<CBCTL1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<CBCTL1_SPEC>> for W { fn from(writer: crate::W<CBCTL1_SPEC>) -> Self { W(writer) } } #[doc = "Field `CBOUT` reader - Comp. B Output"] pub struct CBOUT_R(crate::FieldReader<bool, bool>); impl CBOUT_R { pub(crate) fn new(bits: bool) -> Self { CBOUT_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBOUT_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBOUT` writer - Comp. B Output"] pub struct CBOUT_W<'a> { w: &'a mut W, } impl<'a> CBOUT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u16 & 0x01); self.w } } #[doc = "Field `CBOUTPOL` reader - Comp. B Output Polarity"] pub struct CBOUTPOL_R(crate::FieldReader<bool, bool>); impl CBOUTPOL_R { pub(crate) fn new(bits: bool) -> Self { CBOUTPOL_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBOUTPOL_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBOUTPOL` writer - Comp. B Output Polarity"] pub struct CBOUTPOL_W<'a> { w: &'a mut W, } impl<'a> CBOUTPOL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u16 & 0x01) << 1); self.w } } #[doc = "Field `CBF` reader - Comp. B Enable Output Filter"] pub struct CBF_R(crate::FieldReader<bool, bool>); impl CBF_R { pub(crate) fn new(bits: bool) -> Self { CBF_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBF_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBF` writer - Comp. B Enable Output Filter"] pub struct CBF_W<'a> { w: &'a mut W, } impl<'a> CBF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u16 & 0x01) << 2); self.w } } #[doc = "Field `CBIES` reader - Comp. 
B Interrupt Edge Select"] pub struct CBIES_R(crate::FieldReader<bool, bool>); impl CBIES_R { pub(crate) fn new(bits: bool) -> Self { CBIES_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBIES_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBIES` writer - Comp. B Interrupt Edge Select"] pub struct CBIES_W<'a> { w: &'a mut W, } impl<'a> CBIES_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u16 & 0x01) << 3); self.w } } #[doc = "Field `CBSHORT` reader - Comp. B Input Short"] pub struct CBSHORT_R(crate::FieldReader<bool, bool>); impl CBSHORT_R { pub(crate) fn new(bits: bool) -> Self { CBSHORT_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBSHORT_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBSHORT` writer - Comp. B Input Short"] pub struct CBSHORT_W<'a> { w: &'a mut W, } impl<'a> CBSHORT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u16 & 0x01) << 4); self.w } } #[doc = "Field `CBEX` reader - Comp. B Exchange Inputs"] pub struct CBEX_R(crate::FieldReader<bool, bool>); impl CBEX_R { pub(crate) fn new(bits: bool) -> Self { CBEX_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBEX_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBEX` writer - Comp. B Exchange Inputs"] pub struct CBEX_W<'a> { w: &'a mut W, } impl<'a> CBEX_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u16 & 0x01) << 5); self.w } } #[doc = "Comp. B Filter delay Bit 0\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CBFDLY_A { #[doc = "0: Comp. B Filter delay 0 : 450ns"] CBFDLY_0 = 0, #[doc = "1: Comp. B Filter delay 1 : 900ns"] CBFDLY_1 = 1, #[doc = "2: Comp. B Filter delay 2 : 1800ns"] CBFDLY_2 = 2, #[doc = "3: Comp. B Filter delay 3 : 3600ns"] CBFDLY_3 = 3, } impl From<CBFDLY_A> for u8 { #[inline(always)] fn from(variant: CBFDLY_A) -> Self { variant as _ } } #[doc = "Field `CBFDLY` reader - Comp. 
B Filter delay Bit 0"] pub struct CBFDLY_R(crate::FieldReader<u8, CBFDLY_A>); impl CBFDLY_R { pub(crate) fn new(bits: u8) -> Self { CBFDLY_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CBFDLY_A { match self.bits { 0 => CBFDLY_A::CBFDLY_0, 1 => CBFDLY_A::CBFDLY_1, 2 => CBFDLY_A::CBFDLY_2, 3 => CBFDLY_A::CBFDLY_3, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `CBFDLY_0`"] #[inline(always)] pub fn is_cbfdly_0(&self) -> bool { **self == CBFDLY_A::CBFDLY_0 } #[doc = "Checks if the value of the field is `CBFDLY_1`"] #[inline(always)] pub fn is_cbfdly_1(&self) -> bool { **self == CBFDLY_A::CBFDLY_1 } #[doc = "Checks if the value of the field is `CBFDLY_2`"] #[inline(always)] pub fn is_cbfdly_2(&self) -> bool { **self == CBFDLY_A::CBFDLY_2 } #[doc = "Checks if the value of the field is `CBFDLY_3`"] #[inline(always)] pub fn is_cbfdly_3(&self) -> bool { **self == CBFDLY_A::CBFDLY_3 }
type Target = crate::FieldReader<u8, CBFDLY_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBFDLY` writer - Comp. B Filter delay Bit 0"] pub struct CBFDLY_W<'a> { w: &'a mut W, } impl<'a> CBFDLY_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CBFDLY_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Comp. B Filter delay 0 : 450ns"] #[inline(always)] pub fn cbfdly_0(self) -> &'a mut W { self.variant(CBFDLY_A::CBFDLY_0) } #[doc = "Comp. B Filter delay 1 : 900ns"] #[inline(always)] pub fn cbfdly_1(self) -> &'a mut W { self.variant(CBFDLY_A::CBFDLY_1) } #[doc = "Comp. B Filter delay 2 : 1800ns"] #[inline(always)] pub fn cbfdly_2(self) -> &'a mut W { self.variant(CBFDLY_A::CBFDLY_2) } #[doc = "Comp. B Filter delay 3 : 3600ns"] #[inline(always)] pub fn cbfdly_3(self) -> &'a mut W { self.variant(CBFDLY_A::CBFDLY_3) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 6)) | ((value as u16 & 0x03) << 6); self.w } } #[doc = "Comp. B Power Mode Bit 0\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CBPWRMD_A { #[doc = "0: Comp. B Power Mode 0 : High speed"] CBPWRMD_0 = 0, #[doc = "1: Comp. B Power Mode 1 : Normal"] CBPWRMD_1 = 1, #[doc = "2: Comp. B Power Mode 2 : Ultra-Low"] CBPWRMD_2 = 2, #[doc = "3: Comp. B Power Mode 3 : Reserved"] CBPWRMD_3 = 3, } impl From<CBPWRMD_A> for u8 { #[inline(always)] fn from(variant: CBPWRMD_A) -> Self { variant as _ } } #[doc = "Field `CBPWRMD` reader - Comp. B Power Mode Bit 0"] pub struct CBPWRMD_R(crate::FieldReader<u8, CBPWRMD_A>); impl CBPWRMD_R { pub(crate) fn new(bits: u8) -> Self { CBPWRMD_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CBPWRMD_A { match self.bits { 0 => CBPWRMD_A::CBPWRMD_0, 1 => CBPWRMD_A::CBPWRMD_1, 2 => CBPWRMD_A::CBPWRMD_2, 3 => CBPWRMD_A::CBPWRMD_3, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `CBPWRMD_0`"] #[inline(always)] pub fn is_cbpwrmd_0(&self) -> bool { **self == CBPWRMD_A::CBPWRMD_0 } #[doc = "Checks if the value of the field is `CBPWRMD_1`"] #[inline(always)] pub fn is_cbpwrmd_1(&self) -> bool { **self == CBPWRMD_A::CBPWRMD_1 } #[doc = "Checks if the value of the field is `CBPWRMD_2`"] #[inline(always)] pub fn is_cbpwrmd_2(&self) -> bool { **self == CBPWRMD_A::CBPWRMD_2 } #[doc = "Checks if the value of the field is `CBPWRMD_3`"] #[inline(always)] pub fn is_cbpwrmd_3(&self) -> bool { **self == CBPWRMD_A::CBPWRMD_3 } } impl core::ops::Deref for CBPWRMD_R { type Target = crate::FieldReader<u8, CBPWRMD_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBPWRMD` writer - Comp. B Power Mode Bit 0"] pub struct CBPWRMD_W<'a> { w: &'a mut W, } impl<'a> CBPWRMD_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CBPWRMD_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Comp. B Power Mode 0 : High speed"] #[inline(always)] pub fn cbpwrmd_0(self) -> &'a mut W { self.variant(CBPWRMD_A::CBPWRMD_0) } #[doc = "Comp. B Power Mode 1 : Normal"] #[inline(always)] pub fn cbpwrmd_1(self) -> &'a mut W { self.variant(CBPWRMD_A::CBPWRMD_1) } #[doc = "Comp. B Power Mode 2 : Ultra-Low"] #[inline(always)] pub fn cbpwrmd_2(self) -> &'a mut W { self.variant(CBPWRMD_A::CBPWRMD_2) } #[doc = "Comp. 
B Power Mode 3 : Reserved"] #[inline(always)] pub fn cbpwrmd_3(self) -> &'a mut W { self.variant(CBPWRMD_A::CBPWRMD_3) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | ((value as u16 & 0x03) << 8); self.w } } #[doc = "Field `CBON` reader - Comp. B enable"] pub struct CBON_R(crate::FieldReader<bool, bool>); impl CBON_R { pub(crate) fn new(bits: bool) -> Self { CBON_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBON_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBON` writer - Comp. B enable"] pub struct CBON_W<'a> { w: &'a mut W, } impl<'a> CBON_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u16 & 0x01) << 10); self.w } } #[doc = "Field `CBMRVL` reader - Comp. B CBMRV Level"] pub struct CBMRVL_R(crate::FieldReader<bool, bool>); impl CBMRVL_R { pub(crate) fn new(bits: bool) -> Self { CBMRVL_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBMRVL_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBMRVL` writer - Comp. B CBMRV Level"] pub struct CBMRVL_W<'a> { w: &'a mut W, } impl<'a> CBMRVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | ((value as u16 & 0x01) << 11); self.w } } #[doc = "Field `CBMRVS` reader - Comp. B Output selects between VREF0 or VREF1"] pub struct CBMRVS_R(crate::FieldReader<bool, bool>); impl CBMRVS_R { pub(crate) fn new(bits: bool) -> Self { CBMRVS_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for CBMRVS_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `CBMRVS` writer - Comp. B Output selects between VREF0 or VREF1"] pub struct CBMRVS_W<'a> { w: &'a mut W, } impl<'a> CBMRVS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | ((value as u16 & 0x01) << 12); self.w } } impl R { #[doc = "Bit 0 - Comp. B Output"] #[inline(always)] pub fn cbout(&self) -> CBOUT_R { CBOUT_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Comp. B Output Polarity"] #[inline(always)] pub fn cboutpol(&self) -> CBOUTPOL_R { CBOUTPOL_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Comp. B Enable Output Filter"] #[inline(always)] pub fn cbf(&self) -> CBF_R { CBF_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Comp. 
B Interrupt Edge Select"] #[inline(always)] pub fn cbies(&self) -> CBIES_R { CBIES_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - Comp. B Input Short"] #[inline(always)] pub fn cbshort(&self) -> CBSHORT_R { CBSHORT_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - Comp. B Exchange Inputs"] #[inline(always)] pub fn cbex(&self) -> CBEX_R { CBEX_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bits 6:7 - Comp. B Filter delay Bit 0"] #[inline(always)] pub fn cbfdly(&self) -> CBFDLY_R { CBFDLY_R::new(((self.bits >> 6) & 0x03) as u8) } #[doc = "Bits 8:9 - Comp. B Power Mode Bit 0"] #[inline(always)] pub fn cbpwrmd(&self) -> CBPWRMD_R { CBPWRMD_R::new(((self.bits >> 8) & 0x03) as u8) } #[doc = "Bit 10 - Comp. B enable"] #[inline(always)] pub fn cbon(&self) -> CBON_R { CBON_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Comp. B CBMRV Level"] #[inline(always)] pub fn cbmrvl(&self) -> CBMRVL_R { CBMRVL_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - Comp. B Output selects between VREF0 or VREF1"] #[inline(always)] pub fn cbmrvs(&self) -> CBMRVS_R { CBMRVS_R::new(((self.bits >> 12) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Comp. B Output"] #[inline(always)] pub fn cbout(&mut self) -> CBOUT_W { CBOUT_W { w: self } } #[doc = "Bit 1 - Comp. B Output Polarity"] #[inline(always)] pub fn cboutpol(&mut self) -> CBOUTPOL_W { CBOUTPOL_W { w: self } } #[doc = "Bit 2 - Comp. B Enable Output Filter"] #[inline(always)] pub fn cbf(&mut self) -> CBF_W { CBF_W { w: self } } #[doc = "Bit 3 - Comp. B Interrupt Edge Select"] #[inline(always)] pub fn cbies(&mut self) -> CBIES_W { CBIES_W { w: self } } #[doc = "Bit 4 - Comp. B Input Short"] #[inline(always)] pub fn cbshort(&mut self) -> CBSHORT_W { CBSHORT_W { w: self } } #[doc = "Bit 5 - Comp. B Exchange Inputs"] #[inline(always)] pub fn cbex(&mut self) -> CBEX_W { CBEX_W { w: self } } #[doc = "Bits 6:7 - Comp. B Filter delay Bit 0"] #[inline(always)] pub fn cbfdly(&mut self) -> CBFDLY_W { CBFDLY_W { w: self } } #[doc = "Bits 8:9 - Comp. B Power Mode Bit 0"] #[inline(always)] pub fn cbpwrmd(&mut self) -> CBPWRMD_W { CBPWRMD_W { w: self } } #[doc = "Bit 10 - Comp. B enable"] #[inline(always)] pub fn cbon(&mut self) -> CBON_W { CBON_W { w: self } } #[doc = "Bit 11 - Comp. B CBMRV Level"] #[inline(always)] pub fn cbmrvl(&mut self) -> CBMRVL_W { CBMRVL_W { w: self } } #[doc = "Bit 12 - Comp. B Output selects between VREF0 or VREF1"] #[inline(always)] pub fn cbmrvs(&mut self) -> CBMRVS_W { CBMRVS_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u16) -> &mut Self { self.0.bits(bits); self } } #[doc = "Comparator B Control Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cbctl1](index.html) module"] pub struct CBCTL1_SPEC; impl crate::RegisterSpec for CBCTL1_SPEC { type Ux = u16; } #[doc = "`read()` method returns [cbctl1::R](R) reader structure"] impl crate::Readable for CBCTL1_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [cbctl1::W](W) writer structure"] impl crate::Writable for CBCTL1_SPEC { type Writer = W; } #[doc = "`reset()` method sets CBCTL1 to value 0"] impl crate::Resettable for CBCTL1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
} impl core::ops::Deref for CBFDLY_R {
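As an aside, the generated CBCTL1 module above is driven through svd2rust's read/modify/write API named in its trailing doc comment. A minimal usage sketch, assuming the module lives in a generated `pac` device crate that exposes Comparator B under a hypothetical `COMP_B` peripheral (both names are illustrative, not taken from the source):

// Sketch only: `pac` and `COMP_B` are assumed names for the generated
// device crate and peripheral instance.
fn configure_comparator(p: &pac::Peripherals) -> bool {
    // Read-modify-write: Normal power mode (CBPWRMD = 1), 1800ns filter
    // delay (CBFDLY = 2), output filter on, comparator enabled.
    p.COMP_B.cbctl1.modify(|_, w| {
        w.cbpwrmd().cbpwrmd_1()
         .cbfdly().cbfdly_2()
         .cbf().set_bit()
         .cbon().set_bit()
    });
    // Poll the comparator output bit through the generated reader.
    p.COMP_B.cbctl1.read().cbout().bit_is_set()
}

Each writer proxy (CBPWRMD_W, CBF_W, and so on) hands back &mut W, which is what lets the field calls chain inside a single modify closure.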
aggregator_test.go
// Copyright 2018 The Cockroach Authors. // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. // // As of the Change Date specified in that file, in accordance with // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. package exec import ( "context" "fmt" "testing" "github.com/cockroachdb/apd" "github.com/cockroachdb/cockroach/pkg/sql/distsqlpb" "github.com/cockroachdb/cockroach/pkg/sql/exec/coldata" "github.com/cockroachdb/cockroach/pkg/sql/exec/types" "github.com/cockroachdb/cockroach/pkg/util/randutil" ) var ( defaultGroupCols = []uint32{0} defaultAggCols = [][]uint32{{1}} defaultAggFns = []distsqlpb.AggregatorSpec_Func{distsqlpb.AggregatorSpec_SUM} defaultColTyps = []types.T{types.Int64, types.Int64} ) type aggregatorTestCase struct { // colTypes, aggFns, groupCols, and aggCols will be set to their default // values before running a test if nil. colTypes []types.T aggFns []distsqlpb.AggregatorSpec_Func groupCols []uint32 aggCols [][]uint32 input tuples expected tuples // {output}BatchSize if not 0 are passed in to NewOrderedAggregator to // divide input/output batches. batchSize int outputBatchSize int name string // convToDecimal will convert any float64s to apd.Decimals. If a string is // encountered, a best effort is made to convert that string to an // apd.Decimal. convToDecimal bool } // aggType is a helper struct that allows tests to test both the ordered and // hash aggregators at the same time. type aggType struct { new func(input Operator, colTypes []types.T, aggFns []distsqlpb.AggregatorSpec_Func, groupCols []uint32, aggCols [][]uint32, ) (Operator, error) name string } var aggTypes = []aggType{ { new: NewHashAggregator, name: "hash", }, { new: NewOrderedAggregator, name: "ordered", }, } func (tc *aggregatorTestCase) init() error { if tc.convToDecimal { for _, tuples := range []tuples{tc.input, tc.expected} { for _, tuple := range tuples { for i, e := range tuple { switch v := e.(type) { case float64: d := &apd.Decimal{} d, err := d.SetFloat64(v) if err != nil { return err } tuple[i] = *d case string: d := &apd.Decimal{} d, _, err := d.SetString(v) if err != nil { // If there was an error converting the string to decimal, just // leave the datum as is. continue } tuple[i] = *d } } } } } if tc.groupCols == nil { tc.groupCols = defaultGroupCols } if tc.aggFns == nil { tc.aggFns = defaultAggFns } if tc.aggCols == nil { tc.aggCols = defaultAggCols } if tc.colTypes == nil { tc.colTypes = defaultColTyps } if tc.batchSize == 0 { tc.batchSize = coldata.BatchSize } if tc.outputBatchSize == 0 { tc.outputBatchSize = coldata.BatchSize } return nil } func TestAggregatorOneFunc(t *testing.T) { testCases := []aggregatorTestCase{ { input: tuples{ {0, 1}, }, expected: tuples{ {1}, }, name: "OneTuple", outputBatchSize: 4, }, { input: tuples{ {0, 1}, {0, 1}, }, expected: tuples{ {2}, }, name: "OneGroup", }, { input: tuples{ {0, 1}, {0, 0}, {0, 1}, {1, 4}, {2, 5}, }, expected: tuples{ {2}, {4}, {5}, }, batchSize: 2, name: "MultiGroup", }, { input: tuples{ {0, 1}, {0, 2}, {0, 3}, {1, 4}, {1, 5}, },
expected: tuples{ {6}, {9}, }, batchSize: 1, name: "CarryBetweenInputBatches", }, { input: tuples{ {0, 1}, {0, 2}, {0, 3}, {0, 4}, {1, 5}, {2, 6}, }, expected: tuples{ {10}, {5}, {6}, }, batchSize: 2, outputBatchSize: 1, name: "CarryBetweenOutputBatches", }, { input: tuples{ {0, 1}, {0, 1}, {1, 2}, {2, 3}, {2, 3}, {3, 4}, {3, 4}, {4, 5}, {5, 6}, {6, 7}, {7, 8}, }, expected: tuples{ {2}, {2}, {6}, {8}, {5}, {6}, {7}, {8}, }, batchSize: 4, outputBatchSize: 1, name: "CarryBetweenInputAndOutputBatches", }, { input: tuples{ {0, 1}, {0, 2}, {0, 3}, {0, 4}, }, expected: tuples{ {10}, }, batchSize: 1, outputBatchSize: 1, name: "NoGroupingCols", groupCols: []uint32{}, }, { input: tuples{ {1, 0, 0}, {2, 0, 0}, {3, 0, 0}, {4, 0, 0}, }, expected: tuples{ {10}, }, batchSize: 1, outputBatchSize: 1, name: "UnusedInputColumns", colTypes: []types.T{types.Int64, types.Int64, types.Int64}, groupCols: []uint32{1, 2}, aggCols: [][]uint32{{0}}, }, } // Run tests with deliberate batch sizes and no selection vectors. for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { if err := tc.init(); err != nil { t.Fatal(err) } tupleSource := newOpTestInput(uint16(tc.batchSize), tc.input) a, err := NewOrderedAggregator( tupleSource, tc.colTypes, tc.aggFns, tc.groupCols, tc.aggCols, ) if err != nil { t.Fatal(err) } out := newOpTestOutput(a, []int{0}, tc.expected) // Explicitly reinitialize the aggregator with the given output batch // size. a.(*orderedAggregator).initWithBatchSize(tc.batchSize, tc.outputBatchSize) if err := out.VerifyAnyOrder(); err != nil { t.Fatal(err) } // Run randomized tests on this test case. t.Run(fmt.Sprintf("Randomized"), func(t *testing.T) { for _, agg := range aggTypes { t.Run(agg.name, func(t *testing.T) { runTests(t, []tuples{tc.input}, func(t *testing.T, input []Operator) { a, err := agg.new( input[0], tc.colTypes, tc.aggFns, tc.groupCols, tc.aggCols, ) if err != nil { t.Fatal(err) } out := newOpTestOutput(a, []int{0}, tc.expected) if err := out.VerifyAnyOrder(); err != nil { t.Fatal(err) } }) }) } }) }) } } func TestAggregatorMultiFunc(t *testing.T) { testCases := []aggregatorTestCase{ { aggFns: []distsqlpb.AggregatorSpec_Func{distsqlpb.AggregatorSpec_SUM, distsqlpb.AggregatorSpec_SUM}, aggCols: [][]uint32{ {2}, {1}, }, input: tuples{ {0, 1, 2}, {0, 1, 2}, }, colTypes: []types.T{types.Int64, types.Int64, types.Int64}, expected: tuples{ {4, 2}, }, name: "OutputOrder", }, { aggFns: []distsqlpb.AggregatorSpec_Func{distsqlpb.AggregatorSpec_SUM, distsqlpb.AggregatorSpec_SUM}, aggCols: [][]uint32{ {2}, {1}, }, input: tuples{ {0, 1, 1.3}, {0, 1, 1.6}, {0, 1, 0.5}, {1, 1, 1.2}, }, colTypes: []types.T{types.Int64, types.Int64, types.Decimal}, expected: tuples{ {3.4, 3}, {1.2, 1}, }, name: "SumMultiType", convToDecimal: true, }, { aggFns: []distsqlpb.AggregatorSpec_Func{distsqlpb.AggregatorSpec_AVG, distsqlpb.AggregatorSpec_SUM}, aggCols: [][]uint32{ {1}, {1}, }, input: tuples{ {0, 1.1}, {0, 1.2}, {0, 2.3}, {1, 6.21}, {1, 2.43}, }, colTypes: []types.T{types.Int64, types.Decimal}, expected: tuples{ {"1.5333333333333333333", 4.6}, {4.32, 8.64}, }, name: "AvgSumSingleInputBatch", convToDecimal: true, }, } for _, agg := range aggTypes { for _, tc := range testCases { t.Run(fmt.Sprintf("%s/%s/Randomized", agg.name, tc.name), func(t *testing.T) { if err := tc.init(); err != nil { t.Fatal(err) } runTests(t, []tuples{tc.input}, func(t *testing.T, input []Operator) { a, err := agg.new( input[0], tc.colTypes, tc.aggFns, tc.groupCols, tc.aggCols, ) if err != nil { t.Fatal(err) } out := 
newOpTestOutput(a, []int{0, 1}, tc.expected) if err := out.VerifyAnyOrder(); err != nil { t.Fatal(err) } }) }) } } } func TestAggregatorAllFunctions(t *testing.T) { testCases := []aggregatorTestCase{ { aggFns: []distsqlpb.AggregatorSpec_Func{ distsqlpb.AggregatorSpec_ANY_NOT_NULL, distsqlpb.AggregatorSpec_AVG, distsqlpb.AggregatorSpec_COUNT_ROWS, distsqlpb.AggregatorSpec_COUNT, distsqlpb.AggregatorSpec_SUM, distsqlpb.AggregatorSpec_MIN, distsqlpb.AggregatorSpec_MAX, }, aggCols: [][]uint32{{0}, {1}, {}, {1}, {2}, {2}, {2}}, colTypes: []types.T{types.Int64, types.Decimal, types.Int64}, input: tuples{ {0, 3.1, 2}, {0, 1.1, 3}, {1, 1.1, 1}, {1, 4.1, 0}, {2, 1.1, 1}, {3, 4.1, 0}, {3, 5.1, 0}, }, expected: tuples{ {0, 2.1, 2, 2, 5, 2, 3}, {1, 2.6, 2, 2, 1, 0, 1}, {2, 1.1, 1, 1, 1, 1, 1}, {3, 4.6, 2, 2, 0, 0, 0}, }, convToDecimal: true, }, } for _, agg := range aggTypes { for i, tc := range testCases { t.Run(fmt.Sprintf("%s/%d", agg.name, i), func(t *testing.T) { if err := tc.init(); err != nil { t.Fatal(err) } runTests(t, []tuples{tc.input}, func(t *testing.T, input []Operator) { a, err := agg.new(input[0], tc.colTypes, tc.aggFns, tc.groupCols, tc.aggCols) if err != nil { t.Fatal(err) } out := newOpTestOutput(a, []int{0, 1, 2, 3, 4, 5, 6}, tc.expected) if err := out.Verify(); err != nil { t.Fatal(err) } }) }) } } } func TestAggregatorRandomCountSum(t *testing.T) { // This test sums and counts random inputs, keeping track of the expected // results to make sure the aggregations are correct. rng, _ := randutil.NewPseudoRand() ctx := context.Background() for _, groupSize := range []int{1, 2, coldata.BatchSize / 4, coldata.BatchSize / 2} { for _, numInputBatches := range []int{1, 2, 64} { for _, agg := range aggTypes { t.Run(fmt.Sprintf("%s/groupSize=%d/numInputBatches=%d", agg.name, groupSize, numInputBatches), func(t *testing.T) { nTuples := coldata.BatchSize * numInputBatches typs := []types.T{types.Int64, types.Int64, types.Int64} cols := []coldata.Vec{ coldata.NewMemColumn(typs[0], nTuples), coldata.NewMemColumn(typs[1], nTuples), coldata.NewMemColumn(typs[2], nTuples)} groups, sumCol, countColNulls := cols[0].Int64(), cols[1].Int64(), cols[2].Nulls() var expRowCounts, expSums, expCounts []int64 curGroup := -1 for i := range groups { if i%groupSize == 0 { expRowCounts = append(expRowCounts, int64(groupSize)) expSums = append(expSums, 0) expCounts = append(expCounts, 0) curGroup++ } sumCol[i] = rng.Int63() % 1024 expSums[len(expSums)-1] += sumCol[i] if rng.Float64() < 0.1 { countColNulls.SetNull(uint16(i)) } else { expCounts[len(expCounts)-1]++ } groups[i] = int64(curGroup) } source := newChunkingBatchSource(typs, cols, uint64(nTuples)) a, err := agg.new( source, typs, []distsqlpb.AggregatorSpec_Func{ distsqlpb.AggregatorSpec_COUNT_ROWS, distsqlpb.AggregatorSpec_SUM_INT, distsqlpb.AggregatorSpec_COUNT}, []uint32{0}, [][]uint32{{}, {1}, {2}}, ) if err != nil { t.Fatal(err) } a.Init() // Exhaust aggregator until all batches have been read. 
i := 0 tupleIdx := 0 for b := a.Next(ctx); b.Length() != 0; b = a.Next(ctx) { rowCountCol := b.ColVec(0).Int64() sumCol := b.ColVec(1).Int64() countCol := b.ColVec(2).Int64() for j := uint16(0); j < b.Length(); j++ { rowCount := rowCountCol[j] sum := sumCol[j] count := countCol[j] expRowCount := expRowCounts[tupleIdx] if rowCount != expRowCount { t.Fatalf("Found rowCount %d, expected %d, idx %d of batch %d", rowCount, expRowCount, j, i) } expSum := expSums[tupleIdx] if sum != expSum { t.Fatalf("Found sum %d, expected %d, idx %d of batch %d", sum, expSum, j, i) } expCount := expCounts[tupleIdx] if count != expCount { t.Fatalf("Found count %d, expected %d, idx %d of batch %d", count, expCount, j, i) } tupleIdx++ } i++ } totalInputRows := numInputBatches * coldata.BatchSize nOutputRows := totalInputRows / groupSize expBatches := (nOutputRows / coldata.BatchSize) if nOutputRows%coldata.BatchSize != 0 { expBatches++ } if i != expBatches { t.Fatalf("expected %d batches, found %d", expBatches, i) } }) } } } } func BenchmarkAggregator(b *testing.B) { rng, _ := randutil.NewPseudoRand() ctx := context.Background() for _, aggFn := range []distsqlpb.AggregatorSpec_Func{ distsqlpb.AggregatorSpec_ANY_NOT_NULL, distsqlpb.AggregatorSpec_AVG, distsqlpb.AggregatorSpec_COUNT_ROWS, distsqlpb.AggregatorSpec_COUNT, distsqlpb.AggregatorSpec_SUM, distsqlpb.AggregatorSpec_MIN, distsqlpb.AggregatorSpec_MAX, } { fName := distsqlpb.AggregatorSpec_Func_name[int32(aggFn)] b.Run(fName, func(b *testing.B) { for _, agg := range aggTypes { for _, typ := range []types.T{types.Int64, types.Decimal} { for _, groupSize := range []int{1, 2, coldata.BatchSize / 2, coldata.BatchSize} { for _, nullProbability := range []float64{0, 0.3} { for _, numInputBatches := range []int{64} { b.Run(fmt.Sprintf("%s/%s/groupSize=%d/nullProbability=%.1f/numInputBatches=%d", agg.name, typ.String(), groupSize, nullProbability, numInputBatches), func(b *testing.B) { colTypes := []types.T{types.Int64, typ} nTuples := numInputBatches * coldata.BatchSize cols := []coldata.Vec{coldata.NewMemColumn(types.Int64, nTuples), coldata.NewMemColumn(typ, nTuples)} groups := cols[0].Int64() curGroup := -1 for i := 0; i < nTuples; i++ { if groupSize == 1 || i%groupSize == 0 { curGroup++ } groups[i] = int64(curGroup) } nulls := cols[1].Nulls() for i := 0; i < nTuples; i++ { if rng.Float64() < nullProbability { nulls.SetNull(uint16(i)) } } switch typ { case types.Int64: vals := cols[1].Int64() for i := range vals { vals[i] = rng.Int63() } case types.Decimal: vals := cols[1].Decimal() for i := range vals { vals[i].SetInt64(rng.Int63()) } } source := newChunkingBatchSource(colTypes, cols, uint64(nTuples)) nCols := 1 if aggFn == distsqlpb.AggregatorSpec_COUNT_ROWS { nCols = 0 } a, err := agg.new( source, colTypes, []distsqlpb.AggregatorSpec_Func{aggFn}, []uint32{0}, [][]uint32{[]uint32{1}[:nCols]}, ) if err != nil { b.Skip() } a.Init() b.ResetTimer() // Only count the int64 column. b.SetBytes(int64(8 * nTuples)) for i := 0; i < b.N; i++ { a.(resetter).reset() source.reset() // Exhaust aggregator until all batches have been read. foundTuples := 0 for b := a.Next(ctx); b.Length() != 0; b = a.Next(ctx) { foundTuples += int(b.Length()) } if foundTuples != nTuples/groupSize { b.Fatalf("Found %d tuples, expected %d", foundTuples, nTuples/groupSize) } } }, ) } } } } } }) } } func TestHashAggregator(t *testing.T) { tcs := []aggregatorTestCase{ { // Test carry between output batches. 
input: tuples{ {0, 1}, {1, 5}, {0, 4}, {0, 2}, {2, 6}, {0, 3}, {0, 7}, }, colTypes: []types.T{types.Int64, types.Int64}, groupCols: []uint32{0}, aggCols: [][]uint32{{1}}, expected: tuples{ {5}, {6}, {17}, }, name: "carryBetweenBatches", }, { // Test a single row input source. input: tuples{ {5}, }, colTypes: []types.T{types.Int64}, groupCols: []uint32{0}, aggCols: [][]uint32{{0}}, expected: tuples{ {5}, }, name: "singleRowInput", }, { // Test bucket collisions. input: tuples{ {0, 3}, {0, 4}, {hashTableBucketSize, 6}, {0, 5}, {hashTableBucketSize, 7}, }, colTypes: []types.T{types.Int64, types.Int64}, groupCols: []uint32{0}, aggCols: [][]uint32{{1}}, expected: tuples{ {12}, {13}, }, name: "bucketCollision", }, { input: tuples{ {0, 1, 1.3}, {0, 1, 1.6}, {0, 1, 0.5}, {1, 1, 1.2}, }, colTypes: []types.T{types.Int64, types.Int64, types.Decimal}, convToDecimal: true, aggFns: []distsqlpb.AggregatorSpec_Func{distsqlpb.AggregatorSpec_SUM, distsqlpb.AggregatorSpec_SUM}, groupCols: []uint32{0, 1}, aggCols: [][]uint32{ {2}, {1}, }, expected: tuples{ {3.4, 3}, {1.2, 1}, }, name: "decimalSums", }, { // Test unused input columns. input: tuples{ {0, 1, 2, 3}, {0, 1, 4, 5}, {1, 1, 3, 7}, {1, 2, 4, 9}, {0, 1, 6, 11}, {1, 2, 6, 13}, }, colTypes: []types.T{types.Int64, types.Int64, types.Int64, types.Int64}, groupCols: []uint32{0, 1}, aggCols: [][]uint32{{3}}, expected: tuples{ {7}, {19}, {22}, }, name: "unusedInputCol", }, } for _, tc := range tcs { if err := tc.init(); err != nil { t.Fatal(err) } runTests(t, []tuples{tc.input}, func(t *testing.T, sources []Operator) { ag, err := NewHashAggregator(sources[0], tc.colTypes, tc.aggFns, tc.groupCols, tc.aggCols) if err != nil { t.Fatal(err) } nOutput := len(tc.aggCols) cols := make([]int, nOutput) for i := 0; i < nOutput; i++ { cols[i] = i } out := newOpTestOutput(ag, cols, tc.expected) if err := out.VerifyAnyOrder(); err != nil { t.Fatal(err) } }) } }
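For readers skimming these cases, the ordered-aggregation contract they pin down is: input arrives sorted by the group columns, one output row is emitted per contiguous group, and running sums carry across input and output batch boundaries. A minimal, self-contained Rust reduction of that loop (illustrative only; the Go tests above exercise the real vectorized implementation):

fn ordered_sum(rows: &[(i64, i64)]) -> Vec<i64> {
    // (current group key, running sum); None until the first row arrives.
    let mut cur: Option<(i64, i64)> = None;
    let mut out = Vec::new();
    for &(group, val) in rows {
        match cur {
            Some((g, sum)) if g == group => cur = Some((g, sum + val)),
            Some((_, sum)) => {
                out.push(sum); // group boundary: emit the finished sum
                cur = Some((group, val));
            }
            None => cur = Some((group, val)),
        }
    }
    if let Some((_, sum)) = cur {
        out.push(sum); // flush the final group
    }
    out
}

fn main() {
    // Mirrors the "MultiGroup" case above: groups 0, 1, 2 sum to 2, 4, 5.
    let input = [(0, 1), (0, 0), (0, 1), (1, 4), (2, 5)];
    assert_eq!(ordered_sum(&input), vec![2, 4, 5]);
}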
seo.js
/** * SEO component that queries for data with * Gatsby's useStaticQuery React hook * * See: https://www.gatsbyjs.org/docs/use-static-query/ */ import React from "react" import PropTypes from "prop-types" import Helmet from "react-helmet" import { useStaticQuery, graphql } from "gatsby" import { withPrefix } from "gatsby" function
({ description, lang, meta, title }) { const { site } = useStaticQuery( graphql` query { site { siteMetadata { title description author } } } ` ) const metaDescription = description || site.siteMetadata.description return ( <Helmet htmlAttributes={{ lang, }} title={title} titleTemplate={`%s | ${site.siteMetadata.title}`} meta={[ { name: `description`, content: metaDescription, }, { property: `og:title`, content: title, }, { property: `og:description`, content: metaDescription, }, { property: `og:type`, content: `website`, }, { name: `twitter:card`, content: `summary`, }, { name: `twitter:creator`, content: site.siteMetadata.author, }, { name: `twitter:title`, content: title, }, { name: `twitter:description`, content: metaDescription, }, ].concat(meta)} link={[ { href: withPrefix("lib/bootstrap/css/bootstrap.min.css"), rel: `stylesheet`, }, { href: withPrefix("lib/font-awesome/css/font-awesome.min.css"), rel: `stylesheet`, }, { href: withPrefix("lib/animate/animate.min.css"), rel: `stylesheet`, }, { href: withPrefix("lib/ionicons/css/ionicons.min.css"), rel: `stylesheet`, }, { href: withPrefix("lib/owlcarousel/assets/owl.carousel.min.css"), rel: `stylesheet`, }, { href: withPrefix("lib/lightbox/css/lightbox.min.css"), rel: `stylesheet`, }, ]} script={[ { src: `https://code.jquery.com/jquery-3.5.1.min.js`, rel: `script`, }, { src: withPrefix("lib/jquery/jquery-migrate.min.js"), rel: `script`, }, { src: withPrefix("lib/popper/popper.min.js"), rel: `script`, }, { src: withPrefix("lib/bootstrap/js/bootstrap.min.js"), rel: `script`, }, { src: withPrefix("lib/easing/easing.min.js"), rel: `script`, }, { src: withPrefix("lib/counterup/jquery.waypoints.min.js"), rel: `script`, }, { src: withPrefix("lib/counterup/jquery.counterup.js"), rel: `script`, }, { src: withPrefix("lib/owlcarousel/owl.carousel.min.js"), rel: `script`, }, { src: withPrefix("lib/lightbox/js/lightbox.min.js"), rel: `script`, }, { src: withPrefix("lib/typed/typed.min.js"), rel: `script`, }, { src: withPrefix("contactform/contactform.js"), rel: `script`, }, { src: withPrefix("js/main.js"), rel: `script`, }, ]} /> ) } SEO.defaultProps = { lang: `en`, meta: [], description: ``, link: ``, } SEO.propTypes = { description: PropTypes.string, lang: PropTypes.string, meta: PropTypes.arrayOf(PropTypes.object), title: PropTypes.string.isRequired, link: PropTypes.string, } export default SEO
SEO
linux_def.rs
// Copyright (c) 2021 Quark Container Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use alloc::slice; pub const SIOCGIFMEM : u64 = 0x891f; pub const SIOCGIFPFLAGS : u64 = 0x8935; pub const SIOCGMIIPHY : u64 = 0x8947; pub const SIOCGMIIREG : u64 = 0x8948; //flags for getrandom(2) pub const _GRND_NONBLOCK : i32 = 0x1; pub const _GRND_RANDOM : i32 = 0x2; // Policies for get_mempolicy(2)/set_mempolicy(2). pub const MPOL_DEFAULT : i32 = 0; pub const MPOL_PREFERRED : i32 = 1; pub const MPOL_BIND : i32 = 2; pub const MPOL_INTERLEAVE : i32 = 3; pub const MPOL_LOCAL : i32 = 4; pub const MPOL_MAX : i32 = 5; // Flags for get_mempolicy(2). pub const MPOL_F_NODE : i32 = 1 << 0; pub const MPOL_F_ADDR : i32 = 1 << 1; pub const MPOL_F_MEMS_ALLOWED : i32 = 1 << 2; // Flags for set_mempolicy(2). pub const MPOL_F_RELATIVE_NODES : i32 = 1 << 14; pub const MPOL_F_STATIC_NODES : i32 = 1 << 15; pub const MPOL_MODE_FLAGS : i32 = MPOL_F_STATIC_NODES | MPOL_F_RELATIVE_NODES; // Flags for mbind(2). pub const MPOL_MF_STRICT : i32 = 1 << 0; pub const MPOL_MF_MOVE : i32 = 1 << 1; pub const MPOL_MF_MOVE_ALL : i32 = 1 << 2; pub const MPOL_MF_VALID : i32 = MPOL_MF_STRICT | MPOL_MF_MOVE | MPOL_MF_MOVE_ALL; #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct LibcSysinfo { pub uptime: i64, pub loads: [u64; 3], pub totalram: u64, pub freeram: u64, pub sharedram: u64, pub bufferram: u64, pub totalswap: u64, pub freeswap: u64, pub procs: u16, pub pad: u16, pub totalhigh: u64, pub freehigh: u64, pub mem_unit: u32, pub _f: [i8; 0], } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct EpollEvent { pub Events: u32, pub FD: i32, pub Pad: i32, } pub struct MRemapType {} impl MRemapType { pub const MREMAP_MAYMOVE: i32 = 1 << 0; pub const MREMAP_FIXED: i32 = 1 << 1; } #[derive(Copy, Clone, Default, Debug)] pub struct Signal(pub i32); impl Signal { pub const SIGHUP: i32 = 1; //Term pub const SIGINT: i32 = 2; //Term pub const SIGQUIT: i32 = 3; //Core pub const SIGILL: i32 = 4; //Core pub const SIGTRAP: i32 = 5; //Core pub const SIGABRT: i32 = 6; //core pub const SIGIOT: i32 = 6; //Core pub const SIGBUS: i32 = 7; //Core pub const SIGFPE: i32 = 8; //Core pub const SIGKILL: i32 = 9; //Term pub const SIGUSR1: i32 = 10; //Term pub const SIGSEGV: i32 = 11; //Core pub const SIGUSR2: i32 = 12; //Term pub const SIGPIPE: i32 = 13; //Term pub const SIGALRM: i32 = 14; //Term pub const SIGTERM: i32 = 15; //Term pub const SIGSTKFLT: i32 = 16; //Term pub const SIGCHLD: i32 = 17; //Ignore pub const SIGCLD: i32 = 17; //ignore pub const SIGCONT: i32 = 18; //Cont pub const SIGSTOP: i32 = 19; //Stop pub const SIGTSTP: i32 = 20; //Stop pub const SIGTTIN: i32 = 21; //Stop pub const SIGTTOU: i32 = 22; //Stop pub const SIGURG: i32 = 23; //Ignore pub const SIGXCPU: i32 = 24; //Core pub const SIGXFSZ: i32 = 25; //Core pub const SIGVTALRM: i32 = 26; //Term pub const SIGPROF: i32 = 27; //Term pub const SIGWINCH: i32 = 28; //Ignore pub const SIGIO: i32 = 29; //Term pub const SIGPOLL: i32 = 29; //Term pub const SIGPWR: i32 = 30; //Term pub 
const SIGSYS: i32 = 31; //Core pub const SIGUNUSED: i32 = 31; //Core pub const SIGNAL_MAX: i32 = 64; pub const FIRST_STD_SIGNAL: i32 = 1; pub const LAST_STD_SIGNAL: i32 = 31; pub const FIRST_RT_SIGNAL: i32 = 32; pub const LAST_RT_SIGNAL: i32 = 64; pub fn IsValid(&self) -> bool { return 0 < self.0 && self.0 <= Self::SIGNAL_MAX; } pub fn Maskable(&self) -> bool { return self.0 != Self::SIGKILL && self.0 != Self::SIGSTOP } pub fn IsStandard(&self) -> bool { return self.0 <= Self::LAST_STD_SIGNAL; } pub fn IsRealtime(&self) -> bool { return self.0 >= Self::FIRST_RT_SIGNAL; } pub fn Index(&self) -> usize { return (self.0 - 1) as usize; } pub fn Mask(&self) -> u64 { 1 << self.Index() } } // EventMask represents io events as used in the poll() syscall. pub type EventMask = u32; pub const EVENTMASK_ALL: EventMask = 0xFFFF; // Events that waiters can wait on. The meaning is the same as those in the // poll() syscall. pub const EVENT_IN: EventMask = 0x01; // POLLIN pub const EVENT_PRI: EventMask = 0x02; // POLLPRI pub const EVENT_OUT: EventMask = 0x04; // POLLOUT pub const EVENT_ERR: EventMask = 0x08; // POLLERR pub const EVENT_HUP: EventMask = 0x10; // POLLHUP pub const ALL_EVENTS: EventMask = 0x1f; pub const EVENT_READ: EventMask = EVENT_IN | EVENT_HUP | EVENT_ERR; pub const EVENT_WRITE: EventMask = EVENT_OUT | EVENT_HUP | EVENT_ERR; pub struct SocketSize {} impl SocketSize { pub const SIZEOF_INT32: usize = 4; pub const SIZEOF_SOCKADDR_INET4: usize = 0x10; pub const SIZEOF_SOCKADDR_INET6: usize = 0x1c; pub const SIZEOF_SOCKADDR_ANY: usize = 0x70; pub const SIZEOF_SOCKADDR_UNIX: usize = 0x6e; pub const SIZEOF_SOCKADDR_LINKLAYER: usize = 0x14; pub const SIZEOF_SOCKADDR_NETLINK: usize = 0xc; pub const SIZEOF_LINGER: usize = 0x8; pub const SIZEOF_IPMREQ: usize = 0x8; pub const SIZEOF_IPMREQN: usize = 0xc; pub const SIZEOF_IPV6_MREQ: usize = 0x14; pub const SIZEOF_MSGHDR: usize = 0x38; pub const SIZEOF_CMSGHDR: usize = 0x10; pub const SIZEOF_INET4_PKTINFO: usize = 0xc; pub const SIZEOF_INET6_PKTINFO: usize = 0x14; pub const SIZEOF_IPV6_MTUINFO: usize = 0x20; pub const SIZEOF_ICMPV6_FILTER: usize = 0x20; pub const SIZEOF_UCRED: usize = 0xc; pub const SIZEOF_TCPINFO: usize = 0x68; } pub struct StatxFlags {} impl StatxFlags { pub const AT_STATX_SYNC_TYPE: u32 = 0x6000; pub const AT_STATX_SYNC_AS_STAT: u32 = 0x0000; pub const AT_STATX_FORCE_SYNC: u32 = 0x2000; pub const AT_STATX_DONT_SYNC: u32 = 0x4000; } pub struct StatxMask {} impl StatxMask { pub const STATX_TYPE: u32 = 0x00000001; pub const STATX_MODE: u32 = 0x00000002; pub const STATX_NLINK: u32 = 0x00000004; pub const STATX_UID: u32 = 0x00000008; pub const STATX_GID: u32 = 0x00000010; pub const STATX_ATIME: u32 = 0x00000020; pub const STATX_MTIME: u32 = 0x00000040; pub const STATX_CTIME: u32 = 0x00000080; pub const STATX_INO: u32 = 0x00000100; pub const STATX_SIZE: u32 = 0x00000200; pub const STATX_BLOCKS: u32 = 0x00000400; pub const STATX_BASIC_STATS: u32 = 0x000007ff; pub const STATX_BTIME: u32 = 0x00000800; pub const STATX_ALL: u32 = 0x00000fff; pub const STATX__RESERVED: u32 = 0x80000000; } pub struct StatxBitmask {} impl StatxBitmask { pub const STATX_ATTR_COMPRESSED: u32 = 0x00000004; pub const STATX_ATTR_IMMUTABLE: u32 = 0x00000010; pub const STATX_ATTR_APPEND: u32 = 0x00000020; pub const STATX_ATTR_NODUMP: u32 = 0x00000040; pub const STATX_ATTR_ENCRYPTED: u32 = 0x00000800; pub const STATX_ATTR_AUTOMOUNT: u32 = 0x00001000; } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct StatxTimestamp { pub tv_sec: i64, pub tv_nsec: u32, pub 
__statx_timestamp_pad1: i32, } impl StatxTimestamp { const E9: i64 = 1_000_000_000; pub fn FromNs(nsec: i64) -> Self { return Self { tv_sec: nsec / Self::E9, tv_nsec: (nsec % Self::E9) as u32, __statx_timestamp_pad1: 0, } } } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct Statx { pub stx_mask: u32, pub stx_blksize: u32, pub stx_attributes: u64, pub stx_nlink: u32, pub stx_uid: u32, pub stx_gid: u32, pub stx_mode: u16, pub __statx_pad1: [u16; 1], pub stx_ino: u64, pub stx_size: u64, pub stx_blocks: u64, pub stx_attributes_mask: u64, pub stx_atime: StatxTimestamp, pub stx_btime: StatxTimestamp, pub stx_ctime: StatxTimestamp, pub stx_mtime: StatxTimestamp, pub stx_rdev_major: u32, pub stx_rdev_minor: u32, pub stx_dev_major: u32, pub stx_dev_minor: u32, pub __statx_pad2: [u64; 14], } // LibcStatfs is struct statfs, from uapi/asm-generic/statfs.h. #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct LibcStatfs { // Type is one of the filesystem magic values, defined in FSMagic below. pub Type: u64, // BlockSize is the data block size. pub BlockSize: i64, // Blocks is the number of data blocks in use. pub Blocks: u64, // BlocksFree is the number of free blocks. pub BlocksFree: u64, // BlocksAvailable is the number of blocks free for use by // unprivileged users. pub BlocksAvailable: u64, // Files is the number of used file nodes on the filesystem. pub Files: u64, // FilesFree is the number of free file nodes on the filesystem. pub FilesFree: u64, // FSID is the filesystem ID. pub FSID: [i32; 2], // NameLength is the maximum file name length. pub NameLength: u64, // FragmentSize is equivalent to BlockSize. pub FragmentSize: i64, // Flags is the set of filesystem mount flags. pub Flags: u64, // Spare is unused. pub Spare: [u64; 4] } // Filesystem types used in statfs(2). // See linux/magic.h.
pub struct FSMagic {} impl FSMagic { pub const ANON_INODE_FS_MAGIC: u64 = 0x09041934; pub const DEVPTS_SUPER_MAGIC: u64 = 0x00001cd1; pub const EXT_SUPER_MAGIC: u64 = 0xef53; pub const OVERLAYFS_SUPER_MAGIC: u64 = 0x794c7630; pub const PIPEFS_MAGIC: u64 = 0x50495045; pub const PROC_SUPER_MAGIC: u64 = 0x9fa0; pub const RAMFS_MAGIC: u64 = 0x09041934; pub const SOCKFS_MAGIC: u64 = 0x534F434B; pub const SYSFS_MAGIC: u64 = 0x62656572; pub const TMPFS_MAGIC: u64 = 0x01021994; pub const V9FS_MAGIC: u64 = 0x01021997; } pub struct MfdType {} impl MfdType { pub const MFD_CLOEXEC: u32 = 0x0001; pub const MFD_ALLOW_SEALING: u32 = 0x0002; } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct FlockStruct { pub l_type: i16, pub l_whence: i16, pub l_start: i64, pub l_len: i64, pub l_pid: i32, } pub const MAX_SYMLINK_TRAVERSALS: u32 = 40; pub const NAME_MAX: usize = 255; pub const PATH_MAX: usize = 4096; #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct Utime { pub Actime: i64, pub Modtime: i64, } impl Utime { pub const UTIME_NOW: i64 = ((1 << 30) - 1); pub const UTIME_OMIT: i64 = ((1 << 30) - 2); } pub struct Capability {} impl Capability { pub const CAP_CHOWN: u64 = 0; pub const CAP_DAC_OVERRIDE: u64 = 1; pub const CAP_DAC_READ_SEARCH: u64 = 2; pub const CAP_FOWNER: u64 = 3; pub const CAP_FSETID: u64 = 4; pub const CAP_KILL: u64 = 5; pub const CAP_SETGID: u64 = 6; pub const CAP_SETUID: u64 = 7; pub const CAP_SETPCAP: u64 = 8; pub const CAP_LINUX_IMMUTABLE: u64 = 9; pub const CAP_NET_BIND_SERVICE: u64 = 10; pub const CAP_NET_BROADCAST: u64 = 11; pub const CAP_NET_ADMIN: u64 = 12; pub const CAP_NET_RAW: u64 = 13; pub const CAP_IPC_LOCK: u64 = 14; pub const CAP_IPC_OWNER: u64 = 15; pub const CAP_SYS_MODULE: u64 = 16; pub const CAP_SYS_RAWIO: u64 = 17; pub const CAP_SYS_CHROOT: u64 = 18; pub const CAP_SYS_PTRACE: u64 = 19; pub const CAP_SYS_PACCT: u64 = 20; pub const CAP_SYS_ADMIN: u64 = 21; pub const CAP_SYS_BOOT: u64 = 22; pub const CAP_SYS_NICE: u64 = 23; pub const CAP_SYS_RESOURCE: u64 = 24; pub const CAP_SYS_TIME: u64 = 25; pub const CAP_SYS_TTY_CONFIG: u64 = 26; pub const CAP_MKNOD: u64 = 27; pub const CAP_LEASE: u64 = 28; pub const CAP_AUDIT_WRITE: u64 = 29; pub const CAP_AUDIT_CONTROL: u64 = 30; pub const CAP_SETFCAP: u64 = 31; pub const CAP_MAC_OVERRIDE: u64 = 32; pub const CAP_MAC_ADMIN: u64 = 33; pub const CAP_SYSLOG: u64 = 34; pub const CAP_WAKE_ALARM: u64 = 35; pub const CAP_BLOCK_SUSPEND: u64 = 36; pub const CAP_AUDIT_READ: u64 = 37; pub const CAP_LAST_CAP: u64 = Self::CAP_AUDIT_READ; pub fn Ok(cap: i32) -> bool { return cap >= 0 && cap <= Self::CAP_LAST_CAP as i32; } } // LINUX_CAPABILITY_VERSION_1 causes the data pointer to be // interpreted as a pointer to a single cap_user_data_t. Since capability // sets are 64 bits and the "capability sets" in cap_user_data_t are 32 // bits only, this causes the upper 32 bits to be implicitly 0. pub const LINUX_CAPABILITY_VERSION_1 : u32 = 0x19980330; // LINUX_CAPABILITY_VERSION_2 and LINUX_CAPABILITY_VERSION_3 cause the // data pointer to be interpreted as a pointer to an array of 2 // cap_user_data_t, using the second to store the 32 MSB of each capability // set. Versions 2 and 3 are identical, but Linux printk's a warning on use // of version 2 due to a userspace API defect. pub const LINUX_CAPABILITY_VERSION_2 : u32 = 0x20071026; pub const LINUX_CAPABILITY_VERSION_3 : u32 = 0x20080522; // HIGHEST_CAPABILITY_VERSION is the highest supported // LINUX_CAPABILITY_VERSION_* version. 
pub const HIGHEST_CAPABILITY_VERSION : u32 = LINUX_CAPABILITY_VERSION_3; // CapUserHeader is equivalent to Linux's cap_user_header_t. #[derive(Clone, Copy, Default)] pub struct CapUserHeader { pub Version: u32, pub Pid: i32, } // CapUserData is equivalent to Linux's cap_user_data_t. #[derive(Clone, Copy, Default)] pub struct CapUserData { pub Effective : u32, pub Permitted : u32, pub Inheritable : u32, } pub struct ATType {} impl ATType { pub const AT_REMOVEDIR: i32 = 0x200; pub const AT_SYMLINK_FOLLOW: i32 = 0x400; pub const AT_EMPTY_PATH: i32 = 0x1000; pub const AT_FDCWD: i32 = -100; // Constants for fstatat(2) pub const AT_SYMLINK_NOFOLLOW: i32 = 0x100; } // Values for linux_dirent64.d_type. pub struct DType {} impl DType { pub const DT_UNKNOWN: u8 = 0; pub const DT_FIFO: u8 = 1; pub const DT_CHR: u8 = 2; pub const DT_DIR: u8 = 4; pub const DT_BLK: u8 = 6; pub const DT_REG: u8 = 8; pub const DT_LNK: u8 = 10; pub const DT_SOCK: u8 = 12; pub const DT_WHT: u8 = 14; } // mode_t pub struct ModeType {} impl ModeType { pub const S_IFMT: u16 = 0o170000; pub const S_IFSOCK: u16 = 0o140000; pub const S_IFLNK: u16 = 0o120000; pub const S_IFREG: u16 = 0o0100000; pub const S_IFBLK: u16 = 0o060000; pub const S_IFDIR: u16 = 0o040000; pub const S_IFCHR: u16 = 0o020000; pub const S_IFIFO: u16 = 0o010000; pub const FILE_TYPE_MASK: u16 = Self::S_IFMT; pub const MODE_SOCKET: u16 = Self::S_IFSOCK; pub const MODE_SYMLINK: u16 = Self::S_IFLNK; pub const MODE_REGULAR: u16 = Self::S_IFREG; pub const MODE_BLOCK_DEVICE: u16 = Self::S_IFBLK; pub const MODE_DIRECTORY: u16 = Self::S_IFDIR; pub const MODE_CHARACTER_DEVICE: u16 = Self::S_IFCHR; pub const MODE_NAMED_PIPE: u16 = Self::S_IFIFO; pub const S_ISUID: u32 = 0o04000; pub const S_ISGID: u32 = 0o02000; pub const S_ISVTX: u32 = 0o01000; pub const MODE_SET_UID: u32 = Self::S_ISUID; pub const MODE_SET_GID: u32 = Self::S_ISGID; pub const MODE_STICKY: u32 = Self::S_ISVTX; pub const MODE_USER_ALL: u16 = 0o0700; pub const MODE_USER_READ: u16 = 0o0400; pub const MODE_USER_WRITE: u16 = 0o0200; pub const MODE_USER_EXEC: u16 = 0o0100; pub const MODE_GROUP_ALL: u16 = 0o0070; pub const MODE_GROUP_READ: u16 = 0o0040; pub const MODE_GROUP_WRITE: u16 = 0o0020; pub const MODE_GROUP_EXEC: u16 = 0o0010; pub const MODE_OTHER_ALL: u16 = 0o0007; pub const MODE_OTHER_READ: u16 = 0o0004; pub const MODE_OTHER_WRITE: u16 = 0o0002; pub const MODE_OTHER_EXEC: u16 = 0o0001; pub const PERMISSIONS_MASK: u16 = 0o0777; } #[derive(Debug, Default, Copy, Clone)] pub struct FileMode(pub u16); impl FileMode { pub fn Permission(&self) -> Self { return Self(self.0 & ModeType::PERMISSIONS_MASK) } pub fn FileType(&self) -> Self { return Self(self.0 & ModeType::FILE_TYPE_MASK) } pub fn ExtraBits(&self) -> Self { return Self(self.0 & !(ModeType::PERMISSIONS_MASK | ModeType::FILE_TYPE_MASK)) } pub fn OtherRead(self) -> bool { return self.0 & ModeType::MODE_OTHER_READ != 0; } pub fn OtherWrite(self) -> bool { return self.0 & ModeType::MODE_OTHER_WRITE != 0; } pub fn OtherExec(self) -> bool { return self.0 & ModeType::MODE_OTHER_EXEC != 0; } pub fn Sticky(self) -> bool { return self.0 as u32 & ModeType::MODE_STICKY == ModeType::MODE_STICKY; } pub fn SetUID(self) -> bool { return self.0 as u32 & ModeType::MODE_SET_UID == ModeType::MODE_SET_UID; } pub fn SetGID(self) -> bool { return self.0 as u32 & ModeType::MODE_SET_GID == ModeType::MODE_SET_GID; } pub fn DirentType(&self) -> u8 { match self.FileType().0 { ModeType::S_IFSOCK => return DType::DT_SOCK, ModeType::S_IFLNK => return DType::DT_LNK, 
ModeType::S_IFREG => return DType::DT_REG, ModeType::S_IFBLK => return DType::DT_BLK, ModeType::S_IFDIR => return DType::DT_DIR, ModeType::S_IFCHR => return DType::DT_CHR, ModeType::S_IFIFO => return DType::DT_FIFO, _ => return DType::DT_UNKNOWN, } } pub fn Perms(mode: u16) -> PermMask { return PermMask { read: (mode & ModeType::MODE_OTHER_READ) != 0, write: (mode & ModeType::MODE_OTHER_WRITE) != 0, execute: (mode & ModeType::MODE_OTHER_EXEC) != 0, } } const MODE_SET_UID: u16 = ModeType::S_ISUID as u16; const MODE_SET_GID: u16 = ModeType::S_ISGID as u16; const MODE_STICKY: u16 = ModeType::S_ISVTX as u16; pub fn FilePerms(&self) -> FilePermissions { let perm = self.Permission().0; return FilePermissions { Other: Self::Perms(perm), Group: Self::Perms(perm >> 3), User: Self::Perms(perm >> 6), Sticky: self.0 & Self::MODE_STICKY == Self::MODE_STICKY, SetUid: self.0 & Self::MODE_SET_UID == Self::MODE_SET_UID, SetGid: self.0 & Self::MODE_SET_GID == Self::MODE_SET_GID, } } } #[derive(Debug, Default, Copy, Clone)] pub struct FilePermissions { pub User: PermMask, pub Group: PermMask, pub Other: PermMask, pub Sticky: bool, pub SetUid: bool, pub SetGid: bool, } impl FilePermissions { pub fn FromMode(mode: FileMode) -> Self { let mut fp = Self::default(); let perm = mode.Permission(); fp.Other = PermMask::FromMode(perm); fp.Group = PermMask::FromMode(FileMode(perm.0 >> 3)); fp.User = PermMask::FromMode(FileMode(perm.0 >> 6)); fp.Sticky = mode.Sticky(); fp.SetUid = mode.SetUID(); fp.SetGid = mode.SetGID(); return fp; } pub fn LinuxMode(&self) -> u32 { let mut m = (self.User.Mode() << 6) | (self.Group.Mode() << 3) | self.Other.Mode(); if self.SetUid { m |= ModeType::S_ISUID; } if self.SetGid { m |= ModeType::S_ISGID; } if self.Sticky { m |= ModeType::S_ISVTX; } return m } pub fn AnyExec(&self) -> bool { return self.User.execute | self.Group.execute | self.Other.execute } pub fn AnyWrite(&self) -> bool { return self.User.write | self.Group.write | self.Other.write } pub fn AnyRead(&self) -> bool { return self.User.read | self.Group.read | self.Other.read } } pub struct LibcConst {} impl LibcConst { pub const AF_ALG: u64 = 0x26; pub const AF_APPLETALK: u64 = 0x5; pub const AF_ASH: u64 = 0x12; pub const AF_ATMPVC: u64 = 0x8; pub const AF_ATMSVC: u64 = 0x14; pub const AF_AX25: u64 = 0x3; pub const AF_BLUETOOTH: u64 = 0x1f; pub const AF_BRIDGE: u64 = 0x7; pub const AF_CAIF: u64 = 0x25; pub const AF_CAN: u64 = 0x1d; pub const AF_DECNET: u64 = 0xc; //AF_DECnet pub const AF_ECONET: u64 = 0x13; pub const AF_FILE: u64 = 0x1; pub const AF_IEEE802154: u64 = 0x24; pub const AF_INET: u64 = 0x2; pub const AF_INET6: u64 = 0xa; pub const AF_IPX: u64 = 0x4; pub const AF_IRDA: u64 = 0x17; pub const AF_ISDN: u64 = 0x22; pub const AF_IUCV: u64 = 0x20; pub const AF_KEY: u64 = 0xf; pub const AF_LLC: u64 = 0x1a; pub const AF_LOCAL: u64 = 0x1; pub const AF_MAX: u64 = 0x27; pub const AF_NETBEUI: u64 = 0xd; pub const AF_NETLINK: u64 = 0x10; pub const AF_NETROM: u64 = 0x6; pub const AF_PACKET: u64 = 0x11; pub const AF_PHONET: u64 = 0x23; pub const AF_PPPOX: u64 = 0x18; pub const AF_RDS: u64 = 0x15; pub const AF_ROSE: u64 = 0xb; pub const AF_ROUTE: u64 = 0x10; pub const AF_RXRPC: u64 = 0x21; pub const AF_SECURITY: u64 = 0xe; pub const AF_SNA: u64 = 0x16; pub const AF_TIPC: u64 = 0x1e; pub const AF_UNIX: u64 = 0x1; pub const AF_UNSPEC: u64 = 0x0; pub const AF_WANPIPE: u64 = 0x19; pub const AF_X25: u64 = 0x9; pub const AF_UNSPECADAPT: u64 = 0x108; pub const AF_UNSPECAPPLETLK: u64 = 0x8; pub const AF_UNSPECARCNET: u64 = 0x7; pub const
AF_UNSPECASH: u64 = 0x30d; pub const AF_UNSPECATM: u64 = 0x13; pub const AF_UNSPECAX25: u64 = 0x3; pub const AF_UNSPECBIF: u64 = 0x307; pub const AF_UNSPECCHAOS: u64 = 0x5; pub const AF_UNSPECCISCO: u64 = 0x201; pub const AF_UNSPECCSLIP: u64 = 0x101; pub const AF_UNSPECCSLIP6: u64 = 0x103; pub const AF_UNSPECDDCMP: u64 = 0x205; pub const AF_UNSPECDLCI: u64 = 0xf; pub const AF_UNSPECECONET: u64 = 0x30e; pub const AF_UNSPECEETHER: u64 = 0x2; pub const AF_UNSPECETHER: u64 = 0x1; pub const AF_UNSPECEUI64: u64 = 0x1b; pub const AF_UNSPECFCAL: u64 = 0x311; pub const AF_UNSPECFCFABRIC: u64 = 0x313; pub const AF_UNSPECFCPL: u64 = 0x312; pub const AF_UNSPECFCPP: u64 = 0x310; pub const AF_UNSPECFDDI: u64 = 0x306; pub const AF_UNSPECFRAD: u64 = 0x302; pub const AF_UNSPECHDLC: u64 = 0x201; pub const AF_UNSPECHIPPI: u64 = 0x30c; pub const AF_UNSPECHWX25: u64 = 0x110; pub const AF_UNSPECIEEE1394: u64 = 0x18; pub const AF_UNSPECIEEE802: u64 = 0x6; pub const AF_UNSPECIEEE80211: u64 = 0x321; pub const AF_UNSPECIEEE80211_PRISM: u64 = 0x322; pub const AF_UNSPECIEEE80211_RADIOTAP: u64 = 0x323; pub const AF_UNSPECIEEE802154: u64 = 0x324; pub const AF_UNSPECIEEE802154_PHY: u64 = 0x325; pub const AF_UNSPECIEEE802_TR: u64 = 0x320; pub const AF_UNSPECINFINIBAND: u64 = 0x20; pub const AF_UNSPECIPDDP: u64 = 0x309; pub const AF_UNSPECIPGRE: u64 = 0x30a; pub const AF_UNSPECIRDA: u64 = 0x30f; pub const AF_UNSPECLAPB: u64 = 0x204; pub const AF_UNSPECLOCALTLK: u64 = 0x305; pub const AF_UNSPECLOOPBACK: u64 = 0x304; pub const AF_UNSPECMETRICOM: u64 = 0x17; pub const AF_UNSPECNETROM: u64 = 0x0; pub const AF_UNSPECNONE: u64 = 0xfffe; pub const AF_UNSPECPIMREG: u64 = 0x30b; pub const AF_UNSPECPPP: u64 = 0x200; pub const AF_UNSPECPRONET: u64 = 0x4; pub const AF_UNSPECRAWHDLC: u64 = 0x206; pub const AF_UNSPECROSE: u64 = 0x10e; pub const AF_UNSPECRSRVD: u64 = 0x104; pub const AF_UNSPECSIT: u64 = 0x308; pub const AF_UNSPECSKIP: u64 = 0x303; pub const AF_UNSPECSLIP: u64 = 0x100; pub const AF_UNSPECSLIP6: u64 = 0x102; pub const AF_UNSPECTUNNEL: u64 = 0x300; pub const AF_UNSPECTUNNEL6: u64 = 0x301; pub const AF_UNSPECVOID: u64 = 0xffff; pub const AF_UNSPECX25: u64 = 0x10f; pub const AF_UNSPECA: u64 = 0x10; pub const AF_UNSPECABS: u64 = 0x20; pub const AF_UNSPECADD: u64 = 0x0; pub const AF_UNSPECALU: u64 = 0x4; pub const AF_UNSPECAND: u64 = 0x50; pub const AF_UNSPECB: u64 = 0x10; pub const AF_UNSPECDIV: u64 = 0x30; pub const AF_UNSPECH: u64 = 0x8; pub const AF_UNSPECIMM: u64 = 0x0; pub const AF_UNSPECIND: u64 = 0x40; pub const AF_UNSPECJA: u64 = 0x0; pub const AF_UNSPECJEQ: u64 = 0x10; pub const AF_UNSPECJGE: u64 = 0x30; pub const AF_UNSPECJGT: u64 = 0x20; pub const AF_UNSPECJMP: u64 = 0x5; pub const AF_UNSPECJSET: u64 = 0x40; pub const AF_UNSPECK: u64 = 0x0; pub const AF_UNSPECLD: u64 = 0x0; pub const AF_UNSPECLDX: u64 = 0x1; pub const AF_UNSPECLEN: u64 = 0x80; pub const AF_UNSPECLSH: u64 = 0x60; pub const AF_UNSPECMAJOR_VERSION: u64 = 0x1; pub const AF_UNSPECMAXINSNS: u64 = 0x1000; pub const AF_UNSPECMEM: u64 = 0x60; pub const AF_UNSPECMEMWORDS: u64 = 0x10; pub const AF_UNSPECMINOR_VERSION: u64 = 0x1; pub const AF_UNSPECMISC: u64 = 0x7; pub const AF_UNSPECMSH: u64 = 0xa0; pub const AF_UNSPECMUL: u64 = 0x20; pub const AF_UNSPECNEG: u64 = 0x80; pub const AF_UNSPECOR: u64 = 0x40; pub const AF_UNSPECRET: u64 = 0x6; pub const AF_UNSPECRSH: u64 = 0x70; pub const AF_UNSPECST: u64 = 0x2; pub const AF_UNSPECSTX: u64 = 0x3; pub const AF_UNSPECSUB: u64 = 0x10; pub const AF_UNSPECTAX: u64 = 0x0; pub const AF_UNSPECTXA: u64 = 0x80; pub const 
AF_UNSPECW: u64 = 0x0; pub const AF_UNSPECX: u64 = 0x8; pub const CLONE_CHILD_CLEARTID: u64 = 0x200000; pub const CLONE_CHILD_SETTID: u64 = 0x1000000; pub const CLONE_DETACHED: u64 = 0x400000; pub const CLONE_FILES: u64 = 0x400; pub const CLONE_FS: u64 = 0x200; pub const CLONE_IO: u64 = 0x80000000; pub const CLONE_NEWIPC: u64 = 0x8000000; pub const CLONE_NEWNET: u64 = 0x40000000; pub const CLONE_NEWNS: u64 = 0x20000; pub const CLONE_NEWPID: u64 = 0x20000000; pub const CLONE_NEWUSER: u64 = 0x10000000; pub const CLONE_NEWUTS: u64 = 0x4000000; pub const CLONE_PARENT: u64 = 0x8000; pub const CLONE_PARENT_SETTID: u64 = 0x100000; pub const CLONE_PTRACE: u64 = 0x2000; pub const CLONE_SETTLS: u64 = 0x80000; pub const CLONE_SIGHAND: u64 = 0x800; pub const CLONE_SYSVSEM: u64 = 0x40000; pub const CLONE_THREAD: u64 = 0x10000; pub const CLONE_UNTRACED: u64 = 0x800000; pub const CLONE_VFORK: u64 = 0x4000; pub const CLONE_VM: u64 = 0x100; pub const DT_BLK: u64 = 0x6; pub const DT_CHR: u64 = 0x2; pub const DT_DIR: u64 = 0x4; pub const DT_FIFO: u64 = 0x1; pub const DT_LNK: u64 = 0xa; pub const DT_REG: u64 = 0x8; pub const DT_SOCK: u64 = 0xc; pub const DT_UNKNOWN: u64 = 0x0; pub const DT_WHT: u64 = 0xe; pub const EPOLLERR: u64 = 0x8; pub const EPOLLET: i64 = -0x80000000; pub const EPOLLHUP: u64 = 0x10; pub const EPOLLIN: u64 = 0x1; pub const EPOLLMSG: u64 = 0x400; pub const EPOLLONESHOT: u64 = 0x40000000; pub const EPOLLOUT: u64 = 0x4; pub const EPOLLPRI: u64 = 0x2; pub const EPOLLRDBAND: u64 = 0x80; pub const EPOLLRDHUP: u64 = 0x2000; pub const EPOLLRDNORM: u64 = 0x40; pub const EPOLLWRBAND: u64 = 0x200; pub const EPOLLWRNORM: u64 = 0x100; pub const EPOLL_CLOEXEC: u64 = 0x80000; pub const EPOLL_CTL_ADD: u64 = 0x1; pub const EPOLL_CTL_DEL: u64 = 0x2; pub const EPOLL_CTL_MOD: u64 = 0x3; pub const EPOLL_NONBLOCK: u64 = 0x800; pub const ETH_P_1588: u64 = 0x88f7; pub const ETH_P_8021Q: u64 = 0x8100; pub const ETH_P_802_2: u64 = 0x4; pub const ETH_P_802_3: u64 = 0x1; pub const ETH_P_AARP: u64 = 0x80f3; pub const ETH_P_ALL: u64 = 0x3; pub const ETH_P_AOE: u64 = 0x88a2; pub const ETH_P_ARCNET: u64 = 0x1a; pub const ETH_P_ARP: u64 = 0x806; pub const ETH_P_ATALK: u64 = 0x809b; pub const ETH_P_ATMFATE: u64 = 0x8884; pub const ETH_P_ATMMPOA: u64 = 0x884c; pub const ETH_P_AX25: u64 = 0x2; pub const ETH_P_BPQ: u64 = 0x8ff; pub const ETH_P_CAIF: u64 = 0xf7; pub const ETH_P_CAN: u64 = 0xc; pub const ETH_P_CONTROL: u64 = 0x16; pub const ETH_P_CUST: u64 = 0x6006; pub const ETH_P_DDCMP: u64 = 0x6; pub const ETH_P_DEC: u64 = 0x6000; pub const ETH_P_DIAG: u64 = 0x6005; pub const ETH_P_DNA_DL: u64 = 0x6001; pub const ETH_P_DNA_RC: u64 = 0x6002; pub const ETH_P_DNA_RT: u64 = 0x6003; pub const ETH_P_DSA: u64 = 0x1b; pub const ETH_P_ECONET: u64 = 0x18; pub const ETH_P_EDSA: u64 = 0xdada; pub const ETH_P_FCOE: u64 = 0x8906; pub const ETH_P_FIP: u64 = 0x8914; pub const ETH_P_HDLC: u64 = 0x19; pub const ETH_P_IEEE802154: u64 = 0xf6; pub const ETH_P_IEEEPUP: u64 = 0xa00; pub const ETH_P_IEEEPUPAT: u64 = 0xa01; pub const ETH_P_IP: u64 = 0x800; pub const ETH_P_IPV6: u64 = 0x86dd; pub const ETH_P_IPX: u64 = 0x8137; pub const ETH_P_IRDA: u64 = 0x17; pub const ETH_P_LAT: u64 = 0x6004; pub const ETH_P_LINK_CTL: u64 = 0x886c; pub const ETH_P_LOCALTALK: u64 = 0x9; pub const ETH_P_LOOP: u64 = 0x60; pub const ETH_P_MOBITEX: u64 = 0x15; pub const ETH_P_MPLS_MC: u64 = 0x8848; pub const ETH_P_MPLS_UC: u64 = 0x8847; pub const ETH_P_PAE: u64 = 0x888e; pub const ETH_P_PAUSE: u64 = 0x8808; pub const ETH_P_PHONET: u64 = 0xf5; pub const ETH_P_PPPTALK: 
u64 = 0x10; pub const ETH_P_PPP_DISC: u64 = 0x8863; pub const ETH_P_PPP_MP: u64 = 0x8; pub const ETH_P_PPP_SES: u64 = 0x8864; pub const ETH_P_PUP: u64 = 0x200; pub const ETH_P_PUPAT: u64 = 0x201; pub const ETH_P_RARP: u64 = 0x8035; pub const ETH_P_SCA: u64 = 0x6007; pub const ETH_P_SLOW: u64 = 0x8809; pub const ETH_P_SNAP: u64 = 0x5; pub const ETH_P_TEB: u64 = 0x6558; pub const ETH_P_TIPC: u64 = 0x88ca; pub const ETH_P_TRAILER: u64 = 0x1c; pub const ETH_P_TR_802_2: u64 = 0x11; pub const ETH_P_WAN_PPP: u64 = 0x7; pub const ETH_P_WCCP: u64 = 0x883e; pub const ETH_P_X25: u64 = 0x805; pub const FD_CLOEXEC: u64 = 0x1; pub const FD_SETSIZE: u64 = 0x400; pub const F_DUPFD: u64 = 0x0; pub const F_DUPFD_CLOEXEC: u64 = 0x406; pub const F_EXLCK: u64 = 0x4; pub const F_GETFD: u64 = 0x1; pub const F_GETFL: u64 = 0x3; pub const F_GETLEASE: u64 = 0x401; pub const F_GETLK: u64 = 0x5; pub const F_GETLK64: u64 = 0x5; pub const F_GETOWN: u64 = 0x9; pub const F_GETOWN_EX: u64 = 0x10; pub const F_GETPIPE_SZ: u64 = 0x408; pub const F_GETSIG: u64 = 0xb; pub const F_LOCK: u64 = 0x1; pub const F_NOTIFY: u64 = 0x402; pub const F_OK: u64 = 0x0; pub const F_RDLCK: u64 = 0x0; pub const F_SETFD: u64 = 0x2; pub const F_SETFL: u64 = 0x4; pub const F_SETLEASE: u64 = 0x400; pub const F_SETLK: u64 = 0x6; pub const F_SETLK64: u64 = 0x6; pub const F_SETLKW: u64 = 0x7; pub const F_SETLKW64: u64 = 0x7; pub const F_SETOWN: u64 = 0x8; pub const F_SETOWN_EX: u64 = 0xf; pub const F_SETPIPE_SZ: u64 = 0x407; pub const F_SETSIG: u64 = 0xa; pub const F_SHLCK: u64 = 0x8; pub const F_TEST: u64 = 0x3; pub const F_TLOCK: u64 = 0x2; pub const F_ULOCK: u64 = 0x0; pub const F_UNLCK: u64 = 0x2; pub const F_WRLCK: u64 = 0x1; pub const ICMPV6_FILTER: u64 = 0x1; pub const IFA_F_DADFAILED: u64 = 0x8; pub const IFA_F_DEPRECATED: u64 = 0x20; pub const IFA_F_HOMEADDRESS: u64 = 0x10; pub const IFA_F_NODAD: u64 = 0x2; pub const IFA_F_OPTIMISTIC: u64 = 0x4; pub const IFA_F_PERMANENT: u64 = 0x80; pub const IFA_F_SECONDARY: u64 = 0x1; pub const IFA_F_TEMPORARY: u64 = 0x1; pub const IFA_F_TENTATIVE: u64 = 0x40; pub const IFA_MAX: u64 = 0x7; pub const IFF_ALLMULTI: u64 = 0x200; pub const IFF_AUTOMEDIA: u64 = 0x4000; pub const IFF_BROADCAST: u64 = 0x2; pub const IFF_DEBUG: u64 = 0x4; pub const IFF_DYNAMIC: u64 = 0x8000; pub const IFF_LOOPBACK: u64 = 0x8; pub const IFF_MASTER: u64 = 0x400; pub const IFF_MULTICAST: u64 = 0x1000; pub const IFF_NOARP: u64 = 0x80; pub const IFF_NOTRAILERS: u64 = 0x20; pub const IFF_NO_PI: u64 = 0x1000; pub const IFF_ONE_QUEUE: u64 = 0x2000; pub const IFF_POINTOPOINT: u64 = 0x10; pub const IFF_PORTSEL: u64 = 0x2000; pub const IFF_PROMISC: u64 = 0x100; pub const IFF_RUNNING: u64 = 0x40; pub const IFF_SLAVE: u64 = 0x800; pub const IFF_TAP: u64 = 0x2; pub const IFF_TUN: u64 = 0x1; pub const IFF_TUN_EXCL: u64 = 0x8000; pub const IFF_UP: u64 = 0x1; pub const IFF_VNET_HDR: u64 = 0x4000; pub const IFNAMSIZ: u64 = 0x10; pub const IN_ACCESS: u64 = 0x1; pub const IN_ALL_EVENTS: u64 = 0xfff; pub const IN_ATTRIB: u64 = 0x4; pub const IN_CLASSA_HOST: u64 = 0xffffff; pub const IN_CLASSA_MAX: u64 = 0x80; pub const IN_CLASSA_NET: u64 = 0xff000000; pub const IN_CLASSA_NSHIFT: u64 = 0x18; pub const IN_CLASSB_HOST: u64 = 0xffff; pub const IN_CLASSB_MAX: u64 = 0x10000; pub const IN_CLASSB_NET: u64 = 0xffff0000; pub const IN_CLASSB_NSHIFT: u64 = 0x10; pub const IN_CLASSC_HOST: u64 = 0xff; pub const IN_CLASSC_NET: u64 = 0xffffff00; pub const IN_CLASSC_NSHIFT: u64 = 0x8; pub const IN_CLOEXEC: u64 = 0x80000; pub const IN_CLOSE: u64 = 0x18; pub const 
IN_CLOSE_NOWRITE: u64 = 0x10; pub const IN_CLOSE_WRITE: u64 = 0x8; pub const IN_CREATE: u64 = 0x100; pub const IN_DELETE: u64 = 0x200; pub const IN_DELETE_SELF: u64 = 0x400; pub const IN_DONT_FOLLOW: u64 = 0x2000000; pub const IN_EXCL_UNLINK: u64 = 0x4000000; pub const IN_IGNORED: u64 = 0x8000; pub const IN_ISDIR: u64 = 0x40000000; pub const IN_LOOPBACKNET: u64 = 0x7f; pub const IN_MASK_ADD: u64 = 0x20000000; pub const IN_MODIFY: u64 = 0x2; pub const IN_MOVE: u64 = 0xc0; pub const IN_MOVED_FROM: u64 = 0x40; pub const IN_MOVED_TO: u64 = 0x80; pub const IN_MOVE_SELF: u64 = 0x800; pub const IN_NONBLOCK: u64 = 0x800; pub const IN_ONESHOT: u64 = 0x80000000; pub const IN_ONLYDIR: u64 = 0x1000000; pub const IN_OPEN: u64 = 0x20; pub const IN_Q_OVERFLOW: u64 = 0x4000; pub const IN_UNMOUNT: u64 = 0x2000; pub const IPPROTO_AH: u64 = 0x33; pub const IPPROTO_COMP: u64 = 0x6c; pub const IPPROTO_DCCP: u64 = 0x21; pub const IPPROTO_DSTOPTS: u64 = 0x3c; pub const IPPROTO_EGP: u64 = 0x8; pub const IPPROTO_ENCAP: u64 = 0x62; pub const IPPROTO_ESP: u64 = 0x32; pub const IPPROTO_FRAGMENT: u64 = 0x2c; pub const IPPROTO_GRE: u64 = 0x2f; pub const IPPROTO_HOPOPTS: u64 = 0x0; pub const IPPROTO_ICMP: u64 = 0x1; pub const IPPROTO_ICMPV6: u64 = 0x3a; pub const IPPROTO_IDP: u64 = 0x16; pub const IPPROTO_IGMP: u64 = 0x2; pub const IPPROTO_IP: u64 = 0x0; pub const IPPROTO_IPIP: u64 = 0x4; pub const IPPROTO_IPV6: u64 = 0x29; pub const IPPROTO_MTP: u64 = 0x5c; pub const IPPROTO_NONE: u64 = 0x3b; pub const IPPROTO_PIM: u64 = 0x67; pub const IPPROTO_PUP: u64 = 0xc; pub const IPPROTO_RAW: u64 = 0xff; pub const IPPROTO_ROUTING: u64 = 0x2b; pub const IPPROTO_RSVP: u64 = 0x2e; pub const IPPROTO_SCTP: u64 = 0x84; pub const IPPROTO_TCP: u64 = 0x6; pub const IPPROTO_TP: u64 = 0x1d; pub const IPPROTO_UDP: u64 = 0x11; pub const IPPROTO_UDPLITE: u64 = 0x88; pub const IPV6_2292DSTOPTS: u64 = 0x4; pub const IPV6_2292HOPLIMIT: u64 = 0x8; pub const IPV6_2292HOPOPTS: u64 = 0x3; pub const IPV6_2292PKTINFO: u64 = 0x2; pub const IPV6_2292PKTOPTIONS: u64 = 0x6; pub const IPV6_2292RTHDR: u64 = 0x5; pub const IPV6_ADDRFORM: u64 = 0x1; pub const IPV6_ADD_MEMBERSHIP: u64 = 0x14; pub const IPV6_AUTHHDR: u64 = 0xa; pub const IPV6_CHECKSUM: u64 = 0x7; pub const IPV6_DROP_MEMBERSHIP: u64 = 0x15; pub const IPV6_DSTOPTS: u64 = 0x3b; pub const IPV6_HOPLIMIT: u64 = 0x34; pub const IPV6_HOPOPTS: u64 = 0x36; pub const IPV6_IPSEC_POLICY: u64 = 0x22; pub const IPV6_JOIN_ANYCAST: u64 = 0x1b; pub const IPV6_JOIN_GROUP: u64 = 0x14; pub const IPV6_LEAVE_ANYCAST: u64 = 0x1c; pub const IPV6_LEAVE_GROUP: u64 = 0x15; pub const IPV6_MTU: u64 = 0x18; pub const IPV6_MTU_DISCOVER: u64 = 0x17; pub const IPV6_MULTICAST_HOPS: u64 = 0x12; pub const IPV6_MULTICAST_IF: u64 = 0x11; pub const IPV6_MULTICAST_LOOP: u64 = 0x13; pub const IPV6_NEXTHOP: u64 = 0x9; pub const IPV6_PKTINFO: u64 = 0x32; pub const IPV6_PMTUDISC_DO: u64 = 0x2; pub const IPV6_PMTUDISC_DONT: u64 = 0x0; pub const IPV6_PMTUDISC_PROBE: u64 = 0x3; pub const IPV6_PMTUDISC_WANT: u64 = 0x1; pub const IPV6_RECVDSTOPTS: u64 = 0x3a; pub const IPV6_RECVERR: u64 = 0x19; pub const IPV6_RECVHOPLIMIT: u64 = 0x33; pub const IPV6_RECVHOPOPTS: u64 = 0x35; pub const IPV6_RECVPKTINFO: u64 = 0x31; pub const IPV6_RECVRTHDR: u64 = 0x38; pub const IPV6_RECVTCLASS: u64 = 0x42; pub const IPV6_ROUTER_ALERT: u64 = 0x16; pub const IPV6_RTHDR: u64 = 0x39; pub const IPV6_RTHDRDSTOPTS: u64 = 0x37; pub const IPV6_RTHDR_LOOSE: u64 = 0x0; pub const IPV6_RTHDR_STRICT: u64 = 0x1; pub const IPV6_RTHDR_TYPE_0: u64 = 0x0; pub const 
IPV6_RXDSTOPTS: u64 = 0x3b; pub const IPV6_RXHOPOPTS: u64 = 0x36; pub const IPV6_TCLASS: u64 = 0x43; pub const IPV6_UNICAST_HOPS: u64 = 0x10; pub const IPV6_V6ONLY: u64 = 0x1a; pub const IPV6_XFRM_POLICY: u64 = 0x23; pub const IP_ADD_MEMBERSHIP: u64 = 0x23; pub const IP_ADD_SOURCE_MEMBERSHIP: u64 = 0x27; pub const IP_BLOCK_SOURCE: u64 = 0x26; pub const IP_DEFAULT_MULTICAST_LOOP: u64 = 0x1; pub const IP_DEFAULT_MULTICAST_TTL: u64 = 0x1; pub const IP_DF: u64 = 0x4000; pub const IP_DROP_MEMBERSHIP: u64 = 0x24; pub const IP_DROP_SOURCE_MEMBERSHIP: u64 = 0x28; pub const IP_FREEBIND: u64 = 0xf; pub const IP_HDRINCL: u64 = 0x3; pub const IP_IPSEC_POLICY: u64 = 0x10; pub const IP_MAXPACKET: u64 = 0xffff; pub const IP_MAX_MEMBERSHIPS: u64 = 0x14; pub const IP_MF: u64 = 0x2000; pub const IP_MINTTL: u64 = 0x15; pub const IP_MSFILTER: u64 = 0x29; pub const IP_MSS: u64 = 0x240; pub const IP_MTU: u64 = 0xe; pub const IP_MTU_DISCOVER: u64 = 0xa; pub const IP_MULTICAST_IF: u64 = 0x20; pub const IP_MULTICAST_LOOP: u64 = 0x22; pub const IP_MULTICAST_TTL: u64 = 0x21; pub const IP_OFFMASK: u64 = 0x1fff; pub const IP_OPTIONS: u64 = 0x4; pub const IP_ORIGDSTADDR: u64 = 0x14; pub const IP_PASSSEC: u64 = 0x12; pub const IP_PKTINFO: u64 = 0x8; pub const IP_PKTOPTIONS: u64 = 0x9; pub const IP_PMTUDISC: u64 = 0xa; pub const IP_PMTUDISC_DO: u64 = 0x2; pub const IP_PMTUDISC_DONT: u64 = 0x0; pub const IP_PMTUDISC_PROBE: u64 = 0x3; pub const IP_PMTUDISC_WANT: u64 = 0x1; pub const IP_RECVERR: u64 = 0xb; pub const IP_RECVOPTS: u64 = 0x6; pub const IP_RECVORIGDSTADDR: u64 = 0x14; pub const IP_RECVRETOPTS: u64 = 0x7; pub const IP_RECVTOS: u64 = 0xd; pub const IP_RECVTTL: u64 = 0xc; pub const IP_RETOPTS: u64 = 0x7; pub const IP_RF: u64 = 0x8000; pub const IP_ROUTER_ALERT: u64 = 0x5; pub const IP_TOS: u64 = 0x1; pub const IP_TRANSPARENT: u64 = 0x13; pub const IP_TTL: u64 = 0x2; pub const IP_UNBLOCK_SOURCE: u64 = 0x25; pub const IP_XFRM_POLICY: u64 = 0x11; pub const LINUX_REBOOT_CMD_CAD_OFF: u64 = 0x0; pub const LINUX_REBOOT_CMD_CAD_ON: u64 = 0x89abcdef; pub const LINUX_REBOOT_CMD_HALT: u64 = 0xcdef0123; pub const LINUX_REBOOT_CMD_KEXEC: u64 = 0x45584543; pub const LINUX_REBOOT_CMD_POWER_OFF: u64 = 0x4321fedc; pub const LINUX_REBOOT_CMD_RESTART: u64 = 0x1234567; pub const LINUX_REBOOT_CMD_RESTART2: u64 = 0xa1b2c3d4; pub const LINUX_REBOOT_CMD_SW_SUSPEND: u64 = 0xd000fce2; pub const LINUX_REBOOT_MAGIC1: u64 = 0xfee1dead; pub const LINUX_REBOOT_MAGIC2: u64 = 0x28121969; pub const LOCK_EX: u64 = 0x2; pub const LOCK_NB: u64 = 0x4; pub const LOCK_SH: u64 = 0x1; pub const LOCK_UN: u64 = 0x8; pub const MADV_DOFORK: u64 = 0xb; pub const MADV_DONTFORK: u64 = 0xa; pub const MADV_DONTNEED: u64 = 0x4; pub const MADV_HUGEPAGE: u64 = 0xe; pub const MADV_HWPOISON: u64 = 0x64; pub const MADV_MERGEABLE: u64 = 0xc; pub const MADV_NOHUGEPAGE: u64 = 0xf; pub const MADV_NORMAL: u64 = 0x0; pub const MADV_RANDOM: u64 = 0x1; pub const MADV_REMOVE: u64 = 0x9; pub const MADV_SEQUENTIAL: u64 = 0x2; pub const MADV_UNMERGEABLE: u64 = 0xd; pub const MADV_WILLNEED: u64 = 0x3; pub const MAP_32BIT: u64 = 0x40; pub const MAP_ANON: u64 = 0x20; pub const MAP_ANONYMOUS: u64 = 0x20; pub const MAP_DENYWRITE: u64 = 0x800; pub const MAP_EXECUTABLE: u64 = 0x1000; pub const MAP_FILE: u64 = 0x0; pub const MAP_FIXED: u64 = 0x10; pub const MAP_GROWSDOWN: u64 = 0x100; pub const MAP_HUGETLB: u64 = 0x40000; pub const MAP_LOCKED: u64 = 0x2000; pub const MAP_NONBLOCK: u64 = 0x10000; pub const MAP_NORESERVE: u64 = 0x4000; pub const MAP_POPULATE: u64 = 0x8000; pub const 
MAP_PRIVATE: u64 = 0x2; pub const MAP_SHARED: u64 = 0x1; pub const MAP_STACK: u64 = 0x20000; pub const MAP_TYPE: u64 = 0xf; pub const MCL_CURRENT: u64 = 0x1; pub const MCL_FUTURE: u64 = 0x2; pub const MNT_DETACH: u64 = 0x2; pub const MNT_EXPIRE: u64 = 0x4; pub const MNT_FORCE: u64 = 0x1; pub const MSG_CMSG_CLOEXEC: u64 = 0x40000000; pub const MSG_CONFIRM: u64 = 0x800; pub const MSG_CTRUNC: u64 = 0x8; pub const MSG_DONTROUTE: u64 = 0x4; pub const MSG_DONTWAIT: u64 = 0x40; pub const MSG_EOR: u64 = 0x80; pub const MSG_ERRQUEUE: u64 = 0x2000; pub const MSG_FASTOPEN: u64 = 0x20000000; pub const MSG_FIN: u64 = 0x200; pub const MSG_MORE: u64 = 0x8000; pub const MSG_NOSIGNAL: u64 = 0x4000; pub const MSG_OOB: u64 = 0x1; pub const MSG_PEEK: u64 = 0x2; pub const MSG_PROXY: u64 = 0x10; pub const MSG_RST: u64 = 0x1000; pub const MSG_SYN: u64 = 0x400; pub const MSG_TRUNC: u64 = 0x20; pub const MSG_TRYHARD: u64 = 0x4; pub const MSG_WAITALL: u64 = 0x100; pub const MSG_WAITFORONE: u64 = 0x10000; pub const MS_ACTIVE: u64 = 0x40000000; pub const MS_ASYNC: u64 = 0x1; pub const MS_BIND: u64 = 0x1000; pub const MS_DIRSYNC: u64 = 0x80; pub const MS_INVALIDATE: u64 = 0x2; pub const MS_I_VERSION: u64 = 0x800000; pub const MS_KERNMOUNT: u64 = 0x400000; pub const MS_MANDLOCK: u64 = 0x40; pub const MS_MGC_MSK: u64 = 0xffff0000; pub const MS_MGC_VAL: u64 = 0xc0ed0000; pub const MS_MOVE: u64 = 0x2000; pub const MS_NOATIME: u64 = 0x400; pub const MS_NODEV: u64 = 0x4; pub const MS_NODIRATIME: u64 = 0x800; pub const MS_NOEXEC: u64 = 0x8; pub const MS_NOSUID: u64 = 0x2; pub const MS_NOUSER: i64 = -0x80000000; pub const MS_POSIXACL: u64 = 0x10000; pub const MS_PRIVATE: u64 = 0x40000; pub const MS_RDONLY: u64 = 0x1; pub const MS_REC: u64 = 0x4000; pub const MS_RELATIME: u64 = 0x200000; pub const MS_REMOUNT: u64 = 0x20; pub const MS_RMT_MASK: u64 = 0x800051; pub const MS_SHARED: u64 = 0x100000; pub const MS_SILENT: u64 = 0x8000; pub const MS_SLAVE: u64 = 0x80000; pub const MS_STRICTATIME: u64 = 0x1000000; pub const MS_SYNC: u64 = 0x4; pub const MS_SYNCHRONOUS: u64 = 0x10; pub const MS_UNBINDABLE: u64 = 0x20000; pub const NAME_MAX: u64 = 0xff; pub const NETLINK_ADD_MEMBERSHIP: u64 = 0x1; pub const NETLINK_AUDIT: u64 = 0x9; pub const NETLINK_BROADCAST_ERROR: u64 = 0x4; pub const NETLINK_CONNECTOR: u64 = 0xb; pub const NETLINK_DNRTMSG: u64 = 0xe; pub const NETLINK_DROP_MEMBERSHIP: u64 = 0x2; pub const NETLINK_ECRYPTFS: u64 = 0x13; pub const NETLINK_FIB_LOOKUP: u64 = 0xa; pub const NETLINK_FIREWALL: u64 = 0x3; pub const NETLINK_GENERIC: u64 = 0x10; pub const NETLINK_INET_DIAG: u64 = 0x4; pub const NETLINK_IP6_FW: u64 = 0xd; pub const NETLINK_ISCSI: u64 = 0x8; pub const NETLINK_KOBJECT_UEVENT: u64 = 0xf; pub const NETLINK_NETFILTER: u64 = 0xc; pub const NETLINK_NFLOG: u64 = 0x5; pub const NETLINK_NO_ENOBUFS: u64 = 0x5; pub const NETLINK_PKTINFO: u64 = 0x3; pub const NETLINK_ROUTE: u64 = 0x0; pub const NETLINK_SCSITRANSPORT: u64 = 0x12; pub const NETLINK_SELINUX: u64 = 0x7; pub const NETLINK_UNUSED: u64 = 0x1; pub const NETLINK_USERSOCK: u64 = 0x2; pub const NETLINK_XFRM: u64 = 0x6; pub const NLA_ALIGNTO: u64 = 0x4; pub const NLA_F_NESTED: u64 = 0x8000; pub const NLA_F_NET_BYTEORDER: u64 = 0x4000; pub const NLA_HDRLEN: u64 = 0x4; pub const NLMSG_ALIGNTO: u64 = 0x4; pub const NLMSG_DONE: u64 = 0x3; pub const NLMSG_ERROR: u64 = 0x2; pub const NLMSG_HDRLEN: u64 = 0x10; pub const NLMSG_MIN_TYPE: u64 = 0x10; pub const NLMSG_NOOP: u64 = 0x1; pub const NLMSG_OVERRUN: u64 = 0x4; pub const NLM_F_ACK: u64 = 0x4; pub const NLM_F_APPEND: u64 = 
0x800; pub const NLM_F_ATOMIC: u64 = 0x400; pub const NLM_F_CREATE: u64 = 0x400; pub const NLM_F_DUMP: u64 = 0x300; pub const NLM_F_ECHO: u64 = 0x8; pub const NLM_F_EXCL: u64 = 0x200; pub const NLM_F_MATCH: u64 = 0x200; pub const NLM_F_MULTI: u64 = 0x2; pub const NLM_F_REPLACE: u64 = 0x100; pub const NLM_F_REQUEST: u64 = 0x1; pub const NLM_F_ROOT: u64 = 0x100; pub const O_ACCMODE: u64 = 0x3; pub const O_APPEND: u64 = 0x400; pub const O_ASYNC: u64 = 0x2000; pub const O_CLOEXEC: u64 = 0x80000; pub const O_CREAT: u64 = 0x40; pub const O_DIRECT: u64 = 0x4000; pub const O_DIRECTORY: u64 = 0x10000; pub const O_DSYNC: u64 = 0x1000; pub const O_EXCL: u64 = 0x80; pub const O_FSYNC: u64 = 0x101000; pub const O_LARGEFILE: u64 = 0x0; pub const O_NDELAY: u64 = 0x800; pub const O_NOATIME: u64 = 0x40000; pub const O_NOCTTY: u64 = 0x100; pub const O_NOFOLLOW: u64 = 0x20000; pub const O_NONBLOCK: u64 = 0x800; pub const O_RDONLY: u64 = 0x0; pub const O_RDWR: u64 = 0x2; pub const O_RSYNC: u64 = 0x101000; pub const O_SYNC: u64 = 0x101000; pub const O_TRUNC: u64 = 0x200; pub const O_WRONLY: u64 = 0x1; pub const PACKET_ADD_MEMBERSHIP: u64 = 0x1; pub const PACKET_BROADCAST: u64 = 0x1; pub const PACKET_DROP_MEMBERSHIP: u64 = 0x2; pub const PACKET_FASTROUTE: u64 = 0x6; pub const PACKET_HOST: u64 = 0x0; pub const PACKET_LOOPBACK: u64 = 0x5; pub const PACKET_MR_ALLMULTI: u64 = 0x2; pub const PACKET_MR_MULTICAST: u64 = 0x0; pub const PACKET_MR_PROMISC: u64 = 0x1; pub const PACKET_MULTICAST: u64 = 0x2; pub const PACKET_OTHERHOST: u64 = 0x3; pub const PACKET_OUTGOING: u64 = 0x4; pub const PACKET_RECV_OUTPUT: u64 = 0x3; pub const PACKET_RX_RING: u64 = 0x5; pub const PACKET_STATISTICS: u64 = 0x6; pub const PRIO_PGRP: u64 = 0x1; pub const PRIO_PROCESS: u64 = 0x0; pub const PRIO_USER: u64 = 0x2; pub const PROT_EXEC: u64 = 0x4; pub const PROT_GROWSDOWN: u64 = 0x1000000; pub const PROT_GROWSUP: u64 = 0x2000000; pub const PROT_NONE: u64 = 0x0; pub const PROT_READ: u64 = 0x1; pub const PROT_WRITE: u64 = 0x2; pub const PR_CAPBSET_DROP: u64 = 0x18; pub const PR_CAPBSET_READ: u64 = 0x17; pub const PR_ENDIAN_BIG: u64 = 0x0; pub const PR_ENDIAN_LITTLE: u64 = 0x1; pub const PR_ENDIAN_PPC_LITTLE: u64 = 0x2; pub const PR_FPEMU_NOPRINT: u64 = 0x1; pub const PR_FPEMU_SIGFPE: u64 = 0x2; pub const PR_FP_EXC_ASYNC: u64 = 0x2; pub const PR_FP_EXC_DISABLED: u64 = 0x0; pub const PR_FP_EXC_DIV: u64 = 0x10000; pub const PR_FP_EXC_INV: u64 = 0x100000; pub const PR_FP_EXC_NONRECOV: u64 = 0x1; pub const PR_FP_EXC_OVF: u64 = 0x20000; pub const PR_FP_EXC_PRECISE: u64 = 0x3; pub const PR_FP_EXC_RES: u64 = 0x80000; pub const PR_FP_EXC_SW_ENABLE: u64 = 0x80; pub const PR_FP_EXC_UND: u64 = 0x40000; pub const PR_GET_DUMPABLE: u64 = 0x3; pub const PR_GET_ENDIAN: u64 = 0x13; pub const PR_GET_FPEMU: u64 = 0x9; pub const PR_GET_FPEXC: u64 = 0xb; pub const PR_GET_KEEPCAPS: u64 = 0x7; pub const PR_GET_NAME: u64 = 0x10; pub const PR_GET_PDEATHSIG: u64 = 0x2; pub const PR_GET_SECCOMP: u64 = 0x15; pub const PR_GET_SECUREBITS: u64 = 0x1b; pub const PR_GET_TIMERSLACK: u64 = 0x1e; pub const PR_GET_TIMING: u64 = 0xd; pub const PR_GET_TSC: u64 = 0x19; pub const PR_GET_UNALIGN: u64 = 0x5; pub const PR_MCE_KILL: u64 = 0x21; pub const PR_MCE_KILL_CLEAR: u64 = 0x0; pub const PR_MCE_KILL_DEFAULT: u64 = 0x2; pub const PR_MCE_KILL_EARLY: u64 = 0x1; pub const PR_MCE_KILL_GET: u64 = 0x22; pub const PR_MCE_KILL_LATE: u64 = 0x0; pub const PR_MCE_KILL_SET: u64 = 0x1; pub const PR_SET_DUMPABLE: u64 = 0x4; pub const PR_SET_ENDIAN: u64 = 0x14; pub const PR_SET_FPEMU: u64 = 0xa; pub 
const PR_SET_FPEXC: u64 = 0xc; pub const PR_SET_KEEPCAPS: u64 = 0x8; pub const PR_SET_NAME: u64 = 0xf; pub const PR_SET_PDEATHSIG: u64 = 0x1; pub const PR_SET_PTRACER: u64 = 0x59616d61; pub const PR_SET_SECCOMP: u64 = 0x16; pub const PR_SET_SECUREBITS: u64 = 0x1c; pub const PR_SET_TIMERSLACK: u64 = 0x1d; pub const PR_SET_TIMING: u64 = 0xe; pub const PR_SET_TSC: u64 = 0x1a; pub const PR_SET_UNALIGN: u64 = 0x6; pub const PR_TASK_PERF_EVENTS_DISABLE: u64 = 0x1f; pub const PR_TASK_PERF_EVENTS_ENABLE: u64 = 0x20; pub const PR_TIMING_STATISTICAL: u64 = 0x0; pub const PR_TIMING_TIMESTAMP: u64 = 0x1; pub const PR_TSC_ENABLE: u64 = 0x1; pub const PR_TSC_SIGSEGV: u64 = 0x2; pub const PR_UNALIGN_NOPRINT: u64 = 0x1; pub const PR_UNALIGN_SIGBUS: u64 = 0x2; pub const PTRACE_ARCH_PRCTL: u64 = 0x1e; pub const PTRACE_ATTACH: u64 = 0x10; pub const PTRACE_CONT: u64 = 0x7; pub const PTRACE_DETACH: u64 = 0x11; pub const PTRACE_EVENT_CLONE: u64 = 0x3; pub const PTRACE_EVENT_EXEC: u64 = 0x4; pub const PTRACE_EVENT_EXIT: u64 = 0x6; pub const PTRACE_EVENT_FORK: u64 = 0x1; pub const PTRACE_EVENT_VFORK: u64 = 0x2; pub const PTRACE_EVENT_VFORK_DONE: u64 = 0x5; pub const PTRACE_GETEVENTMSG: u64 = 0x4201; pub const PTRACE_GETFPREGS: u64 = 0xe; pub const PTRACE_GETFPXREGS: u64 = 0x12; pub const PTRACE_GETREGS: u64 = 0xc; pub const PTRACE_GETREGSET: u64 = 0x4204; pub const PTRACE_GETSIGINFO: u64 = 0x4202; pub const PTRACE_GET_THREAD_AREA: u64 = 0x19; pub const PTRACE_KILL: u64 = 0x8; pub const PTRACE_OLDSETOPTIONS: u64 = 0x15; pub const PTRACE_O_MASK: u64 = 0x7f; pub const PTRACE_O_TRACECLONE: u64 = 0x8; pub const PTRACE_O_TRACEEXEC: u64 = 0x10; pub const PTRACE_O_TRACEEXIT: u64 = 0x40; pub const PTRACE_O_TRACEFORK: u64 = 0x2; pub const PTRACE_O_TRACESYSGOOD: u64 = 0x1; pub const PTRACE_O_TRACEVFORK: u64 = 0x4; pub const PTRACE_O_TRACEVFORKDONE: u64 = 0x20; pub const PTRACE_PEEKDATA: u64 = 0x2; pub const PTRACE_PEEKTEXT: u64 = 0x1; pub const PTRACE_PEEKUSR: u64 = 0x3; pub const PTRACE_POKEDATA: u64 = 0x5; pub const PTRACE_POKETEXT: u64 = 0x4; pub const PTRACE_POKEUSR: u64 = 0x6; pub const PTRACE_SETFPREGS: u64 = 0xf; pub const PTRACE_SETFPXREGS: u64 = 0x13; pub const PTRACE_SETOPTIONS: u64 = 0x4200; pub const PTRACE_SETREGS: u64 = 0xd; pub const PTRACE_SETREGSET: u64 = 0x4205; pub const PTRACE_SETSIGINFO: u64 = 0x4203; pub const PTRACE_SET_THREAD_AREA: u64 = 0x1a; pub const PTRACE_SINGLEBLOCK: u64 = 0x21; pub const PTRACE_SINGLESTEP: u64 = 0x9; pub const PTRACE_SYSCALL: u64 = 0x18; pub const PTRACE_SYSEMU: u64 = 0x1f; pub const PTRACE_SYSEMU_SINGLESTEP: u64 = 0x20; pub const PTRACE_TRACEME: u64 = 0x0; pub const RLIMIT_AS: u64 = 0x9; pub const RLIMIT_CORE: u64 = 0x4; pub const RLIMIT_CPU: u64 = 0x0; pub const RLIMIT_DATA: u64 = 0x2; pub const RLIMIT_FSIZE: u64 = 0x1; pub const RLIMIT_NOFILE: u64 = 0x7; pub const RLIMIT_STACK: u64 = 0x3; pub const RLIM_INFINITY: i64 = -0x1; pub const RTAX_ADVMSS: u64 = 0x8; pub const RTAX_CWND: u64 = 0x7; pub const RTAX_FEATURES: u64 = 0xc; pub const RTAX_FEATURE_ALLFRAG: u64 = 0x8; pub const RTAX_FEATURE_ECN: u64 = 0x1; pub const RTAX_FEATURE_SACK: u64 = 0x2; pub const RTAX_FEATURE_TIMESTAMP: u64 = 0x4; pub const RTAX_HOPLIMIT: u64 = 0xa; pub const RTAX_INITCWND: u64 = 0xb; pub const RTAX_INITRWND: u64 = 0xe; pub const RTAX_LOCK: u64 = 0x1; pub const RTAX_MAX: u64 = 0xe; pub const RTAX_MTU: u64 = 0x2; pub const RTAX_REORDERING: u64 = 0x9; pub const RTAX_RTO_MIN: u64 = 0xd; pub const RTAX_RTT: u64 = 0x4; pub const RTAX_RTTVAR: u64 = 0x5; pub const RTAX_SSTHRESH: u64 = 0x6; pub const 
RTAX_UNSPEC: u64 = 0x0; pub const RTAX_WINDOW: u64 = 0x3; pub const RTA_ALIGNTO: u64 = 0x4; pub const RTA_MAX: u64 = 0x10; pub const RTCF_DIRECTSRC: u64 = 0x4000000; pub const RTCF_DOREDIRECT: u64 = 0x1000000; pub const RTCF_LOG: u64 = 0x2000000; pub const RTCF_MASQ: u64 = 0x400000; pub const RTCF_NAT: u64 = 0x800000; pub const RTCF_VALVE: u64 = 0x200000; pub const RTF_ADDRCLASSMASK: u64 = 0xf8000000; pub const RTF_ADDRCONF: u64 = 0x40000; pub const RTF_ALLONLINK: u64 = 0x20000; pub const RTF_BROADCAST: u64 = 0x10000000; pub const RTF_CACHE: u64 = 0x1000000; pub const RTF_DEFAULT: u64 = 0x10000; pub const RTF_DYNAMIC: u64 = 0x10; pub const RTF_FLOW: u64 = 0x2000000; pub const RTF_GATEWAY: u64 = 0x2; pub const RTF_HOST: u64 = 0x4; pub const RTF_INTERFACE: u64 = 0x40000000; pub const RTF_IRTT: u64 = 0x100; pub const RTF_LINKRT: u64 = 0x100000; pub const RTF_LOCAL: u64 = 0x80000000; pub const RTF_MODIFIED: u64 = 0x20; pub const RTF_MSS: u64 = 0x40; pub const RTF_MTU: u64 = 0x40; pub const RTF_MULTICAST: u64 = 0x20000000; pub const RTF_NAT: u64 = 0x8000000; pub const RTF_NOFORWARD: u64 = 0x1000; pub const RTF_NONEXTHOP: u64 = 0x200000; pub const RTF_NOPMTUDISC: u64 = 0x4000; pub const RTF_POLICY: u64 = 0x4000000; pub const RTF_REINSTATE: u64 = 0x8; pub const RTF_REJECT: u64 = 0x200; pub const RTF_STATIC: u64 = 0x400; pub const RTF_THROW: u64 = 0x2000; pub const RTF_UP: u64 = 0x1; pub const RTF_WINDOW: u64 = 0x80; pub const RTF_XRESOLVE: u64 = 0x800; pub const RTM_BASE: u64 = 0x10; pub const RTM_DELACTION: u64 = 0x31; pub const RTM_DELADDR: u64 = 0x15; pub const RTM_DELADDRLABEL: u64 = 0x49; pub const RTM_DELLINK: u64 = 0x11; pub const RTM_DELNEIGH: u64 = 0x1d; pub const RTM_DELQDISC: u64 = 0x25; pub const RTM_DELROUTE: u64 = 0x19; pub const RTM_DELRULE: u64 = 0x21; pub const RTM_DELTCLASS: u64 = 0x29; pub const RTM_DELTFILTER: u64 = 0x2d; pub const RTM_F_CLONED: u64 = 0x200; pub const RTM_F_EQUALIZE: u64 = 0x400; pub const RTM_F_NOTIFY: u64 = 0x100; pub const RTM_F_PREFIX: u64 = 0x800; pub const RTM_GETACTION: u64 = 0x32; pub const RTM_GETADDR: u64 = 0x16; pub const RTM_GETADDRLABEL: u64 = 0x4a; pub const RTM_GETANYCAST: u64 = 0x3e; pub const RTM_GETDCB: u64 = 0x4e; pub const RTM_GETLINK: u64 = 0x12; pub const RTM_GETMULTICAST: u64 = 0x3a; pub const RTM_GETNEIGH: u64 = 0x1e; pub const RTM_GETNEIGHTBL: u64 = 0x42; pub const RTM_GETQDISC: u64 = 0x26; pub const RTM_GETROUTE: u64 = 0x1a; pub const RTM_GETRULE: u64 = 0x22; pub const RTM_GETTCLASS: u64 = 0x2a; pub const RTM_GETTFILTER: u64 = 0x2e; pub const RTM_MAX: u64 = 0x4f; pub const RTM_NEWACTION: u64 = 0x30; pub const RTM_NEWADDR: u64 = 0x14; pub const RTM_NEWADDRLABEL: u64 = 0x48; pub const RTM_NEWLINK: u64 = 0x10; pub const RTM_NEWNDUSEROPT: u64 = 0x44; pub const RTM_NEWNEIGH: u64 = 0x1c; pub const RTM_NEWNEIGHTBL: u64 = 0x40; pub const RTM_NEWPREFIX: u64 = 0x34; pub const RTM_NEWQDISC: u64 = 0x24; pub const RTM_NEWROUTE: u64 = 0x18; pub const RTM_NEWRULE: u64 = 0x20; pub const RTM_NEWTCLASS: u64 = 0x28; pub const RTM_NEWTFILTER: u64 = 0x2c; pub const RTM_NR_FAMILIES: u64 = 0x10; pub const RTM_NR_MSGTYPES: u64 = 0x40; pub const RTM_SETDCB: u64 = 0x4f; pub const RTM_SETLINK: u64 = 0x13; pub const RTM_SETNEIGHTBL: u64 = 0x43; pub const RTNH_ALIGNTO: u64 = 0x4; pub const RTNH_F_DEAD: u64 = 0x1; pub const RTNH_F_ONLINK: u64 = 0x4; pub const RTNH_F_PERVASIVE: u64 = 0x2; pub const RTN_MAX: u64 = 0xb; pub const RTPROT_BIRD: u64 = 0xc; pub const RTPROT_BOOT: u64 = 0x3; pub const RTPROT_DHCP: u64 = 0x10; pub const RTPROT_DNROUTED: u64 = 0xd; pub 
const RTPROT_GATED: u64 = 0x8; pub const RTPROT_KERNEL: u64 = 0x2; pub const RTPROT_MRT: u64 = 0xa; pub const RTPROT_NTK: u64 = 0xf; pub const RTPROT_RA: u64 = 0x9; pub const RTPROT_REDIRECT: u64 = 0x1; pub const RTPROT_STATIC: u64 = 0x4; pub const RTPROT_UNSPEC: u64 = 0x0; pub const RTPROT_XORP: u64 = 0xe; pub const RTPROT_ZEBRA: u64 = 0xb; pub const RT_CLASS_DEFAULT: u64 = 0xfd; pub const RT_CLASS_LOCAL: u64 = 0xff; pub const RT_CLASS_MAIN: u64 = 0xfe; pub const RT_CLASS_MAX: u64 = 0xff; pub const RT_CLASS_UNSPEC: u64 = 0x0; pub const RUSAGE_CHILDREN: i64 = -0x1; pub const RUSAGE_SELF: u64 = 0x0; pub const RUSAGE_THREAD: u64 = 0x1; pub const SCM_CREDENTIALS: u64 = 0x2; pub const SCM_RIGHTS: u64 = 0x1; pub const SCM_TIMESTAMP: u64 = 0x1d; pub const SCM_TIMESTAMPING: u64 = 0x25; pub const SCM_TIMESTAMPNS: u64 = 0x23; pub const SHUT_RD: u64 = 0x0; pub const SHUT_RDWR: u64 = 0x2; pub const SHUT_WR: u64 = 0x1; pub const SIOCADDDLCI: u64 = 0x8980; pub const SIOCADDMULTI: u64 = 0x8931; pub const SIOCADDRT: u64 = 0x890b; pub const SIOCATMARK: u64 = 0x8905; pub const SIOCDARP: u64 = 0x8953; pub const SIOCDELDLCI: u64 = 0x8981; pub const SIOCDELMULTI: u64 = 0x8932; pub const SIOCDELRT: u64 = 0x890c; pub const SIOCDEVPRIVATE: u64 = 0x89f0; pub const SIOCDIFADDR: u64 = 0x8936; pub const SIOCDRARP: u64 = 0x8960; pub const SIOCGARP: u64 = 0x8954; pub const SIOCGIFADDR: u64 = 0x8915; pub const SIOCGIFBR: u64 = 0x8940; pub const SIOCGIFBRDADDR: u64 = 0x8919; pub const SIOCGIFCONF: u64 = 0x8912; pub const SIOCGIFCOUNT: u64 = 0x8938; pub const SIOCGIFDSTADDR: u64 = 0x8917; pub const SIOCGIFENCAP: u64 = 0x8925; pub const SIOCGIFFLAGS: u64 = 0x8913; pub const SIOCGIFHWADDR: u64 = 0x8927; pub const SIOCGIFINDEX: u64 = 0x8933; pub const SIOCGIFMAP: u64 = 0x8970; pub const SIOCGIFMEM: u64 = 0x891f; pub const SIOCGIFMETRIC: u64 = 0x891d; pub const SIOCGIFMTU: u64 = 0x8921; pub const SIOCGIFNAME: u64 = 0x8910; pub const SIOCGIFNETMASK: u64 = 0x891b; pub const SIOCGIFPFLAGS: u64 = 0x8935; pub const SIOCGIFSLAVE: u64 = 0x8929; pub const SIOCGIFTXQLEN: u64 = 0x8942; pub const SIOCGPGRP: u64 = 0x8904; pub const SIOCGRARP: u64 = 0x8961; pub const SIOCGSTAMP: u64 = 0x8906; pub const SIOCGSTAMPNS: u64 = 0x8907; pub const SIOCPROTOPRIVATE: u64 = 0x89e0; pub const SIOCRTMSG: u64 = 0x890d; pub const SIOCSARP: u64 = 0x8955; pub const SIOCSIFADDR: u64 = 0x8916; pub const SIOCSIFBR: u64 = 0x8941; pub const SIOCSIFBRDADDR: u64 = 0x891a; pub const SIOCSIFDSTADDR: u64 = 0x8918; pub const SIOCSIFENCAP: u64 = 0x8926; pub const SIOCSIFFLAGS: u64 = 0x8914; pub const SIOCSIFHWADDR: u64 = 0x8924; pub const SIOCSIFHWBROADCAST: u64 = 0x8937; pub const SIOCSIFLINK: u64 = 0x8911; pub const SIOCSIFMAP: u64 = 0x8971; pub const SIOCSIFMEM: u64 = 0x8920; pub const SIOCSIFMETRIC: u64 = 0x891e; pub const SIOCSIFMTU: u64 = 0x8922; pub const SIOCSIFNAME: u64 = 0x8923; pub const SIOCSIFNETMASK: u64 = 0x891c; pub const SIOCSIFPFLAGS: u64 = 0x8934; pub const SIOCSIFSLAVE: u64 = 0x8930; pub const SIOCSIFTXQLEN: u64 = 0x8943; pub const SIOCSPGRP: u64 = 0x8902; pub const SIOCSRARP: u64 = 0x8962; pub const SOCK_CLOEXEC: u64 = 0x80000; pub const SOCK_DCCP: u64 = 0x6; pub const SOCK_DGRAM: u64 = 0x2; pub const SOCK_NONBLOCK: u64 = 0x800; pub const SOCK_PACKET: u64 = 0xa; pub const SOCK_RAW: u64 = 0x3; pub const SOCK_RDM: u64 = 0x4; pub const SOCK_SEQPACKET: u64 = 0x5; pub const SOCK_STREAM: u64 = 0x1; pub const SOL_AAL: u64 = 0x109; pub const SOL_ATM: u64 = 0x108; pub const SOL_DECNET: u64 = 0x105; pub const SOL_ICMPV6: u64 = 0x3a; pub const SOL_IP: u64 
= 0x0; pub const SOL_IPV6: u64 = 0x29; pub const SOL_IRDA: u64 = 0x10a; pub const SOL_PACKET: u64 = 0x107; pub const SOL_RAW: u64 = 0xff; pub const SOL_SOCKET: u64 = 0x1; pub const SOL_TCP: u64 = 0x6; pub const SOL_X25: u64 = 0x106; pub const SOMAXCONN: u64 = 0x80; pub const SO_ACCEPTCONN: u64 = 0x1e; pub const SO_ATTACH_FILTER: u64 = 0x1a; pub const SO_BINDTODEVICE: u64 = 0x19; pub const SO_BROADCAST: u64 = 0x6; pub const SO_BSDCOMPAT: u64 = 0xe; pub const SO_REUSEPORT: u64 = 0xf; pub const SO_DEBUG: u64 = 0x1; pub const SO_DETACH_FILTER: u64 = 0x1b; pub const SO_DOMAIN: u64 = 0x27; pub const SO_DONTROUTE: u64 = 0x5; pub const SO_ERROR: u64 = 0x4; pub const SO_KEEPALIVE: u64 = 0x9; pub const SO_LINGER: u64 = 0xd; pub const SO_MARK: u64 = 0x24; pub const SO_NO_CHECK: u64 = 0xb; pub const SO_OOBINLINE: u64 = 0xa; pub const SO_PASSCRED: u64 = 0x10; pub const SO_PASSSEC: u64 = 0x22; pub const SO_PEERCRED: u64 = 0x11; pub const SO_PEERNAME: u64 = 0x1c; pub const SO_PEERSEC: u64 = 0x1f; pub const SO_PRIORITY: u64 = 0xc; pub const SO_PROTOCOL: u64 = 0x26; pub const SO_RCVBUF: u64 = 0x8; pub const SO_RCVBUFFORCE: u64 = 0x21; pub const SO_RCVLOWAT: u64 = 0x12; pub const SO_RCVTIMEO: u64 = 0x14; pub const SO_REUSEADDR: u64 = 0x2; pub const SO_RXQ_OVFL: u64 = 0x28; pub const SO_SECURITY_AUTHENTICATION: u64 = 0x16; pub const SO_SECURITY_ENCRYPTION_NETWORK: u64 = 0x18; pub const SO_SECURITY_ENCRYPTION_TRANSPORT: u64 = 0x17; pub const SO_SNDBUF: u64 = 0x7; pub const SO_SNDBUFFORCE: u64 = 0x20; pub const SO_SNDLOWAT: u64 = 0x13; pub const SO_SNDTIMEO: u64 = 0x15; pub const SO_TIMESTAMP: u64 = 0x1d; pub const SO_TIMESTAMPING: u64 = 0x25; pub const SO_TIMESTAMPNS: u64 = 0x23; pub const SO_TYPE: u64 = 0x3; pub const S_BLKSIZE: u64 = 0x200; pub const S_IEXEC: u64 = 0x40; pub const S_IFBLK: u64 = 0x6000; pub const S_IFCHR: u64 = 0x2000; pub const S_IFDIR: u64 = 0x4000; pub const S_IFIFO: u64 = 0x1000; pub const S_IFLNK: u64 = 0xa000; pub const S_IFMT: u64 = 0xf000; pub const S_IFREG: u64 = 0x8000; pub const S_IFSOCK: u64 = 0xc000; pub const S_IREAD: u64 = 0x100; pub const S_IRGRP: u64 = 0x20; pub const S_IROTH: u64 = 0x4; pub const S_IRUSR: u64 = 0x100; pub const S_IRWXG: u64 = 0x38; pub const S_IRWXO: u64 = 0x7; pub const S_IRWXU: u64 = 0x1c0; pub const S_ISGID: u64 = 0x400; pub const S_ISUID: u64 = 0x800; pub const S_ISVTX: u64 = 0x200; pub const S_IWGRP: u64 = 0x10; pub const S_IWOTH: u64 = 0x2; pub const S_IWRITE: u64 = 0x80; pub const S_IWUSR: u64 = 0x80; pub const S_IXGRP: u64 = 0x8; pub const S_IXOTH: u64 = 0x1; pub const S_IXUSR: u64 = 0x40; pub const TCIFLUSH: u64 = 0x0; pub const TCIOFLUSH: u64 = 0x2; pub const TCOFLUSH: u64 = 0x1; pub const TCP_CONGESTION: u64 = 0xd; pub const TCP_CORK: u64 = 0x3; pub const TCP_DEFER_ACCEPT: u64 = 0x9; pub const TCP_INFO: u64 = 0xb; pub const TCP_KEEPCNT: u64 = 0x6; pub const TCP_KEEPIDLE: u64 = 0x4; pub const TCP_KEEPINTVL: u64 = 0x5; pub const TCP_LINGER2: u64 = 0x8; pub const TCP_MAXSEG: u64 = 0x2; pub const TCP_MAXWIN: u64 = 0xffff; pub const TCP_MAX_WINSHIFT: u64 = 0xe; pub const TCP_MD5SIG: u64 = 0xe; pub const TCP_MD5SIG_MAXKEYLEN: u64 = 0x50; pub const TCP_MSS: u64 = 0x200; pub const TCP_NODELAY: u64 = 0x1; pub const TCP_QUICKACK: u64 = 0xc; pub const TCP_SYNCNT: u64 = 0x7; pub const TCP_WINDOW_CLAMP: u64 = 0xa; pub const TIOCCBRK: u64 = 0x5428; pub const TIOCCONS: u64 = 0x541d; pub const TIOCEXCL: u64 = 0x540c; pub const TIOCGDEV: u64 = 0x80045432; pub const TIOCGETD: u64 = 0x5424; pub const TIOCGICOUNT: u64 = 0x545d; pub const TIOCGLCKTRMIOS: u64 = 
0x5456; pub const TIOCGPGRP: u64 = 0x540f; pub const TIOCGPTN: u64 = 0x80045430; pub const TIOCGRS485: u64 = 0x542e; pub const TIOCGSERIAL: u64 = 0x541e; pub const TIOCGSID: u64 = 0x5429; pub const TIOCGSOFTCAR: u64 = 0x5419; pub const TIOCGWINSZ: u64 = 0x5413; pub const TIOCINQ: u64 = 0x541b; pub const TIOCLINUX: u64 = 0x541c; pub const TIOCMBIC: u64 = 0x5417; pub const TIOCMBIS: u64 = 0x5416; pub const TIOCMGET: u64 = 0x5415; pub const TIOCMIWAIT: u64 = 0x545c; pub const TIOCMSET: u64 = 0x5418; pub const TIOCM_CAR: u64 = 0x40; pub const TIOCM_CD: u64 = 0x40; pub const TIOCM_CTS: u64 = 0x20; pub const TIOCM_DSR: u64 = 0x100; pub const TIOCM_DTR: u64 = 0x2; pub const TIOCM_LE: u64 = 0x1; pub const TIOCM_RI: u64 = 0x80; pub const TIOCM_RNG: u64 = 0x80; pub const TIOCM_RTS: u64 = 0x4; pub const TIOCM_SR: u64 = 0x10; pub const TIOCM_ST: u64 = 0x8; pub const TIOCNOTTY: u64 = 0x5422; pub const TIOCNXCL: u64 = 0x540d; pub const TIOCOUTQ: u64 = 0x5411; pub const TIOCPKT: u64 = 0x5420; pub const TIOCPKT_DATA: u64 = 0x0; pub const TIOCPKT_DOSTOP: u64 = 0x20; pub const TIOCPKT_FLUSHREAD: u64 = 0x1; pub const TIOCPKT_FLUSHWRITE: u64 = 0x2; pub const TIOCPKT_IOCTL: u64 = 0x40; pub const TIOCPKT_NOSTOP: u64 = 0x10; pub const TIOCPKT_START: u64 = 0x8; pub const TIOCPKT_STOP: u64 = 0x4; pub const TIOCSBRK: u64 = 0x5427; pub const TIOCSCTTY: u64 = 0x540e; pub const TIOCSERCONFIG: u64 = 0x5453; pub const TIOCSERGETLSR: u64 = 0x5459; pub const TIOCSERGETMULTI: u64 = 0x545a; pub const TIOCSERGSTRUCT: u64 = 0x5458; pub const TIOCSERGWILD: u64 = 0x5454; pub const TIOCSERSETMULTI: u64 = 0x545b; pub const TIOCSERSWILD: u64 = 0x5455; pub const TIOCSER_TEMT: u64 = 0x1; pub const TIOCSETD: u64 = 0x5423; pub const TIOCSIG: u64 = 0x40045436; pub const TIOCSLCKTRMIOS: u64 = 0x5457; pub const TIOCSPGRP: u64 = 0x5410; pub const TIOCSPTLCK: u64 = 0x40045431; pub const TIOCSRS485: u64 = 0x542f; pub const TIOCSSERIAL: u64 = 0x541f; pub const TIOCSSOFTCAR: u64 = 0x541a; pub const TIOCSTI: u64 = 0x5412; pub const TIOCSWINSZ: u64 = 0x5414; pub const TUNATTACHFILTER: u64 = 0x401054d5; pub const TUNDETACHFILTER: u64 = 0x401054d6; pub const TUNGETFEATURES: u64 = 0x800454cf; pub const TUNGETIFF: u64 = 0x800454d2; pub const TUNGETSNDBUF: u64 = 0x800454d3; pub const TUNGETVNETHDRSZ: u64 = 0x800454d7; pub const TUNSETDEBUG: u64 = 0x400454c9; pub const TUNSETGROUP: u64 = 0x400454ce; pub const TUNSETIFF: u64 = 0x400454ca; pub const TUNSETLINK: u64 = 0x400454cd; pub const TUNSETNOCSUM: u64 = 0x400454c8; pub const TUNSETOFFLOAD: u64 = 0x400454d0; pub const TUNSETOWNER: u64 = 0x400454cc; pub const TUNSETPERSIST: u64 = 0x400454cb; pub const TUNSETSNDBUF: u64 = 0x400454d4; pub const TUNSETTXFILTER: u64 = 0x400454d1; pub const TUNSETVNETHDRSZ: u64 = 0x400454d8; pub const WALL: u64 = 0x40000000; pub const WCLONE: u64 = 0x80000000; pub const WCONTINUED: u64 = 0x8; pub const WEXITED: u64 = 0x4; pub const WNOHANG: u64 = 0x1; pub const WNOTHREAD: u64 = 0x20000000; pub const WNOWAIT: u64 = 0x1000000; pub const WORDSIZE: u64 = 0x40; pub const WSTOPPED: u64 = 0x2; pub const WUNTRACED: u64 = 0x2; } pub struct Cmd {} impl Cmd { pub const F_DUPFD: i32 = 0; pub const F_GETFD: i32 = 1; pub const F_SETFD: i32 = 2; pub const F_GETFL: i32 = 3; pub const F_SETFL: i32 = 4; pub const F_GETLK: i32 = 5; pub const F_SETLK: i32 = 6; pub const F_SETLKW: i32 = 7; pub const F_SETOWN: i32 = 8; pub const F_GETOWN: i32 = 9; pub const F_SETOWN_EX: i32 = 15; pub const F_GETOWN_EX: i32 = 16; pub const F_DUPFD_CLOEXEC: i32 = 1024 + 6; pub const F_SETPIPE_SZ: i32 = 1024 + 
7; pub const F_GETPIPE_SZ: i32 = 1024 + 8; pub const F_ADD_SEALS: i32 = 1024 + 9; pub const F_GET_SEALS: i32 = 1024 + 10; } #[derive(Debug, Clone, Copy, Default)] pub struct PermMask { pub read: bool, pub write: bool, pub execute: bool, } impl PermMask { pub fn FromFlags(mask: u32) -> Self { let mut res = PermMask::default(); if mask & Flags::O_TRUNC as u32 != 0 { res.write = true; } let v = mask & Flags::O_ACCMODE as u32; if v == Flags::O_WRONLY as u32 { res.write = true; } else if v == Flags::O_RDWR as u32 { res.write = true; res.read = true; } else if v == Flags::O_RDONLY as u32 { res.read = true; } return res; } pub fn FromMode(mode: FileMode) -> Self { let mut res = Self::default(); res.read = mode.OtherRead(); res.write = mode.OtherWrite(); res.execute = mode.OtherExec(); return res; } pub fn OnlyRead(&self) -> bool { return self.read && !self.write && !self.execute; } pub fn Mode(&self) -> u32 { let mut ret = 0; if self.read { ret |= LibcConst::S_IROTH; } if self.write { ret |= LibcConst::S_IWOTH; } if self.execute { ret |= LibcConst::S_IXOTH; } return ret as u32; } pub fn SupersetOf(&self, other: &Self) -> bool { if !self.read && other.read { return false; } if !self.write && other.write { return false; } if !self.execute && other.execute { return false; } return true; } } #[derive(Debug, Clone, Copy)] pub struct Flags(pub i32); impl Flags { pub const O_ACCMODE: i32 = 0o00000003; //0x00000003; pub const O_RDONLY: i32 = 0o00000000; //0x00000000; pub const O_WRONLY: i32 = 0o00000001; //0x00000001; pub const O_RDWR: i32 = 0o00000002; //0x00000002; pub const O_CREAT: i32 = 0o00000100; //0x00000040; pub const O_EXCL: i32 = 0o00000200; //0x00000080; pub const O_NOCTTY: i32 = 0o00000400; //0x00000100; pub const O_TRUNC: i32 = 0o00001000; //0x00000200; pub const O_APPEND: i32 = 0o00002000; //0x00000400; pub const O_NONBLOCK: i32 = 0o00004000; //0x00000800; pub const O_DSYNC: i32 = 0o00010000; //0x00001000; pub const O_ASYNC: i32 = 0o00020000; //0x00002000; pub const O_DIRECT: i32 = 0o00040000; //0x00004000; pub const O_LARGEFILE: i32 = 0o00100000; //0x00008000; pub const O_DIRECTORY: i32 = 0o00200000; //0x00010000; pub const O_NOFOLLOW: i32 = 0o00400000; //0x00020000; pub const O_NOATIME: i32 = 0o01000000; //0x00040000; pub const O_CLOEXEC: i32 = 0o02000000; //0x00080000; pub const O_SYNC: i32 = 0o04000000; pub const O_PATH: i32 = 0o010000000; pub const O_TMPFILE: i32 = 0o020000000; /* high priority request, poll if possible */ pub const RWF_HIPRI :i32 = 0x00000001; /* per-IO O_DSYNC */ pub const RWF_DSYNC :i32 = 0x00000002; /* per-IO O_SYNC */ pub const RWF_SYNC :i32 = 0x00000004; /* per-IO, return -EAGAIN if operation would block */ pub const RWF_NOWAIT :i32 = 0x00000008; /* per-IO O_APPEND */ pub const RWF_APPEND :i32 = 0x00000010; //pub fn Direct(&self) -> bool { // return self.0 & Self::O_DIRECT != 0; //} pub fn Sync(&self) -> bool { return self.0 & Self::O_SYNC != 0; } pub fn NonBlocking(&self) -> bool { return self.0 & Self::O_NONBLOCK != 0; } pub fn Read(&self) -> bool { return self.0 & Self::O_ACCMODE != Self::O_WRONLY; } pub fn Write(&self) -> bool { return self.0 & Self::O_ACCMODE != Self::O_RDONLY; } pub fn Append(&self) -> bool { return self.0 & Self::O_APPEND != 0; } pub fn CloseOnExec(&self) -> bool { return self.0 & Self::O_CLOEXEC != 0; } //pub fn Directory(&self) -> bool { // return self.0 & Self::O_DIRECTORY != 0; //} //pub fn Async(&self) -> bool { // return self.0 & Self::O_ASYNC != 0; //} //pub fn LargeFile(&self) -> bool { // return self.0 & Self::O_LARGEFILE != 0; 
//} pub fn ToPermission(&self) -> PermMask { let mut res = PermMask { read: false, write: false, execute: false }; if self.0 & Self::O_TRUNC != 0 { res.write = true; } let access = self.0 & Self::O_ACCMODE; if access == Self::O_WRONLY { res.write = true; } else if access == Self::O_RDWR { res.write = true; res.read = true; } else if access == Self::O_RDONLY { // res.read = true } else { panic!("..."); } return res; } } pub struct MsgType {} //sendmsg/recvmsg flags impl MsgType { pub const MSG_OOB: i32 = 0x1; pub const MSG_PEEK: i32 = 0x2; pub const MSG_DONTROUTE: i32 = 0x4; pub const MSG_TRYHARD: i32 = 0x4; pub const MSG_CTRUNC: i32 = 0x8; pub const MSG_PROBE: i32 = 0x10; pub const MSG_TRUNC: i32 = 0x20; pub const MSG_DONTWAIT: i32 = 0x40; pub const MSG_EOR: i32 = 0x80; pub const MSG_WAITALL: i32 = 0x100; pub const MSG_FIN: i32 = 0x200; pub const MSG_EOF: i32 = Self::MSG_FIN; pub const MSG_SYN: i32 = 0x400; pub const MSG_CONFIRM: i32 = 0x800; pub const MSG_RST: i32 = 0x1000; pub const MSG_ERRQUEUE: i32 = 0x2000; pub const MSG_NOSIGNAL: i32 = 0x4000; pub const MSG_MORE: i32 = 0x8000; pub const MSG_WAITFORONE: i32 = 0x10000; pub const MSG_SENDPAGE_NOTLAST: i32 = 0x20000; pub const MSG_REINJECT: i32 = 0x8000000; pub const MSG_ZEROCOPY: i32 = 0x4000000; pub const MSG_FASTOPEN: i32 = 0x20000000; pub const MSG_CMSG_CLOEXEC: i32 = 0x40000000; pub const BASE_RECV_FLAGS: i32 = Self::MSG_OOB | Self::MSG_DONTROUTE | Self::MSG_DONTWAIT | Self::MSG_NOSIGNAL | Self::MSG_WAITALL | Self::MSG_TRUNC | Self::MSG_CTRUNC; } pub struct AFType {} //Address Family impl AFType { pub const AF_UNSPEC: i32 = 0; pub const AF_UNIX: i32 = 1; pub const AF_INET: i32 = 2; pub const AF_AX25: i32 = 3; pub const AF_IPX: i32 = 4; pub const AF_APPLETALK: i32 = 5; pub const AF_NETROM: i32 = 6; pub const AF_BRIDGE: i32 = 7; pub const AF_ATMPVC: i32 = 8; pub const AF_X25: i32 = 9; pub const AF_INET6: i32 = 10; pub const AF_ROSE: i32 = 11; pub const AF_DECNET: i32 = 12; //AF_DECnet pub const AF_NETBEUI: i32 = 13; pub const AF_SECURITY: i32 = 14; pub const AF_KEY: i32 = 15; pub const AF_NETLINK: i32 = 16; pub const AF_PACKET: i32 = 17; pub const AF_ASH: i32 = 18; pub const AF_ECONET: i32 = 19; pub const AF_ATMSVC: i32 = 20; pub const AF_RDS: i32 = 21; pub const AF_SNA: i32 = 22; pub const AF_IRDA: i32 = 23; pub const AF_PPPOX: i32 = 24; pub const AF_WANPIPE: i32 = 25; pub const AF_LLC: i32 = 26; pub const AF_IB: i32 = 27; pub const AF_MPLS: i32 = 28; pub const AF_CAN: i32 = 29; pub const AF_TIPC: i32 = 30; pub const AF_BLUETOOTH: i32 = 31; pub const AF_IUCV: i32 = 32; pub const AF_RXRPC: i32 = 33; pub const AF_ISDN: i32 = 34; pub const AF_PHONET: i32 = 35; pub const AF_IEEE802154: i32 = 36; pub const AF_CAIF: i32 = 37; pub const AF_ALG: i32 = 38; pub const AF_NFC: i32 = 39; pub const AF_VSOCK: i32 = 40; } pub struct SocketType {} impl SocketType { pub const SOCK_STREAM: i32 = 1; pub const SOCK_DGRAM: i32 = 2; pub const SOCK_RAW: i32 = 3; pub const SOCK_RDM: i32 = 4; pub const SOCK_SEQPACKET: i32 = 5; pub const SOCK_DCCP: i32 = 6; pub const SOCK_PACKET: i32 = 10; pub const SOCK_TYPE_MASK: i32 = 0xf; } pub struct SocketFlags {} impl SocketFlags { pub const SOCK_CLOEXEC: i32 = Flags::O_CLOEXEC; pub const SOCK_NONBLOCK: i32 = Flags::O_NONBLOCK; } pub const UIO_MAXIOV: usize = 1024; #[repr(C)] #[derive(Clone, Default, Debug, Copy, Eq, PartialEq)] pub struct IoVec { pub start: u64, pub len: usize, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct MsgHdr { pub msgName: u64, pub nameLen: u32, pub iov: u64, //*mut IoVec, pub 
iovLen: usize, pub msgControl: u64, pub msgControlLen: usize, pub msgFlags: i32, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct MMsgHdr { pub msgHdr: MsgHdr, pub msgLen: u32, } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct PollFd { pub fd: i32, pub events: i16, pub revents: i16, } pub const AT_FDCWD: i32 = -100; pub struct PollConst {} impl PollConst { pub const POLLIN: i32 = 0x0001; pub const POLLPRI: i32 = 0x0002; pub const POLLOUT: i32 = 0x0004; pub const POLLERR: i32 = 0x0008; pub const POLLHUP: i32 = 0x0010; pub const POLLNVAL: i32 = 0x0020; pub const POLLRDNORM: i32 = 0x0040; pub const POLLRDBAND: i32 = 0x0080; pub const POLLWRNORM: i32 = 0x0100; pub const POLLWRBAND: i32 = 0x0200; pub const POLLMSG: i32 = 0x0400; pub const POLLREMOVE: i32 = 0x1000; pub const POLLRDHUP: i32 = 0x2000; pub const POLLFREE: i32 = 0x4000; pub const POLL_BUSY_LOOP: i32 = 0x8000; } #[derive(Clone, Copy, Default, Debug)] #[repr(C)] pub struct iocb { pub aio_data: u64, pub aio_key: u32, pub aio_reserved1: i32, pub aio_lio_opcode: i16, pub aio_reqprio: i16, pub aio_fildes: u32, pub aio_buf: u64, pub aio_nbytes: u64, pub aio_offset: i64, pub aio_reserved2: u64, pub aio_flags: u32, pub aio_resfd: u32, } pub struct EflagsDef {} impl EflagsDef { pub const EFLAGS_CF: u64 = 1 << 0; // pub const EFLAGS_PF is the mask for the parity flag. pub const EFLAGS_PF: u64 = 1 << 2; // pub const EFLAGS_AF is the mask for the auxiliary carry flag. pub const EFLAGS_AF: u64 = 1 << 4; // pub const EFLAGS_ZF is the mask for the zero flag. pub const EFLAGS_ZF: u64 = 1 << 6; // pub const EFLAGS_SF is the mask for the sign flag. pub const EFLAGS_SF: u64 = 1 << 7; // pub const EFLAGS_TF is the mask for the trap flag. pub const EFLAGS_TF: u64 = 1 << 8; // pub const EFLAGS_IF is the mask for the interrupt flag. pub const EFLAGS_IF: u64 = 1 << 9; // pub const EFLAGS_DF is the mask for the direction flag. pub const EFLAGS_DF: u64 = 1 << 10; // pub const EFLAGS_OF is the mask for the overflow flag. pub const EFLAGS_OF: u64 = 1 << 11; // pub const EFLAGS_IOPL is the mask for the I/O privilege level. pub const EFLAGS_IOPL: u64 = 3 << 12; // pub const EFLAGS_NT is the mask for the nested task bit. pub const EFLAGS_NT: u64 = 1 << 14; // pub const EFLAGS_RF is the mask for the resume flag. pub const EFLAGS_RF: u64 = 1 << 16; // pub const EFLAGS_VM is the mask for the virtual mode bit. pub const EFLAGS_VM: u64 = 1 << 17; // pub const EFLAGS_AC is the mask for the alignment check / access control bit. pub const EFLAGS_AC: u64 = 1 << 18; // pub const EFLAGS_VIF is the mask for the virtual interrupt flag. pub const EFLAGS_VIF: u64 = 1 << 19; // pub const EFLAGS_VIP is the mask for the virtual interrupt pending bit. pub const EFLAGS_VIP: u64 = 1 << 20; // pub const EFLAGS_ID is the mask for the CPUID detection bit. pub const EFLAGS_ID: u64 = 1 << 21; // eflagsPtraceMutable is the mask for the set of EFLAGS that may be // changed by ptrace(PTRACE_SETREGS). eflagsPtraceMutable is analogous to // Linux's FLAG_MASK. pub const EFLAGS_PTRACE_MUTABLE: u64 = Self::EFLAGS_CF | Self::EFLAGS_PF | Self::EFLAGS_AF | Self::EFLAGS_ZF | Self::EFLAGS_SF | Self::EFLAGS_TF | Self::EFLAGS_DF | Self::EFLAGS_OF | Self::EFLAGS_RF | Self::EFLAGS_AC | Self::EFLAGS_NT; // EFLAGS_Restorable is the mask for the set of EFLAGS_ that may be changed by // SignalReturn. EFLAGS_Restorable is analogous to Linux's FIX_EFLAGS_. 
pub const EFLAGS_RESTOREABLE: u64 = Self::EFLAGS_AC | Self::EFLAGS_OF | Self::EFLAGS_DF | Self::EFLAGS_TF | Self::EFLAGS_SF | Self::EFLAGS_ZF | Self::EFLAGS_AF | Self::EFLAGS_PF | Self::EFLAGS_CF | Self::EFLAGS_RF; } pub struct IoCtlCmd {} impl IoCtlCmd { pub const TCGETS: u64 = 0x00005401; pub const TCSETS: u64 = 0x00005402; pub const TCSETSW: u64 = 0x00005403; pub const TCSETSF: u64 = 0x00005404; pub const TCSBRK: u64 = 0x00005409; pub const TIOCEXCL: u64 = 0x0000540c; pub const TIOCNXCL: u64 = 0x0000540d; pub const TIOCSCTTY: u64 = 0x0000540e; pub const TIOCGPGRP: u64 = 0x0000540f; pub const TIOCSPGRP: u64 = 0x00005410; pub const TIOCOUTQ: u64 = 0x00005411; pub const TIOCSTI: u64 = 0x00005412; pub const TIOCGWINSZ: u64 = 0x00005413; pub const TIOCSWINSZ: u64 = 0x00005414; pub const TIOCMGET: u64 = 0x00005415; pub const TIOCMBIS: u64 = 0x00005416; pub const TIOCMBIC: u64 = 0x00005417; pub const TIOCMSET: u64 = 0x00005418; pub const TIOCINQ: u64 = 0x0000541b; pub const FIONREAD: u64 = Self::TIOCINQ; pub const FIONBIO: u64 = 0x00005421; pub const TIOCSETD: u64 = 0x00005423; pub const TIOCNOTTY: u64 = 0x00005422; pub const TIOCGETD: u64 = 0x00005424; pub const TCSBRKP: u64 = 0x00005425; pub const TIOCSBRK: u64 = 0x00005427; pub const TIOCCBRK: u64 = 0x00005428; pub const TIOCGSID: u64 = 0x00005429; pub const TIOCGPTN: u64 = 0x80045430; pub const TIOCSPTLCK: u64 = 0x40045431; pub const TIOCGDEV: u64 = 0x80045432; pub const TIOCVHANGUP: u64 = 0x00005437; pub const TCFLSH: u64 = 0x0000540b; pub const TIOCCONS: u64 = 0x0000541d; pub const TIOCSSERIAL: u64 = 0x0000541f; pub const TIOCGEXCL: u64 = 0x80045440; pub const TIOCGPTPEER: u64 = 0x80045441; pub const TIOCGICOUNT: u64 = 0x0000545d; pub const FIONCLEX: u64 = 0x00005450; pub const FIOCLEX: u64 = 0x00005451; pub const FIOASYNC: u64 = 0x00005452; pub const FIOSETOWN: u64 = 0x00008901; pub const SIOCSPGRP: u64 = 0x00008902; pub const FIOGETOWN: u64 = 0x00008903; pub const SIOCGPGRP: u64 = 0x00008904; } #[derive(Clone, PartialEq, Copy, Debug)] pub enum ProcessState { Running, Exiting, Zombie, Stop, } impl Default for ProcessState { fn default() -> Self { ProcessState::Running } } #[derive(Clone, PartialEq, Copy, Debug)] pub enum ThreadState { Runable, Interruptable, UnInterruptable, Interrupted, Exiting, Zombie, Stoping, Stop, } impl Default for ThreadState { fn default() -> Self { ThreadState::Runable } } pub struct TaskEvent {} impl TaskEvent { pub const EXIT: u32 = 1 << 0; pub const CHILD_GROUP_STOP: u32 = 1 << 1; pub const TRACE_STOP: u32 = 1 << 2; pub const GROUP_CONTINUE: u32 = 1 << 3; } pub struct WaitStatus(pub u32); impl WaitStatus { pub const MASK: u32 = 0x7F; pub const CORE: u32 = 0x80; pub const EXITED: u32 = 0x00; pub const STOPPED: u32 = 0x7F; pub const SHIFT: u32 = 8; pub fn Exited(&self) -> bool { return self.0 & Self::MASK == Self::EXITED; } pub fn Signaled(&self) -> bool { return self.0 & Self::MASK != Self::STOPPED && self.0 & Self::MASK != Self::EXITED; } pub fn Stopped(&self) -> bool { return self.0 & 0xff == Self::STOPPED; } pub fn Continued(&self) -> bool { return self.0 == 0xFFFF; } pub fn CoreDump(&self) -> bool { return self.Signaled() && self.0 & Self::CORE != 0; } pub fn ExitStatus(&self) -> i32 { if !self.Exited() { return -1; } return (self.0 >> Self::SHIFT) as i32 & 0xff; } pub fn Signal(&self) -> i32 { if !self.Signaled() { return -1; } return (self.0 & Self::MASK) as i32; } pub fn
(&self) -> i32 { if !self.Stopped() { return -1; } return (self.0 >> Self::SHIFT) as i32 & 0xff; } pub fn TrapCause(&self) -> i32 { if self.StopSignal() != Signal::SIGTRAP { return -1; } return (self.0 >> Self::SHIFT) as i32 >> 8; } } #[derive(Clone, Copy)] pub struct WaitOption {} impl WaitOption { // Options for waitpid(2), wait4(2), and/or waitid(2), from // include/uapi/linux/wait.h. pub const WNOHANG: u32 = 0x00000001; pub const WUNTRACED: u32 = 0x00000002; pub const WSTOPPED: u32 = Self::WUNTRACED; pub const WEXITED: u32 = 0x00000004; pub const WCONTINUED: u32 = 0x00000008; pub const WNOWAIT: u32 = 0x01000000; pub const WNOTHREAD: u32 = 0x20000000; pub const WALL: u32 = 0x40000000; pub const WCLONE: u32 = 0x80000000; } pub struct IDType {} impl IDType { pub const P_ALL: i32 = 0x0; pub const P_PID: i32 = 0x1; pub const P_PGID: i32 = 0x2; } pub struct MAdviseOp {} impl MAdviseOp { pub const MADV_NORMAL: i32 = 0; pub const MADV_RANDOM: i32 = 1; pub const MADV_SEQUENTIAL: i32 = 2; pub const MADV_WILLNEED: i32 = 3; pub const MADV_DONTNEED: i32 = 4; pub const MADV_REMOVE: i32 = 9; pub const MADV_DONTFORK: i32 = 10; pub const MADV_DOFORK: i32 = 11; pub const MADV_MERGEABLE: i32 = 12; pub const MADV_UNMERGEABLE: i32 = 13; pub const MADV_HUGEPAGE: i32 = 14; pub const MADV_NOHUGEPAGE: i32 = 15; pub const MADV_DONTDUMP: i32 = 16; pub const MADV_DODUMP: i32 = 17; pub const MADV_HWPOISON: i32 = 100; pub const MADV_SOFT_OFFLINE: i32 = 101; pub const MADV_NOMAJFAULT: i32 = 200; pub const MADV_DONTCHGME: i32 = 201; } pub struct CloneOp {} impl CloneOp { pub const CLONE_CHILD_CLEARTID: i32 = 0x200000; pub const CLONE_CHILD_SETTID: i32 = 0x1000000; pub const CLONE_DETACHED: i32 = 0x400000; pub const CLONE_FILES: i32 = 0x400; pub const CLONE_FS: i32 = 0x200; pub const CLONE_IO: u64 = 0x80000000; pub const CLONE_NEWIPC: i32 = 0x8000000; pub const CLONE_NEWNET: i32 = 0x40000000; pub const CLONE_NEWNS: i32 = 0x20000; pub const CLONE_NEWPID: i32 = 0x20000000; pub const CLONE_NEWUSER: i32 = 0x10000000; pub const CLONE_NEWUTS: i32 = 0x4000000; pub const CLONE_PARENT: i32 = 0x8000; pub const CLONE_PARENT_SETTID: i32 = 0x100000; pub const CLONE_PTRACE: i32 = 0x2000; pub const CLONE_SETTLS: i32 = 0x80000; pub const CLONE_SIGHAND: i32 = 0x800; pub const CLONE_SYSVSEM: i32 = 0x40000; pub const CLONE_THREAD: i32 = 0x10000; pub const CLONE_UNTRACED: i32 = 0x800000; pub const CLONE_VFORK: i32 = 0x4000; pub const CLONE_VM: i32 = 0x100; } pub struct FutexOp {} impl FutexOp { pub const FUTEX_WAIT: u64 = 0; pub const FUTEX_WAKE: u64 = 1; pub const FUTEX_FD: u64 = 2; pub const FUTEX_REQUEUE: u64 = 3; pub const FUTEX_CMP_REQUEUE: u64 = 4; pub const FUTEX_WAKE_OP: u64 = 5; pub const FUTEX_LOCK_PI: u64 = 6; pub const FUTEX_UNLOCK_PI: u64 = 7; pub const FUTEX_TRYLOCK_PI: u64 = 8; pub const FUTEX_WAIT_BITSET: u64 = 9; pub const FUTEX_WAKE_BITSET: u64 = 10; pub const FUTEX_WAIT_REQUEUE_PI: u64 = 11; pub const FUTEX_CMP_REQUEUE_PI: u64 = 12; pub const FUTEX_PRIVATE_FLAG: u64 = 128; pub const FUTEX_CLOCK_REALTIME: u64 = 256; } pub struct FutexWakeOpOption {} impl FutexWakeOpOption { pub const FUTEX_OP_SET: u64 = 0; pub const FUTEX_OP_ADD: u64 = 1; pub const FUTEX_OP_OR: u64 = 2; pub const FUTEX_OP_ANDN: u64 = 3; pub const FUTEX_OP_XOR: u64 = 4; pub const FUTEX_OP_OPARG_SHIFT: u64 = 8; pub const FUTEX_OP_CMP_EQ: u64 = 0; pub const FUTEX_OP_CMP_NE: u64 = 1; pub const FUTEX_OP_CMP_LT: u64 = 2; pub const FUTEX_OP_CMP_LE: u64 = 3; pub const FUTEX_OP_CMP_GT: u64 = 4; pub const FUTEX_OP_CMP_GE: u64 = 5; } pub struct SeekWhence {} impl 
SeekWhence {
    pub const SEEK_SET: i32 = 0;
    pub const SEEK_CUR: i32 = 1;
    pub const SEEK_END: i32 = 2;
}

pub struct OpenFlags {}

impl OpenFlags {
    pub const O_RDONLY: i32 = 0;
    pub const O_WRONLY: i32 = 1;
    pub const O_RDWR: i32 = 2;
    pub const O_TRUNC: i32 = 512;
    pub const O_CLOEXEC: i32 = 0x80000;
}

pub struct MemoryDef {}

impl MemoryDef {
    pub const PTE_SHIFT: usize = 12;
    pub const PMD_SHIFT: usize = 21;
    pub const PUD_SHIFT: usize = 30;
    pub const PGD_SHIFT: usize = 39;

    pub const PTE_MASK: u64 = 0x1ff << Self::PTE_SHIFT;
    pub const PMD_MASK: u64 = 0x1ff << Self::PMD_SHIFT;
    pub const PUD_MASK: u64 = 0x1ff << Self::PUD_SHIFT;
    pub const PGD_MASK: u64 = 0x1ff << Self::PGD_SHIFT;

    pub const PTE_SIZE: u64 = 1 << Self::PTE_SHIFT;
    pub const PMD_SIZE: u64 = 1 << Self::PMD_SHIFT;
    pub const PUD_SIZE: u64 = 1 << Self::PUD_SHIFT;
    pub const PGD_SIZE: u64 = 1 << Self::PGD_SHIFT;

    //the block count kept on the stack to avoid heap allocation; total handle buffer size is 32 * 4K = 128K
    pub const ON_STACK_BLOCKS: usize = 32;

    pub const PAGE_SHIFT: u64 = 12;
    pub const HUGE_PAGE_SHIFT: u64 = 21;

    pub const ONE_KB: u64 = 1 << 10; //0x400
    pub const ONE_MB: u64 = 1 << 20; //0x100_000
    pub const ONE_GB: u64 = 1 << 30; //0x40_000_000
    pub const ONE_TB: u64 = 1 << 40; //0x100_0000_0000
    pub const TWO_MB: u64 = 2 * Self::ONE_MB;

    //interrupt stack pages
    pub const INTERRUPT_STACK_PAGES: u64 = 1;

    pub const DEFAULT_STACK_PAGES: u64 = 16;
    pub const DEFAULT_STACK_SIZE: u64 = Self::DEFAULT_STACK_PAGES * Self::PAGE_SIZE; //64 KB

    pub const PAGE_SIZE: u64 = 1 << 12; //0x1000
    pub const HUGE_PAGE_SIZE: u64 = 1 << Self::HUGE_PAGE_SHIFT; //0x200_000
    pub const PAGE_MASK: u64 = Self::PAGE_SIZE - 1;
    pub const PAGE_SIZE_4K: u64 = 1 << Self::PAGE_SHIFT; //0x1000
    pub const PAGE_SIZE_2M: u64 = 2 * Self::ONE_MB;
    pub const BLOCK_SIZE: u64 = 64 * Self::ONE_GB;

    pub const PHY_LOWER_ADDR: u64 = 256 * Self::ONE_GB; // 256GB ~ 512GB is guest kernel space
    pub const PHY_UPPER_ADDR: u64 = Self::PHY_LOWER_ADDR + 256 * Self::ONE_GB; // 256GB ~ 512GB is guest kernel space

    // start address for mmap and dynamic-load address space; the heap address
    // space sits between PHY_UPPER_ADDR and VIR_MMAP_START
    pub const VIR_MMAP_START: u64 = Self::PHY_UPPER_ADDR + 128 * Self::ONE_GB; //start from 640 GB
    pub const SHARED_START: u64 = Self::VIR_MMAP_START + 1 * Self::ONE_TB; //start from 1.625 TB

    pub const LOWER_TOP: u64 = 0x0000_8000_0000_0000;
    pub const UPPER_BOTTOM: u64 = 0xffff_8000_0000_0000;

    pub const ENTRY_COUNT: u16 = 512;

    pub const KERNEL_START_P2_ENTRY: usize = (Self::PHY_LOWER_ADDR / Self::ONE_GB) as usize; //256
    pub const KERNEL_END_P2_ENTRY: usize = (Self::PHY_UPPER_ADDR / Self::ONE_GB) as usize; //512
}

//mmap prot
pub struct MmapProt {}

impl MmapProt {
    pub const PROT_NONE: u64 = 0;
    pub const PROT_READ: u64 = 1 << 0;
    pub const PROT_WRITE: u64 = 1 << 1;
    pub const PROT_EXEC: u64 = 1 << 2;
    pub const PROT_SEM: u64 = 1 << 3;
    pub const PROT_GROWSDOWN: u64 = 1 << 24;
    pub const PROT_GROWSUP: u64 = 1 << 25;
}

//mmap flags
pub struct MmapFlags {}

impl MmapFlags {
    pub const MAP_SHARED: u64 = 1 << 0;
    pub const MAP_PRIVATE: u64 = 1 << 1;
    pub const MAP_FIXED: u64 = 1 << 4;
    pub const MAP_ANONYMOUS: u64 = 1 << 5;
    pub const MAP_32BIT: u64 = 1 << 6; // arch/x86/include/uapi/asm/mman.h
    pub const MAP_GROWSDOWN: u64 = 1 << 8;
    pub const MAP_DENYWRITE: u64 = 1 << 11;
    pub const MAP_EXECUTABLE: u64 = 1 << 12;
    pub const MAP_LOCKED: u64 = 1 << 13;
    pub const MAP_NORESERVE: u64 = 1 << 14;
    pub const MAP_POPULATE: u64 = 1 << 15;
    pub const MAP_NONBLOCK:
u64 = 1 << 16; pub const MAP_STACK: u64 = 1 << 17; pub const MAP_HUGETLB: u64 = 1 << 18; } //Linux: errors pub struct SysErr {} impl SysErr { pub const NONE: i32 = 0; pub const E2BIG: i32 = 0x7; pub const EACCES: i32 = 0xd; pub const EADDRINUSE: i32 = 0x62; pub const EADDRNOTAVAIL: i32 = 0x63; pub const EADV: i32 = 0x44; pub const EAFNOSUPPORT: i32 = 0x61; pub const EAGAIN: i32 = 0xb; pub const EALREADY: i32 = 0x72; pub const EBADE: i32 = 0x34; pub const EBADF: i32 = 0x9; pub const EBADFD: i32 = 0x4d; pub const EBADMSG: i32 = 0x4a; pub const EBADR: i32 = 0x35; pub const EBADRQC: i32 = 0x38; pub const EBADSLT: i32 = 0x39; pub const EBFONT: i32 = 0x3b; pub const EBUSY: i32 = 0x10; pub const ECANCELED: i32 = 0x7d; pub const ECHILD: i32 = 0xa; pub const ECHRNG: i32 = 0x2c; pub const ECOMM: i32 = 0x46; pub const ECONNABORTED: i32 = 0x67; pub const ECONNREFUSED: i32 = 0x6f; pub const ECONNRESET: i32 = 0x68; pub const EDEADLK: i32 = 0x23; pub const EDEADLOCK: i32 = 0x23; pub const EDESTADDRREQ: i32 = 0x59; pub const EDOM: i32 = 0x21; pub const EDOTDOT: i32 = 0x49; pub const EDQUOT: i32 = 0x7a; pub const EEXIST: i32 = 0x11; pub const EFAULT: i32 = 0xe; pub const EFBIG: i32 = 0x1b; pub const EHOSTDOWN: i32 = 0x70; pub const EHOSTUNREACH: i32 = 0x71; pub const EIDRM: i32 = 0x2b; pub const EILSEQ: i32 = 0x54; pub const EINPROGRESS: i32 = 0x73; pub const EINTR: i32 = 0x4; pub const EINVAL: i32 = 0x16; pub const EIO: i32 = 0x5; pub const EISCONN: i32 = 0x6a; pub const EISDIR: i32 = 0x15; pub const EISNAM: i32 = 0x78; pub const EKEYEXPIRED: i32 = 0x7f; pub const EKEYREJECTED: i32 = 0x81; pub const EKEYREVOKED: i32 = 0x80; pub const EL2HLT: i32 = 0x33; pub const EL2NSYNC: i32 = 0x2d; pub const EL3HLT: i32 = 0x2e; pub const EL3RST: i32 = 0x2f; pub const ELIBACC: i32 = 0x4f; pub const ELIBBAD: i32 = 0x50; pub const ELIBEXEC: i32 = 0x53; pub const ELIBMAX: i32 = 0x52; pub const ELIBSCN: i32 = 0x51; pub const ELNRNG: i32 = 0x30; pub const ELOOP: i32 = 0x28; pub const EMEDIUMTYPE: i32 = 0x7c; pub const EMFILE: i32 = 0x18; pub const EMLINK: i32 = 0x1f; pub const EMSGSIZE: i32 = 0x5a; pub const EMULTIHOP: i32 = 0x48; pub const ENAMETOOLONG: i32 = 0x24; pub const ENAVAIL: i32 = 0x77; pub const ENETDOWN: i32 = 0x64; pub const ENETRESET: i32 = 0x66; pub const ENETUNREACH: i32 = 0x65; pub const ENFILE: i32 = 0x17; pub const ENOANO: i32 = 0x37; pub const ENOBUFS: i32 = 0x69; pub const ENOCSI: i32 = 0x32; pub const ENODATA: i32 = 0x3d; pub const ENOATTR: i32 = Self::ENODATA; pub const ENODEV: i32 = 0x13; pub const ENOENT: i32 = 0x2; pub const ENOEXEC: i32 = 0x8; pub const ENOKEY: i32 = 0x7e; pub const ENOLCK: i32 = 0x25; pub const ENOLINK: i32 = 0x43; pub const ENOMEDIUM: i32 = 0x7b; pub const ENOMEM: i32 = 0xc; pub const ENOMSG: i32 = 0x2a; pub const ENONET: i32 = 0x40; pub const ENOPKG: i32 = 0x41; pub const ENOPROTOOPT: i32 = 0x5c; pub const ENOSPC: i32 = 0x1c; pub const ENOSR: i32 = 0x3f; pub const ENOSTR: i32 = 0x3c; pub const ENOSYS: i32 = 0x26; pub const ENOTBLK: i32 = 0xf; pub const ENOTCONN: i32 = 0x6b; pub const ENOTDIR: i32 = 0x14; pub const ENOTEMPTY: i32 = 0x27; pub const ENOTNAM: i32 = 0x76; pub const ENOTRECOVERABLE: i32 = 0x83; pub const ENOTSOCK: i32 = 0x58; pub const ENOTSUP: i32 = 0x5f; pub const ENOTTY: i32 = 0x19; pub const ENOTUNIQ: i32 = 0x4c; pub const ENXIO: i32 = 0x6; pub const EOPNOTSUPP: i32 = 0x5f; pub const EOVERFLOW: i32 = 0x4b; pub const EOWNERDEAD: i32 = 0x82; pub const EPERM: i32 = 0x1; pub const EPFNOSUPPORT: i32 = 0x60; pub const EPIPE: i32 = 0x20; pub const EPROTO: i32 = 0x47; pub 
const EPROTONOSUPPORT: i32 = 0x5d; pub const EPROTOTYPE: i32 = 0x5b; pub const ERANGE: i32 = 0x22; pub const EREMCHG: i32 = 0x4e; pub const EREMOTE: i32 = 0x42; pub const EREMOTEIO: i32 = 0x79; pub const ERESTART: i32 = 0x55; pub const ERFKILL: i32 = 0x84; pub const EROFS: i32 = 0x1e; pub const ESHUTDOWN: i32 = 0x6c; pub const ESOCKTNOSUPPORT: i32 = 0x5e; pub const ESPIPE: i32 = 0x1d; pub const ESRCH: i32 = 0x3; pub const ESRMNT: i32 = 0x45; pub const ESTALE: i32 = 0x74; pub const ESTRPIPE: i32 = 0x56; pub const ETIME: i32 = 0x3e; pub const ETIMEDOUT: i32 = 0x6e; pub const ETOOMANYREFS: i32 = 0x6d; pub const ETXTBSY: i32 = 0x1a; pub const EUCLEAN: i32 = 0x75; pub const EUNATCH: i32 = 0x31; pub const EUSERS: i32 = 0x57; pub const EWOULDBLOCK: i32 = 0xb; pub const EXDEV: i32 = 0x12; pub const EXFULL: i32 = 0x36; // ERESTARTSYS is returned by an interrupted syscall to indicate that it // should be converted to EINTR if interrupted by a signal delivered to a // user handler without SA_RESTART set, and restarted otherwise. pub const ERESTARTSYS: i32 = 512; // ERESTARTNOINTR is returned by an interrupted syscall to indicate that it // should always be restarted. pub const ERESTARTNOINTR: i32 = 513; // ERESTARTNOHAND is returned by an interrupted syscall to indicate that it // should be converted to EINTR if interrupted by a signal delivered to a // user handler, and restarted otherwise. pub const ERESTARTNOHAND: i32 = 514; // ERESTART_RESTARTBLOCK is returned by an interrupted syscall to indicate // that it should be restarted using a custom function. The interrupted // syscall must register a custom restart function by calling // Task.SetRestartSyscallFn. pub const ERESTART_RESTARTBLOCK: i32 = 515; } #[repr(C)] pub struct RLimit { pub rlimCurr: u64, pub rlimMax: u64, } pub fn ComparePage(from: u64, to: u64) -> bool { unsafe { let cnt = 512; let fromArr = slice::from_raw_parts(from as *const u64, cnt); let toArr = slice::from_raw_parts_mut(to as *mut u64, cnt); for i in 0..cnt { if toArr[i] != fromArr[i] { return false; } } return true; } } #[no_mangle] pub extern fn CopyData(from: u64, to: u64, cnt: usize) { unsafe { let fromArr = slice::from_raw_parts(from as *const u64, cnt); let toArr = slice::from_raw_parts_mut(to as *mut u64, cnt); for i in 0..cnt { toArr[i] = fromArr[i] } } } pub fn CopyPage(from: u64, to: u64) { CopyData(from, to, 512); } #[derive(Debug, Default, Copy, Clone)] #[repr(C)] pub struct LibcStat { pub st_dev: u64, pub st_ino: u64, pub st_nlink: u64, pub st_mode: u32, pub st_uid: u32, pub st_gid: u32, pub pad0: i32, pub st_rdev: u64, pub st_size: i64, pub st_blksize: i64, pub st_blocks: i64, pub st_atime: i64, pub st_atime_nsec: i64, pub st_mtime: i64, pub st_mtime_nsec: i64, pub st_ctime: i64, pub st_ctime_nsec: i64, pub pad: [i64; 3], } impl LibcStat { pub fn IsRegularFile(&self) -> bool { let x = self.st_mode as u16 & ModeType::S_IFMT; return x == ModeType::S_IFREG; } }
StopSignal
feature_composer.py
import tensorflow as tf
from sklearn.metrics import confusion_matrix
import numpy as np
from tools.preprocessing import preprocess_images, preprocess_single_image
from tools.kfold import KFold_cross_validation_split
from tools.extraction_and_metrics import extract_features, compute_confusion_matrix
from .network import Net
import torchvision.models as models
import torch
import os
import cv2

# Feature composer training
def train_feature_composer(
    composed_dataset_path: str,
    epochs: int,
    batch_size: int,
    num_classes: int,
    folds: int,
    lr: float,
    cuda: bool,
    ckpt_dir: str
):
    """
    Feature composer training.
    params:
        <string> composed_dataset_path
        <int> epochs
        <int> batch_size
        <int> num_classes
        <int> folds: Number of folds for KFold cross validation
        <float> lr: Learning rate
        <bool> cuda: Whether to use GPU or not
        <string> ckpt_dir: Model's location
    """
    # Preprocess images, returning the classes, features and labels
    class_names, x, y = preprocess_images(
        dataset_path=composed_dataset_path,
        width=224,
        height=224,
        num_classes=num_classes,
        framework="torch",
        imagenet=True
    )

    # Split data
    X_train, X_test, Y_train, Y_test = KFold_cross_validation_split(
        features=x,
        labels=y,
        n_splits=folds
    )

    # Normalize
    X_train /= 255
    X_test /= 255

    # Instantiate model
    net = Net(
        models.vgg16(pretrained=True),
        num_classes=num_classes,
        lr=lr,
        cuda=cuda,
        mode="feature_composer",
        ckpt_dir=ckpt_dir,
        labels=class_names
    )

    # Train model
    net.fit(
        X_train,
        Y_train,
        X_test,
        Y_test,
        epochs,
        batch_size,
        resume=False
    )

    # Confusion matrix
    compute_confusion_matrix(
        y_true=Y_test,
        y_pred=net.infer(X_test),
        framework="torch",
        mode="feature_composer",
        num_classes=num_classes // 2
    )

# Inference
def
(
    ckpt_dir: str,
    ckpt_name: str,
    input_image: str
) -> dict:
    """
    Main inference method.
    params:
        <string> ckpt_dir: Saved model's directory
        <string> ckpt_name: Saved model's name
        <string> input_image: Image path
    returns:
        <dict> Dictionary containing the predictions with their levels of confidence.
        E.g.: {
            COVID19_1: 0.10,
            COVID19_2: 0.15,
            ...
        }
    """
    ckpt_path = os.path.join(ckpt_dir, ckpt_name)
    num_classes = torch.load(ckpt_path, map_location=lambda storage, loc: storage)["num_classes"]

    # Instantiate model
    net = Net(
        models.vgg16(pretrained=True),
        num_classes=num_classes,
        mode="feature_composer",
        ckpt_dir=ckpt_dir
    )

    # Load model
    net.load_model_for_inference(os.path.join(ckpt_dir, ckpt_name))

    # Check that the input file is an image.
    assert input_image.lower().endswith(("png", "jpg", "jpeg"))

    # Preprocess
    img = preprocess_single_image(
        img=input_image,
        width=224,
        height=224,
        imagenet=True,
        framework="torch"
    )

    # Return prediction
    return net.infer(img, ckpt_path=os.path.join(ckpt_dir, ckpt_name), use_labels=True)
infer
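A minimal usage sketch of the inference entry point defined above. The checkpoint directory, checkpoint name, and image path are hypothetical placeholders; the return value is treated as the label-to-confidence dict described in the docstring.

# Hypothetical example: all file names and paths below are placeholders.
if __name__ == "__main__":
    predictions = infer(
        ckpt_dir="checkpoints",           # assumed checkpoint directory
        ckpt_name="feature_composer.pt",  # assumed checkpoint file name
        input_image="sample_scan.png",    # assumed input image path
    )
    # Show classes ordered by confidence, highest first.
    for label, confidence in sorted(predictions.items(), key=lambda kv: kv[1], reverse=True):
        print(f"{label}: {confidence:.4f}")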
infocenteruri.py
#!/usr/bin/env python # coding:utf-8 # """ Copyright (c) 2017 LandGrey (https://github.com/LandGrey/taoman) License: MIT """ import urllib import requests from lib.fun import crawl_link_handle from lib.config import baidu_base_url, get_head, timeout, baidu_first_pattern, self_pattern, intranet_ip_pattern, \ ip_simple_pattern def
(domain):
    domains = []
    data = {'wd': 'site:{0} 信息化|网络中心'.format(domain)}
    requests.packages.urllib3.disable_warnings()
    req = requests.get(baidu_base_url + urllib.urlencode(data), headers=get_head(),
                       timeout=timeout, verify=False)
    content = req.text
    match = baidu_first_pattern.findall(content)
    if match:
        info_center_url = crawl_link_handle(match[0][0])
        reqs = requests.get('http://' + info_center_url, headers=get_head(),
                            timeout=timeout, verify=False)
        matchs = self_pattern.findall(reqs.text)
        for m in matchs:
            link = crawl_link_handle(m)
            if domain in m:
                domains.append(link)
            elif not intranet_ip_pattern.findall(link) and ip_simple_pattern.findall(link):
                # Keep public IP-style links; intranet addresses are dropped.
                domains.append(link)
            else:
                domains.append('')
    return domains
crawlinfocenter
rm.py
""" rm - remove files or directories """ from os import path from ..Transformer import TransformerLlvm from ...constants import EXECFILEEXTENSION class TransformAr(TransformerLlvm): """ transform ar commands """ @staticmethod def can_be_applied_on(cmd): return (cmd.bashcmd.startswith("rm -f ") and not cmd.bashcmd.startswith("rm -f -r")) @staticmethod def
(cmd, container): # extract a list of files to be deleted files = cmd.bashcmd.split()[2:] new = [] for file in files: embrace = "" # look for embracing ' and " if file.startswith("'") and file.endswith("'"): file = file[1:-1] embrace = "'" elif file.startswith('"') and file.endswith('"'): file = file[1:-1] embrace = '"' # No embracing for wildcard commands if '*' in file: embrace = "" if file.endswith(".o"): # Use .bc files instead of .o files new.append(embrace + file[:-2] + ".bc" + embrace) elif file.endswith(".a") or ".so" in file: # simply append .bc to normal linked files new.append(embrace + file + ".bc" + embrace) elif '.' not in path.basename(file): # add .x.bc to executables new.append( embrace + file + EXECFILEEXTENSION + ".bc" + embrace) cmd.bashcmd = "rm -f " + " ".join(new) if new else "" return cmd
apply_transformation_on
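To make the rewrite above concrete, a small sketch under stated assumptions: Cmd is a hypothetical minimal stand-in for the real command wrapper, and the executable extension comes from the project's EXECFILEEXTENSION constant.

# Sketch: 'Cmd' is a hypothetical stand-in for the command object.
class Cmd:
    def __init__(self, bashcmd):
        self.bashcmd = bashcmd

cmd = Cmd("rm -f foo.o libbar.a baz")
cmd = TransformAr.apply_transformation_on(cmd, container=None)
print(cmd.bashcmd)
# Expected (with the project's EXECFILEEXTENSION, shown here as <X>):
#   rm -f foo.bc libbar.a.bc baz<X>.bc
# foo.o     -> foo.bc        (object file replaced by bitcode)
# libbar.a  -> libbar.a.bc   (.bc appended to linked archives)
# baz       -> baz<X>.bc     (extension-less executable)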
lib.rs
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { anyhow::{anyhow, Result}, ffx_core::ffx_plugin, ffx_target_add_args::AddCommand, fidl_fuchsia_developer_bridge as bridge, fidl_fuchsia_net as net, regex::Regex, std::net::IpAddr, }; #[cfg(not(test))] use std::ffi::CString; #[ffx_plugin()] pub async fn add(daemon_proxy: bridge::DaemonProxy, cmd: AddCommand) -> Result<()> { let v6bracket = Regex::new(r"^\[([^\]]+)\](:\d+)?$")?; let v4port = Regex::new(r"^(\d+\.\d+\.\d+\.\d+)(:\d+)?$")?; let with_scope = Regex::new(r"^(.*)%(.*)$")?; let (addr, port) = if let Some(caps) = v6bracket.captures(cmd.addr.as_str()).or_else(|| v4port.captures(cmd.addr.as_str())) { (caps.get(1).map(|x| x.as_str()).unwrap(), caps.get(2).map(|x| x.as_str())) } else { (cmd.addr.as_str(), None) }; let port = if let Some(port) = port { Some(port[1..].parse::<u16>()?) } else { None }; let (addr, scope) = if let Some(caps) = with_scope.captures(addr) { (caps.get(1).map(|x| x.as_str()).unwrap(), Some(caps.get(2).map(|x| x.as_str()).unwrap())) } else { (addr, None) }; let addr = addr.parse::<IpAddr>()?; #[cfg(not(test))] let scope_id = if let Some(scope) = scope { unsafe { let scope = CString::new(scope).unwrap(); libc::if_nametoindex(scope.as_ptr()) } } else { 0 }; #[cfg(test)] let scope_id = if let Some(scope) = scope { scope.parse()? } else { 0 }; let ip = match addr { IpAddr::V6(i) => net::IpAddress::Ipv6(net::Ipv6Address { addr: i.octets().into() }), IpAddr::V4(i) => net::IpAddress::Ipv4(net::Ipv4Address { addr: i.octets().into() }), }; let mut addr = if let Some(port) = port { bridge::TargetAddrInfo::IpPort(bridge::TargetIpPort { ip, port, scope_id }) } else { bridge::TargetAddrInfo::Ip(bridge::TargetIp { ip, scope_id }) }; if let Err(e) = daemon_proxy.add_target(&mut addr).await? 
{ eprintln!("ERROR: {:?}", e); Err(anyhow!("Error adding target: {:?}", e)) } else { Ok(()) } } //////////////////////////////////////////////////////////////////////////////// // tests #[cfg(test)] mod test { use super::*; use fidl_fuchsia_developer_bridge::DaemonRequest; fn setup_fake_daemon_server<T: 'static + Fn(bridge::TargetAddrInfo) + Send>( test: T, ) -> bridge::DaemonProxy { setup_fake_daemon_proxy(move |req| match req { DaemonRequest::AddTarget { ip, responder } => { test(ip); responder.send(&mut Ok(())).unwrap(); } _ => assert!(false), }) } #[fuchsia_async::run_singlethreaded(test)] async fn test_add() { let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::Ip(bridge::TargetIp { ip: net::IpAddress::Ipv4(net::Ipv4Address { addr: "123.210.123.210" .parse::<std::net::Ipv4Addr>() .unwrap() .octets() .into() }), scope_id: 0, }) ) }); add(server, AddCommand { addr: "123.210.123.210".to_owned() }).await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn test_add_port() { let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::IpPort(bridge::TargetIpPort { ip: net::IpAddress::Ipv4(net::Ipv4Address { addr: "123.210.123.210" .parse::<std::net::Ipv4Addr>() .unwrap() .octets() .into() }), scope_id: 0, port: 2310, }) ) }); add(server, AddCommand { addr: "123.210.123.210:2310".to_owned() }).await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn test_add_v6() { let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::Ip(bridge::TargetIp { ip: net::IpAddress::Ipv6(net::Ipv6Address { addr: "f000::1".parse::<std::net::Ipv6Addr>().unwrap().octets().into() }), scope_id: 0, }) ) }); add(server, AddCommand { addr: "f000::1".to_owned() }).await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn test_add_v6_port()
#[fuchsia_async::run_singlethreaded(test)] async fn test_add_v6_scope_id() { let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::Ip(bridge::TargetIp { ip: net::IpAddress::Ipv6(net::Ipv6Address { addr: "f000::1".parse::<std::net::Ipv6Addr>().unwrap().octets().into() }), scope_id: 1, }) ) }); add(server, AddCommand { addr: "f000::1%1".to_owned() }).await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn test_add_v6_scope_id_port() { let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::IpPort(bridge::TargetIpPort { ip: net::IpAddress::Ipv6(net::Ipv6Address { addr: "f000::1".parse::<std::net::Ipv6Addr>().unwrap().octets().into() }), scope_id: 1, port: 640, }) ) }); add(server, AddCommand { addr: "[f000::1%1]:640".to_owned() }).await.unwrap(); } }
{ let server = setup_fake_daemon_server(|addr| { assert_eq!( addr, bridge::TargetAddrInfo::IpPort(bridge::TargetIpPort { ip: net::IpAddress::Ipv6(net::Ipv6Address { addr: "f000::1".parse::<std::net::Ipv6Addr>().unwrap().octets().into() }), scope_id: 0, port: 65, }) ) }); add(server, AddCommand { addr: "[f000::1]:65".to_owned() }).await.unwrap(); }
server_list.rs
pub struct ServerListGameServer { pub id: usize, pub name: String, } pub struct ServerListWorldServer { pub id: usize, pub name: String, pub game_servers: Vec<ServerListGameServer>,
pub struct ServerList { pub world_servers: Vec<ServerListWorldServer>, }
}
test_models.py
from django.db import IntegrityError from django.test import TestCase from ..models import Badge, Award from .mixins import UserFixturesMixin
def test_autocreate_slug(self): badge = Badge.objects.create(name='Super Chouette') self.assertEqual(badge.slug, 'super-chouette') class AwardTestCase(TestCase, UserFixturesMixin): """ Award model test case. """ def setUp(self): self.create_users() def test_create(self): badge = Badge.objects.create(name='Super Chouette') Award.objects.create(user=self.user1, badge=badge) Award.objects.create(user=self.user2, badge=badge) self.assertEqual(badge.users.count(), 2) self.assertRaises(IntegrityError, Award.objects.create, **{ 'user': self.user1, 'badge': badge })
class BadgeTestCase(TestCase): """ Badge model test case. """
adaccounttargetingunified.py
# Copyright 2014 Facebook, Inc. # You are hereby granted a non-exclusive, worldwide, royalty-free license to # use, copy, modify, and distribute this software in source code or binary # form for use in connection with the web services and APIs provided by # Facebook. # As with any software that integrates with the Facebook platform, your use # of this software is subject to the Facebook Developer Principles and # Policies [http://developers.facebook.com/policy/]. This copyright notice # shall be included in all copies or substantial portions of the software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. from facebook_business.adobjects.abstractobject import AbstractObject from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject from facebook_business.adobjects.objectparser import ObjectParser from facebook_business.api import FacebookRequest from facebook_business.typechecker import TypeChecker """ This class is auto-generated. For any issues or feature requests related to this class, please let us know on github and we'll fix in our codegen framework. We'll not be able to accept pull request for this class. """ class AdAccountTargetingUnified( AbstractCrudObject, ): def __init__(self, fbid=None, parent_id=None, api=None): self._isAdAccountTargetingUnified = True super(AdAccountTargetingUnified, self).__init__(fbid, parent_id, api) class Field(AbstractObject.Field): audience_size = 'audience_size' conversion_lift = 'conversion_lift' description = 'description' id = 'id' img = 'img' info = 'info' info_title = 'info_title' is_recommendation = 'is_recommendation' key = 'key' link = 'link' name = 'name' parent = 'parent' partner = 'partner' path = 'path' performance_rating = 'performance_rating' raw_name = 'raw_name' recommendation_model = 'recommendation_model' search_interest_id = 'search_interest_id' source = 'source' spend = 'spend' type = 'type' valid = 'valid' class LimitType: behaviors = 'behaviors' college_years = 'college_years' education_majors = 'education_majors' education_schools = 'education_schools' education_statuses = 'education_statuses' ethnic_affinity = 'ethnic_affinity' family_statuses = 'family_statuses' generation = 'generation' home_ownership = 'home_ownership' home_type = 'home_type' home_value = 'home_value' household_composition = 'household_composition' income = 'income' industries = 'industries' interested_in = 'interested_in' interests = 'interests' life_events = 'life_events' location_categories = 'location_categories' moms = 'moms' net_worth = 'net_worth' office_type = 'office_type' politics = 'politics' relationship_statuses = 'relationship_statuses' user_adclusters = 'user_adclusters' work_employers = 'work_employers' work_positions = 'work_positions' class RegulatedCategories: credit = 'CREDIT' employment = 'EMPLOYMENT' housing = 'HOUSING' issues_elections_politics = 'ISSUES_ELECTIONS_POLITICS' none = 'NONE' class WhitelistedTypes: adgroup_id = 'adgroup_id' age_max = 'age_max' age_min = 'age_min' alternate_auto_targeting_option = 'alternate_auto_targeting_option' app_install_state = 'app_install_state' 
audience_network_positions = 'audience_network_positions' behaviors = 'behaviors' brand_safety_content_filter_levels = 'brand_safety_content_filter_levels' brand_safety_content_severity_levels = 'brand_safety_content_severity_levels' catalog_based_targeting = 'catalog_based_targeting' cities = 'cities' college_years = 'college_years' conjunctive_user_adclusters = 'conjunctive_user_adclusters' connections = 'connections' contextual_targeting_categories = 'contextual_targeting_categories' countries = 'countries' country = 'country' country_groups = 'country_groups' custom_audiences = 'custom_audiences' device_platforms = 'device_platforms' direct_install_devices = 'direct_install_devices' dynamic_audience_ids = 'dynamic_audience_ids' education_majors = 'education_majors' education_schools = 'education_schools' education_statuses = 'education_statuses' effective_audience_network_positions = 'effective_audience_network_positions' effective_device_platforms = 'effective_device_platforms' effective_facebook_positions = 'effective_facebook_positions' effective_instagram_positions = 'effective_instagram_positions' effective_messenger_positions = 'effective_messenger_positions' effective_publisher_platforms = 'effective_publisher_platforms' effective_whatsapp_positions = 'effective_whatsapp_positions' engagement_specs = 'engagement_specs' ethnic_affinity = 'ethnic_affinity' exclude_previous_days = 'exclude_previous_days' exclude_reached_since = 'exclude_reached_since' excluded_brand_safety_content_types = 'excluded_brand_safety_content_types' excluded_connections = 'excluded_connections' excluded_custom_audiences = 'excluded_custom_audiences' excluded_dynamic_audience_ids = 'excluded_dynamic_audience_ids' excluded_engagement_specs = 'excluded_engagement_specs' excluded_geo_locations = 'excluded_geo_locations' excluded_mobile_device_model = 'excluded_mobile_device_model' excluded_product_audience_specs = 'excluded_product_audience_specs' excluded_publisher_categories = 'excluded_publisher_categories' excluded_publisher_list_ids = 'excluded_publisher_list_ids' excluded_user_adclusters = 'excluded_user_adclusters' excluded_user_device = 'excluded_user_device' exclusions = 'exclusions' facebook_positions = 'facebook_positions' family_statuses = 'family_statuses' fb_deal_id = 'fb_deal_id' flexible_spec = 'flexible_spec' follow_profiles = 'follow_profiles' follow_profiles_negative = 'follow_profiles_negative' format = 'format' friends_of_connections = 'friends_of_connections' gatekeepers = 'gatekeepers' genders = 'genders' generation = 'generation' geo_locations = 'geo_locations' home_ownership = 'home_ownership' home_type = 'home_type' home_value = 'home_value' household_composition = 'household_composition' id = 'id' income = 'income' industries = 'industries' instagram_positions = 'instagram_positions' instream_video_sponsorship_placements = 'instream_video_sponsorship_placements' interest_defaults_source = 'interest_defaults_source' interested_in = 'interested_in' interests = 'interests' is_instagram_destination_ad = 'is_instagram_destination_ad' is_whatsapp_destination_ad = 'is_whatsapp_destination_ad' keywords = 'keywords' life_events = 'life_events' locales = 'locales' location_categories = 'location_categories' location_cluster_ids = 'location_cluster_ids' location_expansion = 'location_expansion' marketplace_product_categories = 'marketplace_product_categories' messenger_positions = 'messenger_positions' mobile_device_model = 'mobile_device_model' moms = 'moms' net_worth = 'net_worth' 
office_type = 'office_type' page_types = 'page_types' place_page_set_ids = 'place_page_set_ids' political_views = 'political_views' politics = 'politics' product_audience_specs = 'product_audience_specs' prospecting_audience = 'prospecting_audience' publisher_platforms = 'publisher_platforms' radius = 'radius' regions = 'regions' relationship_statuses = 'relationship_statuses' rtb_flag = 'rtb_flag' site_category = 'site_category' targeting_optimization = 'targeting_optimization' timezones = 'timezones' topic = 'topic' trending = 'trending' user_adclusters = 'user_adclusters' user_device = 'user_device' user_event = 'user_event' user_os = 'user_os' user_page_threads = 'user_page_threads' user_page_threads_excluded = 'user_page_threads_excluded' whatsapp_positions = 'whatsapp_positions' wireless_carrier = 'wireless_carrier' work_employers = 'work_employers' work_positions = 'work_positions' zips = 'zips' class Mode: best_performing = 'best_performing' recently_used = 'recently_used' related = 'related' suggestions = 'suggestions' class Objective: app_installs = 'APP_INSTALLS' brand_awareness = 'BRAND_AWARENESS' conversions = 'CONVERSIONS' event_responses = 'EVENT_RESPONSES' lead_generation = 'LEAD_GENERATION' link_clicks = 'LINK_CLICKS' local_awareness = 'LOCAL_AWARENESS' messages = 'MESSAGES' offer_claims = 'OFFER_CLAIMS' page_likes = 'PAGE_LIKES' post_engagement = 'POST_ENGAGEMENT' product_catalog_sales = 'PRODUCT_CATALOG_SALES' reach = 'REACH' video_views = 'VIDEO_VIEWS' _field_types = { 'audience_size': 'unsigned int', 'conversion_lift': 'float', 'description': 'string', 'id': 'string', 'img': 'string', 'info': 'string', 'info_title': 'string', 'is_recommendation': 'bool', 'key': 'string', 'link': 'string', 'name': 'string', 'parent': 'string', 'partner': 'string', 'path': 'list<string>', 'performance_rating': 'unsigned int', 'raw_name': 'string', 'recommendation_model': 'string', 'search_interest_id': 'string', 'source': 'string', 'spend': 'float', 'type': 'string', 'valid': 'bool', } @classmethod def
(cls): field_enum_info = {} field_enum_info['LimitType'] = AdAccountTargetingUnified.LimitType.__dict__.values() field_enum_info['RegulatedCategories'] = AdAccountTargetingUnified.RegulatedCategories.__dict__.values() field_enum_info['WhitelistedTypes'] = AdAccountTargetingUnified.WhitelistedTypes.__dict__.values() field_enum_info['Mode'] = AdAccountTargetingUnified.Mode.__dict__.values() field_enum_info['Objective'] = AdAccountTargetingUnified.Objective.__dict__.values() return field_enum_info
_get_field_enum_info
modernizr.custom.06743.js
/* Modernizr 2.7.2 (Custom Build) | MIT & BSD * Build: http://modernizr.com/download/#-a_download */ ;window.Modernizr=function(a,b,c){function
(a){i.cssText=a}function u(a,b){return t(prefixes.join(a+";")+(b||""))}function v(a,b){return typeof a===b}function w(a,b){return!!~(""+a).indexOf(b)}function x(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:v(f,"function")?f.bind(d||b):f}return!1}var d="2.7.2",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={},m={},n={},o=[],p=o.slice,q,r={}.hasOwnProperty,s;!v(r,"undefined")&&!v(r.call,"undefined")?s=function(a,b){return r.call(a,b)}:s=function(a,b){return b in a&&v(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=p.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(p.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(p.call(arguments)))};return e});for(var y in l)s(l,y)&&(q=y.toLowerCase(),e[q]=l[y](),o.push((e[q]?"":"no-")+q));return e.addTest=function(a,b){if(typeof a=="object")for(var d in a)s(a,d)&&e.addTest(d,a[d]);else{a=a.toLowerCase();if(e[a]!==c)return e;b=typeof b=="function"?b():b,typeof enableClasses!="undefined"&&enableClasses&&(f.className+=" "+(b?"":"no-")+a),e[a]=b}return e},t(""),h=j=null,e._version=d,e}(this,this.document),Modernizr.addTest("adownload","download"in document.createElement("a"));
t
phlsys_workingdircommand__t.py
"""Test suite for phlsys_workingdircommand.""" # ============================================================================= # TEST PLAN # ----------------------------------------------------------------------------- # Here we detail the things we are concerned to test and specify which tests # cover those concerns. # # Concerns: # [ A] command is executed correctly # [ A] working directory is restored after command execution # ----------------------------------------------------------------------------- # Tests: # [ A] test_A_command_with_working_directory # ============================================================================= from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import stat import tempfile import unittest import phlsys_fs import phlsys_workingdircommand _PYCAT_COMMAND = """ #! /bin/sh echo "Hello $1!" """ .lstrip() class Test(unittest.TestCase): def setUp(self): pass def tearDown(self):
def test_A_command_with_working_directory(self): working_dir = tempfile.mkdtemp() with phlsys_fs.chtmpdir_context(): tmp_dir = os.getcwd() pycat_script_path = os.path.join(tmp_dir, 'pycat.sh') phlsys_fs.write_text_file(pycat_script_path, _PYCAT_COMMAND) mode = os.stat(pycat_script_path).st_mode os.chmod(pycat_script_path, mode | stat.S_IEXEC) self.assertEqual(os.getcwd(), tmp_dir) command = phlsys_workingdircommand.CommandWithWorkingDirectory( pycat_script_path, working_dir) result = command('Alice') # [ A] command is executed correctly self.assertEqual('Hello Alice!\n', result) # [ A] working directory is restored after command execution self.assertEqual(os.getcwd(), tmp_dir) # ----------------------------------------------------------------------------- # Copyright (C) 2015 Bloomberg Finance L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------ END-OF-FILE ----------------------------------
pass
main.ts
// start main console.log('start main'); /** * Some predefined delays (in milliseconds). */ export enum Delays { Short = 500, Medium = 2000, Long = 5000,
/** * Returns a Promise<string> that resolves after given time. * * @param {string} name - A name. * @param {number=} [delay=Delays.Medium] - Number of milliseconds to delay resolution of the Promise. * @returns {Promise<string>} */ function delayedHello( name: string, delay: number = Delays.Medium ): Promise<string> { return new Promise((resolve: (value?: string) => void) => setTimeout(() => resolve(`Hello, ${name}`), delay) ); } // Below are examples of using ESLint errors suppression // Here it is suppressing missing return type definitions for greeter function // eslint-disable-next-line @typescript-eslint/explicit-function-return-type export async function greeter(name: string) { return await delayedHello(name, Delays.Long); }
}
kcv_x1.py
# encoding: UTF-8 # Wide Column Store (Key-Column-Value database) built on top of SQLite # (c) 2017 by mobarski (at) gmail (dot) com # licence: MIT # version: x1m3 (x-experimental, p-preview, r-release, m-modification) # x1m6 - better test coverage # x1m5 - tests, limit_str, some comments, set_items<-store, scan_items(cast= # x1m4 - delete as scan(), __enter__ # x1m3 - col_store in external file, col_store order, ser/de as args, where_str, scan_col_store # x1m2 - incr_many,benchmark,limit,delete,sync,to_col_store(x2),from_col_store(x2) from __future__ import print_function import sqlite3 from itertools import groupby class KCV: "Wide Column Store built on top of SQLite" def __init__(self, path=':memory:', tab='main'): # TODO readonly mode self.path = path self.tab = tab self.conn = sqlite3.connect(path) self.create() def create(self,index=True): self.conn.execute('create table if not exists {0} (k,c,v)'.format(self.tab)) if index: self.conn.execute('create unique index if not exists i_{0} on {0} (k,c)'.format(self.tab)) def execute(self,*a,**kw): "execute sql statement - for debugging" print('EXECUTE',*a,**kw) return self.conn.execute(*a,**kw) def __enter__(self): return self def __exit__(self, ex_type, ex_val, ex_tb): if ex_type is None: self.sync() ### WRITE ### def sync(self,compact=False): if compact: self.conn.execute('vacuum') self.conn.commit() def set(self,k,c,v): self.conn.execute('insert or replace into {0} values (?,?,?)'.format(self.tab),(k,c,v)) def set_items(self,k,items): self.conn.executemany('insert or replace into {0} values (?,?,?)'.format(self.tab),((k,c,v) for c,v in items)) def incr(self,k,c,v=1): self.conn.execute('insert or replace into {0} values (?,?,?+coalesce((select v from {0} where k=? and c=?),0))'.format(self.tab),(k,c,v,k,c)) def incr_items(self,k,items): self.conn.executemany('insert or replace into {0} values (?,?,?+coalesce((select v from {0} where k=? and c=?),0))'.format(self.tab),((k,c,v,k,c) for c,v in items)) def delete(self,k,c='*',**kw): list(self.scan(mode='delete',k=k,c=c,**kw)) # list() required as scan is an iterator def drop(self,tab=None): self.conn.execute('drop table if exists {0}'.format(tab or self.tab)) ### READ ### def get(self,k,c,default=None): x = self.conn.execute('select v from {0} where k=? and c=?'.format(self.tab),(k,c)).fetchone() return x[0] if x else default def items(self,k):
def scan_items(self,k='*',c='*',order='',cast=list,**kw): it = self.scan(k=k,c=c,order=order,**kw) for k,g in groupby(it,key=lambda x:x[0]): yield k,cast(((x[1],x[2]) for x in g)) def scan(self,k='*',c='*',order='',limit=None,mode='kcv',**kw): select_str,select_cnt = self.get_select_str(mode) where_str,where_vals = self.get_where_str(k,c,kw) order_str = self.get_order_str(order) limit_str,limit_vals = self.get_limit_str(limit) sql = '{0} from {1} {2} {3} {4}'.format(select_str, self.tab, where_str, order_str, limit_str) if select_cnt>1: for row in self.conn.execute(sql, where_vals+limit_vals): yield row else: for [row] in self.conn.execute(sql, where_vals+limit_vals): yield row def join(self): pass # TODO (LATER: x2) JOIN left,inner sum,min,max,mul,?prod def agg(self): pass # TODO (LATER: x3) AGG sum,max,min,count,count distinct,count null,count range ### SQL CODE GENERATION UTILS ### def get_limit_str(self,limit): if limit is None: return '',[] else: return 'limit ?',[limit] def get_order_str(self,order): out = [] if 'ka' in order: out += ['k asc'] if 'kd' in order: out += ['k desc'] if 'va' in order: out += ['v asc'] if 'vd' in order: out += ['v desc'] if 'ca' in order: out += ['c asc'] if 'cd' in order: out += ['c desc'] if out: return 'order by '+','.join(out) else: return '' def get_select_str(self,mode): if mode=='kcv': return 'select k,c,v',3 if mode=='kc': return 'select distinct k,c',2 if mode=='k': return 'select distinct k',1 if mode=='c': return 'select distinct c',1 if mode=='delete': return 'delete',0 def get_where_str(self,k,c,kw): # TODO - better arg names # TODO - like (case insensitive) # TODO - regexp (custom function) # TODO - match (custom function) sql,val = [],[] if k!='*': try: if '*' in k or '?' in k or '[' in k: sql.append('k glob ?'); val.append(k) else: sql.append('k=?'); val.append(k) except TypeError: sql.append('k=?'); val.append(k) if c!='*': try: if '*' in c or '?' in c or '[' in c: sql.append('c glob ?'); val.append(c) else: sql.append('c=?'); val.append(c) except TypeError: sql.append('c=?'); val.append(c) if 'klt' in kw: sql.append('k<?'); val.append(kw['klt']) if 'clt' in kw: sql.append('c<?'); val.append(kw['clt']) if 'vlt' in kw: sql.append('v<?'); val.append(kw['vlt']) if 'kgt' in kw: sql.append('k>?'); val.append(kw['kgt']) if 'cgt' in kw: sql.append('c>?'); val.append(kw['cgt']) if 'vgt' in kw: sql.append('v>?'); val.append(kw['vgt']) if 'kle' in kw: sql.append('k<=?'); val.append(kw['kle']) if 'cle' in kw: sql.append('c<=?'); val.append(kw['cle']) if 'vle' in kw: sql.append('v<=?'); val.append(kw['vle']) if 'kge' in kw: sql.append('k>=?'); val.append(kw['kge']) if 'cge' in kw: sql.append('c>=?'); val.append(kw['cge']) if 'vge' in kw: sql.append('v>=?'); val.append(kw['vge']) # LIMIT - sqlite3 limits queries to 999 variables ('?' 
placeholders) if 'kin' in kw: sql.append('k in ({0})'.format((',?'*len(kw['kin']))[1:])); val.extend(kw['kin']) if 'cin' in kw: sql.append('c in ({0})'.format((',?'*len(kw['cin']))[1:])); val.extend(kw['cin']) if 'vin' in kw: sql.append('v in ({0})'.format((',?'*len(kw['vin']))[1:])); val.extend(kw['vin']) if sql: return 'where '+' and '.join(sql),val else: return '',[] ### ADVANCED - COLUMNAR ARCHIVE ### def to_col_store(self,path,batch=1000,tab='arch',order='',ser=None,move=False,k='*',c='*',**kw): "archive table into compressed, columnar storage in external file" if ser is None: import marshal from zlib import compress def ser(x): return buffer(compress(marshal.dumps(x,2))) arch_conn = sqlite3.connect(path) arch_conn.execute("drop table if exists {0}".format(tab)) arch_conn.execute("create table {0} (c,k,v)".format(tab)) where_str,where_vals = self.get_where_str(k,c,kw) order_str = self.get_order_str(order) cols = [] keys = [] vals = [] for k,c,v in self.conn.execute('select k,c,v from {0} {1} {2}'.format(self.tab, where_str, order_str),where_vals): if len(keys)>=batch: arch_conn.execute("insert into {0} values (?,?,?)".format(tab),(ser(cols),ser(keys),ser(vals))) if move: self.conn.executemany("delete from {0} where k=? and c=?".format(self.tab),zip(keys,cols)) cols = [] keys = [] vals = [] cols.append(c) keys.append(k) vals.append(v) if keys: arch_conn.execute("insert into {0} values (?,?,?)".format(tab),(ser(cols),ser(keys),ser(vals))) arch_conn.execute('vacuum') arch_conn.commit() arch_conn.close() def from_col_store(self, path, tab='arch', de=None, merge=False): "restore table from archive kept in external file" if de is None: import marshal from zlib import decompress def de(x): return marshal.loads(decompress(x)) arch_conn = sqlite3.connect(path) if not merge: self.drop() self.create(index=False) for ser_cols,ser_keys,ser_vals in arch_conn.execute('select c,k,v from {0}'.format(tab)): cols = de(ser_cols) keys = de(ser_keys) vals = de(ser_vals) self.conn.executemany('insert into {0} values (?,?,?)'.format(self.tab),zip(keys,cols,vals)) if not merge: self.create() def scan_col_store(self,path,tab='arch',de=None): "iterate over table in external archive file" if de is None: import marshal from zlib import decompress def de(x): return marshal.loads(decompress(x)) arch_conn = sqlite3.connect(path) for ser_cols,ser_keys,ser_vals in arch_conn.execute('select c,k,v from {0}'.format(tab)): cols = de(ser_cols) keys = de(ser_keys) vals = de(ser_vals) for k,c,v in zip(keys,cols,vals): yield k,c,v
return {c:v for k,c,v in self.scan(k=k)}
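A short self-contained sketch of the store in use, with hypothetical keys and columns: an in-memory database, basic set/get and incr calls, and a glob-filtered, column-ordered scan as supported by get_where_str() and get_order_str().

# Sketch: exercise the KCV API against an in-memory SQLite database.
with KCV() as db:                     # default path ':memory:', table 'main'
    db.set('user:1', 'name', 'alice')
    db.set('user:1', 'age', 30)
    db.incr('user:1', 'logins')       # missing counter starts at 0, becomes 1
    print(db.get('user:1', 'name'))   # -> alice
    for k, c, v in db.scan(k='user:*', order='ca'):  # glob key, columns ascending
        print(k, c, v)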
run.py
#!/usr/bin/env python # vim:fileencoding=utf-8 # Author: Shinya Suzuki # Created: 2017-11-16 from application import app from application.models import init_schema, init_data import click @app.cli.command(help="Initialize database") def initdb():
if __name__ == "__main__": app.run(host="0.0.0.0", port=5000, threaded=True, debug=True)
init_schema() init_data({"profile": [{"name": "Takuji Yamada", "email": "[email protected]", "role": "CTO"}]}) click.echo("Database is initialized.")
issue-18539.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that coercing bare fn's that return a zero sized type to // a closure doesn't cause an LLVM ERROR
Foo } #[allow(unused_must_use)] fn main() { (0u..10).map(uint_to_foo); }
struct Foo; fn uint_to_foo(_: uint) -> Foo {
axis_crosspoint_wrap.py
#!/usr/bin/env python """ Generates an AXI Stream crosspoint wrapper with the specified number of ports """ import argparse from jinja2 import Template def
(): parser = argparse.ArgumentParser(description=__doc__.strip()) parser.add_argument('-p', '--ports', type=int, default=[4], nargs='+', help="number of ports") parser.add_argument('-n', '--name', type=str, help="module name") parser.add_argument('-o', '--output', type=str, help="output file name") args = parser.parse_args() try: generate(**args.__dict__) except IOError as ex: print(ex) exit(1) def generate(ports=4, name=None, output=None): if type(ports) is int: m = n = ports elif len(ports) == 1: m = n = ports[0] else: m, n = ports if name is None: name = "axis_crosspoint_wrap_{0}x{1}".format(m, n) if output is None: output = name + ".v" print("Generating {0}x{1} port AXI stream crosspoint wrapper {2}...".format(m, n, name)) cm = (m-1).bit_length() cn = (n-1).bit_length() t = Template(u"""/* Copyright (c) 2018-2021 Alex Forencich Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ // Language: Verilog 2001 `resetall `timescale 1ns / 1ps `default_nettype none /* * AXI4-Stream {{m}}x{{n}} crosspoint (wrapper) */ module {{name}} # ( // Width of AXI stream interfaces in bits parameter DATA_WIDTH = 8, // Propagate tkeep signal parameter KEEP_ENABLE = (DATA_WIDTH>8), // tkeep signal width (words per cycle) parameter KEEP_WIDTH = (DATA_WIDTH/8), // Propagate tlast signal parameter LAST_ENABLE = 1, // Propagate tid signal parameter ID_ENABLE = 0, // tid signal width parameter ID_WIDTH = 8, // Propagate tdest signal parameter DEST_ENABLE = 0, // tdest signal width parameter DEST_WIDTH = 8, // Propagate tuser signal parameter USER_ENABLE = 1, // tuser signal width parameter USER_WIDTH = 1 ) ( input wire clk, input wire rst, /* * AXI Stream inputs */ {%- for p in range(m) %} input wire [DATA_WIDTH-1:0] s{{'%02d'%p}}_axis_tdata, input wire [KEEP_WIDTH-1:0] s{{'%02d'%p}}_axis_tkeep, input wire s{{'%02d'%p}}_axis_tvalid, input wire s{{'%02d'%p}}_axis_tlast, input wire [ID_WIDTH-1:0] s{{'%02d'%p}}_axis_tid, input wire [DEST_WIDTH-1:0] s{{'%02d'%p}}_axis_tdest, input wire [USER_WIDTH-1:0] s{{'%02d'%p}}_axis_tuser, {% endfor %} /* * AXI Stream outputs */ {%- for p in range(n) %} output wire [DATA_WIDTH-1:0] m{{'%02d'%p}}_axis_tdata, output wire [KEEP_WIDTH-1:0] m{{'%02d'%p}}_axis_tkeep, output wire m{{'%02d'%p}}_axis_tvalid, output wire m{{'%02d'%p}}_axis_tlast, output wire [ID_WIDTH-1:0] m{{'%02d'%p}}_axis_tid, output wire [DEST_WIDTH-1:0] m{{'%02d'%p}}_axis_tdest, output wire [USER_WIDTH-1:0] m{{'%02d'%p}}_axis_tuser, {% endfor %} /* * Control */ {%- for p in range(n) %} input wire [{{cm-1}}:0] m{{'%02d'%p}}_select{% if not loop.last %},{% endif %} {%- endfor %} ); axis_crosspoint #( .S_COUNT({{m}}), .M_COUNT({{n}}), .DATA_WIDTH(DATA_WIDTH), .KEEP_ENABLE(KEEP_ENABLE), .KEEP_WIDTH(KEEP_WIDTH), .LAST_ENABLE(LAST_ENABLE), .ID_ENABLE(ID_ENABLE), .ID_WIDTH(ID_WIDTH), .DEST_ENABLE(DEST_ENABLE), .DEST_WIDTH(DEST_WIDTH), .USER_ENABLE(USER_ENABLE), .USER_WIDTH(USER_WIDTH) ) axis_crosspoint_inst ( .clk(clk), .rst(rst), // AXI inputs .s_axis_tdata({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tdata{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tkeep({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tkeep{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tvalid({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tvalid{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tlast({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tlast{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tid({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tid{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tdest({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tdest{% if not loop.last %}, {% endif %}{% endfor %} }), .s_axis_tuser({ {% for p in range(m-1,-1,-1) %}s{{'%02d'%p}}_axis_tuser{% if not loop.last %}, {% endif %}{% endfor %} }), // AXI output .m_axis_tdata({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tdata{% if not loop.last %}, {% endif %}{% endfor %} }), .m_axis_tkeep({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tkeep{% if not loop.last %}, {% endif %}{% endfor %} }), .m_axis_tvalid({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tvalid{% if not loop.last %}, {% endif %}{% endfor %} }), .m_axis_tlast({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tlast{% if not loop.last %}, {% endif %}{% endfor %} }), .m_axis_tid({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tid{% if not loop.last %}, {% endif %}{% endfor %} 
}), .m_axis_tdest({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tdest{% if not loop.last %}, {% endif %}{% endfor %} }), .m_axis_tuser({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_axis_tuser{% if not loop.last %}, {% endif %}{% endfor %} }), // Control .select({ {% for p in range(n-1,-1,-1) %}m{{'%02d'%p}}_select{% if not loop.last %}, {% endif %}{% endfor %} }) ); endmodule `resetall """) print(f"Writing file '{output}'...") with open(output, 'w') as f: f.write(t.render( m=m, n=n, cm=cm, cn=cn, name=name )) f.flush() print("Done") if __name__ == "__main__": main()
main
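For reference, two hedged usage sketches of the generator above, one from the command line (flags as defined in main()) and one calling generate() directly; output file names follow the defaults computed from the port counts.

# From a shell:  python axis_crosspoint_wrap.py -p 2 8 -o crosspoint_2x8.v
# Programmatically:
generate(ports=4)                                   # writes axis_crosspoint_wrap_4x4.v
generate(ports=[2, 8], output="crosspoint_2x8.v")   # asymmetric 2x8 wrapper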
handler.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ This is the main WSGI handler file for AIM. It compiles a list of valid URLs from the 'pages' library folder, and if a URL matches it runs the specific submodule's run() function. It also handles CGI parsing and exceptions in the applications. """ # Main imports import cgi import re import sys import traceback import yaml import json import plugins.session import plugins.database import plugins.openapi # Compile valid API URLs from the pages library urls = [] if __name__ != '__main__': import pages for page in pages.handlers: urls.append((r"^(/api/%s)(/.+)?$" % page, pages.handlers[page].run)) # Load Aim master configuration config = yaml.load(open("yaml/aim.yaml")) # Instantiate database connections DB = plugins.database.AimAPIDatabase(config) # Load Open API specifications AimAPIOpenAPI = plugins.openapi.OpenAPI("yaml/openapi.yaml", ignore_extras = True) class AimAPIHTTPError(Exception): def __init__(self, code, message): self.code = code self.message = message class AimAPIWrapper: """ Middleware wrapper for exceptions in the application """ def __init__(self, path, func): self.func = func self.API = AimAPIOpenAPI self.path = path self.exception = AimAPIHTTPError def __call__(self, environ, start_response, session): """Run the function, return response OR return stacktrace""" response = None try: # Read JSON client data if any try: request_size = int(environ.get('CONTENT_LENGTH', 0)) except (ValueError): request_size = 0 requestBody = environ['wsgi.input'].read(request_size) formdata = {} if requestBody and len(requestBody) > 0: try: formdata = json.loads(requestBody.decode('utf-8')) except json.JSONDecodeError as err: start_response('400 Invalid request', [ ('Content-Type', 'application/json')]) yield json.dumps({ "code": 400, "reason": "Invalid JSON: %s" % err }) return # Validate URL against OpenAPI specs try: self.API.validate(environ['REQUEST_METHOD'], self.path, formdata) except plugins.openapi.OpenAPIException as err: start_response('400 Invalid request', [ ('Content-Type', 'application/json')]) yield json.dumps({ "code": 400, "reason": err.message }) return # Call page with env, SR and form data try: response = self.func(self, environ, formdata, session) if response: for bucket in response: yield bucket except AimAPIHTTPError as err: errHeaders = { 403: '403 Authentication failed', 404: '404 Resource not found', 500: '500 Internal Server Error', 501: '501 Gateway error' } errHeader = errHeaders[err.code] if err.code in errHeaders else "400 Bad request" start_response(errHeader, [ ('Content-Type', 'application/json')]) yield json.dumps({ "code": err.code, "reason": err.message }, indent = 4) + "\n" return except: err_type, err_value, tb = sys.exc_info() traceback_output = ['API traceback:'] traceback_output += traceback.format_tb(tb) traceback_output.append('%s: %s' % (err_type.__name__, err_value)) # We don't know if response has been given yet, try giving one, fail gracefully. try: start_response('500 Internal Server Error', [ ('Content-Type', 'application/json')]) except: pass yield json.dumps({ "code": "500", "reason": '\n'.join(traceback_output) })
"""A very simple 404 handler""" start_response("404 Not Found", [ ('Content-Type', 'application/json')]) yield json.dumps({ "code": 404, "reason": "API endpoint not found" }, indent = 4) + "\n" return def application(environ, start_response): """ This is the main handler. Every API call goes through here. Checks against the pages library, and if submod found, runs it and returns the output. """ path = environ.get('PATH_INFO', '') for regex, function in urls: m = re.match(regex, path) if m: callback = AimAPIWrapper(path, function) session = plugins.session.AimAPISession(DB, environ, config) a = 0 for bucket in callback(environ, start_response, session): if a == 0: session.headers.append(bucket) try: start_response("200 Okay", session.headers) except: pass a += 1 # WSGI prefers byte strings, so convert if regular py3 string if isinstance(bucket, str): yield bytes(bucket, encoding = 'utf-8') elif isinstance(bucket, bytes): yield bucket return for bucket in fourohfour(environ, start_response): yield bytes(bucket, encoding = 'utf-8') if __name__ == '__main__': AimAPIOpenAPI.toHTML()
def fourohfour(environ, start_response):
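For local testing, a minimal sketch that serves the WSGI application above with the standard-library reference server; the host and port are arbitrary choices, and the module's own dependencies (pages, yaml configs, database) are assumed to be importable.

# Sketch: run the AIM API handler with wsgiref for local testing.
from wsgiref.simple_server import make_server

httpd = make_server('127.0.0.1', 8000, application)
print('Serving API on http://127.0.0.1:8000/')
httpd.serve_forever()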
schema.go
package schema import ( "fmt" "strconv" "strings" "time" "github.com/pkg/errors" "github.com/v3io/v3io-tsdb/pkg/aggregate" "github.com/v3io/v3io-tsdb/pkg/config" "github.com/v3io/v3io-tsdb/pkg/utils" ) const ( Version = 2 ) func
(v3ioCfg *config.V3ioConfig, samplesIngestionRate, aggregationGranularity, aggregatesList string, crossLabelSets string) (*config.Schema, error) { return newSchema( samplesIngestionRate, aggregationGranularity, aggregatesList, crossLabelSets, v3ioCfg.MinimumChunkSize, v3ioCfg.MaximumChunkSize, v3ioCfg.MaximumSampleSize, v3ioCfg.MaximumPartitionSize, config.DefaultSampleRetentionTime, v3ioCfg.ShardingBucketsCount) } func newSchema(samplesIngestionRate, aggregationGranularity, aggregatesList string, crossLabelSets string, minChunkSize, maxChunkSize, maxSampleSize, maxPartitionSize, sampleRetention, shardingBucketsCount int) (*config.Schema, error) { rateInHours, err := rateToHours(samplesIngestionRate) if err != nil { return nil, errors.Wrapf(err, "Invalid samples ingestion rate (%s).", samplesIngestionRate) } if err := validateAggregatesGranularity(aggregationGranularity); err != nil { return nil, errors.Wrapf(err, "Failed to parse aggregation granularity '%s'.", aggregationGranularity) } chunkInterval, partitionInterval, err := calculatePartitionAndChunkInterval(rateInHours, minChunkSize, maxChunkSize, maxSampleSize, maxPartitionSize) if err != nil { return nil, errors.Wrap(err, "Failed to calculate the chunk interval.") } aggregates, err := aggregate.RawAggregatesToStringList(aggregatesList) if err != nil { return nil, errors.Wrapf(err, "Failed to parse aggregates list '%s'.", aggregatesList) } parsedCrossLabelSets := aggregate.ParseCrossLabelSets(crossLabelSets) if len(parsedCrossLabelSets) > 0 && len(aggregates) == 0 { return nil, errors.New("Cross label aggregations must be used in conjunction with aggregations.") } defaultRollup := config.Rollup{ Aggregates: []string{}, AggregationGranularity: aggregationGranularity, StorageClass: config.DefaultStorageClass, SampleRetention: sampleRetention, //TODO: make configurable LayerRetentionTime: config.DefaultLayerRetentionTime, //TODO: make configurable } var preaggregates []config.PreAggregate for _, labelSet := range parsedCrossLabelSets { preaggregate := config.PreAggregate{ Labels: labelSet, Granularity: aggregationGranularity, Aggregates: aggregates, } preaggregates = append(preaggregates, preaggregate) } tableSchema := config.TableSchema{ Version: Version, RollupLayers: []config.Rollup{defaultRollup}, ShardingBucketsCount: shardingBucketsCount, PartitionerInterval: partitionInterval, ChunckerInterval: chunkInterval, PreAggregates: preaggregates, } if len(aggregates) == 0 { aggregates = strings.Split(config.DefaultAggregates, ",") } fields, err := aggregate.SchemaFieldFromString(aggregates, "v") if err != nil { return nil, errors.Wrapf(err, "Failed to create an aggregates list from string '%s'.", aggregates) } fields = append(fields, config.SchemaField{Name: "_name", Type: "string", Nullable: false, Items: ""}) partitionSchema := config.PartitionSchema{ Version: tableSchema.Version, Aggregates: aggregates, AggregationGranularity: aggregationGranularity, StorageClass: config.DefaultStorageClass, SampleRetention: config.DefaultSampleRetentionTime, ChunckerInterval: tableSchema.ChunckerInterval, PartitionerInterval: tableSchema.PartitionerInterval, } schema := &config.Schema{ TableSchemaInfo: tableSchema, PartitionSchemaInfo: partitionSchema, Partitions: []*config.Partition{}, Fields: fields, } return schema, nil } func calculatePartitionAndChunkInterval(rateInHours, minChunkSize, maxChunkSize, maxSampleSize, maxPartitionSize int) (string, string, error) { maxNumberOfEventsPerChunk := maxChunkSize / maxSampleSize minNumberOfEventsPerChunk 
:= minChunkSize / maxSampleSize

	chunkInterval := maxNumberOfEventsPerChunk / rateInHours
	if chunkInterval == 0 {
		return "", "", fmt.Errorf("The samples ingestion rate (%v/h) is too high.", rateInHours)
	}

	// Make sure the expected chunk size is greater than the supported minimum.
	if chunkInterval < minNumberOfEventsPerChunk/rateInHours {
		return "", "", fmt.Errorf(
			"The calculated chunk size is smaller than the minimum: samples ingestion rate = %v/h, calculated chunk interval = %v, minimum size = %v",
			rateInHours, chunkInterval, minChunkSize)
	}

	actualCapacityOfChunk := chunkInterval * rateInHours * maxSampleSize
	numberOfChunksInPartition := 0

	for (numberOfChunksInPartition+24)*actualCapacityOfChunk < maxPartitionSize {
		numberOfChunksInPartition += 24
	}

	if numberOfChunksInPartition == 0 {
		return "", "", errors.Errorf("The samples ingestion rate (%v/h) is too high - cannot fit a partition in a day interval with the calculated chunk size (%v).", rateInHours, chunkInterval)
	}

	partitionInterval := numberOfChunksInPartition * chunkInterval
	return strconv.Itoa(chunkInterval) + "h", strconv.Itoa(partitionInterval) + "h", nil
}

func rateToHours(samplesIngestionRate string) (int, error) {
	parsingError := errors.New(`Invalid samples ingestion rate. The rate must be of the format "[0-9]+/[smh]". For example, "12/m".`)

	if len(samplesIngestionRate) < 3 {
		return 0, parsingError
	}
	if samplesIngestionRate[len(samplesIngestionRate)-2] != '/' {
		return 0, parsingError
	}

	last := samplesIngestionRate[len(samplesIngestionRate)-1]
	// Get the ingestion-rate samples number, ignoring the slash and time unit
	samplesIngestionRate = samplesIngestionRate[:len(samplesIngestionRate)-2]

	i, err := strconv.Atoi(samplesIngestionRate)
	if err != nil {
		return 0, errors.Wrap(err, parsingError.Error())
	}
	if i <= 0 {
		return 0, fmt.Errorf("Invalid samples ingestion rate (%s). The number of samples must be positive.", samplesIngestionRate)
	}

	switch last {
	case 's':
		return i * 60 * 60, nil
	case 'm':
		return i * 60, nil
	case 'h':
		return i, nil
	default:
		return 0, parsingError
	}
}

func validateAggregatesGranularity(aggregationGranularity string) error {

	dayMillis := 24 * int64(time.Hour/time.Millisecond)
	duration, err := utils.Str2duration(aggregationGranularity)
	if err != nil {
		return err
	}

	if dayMillis%duration != 0 && duration%dayMillis != 0 {
		return errors.New("The aggregation granularity should be a divisor or a dividend of 1 day. Examples: \"10m\"; \"30m\"; \"2h\".")
	}

	return nil
}
NewSchema
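To make the ingestion-rate arithmetic in rateToHours concrete, a small Python mirror of the same conversion (a sketch, not part of the project): the string "12/m" means 12 samples per minute, i.e. 720 samples per hour.

# Python mirror of rateToHours: "<count>/<unit>" -> samples per hour.
def rate_to_hours(rate: str) -> int:
    count, unit = int(rate[:-2]), rate[-1]          # e.g. "12/m" -> 12, 'm'
    return count * {'s': 3600, 'm': 60, 'h': 1}[unit]

assert rate_to_hours("12/m") == 720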
tags.go
type Tags struct { RequestMethod string RequestUrl string } func (m Tags) String() string { return fmt.Sprintf("method=%s,url=%s", m.RequestMethod, m.RequestUrl) }
package metrics import "fmt"
database.py
from config import BaseConfig import gridfs class Userdb: client = BaseConfig.MONGOD_DATABASE_URI # client = MongoClient('localhost:27017') db = BaseConfig.DB_NAME # db = 'users' def __init__(self, coll): # self.db = db self.coll = coll def conn(self): return self.client[self.db][self.coll] def create_user(self, user_details): return self.conn().insert_one(user_details).inserted_id def update_user(self, user_details, new_data): return self.conn().update(user_details, {'$set': new_data}, True) def find_one_update(self, user_info, update_info): # has to be in the form of json pairs return self.conn().find_one_and_update(user_info, {'$push': update_info}) def find_one(self, user_info): # has to be in the form of json pairs return self.conn().find(user_info).count() def find_user_details(self, user_info, criteria=""): return self.conn().find_one(user_info, criteria) def find_user(self, user_info): return self.conn().find_one(user_info) def find_users(self, user_info, criteria=""): return self.conn().find(user_info, criteria) def insert_many(self, data): return self.conn().insert_many(data) def coll_exist(self): return self.conn().count() def clear_old(self): return self.conn().delete_many({"email_checked": False}) class Testdb: client = BaseConfig.MONGOD_DATABASE_URI # client = MongoClient('localhost:27017') db = BaseConfig.DATABASE # db = 'users' def __init__(self): # self.db = db self.conn = gridfs.GridFS(self.db) def
(self, data):
        # Delegate to GridFS; calling self.put here would recurse forever.
        return self.conn.put(data)

    def exists(self, user_info):
        # has to be in the form of json pairs
        return self.conn.exists(user_info)

    def get(self, data):
        return self.conn.get(data)

    def find_one(self, data):
        return self.conn.find_one(data)

    def find(self, data):
        return self.conn.find(data)
put
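A brief sketch of the Userdb wrapper in use; the collection name and document fields are hypothetical. Note that, as written, find_one() returns a match count rather than a document (find_user() is the accessor that returns one document).

# Sketch: hypothetical collection and document fields.
users = Userdb('accounts')
uid = users.create_user({'email': 'a@example.com', 'email_checked': False})
users.update_user({'_id': uid}, {'email_checked': True})
print(users.find_user({'_id': uid}))   # the full document
print(users.find_one({'_id': uid}))    # a match count, despite the name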
index.tsx
import React, { useEffect, ReactNode } from 'react' import { Link } from 'react-router-dom' import 'default-passive-events' import { Tooltip } from 'antd' import Content from '../../components/Content' import { getStatisticAddressCount, getStatisticCellCount, getStatisticTransactionCount, getStatisticAddressBalanceRank, getStatisticBalanceDistribution, getStatisticTxFeeHistory, } from '../../service/app/charts/activities' import { getStatisticDifficultyHashRate, getStatisticDifficultyUncleRateEpoch, getStatisticDifficulty, getStatisticHashRate, getStatisticUncleRate, getStatisticMinerAddressDistribution, } from '../../service/app/charts/mining' import { getStatisticTotalDaoDeposit, getStatisticNewDaoDeposit, getStatisticCirculationRatio, } from '../../service/app/charts/nervosDao' import { useDispatch } from '../../contexts/providers' import i18n from '../../utils/i18n' import HelpIcon from '../../assets/qa_help.png' import { DifficultyHashRateChart } from './mining/DifficultyHashRate' import { DifficultyUncleRateEpochChart } from './mining/DifficultyUncleRateEpoch' import { TransactionCountChart } from './activities/TransactionCount' import { AddressCountChart } from './activities/AddressCount' import { CellCountChart } from './activities/CellCount' import { TotalDaoDepositChart } from './nervosDao/TotalDaoDeposit' import { ChartsPanel, ChartCardPanel, ChartsTitle, ChartsContent } from './styled' import { AddressBalanceRankChart } from './activities/AddressBalanceRank' import { DifficultyChart } from './mining/Difficulty' import { HashRateChart } from './mining/HashRate' import { UncleRateChart } from './mining/UncleRate' import { BalanceDistributionChart } from './activities/BalanceDistribution' import { TxFeeHistoryChart } from './activities/TxFeeHistory' import { BlockTimeDistributionChart } from './block/BlockTimeDistribution' import { getStatisticBlockTimeDistribution, getStatisticEpochTimeDistribution, getStatisticAverageBlockTimes, } from '../../service/app/charts/block' import { EpochTimeDistributionChart } from './block/EpochTimeDistribution' import { NewDaoDepositChart } from './nervosDao/NewDaoDeposit' import { CirculationRatioChart } from './nervosDao/CirculationRatio' import { AverageBlockTimeChart } from './block/AverageBlockTime' import { TotalSupplyChart } from './monetary/TotalSupply' import { getStatisticTotalSupply, getStatisticAnnualPercentageCompensation, getStatisticSecondaryIssuance, getStatisticInflationRate, getStatisticLiquidity, } from '../../service/app/charts/monetary' import { AnnualPercentageCompensationChart } from './monetary/AnnualPercentageCompensation' import { SecondaryIssuanceChart } from './monetary/SecondaryIssuance' import { InflationRateChart } from './monetary/InflationRate' import { LiquidityChart } from './monetary/Liquidity' import { MinerAddressDistributionChart } from './mining/MinerAddressDistribution' import { isMobile } from '../../utils/screen' interface ChartData { title: string chart: ReactNode path: string description?: string } interface ChartCategory { category: string charts: ChartData[] } const ChartTitle = ({ chartData }: { chartData: ChartData }) => { return ( <div className="chart__card__title__penal"> <div className="chart__card_title">{chartData.title}</div> {chartData.description && ( <Tooltip placement="bottom" title={chartData.description}> <img src={HelpIcon} alt="chart help" /> </Tooltip> )} </div> ) } const ChartCard = ({ chartData }: { chartData: ChartData }) => { return ( <ChartCardPanel> {isMobile() && <ChartTitle 
chartData={chartData} />} <Link to={chartData.path}> {!isMobile() && <ChartTitle chartData={chartData} />} <div className="chart__card_body">{chartData.chart}</div> </Link> </ChartCardPanel> ) } const NullEvent = () => {} const chartsData = (): ChartCategory[] => { return [ { category: i18n.t('statistic.category_block'), charts: [ { title: `${i18n.t('statistic.block_time_distribution')}`, chart: <BlockTimeDistributionChart isThumbnail />, path: '/charts/block-time-distribution', description: i18n.t('statistic.block_time_distribution_description'), }, { title: `${i18n.t('statistic.epoch_time_distribution')}`,
description: i18n.t('statistic.epoch_time_distribution_description'), }, { title: `${i18n.t('statistic.average_block_time')}`, chart: <AverageBlockTimeChart isThumbnail />, path: '/charts/average-block-time', description: i18n.t('statistic.average_block_time_description'), }, ], }, { category: i18n.t('statistic.category_mining'), charts: [ { title: `${i18n.t('block.difficulty')} & ${i18n.t('block.hash_rate')}`, chart: <DifficultyHashRateChart isThumbnail />, path: '/charts/difficulty-hash-rate', }, { title: `${i18n.t('block.difficulty')} & ${i18n.t('block.uncle_rate')} & ${i18n.t('block.epoch_time')} & ...`, chart: <DifficultyUncleRateEpochChart isThumbnail />, path: '/charts/difficulty-uncle-rate', }, { title: `${i18n.t('block.difficulty')}`, chart: <DifficultyChart isThumbnail />, path: '/charts/difficulty', }, { title: `${i18n.t('block.hash_rate')}`, chart: <HashRateChart isThumbnail />, path: '/charts/hash-rate', }, { title: `${i18n.t('block.uncle_rate')}`, chart: <UncleRateChart isThumbnail />, path: '/charts/uncle-rate', description: i18n.t('statistic.uncle_rate_description'), }, { title: `${i18n.t('statistic.miner_addresses_rank')}`, chart: <MinerAddressDistributionChart isThumbnail />, path: '/charts/miner-address-distribution', }, ], }, { category: i18n.t('statistic.category_activities'), charts: [ { title: `${i18n.t('statistic.transaction_count')}`, chart: <TransactionCountChart isThumbnail />, path: '/charts/transaction-count', }, { title: `${i18n.t('statistic.address_count')}`, chart: <AddressCountChart isThumbnail />, path: '/charts/address-count', description: i18n.t('statistic.address_count_description'), }, { title: i18n.t('statistic.cell_count'), chart: <CellCountChart isThumbnail />, path: '/charts/cell-count', }, { title: `${i18n.t('statistic.balance_ranking')}`, chart: <AddressBalanceRankChart clickEvent={NullEvent} isThumbnail />, path: '/charts/address-balance-rank', description: i18n.t('statistic.balance_ranking_description'), }, { title: `${i18n.t('statistic.balance_distribution')}`, chart: <BalanceDistributionChart isThumbnail />, path: '/charts/balance-distribution', description: i18n.t('statistic.balance_distribution_description'), }, { title: `${i18n.t('statistic.tx_fee_history')}`, chart: <TxFeeHistoryChart isThumbnail />, path: '/charts/tx-fee-history', description: i18n.t('statistic.tx_fee_description'), }, ], }, { category: i18n.t('blockchain.nervos_dao'), charts: [ { title: `${i18n.t('statistic.total_dao_deposit_title')}`, chart: <TotalDaoDepositChart isThumbnail />, path: '/charts/total-dao-deposit', description: i18n.t('statistic.total_dao_deposit_description'), }, { title: `${i18n.t('statistic.new_dao_deposit_title')}`, chart: <NewDaoDepositChart isThumbnail />, path: '/charts/new-dao-deposit', }, { title: `${i18n.t('statistic.circulation_ratio')}`, chart: <CirculationRatioChart isThumbnail />, path: '/charts/circulation-ratio', description: i18n.t('statistic.deposit_to_circulation_ratio_description'), }, ], }, { category: i18n.t('statistic.category_monetary'), charts: [ { title: `${i18n.t('statistic.total_supply')}`, chart: <TotalSupplyChart isThumbnail />, path: '/charts/total-supply', description: i18n.t('statistic.total_supply_description'), }, { title: `${i18n.t('statistic.nominal_apc')}`, chart: <AnnualPercentageCompensationChart isThumbnail />, path: '/charts/nominal-apc', description: i18n.t('statistic.nominal_rpc_description'), }, { title: `${i18n.t('nervos_dao.secondary_issuance')}`, chart: <SecondaryIssuanceChart isThumbnail />, path: 
'/charts/secondary-issuance', description: i18n.t('statistic.secondary_issuance_description'), }, { title: `${i18n.t('statistic.inflation_rate')}`, chart: <InflationRateChart isThumbnail />, path: '/charts/inflation-rate', description: i18n.t('statistic.inflation_rate_description'), }, { title: `${i18n.t('statistic.liquidity')}`, chart: <LiquidityChart isThumbnail />, path: '/charts/liquidity', }, ], }, ] } export default () => { const dispatch = useDispatch() useEffect(() => { getStatisticDifficultyHashRate(dispatch) getStatisticDifficultyUncleRateEpoch(dispatch) getStatisticDifficulty(dispatch) getStatisticHashRate(dispatch) getStatisticUncleRate(dispatch) getStatisticMinerAddressDistribution(dispatch) getStatisticAddressCount(dispatch) getStatisticCellCount(dispatch) getStatisticTransactionCount(dispatch) getStatisticTotalDaoDeposit(dispatch) getStatisticNewDaoDeposit(dispatch) getStatisticCirculationRatio(dispatch) getStatisticAddressBalanceRank(dispatch) getStatisticBalanceDistribution(dispatch) getStatisticTxFeeHistory(dispatch) getStatisticBlockTimeDistribution(dispatch) getStatisticEpochTimeDistribution(dispatch) getStatisticAverageBlockTimes(dispatch) getStatisticTotalSupply(dispatch) getStatisticAnnualPercentageCompensation(dispatch) getStatisticSecondaryIssuance(dispatch) getStatisticInflationRate(dispatch) getStatisticLiquidity(dispatch) }, [dispatch]) return ( <Content> <ChartsContent className="container"> <ChartsTitle>{i18n.t('statistic.charts_title')}</ChartsTitle> {chartsData().map(chartData => ( <ChartsPanel key={chartData.category}> <div className="charts__category__title">{chartData.category}</div> <div className="charts__category__panel"> {chartData.charts.map(chart => ( <ChartCard chartData={chart} key={chart.title} /> ))} </div> </ChartsPanel> ))} </ChartsContent> </Content> ) }
chart: <EpochTimeDistributionChart isThumbnail />, path: '/charts/epoch-time-distribution',
Alexa.py
import speech_recognition as sr
import pyttsx3
import pywhatkit
import datetime
import wikipedia
import pyjokes
import webbrowser
import os
# import pyaudio

listener = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty("voices")
engine.setProperty('voice', voices[1].id)


def
(text):
    engine.say(text)
    engine.runAndWait()


engine.say('Hello Rashid, I am your virtual assistant. How can I help you?')
engine.runAndWait()


def take_command():
    # Start with an empty command so we never return an unbound variable
    # when recognition fails and the except branch below is taken.
    command = ''
    try:
        with sr.Microphone() as source:
            print("listening....")
            voice = listener.listen(source)
            command = listener.recognize_google(voice)
            command = command.lower()
            if 'alexa' in command:
                command = command.replace('alexa', '')
            print(command)
    except:
        # Bare except kept from the original: any microphone or
        # recognition failure simply yields an empty command.
        pass
    return command


def run_alexa():
    command = take_command()
    print(command)
    if 'song' in command:
        song = command.replace('song', '')
        talk('playing ' + song)
        pywhatkit.playonyt(song)
    elif 'time' in command:
        time = datetime.datetime.now().strftime('%I:%M %p')
        print(time)
        talk('current time is ' + time)
    elif 'who ' in command:
        person = command.replace('who ', '')
        info = wikipedia.summary(person, 1)
        print(info)
        talk(info)
    elif 'are you single' in command:
        talk('No, i am in a relationship with your wifi')
    elif 'joke' in command:
        talk(pyjokes.get_joke())
    elif 'wikipedia' in command:
        ansu = command.replace('wikipedia', '')
        answer = wikipedia.summary(ansu, sentences=2)
        print(answer)
        talk(answer)
    elif 'open youtube' in command:
        print('opening youtube.....')
        talk('opening youtube.')
        webbrowser.open('youtube.com')
    elif 'open whatsapp' in command:
        webbrowser.open('web.whatsapp.com')
        print('opening whatsapp.....')
        talk('opening whatsapp.')
    elif 'open stackoverflow' in command:
        webbrowser.open('stackoverflow.com')
        print('opening stackoverflow.....')
        talk('opening stack overflow.')
    elif 'music' in command:
        music_dir = 'C:\\Music'
        music = os.listdir(music_dir)
        # Guard against an empty folder instead of indexing a fixed
        # position, which raised IndexError with fewer than six files.
        if music:
            os.startfile(os.path.join(music_dir, music[0]))
    elif 'open vs code' in command:  # command is lowercased, so match lowercase
        code_path = "C:\\Users\\Rashid khan\\AppData\\Local\\Programs\\Microsoft VS Code\\Code.exe"
        os.startfile(code_path)
    elif 'how are you' in command:
        talk('I am feeling awesome and ready for your command')
    elif 'hear me' in command:
        talk('yes, I am getting you Rashid')
    elif 'exit' in command:
        exit()
    else:
        talk('Please say the command again.')


while True:
    run_alexa()
talk
saturate.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = _default; var _lodash = _interopRequireDefault(require("lodash")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const { asValue, nameClass
} = require('../../jit/pluginUtils'); function _default() { return function ({ config, matchUtilities, addUtilities, theme, variants }) { if (config('mode') === 'jit') { matchUtilities({ saturate: (modifier, { theme }) => { let value = asValue(modifier, theme.saturate); if (value === undefined) { return []; } return { [nameClass('saturate', modifier)]: { '--tw-saturate': `saturate(${value})` } }; } }); } else { const utilities = _lodash.default.fromPairs(_lodash.default.map(theme('saturate'), (value, modifier) => { return [nameClass('saturate', modifier), { '--tw-saturate': Array.isArray(value) ? value.map(v => `saturate(${v})`).join(' ') : `saturate(${value})` }]; })); addUtilities(utilities, variants('saturate')); } }; }
histogram_test.go
// Copyright 2020, OpenTelemetry Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package histogram import ( "context" "math" "math/rand" "os" "sort" "testing" "unsafe" "github.com/stretchr/testify/require" "go.opentelemetry.io/otel/api/core" ottest "go.opentelemetry.io/otel/internal/testing" export "go.opentelemetry.io/otel/sdk/export/metric" "go.opentelemetry.io/otel/sdk/metric/aggregator/test" ) const count = 100 type policy struct { name string absolute bool sign func() int } var ( positiveOnly = policy{ name: "absolute", absolute: true, sign: func() int { return +1 }, } negativeOnly = policy{ name: "negative", absolute: false, sign: func() int { return -1 }, } positiveAndNegative = policy{ name: "positiveAndNegative", absolute: false, sign: func() int { if rand.Uint32() > math.MaxUint32/2
return 1 }, } boundaries = map[core.NumberKind][]core.Number{ core.Float64NumberKind: {core.NewFloat64Number(500), core.NewFloat64Number(250), core.NewFloat64Number(750)}, core.Int64NumberKind: {core.NewInt64Number(500), core.NewInt64Number(250), core.NewInt64Number(750)}, } ) // Ensure struct alignment prior to running tests. func TestMain(m *testing.M) { fields := []ottest.FieldOffset{ { Name: "Aggregator.states", Offset: unsafe.Offsetof(Aggregator{}.states), }, { Name: "state.buckets", Offset: unsafe.Offsetof(state{}.buckets), }, { Name: "state.sum", Offset: unsafe.Offsetof(state{}.sum), }, { Name: "state.count", Offset: unsafe.Offsetof(state{}.count), }, } if !ottest.Aligned8Byte(fields, os.Stderr) { os.Exit(1) } os.Exit(m.Run()) } func TestHistogramAbsolute(t *testing.T) { test.RunProfiles(t, func(t *testing.T, profile test.Profile) { histogram(t, profile, positiveOnly) }) } func TestHistogramNegativeOnly(t *testing.T) { test.RunProfiles(t, func(t *testing.T, profile test.Profile) { histogram(t, profile, negativeOnly) }) } func TestHistogramPositiveAndNegative(t *testing.T) { test.RunProfiles(t, func(t *testing.T, profile test.Profile) { histogram(t, profile, positiveAndNegative) }) } // Validates count, sum and buckets for a given profile and policy func histogram(t *testing.T, profile test.Profile, policy policy) { ctx := context.Background() descriptor := test.NewAggregatorTest(export.MeasureKind, profile.NumberKind, !policy.absolute) agg := New(descriptor, boundaries[profile.NumberKind]) all := test.NewNumbers(profile.NumberKind) for i := 0; i < count; i++ { x := profile.Random(policy.sign()) all.Append(x) test.CheckedUpdate(t, agg, x, descriptor) } agg.Checkpoint(ctx, descriptor) all.Sort() asum, err := agg.Sum() sum := all.Sum() require.InEpsilon(t, sum.CoerceToFloat64(profile.NumberKind), asum.CoerceToFloat64(profile.NumberKind), 0.000000001, "Same sum - "+policy.name) require.Nil(t, err) count, err := agg.Count() require.Equal(t, all.Count(), count, "Same count -"+policy.name) require.Nil(t, err) require.Equal(t, len(agg.checkpoint().buckets.Counts), len(boundaries[profile.NumberKind])+1, "There should be b + 1 counts, where b is the number of boundaries") counts := calcBuckets(all.Points(), profile) for i, v := range counts { bCount := agg.checkpoint().buckets.Counts[i].AsUint64() require.Equal(t, v, bCount, "Wrong bucket #%d count: %v != %v", i, counts, agg.checkpoint().buckets.Counts) } } func TestHistogramMerge(t *testing.T) { ctx := context.Background() test.RunProfiles(t, func(t *testing.T, profile test.Profile) { descriptor := test.NewAggregatorTest(export.MeasureKind, profile.NumberKind, false) agg1 := New(descriptor, boundaries[profile.NumberKind]) agg2 := New(descriptor, boundaries[profile.NumberKind]) all := test.NewNumbers(profile.NumberKind) for i := 0; i < count; i++ { x := profile.Random(+1) all.Append(x) test.CheckedUpdate(t, agg1, x, descriptor) } for i := 0; i < count; i++ { x := profile.Random(+1) all.Append(x) test.CheckedUpdate(t, agg2, x, descriptor) } agg1.Checkpoint(ctx, descriptor) agg2.Checkpoint(ctx, descriptor) test.CheckedMerge(t, agg1, agg2, descriptor) all.Sort() asum, err := agg1.Sum() sum := all.Sum() require.InEpsilon(t, sum.CoerceToFloat64(profile.NumberKind), asum.CoerceToFloat64(profile.NumberKind), 0.000000001, "Same sum - absolute") require.Nil(t, err) count, err := agg1.Count() require.Equal(t, all.Count(), count, "Same count - absolute") require.Nil(t, err) require.Equal(t, len(agg1.checkpoint().buckets.Counts), 
len(boundaries[profile.NumberKind])+1, "There should be b + 1 counts, where b is the number of boundaries") counts := calcBuckets(all.Points(), profile) for i, v := range counts { bCount := agg1.checkpoint().buckets.Counts[i].AsUint64() require.Equal(t, v, bCount, "Wrong bucket #%d count: %v != %v", i, counts, agg1.checkpoint().buckets.Counts) } }) } func TestHistogramNotSet(t *testing.T) { ctx := context.Background() test.RunProfiles(t, func(t *testing.T, profile test.Profile) { descriptor := test.NewAggregatorTest(export.MeasureKind, profile.NumberKind, false) agg := New(descriptor, boundaries[profile.NumberKind]) agg.Checkpoint(ctx, descriptor) asum, err := agg.Sum() require.Equal(t, core.Number(0), asum, "Empty checkpoint sum = 0") require.Nil(t, err) count, err := agg.Count() require.Equal(t, int64(0), count, "Empty checkpoint count = 0") require.Nil(t, err) require.Equal(t, len(agg.checkpoint().buckets.Counts), len(boundaries[profile.NumberKind])+1, "There should be b + 1 counts, where b is the number of boundaries") for i, bCount := range agg.checkpoint().buckets.Counts { require.Equal(t, uint64(0), bCount.AsUint64(), "Bucket #%d must have 0 observed values", i) } }) } func calcBuckets(points []core.Number, profile test.Profile) []uint64 { sortedBoundaries := numbers{ numbers: make([]core.Number, len(boundaries[profile.NumberKind])), kind: profile.NumberKind, } copy(sortedBoundaries.numbers, boundaries[profile.NumberKind]) sort.Sort(&sortedBoundaries) boundaries := sortedBoundaries.numbers counts := make([]uint64, len(boundaries)+1) idx := 0 for _, p := range points { for idx < len(boundaries) && p.CompareNumber(profile.NumberKind, boundaries[idx]) != -1 { idx++ } counts[idx]++ } return counts }
{ return -1 }
check_domain.go
package dm

//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// CheckDomain invokes the dm.CheckDomain API synchronously
// api document: https://help.aliyun.com/api/dm/checkdomain.html
func (client *Client) CheckDomain(request *CheckDomainRequest) (response *CheckDomainResponse, err error) {
	response = CreateCheckDomainResponse()
	err = client.DoAction(request, response)
	return
}

// CheckDomainWithChan invokes the dm.CheckDomain API asynchronously
// api document: https://help.aliyun.com/api/dm/checkdomain.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) CheckDomainWithChan(request *CheckDomainRequest) (<-chan *CheckDomainResponse, <-chan error) {
	responseChan := make(chan *CheckDomainResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.CheckDomain(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}

// CheckDomainWithCallback invokes the dm.CheckDomain API asynchronously
// api document: https://help.aliyun.com/api/dm/checkdomain.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) CheckDomainWithCallback(request *CheckDomainRequest, callback func(response *CheckDomainResponse, err error)) <-chan int {
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *CheckDomainResponse
		var err error
		defer close(result)
		response, err = client.CheckDomain(request)
		callback(response, err)
		result <- 1
	})
	if err != nil
	return result
}

// CheckDomainRequest is the request struct for api CheckDomain
type CheckDomainRequest struct {
	*requests.RpcRequest
	OwnerId              requests.Integer `position:"Query" name:"OwnerId"`
	ResourceOwnerAccount string           `position:"Query" name:"ResourceOwnerAccount"`
	ResourceOwnerId      requests.Integer `position:"Query" name:"ResourceOwnerId"`
	DomainId             requests.Integer `position:"Query" name:"DomainId"`
}

// CheckDomainResponse is the response struct for api CheckDomain
type CheckDomainResponse struct {
	*responses.BaseResponse
	RequestId string `json:"RequestId" xml:"RequestId"`
}

// CreateCheckDomainRequest creates a request to invoke CheckDomain API
func CreateCheckDomainRequest() (request *CheckDomainRequest) {
	request = &CheckDomainRequest{
		RpcRequest: &requests.RpcRequest{},
	}
	request.InitWithApiInfo("Dm", "2015-11-23", "CheckDomain", "", "")
	return
}

// CreateCheckDomainResponse creates a response to parse from CheckDomain response
func CreateCheckDomainResponse() (response *CheckDomainResponse) {
	response = &CheckDomainResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
{
		defer close(result)
		callback(nil, err)
		result <- 0
	}
machinepool.go
package openstack

// MachinePool stores the configuration for a machine pool installed
// on OpenStack.
type MachinePool struct {
	// FlavorName defines the OpenStack Nova flavor.
	// eg. m1.large
	FlavorName string `json:"type"`

	// RootVolume defines the root volume for instances in the machine pool.
	// The instances use ephemeral disks if not set.
	// +optional
	RootVolume *RootVolume `json:"rootVolume,omitempty"`

	// AdditionalNetworkIDs contains IDs of additional networks for machines,
	// where each ID is presented in UUID v4 format.
	// AdditionalSecurityGroupIDs contains IDs of additional security groups for machines,
	// where each ID is presented in UUID v4 format.
	// +optional
	AdditionalSecurityGroupIDs []string `json:"additionalSecurityGroupIDs,omitempty"`

	// Zones is the list of availability zones where the instances should be deployed.
	// If no zones are provided, all instances will be deployed on OpenStack Nova default availability zone
	// +optional
	Zones []string `json:"zones,omitempty"`
}

// Set sets the values from `required` to `o`.
func (o *MachinePool) Set(required *MachinePool) {
	if required == nil || o == nil {
		return
	}

	if required.FlavorName != "" {
		o.FlavorName = required.FlavorName
	}

	if required.RootVolume != nil {
		if o.RootVolume == nil {
			o.RootVolume = new(RootVolume)
		}
		o.RootVolume.Size = required.RootVolume.Size
		o.RootVolume.Type = required.RootVolume.Type
		if len(required.RootVolume.Zones) > 0 {
			o.RootVolume.Zones = required.RootVolume.Zones
		}
	}

	if required.AdditionalNetworkIDs != nil {
		o.AdditionalNetworkIDs = append(required.AdditionalNetworkIDs[:0:0], required.AdditionalNetworkIDs...)
	}

	if required.AdditionalSecurityGroupIDs != nil {
		o.AdditionalSecurityGroupIDs = append(required.AdditionalSecurityGroupIDs[:0:0], required.AdditionalSecurityGroupIDs...)
	}

	if len(required.Zones) > 0 {
		o.Zones = required.Zones
	}
}

// RootVolume defines the storage for an instance.
type RootVolume struct {
	// Size defines the size of the volume in gibibytes (GiB).
	// Required
	Size int `json:"size"`

	// Type defines the type of the volume.
	// Required
	Type string `json:"type"`

	// Zones is the list of availability zones where the root volumes should be deployed.
	// If no zones are provided, all instances will be deployed on OpenStack Cinder default availability zone
	// +optional
	Zones []string `json:"zones,omitempty"`
}
	// Allowed address pairs won't be created for the additional networks.
	// +optional
	AdditionalNetworkIDs []string `json:"additionalNetworkIDs,omitempty"`
Reference.js
"use strict"; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) { throw new TypeError("Private accessor was defined without a setter"); } if ( typeof state === "function" ? receiver !== state || !f : !state.has(receiver) ) { throw new TypeError( "Cannot write private member to an object whose class did not declare it", ); } return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) { throw new TypeError("Private accessor was defined without a getter"); } if ( typeof state === "function" ? receiver !== state || !f : !state.has(receiver) ) { throw new TypeError( "Cannot read private member from an object whose class did not declare it", ); } return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var _Reference_flag, _Reference_referenceType; Object.defineProperty(exports, "__esModule", { value: true }); exports.ReferenceTypeFlag = exports.ReferenceFlag = exports.Reference = void 0; const ID_1 = require("../ID"); var ReferenceFlag; (function (ReferenceFlag) { ReferenceFlag[ReferenceFlag["Read"] = 1] = "Read"; ReferenceFlag[ReferenceFlag["Write"] = 2] = "Write"; ReferenceFlag[ReferenceFlag["ReadWrite"] = 3] = "ReadWrite"; })(ReferenceFlag || (ReferenceFlag = {})); exports.ReferenceFlag = ReferenceFlag; const generator = (0, ID_1.createIdGenerator)(); var ReferenceTypeFlag; (function (ReferenceTypeFlag) { ReferenceTypeFlag[ReferenceTypeFlag["Value"] = 1] = "Value"; ReferenceTypeFlag[ReferenceTypeFlag["Type"] = 2] = "Type"; })(ReferenceTypeFlag || (ReferenceTypeFlag = {})); exports.ReferenceTypeFlag = ReferenceTypeFlag; /** * A Reference represents a single occurrence of an identifier in code. */ class
{ constructor( identifier, scope, flag, writeExpr, maybeImplicitGlobal, init, referenceType = ReferenceTypeFlag.Value, ) { /** * A unique ID for this instance - primarily used to help debugging and testing */ this.$id = generator(); /** * The read-write mode of the reference. */ _Reference_flag.set(this, void 0); /** * In some cases, a reference may be a type, value or both a type and value reference. */ _Reference_referenceType.set(this, void 0); this.identifier = identifier; this.from = scope; this.resolved = null; __classPrivateFieldSet(this, _Reference_flag, flag, "f"); if (this.isWrite()) { this.writeExpr = writeExpr; this.init = init; } this.maybeImplicitGlobal = maybeImplicitGlobal; __classPrivateFieldSet(this, _Reference_referenceType, referenceType, "f"); } /** * True if this reference can reference types */ get isTypeReference() { return (__classPrivateFieldGet(this, _Reference_referenceType, "f") & ReferenceTypeFlag.Type) !== 0; } /** * True if this reference can reference values */ get isValueReference() { return (__classPrivateFieldGet(this, _Reference_referenceType, "f") & ReferenceTypeFlag.Value) !== 0; } /** * Whether the reference is writeable. * @public */ isWrite() { return !!(__classPrivateFieldGet(this, _Reference_flag, "f") & ReferenceFlag.Write); } /** * Whether the reference is readable. * @public */ isRead() { return !!(__classPrivateFieldGet(this, _Reference_flag, "f") & ReferenceFlag.Read); } /** * Whether the reference is read-only. * @public */ isReadOnly() { return __classPrivateFieldGet(this, _Reference_flag, "f") === ReferenceFlag.Read; } /** * Whether the reference is write-only. * @public */ isWriteOnly() { return __classPrivateFieldGet(this, _Reference_flag, "f") === ReferenceFlag.Write; } /** * Whether the reference is read-write. * @public */ isReadWrite() { return __classPrivateFieldGet(this, _Reference_flag, "f") === ReferenceFlag.ReadWrite; } } exports.Reference = Reference; _Reference_flag = new WeakMap(), _Reference_referenceType = new WeakMap(); //# sourceMappingURL=Reference.js.map
Reference
pantry-folder.ts
import { Component, Input } from '@angular/core';
import { ModalController } from 'ionic-angular';

import { FolderModal } from '../../modals/folder/folder';
import { FirebaseService } from '../../providers/firebase.provider';

@Component({
  selector: 'pantry-folder',
  templateUrl: 'pantry-folder.html'
})
export class PantryFolderComponent {
  @Input('item') item;
  @Input('type') type;

  constructor(private modalCtrl: ModalController, private _fbService: FirebaseService) {
  }

  onEditFolder() {
    const folderModal = this.modalCtrl.create(FolderModal, { type: 'edit' });
    folderModal.onDidDismiss(data => {
      if (data) {
        if (data.type === 'remove') {
          this._fbService.removeItem(this.item, this.type.key);
        } else {
          this._fbService.updateFolder(this.item, this.type.key, { title: data.title });
        }
      }
    });
    folderModal.present();
  }

  expandItem(item) {
    this._fbService.updateFolder(item, this.type.key, { expanded: !item.expanded });
  }
}
main.rs
use std::env; use std::io::Write; use aoc_util::input::{FileReader, FromFile}; fn main() { let input_file = match env::args().nth(1) { Some(input_file) => input_file, None => { println!("Please supply input file!"); std::process::exit(1); } }; let input: Vec<u32> = match FileReader::new().split_char('-').read_from_file(input_file) { Ok(input) => input, Err(e) => { println!("Error reading input: {}", e); std::process::exit(1); } }; assert_eq!(2, input.len()); let min = input[0]; let max = input[1]; let count = count_valid_passwords(min, max, true); println!("Different valid passwords: {}", count); let count = count_valid_passwords(min, max, false); println!("Different valid passwords (no large groups): {}", count); } fn count_valid_passwords(min: u32, max: u32, allow_larger_group: bool) -> usize { let validator = Validator::new(min, max, allow_larger_group); (min..=max).filter(|&pwd| validator.validate(pwd)).count() } #[derive(Debug)] struct Validator { min: u32, max: u32, allow_larger_group: bool, } impl Validator { fn new(min: u32, max: u32, allow_larger_group: bool) -> Self { Self { min, max, allow_larger_group, } } fn validate(&self, password: u32) -> bool { // Check here to prevent buffer overflow when converting to digits if !Self::six_digits(password) { return false; } let mut digits = [0 as u8; 6]; write!(&mut digits[..], "{}", password).unwrap(); self.in_range(password) && self.check_adjacent_digits(&digits) && Self::never_decrease(&digits) } fn six_digits(password: u32) -> bool { password > 99999 && password < 1_000_000 } fn in_range(&self, password: u32) -> bool { password >= self.min && password <= self.max } fn check_adjacent_digits(&self, password: &[u8]) -> bool { let mut previous = 0; let mut has_adjacent = false; let mut current_adjacent = 0; for &c in password.iter() { if c == previous { current_adjacent += 1; has_adjacent = true; } else { if current_adjacent == 1 { return true; } current_adjacent = 0; } previous = c; } self.allow_larger_group && has_adjacent || current_adjacent == 1 } fn never_decrease(password: &[u8]) -> bool { let mut previous = 0; for &c in password.iter() { if c < previous { return false; } previous = c; } true } } #[cfg(test)] mod tests { use super::*; #[test] fn
() { let range = Validator::new(0, u32::max_value(), true); assert!(!range.validate(99999)); assert!(range.validate(599999)); assert!(range.validate(999999)); assert!(!range.validate(1222222)); } #[test] fn within_range() { let range = Validator::new(234456, 456677, true); assert!(!range.validate(234455)); assert!(range.validate(234456)); assert!(range.validate(444444)); assert!(range.validate(456677)); assert!(!range.validate(456678)); } #[test] fn adjacent_digits() { let range = Validator::new(300000, 500000, true); assert!(!range.validate(345678)); assert!(range.validate(344478)); assert!(range.validate(344567)); } #[test] fn never_decrease() { let range = Validator::new(300000, 500000, true); assert!(!range.validate(432100)); assert!(range.validate(444444)); assert!(range.validate(455677)); } #[test] fn examples() { let range = Validator::new(0, u32::max_value(), true); assert!(range.validate(111111)); assert!(!range.validate(223450)); assert!(!range.validate(123789)); } #[test] fn larger_groups() { let range = Validator::new(0, u32::max_value(), false); assert!(range.validate(112233)); assert!(!range.validate(123444)); assert!(range.validate(111122)); } #[test] fn part_1() { let input: Vec<u32> = FileReader::new() .split_char('-') .read_from_file("input.txt") .unwrap(); assert_eq!(2, input.len()); let min = input[0]; let max = input[1]; let count = count_valid_passwords(min, max, true); assert_eq!(1694, count); } #[test] fn part_2() { let input: Vec<u32> = FileReader::new() .split_char('-') .read_from_file("input.txt") .unwrap(); assert_eq!(2, input.len()); let min = input[0]; let max = input[1]; let count = count_valid_passwords(min, max, false); assert_eq!(1148, count); } }
six_digit_number
helpers.py
class Helpers:
    _cache = {}

    @classmethod
    def
(cls, key, scope=None, func=None):
        # Resolve the target mapping: a per-scope bucket when a scope is
        # given, otherwise the top-level cache.
        target = cls._cache.setdefault(scope, {}) if scope is not None else cls._cache
        if key not in target:
            target[key] = None if func is None else func()
        return target[key]

    @classmethod
    def cache(cls, key, scope=None, object=None):
        target = cls._cache.setdefault(scope, {}) if scope is not None else cls._cache
        target[key] = object
cached
stepper.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { Component, ViewChild } from '@angular/core';
import { StepperComponent } from './stepper.component';
import { AbstractContentReplacerComponent } from '../../abstracts/abstract-content-swap/abstract-content-replacer.component';
import { StepToggleComponent } from './components/step-toggle/step-toggle.component';

@Component({
  template: `
    <tamu-gisc-step-toggle></tamu-gisc-step-toggle>
  `
})
class
{
  @ViewChild(StepToggleComponent, { static: true })
  public title: StepToggleComponent;
}

describe('StepperComponent', () => {
  let component: TestComponent;
  let fixture: ComponentFixture<TestComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [TestComponent, StepperComponent, AbstractContentReplacerComponent, StepToggleComponent]
    }).compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(TestComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
TestComponent
__init__.py
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from ._auto_rest_long_running_operation_test_service import AutoRestLongRunningOperationTestService
from ._version import VERSION

__version__ = VERSION
__all__ = ['AutoRestLongRunningOperationTestService']

try:
    from ._patch import patch_sdk  # type: ignore
    patch_sdk()
except ImportError:
    pass
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.